156 add_node_to_connection_graph(n, &delayed_worklist);
157 PointsToNode* ptn = ptnode_adr(n->_idx);
158 if (ptn != NULL && ptn != phantom_obj) {
159 ptnodes_worklist.append(ptn);
160 if (ptn->is_JavaObject()) {
161 java_objects_worklist.append(ptn->as_JavaObject());
162 if ((n->is_Allocate() || n->is_CallStaticJava()) &&
163 (ptn->escape_state() < PointsToNode::GlobalEscape)) {
164 // Only the results of allocations and java static calls are interesting.
165 non_escaped_worklist.append(ptn->as_JavaObject());
166 }
167 } else if (ptn->is_Field() && ptn->as_Field()->is_oop()) {
168 oop_fields_worklist.append(ptn->as_Field());
169 }
170 }
171 if (n->is_MergeMem()) {
172 // Collect all MergeMem nodes to add memory slices for
173 // scalar replaceable objects in split_unique_types().
174 _mergemem_worklist.append(n->as_MergeMem());
175 } else if (OptimizePtrCompare && n->is_Cmp() &&
176 ((n->Opcode() == Op_CmpP && ((CmpPNode*)n)->has_perturbed_operand() == NULL) ||
177 n->Opcode() == Op_CmpN)) {
178 // Collect pointer compare nodes.
179 ptr_cmp_worklist.append(n);
180 } else if (n->is_MemBarStoreStore()) {
181 // Collect all MemBarStoreStore nodes so that, depending on the
182 // escape status of the associated Allocate node, some of them
183 // may be eliminated.
184 storestore_worklist.append(n);
185 } else if (n->is_MemBar() && (n->Opcode() == Op_MemBarRelease) &&
186 (n->req() > MemBarNode::Precedent)) {
187 record_for_optimizer(n); // Revisit this MemBarRelease in the optimizer (IGVN) after escape analysis.
188 #ifdef ASSERT
189 } else if (n->is_AddP()) {
190 // Collect address nodes for graph verification.
191 addp_worklist.append(n);
192 #endif
193 } else if (n->is_ArrayCopy()) {
194 // Keep a list of ArrayCopy nodes so that, if one of their inputs is
195 // non-escaping, we can record a unique type.
196 arraycopy_worklist.append(n->as_ArrayCopy());
197 }
|
156 add_node_to_connection_graph(n, &delayed_worklist);
157 PointsToNode* ptn = ptnode_adr(n->_idx);
158 if (ptn != NULL && ptn != phantom_obj) {
159 ptnodes_worklist.append(ptn);
160 if (ptn->is_JavaObject()) {
161 java_objects_worklist.append(ptn->as_JavaObject());
162 if ((n->is_Allocate() || n->is_CallStaticJava()) &&
163 (ptn->escape_state() < PointsToNode::GlobalEscape)) {
164 // Only the results of allocations and java static calls are interesting.
165 non_escaped_worklist.append(ptn->as_JavaObject());
166 }
167 } else if (ptn->is_Field() && ptn->as_Field()->is_oop()) {
168 oop_fields_worklist.append(ptn->as_Field());
169 }
170 }
171 if (n->is_MergeMem()) {
172 // Collect all MergeMem nodes to add memory slices for
173 // scalar replaceable objects in split_unique_types().
174 _mergemem_worklist.append(n->as_MergeMem());
175 } else if (OptimizePtrCompare && n->is_Cmp() &&
176 (n->Opcode() == Op_CmpP || n->Opcode() == Op_CmpN)) {
177 // Collect pointer compare nodes.
178 ptr_cmp_worklist.append(n);
179 } else if (n->is_MemBarStoreStore()) {
180 // Collect all MemBarStoreStore nodes so that, depending on the
181 // escape status of the associated Allocate node, some of them
182 // may be eliminated.
183 storestore_worklist.append(n);
184 } else if (n->is_MemBar() && (n->Opcode() == Op_MemBarRelease) &&
185 (n->req() > MemBarNode::Precedent)) {
186 record_for_optimizer(n); // Revisit this MemBarRelease in the optimizer (IGVN) after escape analysis.
187 #ifdef ASSERT
188 } else if (n->is_AddP()) {
189 // Collect address nodes for graph verification.
190 addp_worklist.append(n);
191 #endif
192 } else if (n->is_ArrayCopy()) {
193 // Keep a list of ArrayCopy nodes so that, if one of their inputs is
194 // non-escaping, we can record a unique type.
195 arraycopy_worklist.append(n->as_ArrayCopy());
196 }