354 uint next_pidx() { return _next_pidx++; }
355
356 // Add nodes to ConnectionGraph.
357 void add_local_var(Node* n, PointsToNode::EscapeState es);
358 void add_java_object(Node* n, PointsToNode::EscapeState es);
359 void add_field(Node* n, PointsToNode::EscapeState es, int offset);
360 void add_arraycopy(Node* n, PointsToNode::EscapeState es, PointsToNode* src, PointsToNode* dst);
361
362 // Compute the escape state for arguments to a call.
363 void process_call_arguments(CallNode *call);
364
365 // Add PointsToNode node corresponding to a call
366 void add_call_node(CallNode* call);
367
368 // Map ideal node to existing PointsTo node (usually phantom_object).
void map_ideal_node(Node *n, PointsToNode* ptn) {
  // Only records a mapping to an already-existing PointsToNode; this
  // never creates one (callers typically pass phantom_object).
  assert(ptn != NULL, "only existing PointsTo node");
  // _nodes is indexed by the ideal node's _idx.
  _nodes.at_put(n->_idx, ptn);
}
373
374 // Utility function for nodes that load an object
375 void add_objload_to_connection_graph(Node *n, Unique_Node_List *delayed_worklist);
376 // Create PointsToNode node and add it to Connection Graph.
377 void add_node_to_connection_graph(Node *n, Unique_Node_List *delayed_worklist);
378
379 // Add final simple edges to graph.
380 void add_final_edges(Node *n);
381
382 // Finish Graph construction.
383 bool complete_connection_graph(GrowableArray<PointsToNode*>& ptnodes_worklist,
384 GrowableArray<JavaObjectNode*>& non_escaped_worklist,
385 GrowableArray<JavaObjectNode*>& java_objects_worklist,
386 GrowableArray<FieldNode*>& oop_fields_worklist);
387
388 #ifdef ASSERT
389 void verify_connection_graph(GrowableArray<PointsToNode*>& ptnodes_worklist,
390 GrowableArray<JavaObjectNode*>& non_escaped_worklist,
391 GrowableArray<JavaObjectNode*>& java_objects_worklist,
392 GrowableArray<Node*>& addp_worklist);
393 #endif
394
395 // Add all references to this JavaObject node.
494 return false; // already has phantom_obj base
495 }
496 from->set_has_unknown_base();
497 }
498 bool is_new = from->add_base(to);
499 assert(to != phantom_obj || is_new, "sanity");
500 if (is_new) { // New edge?
501 assert(!_verify, "graph is incomplete");
502 if (to == null_obj)
503 return is_new; // Don't add fields to NULL pointer.
504 if (to->is_JavaObject()) {
505 is_new = to->add_edge(from);
506 } else {
507 is_new = to->add_base_use(from);
508 }
509 assert(is_new, "use should be also new");
510 }
511 return is_new;
512 }
513
514 // Add LocalVar node and edge if possible
515 void add_local_var_and_edge(Node* n, PointsToNode::EscapeState es, Node* to,
516 Unique_Node_List *delayed_worklist) {
517 PointsToNode* ptn = ptnode_adr(to->_idx);
518 if (delayed_worklist != NULL) { // First iteration of CG construction
519 add_local_var(n, es);
520 if (ptn == NULL) {
521 delayed_worklist->push(n);
522 return; // Process it later.
523 }
524 } else {
525 assert(ptn != NULL, "node should be registered");
526 }
527 add_edge(ptnode_adr(n->_idx), ptn);
528 }
529 // Helper functions
530 bool is_oop_field(Node* n, int offset, bool* unsafe);
531 Node* get_addp_base(Node *addp);
532 static Node* find_second_addp(Node* addp, Node* n);
533 // offset of a field reference
534 int address_offset(Node* adr, PhaseTransform *phase);
535
536
537 // Propagate unique types created for unescaped allocated objects
538 // through the graph
539 void split_unique_types(GrowableArray<Node *> &alloc_worklist, GrowableArray<ArrayCopyNode*> &arraycopy_worklist);
540
541 // Helper methods for unique types split.
542 bool split_AddP(Node *addp, Node *base);
543
544 PhiNode *create_split_phi(PhiNode *orig_phi, int alias_idx, GrowableArray<PhiNode *> &orig_phi_worklist, bool &new_created);
545 PhiNode *split_memory_phi(PhiNode *orig_phi, int alias_idx, GrowableArray<PhiNode *> &orig_phi_worklist);
546
547 void move_inst_mem(Node* n, GrowableArray<PhiNode *> &orig_phis);
548 Node* find_inst_mem(Node* mem, int alias_idx,GrowableArray<PhiNode *> &orig_phi_worklist);
549 Node* step_through_mergemem(MergeMemNode *mmem, int alias_idx, const TypeOopPtr *toop);
550
551
557 // from this Phi
558 // MemNode - new memory input for this node
559 // CheckCastPP - allocation that this is a cast of
560 // allocation - CheckCastPP of the allocation
561
562 // manage entries in _node_map
563
void set_map(Node* from, Node* to) {
  // Remember 'from' — presumably so mapped ideal nodes can be iterated
  // later; confirm against the users of ideal_nodes.
  ideal_nodes.push(from);
  _node_map.map(from->_idx, to);
}
568
// Return the node recorded for 'idx' by set_map() (see _node_map).
Node* get_map(int idx) { return _node_map[idx]; }
570
571 PhiNode* get_map_phi(int idx) {
572 Node* phi = _node_map[idx];
573 return (phi == NULL) ? NULL : phi->as_Phi();
574 }
575
576 // Notify optimizer that a node has been modified
void record_for_optimizer(Node *n) {
  // Re-enqueue the modified node and all of its users for IGVN processing.
  _igvn->_worklist.push(n);
  _igvn->add_users_to_worklist(n);
}
581
582 // Compute the escape information
583 bool compute_escape();
584
585 public:
586 ConnectionGraph(Compile *C, PhaseIterGVN *igvn);
587
588 // Check for non-escaping candidates
589 static bool has_candidates(Compile *C);
590
591 // Perform escape analysis
592 static void do_analysis(Compile *C, PhaseIterGVN *igvn);
593
594 bool not_global_escape(Node *n);
595
596 #ifndef PRODUCT
597 void dump(GrowableArray<PointsToNode*>& ptnodes_worklist);
598 #endif
599 };
600
// Out-of-line constructor: needs ConnectionGraph's definition for
// comp_arena() and next_pidx(), so it lives after the class body.
inline PointsToNode::PointsToNode(ConnectionGraph *CG, Node* n, EscapeState es, NodeType type):
  _edges(CG->_compile->comp_arena(), 2, 0, NULL),
  _uses (CG->_compile->comp_arena(), 2, 0, NULL),
  _type((u1)type),
  _flags(ScalarReplaceable), // optimistic default; may be cleared during analysis
  _escape((u1)es),
  _fields_escape((u1)es),
  _node(n),
  _idx(n->_idx),              // ideal node's index
  _pidx(CG->next_pidx()) {    // unique per-graph PointsToNode index
  assert(n != NULL && es != UnknownEscape, "sanity");
}
613
614 #endif // SHARE_VM_OPTO_ESCAPE_HPP
|
354 uint next_pidx() { return _next_pidx++; }
355
356 // Add nodes to ConnectionGraph.
357 void add_local_var(Node* n, PointsToNode::EscapeState es);
358 void add_java_object(Node* n, PointsToNode::EscapeState es);
359 void add_field(Node* n, PointsToNode::EscapeState es, int offset);
360 void add_arraycopy(Node* n, PointsToNode::EscapeState es, PointsToNode* src, PointsToNode* dst);
361
362 // Compute the escape state for arguments to a call.
363 void process_call_arguments(CallNode *call);
364
365 // Add PointsToNode node corresponding to a call
366 void add_call_node(CallNode* call);
367
368 // Map ideal node to existing PointsTo node (usually phantom_object).
void map_ideal_node(Node *n, PointsToNode* ptn) {
  // Only records a mapping to an already-existing PointsToNode; this
  // never creates one (callers typically pass phantom_object).
  assert(ptn != NULL, "only existing PointsTo node");
  // _nodes is indexed by the ideal node's _idx.
  _nodes.at_put(n->_idx, ptn);
}
373
374 // Create PointsToNode node and add it to Connection Graph.
375 void add_node_to_connection_graph(Node *n, Unique_Node_List *delayed_worklist);
376
377 // Add final simple edges to graph.
378 void add_final_edges(Node *n);
379
380 // Finish Graph construction.
381 bool complete_connection_graph(GrowableArray<PointsToNode*>& ptnodes_worklist,
382 GrowableArray<JavaObjectNode*>& non_escaped_worklist,
383 GrowableArray<JavaObjectNode*>& java_objects_worklist,
384 GrowableArray<FieldNode*>& oop_fields_worklist);
385
386 #ifdef ASSERT
387 void verify_connection_graph(GrowableArray<PointsToNode*>& ptnodes_worklist,
388 GrowableArray<JavaObjectNode*>& non_escaped_worklist,
389 GrowableArray<JavaObjectNode*>& java_objects_worklist,
390 GrowableArray<Node*>& addp_worklist);
391 #endif
392
393 // Add all references to this JavaObject node.
492 return false; // already has phantom_obj base
493 }
494 from->set_has_unknown_base();
495 }
496 bool is_new = from->add_base(to);
497 assert(to != phantom_obj || is_new, "sanity");
498 if (is_new) { // New edge?
499 assert(!_verify, "graph is incomplete");
500 if (to == null_obj)
501 return is_new; // Don't add fields to NULL pointer.
502 if (to->is_JavaObject()) {
503 is_new = to->add_edge(from);
504 } else {
505 is_new = to->add_base_use(from);
506 }
507 assert(is_new, "use should be also new");
508 }
509 return is_new;
510 }
511
512 // Helper functions
513 bool is_oop_field(Node* n, int offset, bool* unsafe);
514 static Node* find_second_addp(Node* addp, Node* n);
515 // offset of a field reference
516 int address_offset(Node* adr, PhaseTransform *phase);
517
518
519 // Propagate unique types created for unescaped allocated objects
520 // through the graph
521 void split_unique_types(GrowableArray<Node *> &alloc_worklist, GrowableArray<ArrayCopyNode*> &arraycopy_worklist);
522
523 // Helper methods for unique types split.
524 bool split_AddP(Node *addp, Node *base);
525
526 PhiNode *create_split_phi(PhiNode *orig_phi, int alias_idx, GrowableArray<PhiNode *> &orig_phi_worklist, bool &new_created);
527 PhiNode *split_memory_phi(PhiNode *orig_phi, int alias_idx, GrowableArray<PhiNode *> &orig_phi_worklist);
528
529 void move_inst_mem(Node* n, GrowableArray<PhiNode *> &orig_phis);
530 Node* find_inst_mem(Node* mem, int alias_idx,GrowableArray<PhiNode *> &orig_phi_worklist);
531 Node* step_through_mergemem(MergeMemNode *mmem, int alias_idx, const TypeOopPtr *toop);
532
533
539 // from this Phi
540 // MemNode - new memory input for this node
541 // CheckCastPP - allocation that this is a cast of
542 // allocation - CheckCastPP of the allocation
543
544 // manage entries in _node_map
545
void set_map(Node* from, Node* to) {
  // Remember 'from' — presumably so mapped ideal nodes can be iterated
  // later; confirm against the users of ideal_nodes.
  ideal_nodes.push(from);
  _node_map.map(from->_idx, to);
}
550
// Return the node recorded for 'idx' by set_map() (see _node_map).
Node* get_map(int idx) { return _node_map[idx]; }
552
553 PhiNode* get_map_phi(int idx) {
554 Node* phi = _node_map[idx];
555 return (phi == NULL) ? NULL : phi->as_Phi();
556 }
557
558 // Notify optimizer that a node has been modified
559 void record_for_optimizer(Node *n);
560
561 // Compute the escape information
562 bool compute_escape();
563
564 public:
565 ConnectionGraph(Compile *C, PhaseIterGVN *igvn);
566
567 // Check for non-escaping candidates
568 static bool has_candidates(Compile *C);
569
570 // Perform escape analysis
571 static void do_analysis(Compile *C, PhaseIterGVN *igvn);
572
573 bool not_global_escape(Node *n);
574
575 // To be used by, e.g., BarrierSetC2 impls
576 Node* get_addp_base(Node* addp);
577
578 // Utility function for nodes that load an object
579 void add_objload_to_connection_graph(Node* n, Unique_Node_List* delayed_worklist);
580
581 // Add LocalVar node and edge if possible
582 void add_local_var_and_edge(Node* n, PointsToNode::EscapeState es, Node* to,
583 Unique_Node_List *delayed_worklist) {
584 PointsToNode* ptn = ptnode_adr(to->_idx);
585 if (delayed_worklist != NULL) { // First iteration of CG construction
586 add_local_var(n, es);
587 if (ptn == NULL) {
588 delayed_worklist->push(n);
589 return; // Process it later.
590 }
591 } else {
592 assert(ptn != NULL, "node should be registered");
593 }
594 add_edge(ptnode_adr(n->_idx), ptn);
595 }
596
597 void add_to_congraph_unsafe_access(Node* n, uint opcode, Unique_Node_List* delayed_worklist);
598 bool add_final_edges_unsafe_access(Node* n, uint opcode);
599
600 #ifndef PRODUCT
601 void dump(GrowableArray<PointsToNode*>& ptnodes_worklist);
602 #endif
603 };
604
// Out-of-line constructor: needs ConnectionGraph's definition for
// comp_arena() and next_pidx(), so it lives after the class body.
inline PointsToNode::PointsToNode(ConnectionGraph *CG, Node* n, EscapeState es, NodeType type):
  _edges(CG->_compile->comp_arena(), 2, 0, NULL),
  _uses (CG->_compile->comp_arena(), 2, 0, NULL),
  _type((u1)type),
  _flags(ScalarReplaceable), // optimistic default; may be cleared during analysis
  _escape((u1)es),
  _fields_escape((u1)es),
  _node(n),
  _idx(n->_idx),              // ideal node's index
  _pidx(CG->next_pidx()) {    // unique per-graph PointsToNode index
  assert(n != NULL && es != UnknownEscape, "sanity");
}
617
618 #endif // SHARE_VM_OPTO_ESCAPE_HPP
|