src/hotspot/share/opto/compile.cpp

 753   Unique_Node_List for_igvn(comp_arena());
 754   set_for_igvn(&for_igvn);
 755 
 756   // GVN that will be run immediately on new nodes
 757   uint estimated_size = method()->code_size()*4+64;
 758   estimated_size = (estimated_size < MINIMUM_NODE_HASH ? MINIMUM_NODE_HASH : estimated_size);
 759   PhaseGVN gvn(node_arena(), estimated_size);
 760   set_initial_gvn(&gvn);
 761 
 762   print_inlining_init();
 763   { // Scope for timing the parser
 764     TracePhase tp("parse", &timers[_t_parser]);
 765 
 766     // Put top into the hash table ASAP.
 767     initial_gvn()->transform_no_reclaim(top());
 768 
 769     // Set up tf(), start(), and find a CallGenerator.
 770     CallGenerator* cg = NULL;
 771     if (is_osr_compilation()) {
 772       const TypeTuple *domain = StartOSRNode::osr_domain();
 773       const TypeTuple *range = TypeTuple::make_range(method()->signature());
 774       init_tf(TypeFunc::make(domain, range));
 775       StartNode* s = new StartOSRNode(root(), domain);
 776       initial_gvn()->set_type_bottom(s);
 777       init_start(s);
 778       cg = CallGenerator::for_osr(method(), entry_bci());
 779     } else {
 780       // Normal case.
 781       init_tf(TypeFunc::make(method()));
 782       StartNode* s = new StartNode(root(), tf()->domain_cc());
 783       initial_gvn()->set_type_bottom(s);
 784       init_start(s);
 785       if (method()->intrinsic_id() == vmIntrinsics::_Reference_get) {
 786         // With java.lang.ref.reference.get() we must go through the
 787         // intrinsic - even when get() is the root
 788         // method of the compile - so that, if necessary, the value in
 789         // the referent field of the reference object gets recorded by
 790         // the pre-barrier code.
 791         cg = find_intrinsic(method(), false);
 792       }
 793       if (cg == NULL) {




 753   Unique_Node_List for_igvn(comp_arena());
 754   set_for_igvn(&for_igvn);
 755 
 756   // GVN that will be run immediately on new nodes
 757   uint estimated_size = method()->code_size()*4+64;
 758   estimated_size = (estimated_size < MINIMUM_NODE_HASH ? MINIMUM_NODE_HASH : estimated_size);
 759   PhaseGVN gvn(node_arena(), estimated_size);
 760   set_initial_gvn(&gvn);
 761 
 762   print_inlining_init();
 763   { // Scope for timing the parser
 764     TracePhase tp("parse", &timers[_t_parser]);
 765 
 766     // Put top into the hash table ASAP.
 767     initial_gvn()->transform_no_reclaim(top());
 768 
 769     // Set up tf(), start(), and find a CallGenerator.
 770     CallGenerator* cg = NULL;
 771     if (is_osr_compilation()) {
 772       const TypeTuple *domain = StartOSRNode::osr_domain();
 773       const TypeTuple *range = TypeTuple::make_range(method());
 774       init_tf(TypeFunc::make(domain, range));
 775       StartNode* s = new StartOSRNode(root(), domain);
 776       initial_gvn()->set_type_bottom(s);
 777       init_start(s);
 778       cg = CallGenerator::for_osr(method(), entry_bci());
 779     } else {
 780       // Normal case.
 781       init_tf(TypeFunc::make(method()));
 782       StartNode* s = new StartNode(root(), tf()->domain_cc());
 783       initial_gvn()->set_type_bottom(s);
 784       init_start(s);
 785       if (method()->intrinsic_id() == vmIntrinsics::_Reference_get) {
 786         // With java.lang.ref.reference.get() we must go through the
 787         // intrinsic - even when get() is the root
 788         // method of the compile - so that, if necessary, the value in
 789         // the referent field of the reference object gets recorded by
 790         // the pre-barrier code.
 791         cg = find_intrinsic(method(), false);
 792       }
 793       if (cg == NULL) {

