src/hotspot/share/opto/parse1.cpp

old version:

 567   Node_Notes* caller_nn = C->default_node_notes();
 568   // Collect debug info for inlined calls unless -XX:-DebugInlinedCalls.
 569   if (DebugInlinedCalls || depth() == 1) {
 570     C->set_default_node_notes(make_node_notes(caller_nn));
 571   }
 572 
 573   if (is_osr_parse()) {
 574     Node* osr_buf = entry_map->in(TypeFunc::Parms+0);
 575     entry_map->set_req(TypeFunc::Parms+0, top());
 576     set_map(entry_map);
 577     load_interpreter_state(osr_buf);
 578   } else {
 579     set_map(entry_map);
 580     do_method_entry();
 581     if (depth() == 1 && C->age_code()) {
 582       decrement_age();
 583     }
 584   }
 585 
 586   if (depth() == 1 && !failing()) {
 587     // Add check to deoptimize the nmethod if RTM state was changed
 588     rtm_deopt();
 589   }
 590 
 591   // Check for bailouts during method entry or RTM state check setup.
 592   if (failing()) {
 593     if (log)  log->done("parse");
 594     C->set_default_node_notes(caller_nn);
 595     return;
 596   }
 597 
 598   entry_map = map();  // capture any changes performed by method setup code
 599   assert(jvms()->endoff() == map()->req(), "map matches JVMS layout");
 600 
 601   // We begin parsing as if we have just encountered a jump to the
 602   // method entry.
 603   Block* entry_block = start_block();
 604   assert(entry_block->start() == (is_osr_parse() ? osr_bci() : 0), "");
 605   set_map_clone(entry_map);
 606   merge_common(entry_block, entry_block->next_path_num());


2085                                    receiver);
2086     make_slow_call_ex(call, env()->Throwable_klass(), true);
2087 
2088     Node* fast_io  = call->in(TypeFunc::I_O);
2089     Node* fast_mem = call->in(TypeFunc::Memory);
 2090     // These two phis are pre-filled with copies of the fast IO and Memory
2091     Node* io_phi   = PhiNode::make(result_rgn, fast_io,  Type::ABIO);
2092     Node* mem_phi  = PhiNode::make(result_rgn, fast_mem, Type::MEMORY, TypePtr::BOTTOM);
2093 
2094     result_rgn->init_req(2, control());
2095     io_phi    ->init_req(2, i_o());
2096     mem_phi   ->init_req(2, reset_memory());
2097 
2098     set_all_memory( _gvn.transform(mem_phi) );
2099     set_i_o(        _gvn.transform(io_phi) );
2100   }
2101 
2102   set_control( _gvn.transform(result_rgn) );
2103 }
 2104
2105 // Add check to deoptimize if RTM state is not ProfileRTM
2106 void Parse::rtm_deopt() {
2107 #if INCLUDE_RTM_OPT
2108   if (C->profile_rtm()) {
2109     assert(C->method() != NULL, "only for normal compilations");
2110     assert(!C->method()->method_data()->is_empty(), "MDO is needed to record RTM state");
2111     assert(depth() == 1, "generate check only for main compiled method");
2112 
2113     // Set starting bci for uncommon trap.
2114     set_parse_bci(is_osr_parse() ? osr_bci() : 0);
2115 
2116     // Load the rtm_state from the MethodData.
2117     const TypePtr* adr_type = TypeMetadataPtr::make(C->method()->method_data());
2118     Node* mdo = makecon(adr_type);
2119     int offset = MethodData::rtm_state_offset_in_bytes();
2120     Node* adr_node = basic_plus_adr(mdo, mdo, offset);
2121     Node* rtm_state = make_load(control(), adr_node, TypeInt::INT, T_INT, adr_type, MemNode::unordered);
2122 
2123     // Separate Load from Cmp by Opaque.
2124     // In expand_macro_nodes() it will be replaced either
2125     // with this load when there are locks in the code
2126     // or with ProfileRTM (cmp->in(2)) otherwise so that
2127     // the check will fold.
2128     Node* profile_state = makecon(TypeInt::make(ProfileRTM));
 2129     Node* opq   = _gvn.transform( new Opaque3Node(C, rtm_state, Opaque3Node::RTM_OPT) );

new version:

 567   Node_Notes* caller_nn = C->default_node_notes();
 568   // Collect debug info for inlined calls unless -XX:-DebugInlinedCalls.
 569   if (DebugInlinedCalls || depth() == 1) {
 570     C->set_default_node_notes(make_node_notes(caller_nn));
 571   }
 572 
 573   if (is_osr_parse()) {
 574     Node* osr_buf = entry_map->in(TypeFunc::Parms+0);
 575     entry_map->set_req(TypeFunc::Parms+0, top());
 576     set_map(entry_map);
 577     load_interpreter_state(osr_buf);
 578   } else {
 579     set_map(entry_map);
 580     do_method_entry();
 581     if (depth() == 1 && C->age_code()) {
 582       decrement_age();
 583     }
 584   }
 585 
 586   if (depth() == 1 && !failing()) {
 587     if (C->clinit_barrier_on_entry()) {
 588       // Add check to deoptimize the nmethod once the holder class is fully initialized
 589       clinit_deopt();
 590     }
 591 
 592     // Add check to deoptimize the nmethod if RTM state was changed
 593     rtm_deopt();
 594   }
 595 
 596   // Check for bailouts during method entry or RTM state check setup.
 597   if (failing()) {
 598     if (log)  log->done("parse");
 599     C->set_default_node_notes(caller_nn);
 600     return;
 601   }
 602 
 603   entry_map = map();  // capture any changes performed by method setup code
 604   assert(jvms()->endoff() == map()->req(), "map matches JVMS layout");
 605 
 606   // We begin parsing as if we have just encountered a jump to the
 607   // method entry.
 608   Block* entry_block = start_block();
 609   assert(entry_block->start() == (is_osr_parse() ? osr_bci() : 0), "");
 610   set_map_clone(entry_map);
 611   merge_common(entry_block, entry_block->next_path_num());
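
The last two lines treat method entry as just another jump into the entry block: the cloned entry map is merged in as one more predecessor path via next_path_num(). A toy standalone model of that path accounting (plain C++, not HotSpot code; the names and 1-based numbering are illustrative assumptions):

// Hypothetical standalone model (not HotSpot code): a parser block counts
// the predecessor paths merged into it; method entry simply claims the
// next path number, exactly as an explicit jump to the block would.
#include <cstdio>

struct Block {
  int start_bci;
  int merged_paths = 0;
  int next_path_num() { return ++merged_paths; }  // next 1-based path index
};

int main() {
  Block entry_block{/*start_bci=*/0};
  int path = entry_block.next_path_num();  // the synthetic "jump" to bci 0
  std::printf("entry merged as path %d\n", path);
  return 0;
}
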


2090                                    receiver);
2091     make_slow_call_ex(call, env()->Throwable_klass(), true);
2092 
2093     Node* fast_io  = call->in(TypeFunc::I_O);
2094     Node* fast_mem = call->in(TypeFunc::Memory);
 2095     // These two phis are pre-filled with copies of the fast IO and Memory
2096     Node* io_phi   = PhiNode::make(result_rgn, fast_io,  Type::ABIO);
2097     Node* mem_phi  = PhiNode::make(result_rgn, fast_mem, Type::MEMORY, TypePtr::BOTTOM);
2098 
2099     result_rgn->init_req(2, control());
2100     io_phi    ->init_req(2, i_o());
2101     mem_phi   ->init_req(2, reset_memory());
2102 
2103     set_all_memory( _gvn.transform(mem_phi) );
2104     set_i_o(        _gvn.transform(io_phi) );
2105   }
2106 
2107   set_control( _gvn.transform(result_rgn) );
2108 }
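
For context on the hunk above: the Region/Phi idiom is C2's standard merge of a fast path and a slow call. PhiNode::make pre-fills every input with the fast-path value, and init_req(2, ...) then overwrites the slow-call path, so each Phi yields whichever value corresponds to the predecessor control actually arrived on. A minimal standalone model of that selection (plain C++, not HotSpot code; the values are illustrative):

// Hypothetical standalone model (not HotSpot code) of the Region/Phi merge:
// a Phi holds one value per predecessor path and yields the value for the
// path control actually arrived on.
#include <cassert>
#include <vector>

struct Phi {
  std::vector<int> in;  // in[1] = fast path, in[2] = slow path
  Phi(int npaths, int fast_value) : in(npaths + 1, fast_value) {}  // pre-filled
  void init_req(int path, int value) { in[path] = value; }
  int value_on(int path) const { return in[path]; }
};

int main() {
  Phi io_phi(/*npaths=*/2, /*fast_io=*/10);  // both paths start as the fast value
  io_phi.init_req(2, /*slow_io=*/20);        // slow-call path overwrites input 2
  assert(io_phi.value_on(1) == 10);          // control came through the fast path
  assert(io_phi.value_on(2) == 20);          // control came through the slow call
  return 0;
}
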
2109 
2110 // Add check to deoptimize once holder klass is fully initialized.
2111 void Parse::clinit_deopt() {
2112   assert(C->has_method(), "only for normal compilations");
2113   assert(depth() == 1, "only for main compiled method");
2114   assert(is_normal_parse(), "no barrier needed on osr entry");
2115   assert(method()->needs_clinit_barrier(), "barrier not needed");
2116   assert(method()->holder()->is_being_initialized(), "barrier not needed");
2117 
2118   set_parse_bci(0);
2119 
2120   Node* holder = makecon(TypeKlassPtr::make(method()->holder()));
2121   int init_state_off = in_bytes(InstanceKlass::init_state_offset());
2122   const Type* t = TypeKlassPtr::make(method()->holder(), init_state_off);
2123   Node* adr = basic_plus_adr(top(), holder, init_state_off);
2124   Node* init_state = make_load(control(), adr, TypeInt::BYTE, T_BYTE, MemNode::unordered);
2125 
2126   Node* fully_initialized_state = makecon(TypeInt::make(InstanceKlass::fully_initialized));
2127 
2128   Node* chk = gvn().transform(new CmpINode(init_state, fully_initialized_state));
2129   Node* tst = gvn().transform(new BoolNode(chk, BoolTest::ne));
2130 
2131   { BuildCutout unless(this, tst, PROB_MAX);
2132     uncommon_trap(Deoptimization::Reason_initialized, Deoptimization::Action_reinterpret);
2133   }
2134 }
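
Semantically, the barrier built by clinit_deopt() loads the holder's init_state on every entry into the compiled method and takes an uncommon trap (Reason_initialized, Action_reinterpret) whenever the class is not yet fully initialized, falling back to the interpreter, which performs the full initialization checks. A rough standalone model of the runtime behavior (plain C++; the two-state enum is a simplification, not HotSpot's actual encoding):

// Hypothetical standalone model (not HotSpot code) of the emitted check.
#include <cstdio>

enum InitState { being_initialized, fully_initialized };  // simplified

struct Klass { InitState init_state; };

// CmpI(init_state, fully_initialized) + BoolTest::ne under BuildCutout:
// the "ne" outcome is the improbable path and traps back to the interpreter.
bool entry_needs_deopt(const Klass* holder) {
  return holder->init_state != fully_initialized;
}

int main() {
  Klass k{being_initialized};
  std::printf("%d\n", entry_needs_deopt(&k));  // 1: deoptimize, run interpreted
  k.init_state = fully_initialized;
  std::printf("%d\n", entry_needs_deopt(&k));  // 0: keep running compiled code
  return 0;
}
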
2135 
2136 // Add check to deoptimize if RTM state is not ProfileRTM
2137 void Parse::rtm_deopt() {
2138 #if INCLUDE_RTM_OPT
2139   if (C->profile_rtm()) {
2140     assert(C->has_method(), "only for normal compilations");
2141     assert(!C->method()->method_data()->is_empty(), "MDO is needed to record RTM state");
2142     assert(depth() == 1, "generate check only for main compiled method");
2143 
2144     // Set starting bci for uncommon trap.
2145     set_parse_bci(is_osr_parse() ? osr_bci() : 0);
2146 
2147     // Load the rtm_state from the MethodData.
2148     const TypePtr* adr_type = TypeMetadataPtr::make(C->method()->method_data());
2149     Node* mdo = makecon(adr_type);
2150     int offset = MethodData::rtm_state_offset_in_bytes();
2151     Node* adr_node = basic_plus_adr(mdo, mdo, offset);
2152     Node* rtm_state = make_load(control(), adr_node, TypeInt::INT, T_INT, adr_type, MemNode::unordered);
2153 
2154     // Separate Load from Cmp by Opaque.
2155     // In expand_macro_nodes() it will be replaced either
2156     // with this load when there are locks in the code
2157     // or with ProfileRTM (cmp->in(2)) otherwise so that
2158     // the check will fold.
2159     Node* profile_state = makecon(TypeInt::make(ProfileRTM));
2160     Node* opq   = _gvn.transform( new Opaque3Node(C, rtm_state, Opaque3Node::RTM_OPT) );
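
The comment above rtm_deopt()'s compare is the heart of the Opaque3 trick: the Opaque node keeps igvn from folding the check prematurely, and expand_macro_nodes() later replaces it either with the real rtm_state load (when the method has locks) or with the ProfileRTM constant, in which case the compare is constant-vs-constant and the trap branch folds away. A tiny standalone illustration of why the substitution kills the check (plain C++; the constant's value is an illustrative assumption):

// Hypothetical standalone model (not HotSpot code) of the fold.
constexpr int ProfileRTM = 0;  // illustrative encoding only

int main() {
  // No locks: macro expansion rewires Opaque3 to the ProfileRTM constant,
  // so the "state changed?" compare is trivially false and the branch dies.
  static_assert(!(ProfileRTM != ProfileRTM), "check folds away");

  // With locks: the real loaded rtm_state stays, so the check remains live.
  int rtm_state = 1;  // pretend the MDO recorded a state change
  return (rtm_state != ProfileRTM) ? 1 : 0;
}
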

