547 if (failing() || entry_map == NULL) {
548 if (log) log->done("parse");
549 return;
550 }
551
552 Node_Notes* caller_nn = C->default_node_notes();
553 // Collect debug info for inlined calls unless -XX:-DebugInlinedCalls.
554 if (DebugInlinedCalls || depth() == 1) {
555 C->set_default_node_notes(make_node_notes(caller_nn));
556 }
557
558 if (is_osr_parse()) {
559 Node* osr_buf = entry_map->in(TypeFunc::Parms+0);
560 entry_map->set_req(TypeFunc::Parms+0, top());
561 set_map(entry_map);
562 load_interpreter_state(osr_buf);
563 } else {
564 set_map(entry_map);
565 do_method_entry();
566 }
567
568 // Check for bailouts during method entry.
569 if (failing()) {
570 if (log) log->done("parse");
571 C->set_default_node_notes(caller_nn);
572 return;
573 }
574
575 entry_map = map(); // capture any changes performed by method setup code
576 assert(jvms()->endoff() == map()->req(), "map matches JVMS layout");
577
578 // We begin parsing as if we have just encountered a jump to the
579 // method entry.
580 Block* entry_block = start_block();
581 assert(entry_block->start() == (is_osr_parse() ? osr_bci() : 0), "");
582 set_map_clone(entry_map);
583 merge_common(entry_block, entry_block->next_path_num());
584
585 #ifndef PRODUCT
586 BytecodeParseHistogram *parse_histogram_obj = new (C->env()->arena()) BytecodeParseHistogram(this, C);
1958 receiver);
1959 make_slow_call_ex(call, env()->Throwable_klass(), true);
1960
1961 Node* fast_io = call->in(TypeFunc::I_O);
1962 Node* fast_mem = call->in(TypeFunc::Memory);
1963 // These two phis are pre-filled with copies of the fast IO and Memory
1964 Node* io_phi = PhiNode::make(result_rgn, fast_io, Type::ABIO);
1965 Node* mem_phi = PhiNode::make(result_rgn, fast_mem, Type::MEMORY, TypePtr::BOTTOM);
1966
1967 result_rgn->init_req(2, control());
1968 io_phi ->init_req(2, i_o());
1969 mem_phi ->init_req(2, reset_memory());
1970
1971 set_all_memory( _gvn.transform(mem_phi) );
1972 set_i_o( _gvn.transform(io_phi) );
1973 }
1974
1975 set_control( _gvn.transform(result_rgn) );
1976 }
1977
1978 //------------------------------return_current---------------------------------
1979 // Append current _map to _exit_return
1980 void Parse::return_current(Node* value) {
1981 if (RegisterFinalizersAtInit &&
1982 method()->intrinsic_id() == vmIntrinsics::_Object_init) {
1983 call_register_finalizer();
1984 }
1985
1986 // Do not set_parse_bci, so that return goo is credited to the return insn.
1987 set_bci(InvocationEntryBci);
1988 if (method()->is_synchronized() && GenerateSynchronizationCode) {
1989 shared_unlock(_synch_lock->box_node(), _synch_lock->obj_node());
1990 }
1991 if (C->env()->dtrace_method_probes()) {
1992 make_dtrace_method_exit(method());
1993 }
1994 SafePointNode* exit_return = _exits.map();
1995 exit_return->in( TypeFunc::Control )->add_req( control() );
1996 exit_return->in( TypeFunc::I_O )->add_req( i_o () );
1997 Node *mem = exit_return->in( TypeFunc::Memory );
|
547 if (failing() || entry_map == NULL) {
548 if (log) log->done("parse");
549 return;
550 }
551
552 Node_Notes* caller_nn = C->default_node_notes();
553 // Collect debug info for inlined calls unless -XX:-DebugInlinedCalls.
554 if (DebugInlinedCalls || depth() == 1) {
555 C->set_default_node_notes(make_node_notes(caller_nn));
556 }
557
558 if (is_osr_parse()) {
559 Node* osr_buf = entry_map->in(TypeFunc::Parms+0);
560 entry_map->set_req(TypeFunc::Parms+0, top());
561 set_map(entry_map);
562 load_interpreter_state(osr_buf);
563 } else {
564 set_map(entry_map);
565 do_method_entry();
566 }
567 if (depth() == 1) {
568 // Add check to deoptimize the nmethod if RTM state was changed
569 rtm_deopt();
570 }
571
572 // Check for bailouts during method entry.
573 if (failing()) {
574 if (log) log->done("parse");
575 C->set_default_node_notes(caller_nn);
576 return;
577 }
578
579 entry_map = map(); // capture any changes performed by method setup code
580 assert(jvms()->endoff() == map()->req(), "map matches JVMS layout");
581
582 // We begin parsing as if we have just encountered a jump to the
583 // method entry.
584 Block* entry_block = start_block();
585 assert(entry_block->start() == (is_osr_parse() ? osr_bci() : 0), "");
586 set_map_clone(entry_map);
587 merge_common(entry_block, entry_block->next_path_num());
588
589 #ifndef PRODUCT
590 BytecodeParseHistogram *parse_histogram_obj = new (C->env()->arena()) BytecodeParseHistogram(this, C);
1962 receiver);
1963 make_slow_call_ex(call, env()->Throwable_klass(), true);
1964
1965 Node* fast_io = call->in(TypeFunc::I_O);
1966 Node* fast_mem = call->in(TypeFunc::Memory);
1967 // These two phis are pre-filled with copies of the fast IO and Memory
1968 Node* io_phi = PhiNode::make(result_rgn, fast_io, Type::ABIO);
1969 Node* mem_phi = PhiNode::make(result_rgn, fast_mem, Type::MEMORY, TypePtr::BOTTOM);
1970
1971 result_rgn->init_req(2, control());
1972 io_phi ->init_req(2, i_o());
1973 mem_phi ->init_req(2, reset_memory());
1974
1975 set_all_memory( _gvn.transform(mem_phi) );
1976 set_i_o( _gvn.transform(io_phi) );
1977 }
1978
1979 set_control( _gvn.transform(result_rgn) );
1980 }
1981
1982 // Add check to deoptimize if RTM state is not ProfileRTM
// Emitted once, at the entry of the top-level compiled method (depth() == 1,
// asserted below), when RTM profiling is active. It loads the current
// rtm_state from the method's MethodData and deoptimizes with
// Action_make_not_entrant if the state has changed away from ProfileRTM,
// so the method can be recompiled using the newly recorded RTM state.
1983 void Parse::rtm_deopt() {
1984 #if INCLUDE_RTM_OPT
1985 if (C->profile_rtm()) {
1986 assert(C->method() != NULL, "only for normal compilations");
1987 assert(!C->method()->method_data()->is_empty(), "MDO is needed to record RTM state");
1988 assert(depth() == 1, "generate check only for main compiled method");
1989
1990 // Set starting bci for uncommon trap.
// Debug info for the trap is credited to the method entry (or the OSR entry
// bci when compiling an OSR method), not to whatever bci was current.
1991 set_parse_bci(is_osr_parse() ? osr_bci() : 0);
1992
1993 // Load the rtm_state from the MethodData.
// The MDO is a metadata constant; the state is an int field at a fixed
// offset inside it, read with an unordered (plain) load.
1994 const TypePtr* adr_type = TypeMetadataPtr::make(C->method()->method_data());
1995 Node* mdo = makecon(adr_type);
1996 int offset = MethodData::rtm_state_offset_in_bytes();
1997 Node* adr_node = basic_plus_adr(mdo, mdo, offset);
1998 Node* rtm_state = make_load(control(), adr_node, TypeInt::INT, T_INT, adr_type, MemNode::unordered);
1999
2000 // Separate Load from Cmp by Opaque.
2001 // In expand_macro_nodes() it will be replaced either
2002 // with this load when there are locks in the code
2003 // or with ProfileRTM (cmp->in(2)) otherwise so that
2004 // the check will fold.
2005 Node* profile_state = makecon(TypeInt::make(ProfileRTM));
2006 Node* opq = _gvn.transform( new (C) Opaque3Node(C, rtm_state, Opaque3Node::RTM_OPT) );
2007 Node* chk = _gvn.transform( new (C) CmpINode(opq, profile_state) );
2008 Node* tst = _gvn.transform( new (C) BoolNode(chk, BoolTest::eq) );
2009 // Branch to failure if state was changed
// BuildCutout keeps parsing on the tst-true (state still ProfileRTM) path,
// biased PROB_ALWAYS; the scoped block below supplies the rarely-taken
// failure path, which traps and forces recompilation.
2010 { BuildCutout unless(this, tst, PROB_ALWAYS);
2011 uncommon_trap(Deoptimization::Reason_rtm_state_change,
2012 Deoptimization::Action_make_not_entrant);
2013 }
2014 }
2015 #endif
2016 }
2017
2018 //------------------------------return_current---------------------------------
2019 // Append current _map to _exit_return
2020 void Parse::return_current(Node* value) {
2021 if (RegisterFinalizersAtInit &&
2022 method()->intrinsic_id() == vmIntrinsics::_Object_init) {
2023 call_register_finalizer();
2024 }
2025
2026 // Do not set_parse_bci, so that return goo is credited to the return insn.
2027 set_bci(InvocationEntryBci);
2028 if (method()->is_synchronized() && GenerateSynchronizationCode) {
2029 shared_unlock(_synch_lock->box_node(), _synch_lock->obj_node());
2030 }
2031 if (C->env()->dtrace_method_probes()) {
2032 make_dtrace_method_exit(method());
2033 }
2034 SafePointNode* exit_return = _exits.map();
2035 exit_return->in( TypeFunc::Control )->add_req( control() );
2036 exit_return->in( TypeFunc::I_O )->add_req( i_o () );
2037 Node *mem = exit_return->in( TypeFunc::Memory );
|