
src/hotspot/share/opto/parse1.cpp


--- old/src/hotspot/share/opto/parse1.cpp

1101       // In case of concurrent class loading, the type we set for the
1102       // ret_phi in build_exits() may have been too optimistic and the
1103       // ret_phi may be top now.
1104       // Otherwise, we've encountered an error and have to mark the method as
1105       // not compilable. Just using an assertion instead would be dangerous
1106       // as this could lead to an infinite compile loop in non-debug builds.
1107       {
1108         MutexLockerEx ml(Compile_lock, Mutex::_no_safepoint_check_flag);
1109         if (C->env()->system_dictionary_modification_counter_changed()) {
1110           C->record_failure(C2Compiler::retry_class_loading_during_parsing());
1111         } else {
1112           C->record_method_not_compilable("Can't determine return type.");
1113         }
1114       }
1115       return;
1116     }
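
The block above separates a transient failure from a permanent one: if the system dictionary changed while the method was being parsed, the optimistic type from build_exits() is merely stale and the compile is worth retrying; otherwise the method is marked not compilable so the compile broker never resubmits it. Below is a minimal standalone sketch of that decision, using invented CompileEnv/Bailout stand-ins rather than the real ciEnv/Compile API:

#include <cstdio>

// Invented stand-ins for ciEnv/Compile state; illustrative only.
struct CompileEnv {
  int counter_at_start;    // system dictionary counter when parsing began
  int current_counter;     // the VM's counter now
  bool modification_counter_changed() const {
    return current_counter != counter_at_start;
  }
};

enum class Bailout { Retry, NotCompilable };

// If the return phi unexpectedly collapsed to top, pick between a retryable
// failure (concurrent class loading invalidated the optimistic type) and
// permanently marking the method not compilable. An assert alone would be
// dangerous: in product builds the method would be resubmitted forever.
Bailout classify(const CompileEnv& env) {
  return env.modification_counter_changed() ? Bailout::Retry
                                            : Bailout::NotCompilable;
}

int main() {
  CompileEnv env = {41, 42};  // the dictionary changed during parsing
  std::printf("%s\n", classify(env) == Bailout::Retry ? "retry" : "not compilable");
  return 0;
}
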
1117     if (ret_type->isa_int()) {
1118       BasicType ret_bt = method()->return_type()->basic_type();
1119       ret_phi = mask_int_value(ret_phi, ret_bt, &_gvn);
1120     }
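
mask_int_value() exists because sub-int return values (boolean, byte, char, short) may carry garbage in their upper bits when they arrive as a 32-bit int; the value is normalized to the declared type before being pushed on the exit state. The real helper builds shift/and nodes through the GVN; this is a plain-integer sketch of the same semantics:

#include <cstdint>
#include <cstdio>

enum BasicType { T_BOOLEAN, T_BYTE, T_CHAR, T_SHORT, T_INT };

// The normalization mask_int_value() expresses with LShiftI/RShiftI/AndI
// nodes, written out on plain 32-bit integers.
int32_t mask_int_value(int32_t v, BasicType bt) {
  switch (bt) {
    case T_BOOLEAN: return v & 1;                  // AndI 0x1
    case T_BYTE:    return (int32_t)(int8_t)v;     // LShiftI 24, RShiftI 24
    case T_CHAR:    return (int32_t)(uint16_t)v;   // AndI 0xFFFF
    case T_SHORT:   return (int32_t)(int16_t)v;    // LShiftI 16, RShiftI 16
    default:        return v;                      // T_INT: unchanged
  }
}

int main() {
  std::printf("%d\n", mask_int_value(0x1FF, T_BYTE));  // prints -1: low byte 0xFF
  return 0;
}
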
1121     if (_caller->has_method() && ret_type->is_valuetypeptr()) {
1122       // Inlined methods return a ValueTypeNode
1123       _exits.push_node(T_VALUETYPE, ret_phi);
1124     } else {
1125       _exits.push_node(ret_type->basic_type(), ret_phi);
1126     }
1127   }
1128 
1129   // Note:  Logic for creating and optimizing the ReturnNode is in Compile.
1130 
1131   // Unlock along the exceptional paths.
1132   // This is done late so that we can common up equivalent exceptions
1133   // (e.g., null checks) arising from multiple points within this method.
1134   // See GraphKit::add_exception_state, which performs the commoning.
1135   bool do_synch = method()->is_synchronized() && GenerateSynchronizationCode;
1136 
1137   // Record exit from a method if compiled while DTrace is turned on.
1138   if (do_synch || C->env()->dtrace_method_probes() || _replaced_nodes_for_exceptions) {
1139     // First move the exception list out of _exits:
1140     GraphKit kit(_exits.transfer_exceptions_into_jvms());
1141     SafePointNode* normal_map = kit.map();  // keep this guy safe
1142     // Now re-collect the exceptions into _exits:
1143     SafePointNode* ex_map;
1144     while ((ex_map = kit.pop_exception_state()) != NULL) {
1145       Node* ex_oop = kit.use_exception_state(ex_map);
1146       // Force the exiting JVM state to have this method at InvocationEntryBci.
1147       // The exiting JVM state is otherwise a copy of the calling JVMS.
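
Unlocking late, as the comment above explains, pays off because GraphKit::add_exception_state merges equivalent exception states: every null check in the method can funnel into one exceptional exit, and the unlock/DTrace epilogue built in this loop is then emitted once per merged state instead of once per throw point. A container-level model of that commoning, with an invented (handler bci, exception kind) key standing in for the real merge criteria:

#include <cstdio>
#include <map>
#include <string>
#include <utility>
#include <vector>

// Throw points that reach the same handler with the same exception kind
// collapse into a single exit state instead of N duplicates.
struct ExState { std::vector<int> throwing_bcis; };

int main() {
  struct Throw { int bci; int handler_bci; std::string kind; };
  const Throw throws[] = {{3, 40, "NPE"}, {17, 40, "NPE"}, {25, 50, "OOB"}};

  std::map<std::pair<int, std::string>, ExState> commoned;
  for (const Throw& t : throws) {
    commoned[{t.handler_bci, t.kind}].throwing_bcis.push_back(t.bci);
  }
  std::printf("%zu exit states for 3 throw points\n", commoned.size());  // 2
  return 0;
}
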


2312 void Parse::return_current(Node* value) {
2313   if (RegisterFinalizersAtInit &&
2314       method()->intrinsic_id() == vmIntrinsics::_Object_init) {
2315     call_register_finalizer();
2316   }
2317 
2318   // Do not set_parse_bci, so that return goo is credited to the return insn.
2319   // vreturn can trigger an allocation, so vreturn can throw. Setting
2320   // the bci here breaks exception handling; leaving the call commented
2321   // out doesn't seem to break anything.
2322   //  set_bci(InvocationEntryBci);
2323   if (method()->is_synchronized() && GenerateSynchronizationCode) {
2324     shared_unlock(_synch_lock->box_node(), _synch_lock->obj_node());
2325   }
2326   if (C->env()->dtrace_method_probes()) {
2327     make_dtrace_method_exit(method());
2328   }
2329   // frame pointer is always same, already captured
2330   if (value != NULL) {
2331     Node* phi = _exits.argument(0);
2332     const TypeOopPtr* tr = phi->bottom_type()->isa_oopptr();
2333     if (tf()->returns_value_type_as_fields() && !_caller->has_method() && !value->is_ValueType()) {
2334       // TODO there should be a checkcast in between, right?
2335       value = ValueTypeNode::make_from_oop(this, value, phi->bottom_type()->is_valuetype()->value_klass());
2336     }
2337     if (value->is_ValueType() && !_caller->has_method()) {
2338       // Value type is returned as oop from root method
2339       if (tf()->returns_value_type_as_fields()) {
2340         // Make sure non-flattened value type fields are allocated


2341         value = value->as_ValueType()->allocate_fields(this);
2342       } else {
2343         // Make sure value type is allocated
2344         value = value->as_ValueType()->allocate(this)->get_oop();
2345       }




2346     } else if (tr && tr->isa_instptr() && tr->klass()->is_loaded() && tr->klass()->is_interface()) {
2347       // If returning oops to an interface-return, there is a silent free
2348       // cast from oop to interface allowed by the Verifier. Make it explicit here.
2349       const TypeInstPtr* tp = value->bottom_type()->isa_instptr();
2350       if (tp && tp->klass()->is_loaded() && !tp->klass()->is_interface()) {
2351         // sharpen the type eagerly; this eases certain assert checking
2352         if (tp->higher_equal(TypeInstPtr::NOTNULL)) {
2353           tr = tr->join_speculative(TypeInstPtr::NOTNULL)->is_instptr();
2354         }
2355         value = _gvn.transform(new CheckCastPPNode(0, value, tr));
2356       }
2357     } else {
2358       // Handle returns of oop arrays to an array-of-interface return
2359       const TypeInstPtr* phi_tip;
2360       const TypeInstPtr* val_tip;
2361       Type::get_arrays_base_elements(phi->bottom_type(), value->bottom_type(), &phi_tip, &val_tip);
2362       if (phi_tip != NULL && phi_tip->is_loaded() && phi_tip->klass()->is_interface() &&
2363           val_tip != NULL && val_tip->is_loaded() && !val_tip->klass()->is_interface()) {
2364         value = _gvn.transform(new CheckCastPPNode(0, value, phi->bottom_type()));
2365       }
2366     }
2367     phi->add_req(value);
2368   }
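
Both oop branches above repair the same verifier loophole: a method whose signature promises an interface (or an array of interfaces) may actually return any conforming oop with no checkcast in the bytecode, so the parser inserts an explicit CheckCastPPNode to keep the graph's types consistent with the signature. A toy model of that re-labeling, where MiniType and check_cast_pp are invented names rather than C2 API:

#include <cstdio>
#include <string>

// Toy stand-in for the type lattice: just a class name, an interface flag,
// and a non-null bit.
struct MiniType { std::string klass; bool is_interface; bool not_null; };

// Stand-in for _gvn.transform(new CheckCastPPNode(0, value, tr)): re-label
// the value with the declared interface type, keeping known non-nullness
// (the join_speculative(TypeInstPtr::NOTNULL) step above).
MiniType check_cast_pp(const MiniType& value, MiniType declared) {
  declared.not_null = declared.not_null || value.not_null;
  return declared;
}

int main() {
  MiniType val  = {"java/util/ArrayList", false, true};   // concrete, non-null
  MiniType decl = {"java/util/List",      true,  false};  // interface return
  if (decl.is_interface && !val.is_interface) {
    val = check_cast_pp(val, decl);  // make the verifier's silent cast explicit
  }
  std::printf("returned as %s, not_null=%d\n", val.klass.c_str(), (int)val.not_null);
  return 0;
}
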
2369 
2370   SafePointNode* exit_return = _exits.map();
2371   exit_return->in( TypeFunc::Control  )->add_req( control() );
2372   exit_return->in( TypeFunc::I_O      )->add_req( i_o    () );
2373   Node *mem = exit_return->in( TypeFunc::Memory   );
2374   for (MergeMemStream mms(mem->as_MergeMem(), merged_memory()); mms.next_non_empty2(); ) {
2375     if (mms.is_empty()) {
2376       // get a copy of the base memory, and patch just this one input
2377       const TypePtr* adr_type = mms.adr_type(C);
2378       Node* phi = mms.force_memory()->as_Phi()->slice_memory(adr_type);
2379       assert(phi->as_Phi()->region() == mms.base_memory()->in(0), "");
2380       gvn().set_type_bottom(phi);
2381       phi->del_req(phi->req()-1);  // prepare to re-patch
2382       mms.set_memory(phi);
2383     }
2384     mms.memory()->add_req(mms.memory2());
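
The loop above merges this return path's memory into the exit map one alias slice at a time. The subtle case is a slice the exit map has not split out yet: it is cloned from base memory, its placeholder input dropped, and the path's real state patched in. A container-level sketch of the same bookkeeping (slice names and shapes are invented; the real code walks MergeMem nodes via MergeMemStream):

#include <cstdio>
#include <map>
#include <string>
#include <utility>
#include <vector>

using Slice = std::string;
using Phi   = std::vector<int>;  // one memory input per return path

int main() {
  std::map<Slice, Phi> exit_mem = {{"<base>", {0}}};  // input 0: entry memory
  // One returning path's memory state, per slice it touched:
  const std::pair<Slice, int> path_mem[] = {{"<base>", 7}, {"field A.x", 9}};

  for (const auto& [slice, state] : path_mem) {
    auto it = exit_mem.find(slice);
    if (it == exit_mem.end()) {
      // slice_memory(): a new slice starts as a copy of the base phi ...
      it = exit_mem.emplace(slice, exit_mem["<base>"]).first;
      it->second.pop_back();      // del_req(): drop the input to re-patch
    }
    it->second.push_back(state);  // add_req(mms.memory2())
  }
  std::printf("exit map tracks %zu memory slices\n", exit_mem.size());  // 2
  return 0;
}
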




+++ new/src/hotspot/share/opto/parse1.cpp

1101       // In case of concurrent class loading, the type we set for the
1102       // ret_phi in build_exits() may have been too optimistic and the
1103       // ret_phi may be top now.
1104       // Otherwise, we've encountered an error and have to mark the method as
1105       // not compilable. Just using an assertion instead would be dangerous
1106       // as this could lead to an infinite compile loop in non-debug builds.
1107       {
1108         MutexLockerEx ml(Compile_lock, Mutex::_no_safepoint_check_flag);
1109         if (C->env()->system_dictionary_modification_counter_changed()) {
1110           C->record_failure(C2Compiler::retry_class_loading_during_parsing());
1111         } else {
1112           C->record_method_not_compilable("Can't determine return type.");
1113         }
1114       }
1115       return;
1116     }
1117     if (ret_type->isa_int()) {
1118       BasicType ret_bt = method()->return_type()->basic_type();
1119       ret_phi = mask_int_value(ret_phi, ret_bt, &_gvn);
1120     }




1121     _exits.push_node(ret_type->basic_type(), ret_phi);
1122   }

1123 
1124   // Note:  Logic for creating and optimizing the ReturnNode is in Compile.
1125 
1126   // Unlock along the exceptional paths.
1127   // This is done late so that we can common up equivalent exceptions
1128   // (e.g., null checks) arising from multiple points within this method.
1129   // See GraphKit::add_exception_state, which performs the commoning.
1130   bool do_synch = method()->is_synchronized() && GenerateSynchronizationCode;
1131 
1132   // Record exit from a method if compiled while DTrace is turned on.
1133   if (do_synch || C->env()->dtrace_method_probes() || _replaced_nodes_for_exceptions) {
1134     // First move the exception list out of _exits:
1135     GraphKit kit(_exits.transfer_exceptions_into_jvms());
1136     SafePointNode* normal_map = kit.map();  // keep this guy safe
1137     // Now re-collect the exceptions into _exits:
1138     SafePointNode* ex_map;
1139     while ((ex_map = kit.pop_exception_state()) != NULL) {
1140       Node* ex_oop = kit.use_exception_state(ex_map);
1141       // Force the exiting JVM state to have this method at InvocationEntryBci.
1142       // The exiting JVM state is otherwise a copy of the calling JVMS.


2307 void Parse::return_current(Node* value) {
2308   if (RegisterFinalizersAtInit &&
2309       method()->intrinsic_id() == vmIntrinsics::_Object_init) {
2310     call_register_finalizer();
2311   }
2312 
2313   // Do not set_parse_bci, so that return goo is credited to the return insn.
2314   // vreturn can trigger an allocation, so vreturn can throw. Setting
2315   // the bci here breaks exception handling; leaving the call commented
2316   // out doesn't seem to break anything.
2317   //  set_bci(InvocationEntryBci);
2318   if (method()->is_synchronized() && GenerateSynchronizationCode) {
2319     shared_unlock(_synch_lock->box_node(), _synch_lock->obj_node());
2320   }
2321   if (C->env()->dtrace_method_probes()) {
2322     make_dtrace_method_exit(method());
2323   }
2324   // frame pointer is always same, already captured
2325   if (value != NULL) {
2326     Node* phi = _exits.argument(0);
2327     const Type* return_type = phi->bottom_type();
2328     const TypeOopPtr* tr = return_type->isa_oopptr();
2329     if (return_type->isa_valuetype()) {
2330       // Value type is returned as fields, make sure it is scalarized
2331       if (!value->is_ValueType()) {
2332         value = ValueTypeNode::make_from_oop(this, value, return_type->is_valuetype()->value_klass());
2333       }
2334       if (!_caller->has_method()) {
2335         // Value type is returned as fields from root method, make
2336         // sure all non-flattened value type fields are allocated.
2337         assert(tf()->returns_value_type_as_fields(), "must be returned as fields");
2338         value = value->as_ValueType()->allocate_fields(this);



2339       }
2340     } else if (value->is_ValueType()) {
2341       // Value type is returned as oop, make sure it is allocated
2342       assert(tr && tr->can_be_value_type(), "must return a value type pointer");
2343       value = ValueTypePtrNode::make_from_value_type(this, value->as_ValueType());
2344     } else if (tr && tr->isa_instptr() && tr->klass()->is_loaded() && tr->klass()->is_interface()) {
2345       // If returning oops to an interface-return, there is a silent free
2346       // cast from oop to interface allowed by the Verifier. Make it explicit here.
2347       const TypeInstPtr* tp = value->bottom_type()->isa_instptr();
2348       if (tp && tp->klass()->is_loaded() && !tp->klass()->is_interface()) {
2349         // sharpen the type eagerly; this eases certain assert checking
2350         if (tp->higher_equal(TypeInstPtr::NOTNULL)) {
2351           tr = tr->join_speculative(TypeInstPtr::NOTNULL)->is_instptr();
2352         }
2353         value = _gvn.transform(new CheckCastPPNode(0, value, tr));
2354       }
2355     } else {
2356       // Handle returns of oop arrays to an array-of-interface return
2357       const TypeInstPtr* phi_tip;
2358       const TypeInstPtr* val_tip;
2359       Type::get_arrays_base_elements(return_type, value->bottom_type(), &phi_tip, &val_tip);
2360       if (phi_tip != NULL && phi_tip->is_loaded() && phi_tip->klass()->is_interface() &&
2361           val_tip != NULL && val_tip->is_loaded() && !val_tip->klass()->is_interface()) {
2362         value = _gvn.transform(new CheckCastPPNode(0, value, return_type));
2363       }
2364     }
2365     phi->add_req(value);
2366   }
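
The reworked branches make the two return conventions explicit: a value type whose static return type is known stays scalarized and travels as individual fields (with the root method additionally allocating any non-flattened fields for the interpreter), while a value type returned through an oop-typed signature is buffered on the heap first. As a loose C++ analogy only, not the real calling convention:

#include <cstdio>

// A value type's fields, scalarized.
struct Point { int x, y; };

// "Returned as fields": the caller receives the scalarized values directly,
// with no heap allocation on this path.
Point return_as_fields() { return {1, 2}; }

// "Returned as oop": the value must be buffered so a reference can be handed
// out (static storage stands in for a heap allocation here).
Point* return_as_oop() {
  static Point buffer;
  buffer = {1, 2};
  return &buffer;
}

int main() {
  Point p = return_as_fields();
  std::printf("%d %d %d\n", p.x, p.y, return_as_oop()->x);
  return 0;
}
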
2367 
2368   SafePointNode* exit_return = _exits.map();
2369   exit_return->in( TypeFunc::Control  )->add_req( control() );
2370   exit_return->in( TypeFunc::I_O      )->add_req( i_o    () );
2371   Node *mem = exit_return->in( TypeFunc::Memory   );
2372   for (MergeMemStream mms(mem->as_MergeMem(), merged_memory()); mms.next_non_empty2(); ) {
2373     if (mms.is_empty()) {
2374       // get a copy of the base memory, and patch just this one input
2375       const TypePtr* adr_type = mms.adr_type(C);
2376       Node* phi = mms.force_memory()->as_Phi()->slice_memory(adr_type);
2377       assert(phi->as_Phi()->region() == mms.base_memory()->in(0), "");
2378       gvn().set_type_bottom(phi);
2379       phi->del_req(phi->req()-1);  // prepare to re-patch
2380       mms.set_memory(phi);
2381     }
2382     mms.memory()->add_req(mms.memory2());

