src/share/vm/opto/library_call.cpp
Webrev side-by-side diff (sdiff) for OpenJDK changeset 8014013, file src/share/vm/opto/library_call.cpp.
The first listing below shows the OLD version of the affected region; the second listing shows the NEW version (which adds vtable_index range asserts).




3712       generate_arraycopy(TypeAryPtr::OOPS, T_OBJECT,
3713                          original, start, newcopy, intcon(0), moved,
3714                          disjoint_bases, length_never_negative);
3715     }
3716   } // original reexecute is set back here
3717 
3718   C->set_has_split_ifs(true); // Has chance for split-if optimization
3719   if (!stopped()) {
3720     set_result(newcopy);
3721   }
3722   return true;
3723 }
3724 
3725 
3726 //----------------------generate_virtual_guard---------------------------
3727 // Helper for hashCode and clone.  Peeks inside the vtable to avoid a call.
3728 Node* LibraryCallKit::generate_virtual_guard(Node* obj_klass,
3729                                              RegionNode* slow_region) {
3730   ciMethod* method = callee();
3731   int vtable_index = method->vtable_index();


3732   // Get the Method* out of the appropriate vtable entry.
3733   int entry_offset  = (InstanceKlass::vtable_start_offset() +
3734                      vtable_index*vtableEntry::size()) * wordSize +
3735                      vtableEntry::method_offset_in_bytes();
3736   Node* entry_addr  = basic_plus_adr(obj_klass, entry_offset);
3737   Node* target_call = make_load(NULL, entry_addr, TypePtr::NOTNULL, T_ADDRESS);
3738 
3739   // Compare the target method with the expected method (e.g., Object.hashCode).
3740   const TypePtr* native_call_addr = TypeMetadataPtr::make(method);
3741 
3742   Node* native_call = makecon(native_call_addr);
3743   Node* chk_native  = _gvn.transform(new(C) CmpPNode(target_call, native_call));
3744   Node* test_native = _gvn.transform(new(C) BoolNode(chk_native, BoolTest::ne));
3745 
3746   return generate_slow_guard(test_native, slow_region);
3747 }
3748 
3749 //-----------------------generate_method_call----------------------------
3750 // Use generate_method_call to make a slow-call to the real
3751 // method if the fast path fails.  An alternative would be to


3762   // ensure the JVMS we have will be correct for this call
3763   guarantee(method_id == method->intrinsic_id(), "must match");
3764 
3765   const TypeFunc* tf = TypeFunc::make(method);
3766   CallJavaNode* slow_call;
3767   if (is_static) {
3768     assert(!is_virtual, "");
3769     slow_call = new(C) CallStaticJavaNode(C, tf,
3770                            SharedRuntime::get_resolve_static_call_stub(),
3771                            method, bci());
3772   } else if (is_virtual) {
3773     null_check_receiver();
3774     int vtable_index = Method::invalid_vtable_index;
3775     if (UseInlineCaches) {
3776       // Suppress the vtable call
3777     } else {
3778       // hashCode and clone are not a miranda methods,
3779       // so the vtable index is fixed.
3780       // No need to use the linkResolver to get it.
3781        vtable_index = method->vtable_index();


3782     }
3783     slow_call = new(C) CallDynamicJavaNode(tf,
3784                           SharedRuntime::get_resolve_virtual_call_stub(),
3785                           method, vtable_index, bci());
3786   } else {  // neither virtual nor static:  opt_virtual
3787     null_check_receiver();
3788     slow_call = new(C) CallStaticJavaNode(C, tf,
3789                                 SharedRuntime::get_resolve_opt_virtual_call_stub(),
3790                                 method, bci());
3791     slow_call->set_optimized_virtual(true);
3792   }
3793   set_arguments_for_java_call(slow_call);
3794   set_edges_for_java_call(slow_call);
3795   return slow_call;
3796 }
3797 
3798 
3799 //------------------------------inline_native_hashcode--------------------
3800 // Build special case code for calls to hashCode on an object.
3801 bool LibraryCallKit::inline_native_hashcode(bool is_virtual, bool is_static) {




3712       generate_arraycopy(TypeAryPtr::OOPS, T_OBJECT,
3713                          original, start, newcopy, intcon(0), moved,
3714                          disjoint_bases, length_never_negative);
3715     }
3716   } // original reexecute is set back here
3717 
3718   C->set_has_split_ifs(true); // Has chance for split-if optimization
3719   if (!stopped()) {
3720     set_result(newcopy);
3721   }
3722   return true;
3723 }
3724 
3725 
3726 //----------------------generate_virtual_guard---------------------------
3727 // Helper for hashCode and clone.  Peeks inside the vtable to avoid a call.
3728 Node* LibraryCallKit::generate_virtual_guard(Node* obj_klass,
3729                                              RegionNode* slow_region) {
3730   ciMethod* method = callee();
3731   int vtable_index = method->vtable_index();
3732   assert(vtable_index >= 0 || vtable_index == Method::nonvirtual_vtable_index,
3733          err_msg_res("bad index %d", vtable_index));
3734   // Get the Method* out of the appropriate vtable entry.
3735   int entry_offset  = (InstanceKlass::vtable_start_offset() +
3736                      vtable_index*vtableEntry::size()) * wordSize +
3737                      vtableEntry::method_offset_in_bytes();
3738   Node* entry_addr  = basic_plus_adr(obj_klass, entry_offset);
3739   Node* target_call = make_load(NULL, entry_addr, TypePtr::NOTNULL, T_ADDRESS);
3740 
3741   // Compare the target method with the expected method (e.g., Object.hashCode).
3742   const TypePtr* native_call_addr = TypeMetadataPtr::make(method);
3743 
3744   Node* native_call = makecon(native_call_addr);
3745   Node* chk_native  = _gvn.transform(new(C) CmpPNode(target_call, native_call));
3746   Node* test_native = _gvn.transform(new(C) BoolNode(chk_native, BoolTest::ne));
3747 
3748   return generate_slow_guard(test_native, slow_region);
3749 }
3750 
3751 //-----------------------generate_method_call----------------------------
3752 // Use generate_method_call to make a slow-call to the real
3753 // method if the fast path fails.  An alternative would be to


3764   // ensure the JVMS we have will be correct for this call
3765   guarantee(method_id == method->intrinsic_id(), "must match");
3766 
3767   const TypeFunc* tf = TypeFunc::make(method);
3768   CallJavaNode* slow_call;
3769   if (is_static) {
3770     assert(!is_virtual, "");
3771     slow_call = new(C) CallStaticJavaNode(C, tf,
3772                            SharedRuntime::get_resolve_static_call_stub(),
3773                            method, bci());
3774   } else if (is_virtual) {
3775     null_check_receiver();
3776     int vtable_index = Method::invalid_vtable_index;
3777     if (UseInlineCaches) {
3778       // Suppress the vtable call
3779     } else {
3780       // hashCode and clone are not a miranda methods,
3781       // so the vtable index is fixed.
3782       // No need to use the linkResolver to get it.
3783        vtable_index = method->vtable_index();
3784        assert(vtable_index >= 0 || vtable_index == Method::nonvirtual_vtable_index,
3785               err_msg_res("bad index %d", vtable_index));
3786     }
3787     slow_call = new(C) CallDynamicJavaNode(tf,
3788                           SharedRuntime::get_resolve_virtual_call_stub(),
3789                           method, vtable_index, bci());
3790   } else {  // neither virtual nor static:  opt_virtual
3791     null_check_receiver();
3792     slow_call = new(C) CallStaticJavaNode(C, tf,
3793                                 SharedRuntime::get_resolve_opt_virtual_call_stub(),
3794                                 method, bci());
3795     slow_call->set_optimized_virtual(true);
3796   }
3797   set_arguments_for_java_call(slow_call);
3798   set_edges_for_java_call(slow_call);
3799   return slow_call;
3800 }
3801 
3802 
3803 //------------------------------inline_native_hashcode--------------------
3804 // Build special case code for calls to hashCode on an object.
3805 bool LibraryCallKit::inline_native_hashcode(bool is_virtual, bool is_static) {


src/share/vm/opto/library_call.cpp
End of sdiff for src/share/vm/opto/library_call.cpp (changeset 8014013).