src/share/vm/opto/library_call.cpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File 8014013 Sdiff src/share/vm/opto

src/share/vm/opto/library_call.cpp

Print this page




3666       generate_arraycopy(TypeAryPtr::OOPS, T_OBJECT,
3667                          original, start, newcopy, intcon(0), moved,
3668                          disjoint_bases, length_never_negative);
3669     }
3670   } // original reexecute is set back here
3671 
3672   C->set_has_split_ifs(true); // Has chance for split-if optimization
3673   if (!stopped()) {
3674     set_result(newcopy);
3675   }
3676   return true;
3677 }
3678 
3679 
3680 //----------------------generate_virtual_guard---------------------------
3681 // Helper for hashCode and clone.  Peeks inside the vtable to avoid a call.
3682 Node* LibraryCallKit::generate_virtual_guard(Node* obj_klass,
3683                                              RegionNode* slow_region) {
       // The intrinsified callee (e.g. Object::hashCode) and its vtable slot.
3684   ciMethod* method = callee();
3685   int vtable_index = method->vtable_index();
 
       // NOTE(review): vtable_index is used unchecked below; a negative
       // (non-virtual) index would make entry_offset nonsense. Consider
       // asserting vtable_index >= 0 || vtable_index == Method::nonvirtual_vtable_index.
3686   // Get the Method* out of the appropriate vtable entry.
       // Offset in bytes from the klass base: vtable start + index * entry size
       // (both in words, hence * wordSize), plus the method field's offset
       // within the vtableEntry.
3687   int entry_offset  = (InstanceKlass::vtable_start_offset() +
3688                      vtable_index*vtableEntry::size()) * wordSize +
3689                      vtableEntry::method_offset_in_bytes();
3690   Node* entry_addr  = basic_plus_adr(obj_klass, entry_offset);
       // Load the Method* currently installed in that slot of obj_klass's vtable.
3691   Node* target_call = make_load(NULL, entry_addr, TypePtr::NOTNULL, T_ADDRESS);
3692 
3693   // Compare the target method with the expected method (e.g., Object.hashCode).
3694   const TypePtr* native_call_addr = TypeMetadataPtr::make(method);
3695 
       // Constant metadata pointer for the expected method.
3696   Node* native_call = makecon(native_call_addr);
3697   Node* chk_native  = _gvn.transform(new(C) CmpPNode(target_call, native_call));
3698   Node* test_native = _gvn.transform(new(C) BoolNode(chk_native, BoolTest::ne));
3699 
       // Take the slow path when the vtable entry differs from the expected
       // method, i.e. the receiver's class overrides it.
3700   return generate_slow_guard(test_native, slow_region);
3701 }
3702 
3703 //-----------------------generate_method_call----------------------------
3704 // Use generate_method_call to make a slow-call to the real
3705 // method if the fast path fails.  An alternative would be to


3716   // ensure the JVMS we have will be correct for this call
3717   guarantee(method_id == method->intrinsic_id(), "must match");
3718 
3719   const TypeFunc* tf = TypeFunc::make(method);
3720   CallJavaNode* slow_call;
3721   if (is_static) {
3722     assert(!is_virtual, "");
3723     slow_call = new(C) CallStaticJavaNode(C, tf,
3724                            SharedRuntime::get_resolve_static_call_stub(),
3725                            method, bci());
3726   } else if (is_virtual) {
3727     null_check_receiver();
3728     int vtable_index = Method::invalid_vtable_index;
3729     if (UseInlineCaches) {
3730       // Suppress the vtable call
3731     } else {
3732     // hashCode and clone are not miranda methods,
3733       // so the vtable index is fixed.
3734       // No need to use the linkResolver to get it.
3735        vtable_index = method->vtable_index();

3736     }
3737     slow_call = new(C) CallDynamicJavaNode(tf,
3738                           SharedRuntime::get_resolve_virtual_call_stub(),
3739                           method, vtable_index, bci());
3740   } else {  // neither virtual nor static:  opt_virtual
3741     null_check_receiver();
3742     slow_call = new(C) CallStaticJavaNode(C, tf,
3743                                 SharedRuntime::get_resolve_opt_virtual_call_stub(),
3744                                 method, bci());
3745     slow_call->set_optimized_virtual(true);
3746   }
3747   set_arguments_for_java_call(slow_call);
3748   set_edges_for_java_call(slow_call);
3749   return slow_call;
3750 }
3751 
3752 
3753 //------------------------------inline_native_hashcode--------------------
3754 // Build special case code for calls to hashCode on an object.
3755 bool LibraryCallKit::inline_native_hashcode(bool is_virtual, bool is_static) {




3666       generate_arraycopy(TypeAryPtr::OOPS, T_OBJECT,
3667                          original, start, newcopy, intcon(0), moved,
3668                          disjoint_bases, length_never_negative);
3669     }
3670   } // original reexecute is set back here
3671 
3672   C->set_has_split_ifs(true); // Has chance for split-if optimization
3673   if (!stopped()) {
3674     set_result(newcopy);
3675   }
3676   return true;
3677 }
3678 
3679 
3680 //----------------------generate_virtual_guard---------------------------
3681 // Helper for hashCode and clone.  Peeks inside the vtable to avoid a call.
3682 Node* LibraryCallKit::generate_virtual_guard(Node* obj_klass,
3683                                              RegionNode* slow_region) {
       // The intrinsified callee (e.g. Object::hashCode) and its vtable slot.
3684   ciMethod* method = callee();
3685   int vtable_index = method->vtable_index();
       // A negative index other than the nonvirtual sentinel would make the
       // entry_offset computation below nonsense; catch that in debug builds.
3686   assert(vtable_index >= 0 || vtable_index == Method::nonvirtual_vtable_index, err_msg("bad index %d", vtable_index));
3687   // Get the Method* out of the appropriate vtable entry.
       // Offset in bytes from the klass base: vtable start + index * entry size
       // (both in words, hence * wordSize), plus the method field's offset
       // within the vtableEntry.
3688   int entry_offset  = (InstanceKlass::vtable_start_offset() +
3689                      vtable_index*vtableEntry::size()) * wordSize +
3690                      vtableEntry::method_offset_in_bytes();
3691   Node* entry_addr  = basic_plus_adr(obj_klass, entry_offset);
       // Load the Method* currently installed in that slot of obj_klass's vtable.
3692   Node* target_call = make_load(NULL, entry_addr, TypePtr::NOTNULL, T_ADDRESS);
3693 
3694   // Compare the target method with the expected method (e.g., Object.hashCode).
3695   const TypePtr* native_call_addr = TypeMetadataPtr::make(method);
3696 
       // Constant metadata pointer for the expected method.
3697   Node* native_call = makecon(native_call_addr);
3698   Node* chk_native  = _gvn.transform(new(C) CmpPNode(target_call, native_call));
3699   Node* test_native = _gvn.transform(new(C) BoolNode(chk_native, BoolTest::ne));
3700 
       // Take the slow path when the vtable entry differs from the expected
       // method, i.e. the receiver's class overrides it.
3701   return generate_slow_guard(test_native, slow_region);
3702 }
3703 
3704 //-----------------------generate_method_call----------------------------
3705 // Use generate_method_call to make a slow-call to the real
3706 // method if the fast path fails.  An alternative would be to


3717   // ensure the JVMS we have will be correct for this call
3718   guarantee(method_id == method->intrinsic_id(), "must match");
3719 
3720   const TypeFunc* tf = TypeFunc::make(method);
3721   CallJavaNode* slow_call;
3722   if (is_static) {
3723     assert(!is_virtual, "");
3724     slow_call = new(C) CallStaticJavaNode(C, tf,
3725                            SharedRuntime::get_resolve_static_call_stub(),
3726                            method, bci());
3727   } else if (is_virtual) {
3728     null_check_receiver();
3729     int vtable_index = Method::invalid_vtable_index;
3730     if (UseInlineCaches) {
3731       // Suppress the vtable call
3732     } else {
3733     // hashCode and clone are not miranda methods,
3734       // so the vtable index is fixed.
3735       // No need to use the linkResolver to get it.
3736        vtable_index = method->vtable_index();
3737        assert(vtable_index >= 0 || vtable_index == Method::nonvirtual_vtable_index, err_msg("bad index %d", vtable_index));
3738     }
3739     slow_call = new(C) CallDynamicJavaNode(tf,
3740                           SharedRuntime::get_resolve_virtual_call_stub(),
3741                           method, vtable_index, bci());
3742   } else {  // neither virtual nor static:  opt_virtual
3743     null_check_receiver();
3744     slow_call = new(C) CallStaticJavaNode(C, tf,
3745                                 SharedRuntime::get_resolve_opt_virtual_call_stub(),
3746                                 method, bci());
3747     slow_call->set_optimized_virtual(true);
3748   }
3749   set_arguments_for_java_call(slow_call);
3750   set_edges_for_java_call(slow_call);
3751   return slow_call;
3752 }
3753 
3754 
3755 //------------------------------inline_native_hashcode--------------------
3756 // Build special case code for calls to hashCode on an object.
3757 bool LibraryCallKit::inline_native_hashcode(bool is_virtual, bool is_static) {


src/share/vm/opto/library_call.cpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File