src/share/vm/opto/library_call.cpp

2762       // Don't need to load pre_val. The old value is returned by load_store.
2763       // The pre_barrier can execute after the xchg as long as no safepoint
2764       // gets inserted between them.
2765       pre_barrier(false /* do_load */,
2766                   control(), NULL, NULL, max_juint, NULL, NULL,
2767                   load_store /* pre_val */,
2768                   T_OBJECT);
2769     }
2770   }
2771 
2772   // Add the trailing membar surrounding the access
2773   insert_mem_bar(Op_MemBarCPUOrder);
2774   insert_mem_bar(Op_MemBarAcquire);
2775 
2776   assert(type2size[load_store->bottom_type()->basic_type()] == type2size[rtype], "result type should match");
2777   set_result(load_store);
2778   return true;
2779 }
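
The trailing Op_MemBarCPUOrder/Op_MemBarAcquire pair gives the atomic exchange acquire semantics on its result: later memory accesses cannot float above the load_store. A minimal analogue of that barrier shape in portable C++ (a sketch using std::atomic, not HotSpot code):

    #include <atomic>

    std::atomic<void*> slot;

    void* get_and_set(void* new_val) {
        // The exchange is the "load_store" node; relaxed ordering here so
        // the trailing fence supplies the acquire half explicitly, like
        // the MemBarAcquire inserted after the access above.
        void* old_val = slot.exchange(new_val, std::memory_order_relaxed);
        std::atomic_thread_fence(std::memory_order_acquire);
        return old_val;
    }
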
2780 
2781 //----------------------------inline_unsafe_ordered_store----------------------
2782 // public native void [sun|jdk.internal].misc.Unsafe.putOrderedObject(Object o, long offset, Object x);
2783 // public native void [sun|jdk.internal].misc.Unsafe.putOrderedInt(Object o, long offset, int x);
2784 // public native void [sun|jdk.internal].misc.Unsafe.putOrderedLong(Object o, long offset, long x);
2785 bool LibraryCallKit::inline_unsafe_ordered_store(BasicType type) {
2786   // This is another variant of inline_unsafe_access, differing in
2787   // that it always issues a store-store ("release") barrier and ensures
2788   // store-atomicity (which only matters for "long").
2789 
2790   if (callee()->is_static())  return false;  // caller must have the capability!
2791 
2792 #ifndef PRODUCT
2793   {
2794     ResourceMark rm;
2795     // Check the signatures.
2796     ciSignature* sig = callee()->signature();
2797 #ifdef ASSERT
2798     BasicType rtype = sig->return_type()->basic_type();
2799     assert(rtype == T_VOID, "must return void");
2800     assert(sig->count() == 3, "has 3 arguments");
2801     assert(sig->type_at(0)->basic_type() == T_OBJECT, "base is object");
2802     assert(sig->type_at(1)->basic_type() == T_LONG, "offset is long");
2803 #endif // ASSERT
2804   }


2858     default:
2859       fatal_unexpected_iid(id);
2860       return false;
2861   }
2862 }
2863 
2864 bool LibraryCallKit::klass_needs_init_guard(Node* kls) {
2865   if (!kls->is_Con()) {
2866     return true;
2867   }
2868   const TypeKlassPtr* klsptr = kls->bottom_type()->isa_klassptr();
2869   if (klsptr == NULL) {
2870     return true;
2871   }
2872   ciInstanceKlass* ik = klsptr->klass()->as_instance_klass();
2873   // don't need a guard for a klass that is already initialized
2874   return !ik->is_initialized();
2875 }
2876 
2877 //----------------------------inline_unsafe_allocate---------------------------
2878 // public native Object [sun|jdk.internal].misc.Unsafe.allocateInstance(Class<?> cls);
2879 bool LibraryCallKit::inline_unsafe_allocate() {
2880   if (callee()->is_static())  return false;  // caller must have the capability!
2881 
2882   null_check_receiver();  // null-check, then ignore
2883   Node* cls = null_check(argument(1));
2884   if (stopped())  return true;
2885 
2886   Node* kls = load_klass_from_mirror(cls, false, NULL, 0);
2887   kls = null_check(kls);
2888   if (stopped())  return true;  // argument was like int.class
2889 
2890   Node* test = NULL;
2891   if (LibraryCallKit::klass_needs_init_guard(kls)) {
2892     // Note:  The argument might still be an illegal value like
2893     // Serializable.class or Object[].class.   The runtime will handle it.
2894     // But we must make an explicit check for initialization.
2895     Node* insp = basic_plus_adr(kls, in_bytes(InstanceKlass::init_state_offset()));
2896     // Use T_BOOLEAN for InstanceKlass::_init_state so the compiler
2897     // can generate code to load it as unsigned byte.
2898     Node* inst = make_load(NULL, insp, TypeInt::UBYTE, T_BOOLEAN, MemNode::unordered);
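
The hunk is cut off above, but the load just built reads InstanceKlass::_init_state as an unsigned byte so the initialization guard can be a cheap byte compare. A standalone sketch of that check in plain C++ (the enum mirrors HotSpot's ClassState values; the helper itself is hypothetical):

    #include <cstdint>

    enum InitState : std::uint8_t { allocated, loaded, linked,
                                    being_initialized, fully_initialized,
                                    initialization_error };

    // Guard emitted for a class that might not be initialized yet:
    // unsigned-byte load of _init_state, slow path unless fully initialized.
    bool needs_slow_path(const std::uint8_t* init_state_addr) {
        return *init_state_addr != fully_initialized;
    }
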


4177     result = phi;
4178     assert(result->bottom_type()->isa_int(), "must be");
4179     break;
4180   }
4181 
4182   default:
4183     fatal_unexpected_iid(id);
4184     break;
4185   }
4186   set_result(_gvn.transform(result));
4187   return true;
4188 }
4189 
4190 #ifdef _LP64
4191 #define XTOP ,top() /*additional argument*/
4192 #else  //_LP64
4193 #define XTOP        /*no additional argument*/
4194 #endif //_LP64
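
On 64-bit builds a pointer-width value produced by ConvL2X is a T_LONG and occupies two Ideal slots, so XTOP appends top() as the dummy high half; on 32-bit the value narrows to an int and the macro expands to nothing. A self-contained illustration of the macro mechanics (hypothetical runtime_call, not the call this file makes):

    #include <cstdio>

    #ifdef _LP64
    #define XTOP , nullptr /* dummy second slot for a two-slot long */
    #else
    #define XTOP           /* 32-bit: the value fits in one slot */
    #endif

    void runtime_call(long size, ...) { std::printf("size=%ld\n", size); }

    int main() {
        runtime_call(42L XTOP);  // runtime_call(42L, nullptr) on _LP64
        return 0;
    }
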
4195 
4196 //----------------------inline_unsafe_copyMemory-------------------------
4197 // public native void [sun|jdk.internal].misc.Unsafe.copyMemory(Object srcBase, long srcOffset, Object destBase, long destOffset, long bytes);
4198 bool LibraryCallKit::inline_unsafe_copyMemory() {
4199   if (callee()->is_static())  return false;  // caller must have the capability!
4200   null_check_receiver();  // null-check receiver
4201   if (stopped())  return true;
4202 
4203   C->set_has_unsafe_access(true);  // Mark eventual nmethod as "unsafe".
4204 
4205   Node* src_ptr =         argument(1);   // type: oop
4206   Node* src_off = ConvL2X(argument(2));  // type: long
4207   Node* dst_ptr =         argument(4);   // type: oop
4208   Node* dst_off = ConvL2X(argument(5));  // type: long
4209   Node* size    = ConvL2X(argument(7));  // type: long
4210 
4211   assert(Unsafe_field_offset_to_byte_offset(11) == 11,
4212          "fieldOffset must be byte-scaled");
4213 
4214   Node* src = make_unsafe_address(src_ptr, src_off);
4215   Node* dst = make_unsafe_address(dst_ptr, dst_off);
4216 
4217   // Conservatively insert a memory barrier on all memory slices.
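
The hunk ends mid-function, but the shape of the intrinsic is already visible: form raw addresses as base plus byte offset, then bracket a raw copy with conservative barriers on all memory slices. A portable C++ analogue of that shape (memmove and seq_cst fences stand in for the arraycopy stub and the membars; a sketch, not HotSpot code):

    #include <atomic>
    #include <cstring>

    void unsafe_copy(void* src_base, long src_off,
                     void* dst_base, long dst_off, long bytes) {
        char* src = static_cast<char*>(src_base) + src_off;
        char* dst = static_cast<char*>(dst_base) + dst_off;
        std::atomic_thread_fence(std::memory_order_seq_cst);  // leading barrier
        std::memmove(dst, src, static_cast<size_t>(bytes));   // overlap-safe copy
        std::atomic_thread_fence(std::memory_order_seq_cst);  // trailing barrier
    }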