
src/share/vm/opto/library_call.cpp

*** 45,54 ****
--- 45,55 ----
  #include "opto/opaquenode.hpp"
  #include "opto/parse.hpp"
  #include "opto/runtime.hpp"
  #include "opto/subnode.hpp"
  #include "prims/nativeLookup.hpp"
+ #include "prims/unsafe.hpp"
  #include "runtime/sharedRuntime.hpp"
  #include "trace/traceMacros.hpp"

  class LibraryIntrinsic : public InlineCallGenerator {
   // Extend the set of intrinsics known to the runtime:
*** 225,241 ****
    bool inline_pow();
    Node* finish_pow_exp(Node* result, Node* x, Node* y, const TypeFunc* call_type, address funcAddr, const char* funcName);
    bool inline_min_max(vmIntrinsics::ID id);
    Node* generate_min_max(vmIntrinsics::ID id, Node* x, Node* y);
    // This returns Type::AnyPtr, RawPtr, or OopPtr.
!   int classify_unsafe_addr(Node* &base, Node* &offset);
!   Node* make_unsafe_address(Node* base, Node* offset);
    // Helper for inline_unsafe_access.
    // Generates the guards that check whether the result of
    // Unsafe.getObject should be recorded in an SATB log buffer.
    void insert_pre_barrier(Node* base_oop, Node* offset, Node* pre_val, bool need_mem_bar);
!   bool inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile);
    static bool klass_needs_init_guard(Node* kls);
    bool inline_unsafe_allocate();
    bool inline_unsafe_copyMemory();
    bool inline_native_currentThread();
  #ifdef TRACE_HAVE_INTRINSICS
--- 226,242 ----
    bool inline_pow();
    Node* finish_pow_exp(Node* result, Node* x, Node* y, const TypeFunc* call_type, address funcAddr, const char* funcName);
    bool inline_min_max(vmIntrinsics::ID id);
    Node* generate_min_max(vmIntrinsics::ID id, Node* x, Node* y);
    // This returns Type::AnyPtr, RawPtr, or OopPtr.
!   int classify_unsafe_addr(Node* &base, Node* &offset, bool decode_offset);
!   Node* make_unsafe_address(Node* base, Node* offset, bool decode_offset);
    // Helper for inline_unsafe_access.
    // Generates the guards that check whether the result of
    // Unsafe.getObject should be recorded in an SATB log buffer.
    void insert_pre_barrier(Node* base_oop, Node* offset, Node* pre_val, bool need_mem_bar);
!   bool inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile, bool unaligned = false);
    static bool klass_needs_init_guard(Node* kls);
    bool inline_unsafe_allocate();
    bool inline_unsafe_copyMemory();
    bool inline_native_currentThread();
  #ifdef TRACE_HAVE_INTRINSICS
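Note: the new decode_offset and unaligned parameters exist because, in this patch, the cookies handed out by Unsafe.objectFieldOffset are no longer plain byte offsets: the byte offset is shifted left by Unsafe::offset_shift and the low bits carry flags such as Unsafe::final_mask (both constants come from the new prims/unsafe.hpp). A minimal self-contained sketch of that assumed encoding; the concrete constant values and helper names below are illustrative guesses, not taken from the patch:

    #include <cstdint>

    static const int     kOffsetShift = 1;  // stand-in for Unsafe::offset_shift
    static const int64_t kFinalMask   = 1;  // stand-in for Unsafe::final_mask

    static int64_t encode_field_offset(int64_t byte_offset, bool is_final) {
      return (byte_offset << kOffsetShift) | (is_final ? kFinalMask : 0);
    }
    static int64_t decode_field_offset(int64_t cookie) {
      return cookie >> kOffsetShift;         // recover the plain byte offset
    }
    static bool is_final_field(int64_t cookie) {
      return (cookie & kFinalMask) != 0;     // test the flag without decoding
    }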
*** 822,840 ****
    case vmIntrinsics::_putIntVolatile:     return inline_unsafe_access(!is_native_ptr,  is_store, T_INT,    is_volatile);
    case vmIntrinsics::_putLongVolatile:    return inline_unsafe_access(!is_native_ptr,  is_store, T_LONG,   is_volatile);
    case vmIntrinsics::_putFloatVolatile:   return inline_unsafe_access(!is_native_ptr,  is_store, T_FLOAT,  is_volatile);
    case vmIntrinsics::_putDoubleVolatile:  return inline_unsafe_access(!is_native_ptr,  is_store, T_DOUBLE, is_volatile);

!   case vmIntrinsics::_getShortUnaligned:  return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT,  !is_volatile);
!   case vmIntrinsics::_getCharUnaligned:   return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR,   !is_volatile);
!   case vmIntrinsics::_getIntUnaligned:    return inline_unsafe_access(!is_native_ptr, !is_store, T_INT,    !is_volatile);
!   case vmIntrinsics::_getLongUnaligned:   return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG,   !is_volatile);
!
!   case vmIntrinsics::_putShortUnaligned:  return inline_unsafe_access(!is_native_ptr,  is_store, T_SHORT,  !is_volatile);
!   case vmIntrinsics::_putCharUnaligned:   return inline_unsafe_access(!is_native_ptr,  is_store, T_CHAR,   !is_volatile);
!   case vmIntrinsics::_putIntUnaligned:    return inline_unsafe_access(!is_native_ptr,  is_store, T_INT,    !is_volatile);
!   case vmIntrinsics::_putLongUnaligned:   return inline_unsafe_access(!is_native_ptr,  is_store, T_LONG,   !is_volatile);

    case vmIntrinsics::_compareAndSwapObject: return inline_unsafe_load_store(T_OBJECT, LS_cmpxchg);
    case vmIntrinsics::_compareAndSwapInt:    return inline_unsafe_load_store(T_INT,    LS_cmpxchg);
    case vmIntrinsics::_compareAndSwapLong:   return inline_unsafe_load_store(T_LONG,   LS_cmpxchg);
--- 823,841 ----
    case vmIntrinsics::_putIntVolatile:     return inline_unsafe_access(!is_native_ptr,  is_store, T_INT,    is_volatile);
    case vmIntrinsics::_putLongVolatile:    return inline_unsafe_access(!is_native_ptr,  is_store, T_LONG,   is_volatile);
    case vmIntrinsics::_putFloatVolatile:   return inline_unsafe_access(!is_native_ptr,  is_store, T_FLOAT,  is_volatile);
    case vmIntrinsics::_putDoubleVolatile:  return inline_unsafe_access(!is_native_ptr,  is_store, T_DOUBLE, is_volatile);

!   case vmIntrinsics::_getShortUnaligned:  return inline_unsafe_access(!is_native_ptr, !is_store, T_SHORT,  !is_volatile, /*unaligned=*/true);
!   case vmIntrinsics::_getCharUnaligned:   return inline_unsafe_access(!is_native_ptr, !is_store, T_CHAR,   !is_volatile, /*unaligned=*/true);
!   case vmIntrinsics::_getIntUnaligned:    return inline_unsafe_access(!is_native_ptr, !is_store, T_INT,    !is_volatile, /*unaligned=*/true);
!   case vmIntrinsics::_getLongUnaligned:   return inline_unsafe_access(!is_native_ptr, !is_store, T_LONG,   !is_volatile, /*unaligned=*/true);
!
!   case vmIntrinsics::_putShortUnaligned:  return inline_unsafe_access(!is_native_ptr,  is_store, T_SHORT,  !is_volatile, /*unaligned=*/true);
!   case vmIntrinsics::_putCharUnaligned:   return inline_unsafe_access(!is_native_ptr,  is_store, T_CHAR,   !is_volatile, /*unaligned=*/true);
!   case vmIntrinsics::_putIntUnaligned:    return inline_unsafe_access(!is_native_ptr,  is_store, T_INT,    !is_volatile, /*unaligned=*/true);
!   case vmIntrinsics::_putLongUnaligned:   return inline_unsafe_access(!is_native_ptr,  is_store, T_LONG,   !is_volatile, /*unaligned=*/true);

    case vmIntrinsics::_compareAndSwapObject: return inline_unsafe_load_store(T_OBJECT, LS_cmpxchg);
    case vmIntrinsics::_compareAndSwapInt:    return inline_unsafe_load_store(T_INT,    LS_cmpxchg);
    case vmIntrinsics::_compareAndSwapLong:   return inline_unsafe_load_store(T_LONG,   LS_cmpxchg);
*** 2278,2288 ****
    }
  */
  }

  inline int
! LibraryCallKit::classify_unsafe_addr(Node* &base, Node* &offset) {
    const TypePtr* base_type = TypePtr::NULL_PTR;
    if (base != NULL)  base_type = _gvn.type(base)->isa_ptr();
    if (base_type == NULL) {
      // Unknown type.
      return Type::AnyPtr;
--- 2279,2289 ----
    }
  */
  }

  inline int
! LibraryCallKit::classify_unsafe_addr(Node* &base, Node* &offset, bool decode_offset) {
    const TypePtr* base_type = TypePtr::NULL_PTR;
    if (base != NULL)  base_type = _gvn.type(base)->isa_ptr();
    if (base_type == NULL) {
      // Unknown type.
      return Type::AnyPtr;
*** 2292,2323 ****
      offset = MakeConX(0);
      return Type::RawPtr;
    } else if (base_type->base() == Type::RawPtr) {
      return Type::RawPtr;
    } else if (base_type->isa_oopptr()) {
      // Base is never null => always a heap address.
      if (base_type->ptr() == TypePtr::NotNull) {
        return Type::OopPtr;
      }
      // Offset is small => always a heap address.
      const TypeX* offset_type = _gvn.type(offset)->isa_intptr_t();
      if (offset_type != NULL &&
          base_type->offset() == 0 &&     // (should always be?)
          offset_type->_lo >= 0 &&
          !MacroAssembler::needs_explicit_null_check(offset_type->_hi)) {
        return Type::OopPtr;
      }
      // Otherwise, it might either be oop+off or NULL+addr.
      return Type::AnyPtr;
    } else {
      // No information:
      return Type::AnyPtr;
    }
  }

! inline Node* LibraryCallKit::make_unsafe_address(Node* base, Node* offset) {
!   int kind = classify_unsafe_addr(base, offset);
    if (kind == Type::RawPtr) {
      return basic_plus_adr(top(), base, offset);
    } else {
      return basic_plus_adr(base, offset);
    }
--- 2293,2345 ----
      offset = MakeConX(0);
      return Type::RawPtr;
    } else if (base_type->base() == Type::RawPtr) {
      return Type::RawPtr;
    } else if (base_type->isa_oopptr()) {
+     Node* decoded_offset = offset;
+     if (decode_offset) {
+       decoded_offset = _gvn.transform(new RShiftXNode(offset, intcon(Unsafe::offset_shift)));
+     }
      // Base is never null => always a heap address.
      if (base_type->ptr() == TypePtr::NotNull) {
+       offset = decoded_offset;
        return Type::OopPtr;
      }
      // Offset is small => always a heap address.
      const TypeX* offset_type = _gvn.type(offset)->isa_intptr_t();
      if (offset_type != NULL &&
          base_type->offset() == 0 &&     // (should always be?)
          offset_type->_lo >= 0 &&
          !MacroAssembler::needs_explicit_null_check(offset_type->_hi)) {
+       offset = decoded_offset;
        return Type::OopPtr;
      }
      // Otherwise, it might either be oop+off or NULL+addr.
+     // For oop+off case the offset should be decoded first, but
+     // NULL+addr can be used as is.
+     IdealKit ideal(this);
+ #define __ ideal.
+     IdealVariable off(ideal);
+     __ declarations_done();
+     __ set(off, offset);
+     __ if_then(base, BoolTest::ne, null(), PROB_UNLIKELY(0.999)); {
+       __ set(off, decoded_offset);
+     } __ end_if();
+     // Final sync IdealKit and GraphKit.
+     decoded_offset = __ value(off);
+     final_sync(ideal);
+ #undef __
+     offset = decoded_offset;
      return Type::AnyPtr;
    } else {
      // No information:
      return Type::AnyPtr;
    }
  }

! inline Node* LibraryCallKit::make_unsafe_address(Node* base, Node* offset, bool decode_offset) {
!   int kind = classify_unsafe_addr(base, offset, decode_offset);
    if (kind == Type::RawPtr) {
      return basic_plus_adr(top(), base, offset);
    } else {
      return basic_plus_adr(base, offset);
    }
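Note: the IdealKit diamond added to the AnyPtr branch amounts to the following scalar logic. When the classifier cannot prove whether the access is oop+offset or NULL+raw-address, the decode is made conditional on base at run time. A sketch only (the shift constant is an assumed stand-in for Unsafe::offset_shift, and the helper is hypothetical):

    #include <cstdint>

    static const int kOffsetShift = 1;  // assumed stand-in for Unsafe::offset_shift

    // Decode the cookie only on the oop+offset path; a null base means the
    // "offset" argument is already a raw native address and is used as is.
    static intptr_t effective_offset(const void* base, intptr_t cookie) {
      return (base != nullptr) ? (cookie >> kOffsetShift)  // heap access: decode
                               : cookie;                   // raw native address
    }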
*** 2458,2471 ****
    // Final sync IdealKit and GraphKit.
    final_sync(ideal);
  #undef __
  }
- 
- // Interpret Unsafe.fieldOffset cookies correctly:
- extern jlong Unsafe_field_offset_to_byte_offset(jlong field_offset);
- 
  const TypeOopPtr* LibraryCallKit::sharpen_unsafe_type(Compile::AliasType* alias_type, const TypePtr *adr_type, bool is_native_ptr) {
    // Attempt to infer a sharper value type from the offset and base type.
    ciKlass* sharpened_klass = NULL;

    // See if it is an instance field, with an object type.
--- 2480,2489 ----
*** 2501,2511 ****
      return tjp;
    }
    return NULL;
  }

! bool LibraryCallKit::inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile) {
    if (callee()->is_static())  return false;  // caller must have the capability!

  #ifndef PRODUCT
    {
      ResourceMark rm;
--- 2519,2529 ----
      return tjp;
    }
    return NULL;
  }

! bool LibraryCallKit::inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile, bool unaligned) {
    if (callee()->is_static())  return false;  // caller must have the capability!

  #ifndef PRODUCT
    {
      ResourceMark rm;
--- 2519,2529 ----
*** 2559,2582 ****
    if (!is_native_ptr) {
      // The base is either a Java object or a value produced by Unsafe.staticFieldBase
      Node* base = argument(1);  // type: oop
      // The offset is a value produced by Unsafe.staticFieldOffset or Unsafe.objectFieldOffset
      offset = argument(2);  // type: long
-     // We currently rely on the cookies produced by Unsafe.xxxFieldOffset
-     // to be plain byte offsets, which are also the same as those accepted
-     // by oopDesc::field_base.
-     assert(Unsafe_field_offset_to_byte_offset(11) == 11,
-            "fieldOffset must be byte-scaled");
      // 32-bit machines ignore the high half!
      offset = ConvL2X(offset);
!     adr = make_unsafe_address(base, offset);
      heap_base_oop = base;
      val = is_store ? argument(4) : NULL;
    } else {
      Node* ptr = argument(1);  // type: long
      ptr = ConvL2X(ptr);  // adjust Java long to machine word
!     adr = make_unsafe_address(NULL, ptr);
      val = is_store ? argument(3) : NULL;
    }

    const TypePtr *adr_type = _gvn.type(adr)->isa_ptr();
--- 2577,2595 ----
    if (!is_native_ptr) {
      // The base is either a Java object or a value produced by Unsafe.staticFieldBase
      Node* base = argument(1);  // type: oop
      // The offset is a value produced by Unsafe.staticFieldOffset or Unsafe.objectFieldOffset
      offset = argument(2);  // type: long
      // 32-bit machines ignore the high half!
      offset = ConvL2X(offset);
!     adr = make_unsafe_address(base, offset, /*decode=*/!unaligned);
      heap_base_oop = base;
      val = is_store ? argument(4) : NULL;
    } else {
      Node* ptr = argument(1);  // type: long
      ptr = ConvL2X(ptr);  // adjust Java long to machine word
!     adr = make_unsafe_address(NULL, ptr, /*decode=*/false);
      val = is_store ? argument(3) : NULL;
    }

    const TypePtr *adr_type = _gvn.type(adr)->isa_ptr();
*** 2650,2661 ****
      // Try to constant fold a load from a constant field
      ciField* field = alias_type->field();
      if (heap_base_oop != top() &&
          field != NULL && field->is_constant() && field->layout_type() == type) {
        // final or stable field
!       const Type* con_type = Type::make_constant(alias_type->field(), heap_base_oop);
        if (con_type != NULL) {
          p = makecon(con_type);
        }
      }
      if (p == NULL) {
        MemNode::MemOrd mo = is_volatile ? MemNode::acquire : MemNode::unordered;
--- 2663,2681 ----
      // Try to constant fold a load from a constant field
      ciField* field = alias_type->field();
      if (heap_base_oop != top() &&
          field != NULL && field->is_constant() && field->layout_type() == type) {
        // final or stable field
!       ciField* field = alias_type->field();
!       const Type* con_type = Type::make_constant(field, heap_base_oop);
        if (con_type != NULL) {
+         if (TrustFinalNonStaticFields &&
+             !field->is_static() && heap_base_oop->is_Con()) {
+           const TypeOopPtr* oop_ptr = heap_base_oop->bottom_type()->isa_oopptr();
+           ciObject* constant_oop = oop_ptr->const_oop();
+           C->dependencies()->assert_constant_field_value_instance(field, constant_oop);
+         }
          p = makecon(con_type);
        }
      }
      if (p == NULL) {
        MemNode::MemOrd mo = is_volatile ? MemNode::acquire : MemNode::unordered;
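Note: the added block records a (field, instance) dependency so the constant fold stays sound: if that exact final field of that exact instance is later written through Unsafe, every nmethod that embedded the folded value can be deoptimized. A minimal sketch of when the dependency is recorded (helper is hypothetical, conditions mirror the added code):

    // Hypothetical predicate mirroring the condition added above: only trusted
    // non-static finals loaded from a compile-time-constant instance need the
    // extra (field, instance) bookkeeping.
    static bool needs_instance_dependency(bool trust_final_nonstatic_fields,
                                          bool field_is_static,
                                          bool base_is_constant_oop) {
      return trust_final_nonstatic_fields && !field_is_static && base_is_constant_oop;
    }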
*** 2704,2713 ****
--- 2724,2744 ----
        val = ConvL2X(val);
        val = _gvn.transform(new CastX2PNode(val));
        break;
      }

+     { // Need to check all dependent nmethods when final field is updated through Unsafe.
+       Node* final_bit = _gvn.transform(new AndXNode(/*offset*/argument(2), MakeConX(Unsafe::final_mask)));
+       Node* cmp_final_bit = _gvn.transform(new CmpXNode(final_bit, MakeConX(0)));
+       Node* bol_final_bit = _gvn.transform(new BoolNode(cmp_final_bit, BoolTest::eq));
+ 
+       BuildCutout unless(this, bol_final_bit, PROB_MAX);
+       uncommon_trap(Deoptimization::Reason_intrinsic,
+                     Deoptimization::Action_none,
+                     NULL, "final_field_unsafe_update");
+     }
+ 
      MemNode::MemOrd mo = is_volatile ? MemNode::release : MemNode::unordered;
      if (type != T_OBJECT ) {
        (void) store_to_memory(control(), adr, val, type, adr_type, mo, is_volatile);
      } else {
        // Possibly an oop being stored to Java heap or native memory
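Note: the cutout tests the final bit directly on the raw offset cookie (argument(2)), before any decoding, so a final-field write through Unsafe never takes the compiled fast path; it traps to the runtime, which can then check all dependent nmethods. A scalar sketch of the guard (the mask value is an assumed stand-in for Unsafe::final_mask):

    #include <cstdint>

    static const intptr_t kFinalMask = 1;  // assumed stand-in for Unsafe::final_mask

    // Scalar equivalent of the BuildCutout above: if the cookie carries the
    // final bit, bail out to the runtime (uncommon trap) so nmethods that
    // folded the field's value can be invalidated before the store happens.
    static bool store_must_trap(intptr_t offset_cookie) {
      return (offset_cookie & kFinalMask) != 0;
    }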
*** 2838,2854 ****
    if (stopped()) {
      return true;
    }

    // Build field offset expression.
-   // We currently rely on the cookies produced by Unsafe.xxxFieldOffset
-   // to be plain byte offsets, which are also the same as those accepted
-   // by oopDesc::field_base.
-   assert(Unsafe_field_offset_to_byte_offset(11) == 11, "fieldOffset must be byte-scaled");
    // 32-bit machines ignore the high half of long offsets
    offset = ConvL2X(offset);
!   Node* adr = make_unsafe_address(base, offset);
    const TypePtr *adr_type = _gvn.type(adr)->isa_ptr();

    // For CAS, unlike inline_unsafe_access, there seems no point in
    // trying to refine types. Just use the coarse types here.
    const Type *value_type = Type::get_const_basic_type(type);
--- 2869,2881 ----
    if (stopped()) {
      return true;
    }

    // Build field offset expression.
    // 32-bit machines ignore the high half of long offsets
    offset = ConvL2X(offset);
!   Node* adr = make_unsafe_address(base, offset, /*decode=*/true);
    const TypePtr *adr_type = _gvn.type(adr)->isa_ptr();

    // For CAS, unlike inline_unsafe_access, there seems no point in
    // trying to refine types. Just use the coarse types here.
    const Type *value_type = Type::get_const_basic_type(type);
*** 3033,3046 ****
    if (stopped()) {
      return true;
    }

    // Build field offset expression.
-   assert(Unsafe_field_offset_to_byte_offset(11) == 11, "fieldOffset must be byte-scaled");
    // 32-bit machines ignore the high half of long offsets
    offset = ConvL2X(offset);
!   Node* adr = make_unsafe_address(base, offset);
    const TypePtr *adr_type = _gvn.type(adr)->isa_ptr();
    const Type *value_type = Type::get_const_basic_type(type);
    Compile::AliasType* alias_type = C->alias_type(adr_type);

    insert_mem_bar(Op_MemBarRelease);
--- 3060,3072 ----
    if (stopped()) {
      return true;
    }

    // Build field offset expression.
    // 32-bit machines ignore the high half of long offsets
    offset = ConvL2X(offset);
!   Node* adr = make_unsafe_address(base, offset, /*decode=*/true);
    const TypePtr *adr_type = _gvn.type(adr)->isa_ptr();
    const Type *value_type = Type::get_const_basic_type(type);
    Compile::AliasType* alias_type = C->alias_type(adr_type);

    insert_mem_bar(Op_MemBarRelease);
*** 4422,4436 ****
    Node* src_off = ConvL2X(argument(2));  // type: long
    Node* dst_ptr =         argument(4);   // type: oop
    Node* dst_off = ConvL2X(argument(5));  // type: long
    Node* size    = ConvL2X(argument(7));  // type: long

!   assert(Unsafe_field_offset_to_byte_offset(11) == 11,
!          "fieldOffset must be byte-scaled");
!
!   Node* src = make_unsafe_address(src_ptr, src_off);
!   Node* dst = make_unsafe_address(dst_ptr, dst_off);

    // Conservatively insert a memory barrier on all memory slices.
    // Do not let writes of the copy source or destination float below the copy.
    insert_mem_bar(Op_MemBarCPUOrder);
--- 4448,4459 ----
    Node* src_off = ConvL2X(argument(2));  // type: long
    Node* dst_ptr =         argument(4);   // type: oop
    Node* dst_off = ConvL2X(argument(5));  // type: long
    Node* size    = ConvL2X(argument(7));  // type: long

!   Node* src = make_unsafe_address(src_ptr, src_off, /*decode=*/true);
!   Node* dst = make_unsafe_address(dst_ptr, dst_off, /*decode=*/true);

    // Conservatively insert a memory barrier on all memory slices.
    // Do not let writes of the copy source or destination float below the copy.
    insert_mem_bar(Op_MemBarCPUOrder);