src/share/vm/opto/library_call.cpp

rev 2237 : [mq]: initial-intrinsification-changes
rev 2238 : [mq]: code-review-comments-vladimir
rev 2239 : [mq]: client_assertion_fauilure
rev 2240 : [mq]: code-review-comments-tom
rev 2242 : [mq]: reflection-unsafe-read-barrier

*** 163,172 **** --- 163,176 ----
    bool inline_min_max(vmIntrinsics::ID id);
    Node* generate_min_max(vmIntrinsics::ID id, Node* x, Node* y);
    // This returns Type::AnyPtr, RawPtr, or OopPtr.
    int classify_unsafe_addr(Node* &base, Node* &offset);
    Node* make_unsafe_address(Node* base, Node* offset);
+   // Helper for inline_unsafe_access.
+   // Generates the guards that check whether the result of
+   // Unsafe.getObject should be recorded in an SATB log buffer.
+   void insert_g1_pre_barrier(Node* base_oop, Node* offset, Node* pre_val);
    bool inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile);
    bool inline_unsafe_prefetch(bool is_native_ptr, bool is_store, bool is_static);
    bool inline_unsafe_allocate();
    bool inline_unsafe_copyMemory();
    bool inline_native_currentThread();
*** 237,246 **** --- 241,252 ----
    bool inline_fp_conversions(vmIntrinsics::ID id);
    bool inline_numberOfLeadingZeros(vmIntrinsics::ID id);
    bool inline_numberOfTrailingZeros(vmIntrinsics::ID id);
    bool inline_bitCount(vmIntrinsics::ID id);
    bool inline_reverseBytes(vmIntrinsics::ID id);
+
+   bool inline_reference_get();
  };

  //---------------------------make_vm_intrinsic----------------------------
  CallGenerator* Compile::make_vm_intrinsic(ciMethod* m, bool is_virtual) {
*** 333,342 **** --- 339,356 ----
    case vmIntrinsics::_bitCount_i:
    case vmIntrinsics::_bitCount_l:
      if (!UsePopCountInstruction) return NULL;
      break;

+   case vmIntrinsics::_Reference_get:
+     // It is only when G1 is enabled that we absolutely
+     // need to use the intrinsic version of Reference.get()
+     // so that the value in the referent field, if necessary,
+     // can be registered by the pre-barrier code.
+     if (!UseG1GC) return NULL;
+     break;
+
    default:
      assert(id <= vmIntrinsics::LAST_COMPILER_INLINE, "caller responsibility");
      assert(id != vmIntrinsics::_Object_init && id != vmIntrinsics::_invoke, "enum out of order?");
      break;
  }
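The comment in this hunk carries the rationale for the whole patch: under G1's snapshot-at-the-beginning (SATB) concurrent marking, a compiled Reference.get() that loads the referent without a pre-barrier can hand a strong reference to the mutator while the marker never learns the object is live. A minimal Java sketch of the pattern being protected (the class name and sizes below are illustrative, not part of the patch):

import java.lang.ref.WeakReference;

public class ReferentEscape {
    public static void main(String[] args) {
        WeakReference<byte[]> ref = new WeakReference<byte[]>(new byte[1024]);
        // With this change, a JIT-compiled get() is intrinsified so the loaded
        // referent is recorded in an SATB log buffer. Without the barrier, a
        // concurrently marking G1 could miss the object even though this
        // thread now holds a strong reference to it.
        byte[] data = ref.get();
        if (data != null) {
            System.out.println("referent still live: " + data.length + " bytes");
        }
    }
}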
*** 384,400 **** --- 398,423 ----
      char buf[1000];
      const char* str = vmIntrinsics::short_name_as_C_string(intrinsic_id(), buf, sizeof(buf));
      tty->print_cr("Intrinsic %s", str);
    }
  #endif
+
    if (kit.try_to_inline()) {
      if (PrintIntrinsics || PrintInlining NOT_PRODUCT( || PrintOptoInlining) ) {
+       if (jvms->has_method()) {
+         // Not a root compile.
          tty->print("Inlining intrinsic %s%s at bci:%d in",
                     vmIntrinsics::name_at(intrinsic_id()),
                     (is_virtual() ? " (virtual)" : ""), kit.bci());
          kit.caller()->print_short_name(tty);
          tty->print_cr(" (%d bytes)", kit.caller()->code_size());
+       } else {
+         // Root compile
+         tty->print_cr("Generating intrinsic %s%s at bci:%d",
+                       vmIntrinsics::name_at(intrinsic_id()),
+                       (is_virtual() ? " (virtual)" : ""), kit.bci());
+       }
      }
      C->gather_intrinsic_statistics(intrinsic_id(), is_virtual(), Compile::_intrinsic_worked);
      if (C->log()) {
        C->log()->elem("intrinsic id='%s'%s nodes='%d'",
                       vmIntrinsics::name_at(intrinsic_id()),
*** 403,417 **** --- 426,448 ----
      }
      return kit.transfer_exceptions_into_jvms();
    }
    if (PrintIntrinsics) {
+     if (jvms->has_method()) {
+       // Not a root compile.
        tty->print("Did not inline intrinsic %s%s at bci:%d in",
                   vmIntrinsics::name_at(intrinsic_id()),
                   (is_virtual() ? " (virtual)" : ""), kit.bci());
        kit.caller()->print_short_name(tty);
        tty->print_cr(" (%d bytes)", kit.caller()->code_size());
+     } else {
+       // Root compile
+       tty->print("Did not generate intrinsic %s%s at bci:%d in",
+                  vmIntrinsics::name_at(intrinsic_id()),
+                  (is_virtual() ? " (virtual)" : ""), kit.bci());
+     }
    }
    C->gather_intrinsic_statistics(intrinsic_id(), is_virtual(), Compile::_intrinsic_failed);
    return NULL;
  }
*** 419,428 **** --- 450,467 ----
    // Handle symbolic names for otherwise undistinguished boolean switches:
    const bool is_store = true;
    const bool is_native_ptr = true;
    const bool is_static = true;

+   if (!jvms()->has_method()) {
+     // Root JVMState has a null method.
+     assert(map()->memory()->Opcode() == Op_Parm, "");
+     // Insert the memory aliasing node
+     set_all_memory(reset_memory());
+   }
+   assert(merged_memory(), "");
+
    switch (intrinsic_id()) {
    case vmIntrinsics::_hashCode:
      return inline_native_hashcode(intrinsic()->is_virtual(), !is_static);
    case vmIntrinsics::_identityHashCode:
      return inline_native_hashcode(/*!virtual*/ false, is_static);
*** 659,668 **** --- 698,710 ----
      return inline_native_AtomicLong_attemptUpdate();

    case vmIntrinsics::_getCallerClass:
      return inline_native_Reflection_getCallerClass();

+   case vmIntrinsics::_Reference_get:
+     return inline_reference_get();
+
    default:
      // If you get here, it may be that someone has added a new intrinsic
      // to the list in vmSymbols.hpp without implementing it here.
  #ifndef PRODUCT
      if ((PrintMiscellaneous && (Verbose || WizardMode)) || PrintOpto) {
*** 2077,2086 **** --- 2119,2190 ----
  //----------------------------inline_unsafe_access----------------------------

  const static BasicType T_ADDRESS_HOLDER = T_LONG;

+ // Helper that guards and inserts a G1 pre-barrier.
+ void LibraryCallKit::insert_g1_pre_barrier(Node* base_oop, Node* offset, Node* pre_val) {
+   assert(UseG1GC, "should not call this otherwise");
+
+   // We need to generate the following....
+   //
+   // if (offset == java_lang_ref_Reference::referent_offset) {
+   //   if (base != null) {
+   //     if (klass(base)->reference_type() != REF_NONE) {
+   //       pre_barrier(_, pre_val, ...);
+   //     }
+   //   }
+   // }
+
+   float likely   = PROB_LIKELY(0.999);
+   float unlikely = PROB_UNLIKELY(0.999);
+
+   IdealKit ideal(gvn(), control(), merged_memory());
+ #define __ ideal.
+
+   const int reference_type_offset = instanceKlass::reference_type_offset_in_bytes() +
+                                     sizeof(oopDesc);
+
+   Node* referent_off = __ ConI(java_lang_ref_Reference::referent_offset);
+
+   __ if_then(offset, BoolTest::eq, referent_off, unlikely); {
+     __ if_then(base_oop, BoolTest::ne, null(), likely); {
+       Node* k_adr = __ AddP(base_oop, base_oop, __ ConX(oopDesc::klass_offset_in_bytes()));
+
+       Node* klass = _gvn.transform( LoadKlassNode::make(gvn(), immutable_memory(), k_adr,
+                                                         TypeRawPtr::BOTTOM,
+                                                         TypeKlassPtr::OBJECT_OR_NULL) );
+
+       Node* ref_typ_adr = __ AddP(klass, klass, __ ConX(reference_type_offset));
+       Node* ref_none_val = __ ConI(REF_NONE);
+
+       Node* ref_typ = __ load(__ ctrl(), ref_typ_adr, TypeInt::INT, T_INT, Compile::AliasIdxRaw);
+
+       __ if_then(ref_typ, BoolTest::ne, ref_none_val, unlikely); {
+
+         // Sync IdealKit and graphKit.
+         set_all_memory(__ merged_memory());
+
+         // Use the pre-barrier to record the value in the referent field
+         pre_barrier(false /* do_load */,
+                     __ ctrl(),
+                     NULL /* obj */, NULL /* adr */, -1 /* alias_idx */, NULL /* val */, NULL /* val_type */,
+                     pre_val /* pre_val */,
+                     T_OBJECT);
+
+         // Update IdealKit memory.
+         __ set_all_memory(merged_memory());
+         __ set_ctrl(control());
+       } __ end_if(); // _ref_type != ref_none
+     } __ end_if(); // base != NULL
+   } __ end_if(); // offset == referent_offset
+
+   // Final sync IdealKit and GraphKit.
+   sync_kit(ideal);
+ #undef __
+ }
+
  // Interpret Unsafe.fieldOffset cookies correctly:
  extern jlong Unsafe_field_offset_to_byte_offset(jlong field_offset);

  bool LibraryCallKit::inline_unsafe_access(bool is_native_ptr, bool is_store, BasicType type, bool is_volatile) {
    if (callee()->is_static()) return false; // caller must have the capability!
*** 2153,2165 ****
    }

    // Build address expression.  See the code in inline_unsafe_prefetch.
    Node *adr;
    Node *heap_base_oop = top();
    if (!is_native_ptr) {
      // The offset is a value produced by Unsafe.staticFieldOffset or Unsafe.objectFieldOffset
!     Node* offset = pop_pair();
      // The base is either a Java object or a value produced by Unsafe.staticFieldBase
      Node* base = pop();
      // We currently rely on the cookies produced by Unsafe.xxxFieldOffset
      // to be plain byte offsets, which are also the same as those accepted
      // by oopDesc::field_base.
--- 2257,2271 ----
    }

    // Build address expression.  See the code in inline_unsafe_prefetch.
    Node *adr;
    Node *heap_base_oop = top();
+   Node* offset = top();
+
    if (!is_native_ptr) {
      // The offset is a value produced by Unsafe.staticFieldOffset or Unsafe.objectFieldOffset
!     offset = pop_pair();
      // The base is either a Java object or a value produced by Unsafe.staticFieldBase
      Node* base = pop();
      // We currently rely on the cookies produced by Unsafe.xxxFieldOffset
      // to be plain byte offsets, which are also the same as those accepted
      // by oopDesc::field_base.
*** 2196,2205 **** --- 2302,2318 ----
    // the barriers get omitted and the unsafe reference begins to "pollute"
    // the alias analysis of the rest of the graph, either Compile::can_alias
    // or Compile::must_alias will throw a diagnostic assert.)
    bool need_mem_bar = (alias_type->adr_type() == TypeOopPtr::BOTTOM);

+   // If we are reading the value of the referent field of a Reference
+   // object (either by using Unsafe directly or through reflection)
+   // then, if G1 is enabled, we need to record the referent in an
+   // SATB log buffer using the pre-barrier mechanism.
+   bool need_read_barrier = UseG1GC && !is_native_ptr && !is_store &&
+                            offset != top() && heap_base_oop != top();
+
    if (!is_store && type == T_OBJECT) {
      // Attempt to infer a sharper value type from the offset and base type.
      ciKlass* sharpened_klass = NULL;

      // See if it is an instance field, with an object type.
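For context, need_read_barrier targets oop loads like the one below, where user code reads Reference.referent through Unsafe directly; this is a hedged sketch of the access shape, not code from the patch:

import java.lang.ref.Reference;
import java.lang.ref.WeakReference;
import java.lang.reflect.Field;
import sun.misc.Unsafe;

public class UnsafeReferentRead {
    public static void main(String[] args) throws Exception {
        Field uf = Unsafe.class.getDeclaredField("theUnsafe");
        uf.setAccessible(true);
        Unsafe unsafe = (Unsafe) uf.get(null);

        WeakReference<Object> ref = new WeakReference<Object>(new Object());
        long offset = unsafe.objectFieldOffset(
            Reference.class.getDeclaredField("referent"));

        // An oop load from a heap base with a non-constant offset: exactly the
        // shape for which inline_unsafe_access now emits the guarded G1
        // pre-barrier via insert_g1_pre_barrier().
        Object o = unsafe.getObject(ref, offset);
        System.out.println(o);
    }
}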
*** 2279,2290 ****
    case T_CHAR:
    case T_BYTE:
    case T_SHORT:
    case T_INT:
    case T_FLOAT:
    case T_OBJECT:
!     push( p );
      break;
    case T_ADDRESS:
      // Cast to an int type.
      p = _gvn.transform( new (C, 2) CastP2XNode(NULL,p) );
      p = ConvX2L(p);
--- 2392,2408 ----
    case T_CHAR:
    case T_BYTE:
    case T_SHORT:
    case T_INT:
    case T_FLOAT:
+     push(p);
+     break;
    case T_OBJECT:
!     if (need_read_barrier) {
!       insert_g1_pre_barrier(heap_base_oop, offset, p);
!     }
!     push(p);
      break;
    case T_ADDRESS:
      // Cast to an int type.
      p = _gvn.transform( new (C, 2) CastP2XNode(NULL,p) );
      p = ConvX2L(p);
*** 2537,2547 ****
      cas = _gvn.transform(new (C, 5) CompareAndSwapLNode(control(), mem, adr, newval, oldval));
      break;
    case T_OBJECT:
      // reference stores need a store barrier.
      // (They don't if CAS fails, but it isn't worth checking.)
!     pre_barrier(control(), base, adr, alias_idx, newval, value_type->make_oopptr(), T_OBJECT);
  #ifdef _LP64
      if (adr->bottom_type()->is_ptr_to_narrowoop()) {
        Node *newval_enc = _gvn.transform(new (C, 2) EncodePNode(newval, newval->bottom_type()->make_narrowoop()));
        Node *oldval_enc = _gvn.transform(new (C, 2) EncodePNode(oldval, oldval->bottom_type()->make_narrowoop()));
        cas = _gvn.transform(new (C, 5) CompareAndSwapNNode(control(), mem, adr,
--- 2655,2668 ----
      cas = _gvn.transform(new (C, 5) CompareAndSwapLNode(control(), mem, adr, newval, oldval));
      break;
    case T_OBJECT:
      // reference stores need a store barrier.
      // (They don't if CAS fails, but it isn't worth checking.)
!     pre_barrier(true /* do_load */,
!                 control(), base, adr, alias_idx, newval, value_type->make_oopptr(),
!                 NULL /* pre_val */,
!                 T_OBJECT);
  #ifdef _LP64
      if (adr->bottom_type()->is_ptr_to_narrowoop()) {
        Node *newval_enc = _gvn.transform(new (C, 2) EncodePNode(newval, newval->bottom_type()->make_narrowoop()));
        Node *oldval_enc = _gvn.transform(new (C, 2) EncodePNode(oldval, oldval->bottom_type()->make_narrowoop()));
        cas = _gvn.transform(new (C, 5) CompareAndSwapNNode(control(), mem, adr,
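The reworked call threads the new do_load/pre_val arguments through: for a CAS the pre-barrier still performs the load of the old value itself (do_load = true), so no pre_val is passed. A small sketch of the Java-level operation that reaches this T_OBJECT case (class names are illustrative):

import java.util.concurrent.atomic.AtomicReference;

public class CasStore {
    public static void main(String[] args) {
        AtomicReference<String> slot = new AtomicReference<String>("old");
        // compareAndSet bottoms out in Unsafe.compareAndSwapObject, i.e. the
        // T_OBJECT case above; the pre-barrier must record the overwritten
        // value ("old") so SATB marking does not lose it.
        boolean swapped = slot.compareAndSet("old", "new");
        System.out.println(swapped + " -> " + slot.get());
    }
}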
*** 5313,5317 **** --- 5434,5479 ----
      make_runtime_call(RC_LEAF|RC_NO_FP,
                        OptoRuntime::fast_arraycopy_Type(),
                        copyfunc_addr, copyfunc_name, adr_type,
                        src_start, dest_start, copy_length XTOP);
  }
+
+ //----------------------------inline_reference_get----------------------------
+
+ bool LibraryCallKit::inline_reference_get() {
+   const int nargs = 1; // self
+
+   guarantee(java_lang_ref_Reference::referent_offset > 0,
+             "should have already been set");
+
+   int referent_offset = java_lang_ref_Reference::referent_offset;
+
+   // Restore the stack and pop off the argument
+   _sp += nargs;
+   Node *reference_obj = pop();
+
+   // Null check on self without removing any arguments.
+   _sp += nargs;
+   reference_obj = do_null_check(reference_obj, T_OBJECT);
+   _sp -= nargs;
+
+   if (stopped()) return true;
+
+   Node *adr = basic_plus_adr(reference_obj, reference_obj, referent_offset);
+
+   ciInstanceKlass* klass = env()->Object_klass();
+   const TypeOopPtr* object_type = TypeOopPtr::make_from_klass(klass);
+
+   Node* no_ctrl = NULL;
+   Node *result = make_load(no_ctrl, adr, object_type, T_OBJECT);
+
+   // Use the pre-barrier to record the value in the referent field
+   pre_barrier(false /* do_load */,
+               control(),
+               NULL /* obj */, NULL /* adr */, -1 /* alias_idx */, NULL /* val */, NULL /* val_type */,
+               result /* pre_val */,
+               T_OBJECT);
+
+   push(result);
+   return true;
+ }
+
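inline_reference_get() covers direct calls to Reference.get(); the reflective route to the same field goes through Unsafe-based accessors and is caught by the need_read_barrier logic in inline_unsafe_access above. A hedged sketch of that reflective route (illustrative only):

import java.lang.ref.Reference;
import java.lang.ref.WeakReference;
import java.lang.reflect.Field;

public class ReflectiveReferentRead {
    public static void main(String[] args) throws Exception {
        WeakReference<Object> ref = new WeakReference<Object>(new Object());
        Field f = Reference.class.getDeclaredField("referent");
        f.setAccessible(true);
        // Field.get on an object field is implemented over Unsafe once the
        // accessor is inflated, so the read-barrier path above applies here
        // just as it does to direct Unsafe.getObject calls.
        Object o = f.get(ref);
        System.out.println(o);
    }
}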