< prev index next >

src/hotspot/share/opto/library_call.cpp

Print this page

        

*** 250,259 **** --- 250,261 ---- DecoratorSet mo_decorator_for_access_kind(AccessKind kind); bool inline_unsafe_access(bool is_store, BasicType type, AccessKind kind, bool is_unaligned); static bool klass_needs_init_guard(Node* kls); bool inline_unsafe_allocate(); bool inline_unsafe_newArray(bool uninitialized); + bool inline_unsafe_writeback0(); + bool inline_unsafe_writebackSync0(bool isPre); bool inline_unsafe_copyMemory(); bool inline_native_currentThread(); bool inline_native_time_funcs(address method, const char* funcName); #ifdef JFR_HAVE_INTRINSICS
*** 748,757 **** --- 750,762 ---- case vmIntrinsics::_getClassId: return inline_native_classID(); case vmIntrinsics::_getEventWriter: return inline_native_getEventWriter(); #endif case vmIntrinsics::_currentTimeMillis: return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeMillis), "currentTimeMillis"); case vmIntrinsics::_nanoTime: return inline_native_time_funcs(CAST_FROM_FN_PTR(address, os::javaTimeNanos), "nanoTime"); + case vmIntrinsics::_writeback0: return inline_unsafe_writeback0(); + case vmIntrinsics::_writebackPreSync0: return inline_unsafe_writebackSync0(true); + case vmIntrinsics::_writebackPostSync0: return inline_unsafe_writebackSync0(false); case vmIntrinsics::_allocateInstance: return inline_unsafe_allocate(); case vmIntrinsics::_copyMemory: return inline_unsafe_copyMemory(); case vmIntrinsics::_getLength: return inline_native_getLength(); case vmIntrinsics::_copyOf: return inline_array_copyOf(false); case vmIntrinsics::_copyOfRange: return inline_array_copyOf(true);
*** 2752,2761 **** --- 2757,2815 ---- ciInstanceKlass* ik = klsptr->klass()->as_instance_klass(); // don't need a guard for a klass that is already initialized return !ik->is_initialized(); } + //----------------------------inline_unsafe_writeback0------------------------- + // public native void Unsafe.writeback0(long address) + bool LibraryCallKit::inline_unsafe_writeback0() { + if (!Matcher::has_match_rule(Op_CacheWB)) { + return false; + } + #ifndef PRODUCT + assert(Matcher::has_match_rule(Op_CacheWBPreSync), "found match rule for CacheWB but not CacheWBPreSync"); + assert(Matcher::has_match_rule(Op_CacheWBPostSync), "found match rule for CacheWB but not CacheWBPostSync"); + ciSignature* sig = callee()->signature(); + assert(sig->type_at(0)->basic_type() == T_LONG, "Unsafe_writeback0 address is long!"); + #endif + null_check_receiver(); // null-check, then ignore + Node *addr = argument(1); + addr = new CastX2PNode(addr); + addr = _gvn.transform(addr); + Node *flush = new CacheWBNode(control(), memory(TypeRawPtr::BOTTOM), addr); + flush = _gvn.transform(flush); + set_memory(flush, TypeRawPtr::BOTTOM); + return true; + } + + //----------------------------inline_unsafe_writebackSync0---------------------- + // public native void Unsafe.writebackPreSync0() and Unsafe.writebackPostSync0() + bool LibraryCallKit::inline_unsafe_writebackSync0(bool isPre) { + if (isPre && !Matcher::has_match_rule(Op_CacheWBPreSync)) { + return false; + } + if (!isPre && !Matcher::has_match_rule(Op_CacheWBPostSync)) { + return false; + } + #ifndef PRODUCT + assert(Matcher::has_match_rule(Op_CacheWB), + (isPre ? 
"found match rule for CacheWBPreSync but not CacheWB" + : "found match rule for CacheWBPostSync but not CacheWB")); + + #endif + null_check_receiver(); // null-check, then ignore + Node *sync; + if (isPre) { + sync = new CacheWBPreSyncNode(control(), memory(TypeRawPtr::BOTTOM)); + } else { + sync = new CacheWBPostSyncNode(control(), memory(TypeRawPtr::BOTTOM)); + } + sync = _gvn.transform(sync); + set_memory(sync, TypeRawPtr::BOTTOM); + return true; + } + //----------------------------inline_unsafe_allocate--------------------------- // public native Object Unsafe.allocateInstance(Class<?> cls); bool LibraryCallKit::inline_unsafe_allocate() { if (callee()->is_static()) return false; // caller must have the capability!
< prev index next >