
src/share/vm/c1/c1_Runtime1.cpp

rev 8961 : [mq]: diff-shenandoah.patch

*** 58,67 ****
--- 58,68 ----
  #include "runtime/vframe.hpp"
  #include "runtime/vframeArray.hpp"
  #include "runtime/vm_version.hpp"
  #include "utilities/copy.hpp"
  #include "utilities/events.hpp"
+ #include "gc/shenandoah/shenandoahBarrierSet.hpp"


  // Implementation of StubAssembler

  StubAssembler::StubAssembler(CodeBuffer* code, const char * name, int stub_id) : C1_MacroAssembler(code) {
*** 202,211 ****
--- 203,213 ----
  #ifdef ASSERT
    // Make sure that stubs that need oopmaps have them
    switch (id) {
      // These stubs don't need to have an oopmap
      case dtrace_object_alloc_id:
+     case shenandoah_write_barrier_slow_id:
      case g1_pre_barrier_slow_id:
      case g1_post_barrier_slow_id:
      case slow_subtype_check_id:
      case fpu2long_stub_id:
      case unwind_exception_id:
*** 315,324 ****
--- 317,327 ----
      FUNCTION_CASE(entry, trace_block_entry);
  #ifdef TRACE_HAVE_INTRINSICS
      FUNCTION_CASE(entry, TRACE_TIME_METHOD);
  #endif
      FUNCTION_CASE(entry, StubRoutines::updateBytesCRC32());
+     FUNCTION_CASE(entry, ShenandoahBarrierSet::resolve_and_maybe_copy_oop_c1);

  #undef FUNCTION_CASE

    // Soft float adds more runtime names.
    return pd_name_for_address(entry);
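Note: the added FUNCTION_CASE registers ShenandoahBarrierSet::resolve_and_maybe_copy_oop_c1 with Runtime1's address-to-name table, so debug output (for example, disassembly annotations of C1 runtime calls) can label calls to the barrier slow path by name. For context, the macro is defined earlier in this file as a compare-and-stringify helper; the exact form below is quoted from memory and should be treated as an approximation, not as part of this patch:

    // Compare the queried runtime entry address against a known function;
    // on a match, return that function's name as a C string for printing.
    #define FUNCTION_CASE(a, f) \
      if ((intptr_t)a == CAST_FROM_FN_PTR(intptr_t, f))  return #f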
*** 669,679 ****
  JRT_ENTRY_NO_ASYNC(void, Runtime1::monitorenter(JavaThread* thread, oopDesc* obj, BasicObjectLock* lock))
    NOT_PRODUCT(_monitorenter_slowcase_cnt++;)
    if (PrintBiasedLockingStatistics) {
      Atomic::inc(BiasedLocking::slow_path_entry_count_addr());
    }
!   Handle h_obj(thread, obj);
    assert(h_obj()->is_oop(), "must be NULL or an object");
    if (UseBiasedLocking) {
      // Retry fast entry if bias is revoked to avoid unnecessary inflation
      ObjectSynchronizer::fast_enter(h_obj, lock->lock(), true, CHECK);
    } else {
--- 672,682 ----
  JRT_ENTRY_NO_ASYNC(void, Runtime1::monitorenter(JavaThread* thread, oopDesc* obj, BasicObjectLock* lock))
    NOT_PRODUCT(_monitorenter_slowcase_cnt++;)
    if (PrintBiasedLockingStatistics) {
      Atomic::inc(BiasedLocking::slow_path_entry_count_addr());
    }
!   Handle h_obj(thread, oopDesc::bs()->resolve_and_maybe_copy_oop(obj));
    assert(h_obj()->is_oop(), "must be NULL or an object");
    if (UseBiasedLocking) {
      // Retry fast entry if bias is revoked to avoid unnecessary inflation
      ObjectSynchronizer::fast_enter(h_obj, lock->lock(), true, CHECK);
    } else {
*** 694,704 ****
    assert(thread == JavaThread::current(), "threads must correspond");
    assert(thread->last_Java_sp(), "last_Java_sp must be set");
    // monitorexit is non-blocking (leaf routine) => no exceptions can be thrown
    EXCEPTION_MARK;

!   oop obj = lock->obj();
    assert(obj->is_oop(), "must be NULL or an object");
    if (UseFastLocking) {
      // When using fast locking, the compiled code has already tried the fast case
      ObjectSynchronizer::slow_exit(obj, lock->lock(), THREAD);
    } else {
--- 697,707 ----
    assert(thread == JavaThread::current(), "threads must correspond");
    assert(thread->last_Java_sp(), "last_Java_sp must be set");
    // monitorexit is non-blocking (leaf routine) => no exceptions can be thrown
    EXCEPTION_MARK;

!   oop obj = oopDesc::bs()->resolve_and_maybe_copy_oop(lock->obj());
    assert(obj->is_oop(), "must be NULL or an object");
    if (UseFastLocking) {
      // When using fast locking, the compiled code has already tried the fast case
      ObjectSynchronizer::slow_exit(obj, lock->lock(), THREAD);
    } else {
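Note on the two hunks above: monitorenter and monitorexit mutate the object's mark word, so under Shenandoah they must operate on the to-space copy; installing a lock in a stale from-space copy would be lost when the object is evacuated. resolve_and_maybe_copy_oop is the write barrier that guarantees this. Below is a minimal, self-contained sketch of the Brooks-pointer mechanics these calls are understood to implement; every name in it is illustrative, not HotSpot API:

    #include <atomic>

    struct Obj {
      std::atomic<Obj*> fwd;   // Brooks forwarding pointer; points to self until copied
    };

    // Stand-ins for heap queries a real barrier set would provide.
    bool in_collection_set(Obj* o);  // is the object in a region being evacuated?
    Obj* evacuate(Obj* o);           // copy to to-space, CAS the forwarding pointer

    // Read barrier (resolve_oop analogue): chase the forwarding pointer so
    // reads always see the current copy of the object.
    inline Obj* resolve(Obj* o) {
      return (o == NULL) ? NULL : o->fwd.load(std::memory_order_acquire);
    }

    // Write barrier (resolve_and_maybe_copy_oop analogue): before mutating --
    // here, before locking touches the mark word -- make sure the mutator
    // holds a to-space copy, evacuating the object on demand.
    Obj* resolve_and_maybe_copy(Obj* o) {
      Obj* cur = resolve(o);
      if (cur != NULL && in_collection_set(cur)) {
        cur = evacuate(cur);
      }
      return cur;
    }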
*** 1404,1413 ****
--- 1407,1420 ----
    if (!dst->is_array() || !src->is_array()) return ac_failed;
    if ((unsigned int) arrayOop(src)->length() < (unsigned int)src_pos + (unsigned int)length) return ac_failed;
    if ((unsigned int) arrayOop(dst)->length() < (unsigned int)dst_pos + (unsigned int)length) return ac_failed;
    if (length == 0) return ac_ok;
+ 
+   src = oopDesc::bs()->resolve_oop(src);
+   dst = oopDesc::bs()->resolve_and_maybe_copy_oop(dst);
+ 
    if (src->is_typeArray()) {
      Klass* klass_oop = src->klass();
      if (klass_oop != dst->klass()) return ac_failed;
      TypeArrayKlass* klass = TypeArrayKlass::cast(klass_oop);
      const int l2es = klass->log2_element_size();
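Note: the arraycopy hunk shows the barrier asymmetry. src is only loaded from, so the read barrier (resolve_oop) suffices; dst is stored into, so it takes the write barrier, and both resolved pointers must be assigned back so the copy proceeds on to-space copies. A sketch of that intent, again with illustrative names only:

    struct Arr;                  // opaque stand-in for a HotSpot array oop
    Arr* read_barrier(Arr* a);   // resolve_oop analogue
    Arr* write_barrier(Arr* a);  // resolve_and_maybe_copy_oop analogue

    // Resolve both operands before copying: loads from src may legally see
    // either copy, but stores into dst must land in to-space.
    void barrier_aware_arraycopy(Arr*& src, Arr*& dst) {
      src = read_barrier(src);
      dst = write_barrier(dst);
      // ... bounds/type checks and the element copy use the resolved pointers ...
    }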