48 }
49
// Marks this thread as having a pending asynchronous exception by setting
// the _has_async_exception bit (delegates to set_suspend_flag, defined
// elsewhere — presumably an atomic update of the suspend-flags word).
50 inline void Thread::set_has_async_exception() {
51   set_suspend_flag(_has_async_exception);
52 }
// Clears the pending-asynchronous-exception bit (delegates to
// clear_suspend_flag, defined elsewhere).
53 inline void Thread::clear_has_async_exception() {
54   clear_suspend_flag(_has_async_exception);
55 }
// Sets the _critical_native_unlock suspend flag for this thread
// (delegates to set_suspend_flag, defined elsewhere).
56 inline void Thread::set_critical_native_unlock() {
57   set_suspend_flag(_critical_native_unlock);
58 }
// Clears the _critical_native_unlock suspend flag for this thread
// (delegates to clear_suspend_flag, defined elsewhere).
59 inline void Thread::clear_critical_native_unlock() {
60   clear_suspend_flag(_critical_native_unlock);
61 }
// Sets the _trace_flag suspend flag for this thread
// (delegates to set_suspend_flag, defined elsewhere).
62 inline void Thread::set_trace_flag() {
63   set_suspend_flag(_trace_flag);
64 }
// Clears the _trace_flag suspend flag for this thread
// (delegates to clear_suspend_flag, defined elsewhere).
65 inline void Thread::clear_trace_flag() {
66   clear_suspend_flag(_trace_flag);
67 }
68
// Returns this thread's cumulative allocation counter, "cooked" so that —
// when TLABs are enabled — bytes already consumed in the current TLAB are
// included even though they have not yet been folded into _allocated_bytes.
69 inline jlong Thread::cooked_allocated_bytes() {
    // NOTE(review): the acquire-load presumably pairs with a releasing store
    // where _allocated_bytes is updated — confirm at the store site.
70   jlong allocated_bytes = OrderAccess::load_acquire(&_allocated_bytes);
71   if (UseTLAB) {
        // May race with the owning thread retiring/refilling its TLAB.
72     size_t used_bytes = tlab().used_bytes();
        // Sanity bound: an in-range value cannot come from a torn/
        // half-initialized TLAB read (see comment below).
73     if (used_bytes <= ThreadLocalAllocBuffer::max_size_in_bytes()) {
74       // Comparing used_bytes with the maximum allowed size will ensure
75       // that we don't add the used bytes from a semi-initialized TLAB
76       // ending up with incorrect values. There is still a race between
77       // incrementing _allocated_bytes and clearing the TLAB, that might
78       // cause double counting in rare cases.
79       return allocated_bytes + used_bytes;
80     }
81   }
    // TLABs disabled, or the TLAB snapshot looked bogus: report only the
    // committed counter.
82   return allocated_bytes;
83 }
84
// Atomically installs exchange_value as this thread's ThreadsList hazard
// pointer iff the current value equals compare_value. Returns the value
// observed in _threads_hazard_ptr (== compare_value on success), per the
// usual Atomic::cmpxchg contract — confirm against Atomic::cmpxchg.
85 inline ThreadsList* Thread::cmpxchg_threads_hazard_ptr(ThreadsList* exchange_value, ThreadsList* compare_value) {
86   return (ThreadsList*)Atomic::cmpxchg(exchange_value, &_threads_hazard_ptr, compare_value);
87 }
|
48 }
49
// Marks this thread as having a pending asynchronous exception by setting
// the _has_async_exception bit (delegates to set_suspend_flag, defined
// elsewhere — presumably an atomic update of the suspend-flags word).
50 inline void Thread::set_has_async_exception() {
51   set_suspend_flag(_has_async_exception);
52 }
// Clears the pending-asynchronous-exception bit (delegates to
// clear_suspend_flag, defined elsewhere).
53 inline void Thread::clear_has_async_exception() {
54   clear_suspend_flag(_has_async_exception);
55 }
// Sets the _critical_native_unlock suspend flag for this thread
// (delegates to set_suspend_flag, defined elsewhere).
56 inline void Thread::set_critical_native_unlock() {
57   set_suspend_flag(_critical_native_unlock);
58 }
// Clears the _critical_native_unlock suspend flag for this thread
// (delegates to clear_suspend_flag, defined elsewhere).
59 inline void Thread::clear_critical_native_unlock() {
60   clear_suspend_flag(_critical_native_unlock);
61 }
// Sets the _trace_flag suspend flag for this thread
// (delegates to set_suspend_flag, defined elsewhere).
62 inline void Thread::set_trace_flag() {
63   set_suspend_flag(_trace_flag);
64 }
// Clears the _trace_flag suspend flag for this thread
// (delegates to clear_suspend_flag, defined elsewhere).
65 inline void Thread::clear_trace_flag() {
66   clear_suspend_flag(_trace_flag);
67 }
// Sets the _ea_obj_deopt suspend flag for this thread (delegates to
// set_suspend_flag, defined elsewhere). NOTE(review): "ea" presumably
// refers to escape-analysis-related object deoptimization — confirm
// against the flag's declaration.
68 inline void Thread::set_ea_obj_deopt_flag() {
69   set_suspend_flag(_ea_obj_deopt);
70 }
// Clears the _ea_obj_deopt suspend flag for this thread (delegates to
// clear_suspend_flag, defined elsewhere).
71 inline void Thread::clear_ea_obj_deopt_flag() {
72   clear_suspend_flag(_ea_obj_deopt);
73 }
74
// Returns this thread's cumulative allocation counter, "cooked" so that —
// when TLABs are enabled — bytes already consumed in the current TLAB are
// included even though they have not yet been folded into _allocated_bytes.
75 inline jlong Thread::cooked_allocated_bytes() {
    // NOTE(review): the acquire-load presumably pairs with a releasing store
    // where _allocated_bytes is updated — confirm at the store site.
76   jlong allocated_bytes = OrderAccess::load_acquire(&_allocated_bytes);
77   if (UseTLAB) {
        // May race with the owning thread retiring/refilling its TLAB.
78     size_t used_bytes = tlab().used_bytes();
        // Sanity bound: an in-range value cannot come from a torn/
        // half-initialized TLAB read (see comment below).
79     if (used_bytes <= ThreadLocalAllocBuffer::max_size_in_bytes()) {
80       // Comparing used_bytes with the maximum allowed size will ensure
81       // that we don't add the used bytes from a semi-initialized TLAB
82       // ending up with incorrect values. There is still a race between
83       // incrementing _allocated_bytes and clearing the TLAB, that might
84       // cause double counting in rare cases.
85       return allocated_bytes + used_bytes;
86     }
87   }
    // TLABs disabled, or the TLAB snapshot looked bogus: report only the
    // committed counter.
88   return allocated_bytes;
89 }
90
// Atomically installs exchange_value as this thread's ThreadsList hazard
// pointer iff the current value equals compare_value. Returns the value
// observed in _threads_hazard_ptr (== compare_value on success), per the
// usual Atomic::cmpxchg contract — confirm against Atomic::cmpxchg.
91 inline ThreadsList* Thread::cmpxchg_threads_hazard_ptr(ThreadsList* exchange_value, ThreadsList* compare_value) {
92   return (ThreadsList*)Atomic::cmpxchg(exchange_value, &_threads_hazard_ptr, compare_value);
93 }
|