289 // and ::Release()
290 _ParkEvent = ParkEvent::Allocate(this);
291 _SleepEvent = ParkEvent::Allocate(this);
292 _MutexEvent = ParkEvent::Allocate(this);
293 _MuxEvent = ParkEvent::Allocate(this);
294
295 #ifdef CHECK_UNHANDLED_OOPS
296 if (CheckUnhandledOops) {
297 _unhandled_oops = new UnhandledOops(this);
298 }
299 #endif // CHECK_UNHANDLED_OOPS
300 #ifdef ASSERT
301 if (UseBiasedLocking) {
302 assert((((uintptr_t) this) & (markOopDesc::biased_lock_alignment - 1)) == 0, "forced alignment of thread object failed");
303 assert(this == _real_malloc_address ||
304 this == align_up(_real_malloc_address, (int)markOopDesc::biased_lock_alignment),
305 "bug in forced alignment of thread objects");
306 }
307 #endif // ASSERT
308
309 // Notify the barrier set that a thread is being created. Note that some
310 // threads are created before a barrier set is available. The call to
311 // BarrierSet::on_thread_create() for these threads is therefore deferred
312 // to BarrierSet::set_barrier_set().
313 BarrierSet* const barrier_set = BarrierSet::barrier_set();
314 if (barrier_set != NULL) {
315 barrier_set->on_thread_create(this);
316 } else {
317 DEBUG_ONLY(Threads::inc_threads_before_barrier_set();)
318 }
319 }
320
// Publish this Thread as "the current thread" for the underlying OS thread.
// Two TLS mechanisms are kept in sync: the compiler-based thread-local
// _thr_current (compiled out when only library-based TLS is available) and
// the library-based ThreadLocalStorage. Both must be unset on entry, and the
// final assert verifies that Thread::current() now resolves to this thread.
void Thread::initialize_thread_current() {
#ifndef USE_LIBRARY_BASED_TLS_ONLY
  assert(_thr_current == NULL, "Thread::current already initialized");
  _thr_current = this;
#endif
  assert(ThreadLocalStorage::thread() == NULL, "ThreadLocalStorage::thread already initialized");
  ThreadLocalStorage::set_thread(this);
  // Both TLS views must agree once initialization is complete.
  assert(Thread::current() == ThreadLocalStorage::thread(), "TLS mismatch!");
}
330
// Inverse of initialize_thread_current(): detach this Thread from the
// underlying OS thread's TLS. Asserts first that both TLS mechanisms still
// agree, then clears the compiler-based slot (when present) and the
// library-based ThreadLocalStorage.
void Thread::clear_thread_current() {
  assert(Thread::current() == ThreadLocalStorage::thread(), "TLS mismatch!");
#ifndef USE_LIBRARY_BASED_TLS_ONLY
  _thr_current = NULL;
#endif
  ThreadLocalStorage::set_thread(NULL);
}
3378 // The Threads class links together all active threads, and provides
3379 // operations over all threads. It is protected by the Threads_lock,
3380 // which is also used in other global contexts like safepointing.
3381 // ThreadsListHandles are used to safely perform operations on one
3382 // or more threads without the risk of the thread exiting during the
3383 // operation.
3384 //
3385 // Note: The Threads_lock is currently more widely used than we
3386 // would like. We are actively migrating Threads_lock uses to other
3387 // mechanisms in order to reduce Threads_lock contention.
3388
// Definitions of the Threads class' static fields (declared in the header).
// Per the class comment above, this state is protected by the Threads_lock.
JavaThread* Threads::_thread_list = NULL;   // head of the global JavaThread list
int Threads::_number_of_threads = 0;
int Threads::_number_of_non_daemon_threads = 0;
int Threads::_return_code = 0;
int Threads::_thread_claim_parity = 0;
size_t JavaThread::_stack_size_at_create = 0;

#ifdef ASSERT
// Debug-only bookkeeping.
bool Threads::_vm_complete = false;
// Count of threads created before a BarrierSet was available; incremented
// from the Thread constructor when BarrierSet::barrier_set() is still NULL.
size_t Threads::_threads_before_barrier_set = 0;
#endif
3400
3401 static inline void *prefetch_and_load_ptr(void **addr, intx prefetch_interval) {
3402 Prefetch::read((void*)addr, prefetch_interval);
3403 return *addr;
3404 }
3405
// Possibly the ugliest for loop the world has seen. C++ does not allow
// multiple types in the declaration section of the for loop. In this case
// we are only dealing with pointers and hence can cast them. It looks ugly
// but macros are ugly and therefore it's fine to make things absurdly ugly.
//
// DO_JAVA_THREADS(LIST, X) iterates X over every JavaThread* held by LIST
// (a ThreadsList*), calling prefetch_and_load_ptr on each step so the next
// slot is prefetched PrefetchScanIntervalInBytes ahead of the current read.
// The MACRO_-prefixed variables smuggle the scan interval, the list, and
// the cursor/end pointers through the single JavaThread* declaration slot;
// each is cast back to its real type at every use.
#define DO_JAVA_THREADS(LIST, X) \
  for (JavaThread *MACRO_scan_interval = (JavaThread*)(uintptr_t)PrefetchScanIntervalInBytes, \
       *MACRO_list = (JavaThread*)(LIST), \
       **MACRO_end = ((JavaThread**)((ThreadsList*)MACRO_list)->threads()) + ((ThreadsList*)MACRO_list)->length(), \
       **MACRO_current_p = (JavaThread**)((ThreadsList*)MACRO_list)->threads(), \
       *X = (JavaThread*)prefetch_and_load_ptr((void**)MACRO_current_p, (intx)MACRO_scan_interval); \
       MACRO_current_p != MACRO_end; \
       MACRO_current_p++, \
       X = (JavaThread*)prefetch_and_load_ptr((void**)MACRO_current_p, (intx)MACRO_scan_interval))
|
289 // and ::Release()
290 _ParkEvent = ParkEvent::Allocate(this);
291 _SleepEvent = ParkEvent::Allocate(this);
292 _MutexEvent = ParkEvent::Allocate(this);
293 _MuxEvent = ParkEvent::Allocate(this);
294
295 #ifdef CHECK_UNHANDLED_OOPS
296 if (CheckUnhandledOops) {
297 _unhandled_oops = new UnhandledOops(this);
298 }
299 #endif // CHECK_UNHANDLED_OOPS
300 #ifdef ASSERT
301 if (UseBiasedLocking) {
302 assert((((uintptr_t) this) & (markOopDesc::biased_lock_alignment - 1)) == 0, "forced alignment of thread object failed");
303 assert(this == _real_malloc_address ||
304 this == align_up(_real_malloc_address, (int)markOopDesc::biased_lock_alignment),
305 "bug in forced alignment of thread objects");
306 }
307 #endif // ASSERT
308
309 // Notify the barrier set that a thread is being created. The initial
310 // thread is created before the barrier set is available. The call to
311 // BarrierSet::on_thread_create() for this thread is therefore deferred
312 // to BarrierSet::set_barrier_set().
313 BarrierSet* const barrier_set = BarrierSet::barrier_set();
314 if (barrier_set != NULL) {
315 barrier_set->on_thread_create(this);
316 } else {
317 #ifdef ASSERT
318 static bool initial_thread_created = false;
319 assert(!initial_thread_created, "creating thread before barrier set");
320 initial_thread_created = true;
321 #endif // ASSERT
322 }
323 }
324
// Publish this Thread as "the current thread" for the underlying OS thread.
// Two TLS mechanisms are kept in sync: the compiler-based thread-local
// _thr_current (compiled out when only library-based TLS is available) and
// the library-based ThreadLocalStorage. Both must be unset on entry, and the
// final assert verifies that Thread::current() now resolves to this thread.
void Thread::initialize_thread_current() {
#ifndef USE_LIBRARY_BASED_TLS_ONLY
  assert(_thr_current == NULL, "Thread::current already initialized");
  _thr_current = this;
#endif
  assert(ThreadLocalStorage::thread() == NULL, "ThreadLocalStorage::thread already initialized");
  ThreadLocalStorage::set_thread(this);
  // Both TLS views must agree once initialization is complete.
  assert(Thread::current() == ThreadLocalStorage::thread(), "TLS mismatch!");
}
334
// Inverse of initialize_thread_current(): detach this Thread from the
// underlying OS thread's TLS. Asserts first that both TLS mechanisms still
// agree, then clears the compiler-based slot (when present) and the
// library-based ThreadLocalStorage.
void Thread::clear_thread_current() {
  assert(Thread::current() == ThreadLocalStorage::thread(), "TLS mismatch!");
#ifndef USE_LIBRARY_BASED_TLS_ONLY
  _thr_current = NULL;
#endif
  ThreadLocalStorage::set_thread(NULL);
}
3382 // The Threads class links together all active threads, and provides
3383 // operations over all threads. It is protected by the Threads_lock,
3384 // which is also used in other global contexts like safepointing.
3385 // ThreadsListHandles are used to safely perform operations on one
3386 // or more threads without the risk of the thread exiting during the
3387 // operation.
3388 //
3389 // Note: The Threads_lock is currently more widely used than we
3390 // would like. We are actively migrating Threads_lock uses to other
3391 // mechanisms in order to reduce Threads_lock contention.
3392
// Definitions of the Threads class' static fields (declared in the header).
// Per the class comment above, this state is protected by the Threads_lock.
JavaThread* Threads::_thread_list = NULL;   // head of the global JavaThread list
int Threads::_number_of_threads = 0;
int Threads::_number_of_non_daemon_threads = 0;
int Threads::_return_code = 0;
int Threads::_thread_claim_parity = 0;
size_t JavaThread::_stack_size_at_create = 0;

#ifdef ASSERT
// Debug-only flag recording whether VM startup ran to completion.
bool Threads::_vm_complete = false;
#endif
3403
3404 static inline void *prefetch_and_load_ptr(void **addr, intx prefetch_interval) {
3405 Prefetch::read((void*)addr, prefetch_interval);
3406 return *addr;
3407 }
3408
// Possibly the ugliest for loop the world has seen. C++ does not allow
// multiple types in the declaration section of the for loop. In this case
// we are only dealing with pointers and hence can cast them. It looks ugly
// but macros are ugly and therefore it's fine to make things absurdly ugly.
//
// DO_JAVA_THREADS(LIST, X) iterates X over every JavaThread* held by LIST
// (a ThreadsList*), calling prefetch_and_load_ptr on each step so the next
// slot is prefetched PrefetchScanIntervalInBytes ahead of the current read.
// The MACRO_-prefixed variables smuggle the scan interval, the list, and
// the cursor/end pointers through the single JavaThread* declaration slot;
// each is cast back to its real type at every use.
#define DO_JAVA_THREADS(LIST, X) \
  for (JavaThread *MACRO_scan_interval = (JavaThread*)(uintptr_t)PrefetchScanIntervalInBytes, \
       *MACRO_list = (JavaThread*)(LIST), \
       **MACRO_end = ((JavaThread**)((ThreadsList*)MACRO_list)->threads()) + ((ThreadsList*)MACRO_list)->length(), \
       **MACRO_current_p = (JavaThread**)((ThreadsList*)MACRO_list)->threads(), \
       *X = (JavaThread*)prefetch_and_load_ptr((void**)MACRO_current_p, (intx)MACRO_scan_interval); \
       MACRO_current_p != MACRO_end; \
       MACRO_current_p++, \
       X = (JavaThread*)prefetch_and_load_ptr((void**)MACRO_current_p, (intx)MACRO_scan_interval))
|