2280 ClassLoaderData *loader_data = class_loader_data(class_loader);
2281
2282 {
2283 MutexLocker mu1(SystemDictionary_lock, THREAD);
2284
2285 // See whether biased locking is enabled and if so set it for this
2286 // klass.
2287 // Note that this must be done past the last potential blocking
2288 // point / safepoint. We enable biased locking lazily using a
2289 // VM_Operation to iterate the SystemDictionary and installing the
2290 // biasable mark word into each InstanceKlass's prototype header.
2291 // To avoid race conditions where we accidentally miss enabling the
2292 // optimization for one class in the process of being added to the
2293 // dictionary, we must not safepoint after the test of
2294 // BiasedLocking::enabled().
2295 if (UseBiasedLocking && BiasedLocking::enabled()) {
2296 // Set biased locking bit for all loaded classes; it will be
2297 // cleared if revocation occurs too often for this type
2298 // NOTE that we must only do this when the class is initially
2299 // defined, not each time it is referenced from a new class loader
2300 if (k->class_loader() == class_loader()) {
2301 k->set_prototype_header(markOopDesc::biased_locking_prototype());
2302 }
2303 }
2304
2305 // Make a new dictionary entry.
2306 Dictionary* dictionary = loader_data->dictionary();
// Re-check under the lock before inserting: a competing (parallel) define
// may already have added this class; only the first definer updates the
// dictionary and bumps the modification count.
2307 InstanceKlass* sd_check = find_class(d_hash, name, dictionary);
2308 if (sd_check == NULL) {
2309 dictionary->add_klass(d_hash, name, k);
2310 notice_modification();
2311 }
2312 #ifdef ASSERT
// Whether we inserted or a parallel definer did, the entry must exist now.
2313 sd_check = find_class(d_hash, name, dictionary);
2314 assert (sd_check != NULL, "should have entry in dictionary");
2315 // Note: there may be a placeholder entry: for circularity testing
2316 // or for parallel defines
2317 #endif
// Wake all threads waiting on SystemDictionary_lock — presumably parallel
// definers/loaders of this class; verify against the wait sites.
2318 SystemDictionary_lock->notify_all();
2319 }
2320 }
|
2280 ClassLoaderData *loader_data = class_loader_data(class_loader);
2281
2282 {
2283 MutexLocker mu1(SystemDictionary_lock, THREAD);
2284
2285 // See whether biased locking is enabled and if so set it for this
2286 // klass.
2287 // Note that this must be done past the last potential blocking
2288 // point / safepoint. We enable biased locking lazily using a
2289 // VM_Operation to iterate the SystemDictionary and installing the
2290 // biasable mark word into each InstanceKlass's prototype header.
2291 // To avoid race conditions where we accidentally miss enabling the
2292 // optimization for one class in the process of being added to the
2293 // dictionary, we must not safepoint after the test of
2294 // BiasedLocking::enabled().
2295 if (UseBiasedLocking && BiasedLocking::enabled()) {
2296 // Set biased locking bit for all loaded classes; it will be
2297 // cleared if revocation occurs too often for this type
2298 // NOTE that we must only do this when the class is initially
2299 // defined, not each time it is referenced from a new class loader
// Value classes are excluded: a value type never receives the biased-locking
// prototype header (its mark word encoding differs — confirm against the
// markOop/value-type definitions).
2300 if (k->class_loader() == class_loader() && !k->is_value()) {
2301 k->set_prototype_header(markOopDesc::biased_locking_prototype());
2302 }
2303 }
2304
2305 // Make a new dictionary entry.
2306 Dictionary* dictionary = loader_data->dictionary();
// Re-check under the lock before inserting: a competing (parallel) define
// may already have added this class; only the first definer updates the
// dictionary and bumps the modification count.
2307 InstanceKlass* sd_check = find_class(d_hash, name, dictionary);
2308 if (sd_check == NULL) {
2309 dictionary->add_klass(d_hash, name, k);
2310 notice_modification();
2311 }
2312 #ifdef ASSERT
// Whether we inserted or a parallel definer did, the entry must exist now.
2313 sd_check = find_class(d_hash, name, dictionary);
2314 assert (sd_check != NULL, "should have entry in dictionary");
2315 // Note: there may be a placeholder entry: for circularity testing
2316 // or for parallel defines
2317 #endif
// Wake all threads waiting on SystemDictionary_lock — presumably parallel
// definers/loaders of this class; verify against the wait sites.
2318 SystemDictionary_lock->notify_all();
2319 }
2320 }
|