src/hotspot/share/classfile/classLoaderData.cpp

@@ -185,15 +185,15 @@
 }
 
 oop* ClassLoaderData::ChunkedHandleList::add(oop o) {
   if (_head == NULL || _head->_size == Chunk::CAPACITY) {
     Chunk* next = new Chunk(_head);
-    OrderAccess::release_store(&_head, next);
+    Atomic::release_store(&_head, next);
   }
   oop* handle = &_head->_data[_head->_size];
   NativeAccess<IS_DEST_UNINITIALIZED>::oop_store(handle, o);
-  OrderAccess::release_store(&_head->_size, _head->_size + 1);
+  Atomic::release_store(&_head->_size, _head->_size + 1);
   return handle;
 }
 
 int ClassLoaderData::ChunkedHandleList::count() const {
   int count = 0;

@@ -212,14 +212,14 @@
     }
   }
 }
 
 void ClassLoaderData::ChunkedHandleList::oops_do(OopClosure* f) {
-  Chunk* head = OrderAccess::load_acquire(&_head);
+  Chunk* head = Atomic::load_acquire(&_head);
   if (head != NULL) {
     // Must be careful when reading size of head
-    oops_do_chunk(f, head, OrderAccess::load_acquire(&head->_size));
+    oops_do_chunk(f, head, Atomic::load_acquire(&head->_size));
     for (Chunk* c = head->_next; c != NULL; c = c->_next) {
       oops_do_chunk(f, c, c->_size);
     }
   }
 }

@@ -324,36 +324,36 @@
   _handles.oops_do(f);
 }
 
 void ClassLoaderData::classes_do(KlassClosure* klass_closure) {
   // Lock-free access requires load_acquire
-  for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
+  for (Klass* k = Atomic::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
     klass_closure->do_klass(k);
     assert(k != k->next_link(), "no loops!");
   }
 }
 
 void ClassLoaderData::classes_do(void f(Klass * const)) {
   // Lock-free access requires load_acquire
-  for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
+  for (Klass* k = Atomic::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
     f(k);
     assert(k != k->next_link(), "no loops!");
   }
 }
 
 void ClassLoaderData::methods_do(void f(Method*)) {
   // Lock-free access requires load_acquire
-  for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
+  for (Klass* k = Atomic::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
     if (k->is_instance_klass() && InstanceKlass::cast(k)->is_loaded()) {
       InstanceKlass::cast(k)->methods_do(f);
     }
   }
 }
 
 void ClassLoaderData::loaded_classes_do(KlassClosure* klass_closure) {
   // Lock-free access requires load_acquire
-  for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
+  for (Klass* k = Atomic::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
     // Do not filter ArrayKlass oops here...
     if (k->is_array_klass() || (k->is_instance_klass() && InstanceKlass::cast(k)->is_loaded())) {
 #ifdef ASSERT
       oop m = k->java_mirror();
       assert(m != NULL, "NULL mirror");

@@ -364,11 +364,11 @@
   }
 }
 
 void ClassLoaderData::classes_do(void f(InstanceKlass*)) {
   // Lock-free access requires load_acquire
-  for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
+  for (Klass* k = Atomic::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
     if (k->is_instance_klass()) {
       f(InstanceKlass::cast(k));
     }
     assert(k != k->next_link(), "no loops!");
   }

@@ -463,11 +463,11 @@
     MutexLocker ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
     Klass* old_value = _klasses;
     k->set_next_link(old_value);
     // Link the new item into the list, making sure the linked class is stable
     // since the list can be walked without a lock
-    OrderAccess::release_store(&_klasses, k);
+    Atomic::release_store(&_klasses, k);
     if (k->is_array_klass()) {
       ClassLoaderDataGraph::inc_array_classes(1);
     } else {
       ClassLoaderDataGraph::inc_instance_classes(1);
     }

@@ -550,21 +550,21 @@
 }
 
 ModuleEntryTable* ClassLoaderData::modules() {
   // Lazily create the module entry table at first request.
   // Lock-free access requires load_acquire.
-  ModuleEntryTable* modules = OrderAccess::load_acquire(&_modules);
+  ModuleEntryTable* modules = Atomic::load_acquire(&_modules);
   if (modules == NULL) {
     MutexLocker m1(Module_lock);
     // Check if _modules got allocated while we were waiting for this lock.
     if ((modules = _modules) == NULL) {
       modules = new ModuleEntryTable(ModuleEntryTable::_moduletable_entry_size);
 
       {
         MutexLocker m1(metaspace_lock(), Mutex::_no_safepoint_check_flag);
         // Ensure _modules is stable, since it is examined without a lock
-        OrderAccess::release_store(&_modules, modules);
+        Atomic::release_store(&_modules, modules);
       }
     }
   }
   return modules;
 }

@@ -750,11 +750,11 @@
   // If the metaspace has not been allocated, create a new one.  Might want
   // to create smaller arena for Reflection class loaders also.
   // The reason for the delayed allocation is because some class loaders are
   // simply for delegating with no metadata of their own.
   // Lock-free access requires load_acquire.
-  ClassLoaderMetaspace* metaspace = OrderAccess::load_acquire(&_metaspace);
+  ClassLoaderMetaspace* metaspace = Atomic::load_acquire(&_metaspace);
   if (metaspace == NULL) {
     MutexLocker ml(_metaspace_lock,  Mutex::_no_safepoint_check_flag);
     // Check if _metaspace got allocated while we were waiting for this lock.
     if ((metaspace = _metaspace) == NULL) {
       if (this == the_null_class_loader_data()) {

@@ -766,11 +766,11 @@
         metaspace = new ClassLoaderMetaspace(_metaspace_lock, Metaspace::ReflectionMetaspaceType);
       } else {
         metaspace = new ClassLoaderMetaspace(_metaspace_lock, Metaspace::StandardMetaspaceType);
       }
       // Ensure _metaspace is stable, since it is examined without a lock
-      OrderAccess::release_store(&_metaspace, metaspace);
+      Atomic::release_store(&_metaspace, metaspace);
     }
   }
   return metaspace;
 }
 

@@ -967,10 +967,10 @@
   }
 }
 
 bool ClassLoaderData::contains_klass(Klass* klass) {
   // Lock-free access requires load_acquire
-  for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
+  for (Klass* k = Atomic::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
     if (k == klass) return true;
   }
   return false;
 }
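
Note on the pattern being touched here: every changed call site replaces OrderAccess::load_acquire / OrderAccess::release_store with the equivalently named Atomic variants, and all of them implement the same lock-free publication idiom — the writer fully initializes an object (or bumps a size field) under a lock and then publishes it with a release store, while lock-free readers pair that with an acquire load before dereferencing. Below is a minimal standalone sketch of the double-checked, lazily-created-table shape used by modules() and metaspace(), written against standard C++ std::atomic rather than HotSpot's Atomic class; the names Table and LazyTable are illustrative only and do not appear in the patch.

#include <atomic>
#include <mutex>

// Illustrative stand-in for a lazily created table such as ModuleEntryTable.
struct Table {
  int entries = 0;
};

class LazyTable {
  std::atomic<Table*> _table{nullptr};
  std::mutex _lock;

 public:
  // Same shape as ClassLoaderData::modules(): lock-free fast path with an
  // acquire load, slow path that creates the table under a lock and then
  // publishes it with a release store, so concurrent readers never observe
  // a partially constructed object.
  Table* get() {
    Table* t = _table.load(std::memory_order_acquire);  // ~ Atomic::load_acquire
    if (t == nullptr) {
      std::lock_guard<std::mutex> g(_lock);
      // Re-check under the lock; the lock itself provides the ordering here.
      t = _table.load(std::memory_order_relaxed);
      if (t == nullptr) {
        t = new Table();                                 // fully initialize first...
        _table.store(t, std::memory_order_release);      // ...then publish (~ Atomic::release_store)
      }
    }
    return t;
  }
};

int main() {
  LazyTable lt;
  return lt.get()->entries;  // 0; safe to call concurrently from many threads
}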