src/hotspot/share/oops/valueKlass.cpp

rev 59083 : DRAFT 8236522: NonTearable marker interface for inline classes to enforce atomicity

@@ -137,14 +137,10 @@
   instanceOop oop = (instanceOop)Universe::heap()->obj_buffer_allocate(this, size, CHECK_NULL);
   assert(oop->mark().is_always_locked(), "Unlocked value type");
   return oop;
 }
 
-bool ValueKlass::is_atomic() {
-  return (nonstatic_field_size() * heapOopSize) <= longSize;
-}
-
 int ValueKlass::nonstatic_oop_count() {
   int oops = 0;
   int map_count = nonstatic_oop_map_count();
   OopMapBlock* block = start_of_nonstatic_oop_maps();
   OopMapBlock* end = block + map_count;

@@ -193,10 +189,15 @@
   // Too many embedded oops
   if ((ValueArrayElemMaxFlatOops >= 0) && (nonstatic_oop_count() > ValueArrayElemMaxFlatOops)) {
     return false;
   }
 
+  // Declared atomic but not naturally atomic.
+  if (is_declared_atomic() && !is_naturally_atomic()) {
+    return false;
+  }
+
   return true;
 }
 
 void ValueKlass::remove_unshareable_info() {
   InstanceKlass::remove_unshareable_info();

@@ -251,11 +252,11 @@
   }
   return vak->array_klass(storage_props, rank, THREAD);
 }
 
 Klass* ValueKlass::allocate_value_array_klass(TRAPS) {
-  if (flatten_array() && (is_atomic() || (!ValueArrayAtomicAccess))) {
+  if (flatten_array() && (is_naturally_atomic() || (!ValueArrayAtomicAccess))) {
     return ValueArrayKlass::allocate_klass(ArrayStorageProperties::flattened_and_null_free, this, THREAD);
   }
   return ObjArrayKlass::allocate_objArray_klass(ArrayStorageProperties::null_free, 1, this, THREAD);
 }
 
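For context, a minimal standalone sketch (not HotSpot code) of the size-based criterion that the removed ValueKlass::is_atomic() encoded, i.e. nonstatic_field_size() * heapOopSize <= longSize. Whether is_naturally_atomic() keeps exactly this criterion is an assumption of this sketch, and the names payload_fits_in_word, kHeapOopSize and kLongSize are illustrative only, not identifiers from the patch.

#include <cstddef>
#include <iostream>

// Assumed word sizes: 4-byte heap oops (compressed oops) and an 8-byte jlong.
constexpr std::size_t kHeapOopSize = 4;
constexpr std::size_t kLongSize    = sizeof(long long);  // 8 bytes

// Mirrors the removed check: a flat value payload is "atomic" in this sense
// when it fits within a single atomically loadable/storable 64-bit word.
bool payload_fits_in_word(std::size_t nonstatic_field_size_in_words) {
  return nonstatic_field_size_in_words * kHeapOopSize <= kLongSize;
}

int main() {
  std::cout << payload_fits_in_word(2) << '\n';  // 1: 8-byte payload fits
  std::cout << payload_fits_in_word(3) << '\n';  // 0: 12-byte payload does not
  return 0;
}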