src/hotspot/share/memory/metaspaceShared.cpp

@@ -661,10 +661,37 @@
       }
     }
   }
 };
 
+// Global object for holding symbols that are created during class loading. See SymbolTable::new_symbol
+static GrowableArray<Symbol*>* _global_symbol_objects = NULL;
+
+static int compare_symbols_by_address(Symbol** a, Symbol** b) {
+  if (a[0] < b[0]) {
+    return -1;
+  } else if (a[0] == b[0]) {
+    ResourceMark rm;
+    log_warning(cds)("Duplicated symbol %s unexpected", (*a)->as_C_string());
+    return 0;
+  } else {
+    return 1;
+  }
+}
+
+void MetaspaceShared::add_symbol(Symbol* sym) {
+  MutexLocker ml(CDSAddSymbol_lock, Mutex::_no_safepoint_check_flag);
+  if (_global_symbol_objects == NULL) {
+    _global_symbol_objects = new (ResourceObj::C_HEAP, mtSymbol) GrowableArray<Symbol*>(2048, mtSymbol);
+  }
+  _global_symbol_objects->append(sym);
+}
+
+GrowableArray<Symbol*>* MetaspaceShared::collected_symbols() {
+  return _global_symbol_objects;
+}
+
 static void remove_unshareable_in_classes() {
   for (int i = 0; i < _global_klass_objects->length(); i++) {
     Klass* k = _global_klass_objects->at(i);
     if (!k->is_objArray_klass()) {
       // InstanceKlass and TypeArrayKlass will in turn call remove_unshareable_info

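The new collector's comment points at SymbolTable::new_symbol as the producer side, which is not shown on this page. Below is a minimal sketch of how the dump-time registration might be wired; the helper name and the DumpSharedSpaces guard are assumptions, and only MetaspaceShared::add_symbol comes from this change.

// Hypothetical helper, for illustration only (not part of this patch):
// the producer side lives in SymbolTable::new_symbol, which would hand
// each freshly created Symbol to the collector while dumping.
static void record_symbol_for_dump(Symbol* sym) {
  if (DumpSharedSpaces) {               // assumed dump-time guard
    MetaspaceShared::add_symbol(sym);   // appends under CDSAddSymbol_lock
  }
}
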
@@ -1236,38 +1263,10 @@
   VMOp_Type type() const { return VMOp_PopulateDumpSharedSpace; }
   void doit();   // outline because gdb sucks
   bool allow_nested_vm_operations() const { return true; }
 }; // class VM_PopulateDumpSharedSpace
 
-class SortedSymbolClosure: public SymbolClosure {
-  GrowableArray<Symbol*> _symbols;
-  virtual void do_symbol(Symbol** sym) {
-    assert((*sym)->is_permanent(), "archived symbols must be permanent");
-    _symbols.append(*sym);
-  }
-  static int compare_symbols_by_address(Symbol** a, Symbol** b) {
-    if (a[0] < b[0]) {
-      return -1;
-    } else if (a[0] == b[0]) {
-      ResourceMark rm;
-      log_warning(cds)("Duplicated symbol %s unexpected", (*a)->as_C_string());
-      return 0;
-    } else {
-      return 1;
-    }
-  }
-
-public:
-  SortedSymbolClosure() {
-    SymbolTable::symbols_do(this);
-    _symbols.sort(compare_symbols_by_address);
-  }
-  GrowableArray<Symbol*>* get_sorted_symbols() {
-    return &_symbols;
-  }
-};
-
 // ArchiveCompactor --
 //
 // This class is the central piece of shared archive compaction -- all metaspace data are
 // initially allocated outside of the shared regions. ArchiveCompactor copies the
 // metaspace data into their final location in the shared regions.

@@ -1275,11 +1274,10 @@
 class ArchiveCompactor : AllStatic {
   static const int INITIAL_TABLE_SIZE = 8087;
   static const int MAX_TABLE_SIZE     = 1000000;
 
   static DumpAllocStats* _alloc_stats;
-  static SortedSymbolClosure* _ssc;
 
   typedef KVHashtable<address, address, mtInternal> RelocationTable;
   static RelocationTable* _new_loc_table;
 
 public:

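As described above, ArchiveCompactor shallow-copies metaspace objects into their final regions and remembers where each one landed (the RelocationTable maps old address to new address) so that embedded pointers can be rewritten in a later pass. The following self-contained toy illustrates that copy-and-record pattern, with std::unordered_map standing in for KVHashtable and a plain buffer standing in for a shared region; none of it is HotSpot code.

// Toy illustration of copy-then-relocate: objects are first created in a
// scratch area, then copied to their final "region"; a table of old->new
// addresses drives the later pointer fixup pass.
#include <cstring>
#include <unordered_map>

struct Toy { int payload; Toy* other; };

int main() {
  Toy scratch[2] = { {1, nullptr}, {2, nullptr} };
  scratch[0].other = &scratch[1];           // cross-pointer to fix up later

  alignas(Toy) char region[sizeof(Toy) * 2];
  std::unordered_map<void*, void*> new_loc; // old address -> new address

  // "Shallow copy" phase: copy bytes, remember where each object went.
  for (int i = 0; i < 2; i++) {
    void* dst = region + i * sizeof(Toy);
    std::memcpy(dst, &scratch[i], sizeof(Toy));
    new_loc[&scratch[i]] = dst;
  }

  // "Relocation" phase: rewrite embedded pointers via the table.
  Toy* copied = reinterpret_cast<Toy*>(region);
  copied[0].other = static_cast<Toy*>(new_loc[copied[0].other]);
  return 0;
}
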
@@ -1419,12 +1417,10 @@
 #endif
 
 public:
   static void copy_and_compact() {
     ResourceMark rm;
-    SortedSymbolClosure the_ssc; // StackObj
-    _ssc = &the_ssc;
 
     log_info(cds)("Scanning all metaspace objects ... ");
     {
       // allocate and shallow-copy RW objects, immediately following the MC region
       log_info(cds)("Allocating RW objects ... ");

@@ -1456,27 +1452,24 @@
       iterate_roots(&ext_reloc);
     }
     {
       log_info(cds)("Fixing symbol identity hash ... ");
       os::init_random(0x12345678);
-      GrowableArray<Symbol*>* symbols = _ssc->get_sorted_symbols();
-      for (int i=0; i<symbols->length(); i++) {
-        symbols->at(i)->update_identity_hash();
+      GrowableArray<Symbol*>* all_symbols = MetaspaceShared::collected_symbols();
+      all_symbols->sort(compare_symbols_by_address);
+      for (int i = 0; i < all_symbols->length(); i++) {
+        all_symbols->at(i)->update_identity_hash();
       }
     }
 #ifdef ASSERT
     {
       log_info(cds)("Verifying external roots ... ");
       ResourceMark rm;
       IsRefInArchiveChecker checker;
       iterate_roots(&checker);
     }
 #endif
-
-
-    // cleanup
-    _ssc = NULL;
   }
 
   // We must relocate the System::_well_known_klasses only after we have copied the
   // java objects in during dump_java_heap_objects(): during the object copy, we operate on
   // old objects which assert that their klass is the original klass.

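The identity-hash pass above seeds the VM's random number generator with a fixed constant before recomputing each symbol's hash, so the same symbol order always yields the same hash values across dump runs. A minimal stand-alone sketch of that idea follows, with a simple LCG standing in for os::random(); it is an illustration, not the VM's generator.

// Toy illustration: a fixed seed plus a fixed visit order makes the
// regenerated "identity hashes" identical from one run to the next.
#include <cstdint>
#include <cstdio>

static uint32_t state;
static uint32_t next_random() {            // stand-in for os::random()
  state = state * 1664525u + 1013904223u;  // simple LCG step
  return state;
}

int main() {
  state = 0x12345678;                      // same fixed seed as the dump
  const char* symbols_in_address_order[] = { "main", "(I)V", "java/lang/Object" };
  for (const char* s : symbols_in_address_order) {
    std::printf("%s -> %08x\n", s, (unsigned)next_random());  // reproducible
  }
  return 0;
}
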
@@ -1508,12 +1501,12 @@
     // the SharedClassListFile (class loading is done in a single thread and the JIT
     // is disabled). Also, Symbols are allocated in monotonically increasing addresses
     // (see Symbol::operator new(size_t, int)). So if we iterate the Symbols by
     // ascending address order, we ensure that all Symbols are copied into deterministic
     // locations in the archive.
-    GrowableArray<Symbol*>* symbols = _ssc->get_sorted_symbols();
-    for (int i=0; i<symbols->length(); i++) {
+    GrowableArray<Symbol*>* symbols = _global_symbol_objects;
+    for (int i = 0; i < symbols->length(); i++) {
       it->push(symbols->adr_at(i));
     }
     if (_global_klass_objects != NULL) {
       // Need to fix up the pointers
       for (int i = 0; i < _global_klass_objects->length(); i++) {

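The comment above carries the ordering argument behind this change: dump-time class loading is single-threaded and symbols are allocated at monotonically increasing addresses, so sorting the collected pointers by address reproduces creation order and gives every symbol a deterministic position in the archive. A small self-contained illustration, with a bump-pointer arena standing in for symbol storage (not HotSpot code):

// Toy illustration of the determinism argument: with a bump-pointer
// allocator and single-threaded creation, sorting collected pointers by
// address reproduces creation order, run after run.
#include <algorithm>
#include <cstdio>
#include <vector>

int main() {
  static char arena[3 * 32];               // stand-in for the symbol arena
  size_t top = 0;
  std::vector<char*> collected;

  for (int i = 0; i < 3; i++) {            // "class loading" creates symbols
    collected.push_back(&arena[top]);      // bump-pointer: addresses only grow
    top += 32;
  }

  std::sort(collected.begin(), collected.end());  // address order == creation order
  for (char* p : collected) {
    std::printf("%td\n", p - arena);       // prints 0, 32, 64 on every run
  }
  return 0;
}
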
@@ -1539,11 +1532,10 @@
     return klass;
   }
 };
 
 DumpAllocStats* ArchiveCompactor::_alloc_stats;
-SortedSymbolClosure* ArchiveCompactor::_ssc;
 ArchiveCompactor::RelocationTable* ArchiveCompactor::_new_loc_table;
 
 void VM_PopulateDumpSharedSpace::dump_symbols() {
   log_info(cds)("Dumping symbol table ...");
 