
test/hotspot/gtest/memory/test_metaspace_allocation.cpp

rev 49833 : [mq]: 8202073-MetaspaceAllocationTest-gtest-shall-lock-during-space-creation


  virtual void TearDown() {
    for (int i = 0; i < NUM_PARALLEL_METASPACES; i++) {
      if (_spaces[i].space != NULL) {
        delete _spaces[i].space;
        delete _spaces[i].lock;
      }
    }
  }

  void create_space(int i) {
    assert(i >= 0 && i < NUM_PARALLEL_METASPACES, "Sanity");
    assert(_spaces[i].space == NULL && _spaces[i].allocated == 0, "Sanity");
    if (_spaces[i].lock == NULL) {
      _spaces[i].lock = new Mutex(Monitor::native, "gtest-MetaspaceAllocationTest-lock", false, Monitor::_safepoint_check_never);
      ASSERT_TRUE(_spaces[i].lock != NULL);
    }
    // Let every ~10th space be an anonymous one to test different allocation patterns.
    const Metaspace::MetaspaceType msType = (os::random() % 100 < 10) ?
      Metaspace::AnonymousMetaspaceType : Metaspace::StandardMetaspaceType;
    {
      // Take the lock during space creation, since this is what happens in the VM too
      // (see ClassLoaderData::metaspace_non_null(), which we mimic here).
      MutexLockerEx ml(_spaces[i].lock, Mutex::_no_safepoint_check_flag);
      _spaces[i].space = new ClassLoaderMetaspace(_spaces[i].lock, msType);
    }
    _spaces[i].allocated = 0;
    ASSERT_TRUE(_spaces[i].space != NULL);
  }
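For reference, the VM-side pattern the comment above points at: ClassLoaderData::metaspace_non_null() creates the metaspace lazily, taking the loader's metaspace lock and re-checking the pointer after acquiring it, so two racing threads cannot both create one. Below is a minimal standalone sketch of that double-checked creation. The Space and Loader names are hypothetical stand-ins, and std::atomic stands in for HotSpot's OrderAccess barriers; this is not the actual hotspot source.

  #include <atomic>
  #include <mutex>

  struct Space {};             // stands in for ClassLoaderMetaspace

  struct Loader {              // stands in for ClassLoaderData
    std::mutex _lock;          // stands in for the per-loader metaspace lock
    std::atomic<Space*> _space { nullptr };

    Space* space_non_null() {
      // Fast path: no lock needed if the space already exists.
      Space* s = _space.load(std::memory_order_acquire);
      if (s == nullptr) {
        // Slow path: create under the lock, re-checking the pointer so that
        // two racing threads cannot both allocate a Space.
        std::lock_guard<std::mutex> ml(_lock);
        s = _space.load(std::memory_order_relaxed);
        if (s == nullptr) {
          s = new Space();
          _space.store(s, std::memory_order_release);
        }
      }
      return s;
    }
  };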

  // Returns the index of a random space in [0..metaspaces) that is empty,
  // non-empty or full, depending on the requested fill grade.
  // Returns -1 if no matching space exists.
  enum fillgrade { fg_empty, fg_non_empty, fg_full };
  int get_random_matching_space(int metaspaces, fillgrade fg) {
    const int start_index = os::random() % metaspaces;
    int i = start_index;
    do {
      if (fg == fg_empty && _spaces[i].is_empty()) {
        return i;
      } else if ((fg == fg_full && _spaces[i].is_full()) ||
                 (fg == fg_non_empty && !_spaces[i].is_full() && !_spaces[i].is_empty())) {
        return i;
      }
      i++;
      if (i == metaspaces) {

  [...]

        }
        if (index == -1) {
          // All spaces are full, switch to freeing.
          force_switch = true;
        } else {
          // Create the space if it does not yet exist.
          if (_spaces[index].space == NULL) {
            create_space(index);
          }
          // Allocate a bunch of blocks from it. Mostly small stuff, but mix in large
          // allocations to force humongous chunk allocations.
          int allocs_done = 0;
          while (allocs_done < allocs_per_phase && !_spaces[index].is_full()) {
            size_t size = 0;
            int r = os::random() % 1000;
            if ((float)r < probability_for_large_allocations * 1000.0) {
              size = (os::random() % _chunk_geometry.medium_chunk_word_size) + _chunk_geometry.medium_chunk_word_size;
            } else {
              size = os::random() % 64;
            }
            // Note: in contrast to space creation, no need to lock here;
            // ClassLoaderMetaspace::allocate() locks internally.
            MetaWord* const p = _spaces[index].space->allocate(size, mdType);
            if (p == NULL) {
              // We very likely hit the metaspace "until-gc" limit.
#ifdef DEBUG_VERBOSE
              tty->print_cr("OOM for " SIZE_FORMAT " words. ", size);
#endif
              // Just switch to deallocation and resume the test.
              force_switch = true;
              break;
            } else {
              _spaces[index].allocated += size;
              allocs_done++;
            }
          }
        }
      } else {
        // Freeing: find a metaspace and delete it, preferring completely filled spaces.
        int index = get_random_full_space(metaspaces);
        if (index == -1) {
          index = get_random_non_emtpy_space(metaspaces);
        }
        if (index == -1) {
          force_switch = true;
        } else {
          assert(_spaces[index].space != NULL && _spaces[index].allocated > 0, "Sanity");
          // Note: do not lock here. In the "wild" (the VM), we do not do so either (see ~ClassLoaderData()).
          delete _spaces[index].space;
          _spaces[index].space = NULL;
          _spaces[index].allocated = 0;
        }
      }

      if (force_switch) {
        allocating = !allocating;
      } else {
        // Periodically switch between allocating and freeing, but prefer allocation
        // because we want to intermingle allocations of multiple metaspaces.
        allocating = os::random() % 5 < 4;
      }
      phases_done++;
#ifdef DEBUG_VERBOSE
      int metaspaces_in_use = 0;
      size_t total_allocated = 0;
      for (int i = 0; i < metaspaces; i++) {
        if (_spaces[i].allocated > 0) {
          total_allocated += _spaces[i].allocated;

  [...]
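On the freeing side, the comment refers to ~ClassLoaderData(): in the VM, the metaspace is deleted without taking the lock, because by the time the destructor runs no other thread can reach the ClassLoaderData anymore. A condensed sketch of that destructor's metaspace handling follows; remaining cleanup is elided, and the shape is paraphrased from the sources of this era rather than quoted verbatim.

  ClassLoaderData::~ClassLoaderData() {
    // Release the metaspace without taking _metaspace_lock: no other thread
    // can reach this ClassLoaderData anymore, so no locking is needed.
    ClassLoaderMetaspace* m = _metaspace;
    if (m != NULL) {
      _metaspace = NULL;
      delete m;
    }
    // ... remaining cleanup elided ...
  }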

