/*
 * Copyright (c) 2018, 2020, Oracle and/or its affiliates. All rights reserved.
 * Copyright (c) 2018, 2020 SAP SE. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"

//#define LOG_PLEASE
#include "metaspace/metaspaceTestsCommon.hpp"
#include "metaspace/metaspaceTestContexts.hpp"
#include "metaspace/metaspace_sparsearray.hpp"

// Little randomness helper
static bool fifty_fifty() {
  return IntRange(100).random_value() < 50;
}

// See metaspaceArena.cpp : needed for predicting commit sizes.
namespace metaspace {
  extern size_t get_raw_word_size_for_requested_word_size(size_t net_word_size);
}
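
// Note: get_raw_word_size_for_requested_word_size() is used in
//  random_allocate_from_testbed() below to translate the net word size of a failed
//  allocation into the raw size the arena would have needed; presumably this covers
//  arena-internal rounding and padding, which is why the net request size alone is not
//  compared against the commit limiter.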

// A MetaspaceArenaTestBed contains a single MetaspaceArena and its lock.
// It keeps track of allocations done from this MetaspaceArena.
class MetaspaceArenaTestBed : public CHeapObj<mtInternal> {

  MetaspaceArena* _arena;

  Mutex* _lock;

  const SizeRange _allocation_range;
  size_t _size_of_last_failed_allocation;

  // We keep track of all allocations done through the MetaspaceArena so we can
  //  later check for overwrites.
  struct allocation_t {
    allocation_t* next;
    MetaWord* p; // NULL if deallocated
    size_t word_size;
    void mark() {
      mark_range(p, word_size);
    }
    void verify() const {
      if (p != NULL) {
        check_marked_range(p, word_size);
      }
    }
  };

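  // Head of a singly-linked list of all allocations made from this arena. Each
  //  allocation is marked via mark_range() and re-checked (check_marked_range()) on
  //  deallocation and at testbed destruction to detect overwrites.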
  allocation_t* _allocations;

  // We count how much we did allocate and deallocate
  MemRangeCounter _alloc_count;
  MemRangeCounter _dealloc_count;

  // Check statistics returned by MetaspaceArena::add_to_statistics() against what
  // we know we allocated. This is a bit flaky since MetaspaceArena has internal
  // overhead.
  void verify_arena_statistics() const {

    arena_stats_t stats;
    _arena->add_to_statistics(&stats);
    in_use_chunk_stats_t in_use_stats = stats.totals();

    assert(_dealloc_count.total_size() <= _alloc_count.total_size() &&
           _dealloc_count.count() <= _alloc_count.count(), "Sanity");

    // Check consistency of stats
    ASSERT_GE(in_use_stats.word_size, in_use_stats.committed_words);
    ASSERT_EQ(in_use_stats.committed_words,
              in_use_stats.used_words + in_use_stats.free_words + in_use_stats.waste_words);
    ASSERT_GE(in_use_stats.used_words, stats.free_blocks_word_size);

    // Note: reasons why the outside alloc counter and the inside used counter can differ:
    // - alignment/padding of allocations
    // - inside used counter contains blocks in free list
    // - free block list splinter threshold

    // Since what we deallocated may have been given back to us in a following allocation,
    //  we only know for sure that we allocated what we did not give back.
    const size_t at_least_allocated = _alloc_count.total_size() - _dealloc_count.total_size();

    // At most we allocated this:
    const size_t max_word_overhead_per_alloc = 4;
    const size_t at_most_allocated = _alloc_count.total_size() + max_word_overhead_per_alloc * _alloc_count.count();

    ASSERT_LE(at_least_allocated, in_use_stats.used_words - stats.free_blocks_word_size);
    ASSERT_GE(at_most_allocated, in_use_stats.used_words - stats.free_blocks_word_size);

  }

public:

  MetaspaceArena* arena() { return _arena; }

  MetaspaceArenaTestBed(ChunkManager* cm, const ArenaGrowthPolicy* alloc_sequence,
                        SizeAtomicCounter* used_words_counter, SizeRange allocation_range)
    : _arena(NULL),
      _lock(NULL),
      _allocation_range(allocation_range),
      _size_of_last_failed_allocation(0),
      _allocations(NULL),
      _alloc_count(), _dealloc_count()
  {
    _lock = new Mutex(Monitor::native, "gtest-MetaspaceArenaTestBed-lock", false, Monitor::_safepoint_check_never);
    // Lock during space creation, since this is what happens in the VM too
    //  (see ClassLoaderData::metaspace_non_null(), which we mimic here).
    MutexLocker ml(_lock, Mutex::_no_safepoint_check_flag);
    _arena = new MetaspaceArena(cm, alloc_sequence, _lock, used_words_counter, "gtest-MetaspaceArenaTestBed-sm");
  }

  ~MetaspaceArenaTestBed() {

    verify_arena_statistics();

    allocation_t* a = _allocations;
    while (a != NULL) {
      allocation_t* b = a->next;
      a->verify();
      FREE_C_HEAP_OBJ(a);
      a = b;
    }

    DEBUG_ONLY(_arena->verify(true);)

    // Delete MetaspaceArena. That should clean up all metaspace.
    delete _arena;
    delete _lock;

  }

  size_t words_allocated() const        { return _alloc_count.total_size(); }
  int num_allocations() const           { return _alloc_count.count(); }

  size_t size_of_last_failed_allocation() const { return _size_of_last_failed_allocation; }

  // Allocate a random amount. Return false if the allocation failed.
  bool checked_random_allocate() {
    size_t word_size = 1 + _allocation_range.random_value();
    MetaWord* p = _arena->allocate(word_size);
    if (p != NULL) {
      EXPECT_TRUE(is_aligned(p, sizeof(MetaWord)));
      allocation_t* a = NEW_C_HEAP_OBJ(allocation_t, mtInternal);
      a->word_size = word_size;
      a->p = p;
      a->mark();
      a->next = _allocations;
      _allocations = a;
      _alloc_count.add(word_size);
      if ((_alloc_count.count() % 20) == 0) {
        verify_arena_statistics();
        DEBUG_ONLY(_arena->verify(true);)
      }
      return true;
    } else {
      _size_of_last_failed_allocation = word_size;
    }
    return false;
  }

  // Deallocate a random allocation
  void checked_random_deallocate() {
    allocation_t* a = _allocations;
    while (a && a->p != NULL && os::random() % 10 != 0) {
      a = a->next;
    }
    if (a != NULL && a->p != NULL) {
      a->verify();
      _arena->deallocate(a->p, a->word_size);
      _dealloc_count.add(a->word_size);
      a->p = NULL; a->word_size = 0;
      if ((_dealloc_count.count() % 20) == 0) {
        verify_arena_statistics();
        DEBUG_ONLY(_arena->verify(true);)
      }
    }
  }

}; // End: MetaspaceArenaTestBed


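// MetaspaceArenaTest is the test driver. It maintains an array of MetaspaceArenaTestBed
//  slots on top of a shared MetaspaceTestContext and, in a big random loop, creates beds,
//  allocates and deallocates from them and deletes them again, roughly simulating class
//  loaders coming and going.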
class MetaspaceArenaTest {

  MetaspaceTestContext _helper;

  SizeAtomicCounter _used_words_counter;

  SparseArray<MetaspaceArenaTestBed*> _testbeds;
  IntCounter _num_beds;

  //////// Bed creation, destruction ///////

  void create_new_test_bed_at(int slotindex, const ArenaGrowthPolicy* growth_policy, SizeRange allocation_range) {
    DEBUG_ONLY(_testbeds.check_slot_is_null(slotindex));
    MetaspaceArenaTestBed* bed = new MetaspaceArenaTestBed(&_helper.cm(), growth_policy,
                                                           &_used_words_counter, allocation_range);
    _testbeds.set_at(slotindex, bed);
    _num_beds.increment();
  }

  void create_random_test_bed_at(int slotindex) {
    SizeRange allocation_range(1, 100); // randomize too?
    const ArenaGrowthPolicy* growth_policy = ArenaGrowthPolicy::policy_for_space_type(
        (fifty_fifty() ? Metaspace::StandardMetaspaceType : Metaspace::ReflectionMetaspaceType),
         fifty_fifty());
    create_new_test_bed_at(slotindex, growth_policy, allocation_range);
  }

  // Create a random test bed at a random free slot.
  // Returns false if we reached the maximum number of test beds.
  bool create_random_test_bed() {
    const int slot = _testbeds.random_null_slot_index();
    if (slot != -1) {
      create_random_test_bed_at(slot);
      return true;
    }
    return false;
  }

  // Create test beds for all slots
  void create_all_test_beds() {
    for (int slot = 0; slot < _testbeds.size(); slot++) {
      if (_testbeds.slot_is_null(slot)) {
        create_random_test_bed_at(slot);
      }
    }
  }

  void delete_test_bed_at(int slotindex) {
    DEBUG_ONLY(_testbeds.check_slot_is_not_null(slotindex));
    MetaspaceArenaTestBed* bed = _testbeds.at(slotindex);
    delete bed; // This will return all its memory to the chunk manager
    _testbeds.set_at(slotindex, NULL);
    _num_beds.decrement();
  }

  // Delete a randomly chosen test bed.
  // Returns false if there are no test beds to delete.
  bool delete_random_test_bed() {
    const int slotindex = _testbeds.random_non_null_slot_index();
    if (slotindex != -1) {
      delete_test_bed_at(slotindex);
      return true;
    }
    return false;
  }

  // Delete all test beds.
  void delete_all_test_beds() {
    for (int slot = _testbeds.first_non_null_slot(); slot != -1; slot = _testbeds.next_non_null_slot(slot)) {
      delete_test_bed_at(slot);
    }
  }

  //////// Allocating metaspace from test beds ///////

  bool random_allocate_from_testbed(int slotindex) {
    DEBUG_ONLY(_testbeds.check_slot_is_not_null(slotindex);)
    MetaspaceArenaTestBed* bed = _testbeds.at(slotindex);
    bool success = bed->checked_random_allocate();
    if (success == false) {
      // We must have hit a limit.
      EXPECT_LT(_helper.commit_limiter().possible_expansion_words(),
                metaspace::get_raw_word_size_for_requested_word_size(bed->size_of_last_failed_allocation()));
    }
    return success;
  }

  // Allocate multiple times random sizes from a single MetaspaceArena.
  bool random_allocate_multiple_times_from_testbed(int slotindex, int num_allocations) {
    bool success = true;
    int n = 0;
    while (success && n < num_allocations) {
      success = random_allocate_from_testbed(slotindex);
      n++;
    }
    return success;
  }

  // Allocate multiple times random sizes from a single random MetaspaceArena.
  bool random_allocate_random_times_from_random_testbed() {
    int slot = _testbeds.random_non_null_slot_index();
    bool success = false;
    if (slot != -1) {
      const int n = IntRange(5, 20).random_value();
      success = random_allocate_multiple_times_from_testbed(slot, n);
    }
    return success;
  }

  /////// Deallocating from testbed ///////////////////

  void deallocate_from_testbed(int slotindex) {
    DEBUG_ONLY(_testbeds.check_slot_is_not_null(slotindex);)
    MetaspaceArenaTestBed* bed = _testbeds.at(slotindex);
    bed->checked_random_deallocate();
  }

  void deallocate_from_random_testbed() {
    int slot = _testbeds.random_non_null_slot_index();
    if (slot != -1) {
      deallocate_from_testbed(slot);
    }
  }

  /////// Stats ///////////////////////////////////////

  int get_total_number_of_allocations() const {
    int sum = 0;
    for (int i = _testbeds.first_non_null_slot(); i != -1; i = _testbeds.next_non_null_slot(i)) {
      sum += _testbeds.at(i)->num_allocations();
    }
    return sum;
  }

  size_t get_total_words_allocated() const {
    size_t sum = 0;
    for (int i = _testbeds.first_non_null_slot(); i != -1; i = _testbeds.next_non_null_slot(i)) {
      sum += _testbeds.at(i)->words_allocated();
    }
    return sum;
  }

public:

  MetaspaceArenaTest(size_t commit_limit, int num_testbeds)
    : _helper(commit_limit),
      _testbeds(num_testbeds),
      _num_beds()
  {}

  ~MetaspaceArenaTest() {
    delete_all_test_beds();
  }

  //////////////// Tests ////////////////////////

  void test() {

    // In a big loop, randomly choose one of these actions:
    // - creating a test bed (simulates creation of a new loader)
    // - allocating from a test bed (simulates allocating metaspace for a loader)
    // - (rarely) deallocating (simulates metaspace deallocation, e.g. class redefinitions)
    // - deleting a test bed (simulates collection of a loader and subsequent return of metaspace to the freelists)

    const int iterations = 10000;

    // Let's have a ceiling on the number of words allocated (this is independent of the commit limit)
    const size_t max_allocation_size = 8 * M;
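    // Note: _used_words_counter is shared by all arenas (each MetaspaceArenaTestBed
    //  receives a pointer to it), so this ceiling applies to the sum over all beds.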

    bool force_bed_deletion = false;

    for (int niter = 0; niter < iterations; niter++) {

      const int r = IntRange(100).random_value();

      if (force_bed_deletion || r < 10) {

        force_bed_deletion = false;
        delete_random_test_bed();

      } else if (r < 20 || _num_beds.get() < (unsigned)_testbeds.size() / 2) {

        create_random_test_bed();

      } else if (r < 95) {

        // If allocation fails, we hit the commit limit and should delete some beds first
        force_bed_deletion = !random_allocate_random_times_from_random_testbed();

      } else {

        // Note: does not affect the used words counter.
        deallocate_from_random_testbed();

      }

      // If we are close to our quota, start bed deletion
      if (_used_words_counter.get() >= max_allocation_size) {

        force_bed_deletion = true;

      }

    }

  }


};

// 32 parallel MetaspaceArena objects, randomly allocating without a commit limit
TEST_VM(metaspace, MetaspaceArena_random_allocs_32_beds_no_commit_limit) {
  MetaspaceArenaTest test(max_uintx, 32);
  test.test();
}

// 32 parallel MetaspaceArena objects, randomly allocating with a commit limit
TEST_VM(metaspace, MetaspaceArena_random_allocs_32_beds_with_commit_limit) {
  MetaspaceArenaTest test(2 * M, 32);
  test.test();
}
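
// Note: the commit limit above (2 * M) is small, so allocations are expected to hit the
//  limit regularly; the test then reacts by deleting beds (see test()), which returns
//  their memory to the chunk manager and lets the loop continue under commit pressure.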

// A single MetaspaceArena, randomly allocating without a commit limit. This should exercise
//  chunk enlargement since allocation is undisturbed.
TEST_VM(metaspace, MetaspaceArena_random_allocs_1_bed_no_commit_limit) {
  MetaspaceArenaTest test(max_uintx, 1);
  test.test();
}