1 /*
   2  * Copyright (c) 2018, 2019, Oracle and/or its affiliates. All rights reserved.
   3  * Copyright (c) 2018, SAP.
   4  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   5  *
   6  * This code is free software; you can redistribute it and/or modify it
   7  * under the terms of the GNU General Public License version 2 only, as
   8  * published by the Free Software Foundation.
   9  *
  10  * This code is distributed in the hope that it will be useful, but WITHOUT
  11  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  12  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  13  * version 2 for more details (a copy is included in the LICENSE file that
  14  * accompanied this code).
  15  *
  16  * You should have received a copy of the GNU General Public License version
  17  * 2 along with this work; if not, write to the Free Software Foundation,
  18  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  19  *
  20  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  21  * or visit www.oracle.com if you need additional information or have any
  22  * questions.
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "memory/allocation.inline.hpp"
  27 #include "memory/metaspace.hpp"
  28 #include "runtime/mutex.hpp"
  29 #include "runtime/mutexLocker.hpp"
  30 #include "runtime/os.hpp"
  31 #include "utilities/align.hpp"
  32 #include "utilities/debug.hpp"
  33 #include "utilities/globalDefinitions.hpp"
  34 #include "utilities/ostream.hpp"
  35 #include "unittest.hpp"
  36 
  37 #if 0
  38 
  39 #define NUM_PARALLEL_METASPACES                 50
  40 #define MAX_PER_METASPACE_ALLOCATION_WORDSIZE   (512 * K)
  41 
  42 //#define DEBUG_VERBOSE true
  43 
  44 #ifdef DEBUG_VERBOSE
  45 
// Snapshot of the ChunkManager's free-chunk counts, one counter per chunk
// size class. Filled in by test_metaspace_retrieve_chunkmanager_statistics()
// and used for verbose diagnostics only (DEBUG_VERBOSE builds).
struct chunkmanager_statistics_t {
  int num_specialized_chunks;
  int num_small_chunks;
  int num_medium_chunks;
  int num_humongous_chunks;
};
  52 
  53 extern void test_metaspace_retrieve_chunkmanager_statistics(Metaspace::MetadataType mdType, chunkmanager_statistics_t* out);
  54 
  55 static void print_chunkmanager_statistics(outputStream* st, Metaspace::MetadataType mdType) {
  56   chunkmanager_statistics_t stat;
  57   test_metaspace_retrieve_chunkmanager_statistics(mdType, &stat);
  58   st->print_cr("free chunks: %d / %d / %d / %d", stat.num_specialized_chunks, stat.num_small_chunks,
  59                stat.num_medium_chunks, stat.num_humongous_chunks);
  60 }
  61 
  62 #endif
  63 
// The chunk sizes, in words, of the three non-humongous chunk size classes
// used by the metaspace. Filled in by test_metaspace_retrieve_chunk_geometry().
struct chunk_geometry_t {
  size_t specialized_chunk_word_size;
  size_t small_chunk_word_size;
  size_t medium_chunk_word_size;
};
  69 
  70 extern void test_metaspace_retrieve_chunk_geometry(Metaspace::MetadataType mdType, chunk_geometry_t* out);
  71 
  72 
  73 class MetaspaceAllocationTest : public ::testing::Test {
  74 protected:
  75 
  76   struct {
  77     size_t allocated;
  78     Mutex* lock;
  79     ClassLoaderMetaspace* space;
  80     bool is_empty() const { return allocated == 0; }
  81     bool is_full() const { return allocated >= MAX_PER_METASPACE_ALLOCATION_WORDSIZE; }
  82   } _spaces[NUM_PARALLEL_METASPACES];
  83 
  84   chunk_geometry_t _chunk_geometry;
  85 
  86   virtual void SetUp() {
  87     ::memset(_spaces, 0, sizeof(_spaces));
  88     test_metaspace_retrieve_chunk_geometry(Metaspace::NonClassType, &_chunk_geometry);
  89   }
  90 
  91   virtual void TearDown() {
  92     for (int i = 0; i < NUM_PARALLEL_METASPACES; i ++) {
  93       if (_spaces[i].space != NULL) {
  94         delete _spaces[i].space;
  95         delete _spaces[i].lock;
  96       }
  97     }
  98   }
  99 
 100   void create_space(int i) {
 101     assert(i >= 0 && i < NUM_PARALLEL_METASPACES, "Sanity");
 102     assert(_spaces[i].space == NULL && _spaces[i].allocated == 0, "Sanity");
 103     if (_spaces[i].lock == NULL) {
 104       _spaces[i].lock = new Mutex(Monitor::native, "gtest-MetaspaceAllocationTest-lock", false, Monitor::_safepoint_check_never);
 105       ASSERT_TRUE(_spaces[i].lock != NULL);
 106     }
 107     // Let every ~10th space be an unsafe anonymous one to test different allocation patterns.
 108     const Metaspace::MetaspaceType msType = (os::random() % 100 < 10) ?
 109       Metaspace::UnsafeAnonymousMetaspaceType : Metaspace::StandardMetaspaceType;
 110     {
 111       // Pull lock during space creation, since this is what happens in the VM too
 112       // (see ClassLoaderData::metaspace_non_null(), which we mimick here).
 113       MutexLocker ml(_spaces[i].lock,  Mutex::_no_safepoint_check_flag);
 114       _spaces[i].space = new ClassLoaderMetaspace(_spaces[i].lock, msType);
 115     }
 116     _spaces[i].allocated = 0;
 117     ASSERT_TRUE(_spaces[i].space != NULL);
 118   }
 119 
 120   // Returns the index of a random space where index is [0..metaspaces) and which is
 121   //   empty, non-empty or full.
 122   // Returns -1 if no matching space exists.
 123   enum fillgrade { fg_empty, fg_non_empty, fg_full };
 124   int get_random_matching_space(int metaspaces, fillgrade fg) {
 125     const int start_index = os::random() % metaspaces;
 126     int i = start_index;
 127     do {
 128       if (fg == fg_empty && _spaces[i].is_empty()) {
 129         return i;
 130       } else if ((fg == fg_full && _spaces[i].is_full()) ||
 131                  (fg == fg_non_empty && !_spaces[i].is_full() && !_spaces[i].is_empty())) {
 132         return i;
 133       }
 134       i ++;
 135       if (i == metaspaces) {
 136         i = 0;
 137       }
 138     } while (i != start_index);
 139     return -1;
 140   }
 141 
 142   int get_random_emtpy_space(int metaspaces) { return get_random_matching_space(metaspaces, fg_empty); }
 143   int get_random_non_emtpy_space(int metaspaces) { return get_random_matching_space(metaspaces, fg_non_empty); }
 144   int get_random_full_space(int metaspaces) { return get_random_matching_space(metaspaces, fg_full); }
 145 
 146   void do_test(Metaspace::MetadataType mdType, int metaspaces, int phases, int allocs_per_phase,
 147                float probability_for_large_allocations // 0.0-1.0
 148   ) {
 149     // Alternate between breathing in (allocating n blocks for a random Metaspace) and
 150     // breathing out (deleting a random Metaspace). The intent is to stress the coalescation
 151     // and splitting of free chunks.
 152     int phases_done = 0;
 153     bool allocating = true;
 154     while (phases_done < phases) {
 155       bool force_switch = false;
 156       if (allocating) {
 157         // Allocate space from metaspace, with a preference for completely empty spaces. This
 158         // should provide a good mixture of metaspaces in the virtual space.
 159         int index = get_random_emtpy_space(metaspaces);
 160         if (index == -1) {
 161           index = get_random_non_emtpy_space(metaspaces);
 162         }
 163         if (index == -1) {
 164           // All spaces are full, switch to freeing.
 165           force_switch = true;
 166         } else {
 167           // create space if it does not yet exist.
 168           if (_spaces[index].space == NULL) {
 169             create_space(index);
 170           }
 171           // Allocate a bunch of blocks from it. Mostly small stuff but mix in large allocations
 172           //  to force humongous chunk allocations.
 173           int allocs_done = 0;
 174           while (allocs_done < allocs_per_phase && !_spaces[index].is_full()) {
 175             size_t size = 0;
 176             int r = os::random() % 1000;
 177             if ((float)r < probability_for_large_allocations * 1000.0) {
 178               size = (os::random() % _chunk_geometry.medium_chunk_word_size) + _chunk_geometry.medium_chunk_word_size;
 179             } else {
 180               size = os::random() % 64;
 181             }
 182             // Note: In contrast to space creation, no need to lock here. ClassLoaderMetaspace::allocate() will lock itself.
 183             MetaWord* const p = _spaces[index].space->allocate(size, mdType);
 184             if (p == NULL) {
 185               // We very probably did hit the metaspace "until-gc" limit.
 186 #ifdef DEBUG_VERBOSE
 187               tty->print_cr("OOM for " SIZE_FORMAT " words. ", size);
 188 #endif
 189               // Just switch to deallocation and resume tests.
 190               force_switch = true;
 191               break;
 192             } else {
 193               _spaces[index].allocated += size;
 194               allocs_done ++;
 195             }
 196           }
 197         }
 198       } else {
 199         // freeing: find a metaspace and delete it, with preference for completely filled spaces.
 200         int index = get_random_full_space(metaspaces);
 201         if (index == -1) {
 202           index = get_random_non_emtpy_space(metaspaces);
 203         }
 204         if (index == -1) {
 205           force_switch = true;
 206         } else {
 207           assert(_spaces[index].space != NULL && _spaces[index].allocated > 0, "Sanity");
 208           // Note: do not lock here. In the "wild" (the VM), we do not so either (see ~ClassLoaderData()).
 209           delete _spaces[index].space;
 210           _spaces[index].space = NULL;
 211           _spaces[index].allocated = 0;
 212         }
 213       }
 214 
 215       if (force_switch) {
 216         allocating = !allocating;
 217       } else {
 218         // periodically switch between allocating and freeing, but prefer allocation because
 219         // we want to intermingle allocations of multiple metaspaces.
 220         allocating = os::random() % 5 < 4;
 221       }
 222       phases_done ++;
 223 #ifdef DEBUG_VERBOSE
 224       int metaspaces_in_use = 0;
 225       size_t total_allocated = 0;
 226       for (int i = 0; i < metaspaces; i ++) {
 227         if (_spaces[i].allocated > 0) {
 228           total_allocated += _spaces[i].allocated;
 229           metaspaces_in_use ++;
 230         }
 231       }
 232       tty->print("%u:\tspaces: %d total words: " SIZE_FORMAT "\t\t\t", phases_done, metaspaces_in_use, total_allocated);
 233       print_chunkmanager_statistics(tty, mdType);
 234 #endif
 235     }
 236 #ifdef DEBUG_VERBOSE
 237     tty->print_cr("Test finished. ");
 238     MetaspaceUtils::print_metaspace_map(tty, mdType);
 239     print_chunkmanager_statistics(tty, mdType);
 240 #endif
 241   }
 242 };
 243 
 244 
 245 
 246 TEST_F(MetaspaceAllocationTest, chunk_geometry) {
 247   ASSERT_GT(_chunk_geometry.specialized_chunk_word_size, (size_t) 0);
 248   ASSERT_GT(_chunk_geometry.small_chunk_word_size, _chunk_geometry.specialized_chunk_word_size);
 249   ASSERT_EQ(_chunk_geometry.small_chunk_word_size % _chunk_geometry.specialized_chunk_word_size, (size_t)0);
 250   ASSERT_GT(_chunk_geometry.medium_chunk_word_size, _chunk_geometry.small_chunk_word_size);
 251   ASSERT_EQ(_chunk_geometry.medium_chunk_word_size % _chunk_geometry.small_chunk_word_size, (size_t)0);
 252 }
 253 
 254 
// One metaspace, non-class metadata, 1000 phases of up to 100 small
// allocations each; no humongous allocations (probability 0).
TEST_VM_F(MetaspaceAllocationTest, single_space_nonclass) {
  do_test(Metaspace::NonClassType, 1, 1000, 100, 0);
}
 258 
// One metaspace, class metadata, 1000 phases of up to 100 small
// allocations each; no humongous allocations (probability 0).
TEST_VM_F(MetaspaceAllocationTest, single_space_class) {
  do_test(Metaspace::ClassType, 1, 1000, 100, 0);
}
 262 
// Many parallel metaspaces, non-class metadata, small allocations only.
TEST_VM_F(MetaspaceAllocationTest, multi_space_nonclass) {
  do_test(Metaspace::NonClassType, NUM_PARALLEL_METASPACES, 100, 1000, 0.0);
}
 266 
// Many parallel metaspaces, class metadata, small allocations only.
TEST_VM_F(MetaspaceAllocationTest, multi_space_class) {
  do_test(Metaspace::ClassType, NUM_PARALLEL_METASPACES, 100, 1000, 0.0);
}
 270 
// many metaspaces, with humongous chunks mixed in
// (0.6% chance per allocation of a medium-chunk-or-larger request).
TEST_VM_F(MetaspaceAllocationTest, multi_space_nonclass_2) {
  do_test(Metaspace::NonClassType, NUM_PARALLEL_METASPACES, 100, 1000, .006f);
}
 275 
 276 #endif