/*
 * Copyright (c) 1997, 2018, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "memory/allocation.hpp"
#include "memory/allocation.inline.hpp"
#include "memory/arena.hpp"
#include "memory/metaspaceShared.hpp"
#include "memory/resourceArea.hpp"
#include "memory/universe.hpp"
#include "runtime/atomic.hpp"
#include "runtime/os.hpp"
#include "runtime/task.hpp"
#include "runtime/threadCritical.hpp"
#include "services/memTracker.hpp"
#include "utilities/ostream.hpp"

// Allocate using os::malloc; on failure, either exit the VM (EXIT_OOM) or
// return NULL (RETURN_NULL), depending on alloc_failmode.
char* AllocateHeap(size_t size,
                   MEMFLAGS flags,
                   const NativeCallStack& stack,
                   AllocFailType alloc_failmode /* = AllocFailStrategy::EXIT_OOM*/) {
  char* p = (char*) os::malloc(size, flags, stack);
  if (p == NULL && alloc_failmode == AllocFailStrategy::EXIT_OOM) {
    vm_exit_out_of_memory(size, OOM_MALLOC_ERROR, "AllocateHeap");
  }
  return p;
}

char* AllocateHeap(size_t size,
                   MEMFLAGS flags,
                   AllocFailType alloc_failmode /* = AllocFailStrategy::EXIT_OOM*/) {
  return AllocateHeap(size, flags, CALLER_PC, alloc_failmode);
}
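
// Illustrative sketch (not part of this file's API): how a caller might pair
// the helpers above with FreeHeap(). The size and the mtInternal flag are
// hypothetical example values; with the default EXIT_OOM mode the NULL check
// is unnecessary because the VM exits instead of returning.
//
//   char* buf = AllocateHeap(1024, mtInternal, AllocFailStrategy::RETURN_NULL);
//   if (buf != NULL) {
//     // ... use buf ...
//     FreeHeap(buf);
//   }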

char* ReallocateHeap(char *old,
                     size_t size,
                     MEMFLAGS flag,
                     AllocFailType alloc_failmode) {
  char* p = (char*) os::realloc(old, size, flag, CALLER_PC);
  if (p == NULL && alloc_failmode == AllocFailStrategy::EXIT_OOM) {
    vm_exit_out_of_memory(size, OOM_MALLOC_ERROR, "ReallocateHeap");
  }
  return p;
}

void FreeHeap(void* p) {
  os::free(p);
}

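// Illustrative sketch (hypothetical caller) of growing a C-heap buffer with
// ReallocateHeap under RETURN_NULL; the sizes and the mtInternal flag are
// example values, and the pattern assumes the usual realloc convention that
// the old block stays valid when the reallocation fails.
//
//   char* buf = AllocateHeap(64, mtInternal);
//   char* bigger = ReallocateHeap(buf, 128, mtInternal, AllocFailStrategy::RETURN_NULL);
//   if (bigger != NULL) {
//     buf = bigger;   // adopt the new block only on success
//   }
//   FreeHeap(buf);
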
void* MetaspaceObj::_shared_metaspace_base = NULL;
void* MetaspaceObj::_shared_metaspace_top  = NULL;

void* StackObj::operator new(size_t size)     throw() { ShouldNotCallThis(); return 0; }
void  StackObj::operator delete(void* p)              { ShouldNotCallThis(); }
void* StackObj::operator new [](size_t size)  throw() { ShouldNotCallThis(); return 0; }
void  StackObj::operator delete [](void* p)           { ShouldNotCallThis(); }

void* MetaspaceObj::operator new(size_t size, ClassLoaderData* loader_data,
                                 size_t word_size,
                                 MetaspaceObj::Type type, TRAPS) throw() {
  // Klass has its own operator new
  return Metaspace::allocate(loader_data, word_size, type, THREAD);
}

bool MetaspaceObj::is_metaspace_object() const {
  return Metaspace::contains((void*)this);
}

void MetaspaceObj::print_address_on(outputStream* st) const {
  st->print(" {" INTPTR_FORMAT "}", p2i(this));
}

void* ResourceObj::operator new(size_t size, Arena *arena) throw() {
  address res = (address)arena->Amalloc(size);
  DEBUG_ONLY(set_allocation_type(res, ARENA);)
  return res;
}

void* ResourceObj::operator new [](size_t size, Arena *arena) throw() {
  address res = (address)arena->Amalloc(size);
  DEBUG_ONLY(set_allocation_type(res, ARENA);)
  return res;
}

void* ResourceObj::operator new(size_t size, allocation_type type, MEMFLAGS flags) throw() {
  address res = NULL;
  switch (type) {
   case C_HEAP:
    res = (address)AllocateHeap(size, flags, CALLER_PC);
    DEBUG_ONLY(set_allocation_type(res, C_HEAP);)
    break;
   case RESOURCE_AREA:
    // new(size) sets allocation type RESOURCE_AREA.
    res = (address)operator new(size);
    break;
   default:
    ShouldNotReachHere();
  }
  return res;
}

void* ResourceObj::operator new [](size_t size, allocation_type type, MEMFLAGS flags) throw() {
  return (address) operator new(size, type, flags);
}

void* ResourceObj::operator new(size_t size, const std::nothrow_t& nothrow_constant,
    allocation_type type, MEMFLAGS flags) throw() {
  // should only call this with std::nothrow, use other operator new() otherwise
  address res = NULL;
  switch (type) {
   case C_HEAP:
    res = (address)AllocateHeap(size, flags, CALLER_PC, AllocFailStrategy::RETURN_NULL);
    DEBUG_ONLY(if (res != NULL) set_allocation_type(res, C_HEAP);)
    break;
   case RESOURCE_AREA:
    // new(size) sets allocation type RESOURCE_AREA.
    res = (address)operator new(size, std::nothrow);
    break;
   default:
    ShouldNotReachHere();
  }
  return res;
}

void* ResourceObj::operator new [](size_t size, const std::nothrow_t& nothrow_constant,
    allocation_type type, MEMFLAGS flags) throw() {
  return (address)operator new(size, nothrow_constant, type, flags);
}

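// Illustrative sketch (hypothetical subclass) of how the allocation_type
// overloads above are typically used; MyObj and the mtInternal flag are
// example names only.
//
//   class MyObj : public ResourceObj { /* ... */ };
//   MyObj* heap_obj = new (ResourceObj::C_HEAP, mtInternal) MyObj(); // must be deleted
//   MyObj* res_obj  = new MyObj();      // resource area, released with the ResourceMark
//   delete heap_obj;                    // only legal for C_HEAP objects (see below)
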
void ResourceObj::operator delete(void* p) {
  assert(((ResourceObj *)p)->allocated_on_C_heap(),
         "delete only allowed for C_HEAP objects");
  DEBUG_ONLY(((ResourceObj *)p)->_allocation_t[0] = (uintptr_t)badHeapOopVal;)
  FreeHeap(p);
}

void ResourceObj::operator delete [](void* p) {
  operator delete(p);
}

#ifdef ASSERT
void ResourceObj::set_allocation_type(address res, allocation_type type) {
    // Set allocation type in the resource object
    uintptr_t allocation = (uintptr_t)res;
    assert((allocation & allocation_mask) == 0, "address should be aligned to 4 bytes at least: " INTPTR_FORMAT, p2i(res));
    assert(type <= allocation_mask, "incorrect allocation type");
    ResourceObj* resobj = (ResourceObj *)res;
    resobj->_allocation_t[0] = ~(allocation + type);
    if (type != STACK_OR_EMBEDDED) {
      // Called from operator new() and CollectionSetChooser(),
      // set verification value.
      resobj->_allocation_t[1] = (uintptr_t)&(resobj->_allocation_t[1]) + type;
    }
}

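// Descriptive note on the debug-only encoding above (a reading of the code,
// not normative documentation): because the object address is at least 4-byte
// aligned, its low two bits are free, so _allocation_t[0] = ~(this + type)
// lets get_allocation_type() recover the type from the low bits while the
// asserts verify that the rest of the word still decodes back to 'this'.
// _allocation_t[1] = &_allocation_t[1] + type is a second, independent check
// used by is_type_set() to tell a real operator new() initialization apart
// from stack garbage that merely looks plausible.
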
ResourceObj::allocation_type ResourceObj::get_allocation_type() const {
    assert(~(_allocation_t[0] | allocation_mask) == (uintptr_t)this, "lost resource object");
    return (allocation_type)((~_allocation_t[0]) & allocation_mask);
}

bool ResourceObj::is_type_set() const {
    allocation_type type = (allocation_type)(_allocation_t[1] & allocation_mask);
    return get_allocation_type() == type &&
           (_allocation_t[1] - type) == (uintptr_t)(&_allocation_t[1]);
}

ResourceObj::ResourceObj() { // default constructor
    if (~(_allocation_t[0] | allocation_mask) != (uintptr_t)this) {
      // Operator new() is not called for allocations
      // on stack and for embedded objects.
      set_allocation_type((address)this, STACK_OR_EMBEDDED);
    } else if (allocated_on_stack()) { // STACK_OR_EMBEDDED
      // For some reason we got a value that resembles
      // an embedded or stack object (operator new() does not
      // set such a type). Keep it, since it is a valid value
      // (even if it was garbage).
      // Ignore garbage in other fields.
    } else if (is_type_set()) {
      // Operator new() was called and type was set.
      assert(!allocated_on_stack(),
             "not embedded or stack, this(" PTR_FORMAT ") type %d a[0]=(" PTR_FORMAT ") a[1]=(" PTR_FORMAT ")",
             p2i(this), get_allocation_type(), _allocation_t[0], _allocation_t[1]);
    } else {
      // Operator new() was not called.
      // Assume that it is an embedded or stack object.
      set_allocation_type((address)this, STACK_OR_EMBEDDED);
    }
    _allocation_t[1] = 0; // Zap verification value
}

ResourceObj::ResourceObj(const ResourceObj& r) { // default copy constructor
    // Used in ClassFileParser::parse_constant_pool_entries() for ClassFileStream.
    // Note: garbage may resemble a valid value.
    assert(~(_allocation_t[0] | allocation_mask) != (uintptr_t)this || !is_type_set(),
           "embedded or stack only, this(" PTR_FORMAT ") type %d a[0]=(" PTR_FORMAT ") a[1]=(" PTR_FORMAT ")",
           p2i(this), get_allocation_type(), _allocation_t[0], _allocation_t[1]);
    set_allocation_type((address)this, STACK_OR_EMBEDDED);
    _allocation_t[1] = 0; // Zap verification value
}

ResourceObj& ResourceObj::operator=(const ResourceObj& r) { // default copy assignment
    // Used in InlineTree::ok_to_inline() for WarmCallInfo.
    assert(allocated_on_stack(),
           "copy only into local, this(" PTR_FORMAT ") type %d a[0]=(" PTR_FORMAT ") a[1]=(" PTR_FORMAT ")",
           p2i(this), get_allocation_type(), _allocation_t[0], _allocation_t[1]);
    // Keep the current _allocation_t value.
    return *this;
}

ResourceObj::~ResourceObj() {
    // allocated_on_C_heap() also checks that the encoded (in _allocation_t[0]) address == this.
    if (!allocated_on_C_heap()) { // ResourceObj::operator delete() will zap _allocation_t for C_heap.
      _allocation_t[0] = (uintptr_t)badHeapOopVal; // zap type
    }
}
#endif // ASSERT

//--------------------------------------------------------------------------------------
// Non-product code

#ifndef PRODUCT
void AllocatedObj::print() const       { print_on(tty); }
void AllocatedObj::print_value() const { print_value_on(tty); }

void AllocatedObj::print_on(outputStream* st) const {
  st->print_cr("AllocatedObj(" INTPTR_FORMAT ")", p2i(this));
}

void AllocatedObj::print_value_on(outputStream* st) const {
  st->print("AllocatedObj(" INTPTR_FORMAT ")", p2i(this));
}

AllocStats::AllocStats() {
  start_mallocs      = os::num_mallocs;
  start_frees        = os::num_frees;
  start_malloc_bytes = os::alloc_bytes;
  start_mfree_bytes  = os::free_bytes;
  start_res_bytes    = Arena::_bytes_allocated;
}

julong  AllocStats::num_mallocs() { return os::num_mallocs - start_mallocs; }
julong  AllocStats::alloc_bytes() { return os::alloc_bytes - start_malloc_bytes; }
julong  AllocStats::num_frees()   { return os::num_frees - start_frees; }
julong  AllocStats::free_bytes()  { return os::free_bytes - start_mfree_bytes; }
julong  AllocStats::resource_bytes() { return Arena::_bytes_allocated - start_res_bytes; }
void    AllocStats::print() {
  tty->print_cr(UINT64_FORMAT " mallocs (" UINT64_FORMAT "MB), "
                UINT64_FORMAT " frees (" UINT64_FORMAT "MB), " UINT64_FORMAT "MB resrc",
                num_mallocs(), alloc_bytes()/M, num_frees(), free_bytes()/M, resource_bytes()/M);
}

ReallocMark::ReallocMark() {
#ifdef ASSERT
  Thread *thread = Thread::current();
  _nesting = thread->resource_area()->nesting();
#endif
}

void ReallocMark::check() {
#ifdef ASSERT
  if (_nesting != Thread::current()->resource_area()->nesting()) {
    fatal("allocation bug: array could grow within nested ResourceMark");
  }
#endif
}
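
// Illustrative sketch (hypothetical class) of how ReallocMark is meant to be
// used: record the ResourceArea nesting level when the container is built,
// then call check() before every grow, so that a reallocation under a nested
// ResourceMark trips the fatal() above in debug builds.
//
//   class GrowingBuffer : public ResourceObj {
//     ReallocMark _nesting_check;
//     void grow() {
//       _nesting_check.check();  // asserts the nesting level is unchanged
//       // ... reallocate the backing storage in the resource area ...
//     }
//   };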

#endif // Non-product