/*
 * Copyright (c) 2018, 2019, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/javaClasses.hpp"
#include "gc/shared/allocTracer.hpp"
#include "gc/shared/collectedHeap.hpp"
#include "gc/shared/memAllocator.hpp"
#include "gc/shared/threadLocalAllocBuffer.inline.hpp"
#include "memory/universe.hpp"
#include "oops/arrayOop.hpp"
#include "oops/oop.inline.hpp"
#include "prims/jvmtiExport.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/thread.inline.hpp"
#include "services/lowMemoryDetector.hpp"
#include "utilities/align.hpp"
#include "utilities/copy.hpp"

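// Allocation is a small RAII helper that brackets one allocation attempt:
// its constructor verifies that the current thread is in a state where
// allocating is legal, and its destructor either raises OutOfMemoryError
// (when no memory was obtained) or runs the post-allocation verification
// and notification hooks (low-memory detector, JFR, DTrace, JVMTI).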
class MemAllocator::Allocation: StackObj {
  friend class MemAllocator;

  const MemAllocator& _allocator;
  Thread*             _thread;
  Handle              _handle;
  bool                _overhead_limit_exceeded;
  bool                _allocated_outside_tlab;
  size_t              _allocated_tlab_size;
  bool                _tlab_end_reset_for_sample;

  bool check_out_of_memory();
  void verify_before();
  void verify_after();
  void notify_allocation();
  void notify_allocation_jvmti_sampler();
  void notify_allocation_low_memory_detector();
  void notify_allocation_jfr_sampler();
  void notify_allocation_dtrace_sampler();
  void check_for_bad_heap_word_value() const;
#ifdef ASSERT
  void check_for_valid_allocation_state() const;
#endif

public:
  Allocation(const MemAllocator& allocator)
    : _allocator(allocator),
      _thread(Thread::current()),
      _overhead_limit_exceeded(false),
      _allocated_outside_tlab(false),
      _allocated_tlab_size(0),
      _tlab_end_reset_for_sample(false)
  {
    verify_before();
  }

  ~Allocation() {
    if (!check_out_of_memory()) {
      verify_after();
      notify_allocation();
    }
  }

  void set_obj(oop obj) { _handle = Handle(_thread, obj); }
  oop obj() const { return _handle(); }
};

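// Returns false when an object was successfully allocated, otherwise throws
// the appropriate OutOfMemoryError; the THROW_OOP_(ex, true) macro both
// installs the pending exception and returns true from this function.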
bool MemAllocator::Allocation::check_out_of_memory() {
  Thread* THREAD = _thread;
  assert(!HAS_PENDING_EXCEPTION, "Unexpected exception, will result in uninitialized storage");

  if (obj() != NULL) {
    return false;
  }

  const char* message = _overhead_limit_exceeded ? "GC overhead limit exceeded" : "Java heap space";
  if (!THREAD->in_retryable_allocation()) {
    // -XX:+HeapDumpOnOutOfMemoryError and -XX:OnOutOfMemoryError support
    report_java_out_of_memory(message);

    if (JvmtiExport::should_post_resource_exhausted()) {
      JvmtiExport::post_resource_exhausted(
        JVMTI_RESOURCE_EXHAUSTED_OOM_ERROR | JVMTI_RESOURCE_EXHAUSTED_JAVA_HEAP,
        message);
    }
    oop exception = _overhead_limit_exceeded ?
        Universe::out_of_memory_error_gc_overhead_limit() :
        Universe::out_of_memory_error_java_heap();
    THROW_OOP_(exception, true);
  } else {
    THROW_OOP_(Universe::out_of_memory_error_retry(), true);
  }
}

void MemAllocator::Allocation::verify_before() {
  // Clear unhandled oops for memory allocation.  Memory allocation might
  // not take out a lock if from tlab, so clear here.
  Thread* THREAD = _thread;
  CHECK_UNHANDLED_OOPS_ONLY(THREAD->clear_unhandled_oops();)
  assert(!HAS_PENDING_EXCEPTION, "Should not allocate with exception pending");
  debug_only(check_for_valid_allocation_state());
  assert(!Universe::heap()->is_gc_active(), "Allocation during gc not allowed");
}

void MemAllocator::Allocation::verify_after() {
  NOT_PRODUCT(check_for_bad_heap_word_value();)
}

void MemAllocator::Allocation::check_for_bad_heap_word_value() const {
  MemRegion obj_range = _allocator.obj_memory_range(obj());
  HeapWord* addr = obj_range.start();
  size_t size = obj_range.word_size();
  if (CheckMemoryInitialization && ZapUnusedHeapArea) {
    for (size_t slot = 0; slot < size; slot += 1) {
      assert((*(intptr_t*) (addr + slot)) != ((intptr_t) badHeapWordVal),
             "Found badHeapWordValue in post-allocation check");
    }
  }
}

#ifdef ASSERT
void MemAllocator::Allocation::check_for_valid_allocation_state() const {
  // How to choose between a pending exception and a potential
  // OutOfMemoryError?  Don't allow pending exceptions.
  // This is a VM policy failure, so how do we exhaustively test it?
  assert(!_thread->has_pending_exception(),
         "shouldn't be allocating with pending exception");
  if (StrictSafepointChecks) {
    assert(_thread->allow_allocation(),
           "Allocation done by thread for which allocation is blocked "
           "by No_Allocation_Verifier!");
    // Allocation of an oop can always invoke a safepoint,
    // hence, the true argument
    _thread->check_for_valid_safepoint_state(true);
  }
}
#endif

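// The Allocation flags record how the memory was obtained:
//   _allocated_outside_tlab    - allocated directly in the shared heap;
//   _allocated_tlab_size != 0  - a fresh TLAB of that size was installed;
//   _tlab_end_reset_for_sample - the sampling end point of the TLAB was
//                                reset to the real end during this slow path.
// These are the only cases the JVMTI heap sampler needs to see; ordinary
// fast-path TLAB allocations just accumulate towards the next sample point.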
void MemAllocator::Allocation::notify_allocation_jvmti_sampler() {
  // support for JVMTI VMObjectAlloc event (no-op if not enabled)
  JvmtiExport::vm_object_alloc_event_collector(obj());

  if (!JvmtiExport::should_post_sampled_object_alloc()) {
    // Sampling disabled
    return;
  }

  if (!_allocated_outside_tlab && _allocated_tlab_size == 0 && !_tlab_end_reset_for_sample) {
    // Only sample a non-TLAB allocation, or a TLAB allocation that either
    // refilled the TLAB or had its end reset after a sampler-induced slow
    // path; an ordinary fast-path TLAB allocation has nothing to report yet.
    return;
  }

  // If we want to be sampling, the allocated object is protected with a Handle
  // before doing the callback. The callback is done in the destructor of
  // the JvmtiSampledObjectAllocEventCollector.
  size_t bytes_since_last = 0;

  {
    JvmtiSampledObjectAllocEventCollector collector;
    size_t size_in_bytes = _allocator._word_size * HeapWordSize;
    ThreadLocalAllocBuffer& tlab = _thread->tlab();

    if (!_allocated_outside_tlab) {
      bytes_since_last = tlab.bytes_since_last_sample_point();
    }

    _thread->heap_sampler().check_for_sampling(obj(), size_in_bytes, bytes_since_last);
  }

  if (_tlab_end_reset_for_sample || _allocated_tlab_size != 0) {
    // Tell tlab to forget bytes_since_last if we passed it to the heap sampler.
    _thread->tlab().set_sample_end(bytes_since_last != 0);
  }
}

void MemAllocator::Allocation::notify_allocation_low_memory_detector() {
  // support low memory notifications (no-op if not enabled)
  LowMemoryDetector::detect_low_memory_for_collected_pools();
}

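// Note that JFR only sees slow-path allocations here: objects placed in an
// existing TLAB are not reported individually, but are effectively covered
// by the send_allocation_in_new_tlab event of the TLAB containing them.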
void MemAllocator::Allocation::notify_allocation_jfr_sampler() {
  HeapWord* mem = (HeapWord*)obj();
  size_t size_in_bytes = _allocator._word_size * HeapWordSize;

  if (_allocated_outside_tlab) {
    AllocTracer::send_allocation_outside_tlab(_allocator._klass, mem, size_in_bytes, _thread);
  } else if (_allocated_tlab_size != 0) {
    // TLAB was refilled
    AllocTracer::send_allocation_in_new_tlab(_allocator._klass, mem, _allocated_tlab_size * HeapWordSize,
                                             size_in_bytes, _thread);
  }
}

void MemAllocator::Allocation::notify_allocation_dtrace_sampler() {
  if (DTraceAllocProbes) {
    // support for Dtrace object alloc event (no-op most of the time)
    Klass* klass = _allocator._klass;
    size_t word_size = _allocator._word_size;
    if (klass != NULL && klass->name() != NULL) {
      SharedRuntime::dtrace_object_alloc(obj(), (int)word_size);
    }
  }
}

void MemAllocator::Allocation::notify_allocation() {
  notify_allocation_low_memory_detector();
  notify_allocation_jfr_sampler();
  notify_allocation_dtrace_sampler();
  notify_allocation_jvmti_sampler();
}

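// Only outside-TLAB allocations bump the thread's allocated-bytes counter
// here; memory handed out from a TLAB is accounted in bulk when the TLAB
// itself is retired (see ThreadLocalAllocBuffer).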
HeapWord* MemAllocator::allocate_outside_tlab(Allocation& allocation) const {
  allocation._allocated_outside_tlab = true;
  HeapWord* mem = Universe::heap()->mem_allocate(_word_size, &allocation._overhead_limit_exceeded);
  if (mem == NULL) {
    return mem;
  }

  NOT_PRODUCT(Universe::heap()->check_for_non_bad_heap_word_value(mem, _word_size));
  size_t size_in_bytes = _word_size * HeapWordSize;
  _thread->incr_allocated_bytes(size_in_bytes);

  return mem;
}

HeapWord* MemAllocator::allocate_inside_tlab(Allocation& allocation) const {
  assert(UseTLAB, "should use UseTLAB");

  // Try allocating from an existing TLAB.
  HeapWord* mem = _thread->tlab().allocate(_word_size);
  if (mem != NULL) {
    return mem;
  }

  // Try refilling the TLAB and allocating the object in it.
  return allocate_inside_tlab_slow(allocation);
}

HeapWord* MemAllocator::allocate_inside_tlab_slow(Allocation& allocation) const {
  HeapWord* mem = NULL;
  ThreadLocalAllocBuffer& tlab = _thread->tlab();

  if (JvmtiExport::should_post_sampled_object_alloc()) {
    tlab.set_back_allocation_end();
    mem = tlab.allocate(_word_size);

    // The TLAB end was restored to its real limit (bypassing the sample
    // point) to try this allocation; remember to reset the sample point
    // when done.
    allocation._tlab_end_reset_for_sample = true;

    if (mem != NULL) {
      return mem;
    }
  }

  // Retain tlab and allocate object in shared space if
  // the amount free in the tlab is too large to discard.
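  // For example, with the default -XX:TLABRefillWasteFraction=64 a TLAB is
  // kept while more than roughly 1/64 of its desired size is still free;
  // the limit is adaptive, so these numbers are illustrative only.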
  if (tlab.free() > tlab.refill_waste_limit()) {
    tlab.record_slow_allocation(_word_size);
    return NULL;
  }

  // Discard tlab and allocate a new one.
  // To minimize fragmentation, the last TLAB may be smaller than the rest.
  size_t new_tlab_size = tlab.compute_size(_word_size);

  tlab.retire_before_allocation();

  if (new_tlab_size == 0) {
    return NULL;
  }

  // Allocate a new TLAB requesting new_tlab_size. Any size
  // between minimal and new_tlab_size is accepted.
  size_t min_tlab_size = ThreadLocalAllocBuffer::compute_min_size(_word_size);
  mem = Universe::heap()->allocate_new_tlab(min_tlab_size, new_tlab_size, &allocation._allocated_tlab_size);
  if (mem == NULL) {
    assert(allocation._allocated_tlab_size == 0,
           "Allocation failed, but actual size was updated. min: " SIZE_FORMAT
           ", desired: " SIZE_FORMAT ", actual: " SIZE_FORMAT,
           min_tlab_size, new_tlab_size, allocation._allocated_tlab_size);
    return NULL;
  }
  assert(allocation._allocated_tlab_size != 0, "Allocation succeeded but actual size not updated. mem at: "
         PTR_FORMAT " min: " SIZE_FORMAT ", desired: " SIZE_FORMAT,
         p2i(mem), min_tlab_size, new_tlab_size);

  if (ZeroTLAB) {
    // ...and clear it.
    Copy::zero_to_words(mem, allocation._allocated_tlab_size);
  } else {
    // ...and zap just allocated object.
#ifdef ASSERT
    // Skip mangling the space corresponding to the object header to
    // ensure that the returned space is not considered parsable by
    // any concurrent GC thread.
    size_t hdr_size = oopDesc::header_size();
    Copy::fill_to_words(mem + hdr_size, allocation._allocated_tlab_size - hdr_size, badHeapWordVal);
#endif // ASSERT
  }

  tlab.fill(mem, mem + _word_size, allocation._allocated_tlab_size);
  return mem;
}

HeapWord* MemAllocator::mem_allocate(Allocation& allocation) const {
  if (UseTLAB) {
    HeapWord* result = allocate_inside_tlab(allocation);
    if (result != NULL) {
      return result;
    }
  }

  return allocate_outside_tlab(allocation);
}

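// Entry point for the concrete allocators declared in memAllocator.hpp.
// A typical caller looks roughly like this (sketch, assuming the usual
// CollectedHeap allocation path; not part of this file):
//
//   ObjAllocator allocator(klass, word_size, THREAD);
//   oop obj = allocator.allocate();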
oop MemAllocator::allocate() const {
  HandleMark hm(_thread);
  Handle obj_h;
  {
    Allocation allocation(*this);
    HeapWord* mem = mem_allocate(allocation);
    if (mem != NULL) {
      oop obj = initialize(mem);
      // Save obj in a handle for destructor safepoints
      allocation.set_obj(obj);
      obj_h = Handle(_thread, obj);
    }
  }
  return obj_h();
}

void MemAllocator::mem_clear(HeapWord* mem) const {
  assert(mem != NULL, "cannot initialize NULL object");
  const size_t hs = oopDesc::header_size();
  assert(_word_size >= hs, "unexpected object size");
  oopDesc::set_klass_gap(mem, 0);
  Copy::fill_to_aligned_words(mem + hs, _word_size - hs);
}

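// Installs the mark word and klass. The release store below lets concurrent
// collectors treat a non-NULL klass as the signal that the mark word, any
// length field, and the zeroed payload are already initialized; a reader
// presumably pairs this with an acquire load or equivalent ordering.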
oop MemAllocator::finish(HeapWord* mem) const {
  assert(mem != NULL, "NULL object pointer");
  if (UseBiasedLocking) {
    oopDesc::set_mark_raw(mem, _klass->prototype_header());
  } else {
    // May be bootstrapping
    oopDesc::set_mark_raw(mem, markOopDesc::prototype());
  }
  // Need a release store to ensure array/class length, mark word, and
  // object zeroing are visible before setting the klass non-NULL, for
  // concurrent collectors.
  oopDesc::release_set_klass(mem, _klass);
  return oop(mem);
}

oop ObjAllocator::initialize(HeapWord* mem) const {
  mem_clear(mem);
  return finish(mem);
}

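// When _do_zero is false the array payload is handed out uninitialized, so
// the debug-only bad-heap-word check must be restricted to the header; in
// debug builds the element part may still hold the TLAB's zap pattern.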
MemRegion ObjArrayAllocator::obj_memory_range(oop obj) const {
  if (_do_zero) {
    return MemAllocator::obj_memory_range(obj);
  }
  ArrayKlass* array_klass = ArrayKlass::cast(_klass);
  const size_t hs = arrayOopDesc::header_size(array_klass->element_type());
  return MemRegion((HeapWord*)obj, hs);
}

oop ObjArrayAllocator::initialize(HeapWord* mem) const {
  // Set array length before setting the _klass field because a
  // non-NULL klass field indicates that the object is parsable by
  // concurrent GC.
  assert(_length >= 0, "length should be non-negative");
  if (_do_zero) {
    mem_clear(mem);
  }
  arrayOopDesc::set_length(mem, _length);
  return finish(mem);
}

oop ClassAllocator::initialize(HeapWord* mem) const {
  // Set oop_size field before setting the _klass field because a
  // non-NULL _klass field indicates that the object is parsable by
  // concurrent GC.
  assert(_word_size > 0, "oop_size must be positive.");
  mem_clear(mem);
  java_lang_Class::set_oop_size(mem, (int)_word_size);
  return finish(mem);
}