/*
 * Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

  25 #include "precompiled.hpp"
  26 #include "classfile/javaClasses.hpp"
  27 #include "gc/shared/allocTracer.hpp"
  28 #include "gc/shared/collectedHeap.hpp"
  29 #include "gc/shared/memAllocator.hpp"
  30 #include "gc/shared/threadLocalAllocBuffer.inline.hpp"
  31 #include "memory/universe.hpp"
  32 #include "oops/arrayOop.hpp"
  33 #include "oops/oop.inline.hpp"
  34 #include "prims/jvmtiExport.hpp"
  35 #include "prims/jvmtiEventController.inline.hpp"
  36 #include "prims/jvmtiThreadState.inline.hpp"
  37 #include "runtime/sharedRuntime.hpp"
  38 #include "runtime/handles.inline.hpp"
  39 #include "runtime/thread.inline.hpp"
  40 #include "services/lowMemoryDetector.hpp"
  41 #include "utilities/align.hpp"
  42 #include "utilities/copy.hpp"
  43 
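// MemAllocator::Allocation is a RAII helper bracketing a single allocation:
// the constructor runs the pre-allocation verification, and the destructor
// either installs a pending OutOfMemoryError (when no memory was obtained)
// or runs the post-allocation verification and notification hooks.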
class MemAllocator::Allocation: StackObj {
  friend class MemAllocator;

  const MemAllocator& _allocator;
  Thread*             _thread;
  oop*                _obj_ptr;
  bool                _overhead_limit_exceeded;
  bool                _allocated_outside_tlab;
  size_t              _allocated_tlab_size;
  bool                _tlab_end_reset_for_sample;

  bool check_out_of_memory();
  void verify_before();
  void verify_after();
  void notify_allocation();
  void notify_allocation_jvmti_sampler();
  void notify_allocation_low_memory_detector();
  void notify_allocation_jfr_sampler();
  void notify_allocation_dtrace_sampler();
  void check_for_bad_heap_word_value() const;
#ifdef ASSERT
  void check_for_valid_allocation_state() const;
#endif

  class PreserveObj;

public:
  Allocation(const MemAllocator& allocator, oop* obj_ptr)
    : _allocator(allocator),
      _thread(Thread::current()),
      _obj_ptr(obj_ptr),
      _overhead_limit_exceeded(false),
      _allocated_outside_tlab(false),
      _allocated_tlab_size(0),
      _tlab_end_reset_for_sample(false)
  {
    verify_before();
  }

  ~Allocation() {
    if (!check_out_of_memory()) {
      verify_after();
      notify_allocation();
    }
  }

  oop obj() const { return *_obj_ptr; }
};

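// PreserveObj keeps the newly allocated object alive and valid across code
// that may safepoint: it moves the raw oop into a Handle for the duration
// of its scope and writes the (possibly moved) oop back in its destructor.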
class MemAllocator::Allocation::PreserveObj: StackObj {
  HandleMark _handle_mark;
  Handle     _handle;
  oop* const _obj_ptr;

public:
  PreserveObj(Thread* thread, oop* obj_ptr)
    : _handle_mark(thread),
      _handle(thread, *obj_ptr),
      _obj_ptr(obj_ptr)
  {
    *obj_ptr = NULL;
  }

  ~PreserveObj() {
    *_obj_ptr = _handle();
  }

  oop operator()() const {
    return _handle();
  }
};

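// Returns false if the allocation succeeded. Returns true if it failed; in
// that case an appropriate OutOfMemoryError (or the retry-allocation error)
// has been set as the pending exception on the current thread.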
bool MemAllocator::Allocation::check_out_of_memory() {
  Thread* THREAD = _thread;
  assert(!HAS_PENDING_EXCEPTION, "Unexpected exception, will result in uninitialized storage");

  if (obj() != NULL) {
    return false;
  }

  const char* message = _overhead_limit_exceeded ? "GC overhead limit exceeded" : "Java heap space";
  if (!THREAD->in_retryable_allocation()) {
    // -XX:+HeapDumpOnOutOfMemoryError and -XX:OnOutOfMemoryError support
    report_java_out_of_memory(message);

    if (JvmtiExport::should_post_resource_exhausted()) {
      JvmtiExport::post_resource_exhausted(
        JVMTI_RESOURCE_EXHAUSTED_OOM_ERROR | JVMTI_RESOURCE_EXHAUSTED_JAVA_HEAP,
        message);
    }
    oop exception = _overhead_limit_exceeded ?
        Universe::out_of_memory_error_gc_overhead_limit() :
        Universe::out_of_memory_error_java_heap();
    THROW_OOP_(exception, true);
  } else {
    THROW_OOP_(Universe::out_of_memory_error_retry(), true);
  }
}

void MemAllocator::Allocation::verify_before() {
  // Clear unhandled oops for memory allocation. Memory allocation might
  // not take out a lock when allocating from the TLAB, so clear here.
  Thread* THREAD = _thread;
  CHECK_UNHANDLED_OOPS_ONLY(THREAD->clear_unhandled_oops();)
  assert(!HAS_PENDING_EXCEPTION, "Should not allocate with exception pending");
  debug_only(check_for_valid_allocation_state());
  assert(!Universe::heap()->is_gc_active(), "Allocation during gc not allowed");
}

void MemAllocator::Allocation::verify_after() {
  NOT_PRODUCT(check_for_bad_heap_word_value();)
}

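// With -XX:+ZapUnusedHeapArea, unused heap memory is filled with the
// badHeapWordVal pattern. Freshly initialized object memory must no longer
// contain that pattern, so finding it here means the allocation path handed
// out memory that was never properly cleared.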
void MemAllocator::Allocation::check_for_bad_heap_word_value() const {
  MemRegion obj_range = _allocator.obj_memory_range(obj());
  HeapWord* addr = obj_range.start();
  size_t size = obj_range.word_size();
  if (CheckMemoryInitialization && ZapUnusedHeapArea) {
    for (size_t slot = 0; slot < size; slot += 1) {
      assert((*(intptr_t*) (addr + slot)) != ((intptr_t) badHeapWordVal),
             "Found badHeapWordValue in post-allocation check");
    }
  }
}

#ifdef ASSERT
void MemAllocator::Allocation::check_for_valid_allocation_state() const {
  // How to choose between a pending exception and a potential
  // OutOfMemoryError?  Don't allow pending exceptions.
  // This is a VM policy failure, so how do we exhaustively test it?
  assert(!_thread->has_pending_exception(),
         "shouldn't be allocating with pending exception");
  if (StrictSafepointChecks) {
    assert(_thread->allow_allocation(),
           "Allocation done by thread for which allocation is blocked "
           "by No_Allocation_Verifier!");
    // Allocation of an oop can always invoke a safepoint,
    // hence, the true argument
    _thread->check_for_valid_safepoint_state(true);
  }
}
#endif

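// SampledObjectAlloc can be enabled per JVMTI environment and per thread.
// Returns true if at least one environment has the event enabled for the
// current thread.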
static bool thread_enabled_for_one_jvmti_env() {
  JavaThread* thread = JavaThread::current();
  JvmtiThreadState* state = thread->jvmti_thread_state();
  if (state == NULL) {
    return false;
  }

  JvmtiEnvThreadStateIterator it(state);
  for (JvmtiEnvThreadState* ets = it.first(); ets != NULL; ets = it.next(ets)) {
    if (ets->is_enabled(JVMTI_EVENT_SAMPLED_OBJECT_ALLOC)) {
      return true;
    }
  }

  return false;
}

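// Post JVMTI allocation events: the VMObjectAlloc collector hook always
// runs (it is a no-op when no collector is active), and a SampledObjectAlloc
// check runs when heap sampling is enabled for this thread.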
void MemAllocator::Allocation::notify_allocation_jvmti_sampler() {
  // support for JVMTI VMObjectAlloc event (no-op if not enabled)
  JvmtiExport::vm_object_alloc_event_collector(obj());

  if (!JvmtiExport::should_post_sampled_object_alloc()) {
    // Sampling disabled
    return;
  }

  // Sampling is enabled for at least one thread, is it this one?
  if (!thread_enabled_for_one_jvmti_env()) {
    return;
  }

  if (!_allocated_outside_tlab && _allocated_tlab_size == 0 && !_tlab_end_reset_for_sample) {
    // Only sample a non-TLAB allocation, or a TLAB allocation that either
    // refilled the TLAB or reset its end because a sampler induced slow
    // path was taken; anything else has nothing to sample.
    return;
  }

  // Sampling is enabled; protect the allocated object with a Handle before
  // doing the callback. The callback is done in the destructor of the
  // JvmtiSampledObjectAllocEventCollector.
  {
    PreserveObj obj_h(_thread, _obj_ptr);
    JvmtiSampledObjectAllocEventCollector collector;
    size_t size_in_bytes = _allocator._word_size * HeapWordSize;
    ThreadLocalAllocBuffer& tlab = _thread->tlab();
    size_t bytes_since_last = _allocated_outside_tlab ? 0 : tlab.bytes_since_last_sample_point();
    _thread->heap_sampler().check_for_sampling(obj_h(), size_in_bytes, bytes_since_last);
  }

  if (_tlab_end_reset_for_sample || _allocated_tlab_size != 0) {
    _thread->tlab().set_sample_end();
  }
}

void MemAllocator::Allocation::notify_allocation_low_memory_detector() {
  // support low memory notifications (no-op if not enabled)
  LowMemoryDetector::detect_low_memory_for_collected_pools();
}

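// Send JFR allocation events: one kind for an allocation satisfied outside
// any TLAB, another for an allocation that caused a new TLAB to be created.
// Allocations satisfied inside an existing TLAB are not reported here.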
void MemAllocator::Allocation::notify_allocation_jfr_sampler() {
  HeapWord* mem = (HeapWord*)obj();
  size_t size_in_bytes = _allocator._word_size * HeapWordSize;

  if (_allocated_outside_tlab) {
    AllocTracer::send_allocation_outside_tlab(_allocator._klass, mem, size_in_bytes, _thread);
  } else if (_allocated_tlab_size != 0) {
    // TLAB was refilled
    AllocTracer::send_allocation_in_new_tlab(_allocator._klass, mem, _allocated_tlab_size * HeapWordSize,
                                             size_in_bytes, _thread);
  }
}

void MemAllocator::Allocation::notify_allocation_dtrace_sampler() {
  if (DTraceAllocProbes) {
    // support for DTrace object alloc event (no-op most of the time)
    Klass* klass = _allocator._klass;
    size_t word_size = _allocator._word_size;
    if (klass != NULL && klass->name() != NULL) {
      SharedRuntime::dtrace_object_alloc(obj(), (int)word_size);
    }
  }
}

void MemAllocator::Allocation::notify_allocation() {
  notify_allocation_low_memory_detector();
  notify_allocation_jfr_sampler();
  notify_allocation_dtrace_sampler();
  notify_allocation_jvmti_sampler();
}

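// Allocate directly from the heap, outside any TLAB. This path is taken
// when TLABs are disabled, and as the fallback when TLAB allocation
// (including the slow path) returns NULL, e.g. because the current TLAB
// still has too much free space to be discarded.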
HeapWord* MemAllocator::allocate_outside_tlab(Allocation& allocation) const {
  allocation._allocated_outside_tlab = true;
  HeapWord* mem = _heap->mem_allocate(_word_size, &allocation._overhead_limit_exceeded);
  if (mem == NULL) {
    return mem;
  }

  NOT_PRODUCT(_heap->check_for_non_bad_heap_word_value(mem, _word_size));
  size_t size_in_bytes = _word_size * HeapWordSize;
  _thread->incr_allocated_bytes(size_in_bytes);

  return mem;
}

HeapWord* MemAllocator::allocate_inside_tlab(Allocation& allocation) const {
  assert(UseTLAB, "should use UseTLAB");

  // Try allocating from an existing TLAB.
  HeapWord* mem = _thread->tlab().allocate(_word_size);
  if (mem != NULL) {
    return mem;
  }

  // Try refilling the TLAB and allocating the object in it.
  return allocate_inside_tlab_slow(allocation);
}

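// TLAB slow path: first retry the current TLAB (a sampling point may have
// artificially lowered its end), then either keep the TLAB and fall back to
// a shared-space allocation, or retire it and allocate a fresh one.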
HeapWord* MemAllocator::allocate_inside_tlab_slow(Allocation& allocation) const {
  HeapWord* mem = NULL;
  ThreadLocalAllocBuffer& tlab = _thread->tlab();

  if (JvmtiExport::should_post_sampled_object_alloc()) {
    // Try to allocate the sampled object from the TLAB: a sample point may
    // have been set, leaving usable space beyond the artificial end.
    tlab.set_back_allocation_end();
    mem = tlab.allocate(_word_size);
    if (mem != NULL) {
      allocation._tlab_end_reset_for_sample = true;
      return mem;
    }
  }

  // Retain tlab and allocate object in shared space if
  // the amount free in the tlab is too large to discard.
  if (tlab.free() > tlab.refill_waste_limit()) {
    tlab.record_slow_allocation(_word_size);
    return NULL;
  }

  // Discard tlab and allocate a new one.
  // To minimize fragmentation, the last TLAB may be smaller than the rest.
  size_t new_tlab_size = tlab.compute_size(_word_size);

  tlab.retire_before_allocation();

  if (new_tlab_size == 0) {
    return NULL;
  }

  // Allocate a new TLAB requesting new_tlab_size. Any size
  // between minimal and new_tlab_size is accepted.
  size_t min_tlab_size = ThreadLocalAllocBuffer::compute_min_size(_word_size);
  mem = _heap->allocate_new_tlab(min_tlab_size, new_tlab_size, &allocation._allocated_tlab_size);
  if (mem == NULL) {
    assert(allocation._allocated_tlab_size == 0,
           "Allocation failed, but actual size was updated. min: " SIZE_FORMAT
           ", desired: " SIZE_FORMAT ", actual: " SIZE_FORMAT,
           min_tlab_size, new_tlab_size, allocation._allocated_tlab_size);
    return NULL;
  }
  assert(allocation._allocated_tlab_size != 0, "Allocation succeeded but actual size not updated. mem at: "
         PTR_FORMAT " min: " SIZE_FORMAT ", desired: " SIZE_FORMAT,
         p2i(mem), min_tlab_size, new_tlab_size);

  if (ZeroTLAB) {
    // ...and clear it.
    Copy::zero_to_words(mem, allocation._allocated_tlab_size);
  } else {
    // ...and zap the newly allocated TLAB in debug builds.
#ifdef ASSERT
    // Skip mangling the space corresponding to the object header to
    // ensure that the returned space is not considered parsable by
    // any concurrent GC thread.
    size_t hdr_size = oopDesc::header_size();
    Copy::fill_to_words(mem + hdr_size, allocation._allocated_tlab_size - hdr_size, badHeapWordVal);
#endif // ASSERT
  }

  tlab.fill(mem, mem + _word_size, allocation._allocated_tlab_size);
  return mem;
}

HeapWord* MemAllocator::mem_allocate(Allocation& allocation) const {
  if (UseTLAB) {
    HeapWord* result = allocate_inside_tlab(allocation);
    if (result != NULL) {
      return result;
    }
  }

  return allocate_outside_tlab(allocation);
}

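// Entry point for the concrete allocators below. A typical caller
// constructs an allocator and calls allocate(), roughly (illustrative
// sketch only; the real call sites live in CollectedHeap):
//
//   ObjAllocator allocator(klass, word_size, THREAD);
//   oop obj = allocator.allocate();
//
// The Allocation helper's destructor runs when the inner scope exits, so
// the OOM check and allocation notifications happen before obj is returned.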
oop MemAllocator::allocate() const {
  oop obj = NULL;
  {
    Allocation allocation(*this, &obj);
    HeapWord* mem = mem_allocate(allocation);
    if (mem != NULL) {
      obj = initialize(mem);
    }
  }
  return obj;
}

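// Zero the object payload, skipping the header words that finish() will
// install. The klass gap is cleared explicitly because, with compressed
// class pointers, it lies within the header words rather than the payload.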
void MemAllocator::mem_clear(HeapWord* mem) const {
  assert(mem != NULL, "cannot initialize NULL object");
  const size_t hs = oopDesc::header_size();
  assert(_word_size >= hs, "unexpected object size");
  oopDesc::set_klass_gap(mem, 0);
  Copy::fill_to_aligned_words(mem + hs, _word_size - hs);
}

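// Publish the object: install the mark word, then release-store the klass.
// Only once the klass is non-NULL may concurrent collectors treat the
// memory as a parsable object.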
oop MemAllocator::finish(HeapWord* mem) const {
  assert(mem != NULL, "NULL object pointer");
  if (UseBiasedLocking) {
    oopDesc::set_mark_raw(mem, _klass->prototype_header());
  } else {
    // May be bootstrapping
    oopDesc::set_mark_raw(mem, markOopDesc::prototype());
  }
  // Need a release store to ensure array/class length, mark word, and
  // object zeroing are visible before setting the klass non-NULL, for
  // concurrent collectors.
  oopDesc::release_set_klass(mem, _klass);
  return oop(mem);
}

oop ObjAllocator::initialize(HeapWord* mem) const {
  mem_clear(mem);
  return finish(mem);
}

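// When the element payload is deliberately left unzeroed, the default
// whole-object range does not apply; the override below hands the
// post-allocation memory check a range computed from the array header size
// for this element type.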
MemRegion ObjArrayAllocator::obj_memory_range(oop obj) const {
  if (_do_zero) {
    return MemAllocator::obj_memory_range(obj);
  }
  ArrayKlass* array_klass = ArrayKlass::cast(_klass);
  const size_t hs = arrayOopDesc::header_size(array_klass->element_type());
  return MemRegion(((HeapWord*)obj) + hs, _word_size - hs);
}

oop ObjArrayAllocator::initialize(HeapWord* mem) const {
  // Set array length before setting the _klass field because a
  // non-NULL klass field indicates that the object is parsable by
  // concurrent GC.
  assert(_length >= 0, "length should be non-negative");
  if (_do_zero) {
    mem_clear(mem);
  }
  arrayOopDesc::set_length(mem, _length);
  return finish(mem);
}

oop ClassAllocator::initialize(HeapWord* mem) const {
  // Set oop_size field before setting the _klass field because a
  // non-NULL _klass field indicates that the object is parsable by
  // concurrent GC.
  assert(_word_size > 0, "oop_size must be positive.");
  mem_clear(mem);
  java_lang_Class::set_oop_size(mem, (int)_word_size);
  return finish(mem);
}