
src/share/vm/gc_interface/collectedHeap.cpp

rev 8910 : full patch for jfr
   1 /*
   2  * Copyright (c) 2001, 2014, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *


  71   }
  72 }
  73 
  74 VirtualSpaceSummary CollectedHeap::create_heap_space_summary() {
  75   size_t capacity_in_words = capacity() / HeapWordSize;
  76 
  77   return VirtualSpaceSummary(
  78     reserved_region().start(), reserved_region().start() + capacity_in_words, reserved_region().end());
  79 }
  80 
  81 GCHeapSummary CollectedHeap::create_heap_summary() {
  82   VirtualSpaceSummary heap_space = create_heap_space_summary();
  83   return GCHeapSummary(heap_space, used());
  84 }
  85 
  86 MetaspaceSummary CollectedHeap::create_metaspace_summary() {
  87   const MetaspaceSizes meta_space(
  88       MetaspaceAux::committed_bytes(),
  89       MetaspaceAux::used_bytes(),
  90       MetaspaceAux::reserved_bytes());

  91   const MetaspaceSizes data_space(
  92       MetaspaceAux::committed_bytes(Metaspace::NonClassType),
  93       MetaspaceAux::used_bytes(Metaspace::NonClassType),
  94       MetaspaceAux::reserved_bytes(Metaspace::NonClassType));

  95   const MetaspaceSizes class_space(
  96       MetaspaceAux::committed_bytes(Metaspace::ClassType),
  97       MetaspaceAux::used_bytes(Metaspace::ClassType),
  98       MetaspaceAux::reserved_bytes(Metaspace::ClassType));
  99 
 100   const MetaspaceChunkFreeListSummary& ms_chunk_free_list_summary =
 101     MetaspaceAux::chunk_free_list_summary(Metaspace::NonClassType);
 102   const MetaspaceChunkFreeListSummary& class_chunk_free_list_summary =
 103     MetaspaceAux::chunk_free_list_summary(Metaspace::ClassType);
 104 
 105   return MetaspaceSummary(MetaspaceGC::capacity_until_GC(), meta_space, data_space, class_space,
 106                           ms_chunk_free_list_summary, class_chunk_free_list_summary);
 107 }
 108 
 109 void CollectedHeap::print_heap_before_gc() {
 110   if (PrintHeapAtGC) {
 111     Universe::print_heap_before_gc();
 112   }
 113   if (_gc_heap_log != NULL) {
 114     _gc_heap_log->log_heap_before();


 269     thread->tlab().record_slow_allocation(size);
 270     return NULL;
 271   }
 272 
 273   // Discard tlab and allocate a new one.
 274   // To minimize fragmentation, the last TLAB may be smaller than the rest.
 275   size_t new_tlab_size = thread->tlab().compute_size(size);
 276 
 277   thread->tlab().clear_before_allocation();
 278 
 279   if (new_tlab_size == 0) {
 280     return NULL;
 281   }
 282 
 283   // Allocate a new TLAB...
 284   HeapWord* obj = Universe::heap()->allocate_new_tlab(new_tlab_size);
 285   if (obj == NULL) {
 286     return NULL;
 287   }
 288 
 289   AllocTracer::send_allocation_in_new_tlab_event(klass, new_tlab_size * HeapWordSize, size * HeapWordSize);
 290 
 291   if (ZeroTLAB) {
 292     // ..and clear it.
 293     Copy::zero_to_words(obj, new_tlab_size);
 294   } else {
 295     // ...and zap just allocated object.
 296 #ifdef ASSERT
 297     // Skip mangling the space corresponding to the object header to
 298     // ensure that the returned space is not considered parsable by
 299     // any concurrent GC thread.
 300     size_t hdr_size = oopDesc::header_size();
 301     Copy::fill_to_words(obj + hdr_size, new_tlab_size - hdr_size, badHeapWordVal);
 302 #endif // ASSERT
 303   }
 304   thread->tlab().fill(obj, obj + size, new_tlab_size);
 305   return obj;
 306 }
 307 
 308 void CollectedHeap::flush_deferred_store_barrier(JavaThread* thread) {
 309   MemRegion deferred = thread->deferred_card_mark();


   1 /*
   2  * Copyright (c) 2001, 2019, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *


  71   }
  72 }
  73 
  74 VirtualSpaceSummary CollectedHeap::create_heap_space_summary() {
  75   size_t capacity_in_words = capacity() / HeapWordSize;
  76 
  77   return VirtualSpaceSummary(
  78     reserved_region().start(), reserved_region().start() + capacity_in_words, reserved_region().end());
  79 }
  80 
  81 GCHeapSummary CollectedHeap::create_heap_summary() {
  82   VirtualSpaceSummary heap_space = create_heap_space_summary();
  83   return GCHeapSummary(heap_space, used());
  84 }
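
A rough illustration of the boundary arithmetic in create_heap_space_summary() above: capacity() is in bytes, so dividing by HeapWordSize yields the committed size in heap words, and pointer arithmetic on the HeapWord* start turns that back into an address. A minimal standalone sketch, where the base address, the sizes, and the pointer-sized word are made-up assumptions for illustration only:

#include <cstdio>
#include <cstddef>
#include <cstdint>

int main() {
  const size_t HeapWordSize = sizeof(void*);        // assumed pointer-sized heap word (8 on 64-bit)
  const uintptr_t start     = 0x40000000;           // hypothetical reserved_region().start()
  const size_t capacity_bytes = 512u * 1024 * 1024; // hypothetical capacity(): committed bytes
  const size_t reserved_bytes = 2048ul * 1024 * 1024; // hypothetical size of the reserved range

  // capacity() in bytes -> committed size in heap words, as in the code above.
  size_t capacity_in_words = capacity_bytes / HeapWordSize;

  // The three boundaries handed to VirtualSpaceSummary: start of the reserved
  // region, end of its committed portion, end of the reserved region.
  uintptr_t committed_end = start + capacity_in_words * HeapWordSize;
  uintptr_t reserved_end  = start + reserved_bytes;

  printf("start=%#lx committed_end=%#lx reserved_end=%#lx\n",
         (unsigned long)start, (unsigned long)committed_end, (unsigned long)reserved_end);
  return 0;
}
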
  85 
  86 MetaspaceSummary CollectedHeap::create_metaspace_summary() {
  87   const MetaspaceSizes meta_space(
  88       MetaspaceAux::committed_bytes(),
  89       MetaspaceAux::used_bytes(),
  90       MetaspaceAux::reserved_bytes());
  91 
  92   const MetaspaceSizes data_space(
  93       MetaspaceAux::committed_bytes(Metaspace::NonClassType),
  94       MetaspaceAux::used_bytes(Metaspace::NonClassType),
  95       MetaspaceAux::reserved_bytes(Metaspace::NonClassType));
  96 
  97   const MetaspaceSizes class_space(
  98       MetaspaceAux::committed_bytes(Metaspace::ClassType),
  99       MetaspaceAux::used_bytes(Metaspace::ClassType),
 100       MetaspaceAux::reserved_bytes(Metaspace::ClassType));
 101 
 102   const MetaspaceChunkFreeListSummary& ms_chunk_free_list_summary =
 103     MetaspaceAux::chunk_free_list_summary(Metaspace::NonClassType);
 104   const MetaspaceChunkFreeListSummary& class_chunk_free_list_summary =
 105     MetaspaceAux::chunk_free_list_summary(Metaspace::ClassType);
 106 
 107   return MetaspaceSummary(MetaspaceGC::capacity_until_GC(), meta_space, data_space, class_space,
 108                           ms_chunk_free_list_summary, class_chunk_free_list_summary);
 109 }
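
Each MetaspaceSizes triple above carries the same three figures, committed, used and reserved bytes, for the whole metaspace, the non-class part and the compressed class space respectively. A tiny sketch of the accounting relation those figures are expected to satisfy; the byte counts are invented for illustration:

#include <cassert>
#include <cstddef>

int main() {
  // Invented byte counts standing in for one MetaspaceAux triple.
  size_t used      = 40u * 1024 * 1024;       // metadata actually in use
  size_t committed = 48u * 1024 * 1024;       // memory committed for metadata
  size_t reserved  = 1u * 1024 * 1024 * 1024; // reserved metaspace address range

  // What is used fits in what is committed, which fits in what is reserved.
  assert(used <= committed && committed <= reserved);
  return 0;
}
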
 110 
 111 void CollectedHeap::print_heap_before_gc() {
 112   if (PrintHeapAtGC) {
 113     Universe::print_heap_before_gc();
 114   }
 115   if (_gc_heap_log != NULL) {
 116     _gc_heap_log->log_heap_before();


 271     thread->tlab().record_slow_allocation(size);
 272     return NULL;
 273   }
 274 
 275   // Discard tlab and allocate a new one.
 276   // To minimize fragmentation, the last TLAB may be smaller than the rest.
 277   size_t new_tlab_size = thread->tlab().compute_size(size);
 278 
 279   thread->tlab().clear_before_allocation();
 280 
 281   if (new_tlab_size == 0) {
 282     return NULL;
 283   }
 284 
 285   // Allocate a new TLAB...
 286   HeapWord* obj = Universe::heap()->allocate_new_tlab(new_tlab_size);
 287   if (obj == NULL) {
 288     return NULL;
 289   }
 290 
 291   AllocTracer::send_allocation_in_new_tlab_event(klass, obj, new_tlab_size * HeapWordSize, size * HeapWordSize, thread);
 292 
 293   if (ZeroTLAB) {
 294     // ..and clear it.
 295     Copy::zero_to_words(obj, new_tlab_size);
 296   } else {
 297     // ...and zap just allocated object.
 298 #ifdef ASSERT
 299     // Skip mangling the space corresponding to the object header to
 300     // ensure that the returned space is not considered parsable by
 301     // any concurrent GC thread.
 302     size_t hdr_size = oopDesc::header_size();
 303     Copy::fill_to_words(obj + hdr_size, new_tlab_size - hdr_size, badHeapWordVal);
 304 #endif // ASSERT
 305   }
 306   thread->tlab().fill(obj, obj + size, new_tlab_size);
 307   return obj;
 308 }
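
The JFR-related change in this hunk is the widened tracer call: besides the class and the two byte sizes, the patched call also hands over the address of the new TLAB and the allocating thread. A declaration-only sketch of the entry point as this call site implies it; the parameter names and exact types (for example Klass* versus KlassHandle) are assumptions, not the contents of the real allocTracer.hpp:

#include <cstddef>

// Stand-ins for the HotSpot types referenced at the call site.
class Klass;
class Thread;
class HeapWord;

class AllocTracer {
 public:
  // Pre-patch the event carried the class, the new TLAB size and the requested
  // allocation size (both in bytes); post-patch it also carries the start
  // address of the new TLAB and the thread performing the allocation.
  static void send_allocation_in_new_tlab_event(Klass* klass, HeapWord* obj,
                                                size_t tlab_size, size_t alloc_size,
                                                Thread* thread);
};
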
 309 
 310 void CollectedHeap::flush_deferred_store_barrier(JavaThread* thread) {
 311   MemRegion deferred = thread->deferred_card_mark();

