23 */
24
25 #include "precompiled.hpp"
26 #include "classfile/systemDictionary.hpp"
27 #include "gc/shared/allocTracer.hpp"
28 #include "gc/shared/barrierSet.inline.hpp"
29 #include "gc/shared/collectedHeap.hpp"
30 #include "gc/shared/collectedHeap.inline.hpp"
31 #include "gc/shared/gcHeapSummary.hpp"
32 #include "gc/shared/gcTrace.hpp"
33 #include "gc/shared/gcTraceTime.inline.hpp"
34 #include "gc/shared/gcWhen.hpp"
35 #include "gc/shared/vmGCOperations.hpp"
36 #include "logging/log.hpp"
37 #include "memory/metaspace.hpp"
38 #include "memory/resourceArea.hpp"
39 #include "oops/instanceMirrorKlass.hpp"
40 #include "oops/oop.inline.hpp"
41 #include "runtime/init.hpp"
42 #include "runtime/thread.inline.hpp"
43 #include "services/heapDumper.hpp"
44 #include "utilities/align.hpp"
45
46
47 #ifdef ASSERT
48 int CollectedHeap::_fire_out_of_memory_count = 0;
49 #endif
50
51 size_t CollectedHeap::_filler_array_max_size = 0;
52
53 template <>
54 void EventLogBase<GCMessage>::print(outputStream* st, GCMessage& m) {
55 st->print_cr("GC heap %s", m.is_before ? "before" : "after");
56 st->print_raw(m);
57 }
58
59 void GCHeapLog::log_heap(CollectedHeap* heap, bool before) {
60 if (!should_log()) {
61 return;
62 }
533 }
534
// Make the heap parsable (iterable word-by-word) by retiring or filling
// every thread's TLAB and, where COMPILER2/JVMCI deferred initial card
// marks are in use, flushing those deferred store barriers.
// retire_tlabs: true to retire the TLABs outright; false to merely fill
// them so the heap parses, leaving them usable afterwards.
void CollectedHeap::ensure_parsability(bool retire_tlabs) {
  // The second disjunct in the assertion below makes a concession
  // for the start-up verification done while the VM is being
  // created. Callers be careful that you know that mutators
  // aren't going to interfere -- for instance, this is permissible
  // if we are still single-threaded and have either not yet
  // started allocating (nothing much to verify) or we have
  // started allocating but are now a full-fledged JavaThread
  // (and have thus made our TLABs available for filling).
  assert(SafepointSynchronize::is_at_safepoint() ||
         !is_init_completed(),
         "Should only be called at a safepoint or at start-up"
         " otherwise concurrent mutator activity may make heap "
         " unparsable again");
  // Snapshot the flags once so every thread is treated uniformly below.
  const bool use_tlab = UseTLAB;
  const bool deferred = _defer_initial_card_mark;
  // The main thread starts allocating via a TLAB even before it
  // has added itself to the threads list at vm boot-up.
  assert(!use_tlab || Threads::first() != NULL,
         "Attempt to fill tlabs before main thread has been added"
         " to threads list is doomed to failure!");
  for (JavaThread *thread = Threads::first(); thread; thread = thread->next()) {
     if (use_tlab) thread->tlab().make_parsable(retire_tlabs);
#if defined(COMPILER2) || INCLUDE_JVMCI
     // The deferred store barriers must all have been flushed to the
     // card-table (or other remembered set structure) before GC starts
     // processing the card-table (or other remembered set).
     if (deferred) flush_deferred_store_barrier(thread);
#else
     // Without C2/JVMCI no card marks are ever deferred; verify that.
     assert(!deferred, "Should be false");
     assert(thread->deferred_card_mark().is_empty(), "Should be empty");
#endif
  }
}
569
570 void CollectedHeap::accumulate_statistics_all_tlabs() {
571 if (UseTLAB) {
572 assert(SafepointSynchronize::is_at_safepoint() ||
573 !is_init_completed(),
574 "should only accumulate statistics on tlabs at safepoint");
575
576 ThreadLocalAllocBuffer::accumulate_statistics_before_gc();
|
23 */
24
25 #include "precompiled.hpp"
26 #include "classfile/systemDictionary.hpp"
27 #include "gc/shared/allocTracer.hpp"
28 #include "gc/shared/barrierSet.inline.hpp"
29 #include "gc/shared/collectedHeap.hpp"
30 #include "gc/shared/collectedHeap.inline.hpp"
31 #include "gc/shared/gcHeapSummary.hpp"
32 #include "gc/shared/gcTrace.hpp"
33 #include "gc/shared/gcTraceTime.inline.hpp"
34 #include "gc/shared/gcWhen.hpp"
35 #include "gc/shared/vmGCOperations.hpp"
36 #include "logging/log.hpp"
37 #include "memory/metaspace.hpp"
38 #include "memory/resourceArea.hpp"
39 #include "oops/instanceMirrorKlass.hpp"
40 #include "oops/oop.inline.hpp"
41 #include "runtime/init.hpp"
42 #include "runtime/thread.inline.hpp"
43 #include "runtime/threadSMR.hpp"
44 #include "services/heapDumper.hpp"
45 #include "utilities/align.hpp"
46
47
48 #ifdef ASSERT
49 int CollectedHeap::_fire_out_of_memory_count = 0;
50 #endif
51
52 size_t CollectedHeap::_filler_array_max_size = 0;
53
// Event-log specialization for GCMessage entries: writes a header line
// identifying whether the snapshot was taken before or after the GC,
// followed by the message's captured text verbatim.
template <>
void EventLogBase<GCMessage>::print(outputStream* st, GCMessage& m) {
  st->print_cr("GC heap %s", m.is_before ? "before" : "after");
  st->print_raw(m);
}
59
60 void GCHeapLog::log_heap(CollectedHeap* heap, bool before) {
61 if (!should_log()) {
62 return;
63 }
534 }
535
// Make the heap parsable (iterable word-by-word) by retiring or filling
// every thread's TLAB and, where COMPILER2/JVMCI deferred initial card
// marks are in use, flushing those deferred store barriers.
// retire_tlabs: true to retire the TLABs outright; false to merely fill
// them so the heap parses, leaving them usable afterwards.
void CollectedHeap::ensure_parsability(bool retire_tlabs) {
  // The second disjunct in the assertion below makes a concession
  // for the start-up verification done while the VM is being
  // created. Callers be careful that you know that mutators
  // aren't going to interfere -- for instance, this is permissible
  // if we are still single-threaded and have either not yet
  // started allocating (nothing much to verify) or we have
  // started allocating but are now a full-fledged JavaThread
  // (and have thus made our TLABs available for filling).
  assert(SafepointSynchronize::is_at_safepoint() ||
         !is_init_completed(),
         "Should only be called at a safepoint or at start-up"
         " otherwise concurrent mutator activity may make heap "
         " unparsable again");
  // Snapshot the flags once so every thread is treated uniformly below.
  const bool use_tlab = UseTLAB;
  const bool deferred = _defer_initial_card_mark;
  // The main thread starts allocating via a TLAB even before it
  // has added itself to the threads list at vm boot-up.
  // NOTE(review): ThreadsListHandle presumably pins a stable snapshot of
  // the JavaThread list (Thread-SMR; see runtime/threadSMR.hpp) -- confirm.
  ThreadsListHandle tlh;
  assert(!use_tlab || tlh.length() > 0,
         "Attempt to fill tlabs before main thread has been added"
         " to threads list is doomed to failure!");
  // Iterate over the snapshotted list rather than the live one.
  JavaThreadIterator jti(tlh.list());
  for (JavaThread *thread = jti.first(); thread != NULL; thread = jti.next()) {
    if (use_tlab) thread->tlab().make_parsable(retire_tlabs);
#if defined(COMPILER2) || INCLUDE_JVMCI
    // The deferred store barriers must all have been flushed to the
    // card-table (or other remembered set structure) before GC starts
    // processing the card-table (or other remembered set).
    if (deferred) flush_deferred_store_barrier(thread);
#else
    // Without C2/JVMCI no card marks are ever deferred; verify that.
    assert(!deferred, "Should be false");
    assert(thread->deferred_card_mark().is_empty(), "Should be empty");
#endif
  }
}
572
573 void CollectedHeap::accumulate_statistics_all_tlabs() {
574 if (UseTLAB) {
575 assert(SafepointSynchronize::is_at_safepoint() ||
576 !is_init_completed(),
577 "should only accumulate statistics on tlabs at safepoint");
578
579 ThreadLocalAllocBuffer::accumulate_statistics_before_gc();
|