503 void print_gc_threads() {
504 print_gc_threads_on(tty);  // convenience overload: print to the global tty stream
505 }
506 // Iterator for all GC threads (other than VM thread)
507 virtual void gc_threads_do(ThreadClosure* tc) const = 0;
508
509 // Print any relevant tracing info that flags imply.
510 // Default implementation does nothing.  NOTE(review): declaration below is pure virtual, so there is no default implementation here — comment looks stale; confirm against subclasses.
511 virtual void print_tracing_info() const = 0;
512
513 void print_heap_before_gc();
514 void print_heap_after_gc();
515
516 // An object is scavengable if its location may move during a scavenge.
517 // (A scavenge is a GC which is not a full GC.)
518 virtual bool is_scavengable(oop obj) = 0;
519 // Registering and unregistering an nmethod (compiled code) with the heap.
520 // Override with specific mechanism for each specialized heap type.
521 virtual void register_nmethod(nmethod* nm) {}    // default: no-op
522 virtual void unregister_nmethod(nmethod* nm) {}  // default: no-op
523 virtual void verify_nmethod(nmethod* nmethod) {} // default: no-op; NOTE(review): parameter name shadows the nmethod type — consider renaming to nm for consistency with the two hooks above
524
525 void trace_heap_before_gc(const GCTracer* gc_tracer);  // emit before-GC heap event via the given tracer
526 void trace_heap_after_gc(const GCTracer* gc_tracer);   // emit after-GC heap event via the given tracer
527
528 // Heap verification
529 virtual void verify(VerifyOption option) = 0;
530
531 // Return true if concurrent phase control (via
532 // request_concurrent_phase_control) is supported by this collector.
533 // The default implementation returns false.
534 virtual bool supports_concurrent_phase_control() const;
535
536 // Request the collector enter the indicated concurrent phase, and
537 // wait until it does so. Supports WhiteBox testing. Only one
538 // request may be active at a time. Phases are designated by name;
539 // the set of names and their meaning is GC-specific. Once the
540 // requested phase has been reached, the collector will attempt to
541 // avoid transitioning to a new phase until a new request is made.
|
503 void print_gc_threads() {
504 print_gc_threads_on(tty);  // convenience overload: print to the global tty stream
505 }
506 // Iterator for all GC threads (other than VM thread)
507 virtual void gc_threads_do(ThreadClosure* tc) const = 0;
508
509 // Print any relevant tracing info that flags imply.
510 // Default implementation does nothing.  NOTE(review): declaration below is pure virtual, so there is no default implementation here — comment looks stale; confirm against subclasses.
511 virtual void print_tracing_info() const = 0;
512
513 void print_heap_before_gc();
514 void print_heap_after_gc();
515
516 // An object is scavengable if its location may move during a scavenge.
517 // (A scavenge is a GC which is not a full GC.)
518 virtual bool is_scavengable(oop obj) = 0;
519 // Registering and unregistering an nmethod (compiled code) with the heap.
520 // Override with specific mechanism for each specialized heap type.
521 virtual void register_nmethod(nmethod* nm) {}    // default: no-op
522 virtual void unregister_nmethod(nmethod* nm) {}  // default: no-op
523 virtual void flush_nmethod(nmethod* nm) {}       // default: no-op; presumably lets the heap drop per-nmethod state when the nmethod is flushed — TODO confirm against callers
524 virtual void verify_nmethod(nmethod* nmethod) {} // default: no-op; NOTE(review): parameter name shadows the nmethod type — consider renaming to nm for consistency with the hooks above
525
526 void trace_heap_before_gc(const GCTracer* gc_tracer);  // emit before-GC heap event via the given tracer
527 void trace_heap_after_gc(const GCTracer* gc_tracer);   // emit after-GC heap event via the given tracer
528
529 // Heap verification
530 virtual void verify(VerifyOption option) = 0;
531
532 // Return true if concurrent phase control (via
533 // request_concurrent_phase_control) is supported by this collector.
534 // The default implementation returns false.
535 virtual bool supports_concurrent_phase_control() const;
536
537 // Request the collector enter the indicated concurrent phase, and
538 // wait until it does so. Supports WhiteBox testing. Only one
539 // request may be active at a time. Phases are designated by name;
540 // the set of names and their meaning is GC-specific. Once the
541 // requested phase has been reached, the collector will attempt to
542 // avoid transitioning to a new phase until a new request is made.
|