--- old/src/share/vm/memory/heapInspection.cpp	2013-03-16 11:20:20.071378635 +0100
+++ new/src/share/vm/memory/heapInspection.cpp	2013-03-16 11:20:20.007378636 +0100
@@ -228,59 +228,84 @@
  private:
   KlassInfoTable* _cit;
   size_t _missed_count;
+  BoolObjectClosure* _filter;
  public:
-  RecordInstanceClosure(KlassInfoTable* cit) :
-    _cit(cit), _missed_count(0) {}
+  RecordInstanceClosure(BoolObjectClosure* filter, KlassInfoTable* cit) :
+    _cit(cit), _missed_count(0), _filter(filter) {}
 
   void do_object(oop obj) {
-    if (!_cit->record_instance(obj)) {
-      _missed_count++;
+    if (should_visit(obj)) {
+      if (!_cit->record_instance(obj)) {
+        _missed_count++;
+      }
     }
   }
 
   size_t missed_count() { return _missed_count; }
+
+ private:
+  bool should_visit(oop obj) {
+    return _filter == NULL || _filter->do_object_b(obj);
+  }
 };
 
-void HeapInspection::heap_inspection(outputStream* st, bool need_prologue) {
+size_t HeapInspection::iterate_over_heap(KlassInfoTable* cit) {
+  return iterate_over_heap(NULL, cit);
+}
+
+size_t HeapInspection::iterate_over_heap(BoolObjectClosure* object_filter, KlassInfoTable* cit) {
+  RecordInstanceClosure ric(object_filter, cit);
+  Universe::heap()->object_iterate(&ric);
+  return ric.missed_count();
+}
+
+void HeapInspection::instance_inspection(KlassInfoClosure* closure) {
+  return instance_inspection(NULL, closure);
+}
+
+void HeapInspection::instance_inspection(BoolObjectClosure* object_filter,
+                                         KlassInfoClosure* closure) {
   ResourceMark rm;
-  HeapWord* ref;
-
-  CollectedHeap* heap = Universe::heap();
-  bool is_shared_heap = false;
-  switch (heap->kind()) {
-    case CollectedHeap::G1CollectedHeap:
-    case CollectedHeap::GenCollectedHeap: {
-      is_shared_heap = true;
-      SharedHeap* sh = (SharedHeap*)heap;
-      if (need_prologue) {
-        sh->gc_prologue(false /* !full */); // get any necessary locks, etc.
-      }
-      ref = sh->perm_gen()->used_region().start();
-      break;
-    }
+  KlassInfoTable cit(KlassInfoTable::cit_size, create_random_seed());
+
+  if (!cit.allocation_failed()) {
+    iterate_over_heap(object_filter, &cit);
+    cit.iterate(closure);
+  }
+}
+
+HeapWord* HeapInspection::create_random_seed() {
+  if (is_shared_heap()) {
+    SharedHeap* sh = (SharedHeap*)Universe::heap();
+    return sh->perm_gen()->used_region().start();
+  }
 #ifndef SERIALGC
-    case CollectedHeap::ParallelScavengeHeap: {
-      ParallelScavengeHeap* psh = (ParallelScavengeHeap*)heap;
-      ref = psh->perm_gen()->object_space()->used_region().start();
-      break;
-    }
+  ParallelScavengeHeap* psh = (ParallelScavengeHeap*)Universe::heap();
+  return psh->perm_gen()->object_space()->used_region().start();
 #endif // SERIALGC
-    default:
-      ShouldNotReachHere(); // Unexpected heap kind for this op
+  ShouldNotReachHere();
+  return NULL;
+}
+
+bool HeapInspection::is_shared_heap() {
+  CollectedHeap* heap = Universe::heap();
+  return heap->kind() == CollectedHeap::G1CollectedHeap ||
+         heap->kind() == CollectedHeap::GenCollectedHeap;
+}
+
+void HeapInspection::heap_inspection(outputStream* st, bool need_prologue) {
+  ResourceMark rm;
+
+  if (need_prologue && is_shared_heap()) {
+    SharedHeap* sh = (SharedHeap*)Universe::heap();
+    sh->gc_prologue(false /* !full */); // get any necessary locks, etc.
   }
+
   // Collect klass instance info
-  KlassInfoTable cit(KlassInfoTable::cit_size, ref);
+  KlassInfoTable cit(KlassInfoTable::cit_size, create_random_seed());
   if (!cit.allocation_failed()) {
-    // Iterate over objects in the heap
-    RecordInstanceClosure ric(&cit);
-    // If this operation encounters a bad object when using CMS,
-    // consider using safe_object_iterate() which avoids perm gen
-    // objects that may contain bad references.
-    Universe::heap()->object_iterate(&ric);
-
-    // Report if certain classes are not counted because of
-    // running out of C-heap for the histogram.
-    size_t missed_count = ric.missed_count();
+    size_t missed_count = iterate_over_heap(&cit);
     if (missed_count != 0) {
       st->print_cr("WARNING: Ran out of C-heap; undercounted " SIZE_FORMAT
                    " total instances in data below",
@@ -300,8 +325,8 @@
   }
   st->flush();
 
-  if (need_prologue && is_shared_heap) {
-    SharedHeap* sh = (SharedHeap*)heap;
+  if (need_prologue && is_shared_heap()) {
+    SharedHeap* sh = (SharedHeap*)Universe::heap();
     sh->gc_epilogue(false /* !full */); // release all acquired locks, etc.
   }
 }
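
Usage sketch (reviewer note, not part of the patch): the new instance_inspection(BoolObjectClosure*, KlassInfoClosure*) entry point walks the heap histogram with an optional object filter. A minimal, hypothetical driver is sketched below, assuming the JDK 7-era closure interfaces (BoolObjectClosure::do_object_b, KlassInfoClosure::do_cinfo, KlassInfoEntry::count), that HeapInspection stays AllStatic, and that the call is made from a context where Universe::heap()->object_iterate() is safe (e.g. inside a VM operation at a safepoint). LargeObjectFilter and CountingKlassClosure are made-up names for illustration only.

// Hypothetical illustration only -- not part of the patch.
#include "memory/heapInspection.hpp"
#include "memory/iterator.hpp"
#include "oops/oop.inline.hpp"
#include "utilities/debug.hpp"

// Filter that admits only objects of at least _min_words heap words.
class LargeObjectFilter : public BoolObjectClosure {
 private:
  size_t _min_words;
 public:
  LargeObjectFilter(size_t min_words) : _min_words(min_words) {}
  // The patched RecordInstanceClosure only calls do_object_b() on the filter,
  // so the plain ObjectClosure entry point should never be reached here.
  void do_object(oop obj)   { ShouldNotReachHere(); }
  bool do_object_b(oop obj) { return (size_t)obj->size() >= _min_words; }
};

// Consumer that tallies the classes and instances that passed the filter.
class CountingKlassClosure : public KlassInfoClosure {
 private:
  size_t _classes;
  size_t _instances;
 public:
  CountingKlassClosure() : _classes(0), _instances(0) {}
  void do_cinfo(KlassInfoEntry* cie) {
    _classes++;
    _instances += (size_t)cie->count();
  }
  size_t classes() const   { return _classes; }
  size_t instances() const { return _instances; }
};

// Intended use, e.g. from inside a VM operation:
//   LargeObjectFilter filter(64);   // only objects of 64 heap words or more
//   CountingKlassClosure counter;
//   HeapInspection::instance_inspection(&filter, &counter);
//   // counter.classes() / counter.instances() now describe the filtered heap.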