312 if (++_next_id == _num_q) {
313 _next_id = 0;
314 }
315 assert(_next_id < _num_q, "_next_id %u _num_q %u _max_num_q %u", _next_id, _num_q, _max_num_q);
316 return id;
317 }
  // Returns the discovered list to use for references of the given type.
  DiscoveredList* get_discovered_list(ReferenceType rt);
  // MT-discovery insertion of obj onto refs_list; discovered_addr is the
  // address of obj's "discovered" field (body in the .inline.hpp file).
  inline void add_to_discovered_list_mt(DiscoveredList& refs_list, oop obj,
                                        HeapWord* discovered_addr);

  // Clears the discovered state of the references on refs_list.
  void clear_discovered_references(DiscoveredList& refs_list);

  // Logs per-list reference counts for the first active_length lists;
  // total_count is the sum over all of them. No-op in product builds.
  void log_reflist_counts(DiscoveredList ref_lists[], uint active_length, size_t total_count) PRODUCT_RETURN;

  // Balances reference queues.
  void balance_queues(DiscoveredList ref_lists[]);

  // Update (advance) the soft ref master clock field.
  void update_soft_ref_master_clock();

  // Whether obj should be considered for discovery by this processor.
  // NOTE(review): presumably instantiated for oop and narrowOop — confirm
  // against the .inline.hpp definition.
  template <class T>
  bool is_subject_to_discovery(T const obj) const;
public:
  // Default parameters give you a vanilla reference processor.
  ReferenceProcessor(BoolObjectClosure* is_subject_to_discovery,
                     bool mt_processing = false, uint mt_processing_degree = 1,
                     bool mt_discovery = false, uint mt_discovery_degree = 1,
                     bool atomic_discovery = true,
                     BoolObjectClosure* is_alive_non_header = NULL);

  // RefDiscoveryPolicy values
  enum DiscoveryPolicy {
    ReferenceBasedDiscovery = 0,
    ReferentBasedDiscovery = 1,
    DiscoveryPolicyMin = ReferenceBasedDiscovery,
    DiscoveryPolicyMax = ReferentBasedDiscovery
  };

  // One-time initialization of class-level state.
  static void init_statics();

public:
  // get and set "is_alive_non_header" field
402 VoidClosure* complete_gc,
403 AbstractRefProcTaskExecutor* task_executor,
404 ReferenceProcessorPhaseTimes* phase_times);
405
  // Enqueue references at end of GC (called by the garbage collector)
  void enqueue_discovered_references(AbstractRefProcTaskExecutor* task_executor,
                                     ReferenceProcessorPhaseTimes* phase_times);

  // If a discovery is in progress that is being superseded, abandon it: all
  // the discovered lists will be empty, and all the objects on them will
  // have NULL discovered fields. Must be called only at a safepoint.
  void abandon_partial_discovery();

  // Total count of discovered references of the given type, over all lists.
  size_t total_reference_count(ReferenceType rt) const;

  // debugging (no-ops in product builds)
  void verify_no_references_recorded() PRODUCT_RETURN;
  void verify_referent(oop obj) PRODUCT_RETURN;
420 };
421
// A reference processor that uses a single memory span to determine the area that
// is subject to discovery. Useful for collectors which have contiguous generations.
class SpanReferenceProcessor : public ReferenceProcessor {
  // Discovery predicate: an object is subject to discovery iff it lies
  // within the configured memory span.
  class SpanBasedDiscoverer : public BoolObjectClosure {
  public:
    MemRegion _span;

    SpanBasedDiscoverer(MemRegion span) : BoolObjectClosure(), _span(span) { }

    virtual bool do_object_b(oop obj) {
      return _span.contains(obj);
    }
  };

  // The discoverer passed to the base class; its span is mutable via
  // span()/set_span() below.
  SpanBasedDiscoverer _span_based_discoverer;
public:
  SpanReferenceProcessor(MemRegion span,
                         bool mt_processing = false, uint mt_processing_degree = 1,
                         bool mt_discovery = false, uint mt_discovery_degree = 1,
                         bool atomic_discovery = true,
                         BoolObjectClosure* is_alive_non_header = NULL);

  // get and set span
  MemRegion span() { return _span_based_discoverer._span; }
  void set_span(MemRegion span) { _span_based_discoverer._span = span; }
};
448
449 // A utility class to disable reference discovery in
450 // the scope which contains it, for given ReferenceProcessor.
451 class NoRefDiscovery: StackObj {
452 private:
453 ReferenceProcessor* _rp;
454 bool _was_discovering_refs;
455 public:
456 NoRefDiscovery(ReferenceProcessor* rp) : _rp(rp) {
457 _was_discovering_refs = _rp->discovery_enabled();
458 if (_was_discovering_refs) {
459 _rp->disable_discovery();
460 }
461 }
462
463 ~NoRefDiscovery() {
464 if (_was_discovering_refs) {
465 _rp->enable_discovery(false /*check_no_refs*/);
466 }
467 }
468 };
469
470 // A utility class to temporarily mutate the subject discovery closure of the
471 // given ReferenceProcessor in the scope that contains it.
472 class ReferenceProcessorSubjectToDiscoveryMutator : StackObj {
473 private:
474 ReferenceProcessor* _rp;
475 BoolObjectClosure* _saved_cl;
476
477 public:
478 ReferenceProcessorSubjectToDiscoveryMutator(ReferenceProcessor* rp, BoolObjectClosure* cl):
479 _rp(rp) {
480 _saved_cl = _rp->is_subject_to_discovery_closure();
481 _rp->set_is_subject_to_discovery_closure(cl);
482 }
483
484 ~ReferenceProcessorSubjectToDiscoveryMutator() {
485 _rp->set_is_subject_to_discovery_closure(_saved_cl);
486 }
487 };
488
489 // A utility class to temporarily mutate the span of the
490 // given ReferenceProcessor in the scope that contains it.
491 class ReferenceProcessorSpanMutator: StackObj {
492 private:
493 SpanReferenceProcessor* _rp;
494 MemRegion _saved_span;
495
496 public:
497 ReferenceProcessorSpanMutator(SpanReferenceProcessor* rp,
498 MemRegion span):
499 _rp(rp) {
500 _saved_span = _rp->span();
501 _rp->set_span(span);
502 }
503
504 ~ReferenceProcessorSpanMutator() {
505 _rp->set_span(_saved_span);
506 }
507 };
508
509 // A utility class to temporarily change the MT'ness of
510 // reference discovery for the given ReferenceProcessor
511 // in the scope that contains it.
512 class ReferenceProcessorMTDiscoveryMutator: StackObj {
513 private:
514 ReferenceProcessor* _rp;
515 bool _saved_mt;
516
517 public:
518 ReferenceProcessorMTDiscoveryMutator(ReferenceProcessor* rp,
519 bool mt):
520 _rp(rp) {
521 _saved_mt = _rp->discovery_is_mt();
522 _rp->set_mt_discovery(mt);
523 }
524
525 ~ReferenceProcessorMTDiscoveryMutator() {
|
312 if (++_next_id == _num_q) {
313 _next_id = 0;
314 }
315 assert(_next_id < _num_q, "_next_id %u _num_q %u _max_num_q %u", _next_id, _num_q, _max_num_q);
316 return id;
317 }
  // Returns the discovered list to use for references of the given type.
  DiscoveredList* get_discovered_list(ReferenceType rt);
  // MT-discovery insertion of obj onto refs_list; discovered_addr is the
  // address of obj's "discovered" field (body in the .inline.hpp file).
  inline void add_to_discovered_list_mt(DiscoveredList& refs_list, oop obj,
                                        HeapWord* discovered_addr);

  // Clears the discovered state of the references on refs_list.
  void clear_discovered_references(DiscoveredList& refs_list);

  // Logs per-list reference counts for the first active_length lists;
  // total_count is the sum over all of them. No-op in product builds.
  void log_reflist_counts(DiscoveredList ref_lists[], uint active_length, size_t total_count) PRODUCT_RETURN;

  // Balances reference queues.
  void balance_queues(DiscoveredList ref_lists[]);

  // Update (advance) the soft ref master clock field.
  void update_soft_ref_master_clock();

  // Whether obj should be considered for discovery by this processor.
  bool is_subject_to_discovery(oop const obj) const;

public:
  // Default parameters give you a vanilla reference processor.
  ReferenceProcessor(BoolObjectClosure* is_subject_to_discovery,
                     bool mt_processing = false, uint mt_processing_degree = 1,
                     bool mt_discovery = false, uint mt_discovery_degree = 1,
                     bool atomic_discovery = true,
                     BoolObjectClosure* is_alive_non_header = NULL);

  // RefDiscoveryPolicy values
  enum DiscoveryPolicy {
    ReferenceBasedDiscovery = 0,
    ReferentBasedDiscovery = 1,
    DiscoveryPolicyMin = ReferenceBasedDiscovery,
    DiscoveryPolicyMax = ReferentBasedDiscovery
  };

  // One-time initialization of class-level state.
  static void init_statics();

public:
  // get and set "is_alive_non_header" field
402 VoidClosure* complete_gc,
403 AbstractRefProcTaskExecutor* task_executor,
404 ReferenceProcessorPhaseTimes* phase_times);
405
  // Enqueue references at end of GC (called by the garbage collector)
  void enqueue_discovered_references(AbstractRefProcTaskExecutor* task_executor,
                                     ReferenceProcessorPhaseTimes* phase_times);

  // If a discovery is in progress that is being superseded, abandon it: all
  // the discovered lists will be empty, and all the objects on them will
  // have NULL discovered fields. Must be called only at a safepoint.
  void abandon_partial_discovery();

  // Total count of discovered references of the given type, over all lists.
  size_t total_reference_count(ReferenceType rt) const;

  // debugging (no-ops in product builds)
  void verify_no_references_recorded() PRODUCT_RETURN;
  void verify_referent(oop obj) PRODUCT_RETURN;
420 };
421
422 // A subject-to-discovery closure that uses a single memory span to determine the area that
423 // is subject to discovery. Useful for collectors which have contiguous generations.
424 class SpanSubjectToDiscoveryClosure : public BoolObjectClosure {
425 MemRegion _span;
426
427 public:
428 SpanSubjectToDiscoveryClosure() : BoolObjectClosure(), _span() { }
429 SpanSubjectToDiscoveryClosure(MemRegion span) : BoolObjectClosure(), _span(span) { }
430
431 MemRegion span() const { return _span; }
432
433 void set_span(MemRegion mr) {
434 _span = mr;
435 }
436
437 virtual bool do_object_b(oop obj) {
438 return _span.contains(obj);
439 }
440 };
441
442 // A utility class to disable reference discovery in
443 // the scope which contains it, for given ReferenceProcessor.
444 class NoRefDiscovery: StackObj {
445 private:
446 ReferenceProcessor* _rp;
447 bool _was_discovering_refs;
448 public:
449 NoRefDiscovery(ReferenceProcessor* rp) : _rp(rp) {
450 _was_discovering_refs = _rp->discovery_enabled();
451 if (_was_discovering_refs) {
452 _rp->disable_discovery();
453 }
454 }
455
456 ~NoRefDiscovery() {
457 if (_was_discovering_refs) {
458 _rp->enable_discovery(false /*check_no_refs*/);
459 }
460 }
461 };
462
463 // A utility class to temporarily mutate the subject discovery closure of the
464 // given ReferenceProcessor in the scope that contains it.
465 class ReferenceProcessorSubjectToDiscoveryMutator : StackObj {
466 ReferenceProcessor* _rp;
467 BoolObjectClosure* _saved_cl;
468
469 public:
470 ReferenceProcessorSubjectToDiscoveryMutator(ReferenceProcessor* rp, BoolObjectClosure* cl):
471 _rp(rp) {
472 _saved_cl = _rp->is_subject_to_discovery_closure();
473 _rp->set_is_subject_to_discovery_closure(cl);
474 }
475
476 ~ReferenceProcessorSubjectToDiscoveryMutator() {
477 _rp->set_is_subject_to_discovery_closure(_saved_cl);
478 }
479 };
480
481 // A utility class to temporarily mutate the span of the
482 // given ReferenceProcessor in the scope that contains it.
483 class ReferenceProcessorSpanMutator : StackObj {
484 ReferenceProcessor* _rp;
485 SpanSubjectToDiscoveryClosure _discoverer;
486 BoolObjectClosure* _old_discoverer;
487
488 public:
489 ReferenceProcessorSpanMutator(ReferenceProcessor* rp,
490 MemRegion span):
491 _rp(rp), _discoverer(span) {
492 _old_discoverer = rp->is_subject_to_discovery_closure();
493 rp->set_is_subject_to_discovery_closure(&_discoverer);
494 }
495
496 ~ReferenceProcessorSpanMutator() {
497 _rp->set_is_subject_to_discovery_closure(_old_discoverer);
498 }
499 };
500
501 // A utility class to temporarily change the MT'ness of
502 // reference discovery for the given ReferenceProcessor
503 // in the scope that contains it.
504 class ReferenceProcessorMTDiscoveryMutator: StackObj {
505 private:
506 ReferenceProcessor* _rp;
507 bool _saved_mt;
508
509 public:
510 ReferenceProcessorMTDiscoveryMutator(ReferenceProcessor* rp,
511 bool mt):
512 _rp(rp) {
513 _saved_mt = _rp->discovery_is_mt();
514 _rp->set_mt_discovery(mt);
515 }
516
517 ~ReferenceProcessorMTDiscoveryMutator() {
|