
src/share/vm/memory/referenceProcessor.hpp

rev 7522 : * * *
8066827: Remove ReferenceProcessor::clean_up_discovered_references()
Summary: Abandon rather than clean up discovered references.
Reviewed-by:
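
The patch replaces per-list cleanup with wholesale abandonment: rather than walking each discovered list to prune cleared or enqueued entries, the partially discovered state is dropped and the references are simply rediscovered later. As a hedged sketch of the call-site shape (rp is an assumed ReferenceProcessor*; abandon_partial_discovery() is the existing HotSpot entry point, and this line is illustrative, not a hunk from this patch):

    rp->abandon_partial_discovery();   // drop all discovered lists wholesale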


Old version:

 336 
 337   // Enqueue references with a certain reachability level
 338   void enqueue_discovered_reflist(DiscoveredList& refs_list, HeapWord* pending_list_addr);
 339 
 340   // "Preclean" all the discovered reference lists
 341   // by removing references with strongly reachable referents.
 342   // The first argument is a predicate on an oop that indicates
 343   // its (strong) reachability and the second is a closure that
 344   // may be used to incrementalize or abort the precleaning process.
 345   // The caller is responsible for taking care of potential
 346   // interference with concurrent operations on these lists
 347   // (or predicates involved) by other threads. Currently
 348   // only used by the CMS collector.
 349   void preclean_discovered_references(BoolObjectClosure* is_alive,
 350                                       OopClosure*        keep_alive,
 351                                       VoidClosure*       complete_gc,
 352                                       YieldClosure*      yield,
 353                                       GCTimer*           gc_timer,
 354                                       GCId               gc_id);
 355 
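To make the yield/abort hook above concrete, here is a minimal hypothetical YieldClosure; the class name and abort flag are illustrative, not part of this file:

    // Sketch: lets precleaning bail out when another thread requests it.
    class SketchYieldClosure : public YieldClosure {
      volatile bool* _abort_requested;   // hypothetical flag set elsewhere
     public:
      SketchYieldClosure(volatile bool* flag) : _abort_requested(flag) {}
      virtual bool should_return() { return *_abort_requested; }
    };
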
 356   // Delete entries in the discovered lists that have
 357   // either a null referent or are not active. Such
 358   // Reference objects can result from the clearing
 359   // or enqueueing of Reference objects concurrent
 360   // with their discovery by a (concurrent) collector.
 361   // For a definition of "active" see java.lang.ref.Reference;
 362   // Refs are born active, become inactive when enqueued,
 363   // and never become active again. The state of being
 364   // active is encoded as follows: A Ref is active
 365   // if and only if its "next" field is NULL.
 366   void clean_up_discovered_references();
 367   void clean_up_discovered_reflist(DiscoveredList& refs_list);
 368 
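The "active iff next is NULL" encoding above translates directly into code; a minimal sketch, assuming HotSpot's java_lang_ref_Reference accessors from javaClasses.hpp (the helper name is hypothetical):

    // Sketch of the activity test described in the comment.
    static bool ref_is_active(oop ref) {
      // Born active; enqueueing sets "next" non-NULL, permanently.
      return java_lang_ref_Reference::next(ref) == NULL;
    }
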
 369   // Returns the name of the discovered reference list
 370   // occupying the i / _num_q slot.
 371   const char* list_name(uint i);
 372 
 373   void enqueue_discovered_reflists(HeapWord* pending_list_addr, AbstractRefProcTaskExecutor* task_executor);
 374 
 375  protected:
 376   // "Preclean" the given discovered reference list
 377   // by removing references with strongly reachable referents.
 378   // Currently used in support of CMS only.
 379   void preclean_discovered_reflist(DiscoveredList&    refs_list,
 380                                    BoolObjectClosure* is_alive,
 381                                    OopClosure*        keep_alive,
 382                                    VoidClosure*       complete_gc,
 383                                    YieldClosure*      yield);
 384 
 385   // round-robin mod _num_q (note: _not_ mod _max_num_q)
 386   uint next_id() {
 387     uint id = _next_id;
 388     if (++_next_id == _num_q) {
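For orientation, a self-contained sketch of the round-robin scheme described above, wrapping at _num_q rather than _max_num_q (the standalone signature is illustrative):

    // Sketch: ids cycle through [0, num_q).
    static uint next_id_sketch(uint& next, uint num_q) {
      uint id = next;
      if (++next == num_q) {
        next = 0;   // wrap around
      }
      return id;
    }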


 422     DiscoveryPolicyMin      = ReferenceBasedDiscovery,
 423     DiscoveryPolicyMax      = ReferentBasedDiscovery
 424   };
 425 
 426   static void init_statics();
 427 
 428  public:
 429   // get and set "is_alive_non_header" field
 430   BoolObjectClosure* is_alive_non_header() {
 431     return _is_alive_non_header;
 432   }
 433   void set_is_alive_non_header(BoolObjectClosure* is_alive_non_header) {
 434     _is_alive_non_header = is_alive_non_header;
 435   }
 436 
 437   // get and set span
 438   MemRegion span()                   { return _span; }
 439   void      set_span(MemRegion span) { _span = span; }
 440 
 441   // start and stop weak ref discovery
 442   void enable_discovery(bool verify_disabled, bool check_no_refs);
 443   void disable_discovery()  { _discovering_refs = false; }
 444   bool discovery_enabled()  { return _discovering_refs;  }
 445 
 446   // whether discovery is atomic wrt other collectors
 447   bool discovery_is_atomic() const { return _discovery_is_atomic; }
 448   void set_atomic_discovery(bool atomic) { _discovery_is_atomic = atomic; }
 449 
 450   // whether the JDK in which we are embedded is a pre-4965777 JDK,
 451   // and thus whether or not it uses the discovered field to chain
 452   // the entries in the pending list.
 453   static bool pending_list_uses_discovered_field() {
 454     return _pending_list_uses_discovered_field;
 455   }
 456 
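A hedged illustration of what the flag above selects; the accessors are HotSpot's java_lang_ref_Reference, while the helper name is hypothetical:

    // Sketch: which field links a pending-list entry to its successor.
    static oop pending_successor(oop ref) {
      return ReferenceProcessor::pending_list_uses_discovered_field()
           ? java_lang_ref_Reference::discovered(ref)   // post-4965777
           : java_lang_ref_Reference::next(ref);        // pre-4965777
    }
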
 457   // whether discovery is done by multiple threads simultaneously
 458   bool discovery_is_mt() const { return _discovery_is_mt; }
 459   void set_mt_discovery(bool mt) { _discovery_is_mt = mt; }
 460 
 461   // Whether we are in a phase when _processing_ is MT.
 462   bool processing_is_mt() const { return _processing_is_mt; }


 500   // clear the discovered lists (unlinking each entry).
 501   void clear_discovered_references() PRODUCT_RETURN;
 502 };
 503 
 504 // A utility class to disable reference discovery in
 505 // the scope which contains it, for given ReferenceProcessor.
 506 class NoRefDiscovery: StackObj {
 507  private:
 508   ReferenceProcessor* _rp;
 509   bool _was_discovering_refs;
 510  public:
 511   NoRefDiscovery(ReferenceProcessor* rp) : _rp(rp) {
 512     _was_discovering_refs = _rp->discovery_enabled();
 513     if (_was_discovering_refs) {
 514       _rp->disable_discovery();
 515     }
 516   }
 517 
 518   ~NoRefDiscovery() {
 519     if (_was_discovering_refs) {
 520       _rp->enable_discovery(true /*verify_disabled*/, false /*check_no_refs*/);
 521     }
 522   }
 523 };
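Usage follows the usual RAII pattern; a minimal sketch in which rp is an assumed ReferenceProcessor*:

    {
      NoRefDiscovery no_discovery(rp);   // discovery off in this scope
      // ... heap work that must not discover new references ...
    }                          // destructor re-enables discovery if it was on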
 524 
 525 
 526 // A utility class to temporarily mutate the span of the
 527 // given ReferenceProcessor in the scope that contains it.
 528 class ReferenceProcessorSpanMutator: StackObj {
 529  private:
 530   ReferenceProcessor* _rp;
 531   MemRegion           _saved_span;
 532 
 533  public:
 534   ReferenceProcessorSpanMutator(ReferenceProcessor* rp,
 535                                 MemRegion span):
 536     _rp(rp) {
 537     _saved_span = _rp->span();
 538     _rp->set_span(span);
 539   }
 540 
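Usage mirrors NoRefDiscovery; a sketch in which rp and young_region are assumed names (the destructor, which falls outside this hunk, restores the saved span):

    {
      ReferenceProcessorSpanMutator mut(rp, young_region);
      // ... collection work with discovery confined to young_region ...
    }   // scope exit restores _saved_span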




New version:

 336 
 337   // Enqueue references with a certain reachability level
 338   void enqueue_discovered_reflist(DiscoveredList& refs_list, HeapWord* pending_list_addr);
 339 
 340   // "Preclean" all the discovered reference lists
 341   // by removing references with strongly reachable referents.
 342   // The first argument is a predicate on an oop that indicates
 343   // its (strong) reachability and the second is a closure that
 344   // may be used to incrementalize or abort the precleaning process.
 345   // The caller is responsible for taking care of potential
 346   // interference with concurrent operations on these lists
 347   // (or predicates involved) by other threads. Currently
 348   // only used by the CMS collector.
 349   void preclean_discovered_references(BoolObjectClosure* is_alive,
 350                                       OopClosure*        keep_alive,
 351                                       VoidClosure*       complete_gc,
 352                                       YieldClosure*      yield,
 353                                       GCTimer*           gc_timer,
 354                                       GCId               gc_id);
 355 













 356   // Returns the name of the discovered reference list
 357   // occupying the i / _num_q slot.
 358   const char* list_name(uint i);
 359 
 360   void enqueue_discovered_reflists(HeapWord* pending_list_addr, AbstractRefProcTaskExecutor* task_executor);
 361 
 362  protected:
 363   // "Preclean" the given discovered reference list
 364   // by removing references with strongly reachable referents.
 365   // Currently used in support of CMS only.
 366   void preclean_discovered_reflist(DiscoveredList&    refs_list,
 367                                    BoolObjectClosure* is_alive,
 368                                    OopClosure*        keep_alive,
 369                                    VoidClosure*       complete_gc,
 370                                    YieldClosure*      yield);
 371 
 372   // round-robin mod _num_q (note: _not_ mod _max_num_q)
 373   uint next_id() {
 374     uint id = _next_id;
 375     if (++_next_id == _num_q) {


 409     DiscoveryPolicyMin      = ReferenceBasedDiscovery,
 410     DiscoveryPolicyMax      = ReferentBasedDiscovery
 411   };
 412 
 413   static void init_statics();
 414 
 415  public:
 416   // get and set "is_alive_non_header" field
 417   BoolObjectClosure* is_alive_non_header() {
 418     return _is_alive_non_header;
 419   }
 420   void set_is_alive_non_header(BoolObjectClosure* is_alive_non_header) {
 421     _is_alive_non_header = is_alive_non_header;
 422   }
 423 
 424   // get and set span
 425   MemRegion span()                   { return _span; }
 426   void      set_span(MemRegion span) { _span = span; }
 427 
 428   // start and stop weak ref discovery
 429   void enable_discovery(bool check_no_refs = true);
 430   void disable_discovery()  { _discovering_refs = false; }
 431   bool discovery_enabled()  { return _discovering_refs;  }
 432 
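With the reworked signature above, the verify_disabled parameter is gone and check_no_refs defaults to true, so call sites simplify accordingly (sketch; rp is an assumed ReferenceProcessor*):

    rp->enable_discovery();                        // default: verify lists are empty
    rp->enable_discovery(false /*check_no_refs*/); // skip the emptiness check
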
 433   // whether discovery is atomic wrt other collectors
 434   bool discovery_is_atomic() const { return _discovery_is_atomic; }
 435   void set_atomic_discovery(bool atomic) { _discovery_is_atomic = atomic; }
 436 
 437   // whether the JDK in which we are embedded is a pre-4965777 JDK,
 438   // and thus whether or not it uses the discovered field to chain
 439   // the entries in the pending list.
 440   static bool pending_list_uses_discovered_field() {
 441     return _pending_list_uses_discovered_field;
 442   }
 443 
 444   // whether discovery is done by multiple threads simultaneously
 445   bool discovery_is_mt() const { return _discovery_is_mt; }
 446   void set_mt_discovery(bool mt) { _discovery_is_mt = mt; }
 447 
 448   // Whether we are in a phase when _processing_ is MT.
 449   bool processing_is_mt() const { return _processing_is_mt; }


 487   // clear the discovered lists (unlinking each entry).
 488   void clear_discovered_references() PRODUCT_RETURN;
 489 };
 490 
 491 // A utility class to disable reference discovery in
 492 // the scope which contains it, for given ReferenceProcessor.
 493 class NoRefDiscovery: StackObj {
 494  private:
 495   ReferenceProcessor* _rp;
 496   bool _was_discovering_refs;
 497  public:
 498   NoRefDiscovery(ReferenceProcessor* rp) : _rp(rp) {
 499     _was_discovering_refs = _rp->discovery_enabled();
 500     if (_was_discovering_refs) {
 501       _rp->disable_discovery();
 502     }
 503   }
 504 
 505   ~NoRefDiscovery() {
 506     if (_was_discovering_refs) {
 507       _rp->enable_discovery(false /*check_no_refs*/);
 508     }
 509   }
 510 };
 511 
 512 
 513 // A utility class to temporarily mutate the span of the
 514 // given ReferenceProcessor in the scope that contains it.
 515 class ReferenceProcessorSpanMutator: StackObj {
 516  private:
 517   ReferenceProcessor* _rp;
 518   MemRegion           _saved_span;
 519 
 520  public:
 521   ReferenceProcessorSpanMutator(ReferenceProcessor* rp,
 522                                 MemRegion span):
 523     _rp(rp) {
 524     _saved_span = _rp->span();
 525     _rp->set_span(span);
 526   }
 527 

