
src/hotspot/share/oops/klass.hpp

klass is_unloading no cache

639   // wanting to reduce the initial scope of this optimization. There
640   // are potential problems in setting the bias pattern for
641   // JVM-internal oops.
642   inline void set_prototype_header(markOop header);
643   static ByteSize prototype_header_offset() { return in_ByteSize(offset_of(Klass, _prototype_header)); }
644 
645   int  biased_lock_revocation_count() const { return (int) _biased_lock_revocation_count; }
646   // Atomically increments biased_lock_revocation_count and returns updated value
647   int atomic_incr_biased_lock_revocation_count();
648   void set_biased_lock_revocation_count(int val) { _biased_lock_revocation_count = (jint) val; }
649   jlong last_biased_lock_bulk_revocation_time() { return _last_biased_lock_bulk_revocation_time; }
650   void  set_last_biased_lock_bulk_revocation_time(jlong cur_time) { _last_biased_lock_bulk_revocation_time = cur_time; }
651 
652   JFR_ONLY(DEFINE_TRACE_ID_METHODS;)
653 
654   virtual void metaspace_pointers_do(MetaspaceClosure* iter);
655   virtual MetaspaceObj::Type type() const { return ClassType; }
656 
657   // Iff the class loader (or mirror for unsafe anonymous classes) is alive the
658   // Klass is considered alive.  Has already been marked as unloading.
659   bool is_loader_alive() const { return !class_loader_data()->is_unloading(); }
660 
661   // Load the klass's holder as a phantom. This is useful when a weak Klass
662   // pointer has been "peeked" and then must be kept alive before it may
663   // be used safely.
664   oop holder_phantom() const;
665 
666   static void clean_weak_klass_links(bool unloading_occurred, bool clean_alive_klasses = true);
667   static void clean_subklass_tree() {
668     clean_weak_klass_links(/*unloading_occurred*/ true , /* clean_alive_klasses */ false);
669   }
670 
671   // GC specific object visitors
672   //
673 #if INCLUDE_PARALLELGC
674   // Parallel Scavenge
675   virtual void oop_ps_push_contents(  oop obj, PSPromotionManager* pm)   = 0;
676   // Parallel Compact
677   virtual void oop_pc_follow_contents(oop obj, ParCompactionManager* cm) = 0;
678   virtual void oop_pc_update_pointers(oop obj, ParCompactionManager* cm) = 0;
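Line 647 above only declares atomic_incr_biased_lock_revocation_count(); its body is defined elsewhere in the runtime sources. The comment on line 646 states the contract: atomically bump the per-Klass revocation counter and hand back the updated value. Below is a minimal standalone sketch of that contract using std::atomic; it is illustration only, not HotSpot code, and the type KlassCounterModel is invented for the example.

// Standalone sketch, not HotSpot code: models the "increment atomically and
// return the updated value" contract of atomic_incr_biased_lock_revocation_count().
#include <atomic>
#include <cstdio>

struct KlassCounterModel {
  std::atomic<int> _biased_lock_revocation_count{0};

  int atomic_incr_biased_lock_revocation_count() {
    // fetch_add returns the previous value, so add 1 to report the updated one.
    return _biased_lock_revocation_count.fetch_add(1, std::memory_order_relaxed) + 1;
  }
};

int main() {
  KlassCounterModel k;
  for (int i = 0; i < 3; i++) {
    std::printf("revocation count after increment: %d\n",
                k.atomic_incr_biased_lock_revocation_count());
  }
  return 0;
}

Returning the post-increment value, rather than incrementing and then re-reading the field, matters when revocations race: each caller needs the count its own increment produced, not whatever some later increment left behind.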

639   // wanting to reduce the initial scope of this optimization. There
640   // are potential problems in setting the bias pattern for
641   // JVM-internal oops.
642   inline void set_prototype_header(markOop header);
643   static ByteSize prototype_header_offset() { return in_ByteSize(offset_of(Klass, _prototype_header)); }
644 
645   int  biased_lock_revocation_count() const { return (int) _biased_lock_revocation_count; }
646   // Atomically increments biased_lock_revocation_count and returns updated value
647   int atomic_incr_biased_lock_revocation_count();
648   void set_biased_lock_revocation_count(int val) { _biased_lock_revocation_count = (jint) val; }
649   jlong last_biased_lock_bulk_revocation_time() { return _last_biased_lock_bulk_revocation_time; }
650   void  set_last_biased_lock_bulk_revocation_time(jlong cur_time) { _last_biased_lock_bulk_revocation_time = cur_time; }
651 
652   JFR_ONLY(DEFINE_TRACE_ID_METHODS;)
653 
654   virtual void metaspace_pointers_do(MetaspaceClosure* iter);
655   virtual MetaspaceObj::Type type() const { return ClassType; }
656 
657   // Iff the class loader (or mirror for unsafe anonymous classes) is alive the
658   // Klass is considered alive.  Has already been marked as unloading.
659   bool is_loader_alive() const { return class_loader_data()->is_alive(); }
660 
661   // Load the klass's holder as a phantom. This is useful when a weak Klass
662   // pointer has been "peeked" and then must be kept alive before it may
663   // be used safely.
664   oop holder_phantom() const;
665 
666   static void clean_weak_klass_links(bool unloading_occurred, bool clean_alive_klasses = true);
667   static void clean_subklass_tree() {
668     clean_weak_klass_links(/*unloading_occurred*/ true , /* clean_alive_klasses */ false);
669   }
670 
671   // GC specific object visitors
672   //
673 #if INCLUDE_PARALLELGC
674   // Parallel Scavenge
675   virtual void oop_ps_push_contents(  oop obj, PSPromotionManager* pm)   = 0;
676   // Parallel Compact
677   virtual void oop_pc_follow_contents(oop obj, ParCompactionManager* cm) = 0;
678   virtual void oop_pc_update_pointers(oop obj, ParCompactionManager* cm) = 0;
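The two panes differ only at line 659: one side computes is_loader_alive() as !class_loader_data()->is_unloading(), the other as class_loader_data()->is_alive(). The sketch below is a rough standalone model of the distinction the change title ("no cache") appears to point at, under the assumption that is_unloading() reads a flag recorded once an unloading decision has been made while is_alive() re-checks the loader's holder on every call. Only the two method names come from the diff; the rest (Holder, LoaderDataModel, mark_unloading) is invented for the illustration and is plain C++, not HotSpot's ClassLoaderData.

// Standalone model only, not HotSpot code. Contrasts a liveness check that is
// recomputed from a weakly held holder on every query with one derived from a
// flag published once unloading has been decided.
#include <atomic>
#include <cstdio>
#include <memory>

struct Holder {};  // stands in for the weakly reachable holder object

struct LoaderDataModel {
  std::weak_ptr<Holder> _holder;            // liveness recomputed from this
  std::atomic<bool>     _unloading{false};  // decision published by an "unloading pass"

  bool is_alive() const     { return !_holder.expired(); }                          // recompute now
  bool is_unloading() const { return _unloading.load(std::memory_order_acquire); }  // read the flag
  void mark_unloading()     { _unloading.store(true, std::memory_order_release); }
};

int main() {
  auto holder = std::make_shared<Holder>();
  LoaderDataModel cld;
  cld._holder = holder;

  std::printf("is_alive=%d  !is_unloading=%d\n", cld.is_alive(), !cld.is_unloading());

  holder.reset();  // holder dies: is_alive() notices immediately,
                   // !is_unloading() only once the flag has been published.
  std::printf("is_alive=%d  !is_unloading=%d\n", cld.is_alive(), !cld.is_unloading());

  cld.mark_unloading();
  std::printf("is_alive=%d  !is_unloading=%d\n", cld.is_alive(), !cld.is_unloading());
  return 0;
}

The middle print shows the window where the two queries can disagree (holder already dead, unloading not yet recorded); how a Klass liveness query should answer during that window is exactly what the one-line change at 659 toggles.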