536 inline void set_prototype_header(markOop header);
537 static ByteSize prototype_header_offset() { return in_ByteSize(offset_of(Klass, _prototype_header)); }
538
// Biased-locking bookkeeping: per-klass revocation count and the timestamp
// of the last bulk revocation (both backed by fields of this class).
539 int biased_lock_revocation_count() const { return (int) _biased_lock_revocation_count; }
540 // Atomically increments biased_lock_revocation_count and returns updated value
541 int atomic_incr_biased_lock_revocation_count();
542 void set_biased_lock_revocation_count(int val) { _biased_lock_revocation_count = (jint) val; }
543 jlong last_biased_lock_bulk_revocation_time() { return _last_biased_lock_bulk_revocation_time; }
544 void set_last_biased_lock_bulk_revocation_time(jlong cur_time) { _last_biased_lock_bulk_revocation_time = cur_time; }
545
// NOTE(review): macro presumably expands to event-tracing members for this
// klass; definition lives in the trace macro headers -- confirm there.
546 TRACE_DEFINE_KLASS_METHODS;
547
548 // garbage collection support
// Applies the closure to the oops held directly by this Klass.
549 virtual void oops_do(OopClosure* cl);
550
551 // Iff the class loader (or mirror for anonymous classes) is alive the
552 // Klass is considered alive.
553 // The is_alive closure passed in depends on the Garbage Collector used.
554 bool is_loader_alive(BoolObjectClosure* is_alive);
555
// Prunes weak klass links for klasses found dead by the is_alive closure.
556 static void clean_weak_klass_links(BoolObjectClosure* is_alive);
557
558 // iterators
// Pure virtual: each concrete Klass kind supplies oop-field iteration over
// obj. NOTE(review): the int return looks like the object's size -- confirm
// against the concrete implementations; not derivable from this declaration.
559 virtual int oop_oop_iterate(oop obj, ExtendedOopClosure* blk) = 0;
// Version-suffixed variant; defaults to plain forward iteration.
560 virtual int oop_oop_iterate_v(oop obj, ExtendedOopClosure* blk) {
561 return oop_oop_iterate(obj, blk);
562 }
563
564 #if INCLUDE_ALL_GCS
565 // In case we don't have a specialized backward scanner use forward
566 // iteration.
567 virtual int oop_oop_iterate_backwards_v(oop obj, ExtendedOopClosure* blk) {
568 return oop_oop_iterate_v(obj, blk);
569 }
570 #endif // INCLUDE_ALL_GCS
571
572 // Iterates "blk" over all the oops in "obj" (of type "this") within "mr".
573 // (I don't see why the _m should be required, but without it the Solaris
574 // C++ gives warning messages about overridings of the "oop_oop_iterate"
575 // defined above "hiding" this virtual function. (DLD, 6/20/00)) */
576 virtual int oop_oop_iterate_m(oop obj, ExtendedOopClosure* blk, MemRegion mr) = 0;
643 #ifndef PRODUCT
// Debug-only sanity checks for dispatch-table indices.
644 bool verify_vtable_index(int index);
645 bool verify_itable_index(int index);
646 #endif
647
// Verifies the given oop against this klass, reporting to the stream.
648 virtual void oop_verify_on(oop obj, outputStream* st);
649
// Compressed class-pointer support: NULL tests plus encode/decode between a
// full Klass* and the narrowKlass form stored in object headers.
650 static bool is_null(narrowKlass obj);
651 static bool is_null(Klass* obj);
652
653 // klass encoding for klass pointer in objects.
// The *_not_null variants skip the NULL check; callers must guarantee v != NULL.
654 static narrowKlass encode_klass_not_null(Klass* v);
655 static narrowKlass encode_klass(Klass* v);
656
657 static Klass* decode_klass_not_null(narrowKlass v);
658 static Klass* decode_klass(narrowKlass v);
659
660 private:
661 // barriers used by klass_oop_store
// NOTE(review): the pre-barrier takes a void* slot here; an oop* would be
// more type-safe -- confirm this signature matches the .cpp definition.
662 void klass_update_barrier_set(oop v);
663 void klass_update_barrier_set_pre(void* p, oop v);
664 };
665
666 #endif // SHARE_VM_OOPS_KLASS_HPP
|
536 inline void set_prototype_header(markOop header);
537 static ByteSize prototype_header_offset() { return in_ByteSize(offset_of(Klass, _prototype_header)); }
538
// Biased-locking bookkeeping: per-klass revocation count and the timestamp
// of the last bulk revocation (both backed by fields of this class).
539 int biased_lock_revocation_count() const { return (int) _biased_lock_revocation_count; }
540 // Atomically increments biased_lock_revocation_count and returns updated value
541 int atomic_incr_biased_lock_revocation_count();
542 void set_biased_lock_revocation_count(int val) { _biased_lock_revocation_count = (jint) val; }
543 jlong last_biased_lock_bulk_revocation_time() { return _last_biased_lock_bulk_revocation_time; }
544 void set_last_biased_lock_bulk_revocation_time(jlong cur_time) { _last_biased_lock_bulk_revocation_time = cur_time; }
545
// NOTE(review): macro presumably expands to event-tracing members for this
// klass; definition lives in the trace macro headers -- confirm there.
546 TRACE_DEFINE_KLASS_METHODS;
547
548 // garbage collection support
// Applies the closure to the oops held directly by this Klass.
549 virtual void oops_do(OopClosure* cl);
550
551 // Iff the class loader (or mirror for anonymous classes) is alive the
552 // Klass is considered alive.
553 // The is_alive closure passed in depends on the Garbage Collector used.
554 bool is_loader_alive(BoolObjectClosure* is_alive);
555
// Prunes weak klass links for klasses found dead by the is_alive closure.
// When clean_alive_klasses is false, only the subklass/sibling tree is
// pruned and live klasses themselves are left untouched.
556 static void clean_weak_klass_links(BoolObjectClosure* is_alive, bool clean_alive_klasses = true);
// Convenience wrapper: prune only the subklass tree, skipping alive klasses.
557 static void clean_subklass_tree(BoolObjectClosure* is_alive) {
558 clean_weak_klass_links(is_alive, false /* clean_alive_klasses */);
559 }
560
561 // iterators
// Pure virtual: each concrete Klass kind supplies oop-field iteration over
// obj. NOTE(review): the int return looks like the object's size -- confirm
// against the concrete implementations; not derivable from this declaration.
562 virtual int oop_oop_iterate(oop obj, ExtendedOopClosure* blk) = 0;
// Version-suffixed variant; defaults to plain forward iteration.
563 virtual int oop_oop_iterate_v(oop obj, ExtendedOopClosure* blk) {
564 return oop_oop_iterate(obj, blk);
565 }
566
567 #if INCLUDE_ALL_GCS
568 // In case we don't have a specialized backward scanner use forward
569 // iteration.
570 virtual int oop_oop_iterate_backwards_v(oop obj, ExtendedOopClosure* blk) {
571 return oop_oop_iterate_v(obj, blk);
572 }
573 #endif // INCLUDE_ALL_GCS
574
575 // Iterates "blk" over all the oops in "obj" (of type "this") within "mr".
576 // (I don't see why the _m should be required, but without it the Solaris
577 // C++ gives warning messages about overridings of the "oop_oop_iterate"
578 // defined above "hiding" this virtual function. (DLD, 6/20/00)) */
579 virtual int oop_oop_iterate_m(oop obj, ExtendedOopClosure* blk, MemRegion mr) = 0;
646 #ifndef PRODUCT
// Debug-only sanity checks for dispatch-table indices.
647 bool verify_vtable_index(int index);
648 bool verify_itable_index(int index);
649 #endif
650
// Verifies the given oop against this klass, reporting to the stream.
651 virtual void oop_verify_on(oop obj, outputStream* st);
652
// Compressed class-pointer support: NULL tests plus encode/decode between a
// full Klass* and the narrowKlass form stored in object headers.
653 static bool is_null(narrowKlass obj);
654 static bool is_null(Klass* obj);
655
656 // klass encoding for klass pointer in objects.
// The *_not_null variants skip the NULL check; callers must guarantee v != NULL.
657 static narrowKlass encode_klass_not_null(Klass* v);
658 static narrowKlass encode_klass(Klass* v);
659
660 static Klass* decode_klass_not_null(narrowKlass v);
661 static Klass* decode_klass(narrowKlass v);
662
663 private:
664 // barriers used by klass_oop_store
// Post-store barrier for v, and pre-barrier for the oop slot p before it is
// overwritten with v.
665 void klass_update_barrier_set(oop v);
666 void klass_update_barrier_set_pre(oop* p, oop v);
667 };
668
669 #endif // SHARE_VM_OOPS_KLASS_HPP
|