--- old/src/share/vm/oops/instanceKlass.hpp	2015-11-05 21:52:44.000000000 +0300
+++ new/src/share/vm/oops/instanceKlass.hpp	2015-11-05 21:52:44.000000000 +0300
@@ -104,6 +104,68 @@
   uint  _count;
 };
 
+// Utility class to manipulate an nmethod dependency context.
+// The context consists of an nmethodBucket* (the head of a linked list)
+// and a boolean flag (does the list contain stale entries?). The structure is
+// encoded as an intptr_t: the lower bit is used for the flag. This is possible
+// because nmethodBucket* is aligned - the structure is malloc'ed in the C heap.
+// A dependency context can be attached either to an InstanceKlass (_dep_context field)
+// or to a CallSiteContext oop for call_site_target dependencies (see javaClasses.hpp).
+// The DependencyContext class operates on a location which holds an intptr_t value.
+class DependencyContext : public StackObj {
+  friend class VMStructs;
+  friend class TestDependencyContext;
+ private:
+  enum TagBits { _has_stale_entries_bit = 1, _has_stale_entries_mask = 1 };
+
+  intptr_t* _dependency_context_addr;
+
+  void set_dependencies(nmethodBucket* b) {
+    assert((intptr_t(b) & _has_stale_entries_mask) == 0, "should be aligned");
+    if (has_stale_entries()) {
+      *_dependency_context_addr = intptr_t(b) | _has_stale_entries_mask;
+    } else {
+      *_dependency_context_addr = intptr_t(b);
+    }
+  }
+
+  void set_has_stale_entries(bool b) {
+    if (b) {
+      *_dependency_context_addr |= _has_stale_entries_mask;
+    } else {
+      *_dependency_context_addr &= ~_has_stale_entries_mask;
+    }
+  }
+
+  nmethodBucket* dependencies() {
+    intptr_t value = *_dependency_context_addr;
+    return (nmethodBucket*) (value & ~_has_stale_entries_mask);
+  }
+
+  bool has_stale_entries() const {
+    intptr_t value = *_dependency_context_addr;
+    return (value & _has_stale_entries_mask) != 0;
+  }
+
+ public:
+  DependencyContext(intptr_t* addr) : _dependency_context_addr(addr) {}
+
+  static const intptr_t EMPTY = 0; // dependencies = NULL, has_stale_entries = false
+
+  int  mark_dependent_nmethods(DepChange& changes);
+  void add_dependent_nmethod(nmethod* nm, bool expunge_stale_entries = false);
+  void remove_dependent_nmethod(nmethod* nm, bool expunge_stale_entries = false);
+  int  remove_all_dependents();
+
+  void expunge_stale_entries();
+
+#ifndef PRODUCT
+  void print_dependent_nmethods(bool verbose);
+  bool is_dependent_nmethod(nmethod* nm);
+  bool find_stale_entries();
+#endif //PRODUCT
+};
+
 struct JvmtiCachedClassFileData;
 
 class InstanceKlass: public Klass {
@@ -198,7 +260,6 @@
   // _is_marked_dependent can be set concurrently, thus cannot be part of the
   // _misc_flags.
   bool            _is_marked_dependent;  // used for marking during flushing and deoptimization
-  bool            _has_unloaded_dependent;
 
   // The low two bits of _misc_flags contains the kind field.
   // This can be used to quickly discriminate among the four kinds of
@@ -235,7 +296,7 @@
   MemberNameTable* _member_names;        // Member names
   JNIid*          _jni_ids;              // First JNI identifier for static fields in this class
   jmethodID*      _methods_jmethod_ids;  // jmethodIDs corresponding to method_idnum, or NULL if none
-  nmethodBucket*  _dependencies;         // list of dependent nmethods
+  intptr_t        _dep_context;          // packed DependencyContext structure
   nmethod*        _osr_nmethods_head;    // Head of list of on-stack replacement nmethods for this class
   BreakpointInfo* _breakpoints;          // bpt lists, managed by Method*
   // Linked instanceKlasses of previous versions
@@ -468,9 +529,6 @@
   bool is_marked_dependent() const         { return _is_marked_dependent; }
   void set_is_marked_dependent(bool value) { _is_marked_dependent = value; }
 
-  bool has_unloaded_dependent() const         { return _has_unloaded_dependent; }
-  void set_has_unloaded_dependent(bool value) { _has_unloaded_dependent = value; }
-
   // initialization (virtuals from Klass)
   bool should_be_initialized() const;  // means that initialize should be called
   void initialize(TRAPS);
@@ -835,7 +893,7 @@
   JNIid* jni_id_for(int offset);
 
   // maintenance of deoptimization dependencies
-  int mark_dependent_nmethods(DepChange& changes);
+  int mark_dependent_nmethods(DepChange& changes);
   void add_dependent_nmethod(nmethod* nm);
   void remove_dependent_nmethod(nmethod* nm, bool delete_immediately);
 
@@ -1024,7 +1082,6 @@
   void clean_weak_instanceklass_links(BoolObjectClosure* is_alive);
   void clean_implementors_list(BoolObjectClosure* is_alive);
   void clean_method_data(BoolObjectClosure* is_alive);
-  void clean_dependent_nmethods();
 
   // Explicit metaspace deallocation of fields
   // For RedefineClasses and class file parsing errors, we need to deallocate
@@ -1336,27 +1393,15 @@
   nmethodBucket* _next;
 
  public:
-  nmethodBucket(nmethod* nmethod, nmethodBucket* next) {
-    _nmethod = nmethod;
-    _next = next;
-    _count = 1;
-  }
+  nmethodBucket(nmethod* nmethod, nmethodBucket* next) :
+    _nmethod(nmethod), _next(next), _count(1) {}
+
   int count()                             { return _count; }
   int increment()                         { _count += 1; return _count; }
   int decrement();
   nmethodBucket* next()                   { return _next; }
   void set_next(nmethodBucket* b)         { _next = b; }
   nmethod* get_nmethod()                  { return _nmethod; }
-
-  static int mark_dependent_nmethods(nmethodBucket* deps, DepChange& changes);
-  static nmethodBucket* add_dependent_nmethod(nmethodBucket* deps, nmethod* nm);
-  static bool remove_dependent_nmethod(nmethodBucket** deps, nmethod* nm, bool delete_immediately);
-  static bool remove_dependent_nmethod(nmethodBucket* deps, nmethod* nm);
-  static nmethodBucket* clean_dependent_nmethods(nmethodBucket* deps);
-#ifndef PRODUCT
-  static void print_dependent_nmethods(nmethodBucket* deps, bool verbose);
-  static bool is_dependent_nmethod(nmethodBucket* deps, nmethod* nm);
-#endif //PRODUCT
 };
 
 // An iterator that's used to access the inner classes indices in the
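
Note (not part of the patch): below is a minimal standalone C++ sketch of the low-bit tagging scheme described in the new DependencyContext comment - a pointer to a malloc'ed (hence pointer-aligned) node and a boolean flag packed into a single intptr_t, with the lowest bit carrying the flag. The DemoBucket and TaggedList names and the main() harness are illustrative stand-ins, not HotSpot code.

// Standalone sketch of the tagging scheme used by DependencyContext:
// the list head pointer and a "has stale entries" flag share one intptr_t.
#include <cassert>
#include <cstdint>
#include <cstdio>
#include <cstdlib>

struct DemoBucket {            // stand-in for nmethodBucket
  DemoBucket* next;
  int         count;
};

class TaggedList {
 private:
  enum { _flag_mask = 1 };     // analogous to _has_stale_entries_mask
  intptr_t _value;             // analogous to the intptr_t slot the context points at

 public:
  TaggedList() : _value(0) {}  // analogous to DependencyContext::EMPTY

  // Mask off the tag bit to recover the list head.
  DemoBucket* head() const { return (DemoBucket*)(_value & ~(intptr_t)_flag_mask); }
  // The flag lives in the lowest bit of the same word.
  bool        flag() const { return (_value & _flag_mask) != 0; }

  void set_head(DemoBucket* b) {
    // malloc'ed memory is suitably aligned, so the low bit of the pointer is free
    assert((intptr_t(b) & _flag_mask) == 0 && "pointer must be aligned");
    _value = intptr_t(b) | (flag() ? _flag_mask : 0);   // preserve the flag
  }
  void set_flag(bool on) {
    if (on) _value |= _flag_mask;
    else    _value &= ~(intptr_t)_flag_mask;            // pointer bits are untouched
  }
};

int main() {
  TaggedList list;
  DemoBucket* b = (DemoBucket*)malloc(sizeof(DemoBucket));
  b->next = NULL; b->count = 1;

  list.set_head(b);
  list.set_flag(true);         // mark the list as containing stale entries
  printf("head=%p flag=%d\n", (void*)list.head(), (int)list.flag());

  list.set_flag(false);        // clearing the flag leaves the head pointer intact
  assert(list.head() == b);
  free(b);
  return 0;
}

Packing the flag into the same word as the list head is what lets the whole context live in a single intptr_t slot, either the new InstanceKlass::_dep_context field or the CallSiteContext location mentioned in the comment.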