1 /* 2 * Copyright (c) 1997, 2018, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 22 * 23 */ 24 25 #ifndef SHARE_VM_CODE_CODECACHE_HPP 26 #define SHARE_VM_CODE_CODECACHE_HPP 27 28 #include "code/codeBlob.hpp" 29 #include "code/nmethod.hpp" 30 #include "memory/allocation.hpp" 31 #include "memory/heap.hpp" 32 #include "oops/instanceKlass.hpp" 33 #include "oops/oopsHierarchy.hpp" 34 #include "runtime/mutexLocker.hpp" 35 36 // The CodeCache implements the code cache for various pieces of generated 37 // code, e.g., compiled java methods, runtime stubs, transition frames, etc. 38 // The entries in the CodeCache are all CodeBlob's. 39 40 // -- Implementation -- 41 // The CodeCache consists of one or more CodeHeaps, each of which contains 42 // CodeBlobs of a specific CodeBlobType. 
Currently heaps for the following 43 // types are available: 44 // - Non-nmethods: Non-nmethods like Buffers, Adapters and Runtime Stubs 45 // - Profiled nmethods: nmethods that are profiled, i.e., those 46 // executed at level 2 or 3 47 // - Non-Profiled nmethods: nmethods that are not profiled, i.e., those 48 // executed at level 1 or 4 and native methods 49 // - All: Used for code of all types if code cache segmentation is disabled. 50 // 51 // In the rare case of the non-nmethod code heap getting full, non-nmethod code 52 // will be stored in the non-profiled code heap as a fallback solution. 53 // 54 // Depending on the availability of compilers and TieredCompilation there 55 // may be fewer heaps. The size of the code heaps depends on the values of 56 // ReservedCodeCacheSize, NonProfiledCodeHeapSize and ProfiledCodeHeapSize 57 // (see CodeCache::heap_available(..) and CodeCache::initialize_heaps(..) 58 // for details). 59 // 60 // Code cache segmentation is controlled by the flag SegmentedCodeCache. 61 // If turned off, all code types are stored in a single code heap. By default 62 // code cache segmentation is turned on if TieredCompilation is enabled and 63 // ReservedCodeCacheSize >= 240 MB. 64 // 65 // All methods of the CodeCache accepting a CodeBlobType only apply to 66 // CodeBlobs of the given type. For example, iteration over the 67 // CodeBlobs of a specific type can be done by using CodeCache::first_blob(..) 68 // and CodeCache::next_blob(..) and providing the corresponding CodeBlobType. 69 // 70 // IMPORTANT: If you add new CodeHeaps to the code cache or change the 71 // existing ones, make sure to adapt the dtrace scripts (jhelper.d) for 72 // Solaris and BSD. 
class OopClosure;
class KlassDepChange;
class ShenandoahParallelCodeHeapIterator;

// AllStatic holder for the code cache: owns the CodeHeaps, allocation,
// lookup, iteration, GC support, and deoptimization entry points.
class CodeCache : AllStatic {
  friend class VMStructs;
  friend class JVMCIVMStructs;
  template <class T, class Filter> friend class CodeBlobIterator;
  friend class WhiteBox;
  friend class CodeCacheLoader;
  friend class ShenandoahParallelCodeHeapIterator;
 private:
  // CodeHeaps of the cache
  static GrowableArray<CodeHeap*>* _heaps;            // All heaps (see heaps())
  static GrowableArray<CodeHeap*>* _compiled_heaps;   // Heaps holding CompiledMethods (see compiled_heaps())
  static GrowableArray<CodeHeap*>* _nmethod_heaps;    // Heaps holding nmethods (see nmethod_heaps())
  static GrowableArray<CodeHeap*>* _allocable_heaps;  // NOTE(review): presumably the heaps allocate() may use — confirm in codeCache.cpp

  static address _low_bound;                          // Lower bound of CodeHeap addresses
  static address _high_bound;                         // Upper bound of CodeHeap addresses
  static int _number_of_nmethods_with_dependencies;   // Total number of nmethods with dependencies
  static bool _needs_cache_clean;                     // True if inline caches of the nmethods need to be flushed
  static nmethod* _scavenge_root_nmethods;            // linked via nm->scavenge_root_link()

  static void mark_scavenge_root_nmethods() PRODUCT_RETURN;
  static void verify_perm_nmethods(CodeBlobClosure* f_or_null) PRODUCT_RETURN;

  // CodeHeap management
  static void initialize_heaps();                             // Initializes the CodeHeaps
  // Check the code heap sizes set by the user via command line
  static void check_heap_sizes(size_t non_nmethod_size, size_t profiled_size, size_t non_profiled_size, size_t cache_size, bool all_set);
  // Creates a new heap with the given name and size, containing CodeBlobs of the given type
  static void add_heap(ReservedSpace rs, const char* name, int code_blob_type);
  static CodeHeap* get_code_heap_containing(void* p);         // Returns the CodeHeap containing the given pointer, or NULL
  static CodeHeap* get_code_heap(const CodeBlob* cb);         // Returns the CodeHeap for the given CodeBlob
  static CodeHeap* get_code_heap(int code_blob_type);         // Returns the CodeHeap for the given CodeBlobType
  // Returns the name of the VM option to set the size of the corresponding CodeHeap
  static const char* get_code_heap_flag_name(int code_blob_type);
  static size_t page_size(bool aligned = true);               // Returns the page size used by the CodeCache
  static ReservedCodeSpace reserve_heap_memory(size_t size);  // Reserves one continuous chunk of memory for the CodeHeaps

  // Iteration
  static CodeBlob* first_blob(CodeHeap* heap);                // Returns the first CodeBlob on the given CodeHeap
  static CodeBlob* first_blob(int code_blob_type);            // Returns the first CodeBlob of the given type
  static CodeBlob* next_blob(CodeHeap* heap, CodeBlob* cb);   // Returns the next CodeBlob on the given CodeHeap

  static size_t bytes_allocated_in_freelists();
  static int allocated_segments();
  static size_t freelists_length();

  // Scavenge-root list maintenance (list head is _scavenge_root_nmethods)
  static void set_scavenge_root_nmethods(nmethod* nm) { _scavenge_root_nmethods = nm; }
  static void prune_scavenge_root_nmethods();
  static void unlink_scavenge_root_nmethod(nmethod* nm, nmethod* prev);

  // Make private to prevent unsafe calls.  Not all CodeBlob*'s are embedded in a CodeHeap.
  static bool contains(CodeBlob *p) { fatal("don't call me!"); return false; }

 public:
  // Initialization
  static void initialize();

  // Comparator used to keep the heap arrays sorted (see add_heap)
  static int code_heap_compare(CodeHeap* const &lhs, CodeHeap* const &rhs);

  static void add_heap(CodeHeap* heap);
  static const GrowableArray<CodeHeap*>* heaps() { return _heaps; }
  static const GrowableArray<CodeHeap*>* compiled_heaps() { return _compiled_heaps; }
  static const GrowableArray<CodeHeap*>* nmethod_heaps() { return _nmethod_heaps; }

  // Allocation/administration
  static CodeBlob* allocate(int size, int code_blob_type, int orig_code_blob_type = CodeBlobType::All); // allocates a new CodeBlob
  static void commit(CodeBlob* cb);                        // called when the allocated CodeBlob has been filled
  static int alignment_unit();                             // guaranteed alignment of all CodeBlobs
  static int alignment_offset();                           // guaranteed offset of first CodeBlob byte within alignment unit (i.e., allocation header)
  static void free(CodeBlob* cb);                          // frees a CodeBlob
  static void free_unused_tail(CodeBlob* cb, size_t used); // frees the unused tail of a CodeBlob (only used by TemplateInterpreter::initialize())
  static bool contains(void *p);                           // returns whether p is included
  static bool contains(nmethod* nm);                       // returns whether nm is included
  static void blobs_do(void f(CodeBlob* cb));              // iterates over all CodeBlobs
  static void blobs_do(CodeBlobClosure* f);                // iterates over all CodeBlobs
  static void nmethods_do(void f(nmethod* nm));            // iterates over all nmethods
  static void metadata_do(void f(Metadata* m));            // iterates over metadata in alive nmethods

  // Lookup
  static CodeBlob* find_blob(void* start);                 // Returns the CodeBlob containing the given address
  static CodeBlob* find_blob_unsafe(void* start);          // Same as find_blob but does not fail if looking up a zombie method
  static nmethod* find_nmethod(void* start);               // Returns the nmethod containing the given address
  static CompiledMethod* find_compiled(void* start);       // Returns the CompiledMethod containing the given address

  // Counters; the code_blob_type overloads restrict the count to that heap
  static int blob_count();                                 // Returns the total number of CodeBlobs in the cache
  static int blob_count(int code_blob_type);
  static int adapter_count();                              // Returns the total number of Adapters in the cache
  static int adapter_count(int code_blob_type);
  static int nmethod_count();                              // Returns the total number of nmethods in the cache
  static int nmethod_count(int code_blob_type);

  // GC support
  static void gc_epilogue();
  static void gc_prologue();
  static void verify_oops();
  // If any oops are not marked this method unloads (i.e., breaks root links
  // to) any unmarked codeBlobs in the cache.  Sets "marked_for_unloading"
  // to "true" iff some code got unloaded.
  // "unloading_occurred" controls whether metadata should be cleaned because of class unloading.
  static void do_unloading(BoolObjectClosure* is_alive, bool unloading_occurred);
  static void asserted_non_scavengable_nmethods_do(CodeBlobClosure* f = NULL) PRODUCT_RETURN;

  // Apply f to every live code blob in scavengable nmethods. Prune nmethods
  // from the list of scavengable nmethods if f->fix_relocations() and a nmethod
  // no longer has scavengable oops.  If f->fix_relocations(), then f must copy
  // objects to their new location immediately to avoid fixing nmethods on the
  // basis of the old object locations.
  static void scavenge_root_nmethods_do(CodeBlobToOopClosure* f);

  static nmethod* scavenge_root_nmethods() { return _scavenge_root_nmethods; }
  // register_scavenge_root_nmethod() conditionally adds the nmethod to the list
  // if it is not already on the list and has a scavengeable root
  static void register_scavenge_root_nmethod(nmethod* nm);
  static void verify_scavenge_root_nmethod(nmethod* nm);
  static void add_scavenge_root_nmethod(nmethod* nm);
  static void drop_scavenge_root_nmethod(nmethod* nm);

  // Printing/debugging
  static void print();                             // prints summary
  static void print_internals();
  static void print_memory_overhead();
  static void verify();                            // verifies the code cache
  static void print_trace(const char* event, CodeBlob* cb, int size = 0) PRODUCT_RETURN;
  static void print_summary(outputStream* st, bool detailed = true); // Prints a summary of the code cache usage
  static void log_state(outputStream* st);
  static const char* get_code_heap_name(int code_blob_type) { return (heap_available(code_blob_type) ? get_code_heap(code_blob_type)->name() : "Unused"); }
  static void report_codemem_full(int code_blob_type, bool print);

  // Dcmd (Diagnostic commands)
  static void print_codelist(outputStream* st);
  static void print_layout(outputStream* st);

  // The full limits of the codeCache
  static address low_bound() { return _low_bound; }
  static address low_bound(int code_blob_type);
  static address high_bound() { return _high_bound; }
  static address high_bound(int code_blob_type);

  // Have to use far call instructions to call this pc.
  static bool is_far_target(address pc);

  // Profiling
  static size_t capacity();
  static size_t unallocated_capacity(int code_blob_type);
  static size_t unallocated_capacity();
  static size_t max_capacity();

  static double reverse_free_ratio(int code_blob_type);

  static bool needs_cache_clean() { return _needs_cache_clean; }
  static void set_needs_cache_clean(bool v) { _needs_cache_clean = v; }

  static void clear_inline_caches();                // clear all inline caches
  static void cleanup_inline_caches();              // clean unloaded/zombie nmethods from inline caches
  static void do_unloading_nmethod_caches(bool class_unloading_occurred); // clean all nmethod caches for unloading, including inline caches

  // Returns true if an own CodeHeap for the given CodeBlobType is available
  static bool heap_available(int code_blob_type);

  // Returns the CodeBlobType for the given CompiledMethod
  static int get_code_blob_type(CompiledMethod* cm) {
    return get_code_heap(cm)->code_blob_type();
  }

  // True if the given type may contain CompiledMethods (AOT code included when built with AOT)
  static bool code_blob_type_accepts_compiled(int type) {
    bool result = type == CodeBlobType::All || type <= CodeBlobType::MethodProfiled;
    AOT_ONLY( result = result || type == CodeBlobType::AOT; )
    return result;
  }

  // True if the given type may contain nmethods
  static bool code_blob_type_accepts_nmethod(int type) {
    return type == CodeBlobType::All || type <= CodeBlobType::MethodProfiled;
  }

  // True if the given type is one the cache can allocate into
  static bool code_blob_type_accepts_allocable(int type) {
    return type <= CodeBlobType::All;
  }


  // Returns the CodeBlobType for the given compilation level
  static int get_code_blob_type(int comp_level) {
    if (comp_level == CompLevel_none ||
        comp_level == CompLevel_simple ||
        comp_level == CompLevel_full_optimization) {
      // Non profiled methods
      return CodeBlobType::MethodNonProfiled;
    } else if (comp_level == CompLevel_limited_profile ||
               comp_level == CompLevel_full_profile) {
      // Profiled methods
      return CodeBlobType::MethodProfiled;
    }
    ShouldNotReachHere();
    return 0;
  }

  static void verify_clean_inline_caches();
  static void verify_icholder_relocations();

  // Deoptimization
 private:
  static int mark_for_deoptimization(KlassDepChange& changes);
#ifdef HOTSWAP
  static int mark_for_evol_deoptimization(InstanceKlass* dependee);
#endif // HOTSWAP

 public:
  static void mark_all_nmethods_for_deoptimization();
  static int mark_for_deoptimization(Method* dependee);
  static void make_marked_nmethods_not_entrant();

  // Flushing and deoptimization
  static void flush_dependents_on(InstanceKlass* dependee);
#ifdef HOTSWAP
  // Flushing and deoptimization in case of evolution
  static void flush_evol_dependents_on(InstanceKlass* dependee);
#endif // HOTSWAP
  // Support for fullspeed debugging
  static void flush_dependents_on_method(const methodHandle& dependee);

  // tells how many nmethods have dependencies
  static int number_of_nmethods_with_dependencies();

  // Number of times the heap for the given type ran full (0 if no such heap)
  static int get_codemem_full_count(int code_blob_type) {
    CodeHeap* heap = get_code_heap(code_blob_type);
    return (heap != NULL) ? heap->full_count() : 0;
  }

  // CodeHeap State Analytics.
  // interface methods for CodeHeap printing, called by CompileBroker
  static void aggregate(outputStream *out, size_t granularity);
  static void discard(outputStream *out);
  static void print_usedSpace(outputStream *out);
  static void print_freeSpace(outputStream *out);
  static void print_count(outputStream *out);
  static void print_space(outputStream *out);
  static void print_age(outputStream *out);
  static void print_names(outputStream *out);
};


// Iterator to iterate over nmethods in the CodeCache.
319 template <class T, class Filter> class CodeBlobIterator : public StackObj { 320 private: 321 CodeBlob* _code_blob; // Current CodeBlob 322 GrowableArrayIterator<CodeHeap*> _heap; 323 GrowableArrayIterator<CodeHeap*> _end; 324 325 public: 326 CodeBlobIterator(T* nm = NULL) { 327 if (Filter::heaps() == NULL) { 328 return; 329 } 330 _heap = Filter::heaps()->begin(); 331 _end = Filter::heaps()->end(); 332 // If set to NULL, initialized by first call to next() 333 _code_blob = (CodeBlob*)nm; 334 if (nm != NULL) { 335 while(!(*_heap)->contains_blob(_code_blob)) { 336 ++_heap; 337 } 338 assert((*_heap)->contains_blob(_code_blob), "match not found"); 339 } 340 } 341 342 // Advance iterator to next blob 343 bool next() { 344 assert_locked_or_safepoint(CodeCache_lock); 345 346 bool result = next_blob(); 347 while (!result && _heap != _end) { 348 // Advance to next code heap of segmented code cache 349 if (++_heap == _end) { 350 break; 351 } 352 result = next_blob(); 353 } 354 355 return result; 356 } 357 358 // Advance iterator to next alive blob 359 bool next_alive() { 360 bool result = next(); 361 while(result && !_code_blob->is_alive()) { 362 result = next(); 363 } 364 return result; 365 } 366 367 bool end() const { return _code_blob == NULL; } 368 T* method() const { return (T*)_code_blob; } 369 370 private: 371 372 // Advance iterator to the next blob in the current code heap 373 bool next_blob() { 374 if (_heap == _end) { 375 return false; 376 } 377 CodeHeap *heap = *_heap; 378 // Get first method CodeBlob 379 if (_code_blob == NULL) { 380 _code_blob = CodeCache::first_blob(heap); 381 if (_code_blob == NULL) { 382 return false; 383 } else if (Filter::apply(_code_blob)) { 384 return true; 385 } 386 } 387 // Search for next method CodeBlob 388 _code_blob = CodeCache::next_blob(heap, _code_blob); 389 while (_code_blob != NULL && !Filter::apply(_code_blob)) { 390 _code_blob = CodeCache::next_blob(heap, _code_blob); 391 } 392 return _code_blob != NULL; 393 } 394 }; 395 

// Filter policy for CodeBlobIterator: visits blobs that are CompiledMethods,
// walking only the heaps that can contain compiled code.
struct CompiledMethodFilter {
  static bool apply(CodeBlob* cb) { return cb->is_compiled(); }
  static const GrowableArray<CodeHeap*>* heaps() { return CodeCache::compiled_heaps(); }
};


// Filter policy for CodeBlobIterator: visits blobs that are nmethods,
// walking only the heaps that can contain nmethods.
struct NMethodFilter {
  static bool apply(CodeBlob* cb) { return cb->is_nmethod(); }
  static const GrowableArray<CodeHeap*>* heaps() { return CodeCache::nmethod_heaps(); }
};


// Convenience iterator types over CompiledMethods and nmethods respectively.
typedef CodeBlobIterator<CompiledMethod, CompiledMethodFilter> CompiledMethodIterator;
typedef CodeBlobIterator<nmethod, NMethodFilter> NMethodIterator;

#endif // SHARE_VM_CODE_CODECACHE_HPP