src/share/vm/code/codeCache.cpp

New:

          os::vm_page_size();
  const size_t granularity = os::vm_allocation_granularity();
  const size_t r_align = MAX2(page_size, granularity);
  const size_t r_size = align_size_up(size, r_align);
  const size_t rs_align = page_size == (size_t) os::vm_page_size() ? 0 :
    MAX2(page_size, granularity);

  ReservedCodeSpace rs(r_size, rs_align, rs_align > 0);

  // Initialize bounds
  _low_bound = (address)rs.base();
  _high_bound = _low_bound + rs.size();

  return rs;
}

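A note on the arithmetic above: the requested size is rounded up so that both the page size and the allocation granularity divide it, and an explicit alignment is requested only when large pages have raised page_size above the default (otherwise rs_align stays 0). A minimal standalone sketch of the rounding, with stand-in values; the real numbers come from the os:: queries and vary by platform:

#include <cstddef>
#include <cstdio>

// Simplified stand-in for HotSpot's align_size_up(): round 'size' up to the
// next multiple of 'alignment' (alignment must be a power of two).
static size_t align_size_up(size_t size, size_t alignment) {
  return (size + alignment - 1) & ~(alignment - 1);
}

int main() {
  // Example stand-ins; the real values come from os::vm_page_size() and
  // os::vm_allocation_granularity().
  const size_t page_size   = 4 * 1024;   // 4 KB small pages
  const size_t granularity = 64 * 1024;  // e.g. Windows reserves in 64 KB chunks
  const size_t r_align = (page_size > granularity) ? page_size : granularity;
  // A 100 KB request is rounded up to the next 64 KB boundary: 128 KB.
  printf("reserved size: %zu\n", align_size_up(100 * 1024, r_align));
  return 0;
}
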
bool CodeCache::heap_available(int code_blob_type) {
  if (!SegmentedCodeCache) {
    // No segmentation: use a single code heap
    return (code_blob_type == CodeBlobType::All);
  } else if ((Arguments::mode() == Arguments::_int) ||
             (TieredStopAtLevel == CompLevel_none)) {
    // Interpreter only: we don't need any method code heaps
    return (code_blob_type == CodeBlobType::NonNMethod);
  } else if (TieredCompilation && (TieredStopAtLevel > CompLevel_simple)) {
    // Tiered compilation: use all code heaps
    return (code_blob_type < CodeBlobType::All);
  } else {
    // No TieredCompilation: we only need the non-nmethod and non-profiled code heap
    return (code_blob_type == CodeBlobType::NonNMethod) ||
           (code_blob_type == CodeBlobType::MethodNonProfiled);
  }
}

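For reference, a standalone restatement of heap_available() with the VM flags passed in as plain parameters, so the four branches can be exercised directly. The CodeBlobType values mirror HotSpot's codeBlobType.hpp, and CompLevel_none/CompLevel_simple appear as their numeric values 0 and 1; both are assumptions of this sketch, not part of the change:

#include <cstdio>

// Mirror of HotSpot's CodeBlobType values (codeBlobType.hpp), assumed here
// for illustration.
enum CodeBlobType { MethodNonProfiled = 0, MethodProfiled = 1, NonNMethod = 2, All = 3 };

// Standalone restatement of CodeCache::heap_available(); CompLevel_none == 0
// and CompLevel_simple == 1.
static bool heap_available(int code_blob_type, bool segmented_code_cache,
                           bool int_mode, bool tiered_compilation,
                           int tiered_stop_at_level) {
  if (!segmented_code_cache) {
    return code_blob_type == All;                   // single heap
  } else if (int_mode || tiered_stop_at_level == 0) {
    return code_blob_type == NonNMethod;            // interpreter only
  } else if (tiered_compilation && tiered_stop_at_level > 1) {
    return code_blob_type < All;                    // all three segments
  } else {
    return code_blob_type == NonNMethod ||          // no profiled nmethods
           code_blob_type == MethodNonProfiled;
  }
}

int main() {
  // -Xint with a segmented code cache: only the non-nmethod heap remains.
  printf("%d %d\n",
         heap_available(NonNMethod,     true, true, true, 4),   // prints 1
         heap_available(MethodProfiled, true, true, true, 4));  // prints 0
  return 0;
}

Under the extended condition, -XX:TieredStopAtLevel=0 takes the same branch as -Xint here: only the non-nmethod heap is available.
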
void CodeCache::add_heap(ReservedSpace rs, const char* name, size_t size_initial, int code_blob_type) {
  // Check if heap is needed
  if (!heap_available(code_blob_type)) {
    return;
  }

  // Create CodeHeap
  CodeHeap* heap = new CodeHeap(name, code_blob_type);




Old:

          os::vm_page_size();
  const size_t granularity = os::vm_allocation_granularity();
  const size_t r_align = MAX2(page_size, granularity);
  const size_t r_size = align_size_up(size, r_align);
  const size_t rs_align = page_size == (size_t) os::vm_page_size() ? 0 :
    MAX2(page_size, granularity);

  ReservedCodeSpace rs(r_size, rs_align, rs_align > 0);

  // Initialize bounds
  _low_bound = (address)rs.base();
  _high_bound = _low_bound + rs.size();

  return rs;
}

bool CodeCache::heap_available(int code_blob_type) {
  if (!SegmentedCodeCache) {
    // No segmentation: use a single code heap
    return (code_blob_type == CodeBlobType::All);
  } else if (Arguments::mode() == Arguments::_int) {
    // Interpreter only: we don't need any method code heaps
    return (code_blob_type == CodeBlobType::NonNMethod);
  } else if (TieredCompilation && (TieredStopAtLevel > CompLevel_simple)) {
    // Tiered compilation: use all code heaps
    return (code_blob_type < CodeBlobType::All);
  } else {
    // No TieredCompilation: we only need the non-nmethod and non-profiled code heap
    return (code_blob_type == CodeBlobType::NonNMethod) ||
           (code_blob_type == CodeBlobType::MethodNonProfiled);
  }
}

void CodeCache::add_heap(ReservedSpace rs, const char* name, size_t size_initial, int code_blob_type) {
  // Check if heap is needed
  if (!heap_available(code_blob_type)) {
    return;
  }

  // Create CodeHeap
  CodeHeap* heap = new CodeHeap(name, code_blob_type);

