
src/share/vm/code/codeCache.cpp


Old version:

  33 #include "code/pcDesc.hpp"
  34 #include "compiler/compileBroker.hpp"
  35 #include "gc/shared/gcLocker.hpp"
  36 #include "memory/allocation.inline.hpp"
  37 #include "memory/iterator.hpp"
  38 #include "memory/resourceArea.hpp"
  39 #include "oops/method.hpp"
  40 #include "oops/objArrayOop.hpp"
  41 #include "oops/oop.inline.hpp"
  42 #include "oops/verifyOopClosure.hpp"
  43 #include "runtime/arguments.hpp"
  44 #include "runtime/compilationPolicy.hpp"
  45 #include "runtime/deoptimization.hpp"
  46 #include "runtime/handles.inline.hpp"
  47 #include "runtime/icache.hpp"
  48 #include "runtime/java.hpp"
  49 #include "runtime/mutexLocker.hpp"
  50 #include "runtime/sweeper.hpp"
  51 #include "services/memoryService.hpp"
  52 #include "trace/tracing.hpp"

  53 #include "utilities/xmlstream.hpp"
  54 #ifdef COMPILER1
  55 #include "c1/c1_Compilation.hpp"
  56 #include "c1/c1_Compiler.hpp"
  57 #endif
  58 #ifdef COMPILER2
  59 #include "opto/c2compiler.hpp"
  60 #include "opto/compile.hpp"
  61 #include "opto/node.hpp"
  62 #endif
  63 
  64 // Helper class for printing in CodeCache
  65 class CodeBlob_sizes {
  66  private:
  67   int count;
  68   int total_size;
  69   int header_size;
  70   int code_size;
  71   int stub_size;
  72   int relocation_size;


 576   S390_ONLY(if (_heaps == NULL) return false;)
 577   // It should be ok to call contains without holding a lock.
 578   FOR_ALL_HEAPS(heap) {
 579     if ((*heap)->contains(p)) {
 580       return true;
 581     }
 582   }
 583   return false;
 584 }
 585 
 586 bool CodeCache::contains(nmethod *nm) {
 587   return contains((void *)nm);
 588 }
 589 
  590 // This method is safe to call without holding the CodeCache_lock, as long as a dead CodeBlob is not
  591 // looked up (i.e., one that has been marked for deletion). It only relies on _segmap containing
  592 // valid indices, which it always does as long as the CodeBlob is not in the process of being recycled.
 593 CodeBlob* CodeCache::find_blob(void* start) {
 594   CodeBlob* result = find_blob_unsafe(start);
 595   // We could potentially look up non_entrant methods
 596   guarantee(result == NULL || !result->is_zombie() || result->is_locked_by_vm() || is_error_reported(), "unsafe access to zombie method");
 597   return result;
 598 }
 599 
  600 // Lookup that does not fail if you look up a zombie method (if you call this, be sure you know
  601 // what you are doing)
 602 CodeBlob* CodeCache::find_blob_unsafe(void* start) {
 603   // NMT can walk the stack before code cache is created
 604   if (_heaps != NULL && !_heaps->is_empty()) {
 605     FOR_ALL_HEAPS(heap) {
 606       CodeBlob* result = (*heap)->find_blob_unsafe(start);
 607       if (result != NULL) {
 608         return result;
 609       }
 610     }
 611   }
 612   return NULL;
 613 }
 614 
 615 nmethod* CodeCache::find_nmethod(void* start) {
 616   CodeBlob* cb = find_blob(start);


New version:

  33 #include "code/pcDesc.hpp"
  34 #include "compiler/compileBroker.hpp"
  35 #include "gc/shared/gcLocker.hpp"
  36 #include "memory/allocation.inline.hpp"
  37 #include "memory/iterator.hpp"
  38 #include "memory/resourceArea.hpp"
  39 #include "oops/method.hpp"
  40 #include "oops/objArrayOop.hpp"
  41 #include "oops/oop.inline.hpp"
  42 #include "oops/verifyOopClosure.hpp"
  43 #include "runtime/arguments.hpp"
  44 #include "runtime/compilationPolicy.hpp"
  45 #include "runtime/deoptimization.hpp"
  46 #include "runtime/handles.inline.hpp"
  47 #include "runtime/icache.hpp"
  48 #include "runtime/java.hpp"
  49 #include "runtime/mutexLocker.hpp"
  50 #include "runtime/sweeper.hpp"
  51 #include "services/memoryService.hpp"
  52 #include "trace/tracing.hpp"
  53 #include "utilities/vmError.hpp"
  54 #include "utilities/xmlstream.hpp"
  55 #ifdef COMPILER1
  56 #include "c1/c1_Compilation.hpp"
  57 #include "c1/c1_Compiler.hpp"
  58 #endif
  59 #ifdef COMPILER2
  60 #include "opto/c2compiler.hpp"
  61 #include "opto/compile.hpp"
  62 #include "opto/node.hpp"
  63 #endif
  64 
  65 // Helper class for printing in CodeCache
  66 class CodeBlob_sizes {
  67  private:
  68   int count;
  69   int total_size;
  70   int header_size;
  71   int code_size;
  72   int stub_size;
  73   int relocation_size;


 577   S390_ONLY(if (_heaps == NULL) return false;)
 578   // It should be ok to call contains without holding a lock.
 579   FOR_ALL_HEAPS(heap) {
 580     if ((*heap)->contains(p)) {
 581       return true;
 582     }
 583   }
 584   return false;
 585 }
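
FOR_ALL_HEAPS hides the iteration over the list of code heaps. As a rough
sketch of what the loop above desugars to, assuming _heaps is a
GrowableArray<CodeHeap*>* as in mainline HotSpot (the actual macro definition
lives in codeCache.hpp):

// Approximate expansion of FOR_ALL_HEAPS(heap), illustration only,
// assuming _heaps is a GrowableArray<CodeHeap*>*:
for (GrowableArrayIterator<CodeHeap*> heap = _heaps->begin();
     heap != _heaps->end(); ++heap) {
  if ((*heap)->contains(p)) {
    return true;     // p falls inside one of the code heaps
  }
}
return false;        // p is outside all code heaps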
 586 
 587 bool CodeCache::contains(nmethod *nm) {
 588   return contains((void *)nm);
 589 }
 590 
  591 // This method is safe to call without holding the CodeCache_lock, as long as a dead CodeBlob is not
  592 // looked up (i.e., one that has been marked for deletion). It only relies on _segmap containing
  593 // valid indices, which it always does as long as the CodeBlob is not in the process of being recycled.
 594 CodeBlob* CodeCache::find_blob(void* start) {
 595   CodeBlob* result = find_blob_unsafe(start);
 596   // We could potentially look up non_entrant methods
 597   guarantee(result == NULL || !result->is_zombie() || result->is_locked_by_vm() || VMError::is_error_reported(), "unsafe access to zombie method");
 598   return result;
 599 }
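
A usage sketch (hypothetical caller, not part of this file): because
find_blob() accepts arbitrary addresses and returns NULL on a miss, resolving
a PC to its nmethod is a null check plus an is_nmethod() test, the same shape
as find_nmethod() below:

// Hypothetical helper, for illustration only: map an arbitrary code address
// to its enclosing nmethod, or NULL if it is not inside compiled Java code.
static nmethod* nmethod_at(void* pc) {
  CodeBlob* cb = CodeCache::find_blob(pc);   // safe without CodeCache_lock
  if (cb == NULL || !cb->is_nmethod()) {
    return NULL;   // a stub, an adapter, or an address outside the code cache
  }
  return (nmethod*)cb;
}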
 600 
  601 // Lookup that does not fail if you look up a zombie method (if you call this, be sure you know
  602 // what you are doing)
 603 CodeBlob* CodeCache::find_blob_unsafe(void* start) {
 604   // NMT can walk the stack before code cache is created
 605   if (_heaps != NULL && !_heaps->is_empty()) {
 606     FOR_ALL_HEAPS(heap) {
 607       CodeBlob* result = (*heap)->find_blob_unsafe(start);
 608       if (result != NULL) {
 609         return result;
 610       }
 611     }
 612   }
 613   return NULL;
 614 }
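
The unchecked variant exists for contexts where the zombie guarantee in
find_blob() must not fire; error reporting is the canonical case (hence the
VMError::is_error_reported() escape hatch above). A hedged sketch of such a
caller, with print_location as a hypothetical name:

// Hypothetical crash-dump path, for illustration only: during error
// reporting a failing guarantee would be fatal, so use the unchecked lookup
// and print whatever blob covers the address, in whatever state it is.
static void print_location(outputStream* st, void* addr) {
  CodeBlob* cb = CodeCache::find_blob_unsafe(addr);
  if (cb != NULL) {
    cb->print_on(st);
  }
}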
 615 
 616 nmethod* CodeCache::find_nmethod(void* start) {
 617   CodeBlob* cb = find_blob(start);

