src/share/vm/runtime/sweeper.cpp

rev 1083 : code cache unloading for webrev 091214
rev 1084 : This rev fixes all the print format stuff and resets fewer counters as recommended by Vladimir.
rev 1085 : checkpoint unloading changes on 100107
rev 1086 : use alive_nmethod when traversing code cache
rev 1087 : use enum for passing compiler on/off state

@@ -31,10 +31,15 @@
 int       NMethodSweeper::_invocations = 0;  // No. of invocations left until we are completed with this pass
 
 jint      NMethodSweeper::_locked_seen = 0;
 jint      NMethodSweeper::_not_entrant_seen_on_stack = 0;
 bool      NMethodSweeper::_rescan = false;
+bool      NMethodSweeper::_was_full = false;
+jlong     NMethodSweeper::_advise_to_sweep = 0;
+jlong     NMethodSweeper::_last_was_full = 0;
+uint      NMethodSweeper::_highest_marked = 0;
+long      NMethodSweeper::_was_full_traversal = 0;
 
 class MarkActivationClosure: public CodeBlobClosure {
 public:
   virtual void do_code_blob(CodeBlob* cb) {
     // If we see an activation belonging to a non_entrant nmethod, we mark it.

@@ -112,10 +117,38 @@
     // matter much.
     if (PrintMethodFlushing) {
       tty->print_cr("### Couldn't make progress on some nmethods so stopping sweep");
     }
   }
+  
+  if (UseCodeCacheFlushing) {
+    if (CodeCache::unallocated_capacity() > CodeCacheFlushingMinimumFreeSpace) {
+      // In a safepoint, no race with setters
+      _advise_to_sweep = 0;
+    }
+    
+    if (was_full()) {
+      // There was some progress so attempt to restart the compiler
+      jlong now           = os::javaTimeMillis();
+      jlong max_interval  = (jlong)MinCodeCacheFlushingInterval * (jlong)1000;
+      jlong curr_interval = now - _last_was_full;
+      if ((CodeCache::unallocated_capacity() > CodeCacheFlushingMinimumFreeSpace) &&
+          (curr_interval > max_interval)){
+        CompileBroker::set_should_compile_new_jobs(CompileBroker::run_compilation);
+        set_was_full(false);
+
+        // Update the _last_was_full time so we can tell how fast the 
+        // code cache is filling up
+        _last_was_full = os::javaTimeMillis();
+        
+        if (PrintMethodFlushing) {
+          tty->print_cr("### sweeper: " UINT32_FORMAT "/" SIZE_FORMAT "/" SIZE_FORMAT " restarting compiler", 
+            CodeCache::nof_blobs(), CodeCache::unallocated_capacity(), CodeCache::max_capacity());
+        }
+      } 
+    }
+  }
 }
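
A minimal standalone sketch (not part of the patch) of the restart throttle above: MinCodeCacheFlushingInterval is given in seconds while _last_was_full holds an os::javaTimeMillis() reading, so the check boils down to a seconds-to-milliseconds comparison plus a free-space test. The function name and parameters here are illustrative stand-ins for the sweeper fields.

    #include <cstdint>

    // Model of the restart throttle: the compiler is only restarted if the code
    // cache has enough free space AND at least 'min_interval_sec' seconds have
    // passed since the cache last filled up. Timestamps are in milliseconds,
    // mirroring os::javaTimeMillis().
    static bool restart_allowed(int64_t now_ms, int64_t last_was_full_ms,
                                int64_t min_interval_sec, bool enough_free_space) {
      int64_t max_interval_ms  = min_interval_sec * 1000;   // seconds -> millis
      int64_t curr_interval_ms = now_ms - last_was_full_ms;
      return enough_free_space && (curr_interval_ms > max_interval_ms);
    }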
 
 
 void NMethodSweeper::process_nmethod(nmethod *nm) {
   // Skip methods that are currently referenced by the VM

@@ -175,9 +208,134 @@
       nm->make_zombie();
       _rescan = true;
     }
   } else {
     assert(nm->is_alive(), "should be alive");
+
+    if (UseCodeCacheFlushing) {
+      if ((nm->method()->code() != nm) && !(nm->is_locked_by_vm()) && !(nm->is_osr_method()) &&
+          (_traversals > _was_full_traversal+2) && (((uint)nm->compile_id()) < _highest_marked) &&
+          (CodeCache::unallocated_capacity() < CodeCacheFlushingMinimumFreeSpace)) {
+        // This method has not been called since the forced cleanup happened
+        nm->make_not_entrant();
+        nm->method()->set_saved_code(NULL);
+      }
+    }
+
     // Clean-up all inline caches that point to zombie/non-entrant methods
     nm->cleanup_inline_caches();
   }
 }
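
A standalone sketch (not part of the patch) of the expiry test added above, with illustrative parameters in place of the sweeper's static fields: a speculatively disconnected nmethod is made not-entrant only after its method has gone unused for more than two traversals following the forced cleanup, and only while the code cache is still short on space.

    #include <cstdint>

    // An nmethod is due to be made not-entrant when:
    //  - its methodOop no longer points at it (it was disconnected and never
    //    re-adopted, i.e. the method was not called since the forced cleanup),
    //  - its compile id is below the highest id marked by that cleanup,
    //  - more than two sweeper traversals have passed since the cleanup, and
    //  - the code cache is still short on free space.
    static bool due_for_not_entrant(bool method_points_at_nm,
                                    uint32_t compile_id, uint32_t highest_marked,
                                    long traversals, long was_full_traversal,
                                    bool cache_still_tight) {
      return !method_points_at_nm &&
             (compile_id < highest_marked) &&
             (traversals > was_full_traversal + 2) &&
             cache_still_tight;
    }
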
+
+// Code cache unloading: when compilers notice the code cache is getting full,
+// they will call a vm op that comes here. This code attempts to speculatively
+// unload the oldest half of the nmethods (based on the compile job id) by
+// hiding the methodOop's ref to the nmethod in the _saved_code field. Then 
+// execution resumes. If a method so marked is not called again by the second
+// safepoint after the current one, the nmethod is made non-entrant and
+// reclaimed by normal sweeping. If the method is called, the methodOop's
+// _code field is restored from the _saved_code field and the methodOop/nmethod
+// pair returns to its normal state.
+void NMethodSweeper::handle_full_code_cache(bool is_full) {
+  // Only the first one to notice can advise us to start early cleaning
+  if (!is_full){
+    jlong old = Atomic::cmpxchg( 1, &_advise_to_sweep, 0 );
+    if (old != 0) {
+      return;
+    }
+  }
+
+  if (is_full) {
+    // Since code cache is full, immediately stop new compiles
+    bool did_set = CompileBroker::set_should_compile_new_jobs(CompileBroker::stop_compilation);
+    if (!did_set) {
+      // only the first to notice can start the cleaning, 
+      // others will go back and block
+      return;
+    }
+    set_was_full(true);
+    
+    // If we run out of space again within MinCodeCacheFlushingInterval seconds of the
+    // last unload time, give up
+    jlong now = os::javaTimeMillis();
+    jlong max_interval = (jlong)MinCodeCacheFlushingInterval * (jlong)1000;
+    jlong curr_interval = now - _last_was_full;
+    if (curr_interval < max_interval) {
+      _rescan = true;
+      if (PrintMethodFlushing) {
+        tty->print_cr("### handle full too often, turning off compiler");
+      }    
+      return;
+    }
+  }
+    
+  VM_HandleFullCodeCache op(is_full);
+  VMThread::execute(&op);
+  
+  // rescan again as soon as possible
+  _rescan = true;
+}
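
Several compiler threads can notice the code cache filling up at about the same time; the cmpxchg on _advise_to_sweep and the boolean result of set_should_compile_new_jobs() both act as "only the first caller proceeds" gates. A standalone model of that pattern (not part of the patch), using std::atomic in place of HotSpot's Atomic::cmpxchg:

    #include <atomic>

    // One flag shared by all compiler threads; 0 means nobody has advised a
    // sweep yet. Stands in for NMethodSweeper::_advise_to_sweep.
    static std::atomic<long> advise_to_sweep{0};

    // Returns true for exactly one caller (the one whose compare-and-swap
    // succeeds); every later caller sees the flag already set and backs off.
    static bool first_to_advise() {
      long expected = 0;
      return advise_to_sweep.compare_exchange_strong(expected, 1);
    }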
+
+void NMethodSweeper::speculative_disconnect_nmethods(bool is_full) {
+  // If there was a race in detecting the full code cache, only run
+  // one vm op for it, or keep the compiler shut off
+
+  debug_only(jlong start = os::javaTimeMillis();)
+
+  if ((!was_full()) && (is_full)) {
+    if (CodeCache::unallocated_capacity() > CodeCacheFlushingMinimumFreeSpace) {
+      if (PrintMethodFlushing) {
+        tty->print_cr("### sweeper: " UINT32_FORMAT "/" SIZE_FORMAT "/" SIZE_FORMAT " restarting compiler", 
+          CodeCache::nof_blobs(), CodeCache::unallocated_capacity(), CodeCache::max_capacity());
+      }
+      CompileBroker::set_should_compile_new_jobs(CompileBroker::run_compilation);
+      return;
+    }
+  }
+
+  // Traverse the code cache trying to dump the oldest nmethods
+  uint curr_max_comp_id = CompileBroker::get_compilation_id();
+  uint flush_target = ((curr_max_comp_id - _highest_marked) >> 1) + _highest_marked;
+  if (PrintMethodFlushing) {
+    tty->print_cr("### Cleaning code cache: " UINT32_FORMAT "/" SIZE_FORMAT "/" SIZE_FORMAT,
+        CodeCache::nof_blobs(), CodeCache::unallocated_capacity(), CodeCache::max_capacity());
+  }
+
+  nmethod* nm = CodeCache::alive_nmethod(CodeCache::first());
+
+  while (nm != NULL) {
+    uint curr_comp_id = nm->compile_id();
+
+    // OSR methods cannot be flushed like this. Also, don't flush native methods
+    // since they are part of the JDK in most cases
+    if(nm->is_in_use() && (!nm->is_osr_method()) && (!nm->is_locked_by_vm()) &&
+        (!nm->is_native_method()) && ((curr_comp_id < flush_target))) {
+
+      if (nm->method()->code() == nm) {
+        // This method has not been previously considered for
+        // unloading or it was restored already
+        nm->method()->clear_code();
+      } else if (nm->method()->saved_code() == nm) {
+        // This method was previously considered for preemptive unloading and was not called since then
+        nm->method()->set_saved_code(NULL);
+        nm->method()->invocation_counter()->decay();
+        nm->method()->backedge_counter()->decay();
+        nm->make_not_entrant();
+      }
+    
+      if (curr_comp_id > _highest_marked) {
+        _highest_marked = curr_comp_id;
+      }
+    }
+    nm = CodeCache::alive_nmethod(CodeCache::next(nm));
+  }
+  
+  // Shut off compiler. Sweeper will run exiting from this safepoint
+  // and turn it back on if it clears enough space
+  if (was_full()) {
+    _last_was_full = os::javaTimeMillis();
+    CompileBroker::set_should_compile_new_jobs(CompileBroker::stop_compilation);
+  }
+  
+  // After two more traversals the sweeper will get rid of unrestored nmethods
+  _was_full_traversal = _traversals;
+  debug_only(jlong end = os::javaTimeMillis();
+             if (PrintMethodFlushing) tty->print_cr("### sweeper: unload time: " INT64_FORMAT, end - start);)
+}
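
The flush_target computed above is the midpoint between _highest_marked and the current maximum compile id, so roughly the older half of the nmethods compiled since the last cleanup become disconnect candidates. A worked example (not part of the patch) with made-up ids:

    #include <cstdio>

    int main() {
      unsigned highest_marked   = 1000;   // illustrative values only
      unsigned curr_max_comp_id = 5000;
      // Midpoint of [highest_marked, curr_max_comp_id]
      unsigned flush_target = ((curr_max_comp_id - highest_marked) >> 1) + highest_marked;
      // Live, in-use, non-OSR, non-native nmethods with a compile id below the
      // target (here 3000) are candidates for speculative disconnection.
      std::printf("flush candidates: compile id < %u\n", flush_target);
      return 0;
    }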