
src/hotspot/share/prims/whitebox.cpp

rev 47287 : Port 09.17.Thread_SMR_logging_update from JDK9 to JDK10
rev 47289 : eosterlund, stefank CR - refactor code into threadSMR.cpp and threadSMR.hpp
rev 47292 : stefank, coleenp CR - refactor most JavaThreadIterator usage to use JavaThreadIteratorWithHandle.
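
For context, rev 47292 replaces the explicit ThreadsListHandle plus JavaThreadIterator pair with the combined JavaThreadIteratorWithHandle. A minimal sketch of the two idioms, mirroring the hunks below; the wrapper function names and the include path are illustrative, not part of the patch:

#include "runtime/threadSMR.hpp"   // assumed header location; rev 47289 moves the SMR code into threadSMR.hpp

// Previous idiom: the caller creates a ThreadsListHandle and hands its list
// to a separate JavaThreadIterator.
static void walk_java_threads_old() {
  ThreadsListHandle tlh;
  JavaThreadIterator jti(tlh.list());
  for (JavaThread* t = jti.first(); t != NULL; t = jti.next()) {
    // ... per-thread work, e.g. t->has_last_Java_frame() ...
  }
}

// Updated idiom: JavaThreadIteratorWithHandle owns the ThreadsListHandle
// itself, so the loop header is the only scaffolding needed.
static void walk_java_threads_new() {
  for (JavaThreadIteratorWithHandle jtiwh; JavaThread* t = jtiwh.next(); ) {
    // ... per-thread work ...
  }
}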


 644 WB_END
 645 #endif // INCLUDE_NMT
 646 
 647 static jmethodID reflected_method_to_jmid(JavaThread* thread, JNIEnv* env, jobject method) {
 648   assert(method != NULL, "method should not be null");
 649   ThreadToNativeFromVM ttn(thread);
 650   return env->FromReflectedMethod(method);
 651 }
 652 
 653 // Deoptimizes all compiled frames and makes nmethods not entrant if it's requested
 654 class VM_WhiteBoxDeoptimizeFrames : public VM_WhiteBoxOperation {
 655  private:
 656   int _result;
 657   const bool _make_not_entrant;
 658  public:
 659   VM_WhiteBoxDeoptimizeFrames(bool make_not_entrant) :
 660         _result(0), _make_not_entrant(make_not_entrant) { }
 661   int  result() const { return _result; }
 662 
 663   void doit() {
 664     ThreadsListHandle tlh;
 665     JavaThreadIterator jti(tlh.list());
 666     for (JavaThread* t = jti.first(); t != NULL; t = jti.next()) {
 667       if (t->has_last_Java_frame()) {
 668         for (StackFrameStream fst(t, UseBiasedLocking); !fst.is_done(); fst.next()) {
 669           frame* f = fst.current();
 670           if (f->can_be_deoptimized() && !f->is_deoptimized_frame()) {
 671             RegisterMap* reg_map = fst.register_map();
 672             Deoptimization::deoptimize(t, *f, reg_map);
 673             if (_make_not_entrant) {
 674                 CompiledMethod* cm = CodeCache::find_compiled(f->pc());
 675                 assert(cm != NULL, "sanity check");
 676                 cm->make_not_entrant();
 677             }
 678             ++_result;
 679           }
 680         }
 681       }
 682     }
 683   }
 684 };
 685 
 686 WB_ENTRY(jint, WB_DeoptimizeFrames(JNIEnv* env, jobject o, jboolean make_not_entrant))

[end of previous revision; the updated revision of the same hunk, using JavaThreadIteratorWithHandle, follows]

 644 WB_END
 645 #endif // INCLUDE_NMT
 646 
 647 static jmethodID reflected_method_to_jmid(JavaThread* thread, JNIEnv* env, jobject method) {
 648   assert(method != NULL, "method should not be null");
 649   ThreadToNativeFromVM ttn(thread);
 650   return env->FromReflectedMethod(method);
 651 }
 652 
 653 // Deoptimizes all compiled frames and makes nmethods not entrant if it's requested
 654 class VM_WhiteBoxDeoptimizeFrames : public VM_WhiteBoxOperation {
 655  private:
 656   int _result;
 657   const bool _make_not_entrant;
 658  public:
 659   VM_WhiteBoxDeoptimizeFrames(bool make_not_entrant) :
 660         _result(0), _make_not_entrant(make_not_entrant) { }
 661   int  result() const { return _result; }
 662 
 663   void doit() {
 664     for (JavaThreadIteratorWithHandle jtiwh; JavaThread *t = jtiwh.next(); ) {


 665       if (t->has_last_Java_frame()) {
 666         for (StackFrameStream fst(t, UseBiasedLocking); !fst.is_done(); fst.next()) {
 667           frame* f = fst.current();
 668           if (f->can_be_deoptimized() && !f->is_deoptimized_frame()) {
 669             RegisterMap* reg_map = fst.register_map();
 670             Deoptimization::deoptimize(t, *f, reg_map);
 671             if (_make_not_entrant) {
 672                 CompiledMethod* cm = CodeCache::find_compiled(f->pc());
 673                 assert(cm != NULL, "sanity check");
 674                 cm->make_not_entrant();
 675             }
 676             ++_result;
 677           }
 678         }
 679       }
 680     }
 681   }
 682 };
 683 
 684 WB_ENTRY(jint, WB_DeoptimizeFrames(JNIEnv* env, jobject o, jboolean make_not_entrant))

