
src/hotspot/share/runtime/deoptimization.cpp

rev 60137 : 8227745: Enable Escape Analysis for Better Performance in the Presence of JVMTI Agents
Reviewed-by: mdoerr, goetz
rev 60138 : 8227745: delta webrev.5 -> webrev.6


 268             } else {
 269               tty->print_cr("     object <" INTPTR_FORMAT "> locked", p2i(mi->owner()));
 270             }
 271           }
 272         }
 273       }
 274 #endif // !PRODUCT
 275     }
 276   }
 277 }
 278 
 279 // Deoptimize objects, that is, reallocate and relock them, just before they escape through JVMTI.
 280 // The given vframes cover one physical frame.
 281 bool Deoptimization::deoptimize_objects_internal(JavaThread* thread, GrowableArray<compiledVFrame*>* chunk, bool& realloc_failures) {
 282   frame deoptee = chunk->at(0)->fr();
 283   JavaThread* deoptee_thread = chunk->at(0)->thread();
 284   CompiledMethod* cm = deoptee.cb()->as_compiled_method_or_null();
 285   RegisterMap map(chunk->at(0)->register_map());
 286   bool deoptimized_objects = false;
 287 
 288   // Reallocate the non-escaping objects and restore their fields. Then
 289   // relock objects if synchronization on them was eliminated.
 290   if (DoEscapeAnalysis && EliminateAllocations) {

 291     realloc_failures = eliminate_allocations(thread, Unpack_none, cm, deoptee, map, chunk, deoptimized_objects);
 292   }
 293 
 294   // Revoke biases of objects with eliminated locks in the given frame.
 295   Deoptimization::revoke_for_object_deoptimization(deoptee_thread, deoptee, &map, thread);
 296 
 297   // MonitorInfo structures used in eliminate_locks are not GC safe.
 298   NoSafepointVerifier no_safepoint;
 299 
 300   if ((DoEscapeAnalysis || EliminateNestedLocks) && EliminateLocks) {

 301     eliminate_locks(thread, chunk, realloc_failures, deoptee, Unpack_none, deoptimized_objects);
 302   }
 303   return deoptimized_objects;
 304 }
 305 #endif // COMPILER2_OR_JVMCI
 306 
 307 // This is factored, since it is both called from a JRT_LEAF (deoptimization) and a JRT_ENTRY (uncommon_trap)
 308 Deoptimization::UnrollBlock* Deoptimization::fetch_unroll_info_helper(JavaThread* thread, int exec_mode) {
 309 
 310   // Note: there is a safepoint safety issue here. No matter whether we enter
 311   // via vanilla deopt or uncommon trap we MUST NOT stop at a safepoint once
 312   // the vframeArray is created.
 313   //
 314 
 315   // Allocate our special deoptimization ResourceMark
 316   DeoptResourceMark* dmark = new DeoptResourceMark(thread);
 317   assert(thread->deopt_mark() == NULL, "Pending deopt!");
 318   thread->set_deopt_mark(dmark);
 319 
 320   frame stub_frame = thread->last_frame(); // Makes stack walkable as side effect


1622   for (int i = 0; i < len; i++) {
1623     oop obj = (objects_to_revoke->at(i))();
1624     BiasedLocking::revoke_own_lock(objects_to_revoke->at(i), thread);
1625     assert(!obj->mark().has_bias_pattern(), "biases should be revoked by now");
1626   }
1627 }
1628 
1629 // Revoke the bias of objects with eliminated locking to prepare subsequent relocking.
1630 void Deoptimization::revoke_for_object_deoptimization(JavaThread* deoptee_thread, frame fr, RegisterMap* map, JavaThread* thread) {
1631   if (!UseBiasedLocking) {
1632     return;
1633   }
1634   GrowableArray<Handle>* objects_to_revoke = new GrowableArray<Handle>();
1635   // Collect monitors, but only those with eliminated locking.
1636   get_monitors_from_stack(objects_to_revoke, deoptee_thread, fr, map, true);
1637 
1638   int len = objects_to_revoke->length();
1639   for (int i = 0; i < len; i++) {
1640     oop obj = (objects_to_revoke->at(i))();
1641     markWord mark = obj->mark();
1642     if (!mark.has_bias_pattern()
 1643         || mark.is_biased_anonymously() // eliminated locking does not bias an object if it wasn't biased before
1644         || !obj->klass()->prototype_header().has_bias_pattern() // bulk revoke ignores eliminated monitors
1645         || (obj->klass()->prototype_header().bias_epoch() != mark.bias_epoch())) { // bulk rebias ignores eliminated monitors
1646       // We reach here regularly if there's just eliminated locking on obj.
 1647       // We must not call BiasedLocking::revoke_own_lock() in this case: it would hit assertions, because it
 1648       // requires non-eliminated locking on obj by deoptee_thread.
 1649       // Luckily we don't have to revoke here, because obj has to be a non-escaping object and can be relocked
 1650       // without revoking the bias. See Deoptimization::relock_objects().
1651       continue;
1652     }
1653     BiasedLocking::revoke(objects_to_revoke->at(i), thread);
1654     assert(!objects_to_revoke->at(i)->mark().has_bias_pattern(), "biases should be revoked by now");
1655   }
1656 }
1657 
1658 void Deoptimization::deoptimize_single_frame(JavaThread* thread, frame fr, Deoptimization::DeoptReason reason) {
1659   assert(fr.can_be_deoptimized(), "checking frame type");
1660 
1661   gather_statistics(reason, Action_none, Bytecodes::_illegal);
1662 
1663   if (LogCompilation && xtty != NULL) {
1664     CompiledMethod* cm = fr.cb()->as_compiled_method_or_null();
1665     assert(cm != NULL, "only compiled methods can deopt");


2690               bc = Bytecodes::_illegal;
2691             sprintf(name, "%s/%s/%s",
2692                     trap_reason_name(reason),
2693                     trap_action_name(action),
2694                     Bytecodes::is_defined(bc)? Bytecodes::name(bc): "other");
2695             juint r = counter >> LSB_BITS;
2696             tty->print_cr("  %40s: " UINT32_FORMAT " (%.1f%%)", name, r, (r * 100.0) / total);
2697             account -= r;
2698           }
2699         }
2700       }
2701     }
2702     if (account != 0) {
2703       PRINT_STAT_LINE("unaccounted", account);
2704     }
2705     #undef PRINT_STAT_LINE
2706     if (xtty != NULL)  xtty->tail("statistics");
2707   }
2708 }
2709 
2710 #ifdef ASSERT
2711 // Revert optimizations based on escape analysis.
2712 void Deoptimization::deoptimize_objects_alot_loop() {
2713   JavaThread* ct = JavaThread::current();
2714   HandleMark hm(ct);
2715   if (DeoptimizeObjectsALotThreadCount == 1) {
2716     // Revert everything at once
2717     while (!ct->is_terminated()) {
2718       { // Begin new scope for escape barrier
2719         HandleMarkCleaner hmc(ct);
2720         ResourceMark rm(ct);
2721         EscapeBarrier eb(ct, true);
2722         eb.deoptimize_objects_all_threads();
2723       }
 2724       // Now sleep after the escape barrier's destructor has resumed the Java threads.
2725       ct->sleep(DeoptimizeObjectsALotInterval);
2726     }
2727   } else {
 2728     // Revert everything for one deoptee_thread, which is selected round-robin
2729     JavaThread* deoptee_thread = NULL;
2730     while (!ct->is_terminated()) {
2731       { // Begin new scope for escape barrier
2732         HandleMarkCleaner hmc(ct);
2733         ResourceMark rm(ct);
2734         ThreadsListHandle tlh;
2735         int idx = (1 + tlh.list()->find_index_of_JavaThread(deoptee_thread)) % tlh.list()->length();
2736         deoptee_thread = tlh.list()->thread_at(idx);
2737         EscapeBarrier eb(ct, deoptee_thread, true);
2738         eb.deoptimize_objects(100);
2739       }
 2740       // Now sleep after the escape barrier's destructor has resumed deoptee_thread.
2741       ct->sleep(DeoptimizeObjectsALotInterval);
2742     }
2743   }
2744 }
 2745 #endif // ASSERT
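
Illustrative aside, not part of the webrev: the round-robin selection in deoptimize_objects_alot_loop() above just advances from the previously targeted thread's index and wraps around the thread list, assuming find_index_of_JavaThread() yields -1 while deoptee_thread is still NULL. A minimal standalone sketch of that arithmetic:

// Sketch only; next_round_robin_index is a hypothetical helper, not HotSpot API.
#include <cstddef>

static size_t next_round_robin_index(int prev_index, size_t list_length) {
  // prev_index == -1 (no previous target) maps to index 0; the last index wraps back to 0.
  return static_cast<size_t>(prev_index + 1) % list_length;
}
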
2746 
2747 // Returns true iff objects were reallocated and relocked because of access through JVMTI
2748 bool EscapeBarrier::objs_are_deoptimized(JavaThread* thread, intptr_t* fr_id) {
2749   // first/oldest update holds the flag
2750   GrowableArray<jvmtiDeferredLocalVariableSet*>* list = JvmtiDeferredUpdates::deferred_locals(thread);
2751   bool result = false;
2752   if (list != NULL) {
2753     for (int i = 0; i < list->length(); i++) {
2754       if (list->at(i)->matches(fr_id)) {
2755         result = list->at(i)->objects_are_deoptimized();
2756         break;
2757       }
2758     }
2759   }
2760   return result;
2761 }
2762 
 2763 // Deoptimize frames with non-escaping objects, i.e. revert optimizations based on escape analysis
 2764 // for the objects in scope. Do this for all frames within the given depth and continue from there
 2765 // until the entry frame is reached, because thread-local objects passed as arguments might escape
 2766 // from callee frames within the given depth.




2767 bool EscapeBarrier::deoptimize_objects(int depth) {
2768   if (barrier_active() && deoptee_thread()->has_last_Java_frame()) {
2769     ResourceMark rm(calling_thread());
2770     HandleMark   hm;
2771     RegisterMap  reg_map(deoptee_thread());
2772     vframe* vf = deoptee_thread()->last_java_vframe(&reg_map);
2773     int cur_depth = 0;
2774     while (vf != NULL && ((cur_depth <= depth) || !vf->is_entry_frame())) {
2775       if (vf->is_compiled_frame()) {
2776         compiledVFrame* cvf = compiledVFrame::cast(vf);
2777         // Deoptimize frame and local objects if any exist.
2778         // If cvf is deeper than depth, then we deoptimize iff local objects are passed as args.
2779         bool should_deopt = cur_depth <= depth ? cvf->not_global_escape_in_scope() : cvf->arg_escape();
2780         if (should_deopt && !deoptimize_objects(cvf->fr().id())) {
 2781           // reallocation of scalar-replaced objects failed because the heap is exhausted
2782           return false;
2783         }
2784       }
2785 
2786       // move to next physical frame
 2787       while (!vf->is_top()) {
2788         cur_depth++;
2789         vf = vf->sender();
2790       }



2791       cur_depth++;
2792       vf = vf->sender();
2793     }
2794   }
2795   return true;
2796 }
2797 
2798 bool EscapeBarrier::deoptimize_objects_all_threads() {
2799   if (!barrier_active()) return true;
2800   ResourceMark rm(calling_thread());
2801   for (JavaThreadIteratorWithHandle jtiwh; JavaThread *jt = jtiwh.next(); ) {
2802     if (jt->has_last_Java_frame()) {
2803       RegisterMap reg_map(jt);
2804       vframe* vf = jt->last_java_vframe(&reg_map);
2805       assert(jt->frame_anchor()->walkable(),
2806              "The stack of JavaThread " PTR_FORMAT " is not walkable. Thread state is %d",
2807              p2i(jt), jt->thread_state());
2808       while (vf != NULL) {
2809         if (vf->is_compiled_frame()) {
2810           compiledVFrame* cvf = compiledVFrame::cast(vf);
2811           if ((cvf->not_global_escape_in_scope() || cvf->arg_escape()) &&
2812               !deoptimize_objects_internal(jt, cvf->fr().id())) {
2813             return false; // reallocation failure
2814           }
2815         }
2816         // move to next physical frame
 2817         while (!vf->is_top()) {
2818           vf = vf->sender();
2819         }


2820         vf = vf->sender();
2821       }
2822     }
2823   }
2824   return true; // success
2825 }
2826 
2827 bool EscapeBarrier::_deoptimizing_objects_for_all_threads = false;
2828 bool EscapeBarrier::_self_deoptimization_in_progress      = false;
2829 
2830 class EscapeBarrierSuspendHandshake : public HandshakeClosure {
2831   JavaThread* _excluded_thread;
2832  public:
2833   EscapeBarrierSuspendHandshake(JavaThread* excluded_thread, const char* name) : HandshakeClosure(name), _excluded_thread(excluded_thread) { }
2834   void do_thread(Thread* th) {
2835     if (th->is_Java_thread() && !th->is_hidden_from_external_view() && (th != _excluded_thread)) {
2836       th->set_ea_obj_deopt_flag();
2837     }
2838   }
2839 };
2840 
2841 void EscapeBarrier::sync_and_suspend_one() {
2842   assert(_calling_thread != NULL, "calling thread must not be NULL");
2843   assert(_deoptee_thread != NULL, "deoptee thread must not be NULL");
2844   assert(barrier_active(), "should not call");
2845 
2846   // Sync with other threads that might be doing deoptimizations
2847   {
2848     // Need to switch to _thread_blocked for the wait() call
2849     ThreadBlockInVM tbivm(_calling_thread);
2850     MonitorLocker ml(_calling_thread, EscapeBarrier_lock, Mutex::_no_safepoint_check_flag);
2851     while (_self_deoptimization_in_progress || _deoptee_thread->is_ea_obj_deopt_suspend()) {
2852       ml.wait();
2853     }
2854 
2855     if (self_deopt()) {
2856       _self_deoptimization_in_progress = true;
2857       return;
2858     }
2859 
2860     // set suspend flag for target thread
2861     _deoptee_thread->set_ea_obj_deopt_flag();
2862   }
2863 
2864   // suspend target thread
2865   uint32_t debug_bits = 0;
2866   if (!_deoptee_thread->is_thread_fully_suspended(false, &debug_bits)) {
2867     EscapeBarrierSuspendHandshake sh(NULL, "EscapeBarrierSuspendOne");
2868     Handshake::execute(&sh, _deoptee_thread);
2869   }
2870   assert(!_deoptee_thread->has_last_Java_frame() || _deoptee_thread->frame_anchor()->walkable(),
2871          "stack should be walkable now");
2872 }
2873 
2874 void EscapeBarrier::sync_and_suspend_all() {
2875   assert(barrier_active(), "should not call");
2876   assert(_calling_thread != NULL, "calling thread must not be NULL");
2877   assert(all_threads(), "sanity");
2878 
2879   // Sync with other threads that might be doing deoptimizations
2880   {
2881     // Need to switch to _thread_blocked for the wait() call
2882     ThreadBlockInVM tbivm(_calling_thread);
2883     MonitorLocker ml(_calling_thread, EscapeBarrier_lock, Mutex::_no_safepoint_check_flag);
2884 
2885     bool deopt_in_progress;
2886     do {
2887       deopt_in_progress = _self_deoptimization_in_progress;
2888       for (JavaThreadIteratorWithHandle jtiwh; JavaThread *jt = jtiwh.next(); ) {
2889         deopt_in_progress = (deopt_in_progress || jt->is_ea_obj_deopt_suspend());
2890         if (deopt_in_progress) {
2891           break;
2892         }
2893       }
2894       if (deopt_in_progress) {
2895         ml.wait(); // then check again
2896       }
 2897     } while (deopt_in_progress);
2898 
2899     _self_deoptimization_in_progress = true;
2900     _deoptimizing_objects_for_all_threads = true;
2901   }
2902 
2903   EscapeBarrierSuspendHandshake sh(_calling_thread, "EscapeBarrierSuspendAll");
2904   Handshake::execute(&sh);
2905 #ifdef ASSERT
2906   for (JavaThreadIteratorWithHandle jtiwh; JavaThread *jt = jtiwh.next(); ) {
2907     if (jt->is_hidden_from_external_view()) continue;
2908     assert(!jt->has_last_Java_frame() || jt->frame_anchor()->walkable(),
2909            "The stack of JavaThread " PTR_FORMAT " is not walkable. Thread state is %d",
2910            p2i(jt), jt->thread_state());
2911   }
2912 #endif // ASSERT
2913 }
2914 
2915 void EscapeBarrier::resume_one() {
2916   assert(barrier_active(), "should not call");
2917   assert(!all_threads(), "use resume_all()");
2918   MonitorLocker ml(_calling_thread, EscapeBarrier_lock, Mutex::_no_safepoint_check_flag);
2919   if (self_deopt()) {
2920     assert(_self_deoptimization_in_progress, "incorrect synchronization");
2921     _self_deoptimization_in_progress = false;
2922   } else {
2923     _deoptee_thread->clear_ea_obj_deopt_flag();
2924   }
2925   ml.notify_all();
2926 }
2927 
2928 void EscapeBarrier::resume_all() {
2929   assert(barrier_active(), "should not call");
2930   assert(all_threads(), "use resume_one()");
2931   MonitorLocker ml(_calling_thread, EscapeBarrier_lock, Mutex::_no_safepoint_check_flag);
2932   assert(_self_deoptimization_in_progress, "incorrect synchronization");
2933   _deoptimizing_objects_for_all_threads = false;
2934   _self_deoptimization_in_progress = false;
2935   for (JavaThreadIteratorWithHandle jtiwh; JavaThread *jt = jtiwh.next(); ) {
2936     jt->clear_ea_obj_deopt_flag();
2937   }
2938   ml.notify_all();
2939 }
2940 
2941 void EscapeBarrier::thread_added(JavaThread* jt) {
2942   if (!jt->is_hidden_from_external_view()) {
2943     MutexLocker ml(EscapeBarrier_lock, Mutex::_no_safepoint_check_flag);
2944     if (_deoptimizing_objects_for_all_threads) {
2945       jt->set_ea_obj_deopt_flag();
2946     } else {
2947       jt->clear_ea_obj_deopt_flag();
2948     }









2949   }
2950 }
2951 
2952 // Remember that objects were reallocated and relocked for the compiled frame with the given id
2953 static void set_objs_are_deoptimized(JavaThread* thread, intptr_t* fr_id) {
2954   // set in first/oldest update
2955   GrowableArray<jvmtiDeferredLocalVariableSet*>* list = JvmtiDeferredUpdates::deferred_locals(thread);
2956   DEBUG_ONLY(bool found = false);
2957   if (list != NULL) {
2958     for (int i = 0; i < list->length(); i++) {
2959       if (list->at(i)->matches(fr_id)) {
2960         DEBUG_ONLY(found = true);
2961         list->at(i)->set_objs_are_deoptimized();
2962         break;
2963       }
2964     }
2965   }
 2966   assert(found, "variable set should exist for at least one vframe");
2967 }
2968 




 268             } else {
 269               tty->print_cr("     object <" INTPTR_FORMAT "> locked", p2i(mi->owner()));
 270             }
 271           }
 272         }
 273       }
 274 #endif // !PRODUCT
 275     }
 276   }
 277 }
 278 
 279 // Deoptimize objects, that is, reallocate and relock them, just before they escape through JVMTI.
 280 // The given vframes cover one physical frame.
 281 bool Deoptimization::deoptimize_objects_internal(JavaThread* thread, GrowableArray<compiledVFrame*>* chunk, bool& realloc_failures) {
 282   frame deoptee = chunk->at(0)->fr();
 283   JavaThread* deoptee_thread = chunk->at(0)->thread();
 284   CompiledMethod* cm = deoptee.cb()->as_compiled_method_or_null();
 285   RegisterMap map(chunk->at(0)->register_map());
 286   bool deoptimized_objects = false;
 287 
 288   bool const jvmci_enabled = JVMCI_ONLY(UseJVMCICompiler) NOT_JVMCI(false);
 289 
 290   // Reallocate the non-escaping objects and restore their fields.
 291   if (jvmci_enabled COMPILER2_PRESENT(|| (DoEscapeAnalysis && EliminateAllocations))) {
 292     realloc_failures = eliminate_allocations(thread, Unpack_none, cm, deoptee, map, chunk, deoptimized_objects);
 293   }
 294 
 295   // Revoke biases of objects with eliminated locks in the given frame.
 296   Deoptimization::revoke_for_object_deoptimization(deoptee_thread, deoptee, &map, thread);
 297 
 298   // MonitorInfo structures used in eliminate_locks are not GC safe.
 299   NoSafepointVerifier no_safepoint;
 300 
 301   // Now relock objects if synchronization on them was eliminated.
 302   if (jvmci_enabled COMPILER2_PRESENT(|| ((DoEscapeAnalysis || EliminateNestedLocks) && EliminateLocks))) {
 303     eliminate_locks(thread, chunk, realloc_failures, deoptee, Unpack_none, deoptimized_objects);
 304   }
 305   return deoptimized_objects;
 306 }
 307 #endif // COMPILER2_OR_JVMCI
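
Aside for readers unfamiliar with the optional-feature macros used in the gating above: JVMCI_ONLY()/NOT_JVMCI()/COMPILER2_PRESENT() expand either to their argument or to nothing, depending on the build. A self-contained sketch of the same idiom with a hypothetical MY_FEATURE macro (an illustration of the pattern, not HotSpot code):

// Sketch only; MY_FEATURE, FEATURE_ONLY and NOT_FEATURE are hypothetical stand-ins
// for the real JVMCI/COMPILER2 build macros.
#include <cstdio>

#define MY_FEATURE 1           // comment out to simulate a build without the feature
#ifdef MY_FEATURE
  #define FEATURE_ONLY(code) code
  #define NOT_FEATURE(code)
#else
  #define FEATURE_ONLY(code)
  #define NOT_FEATURE(code) code
#endif

int main() {
  // Mirrors "bool const jvmci_enabled = JVMCI_ONLY(UseJVMCICompiler) NOT_JVMCI(false);"
  bool const feature_enabled = FEATURE_ONLY(true) NOT_FEATURE(false);
  bool do_escape_analysis = true, eliminate_allocations = true;  // stand-ins for VM flags
  // Mirrors "if (jvmci_enabled COMPILER2_PRESENT(|| (DoEscapeAnalysis && EliminateAllocations)))"
  if (feature_enabled FEATURE_ONLY(|| (do_escape_analysis && eliminate_allocations))) {
    std::printf("reallocate non-escaping objects, then relock eliminated monitors\n");
  }
  return 0;
}
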
 308 
 309 // This is factored, since it is both called from a JRT_LEAF (deoptimization) and a JRT_ENTRY (uncommon_trap)
 310 Deoptimization::UnrollBlock* Deoptimization::fetch_unroll_info_helper(JavaThread* thread, int exec_mode) {
 311 
 312   // Note: there is a safepoint safety issue here. No matter whether we enter
 313   // via vanilla deopt or uncommon trap we MUST NOT stop at a safepoint once
 314   // the vframeArray is created.
 315   //
 316 
 317   // Allocate our special deoptimization ResourceMark
 318   DeoptResourceMark* dmark = new DeoptResourceMark(thread);
 319   assert(thread->deopt_mark() == NULL, "Pending deopt!");
 320   thread->set_deopt_mark(dmark);
 321 
 322   frame stub_frame = thread->last_frame(); // Makes stack walkable as side effect


1624   for (int i = 0; i < len; i++) {
1625     oop obj = (objects_to_revoke->at(i))();
1626     BiasedLocking::revoke_own_lock(objects_to_revoke->at(i), thread);
1627     assert(!obj->mark().has_bias_pattern(), "biases should be revoked by now");
1628   }
1629 }
1630 
1631 // Revoke the bias of objects with eliminated locking to prepare subsequent relocking.
1632 void Deoptimization::revoke_for_object_deoptimization(JavaThread* deoptee_thread, frame fr, RegisterMap* map, JavaThread* thread) {
1633   if (!UseBiasedLocking) {
1634     return;
1635   }
1636   GrowableArray<Handle>* objects_to_revoke = new GrowableArray<Handle>();
1637   // Collect monitors, but only those with eliminated locking.
1638   get_monitors_from_stack(objects_to_revoke, deoptee_thread, fr, map, true);
1639 
1640   int len = objects_to_revoke->length();
1641   for (int i = 0; i < len; i++) {
1642     oop obj = (objects_to_revoke->at(i))();
1643     markWord mark = obj->mark();
1644     if (!mark.has_bias_pattern() ||
 1645         mark.is_biased_anonymously() || // eliminated locking does not bias an object if it wasn't biased before
1646         !obj->klass()->prototype_header().has_bias_pattern() || // bulk revoke ignores eliminated monitors
1647         (obj->klass()->prototype_header().bias_epoch() != mark.bias_epoch())) { // bulk rebias ignores eliminated monitors
1648       // We reach here regularly if there's just eliminated locking on obj.
 1649       // We must not call BiasedLocking::revoke_own_lock() in this case: it would hit assertions, because it
 1650       // requires non-eliminated locking on obj by deoptee_thread.
 1651       // Luckily we don't have to revoke here, because obj has to be a non-escaping object and can be relocked
 1652       // without revoking the bias. See Deoptimization::relock_objects().
1653       continue;
1654     }
1655     BiasedLocking::revoke(objects_to_revoke->at(i), thread);
1656     assert(!objects_to_revoke->at(i)->mark().has_bias_pattern(), "biases should be revoked by now");
1657   }
1658 }
1659 
1660 void Deoptimization::deoptimize_single_frame(JavaThread* thread, frame fr, Deoptimization::DeoptReason reason) {
1661   assert(fr.can_be_deoptimized(), "checking frame type");
1662 
1663   gather_statistics(reason, Action_none, Bytecodes::_illegal);
1664 
1665   if (LogCompilation && xtty != NULL) {
1666     CompiledMethod* cm = fr.cb()->as_compiled_method_or_null();
1667     assert(cm != NULL, "only compiled methods can deopt");


2692               bc = Bytecodes::_illegal;
2693             sprintf(name, "%s/%s/%s",
2694                     trap_reason_name(reason),
2695                     trap_action_name(action),
2696                     Bytecodes::is_defined(bc)? Bytecodes::name(bc): "other");
2697             juint r = counter >> LSB_BITS;
2698             tty->print_cr("  %40s: " UINT32_FORMAT " (%.1f%%)", name, r, (r * 100.0) / total);
2699             account -= r;
2700           }
2701         }
2702       }
2703     }
2704     if (account != 0) {
2705       PRINT_STAT_LINE("unaccounted", account);
2706     }
2707     #undef PRINT_STAT_LINE
2708     if (xtty != NULL)  xtty->tail("statistics");
2709   }
2710 }
2711 





































2712 // Returns true iff objects were reallocated and relocked because of access through JVMTI
2713 bool EscapeBarrier::objs_are_deoptimized(JavaThread* thread, intptr_t* fr_id) {
2714   // first/oldest update holds the flag
2715   GrowableArray<jvmtiDeferredLocalVariableSet*>* list = JvmtiDeferredUpdates::deferred_locals(thread);
2716   bool result = false;
2717   if (list != NULL) {
2718     for (int i = 0; i < list->length(); i++) {
2719       if (list->at(i)->matches(fr_id)) {
2720         result = list->at(i)->objects_are_deoptimized();
2721         break;
2722       }
2723     }
2724   }
2725   return result;
2726 }
2727 
 2728 // Object references of frames up to the given depth are about to be accessed. Frames with
 2729 // optimizations based on escape state that is potentially changed by these accesses need to be
 2730 // deoptimized, and the referenced objects need to be reallocated and relocked.
 2731 // Up to the given depth this is done for frames with non-escaping objects in scope. Deeper frames
 2732 // are handled only if they pass non-escaping objects as arguments, because those objects might
 2733 // escape from callee frames within the given depth.
 2734 // The search for deeper frames ends when an entry frame is reached, because arguments to
 2735 // native methods are considered to escape globally.
2736 bool EscapeBarrier::deoptimize_objects(int depth) {
2737   if (barrier_active() && deoptee_thread()->has_last_Java_frame()) {
2738     ResourceMark rm(calling_thread());
2739     HandleMark   hm;
2740     RegisterMap  reg_map(deoptee_thread());
2741     vframe* vf = deoptee_thread()->last_java_vframe(&reg_map);
2742     int cur_depth = 0;
2743     while (vf != NULL && ((cur_depth <= depth) || !vf->is_entry_frame())) {
2744       if (vf->is_compiled_frame()) {
2745         compiledVFrame* cvf = compiledVFrame::cast(vf);
2746         // Deoptimize frame and local objects if any exist.
2747         // If cvf is deeper than depth, then we deoptimize iff local objects are passed as args.
2748         bool should_deopt = cur_depth <= depth ? cvf->not_global_escape_in_scope() : cvf->arg_escape();
2749         if (should_deopt && !deoptimize_objects(cvf->fr().id())) {
 2751           // reallocation of scalar-replaced objects failed because the heap is exhausted
2751           return false;
2752         }

2753 
2754         // move to top frame
 2755         while (!vf->is_top()) {
2756           cur_depth++;
2757           vf = vf->sender();
2758         }
2759       }
2760 
2761       // move to next physical frame
2762       cur_depth++;
2763       vf = vf->sender();
2764     }
2765   }
2766   return true;
2767 }
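
Aside: the comment above EscapeBarrier::deoptimize_objects() describes the traversal policy (frames with non-escaping objects in scope up to the given depth, deeper frames only when they pass such objects as arguments, stop at an entry frame). A minimal standalone sketch of that policy over a plain vector of per-physical-frame records; FrameInfo and its fields are hypothetical stand-ins, not the vframe API:

// Sketch only, not HotSpot source: one record per physical frame, youngest first.
#include <vector>

struct FrameInfo {
  bool is_entry;                    // entry frame: arguments escape globally, stop the search
  bool not_global_escape_in_scope;  // frame has non-escaping objects in scope
  bool arg_escape;                  // frame passes non-escaping objects as arguments
};

// Returns the indices of the frames that would be selected for object deoptimization.
static std::vector<size_t> select_frames(const std::vector<FrameInfo>& frames, int depth) {
  std::vector<size_t> selected;
  for (size_t i = 0; i < frames.size(); i++) {
    int cur_depth = static_cast<int>(i);
    if (cur_depth > depth && frames[i].is_entry) {
      break;  // beyond the requested depth the search stops at the first entry frame
    }
    bool should_deopt = (cur_depth <= depth) ? frames[i].not_global_escape_in_scope
                                             : frames[i].arg_escape;
    if (should_deopt) {
      selected.push_back(i);
    }
  }
  return selected;
}

The real code additionally skips non-compiled frames and walks all virtual frames belonging to each physical frame, which the sketch leaves out.
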
2768 
2769 bool EscapeBarrier::deoptimize_objects_all_threads() {
2770   if (!barrier_active()) return true;
2771   ResourceMark rm(calling_thread());
2772   for (JavaThreadIteratorWithHandle jtiwh; JavaThread *jt = jtiwh.next(); ) {
2773     if (jt->has_last_Java_frame()) {
2774       RegisterMap reg_map(jt);
2775       vframe* vf = jt->last_java_vframe(&reg_map);
2776       assert(jt->frame_anchor()->walkable(),
2777              "The stack of JavaThread " PTR_FORMAT " is not walkable. Thread state is %d",
2778              p2i(jt), jt->thread_state());
2779       while (vf != NULL) {
2780         if (vf->is_compiled_frame()) {
2781           compiledVFrame* cvf = compiledVFrame::cast(vf);
2782           if ((cvf->not_global_escape_in_scope() || cvf->arg_escape()) &&
2783               !deoptimize_objects_internal(jt, cvf->fr().id())) {
2784             return false; // reallocation failure
2785           }
2786           // move to top frame

 2787           while (!vf->is_top()) {
2788             vf = vf->sender();
2789           }
2790         }
2791         // move to next physical frame
2792         vf = vf->sender();
2793       }
2794     }
2795   }
2796   return true; // success
2797 }
2798 
2799 bool EscapeBarrier::_deoptimizing_objects_for_all_threads = false;
2800 bool EscapeBarrier::_self_deoptimization_in_progress      = false;
2801 
2802 class EscapeBarrierSuspendHandshake : public HandshakeClosure {
2803   JavaThread* _excluded_thread;
2804  public:
2805   EscapeBarrierSuspendHandshake(JavaThread* excluded_thread, const char* name) : HandshakeClosure(name), _excluded_thread(excluded_thread) { }
2806   void do_thread(Thread* th) {
2807     if (th->is_Java_thread() && !th->is_hidden_from_external_view() && (th != _excluded_thread)) {
2808       th->set_obj_deopt_flag();
2809     }
2810   }
2811 };
2812 
2813 void EscapeBarrier::sync_and_suspend_one() {
2814   assert(_calling_thread != NULL, "calling thread must not be NULL");
2815   assert(_deoptee_thread != NULL, "deoptee thread must not be NULL");
2816   assert(barrier_active(), "should not call");
2817 
2818   // Sync with other threads that might be doing deoptimizations
2819   {
2820     // Need to switch to _thread_blocked for the wait() call
2821     ThreadBlockInVM tbivm(_calling_thread);
2822     MonitorLocker ml(_calling_thread, EscapeBarrier_lock, Mutex::_no_safepoint_check_flag);
2823     while (_self_deoptimization_in_progress || _deoptee_thread->is_obj_deopt_suspend()) {
2824       ml.wait();
2825     }
2826 
2827     if (self_deopt()) {
2828       _self_deoptimization_in_progress = true;
2829       return;
2830     }
2831 
2832     // set suspend flag for target thread
2833     _deoptee_thread->set_obj_deopt_flag();
2834   }
2835 
2836   // suspend target thread


2837   EscapeBarrierSuspendHandshake sh(NULL, "EscapeBarrierSuspendOne");
2838   Handshake::execute_direct(&sh, _deoptee_thread);

2839   assert(!_deoptee_thread->has_last_Java_frame() || _deoptee_thread->frame_anchor()->walkable(),
2840          "stack should be walkable now");
2841 }
2842 
2843 void EscapeBarrier::sync_and_suspend_all() {
2844   assert(barrier_active(), "should not call");
2845   assert(_calling_thread != NULL, "calling thread must not be NULL");
2846   assert(all_threads(), "sanity");
2847 
2848   // Sync with other threads that might be doing deoptimizations
2849   {
2850     // Need to switch to _thread_blocked for the wait() call
2851     ThreadBlockInVM tbivm(_calling_thread);
2852     MonitorLocker ml(_calling_thread, EscapeBarrier_lock, Mutex::_no_safepoint_check_flag);
2853 
2854     bool deopt_in_progress;
2855     do {
2856       deopt_in_progress = _self_deoptimization_in_progress;
2857       for (JavaThreadIteratorWithHandle jtiwh; JavaThread *jt = jtiwh.next(); ) {
2858         deopt_in_progress = (deopt_in_progress || jt->is_obj_deopt_suspend());
2859         if (deopt_in_progress) {
2860           break;
2861         }
2862       }
2863       if (deopt_in_progress) {
2864         ml.wait(); // then check again
2865       }
 2866     } while (deopt_in_progress);
2867 
2868     _self_deoptimization_in_progress = true;
2869     _deoptimizing_objects_for_all_threads = true;
2870   }
2871 
2872   EscapeBarrierSuspendHandshake sh(_calling_thread, "EscapeBarrierSuspendAll");
2873   Handshake::execute(&sh);
2874 #ifdef ASSERT
2875   for (JavaThreadIteratorWithHandle jtiwh; JavaThread *jt = jtiwh.next(); ) {
2876     if (jt->is_hidden_from_external_view()) continue;
2877     assert(!jt->has_last_Java_frame() || jt->frame_anchor()->walkable(),
2878            "The stack of JavaThread " PTR_FORMAT " is not walkable. Thread state is %d",
2879            p2i(jt), jt->thread_state());
2880   }
2881 #endif // ASSERT
2882 }
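
Aside: the claim/release protocol above (wait on EscapeBarrier_lock until no other object deoptimization is in progress, set the flags, later clear them in resume_all() and notify_all()) maps onto a standard condition-variable pattern. A minimal sketch with C++ standard-library primitives, leaving out the per-thread suspend flags and the handshake; BarrierState and its members are illustrative names, not HotSpot API:

// Sketch only, not HotSpot source: the wait/claim/release pattern of the barrier.
#include <condition_variable>
#include <mutex>

struct BarrierState {
  std::mutex lock;                  // stands in for EscapeBarrier_lock
  std::condition_variable cv;
  bool self_deoptimization_in_progress = false;
  bool deoptimizing_objects_for_all_threads = false;

  void claim() {                    // cf. sync_and_suspend_all()
    std::unique_lock<std::mutex> ml(lock);
    // Equivalent of the do { ... ml.wait(); } while (...) loop: re-check after every wakeup.
    cv.wait(ml, [this] { return !self_deoptimization_in_progress; });
    self_deoptimization_in_progress = true;
    deoptimizing_objects_for_all_threads = true;
  }

  void release() {                  // cf. resume_all()
    std::lock_guard<std::mutex> ml(lock);
    deoptimizing_objects_for_all_threads = false;
    self_deoptimization_in_progress = false;
    cv.notify_all();                // wake any thread blocked in claim()
  }
};
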
2883 
2884 void EscapeBarrier::resume_one() {
2885   assert(barrier_active(), "should not call");
2886   assert(!all_threads(), "use resume_all()");
2887   MonitorLocker ml(_calling_thread, EscapeBarrier_lock, Mutex::_no_safepoint_check_flag);
2888   if (self_deopt()) {
2889     assert(_self_deoptimization_in_progress, "incorrect synchronization");
2890     _self_deoptimization_in_progress = false;
2891   } else {
2892     _deoptee_thread->clear_obj_deopt_flag();
2893   }
2894   ml.notify_all();
2895 }
2896 
2897 void EscapeBarrier::resume_all() {
2898   assert(barrier_active(), "should not call");
2899   assert(all_threads(), "use resume_one()");
2900   MonitorLocker ml(_calling_thread, EscapeBarrier_lock, Mutex::_no_safepoint_check_flag);
2901   assert(_self_deoptimization_in_progress, "incorrect synchronization");
2902   _deoptimizing_objects_for_all_threads = false;
2903   _self_deoptimization_in_progress = false;
2904   for (JavaThreadIteratorWithHandle jtiwh; JavaThread *jt = jtiwh.next(); ) {
2905     jt->clear_obj_deopt_flag();
2906   }
2907   ml.notify_all();
2908 }
2909 
2910 void EscapeBarrier::thread_added(JavaThread* jt) {
2911   if (!jt->is_hidden_from_external_view()) {
2912     MutexLocker ml(EscapeBarrier_lock, Mutex::_no_safepoint_check_flag);
2913     if (_deoptimizing_objects_for_all_threads) {
2914       jt->set_obj_deopt_flag();
2915     }

2916   }
2917 }
2918 
2919 void EscapeBarrier::thread_removed(JavaThread* jt) {
2920   MonitorLocker ml(EscapeBarrier_lock, Mutex::_no_safepoint_check_flag);
2921   if (jt->is_obj_deopt_suspend()) {
2922     // jt terminated before it self suspended.
2923     // Other threads might be waiting to perform deoptimizations for it.
2924     jt->clear_obj_deopt_flag();
2925     ml.notify_all();
2926   }
2927 }
2928 
2929 // Remember that objects were reallocated and relocked for the compiled frame with the given id
2930 static void set_objs_are_deoptimized(JavaThread* thread, intptr_t* fr_id) {
2931   // set in first/oldest update
2932   GrowableArray<jvmtiDeferredLocalVariableSet*>* list = JvmtiDeferredUpdates::deferred_locals(thread);
2933   DEBUG_ONLY(bool found = false);
2934   if (list != NULL) {
2935     for (int i = 0; i < list->length(); i++) {
2936       if (list->at(i)->matches(fr_id)) {
2937         DEBUG_ONLY(found = true);
2938         list->at(i)->set_objs_are_deoptimized();
2939         break;
2940       }
2941     }
2942   }
 2943   assert(found, "variable set should exist for at least one vframe");
2944 }
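
Aside: objs_are_deoptimized() and set_objs_are_deoptimized() above share one pattern, namely scanning the thread's deferred-update list for the first entry matching the frame id and reading or setting its flag. A minimal standalone sketch of that lookup; DeferredUpdate is a hypothetical stand-in for jvmtiDeferredLocalVariableSet:

// Sketch only, not HotSpot source.
#include <vector>

struct DeferredUpdate {
  const void* frame_id;             // identifies the compiled frame (cf. fr().id())
  bool objects_are_deoptimized;     // set once objects were reallocated and relocked
};

// Marks the first (oldest) update for the frame; returns false if no update exists.
static bool mark_objects_deoptimized(std::vector<DeferredUpdate>& updates, const void* frame_id) {
  for (DeferredUpdate& u : updates) {
    if (u.frame_id == frame_id) {
      u.objects_are_deoptimized = true;   // the first/oldest matching entry holds the flag
      return true;
    }
  }
  return false;
}
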
2945 

