1 /*
   2  * Copyright (c) 1997, 2020, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #ifndef SHARE_RUNTIME_DEOPTIMIZATION_HPP
  26 #define SHARE_RUNTIME_DEOPTIMIZATION_HPP
  27 
  28 #include "memory/allocation.hpp"
  29 #include "runtime/frame.hpp"
  30 
  31 class ProfileData;
  32 class vframeArray;
  33 class MonitorInfo;
  34 class MonitorValue;
  35 class ObjectValue;
  36 class AutoBoxObjectValue;
  37 class ScopeValue;
  38 class compiledVFrame;
  39 class EscapeBarrier;
  40 
  41 template<class E> class GrowableArray;
  42 
// Deoptimization is all-static: it bundles the machinery for transitioning
// execution from a compiled frame back to the interpreter, together with the
// classification of deopt events (DeoptReason/DeoptAction), the packing and
// unpacking of trap requests, and trap statistics/MDO bookkeeping.
class Deoptimization : AllStatic {
  friend class VMStructs;
  friend class EscapeBarrier;

 public:
  // What condition caused the deoptimization?
  enum DeoptReason {
    Reason_many = -1,             // indicates presence of several reasons
    Reason_none = 0,              // indicates absence of a relevant deopt.
    // Next 8 reasons are recorded per bytecode in DataLayout::trap_bits.
    // This is more complicated for JVMCI as JVMCI may deoptimize to *some* bytecode before the
    // bytecode that actually caused the deopt (with inlining, JVMCI may even deoptimize to a
    // bytecode in another method):
    //  - bytecode y in method b() causes deopt
    //  - JVMCI deoptimizes to bytecode x in method a()
    // -> the deopt reason will be recorded for method a() at bytecode x
    Reason_null_check,            // saw unexpected null or zero divisor (@bci)
    Reason_null_assert,           // saw unexpected non-null or non-zero (@bci)
    Reason_range_check,           // saw unexpected array index (@bci)
    Reason_class_check,           // saw unexpected object class (@bci)
    Reason_array_check,           // saw unexpected array class (aastore @bci)
    Reason_intrinsic,             // saw unexpected operand to intrinsic (@bci)
    Reason_bimorphic,             // saw unexpected object class in bimorphic inlining (@bci)

#if INCLUDE_JVMCI
    // JVMCI aliases for some of the per-bytecode reasons above.
    Reason_unreached0             = Reason_null_assert,
    Reason_type_checked_inlining  = Reason_intrinsic,
    Reason_optimized_type_check   = Reason_bimorphic,
#endif

    Reason_profile_predicate,     // compiler generated predicate moved from frequent branch in a loop failed

    // recorded per method
    Reason_unloaded,              // unloaded class or constant pool entry
    Reason_uninitialized,         // bad class state (uninitialized)
    Reason_initialized,           // class has been fully initialized
    Reason_unreached,             // code is not reached, compiler
    Reason_unhandled,             // arbitrary compiler limitation
    Reason_constraint,            // arbitrary runtime constraint violated
    Reason_div0_check,            // a null_check due to division by zero
    Reason_age,                   // nmethod too old; tier threshold reached
    Reason_predicate,             // compiler generated predicate failed
    Reason_loop_limit_check,      // compiler generated loop limits check failed
    Reason_speculate_class_check, // saw unexpected object class from type speculation
    Reason_speculate_null_check,  // saw unexpected null from type speculation
    Reason_speculate_null_assert, // saw unexpected null from type speculation
    Reason_rtm_state_change,      // rtm state change detected
    Reason_unstable_if,           // a branch predicted always false was taken
    Reason_unstable_fused_if,     // fused two ifs that had each one untaken branch. One is now taken.
#if INCLUDE_JVMCI
    Reason_aliasing,              // optimistic assumption about aliasing failed
    Reason_transfer_to_interpreter, // explicit transferToInterpreter()
    // NOTE(review): the next three JVMCI reasons are uncommented upstream; the
    // names suggest their meaning — confirm against the JVMCI sources.
    Reason_not_compiled_exception_handler,
    Reason_unresolved,
    Reason_jsr_mismatch,
#endif

    // Reason_tenured is counted separately, add normal counted Reasons above.
    // Related to MethodData::_trap_hist_limit where Reason_tenured isn't included
    Reason_tenured,               // age of the code has reached the limit
    Reason_LIMIT,

    // Note:  Keep this enum in sync. with _trap_reason_name.
    Reason_RECORDED_LIMIT = Reason_profile_predicate  // some are not recorded per bc
    // Note:  Reason_RECORDED_LIMIT should fit into 31 bits of
    // DataLayout::trap_bits.  This dependency is enforced indirectly
    // via asserts, to avoid excessive direct header-to-header dependencies.
    // See Deoptimization::trap_state_reason and class DataLayout.
  };

  // What action must be taken by the runtime?
  enum DeoptAction {
    Action_none,                  // just interpret, do not invalidate nmethod
    Action_maybe_recompile,       // recompile the nmethod; need not invalidate
    Action_reinterpret,           // invalidate the nmethod, reset IC, maybe recompile
    Action_make_not_entrant,      // invalidate the nmethod, recompile (probably)
    Action_make_not_compilable,   // invalidate the nmethod and do not compile
    Action_LIMIT
    // Note:  Keep this enum in sync. with _trap_action_name.
  };

  // Bit-field widths and shifts used to pack a DeoptAction, a DeoptReason and
  // a debug id into a single int trap request; see make_trap_request() and
  // the trap_request_*() decoders below.
  enum {
    _action_bits = 3,
    _reason_bits = 5,
    _debug_id_bits = 23,
    _action_shift = 0,
    _reason_shift = _action_shift+_action_bits,
    _debug_id_shift = _reason_shift+_reason_bits,
    BC_CASE_LIMIT = PRODUCT_ONLY(1) NOT_PRODUCT(4) // for _deoptimization_hist
  };

  // How the unpack stub should continue execution in the unpacked frame(s).
  enum UnpackType {
    Unpack_deopt                = 0, // normal deoptimization, use pc computed in unpack_vframe_on_stack
    Unpack_exception            = 1, // exception is pending
    Unpack_uncommon_trap        = 2, // redo last byte code (C2 only)
    Unpack_reexecute            = 3, // reexecute bytecode (C1 only)
    Unpack_none                 = 4, // not deoptimizing the frame, just reallocating/relocking for JVMTI
    Unpack_LIMIT                = 5
  };

#if INCLUDE_JVMCI
  // Can reconstruct virtualized unsafe large accesses to byte arrays.
  static const int _support_large_access_byte_array_virtualization = 1;
#endif

  // Make all nmethods that are marked_for_deoptimization not_entrant and deoptimize any live
  // activations using those nmethods.  If an nmethod is passed as an argument then it is
  // marked_for_deoptimization and made not_entrant.  Otherwise a scan of the code cache is done to
  // find all marked nmethods and they are made not_entrant.
  static void deoptimize_all_marked(nmethod* nmethod_only = NULL);

 private:
  // Revoke biased locks at deopt.
  static void revoke_from_deopt_handler(JavaThread* thread, frame fr, RegisterMap* map);

  // Revoke biased locks when objects of a frame are deoptimized (reallocated/relocked)
  // on behalf of another thread — TODO confirm against deoptimization.cpp.
  static void revoke_for_object_deoptimization(JavaThread* deoptee_thread, frame fr, RegisterMap* map, JavaThread* thread);

 public:
  // Deoptimizes a frame lazily. Deopt happens on return to the frame.
  static void deoptimize(JavaThread* thread, frame fr, DeoptReason reason = Reason_constraint);

#if INCLUDE_JVMCI
  static address deoptimize_for_missing_exception_handler(CompiledMethod* cm);
  static oop get_cached_box(AutoBoxObjectValue* bv, frame* fr, RegisterMap* reg_map, TRAPS);
#endif

  private:
  // Does the actual work for deoptimizing a single frame
  static void deoptimize_single_frame(JavaThread* thread, frame fr, DeoptReason reason);

#if COMPILER2_OR_JVMCI
  // Deoptimize objects, that is reallocate and relock them, just before they escape through JVMTI.
  // The given vframes cover one physical frame.
  static bool deoptimize_objects_internal(JavaThread* thread, GrowableArray<compiledVFrame*>* chunk, bool& realloc_failures);

 public:

  // Support for restoring non-escaping objects
  static bool realloc_objects(JavaThread* thread, frame* fr, RegisterMap* reg_map, GrowableArray<ScopeValue*>* objects, TRAPS);
  static void reassign_type_array_elements(frame* fr, RegisterMap* reg_map, ObjectValue* sv, typeArrayOop obj, BasicType type);
  static void reassign_object_array_elements(frame* fr, RegisterMap* reg_map, ObjectValue* sv, objArrayOop obj);
  static void reassign_fields(frame* fr, RegisterMap* reg_map, GrowableArray<ScopeValue*>* objects, bool realloc_failures, bool skip_internal);
  static bool relock_objects(JavaThread* thread, GrowableArray<MonitorInfo*>* monitors,
                             JavaThread* deoptee_thread, frame& fr, int exec_mode, bool realloc_failures);
  static void pop_frames_failed_reallocs(JavaThread* thread, vframeArray* array);
  NOT_PRODUCT(static void print_objects(GrowableArray<ScopeValue*>* objects, bool realloc_failures);)
#endif // COMPILER2_OR_JVMCI

  public:
  // Builds the vframeArray describing the interpreter frames that will replace
  // the compiled frame fr (the given vframes cover that one physical frame).
  static vframeArray* create_vframeArray(JavaThread* thread, frame fr, RegisterMap *reg_map, GrowableArray<compiledVFrame*>* chunk, bool realloc_failures);

  // Interface used for unpacking deoptimized frames

  // UnrollBlock is returned by fetch_unroll_info() to the deoptimization handler (blob).
  // This is only a CHeapObj to ease debugging after a deopt failure
  class UnrollBlock : public CHeapObj<mtCompiler> {
    friend class VMStructs;
    friend class JVMCIVMStructs;
   private:
    int       _size_of_deoptimized_frame; // Size, in bytes, of current deoptimized frame
    int       _caller_adjustment;         // Adjustment, in bytes, to caller's SP by initial interpreted frame
    int       _number_of_frames;          // Number frames to unroll
    int       _total_frame_sizes;         // Total of number*sizes frames
    intptr_t* _frame_sizes;               // Array of frame sizes, in bytes, for unrolling the stack
    address*  _frame_pcs;                 // Array of frame pc's, in bytes, for unrolling the stack
    intptr_t* _register_block;            // Block for storing callee-saved registers.
    BasicType _return_type;               // Tells if we have to restore double or long return value
    intptr_t  _initial_info;              // Platform dependent data for the sender frame (was FP on x86)
    int       _caller_actual_parameters;  // The number of actual arguments at the
                                          // interpreted caller of the deoptimized frame
    int       _unpack_kind;               // exec_mode that can be changed during fetch_unroll_info

    // The following fields are used as temps during the unpacking phase
    // (which is tight on registers, especially on x86). They really ought
    // to be PD variables but that involves moving this class into its own
    // file to use the pd include mechanism. Maybe in a later cleanup ...
    intptr_t  _counter_temp;              // SHOULD BE PD VARIABLE (x86 frame count temp)
    intptr_t  _sender_sp_temp;            // SHOULD BE PD VARIABLE (x86 sender_sp)
   public:
    // Constructor
    UnrollBlock(int  size_of_deoptimized_frame,
                int  caller_adjustment,
                int  caller_actual_parameters,
                int  number_of_frames,
                intptr_t* frame_sizes,
                address* frames_pcs,
                BasicType return_type,
                int unpack_kind);
    ~UnrollBlock();

    // Returns where a register is located.
    intptr_t* value_addr_at(int register_number) const;

    // Accessors
    intptr_t* frame_sizes()  const { return _frame_sizes; }
    int number_of_frames()  const { return _number_of_frames; }
    address*  frame_pcs()   const { return _frame_pcs ; }
    int  unpack_kind()   const { return _unpack_kind; }

    // Returns the total size of frames
    int size_of_frames() const;

    void set_initial_info(intptr_t info) { _initial_info = info; }

    int caller_actual_parameters() const { return _caller_actual_parameters; }

    // Accessors used by the code generator for the unpack stub.
    static int size_of_deoptimized_frame_offset_in_bytes() { return offset_of(UnrollBlock, _size_of_deoptimized_frame); }
    static int caller_adjustment_offset_in_bytes()         { return offset_of(UnrollBlock, _caller_adjustment);         }
    static int number_of_frames_offset_in_bytes()          { return offset_of(UnrollBlock, _number_of_frames);          }
    static int frame_sizes_offset_in_bytes()               { return offset_of(UnrollBlock, _frame_sizes);               }
    static int total_frame_sizes_offset_in_bytes()         { return offset_of(UnrollBlock, _total_frame_sizes);         }
    static int frame_pcs_offset_in_bytes()                 { return offset_of(UnrollBlock, _frame_pcs);                 }
    static int register_block_offset_in_bytes()            { return offset_of(UnrollBlock, _register_block);            }
    static int return_type_offset_in_bytes()               { return offset_of(UnrollBlock, _return_type);               }
    static int counter_temp_offset_in_bytes()              { return offset_of(UnrollBlock, _counter_temp);              }
    static int initial_info_offset_in_bytes()              { return offset_of(UnrollBlock, _initial_info);              }
    static int unpack_kind_offset_in_bytes()               { return offset_of(UnrollBlock, _unpack_kind);               }
    static int sender_sp_temp_offset_in_bytes()            { return offset_of(UnrollBlock, _sender_sp_temp);            }

    BasicType return_type() const { return _return_type; }
    void print();
  };

  //** Returns an UnrollBlock containing information about
  // how to make room for the resulting interpreter frames.
  // Called by assembly stub after execution has returned to
  // deoptimized frame.
  // @argument thread.     Thread where stub_frame resides.
  // @see OptoRuntime::deoptimization_fetch_unroll_info_C
  static UnrollBlock* fetch_unroll_info(JavaThread* thread, int exec_mode);

  //** Unpacks vframeArray onto execution stack
  // Called by assembly stub after execution has returned to
  // deoptimized frame and after the stack unrolling.
  // @argument thread.     Thread where stub_frame resides.
  // @argument exec_mode.  Determines how execution should be continued in top frame.
  //                       0 means continue after current byte code
  //                       1 means exception has happened, handle exception
  //                       2 means reexecute current bytecode (for uncommon traps).
  // @see OptoRuntime::deoptimization_unpack_frames_C
  // Return BasicType of call return type, if any
  static BasicType unpack_frames(JavaThread* thread, int exec_mode);

  // Cleans up deoptimization bits on thread after unpacking or in the
  // case of an exception.
  static void cleanup_deopt_info(JavaThread  *thread,
                                 vframeArray * array);

  // Restores callee saved values from deoptimized frame into oldest interpreter frame
  // so caller of the deoptimized frame will get back the values it expects.
  static void unwind_callee_save_values(frame* f, vframeArray* vframe_array);

  //** Performs an uncommon trap for compiled code.
  // The top most compiler frame is converted into interpreter frames
  static UnrollBlock* uncommon_trap(JavaThread* thread, jint unloaded_class_index, jint exec_mode);
  // Helper routine that enters the VM and may block
  static void uncommon_trap_inner(JavaThread* thread, jint unloaded_class_index);

  //** Deoptimizes the frame identified by id.
  // Only called from VMDeoptimizeFrame
  // @argument thread.     Thread where stub_frame resides.
  // @argument id.         id of frame that should be deoptimized.
  static void deoptimize_frame_internal(JavaThread* thread, intptr_t* id, DeoptReason reason);

  // if thread is not the current thread then execute
  // VM_DeoptimizeFrame otherwise deoptimize directly.
  static void deoptimize_frame(JavaThread* thread, intptr_t* id, DeoptReason reason);
  static void deoptimize_frame(JavaThread* thread, intptr_t* id);

  // Statistics
  static void gather_statistics(DeoptReason reason, DeoptAction action,
                                Bytecodes::Code bc = Bytecodes::_illegal);
  static void print_statistics();

  // How much room to adjust the last frame's SP by, to make space for
  // the callee's interpreter frame (which expects locals to be next to
  // incoming arguments)
  static int last_frame_adjust(int callee_parameters, int callee_locals);

  // trap_request codes
  // A trap_request is either a non-negative constant pool index (standing for
  // an unloaded CP entry) or the bitwise negation of the packed
  // (debug_id, reason, action) bit fields — and hence always negative in the
  // packed case.  See make_trap_request() below for the encoder.
  static DeoptReason trap_request_reason(int trap_request) {
    if (trap_request < 0)
      return (DeoptReason)
        ((~(trap_request) >> _reason_shift) & right_n_bits(_reason_bits));
    else
      // standard reason for unloaded CP entry
      return Reason_unloaded;
  }
  static DeoptAction trap_request_action(int trap_request) {
    if (trap_request < 0)
      return (DeoptAction)
        ((~(trap_request) >> _action_shift) & right_n_bits(_action_bits));
    else
      // standard action for unloaded CP entry
      return _unloaded_action;
  }
  static int trap_request_debug_id(int trap_request) {
    if (trap_request < 0) {
      return ((~(trap_request) >> _debug_id_shift) & right_n_bits(_debug_id_bits));
    } else {
      // standard debug id (none) for unloaded CP entry
      return 0;
    }
  }
  static int trap_request_index(int trap_request) {
    if (trap_request < 0)
      return -1;
    else
      return trap_request;
  }
  // Encode a trap request: either pass the CP index through unchanged
  // (index != -1) or pack reason/action into a negated bit pattern.
  // The asserts round-trip the encoding through the decoders above.
  static int make_trap_request(DeoptReason reason, DeoptAction action,
                               int index = -1) {
    assert((1 << _reason_bits) >= Reason_LIMIT, "enough bits");
    assert((1 << _action_bits) >= Action_LIMIT, "enough bits");
    int trap_request;
    if (index != -1)
      trap_request = index;
    else
      trap_request = (~(((reason) << _reason_shift)
                        + ((action) << _action_shift)));
    assert(reason == trap_request_reason(trap_request), "valid reason");
    assert(action == trap_request_action(trap_request), "valid action");
    assert(index  == trap_request_index(trap_request),  "valid index");
    return trap_request;
  }

  // The trap_state stored in a MDO is decoded here.
  // It records two items of information.
  //  reason:  If a deoptimization happened here, what its reason was,
  //           or if there were multiple deopts with differing reasons.
  //  recompiled: If a deoptimization here triggered a recompilation.
  // Note that not all reasons are recorded per-bci.
  static DeoptReason trap_state_reason(int trap_state);
  static int  trap_state_has_reason(int trap_state, int reason);
  static int  trap_state_add_reason(int trap_state, int reason);
  static bool trap_state_is_recompiled(int trap_state);
  static int  trap_state_set_recompiled(int trap_state, bool z);
  static const char* format_trap_state(char* buf, size_t buflen,
                                       int trap_state);

  // True for the reasons recorded per bytecode (up to Reason_RECORDED_LIMIT).
  static bool reason_is_recorded_per_bytecode(DeoptReason reason) {
    return reason > Reason_none && reason <= Reason_RECORDED_LIMIT;
  }

  // Map a reason to the per-bytecode reason it is recorded under,
  // or Reason_none if it has no per-bytecode representative.
  static DeoptReason reason_recorded_per_bytecode_if_any(DeoptReason reason) {
    if (reason_is_recorded_per_bytecode(reason))
      return reason;
    else if (reason == Reason_div0_check) // null check due to divide-by-zero?
      return Reason_null_check;           // recorded per BCI as a null check
    else if (reason == Reason_speculate_class_check)
      return Reason_class_check;
    else if (reason == Reason_speculate_null_check)
      return Reason_null_check;
    else if (reason == Reason_speculate_null_assert)
      return Reason_null_assert;
    else if (reason == Reason_unstable_if)
      return Reason_intrinsic;
    else if (reason == Reason_unstable_fused_if)
      return Reason_range_check;
    else
      return Reason_none;
  }

  // True for the Reason_speculate_* variants (failed type speculation).
  static bool reason_is_speculate(int reason) {
    if (reason == Reason_speculate_class_check ||
        reason == Reason_speculate_null_check ||
        reason == Reason_speculate_null_assert) {
      return true;
    }
    return false;
  }

  static DeoptReason reason_null_check(bool speculative) {
    return speculative ? Deoptimization::Reason_speculate_null_check : Deoptimization::Reason_null_check;
  }

  static DeoptReason reason_class_check(bool speculative) {
    return speculative ? Deoptimization::Reason_speculate_class_check : Deoptimization::Reason_class_check;
  }

  static DeoptReason reason_null_assert(bool speculative) {
    return speculative ? Deoptimization::Reason_speculate_null_assert : Deoptimization::Reason_null_assert;
  }

  // Speculative traps have their own (typically higher) per-method limit.
  static uint per_method_trap_limit(int reason) {
    return reason_is_speculate(reason) ? (uint)PerMethodSpecTrapLimit : (uint)PerMethodTrapLimit;
  }

  static const char* trap_reason_name(int reason);
  static const char* trap_action_name(int action);
  // Format like reason='foo' action='bar' index='123'.
  // This is suitable both for XML and for tty output.
  static const char* format_trap_request(char* buf, size_t buflen,
                                         int trap_request);

  static jint total_deoptimization_count();

  // JVMTI PopFrame support

  // Preserves incoming arguments to the popped frame when it is
  // returning to a deoptimized caller
  static void popframe_preserve_args(JavaThread* thread, int bytes_to_save, void* start_address);

  static MethodData* get_method_data(JavaThread* thread, const methodHandle& m, bool create_if_missing);
 private:
  // Update the mdo's count and per-BCI reason bits, returning previous state:
  static ProfileData* query_update_method_data(MethodData* trap_mdo,
                                               int trap_bci,
                                               DeoptReason reason,
                                               bool update_total_trap_count,
#if INCLUDE_JVMCI
                                               bool is_osr,
#endif
                                               Method* compiled_method,
                                               //outputs:
                                               uint& ret_this_trap_count,
                                               bool& ret_maybe_prior_trap,
                                               bool& ret_maybe_prior_recompile);
  // class loading support for uncommon trap
  static void load_class_by_index(const constantPoolHandle& constant_pool, int index, TRAPS);
  static void load_class_by_index(const constantPoolHandle& constant_pool, int index);

  // Does the work for fetch_unroll_info() — see deoptimization.cpp for details.
  static UnrollBlock* fetch_unroll_info_helper(JavaThread* thread, int exec_mode);

  static DeoptAction _unloaded_action; // == Action_reinterpret;
  static const char* _trap_reason_name[];
  static const char* _trap_action_name[];

  static juint _deoptimization_hist[Reason_LIMIT][1+Action_LIMIT][BC_CASE_LIMIT];
  // Note:  Histogram array size is 1-2 Kb.

 public:
  // Record a trap with the given (per-bytecode) reason in trap_mdo at trap_bci,
  // on behalf of the interpreter — see the interpreter runtime callers.
  static void update_method_data_from_interpreter(MethodData* trap_mdo, int trap_bci, int reason);
};
 478 
// EscapeBarriers should be placed on execution paths where JVMTI agents can access object
// references held by Java threads.
// They provide the means to revert optimizations based on escape analysis in a well-synchronized
// manner just before local references escape through JVMTI.
class EscapeBarrier : StackObj {
#if COMPILER2_OR_JVMCI
  JavaThread* const _calling_thread;  // thread performing the object deoptimization
  JavaThread* const _deoptee_thread;  // target thread; NULL means: all threads (see all_threads())
  bool        const _barrier_active;  // false if no escape-analysis based optimizations can exist

  static bool _deoptimizing_objects_for_all_threads;
  static bool _self_deoptimization_in_progress;

  // Suspend/resume helpers used by the constructors and the destructor.
  void sync_and_suspend_one();
  void sync_and_suspend_all();
  void resume_one();
  void resume_all();

  // Deoptimize the given frame and deoptimize objects with optimizations based on escape analysis.
  bool deoptimize_objects_internal(JavaThread* deoptee, intptr_t* fr_id);

public:
  // Revert ea based optimizations for given deoptee thread.
  // The barrier is active only if a compiler that can apply escape analysis is
  // in use: the JVMCI compiler, or C2 with DoEscapeAnalysis enabled.
  EscapeBarrier(JavaThread* calling_thread, JavaThread* deoptee_thread, bool barrier_active)
    : _calling_thread(calling_thread), _deoptee_thread(deoptee_thread),
      _barrier_active(barrier_active && (JVMCI_ONLY(UseJVMCICompiler) NOT_JVMCI(false)
                      COMPILER2_PRESENT(|| DoEscapeAnalysis)))
  {
    if (_barrier_active) sync_and_suspend_one();
  }

  // Revert ea based optimizations for all java threads
  // (_deoptee_thread == NULL marks the all-threads case).
  EscapeBarrier(JavaThread* calling_thread, bool barrier_active)
    : _calling_thread(calling_thread), _deoptee_thread(NULL),
      _barrier_active(barrier_active && (JVMCI_ONLY(UseJVMCICompiler) NOT_JVMCI(false)
                      COMPILER2_PRESENT(|| DoEscapeAnalysis)))
  {
    if (_barrier_active) sync_and_suspend_all();
  }
#else
public:
  // Without C2 or JVMCI there are no escape-analysis based optimizations to
  // revert, so the barrier degenerates to a no-op.
  EscapeBarrier(JavaThread* calling_thread, JavaThread* deoptee_thread, bool barrier_active) { }
  EscapeBarrier(JavaThread* calling_thread, bool barrier_active) { }
  static bool deoptimizing_objects_for_all_threads() { return false; }
#endif // COMPILER2_OR_JVMCI

  // Deoptimize objects, i.e. reallocate and relock them. The target frames are deoptimized.
  // The methods return false iff at least one reallocation failed.
  bool deoptimize_objects(intptr_t* fr_id) {
    return true COMPILER2_OR_JVMCI_PRESENT(&& deoptimize_objects_internal(deoptee_thread(), fr_id));
  }
  bool deoptimize_objects(int depth)                           NOT_COMPILER2_OR_JVMCI_RETURN_(true);
  // Find and deoptimize non escaping objects and the holding frames on all stacks.
  bool deoptimize_objects_all_threads()                        NOT_COMPILER2_OR_JVMCI_RETURN_(true);

  // A java thread was added to the list of threads
  static void thread_added(JavaThread* jt)                     NOT_COMPILER2_OR_JVMCI_RETURN;
  // A java thread was removed from the list of threads
  static void thread_removed(JavaThread* jt)                   NOT_COMPILER2_OR_JVMCI_RETURN;

#if COMPILER2_OR_JVMCI
  // Returns true iff objects were reallocated and relocked because of access through JVMTI
  static bool objs_are_deoptimized(JavaThread* thread, intptr_t* fr_id);

  static bool deoptimizing_objects_for_all_threads() { return _deoptimizing_objects_for_all_threads; }

  // Resume the thread(s) suspended by the constructor, if the barrier is active.
  ~EscapeBarrier() {
    if (!barrier_active()) return;
    if (all_threads()) {
      resume_all();
    } else {
      resume_one();
    }
  }


  bool all_threads()    const { return _deoptee_thread == NULL; }            // Should revert optimizations for all threads.
  bool self_deopt()     const { return _calling_thread == _deoptee_thread; } // Current thread deoptimizes its own objects.
  bool barrier_active() const { return _barrier_active; }                    // Inactive barriers are created if no local objects can escape.

  // accessors
  JavaThread* calling_thread() const     { return _calling_thread; }
  JavaThread* deoptee_thread() const     { return _deoptee_thread; }
#endif // COMPILER2_OR_JVMCI
};
 564 
 565 class DeoptimizationMarker : StackObj {  // for profiling
 566   static bool _is_active;
 567 public:
 568   DeoptimizationMarker()  { _is_active = true; }
 569   ~DeoptimizationMarker() { _is_active = false; }
 570   static bool is_active() { return _is_active; }
 571 };
 572 
 573 #endif // SHARE_RUNTIME_DEOPTIMIZATION_HPP