1 /*
2 * Copyright (c) 1997, 2019, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #ifndef SHARE_RUNTIME_DEOPTIMIZATION_HPP
26 #define SHARE_RUNTIME_DEOPTIMIZATION_HPP
27
28 #include "memory/allocation.hpp"
29 #include "runtime/frame.hpp"
30
31 class ProfileData;
32 class vframeArray;
33 class MonitorInfo;
34 class MonitorValue;
35 class ObjectValue;
36 class AutoBoxObjectValue;
37 class ScopeValue;
38 class compiledVFrame;
39
40 template<class E> class GrowableArray;
41
42 class Deoptimization : AllStatic {
43 friend class VMStructs;
44
45 public:
46 // What condition caused the deoptimization?
47 enum DeoptReason {
48 Reason_many = -1, // indicates presence of several reasons
49 Reason_none = 0, // indicates absence of a relevant deopt.
50 // Next 8 reasons are recorded per bytecode in DataLayout::trap_bits.
51 // This is more complicated for JVMCI as JVMCI may deoptimize to *some* bytecode before the
52 // bytecode that actually caused the deopt (with inlining, JVMCI may even deoptimize to a
53 // bytecode in another method):
54 // - bytecode y in method b() causes deopt
55 // - JVMCI deoptimizes to bytecode x in method a()
56 // -> the deopt reason will be recorded for method a() at bytecode x
57 Reason_null_check, // saw unexpected null or zero divisor (@bci)
58 Reason_null_assert, // saw unexpected non-null or non-zero (@bci)
59 Reason_range_check, // saw unexpected array index (@bci)
60 Reason_class_check, // saw unexpected object class (@bci)
61 Reason_array_check, // saw unexpected array class (aastore @bci)
62 Reason_intrinsic, // saw unexpected operand to intrinsic (@bci)
63 Reason_bimorphic, // saw unexpected object class in bimorphic inlining (@bci)
117 Action_make_not_compilable, // invalidate the nmethod and do not compile
118 Action_LIMIT
119 // Note: Keep this enum in sync. with _trap_action_name.
120 };
121
  // Bit layout used to pack a "trap request" into a single int:
  //   [ debug id (23 bits) | reason (5 bits) | action (3 bits) ]
  // with the action in the lowest bits. The *_shift constants are derived
  // from the widths so the fields tile without gaps.
  // NOTE(review): Reason_LIMIT and Action_LIMIT presumably must fit in
  // _reason_bits/_action_bits respectively — confirm against the enum bodies.
  enum {
    _action_bits = 3,                              // width of the DeoptAction field
    _reason_bits = 5,                              // width of the DeoptReason field
    _debug_id_bits = 23,                           // width of the debug-id field
    _action_shift = 0,                             // action occupies the low bits
    _reason_shift = _action_shift+_action_bits,    // reason sits directly above action
    _debug_id_shift = _reason_shift+_reason_bits,  // debug id sits above reason
    BC_CASE_LIMIT = PRODUCT_ONLY(1) NOT_PRODUCT(4) // for _deoptimization_hist
  };
131
  // How the deoptimization blob should unpack/resume the interpreted frames.
  // NOTE(review): presumably this is the `exec_mode` int passed to
  // fetch_unroll_info_helper() — confirm against the .cpp.
  enum UnpackType {
    Unpack_deopt                = 0, // normal deoptimization, use pc computed in unpack_vframe_on_stack
    Unpack_exception            = 1, // exception is pending
    Unpack_uncommon_trap        = 2, // redo last byte code (C2 only)
    Unpack_reexecute            = 3, // reexecute bytecode (C1 only)
    Unpack_LIMIT                = 4  // number of unpack modes (array sizing/bounds)
  };
139
140 #if INCLUDE_JVMCI
141 // Can reconstruct virtualized unsafe large accesses to byte arrays.
142 static const int _support_large_access_byte_array_virtualization = 1;
143 #endif
144
145 // Make all nmethods that are marked_for_deoptimization not_entrant and deoptimize any live
146 // activations using those nmethods. If an nmethod is passed as an argument then it is
147 // marked_for_deoptimization and made not_entrant. Otherwise a scan of the code cache is done to
148 // find all marked nmethods and they are made not_entrant.
149 static void deoptimize_all_marked(nmethod* nmethod_only = NULL);
150
151 private:
152 // Revoke biased locks at deopt.
153 static void revoke_from_deopt_handler(JavaThread* thread, frame fr, RegisterMap* map);
154
155 public:
156 // Deoptimizes a frame lazily. Deopt happens on return to the frame.
157 static void deoptimize(JavaThread* thread, frame fr, DeoptReason reason = Reason_constraint);
158
159 #if INCLUDE_JVMCI
160 static address deoptimize_for_missing_exception_handler(CompiledMethod* cm);
161 static oop get_cached_box(AutoBoxObjectValue* bv, frame* fr, RegisterMap* reg_map, TRAPS);
162 #endif
163
164 private:
165 // Does the actual work for deoptimizing a single frame
166 static void deoptimize_single_frame(JavaThread* thread, frame fr, DeoptReason reason);
167
168 #if COMPILER2_OR_JVMCI
169 public:
170
171 // Support for restoring non-escaping objects
172 static bool realloc_objects(JavaThread* thread, frame* fr, RegisterMap* reg_map, GrowableArray<ScopeValue*>* objects, TRAPS);
173 static void reassign_type_array_elements(frame* fr, RegisterMap* reg_map, ObjectValue* sv, typeArrayOop obj, BasicType type);
174 static void reassign_object_array_elements(frame* fr, RegisterMap* reg_map, ObjectValue* sv, objArrayOop obj);
175 static void reassign_fields(frame* fr, RegisterMap* reg_map, GrowableArray<ScopeValue*>* objects, bool realloc_failures, bool skip_internal);
176 static void relock_objects(GrowableArray<MonitorInfo*>* monitors, JavaThread* thread, bool realloc_failures);
177 static void pop_frames_failed_reallocs(JavaThread* thread, vframeArray* array);
178 NOT_PRODUCT(static void print_objects(GrowableArray<ScopeValue*>* objects, bool realloc_failures);)
179 #endif // COMPILER2_OR_JVMCI
180
181 public:
182 static vframeArray* create_vframeArray(JavaThread* thread, frame fr, RegisterMap *reg_map, GrowableArray<compiledVFrame*>* chunk, bool realloc_failures);
183
184 // Interface used for unpacking deoptimized frames
185
186 // UnrollBlock is returned by fetch_unroll_info() to the deoptimization handler (blob).
  // This is only a CHeapObj to ease debugging after a deopt failure
188 class UnrollBlock : public CHeapObj<mtCompiler> {
189 friend class VMStructs;
190 friend class JVMCIVMStructs;
191 private:
192 int _size_of_deoptimized_frame; // Size, in bytes, of current deoptimized frame
193 int _caller_adjustment; // Adjustment, in bytes, to caller's SP by initial interpreted frame
194 int _number_of_frames; // Number frames to unroll
195 int _total_frame_sizes; // Total of number*sizes frames
196 intptr_t* _frame_sizes; // Array of frame sizes, in bytes, for unrolling the stack
447 Method* compiled_method,
448 //outputs:
449 uint& ret_this_trap_count,
450 bool& ret_maybe_prior_trap,
451 bool& ret_maybe_prior_recompile);
452 // class loading support for uncommon trap
453 static void load_class_by_index(const constantPoolHandle& constant_pool, int index, TRAPS);
454 static void load_class_by_index(const constantPoolHandle& constant_pool, int index);
455
456 static UnrollBlock* fetch_unroll_info_helper(JavaThread* thread, int exec_mode);
457
458 static DeoptAction _unloaded_action; // == Action_reinterpret;
459 static const char* _trap_reason_name[];
460 static const char* _trap_action_name[];
461
462 static juint _deoptimization_hist[Reason_LIMIT][1+Action_LIMIT][BC_CASE_LIMIT];
463 // Note: Histogram array size is 1-2 Kb.
464
465 public:
466 static void update_method_data_from_interpreter(MethodData* trap_mdo, int trap_bci, int reason);
467 };
468
469 class DeoptimizationMarker : StackObj { // for profiling
470 static bool _is_active;
471 public:
472 DeoptimizationMarker() { _is_active = true; }
473 ~DeoptimizationMarker() { _is_active = false; }
474 static bool is_active() { return _is_active; }
475 };
476
477 #endif // SHARE_RUNTIME_DEOPTIMIZATION_HPP
|
1 /*
2 * Copyright (c) 1997, 2020, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #ifndef SHARE_RUNTIME_DEOPTIMIZATION_HPP
26 #define SHARE_RUNTIME_DEOPTIMIZATION_HPP
27
28 #include "memory/allocation.hpp"
29 #include "runtime/frame.hpp"
30
31 class ProfileData;
32 class vframeArray;
33 class MonitorInfo;
34 class MonitorValue;
35 class ObjectValue;
36 class AutoBoxObjectValue;
37 class ScopeValue;
38 class compiledVFrame;
39 class EscapeBarrier;
40
41 template<class E> class GrowableArray;
42
43 class Deoptimization : AllStatic {
44 friend class VMStructs;
45 friend class EscapeBarrier;
46
47 public:
48 // What condition caused the deoptimization?
49 enum DeoptReason {
50 Reason_many = -1, // indicates presence of several reasons
51 Reason_none = 0, // indicates absence of a relevant deopt.
52 // Next 8 reasons are recorded per bytecode in DataLayout::trap_bits.
53 // This is more complicated for JVMCI as JVMCI may deoptimize to *some* bytecode before the
54 // bytecode that actually caused the deopt (with inlining, JVMCI may even deoptimize to a
55 // bytecode in another method):
56 // - bytecode y in method b() causes deopt
57 // - JVMCI deoptimizes to bytecode x in method a()
58 // -> the deopt reason will be recorded for method a() at bytecode x
59 Reason_null_check, // saw unexpected null or zero divisor (@bci)
60 Reason_null_assert, // saw unexpected non-null or non-zero (@bci)
61 Reason_range_check, // saw unexpected array index (@bci)
62 Reason_class_check, // saw unexpected object class (@bci)
63 Reason_array_check, // saw unexpected array class (aastore @bci)
64 Reason_intrinsic, // saw unexpected operand to intrinsic (@bci)
65 Reason_bimorphic, // saw unexpected object class in bimorphic inlining (@bci)
119 Action_make_not_compilable, // invalidate the nmethod and do not compile
120 Action_LIMIT
121 // Note: Keep this enum in sync. with _trap_action_name.
122 };
123
  // Bit layout used to pack a "trap request" into a single int:
  //   [ debug id (23 bits) | reason (5 bits) | action (3 bits) ]
  // with the action in the lowest bits. The *_shift constants are derived
  // from the widths so the fields tile without gaps.
  // NOTE(review): Reason_LIMIT and Action_LIMIT presumably must fit in
  // _reason_bits/_action_bits respectively — confirm against the enum bodies.
  enum {
    _action_bits = 3,                              // width of the DeoptAction field
    _reason_bits = 5,                              // width of the DeoptReason field
    _debug_id_bits = 23,                           // width of the debug-id field
    _action_shift = 0,                             // action occupies the low bits
    _reason_shift = _action_shift+_action_bits,    // reason sits directly above action
    _debug_id_shift = _reason_shift+_reason_bits,  // debug id sits above reason
    BC_CASE_LIMIT = PRODUCT_ONLY(1) NOT_PRODUCT(4) // for _deoptimization_hist
  };
133
  // How the deoptimization blob should unpack/resume the interpreted frames.
  // NOTE(review): presumably this is the `exec_mode` int passed to
  // fetch_unroll_info_helper() — confirm against the .cpp.
  enum UnpackType {
    Unpack_deopt                = 0, // normal deoptimization, use pc computed in unpack_vframe_on_stack
    Unpack_exception            = 1, // exception is pending
    Unpack_uncommon_trap        = 2, // redo last byte code (C2 only)
    Unpack_reexecute            = 3, // reexecute bytecode (C1 only)
    Unpack_none                 = 4, // not deoptimizing the frame, just reallocating/relocking for JVMTI
    Unpack_LIMIT                = 5  // number of unpack modes (array sizing/bounds)
  };
142
143 #if INCLUDE_JVMCI
144 // Can reconstruct virtualized unsafe large accesses to byte arrays.
145 static const int _support_large_access_byte_array_virtualization = 1;
146 #endif
147
148 // Make all nmethods that are marked_for_deoptimization not_entrant and deoptimize any live
149 // activations using those nmethods. If an nmethod is passed as an argument then it is
150 // marked_for_deoptimization and made not_entrant. Otherwise a scan of the code cache is done to
151 // find all marked nmethods and they are made not_entrant.
152 static void deoptimize_all_marked(nmethod* nmethod_only = NULL);
153
154 private:
155 // Revoke biased locks at deopt.
156 static void revoke_from_deopt_handler(JavaThread* thread, frame fr, RegisterMap* map);
157
158 static void revoke_for_object_deoptimization(JavaThread* deoptee_thread, frame fr, RegisterMap* map, JavaThread* thread);
159
160 public:
161 // Deoptimizes a frame lazily. Deopt happens on return to the frame.
162 static void deoptimize(JavaThread* thread, frame fr, DeoptReason reason = Reason_constraint);
163
164 #if INCLUDE_JVMCI
165 static address deoptimize_for_missing_exception_handler(CompiledMethod* cm);
166 static oop get_cached_box(AutoBoxObjectValue* bv, frame* fr, RegisterMap* reg_map, TRAPS);
167 #endif
168
169 private:
170 // Does the actual work for deoptimizing a single frame
171 static void deoptimize_single_frame(JavaThread* thread, frame fr, DeoptReason reason);
172
173 #if COMPILER2_OR_JVMCI
174 // Deoptimize objects, that is reallocate and relock them, just before they escape through JVMTI.
175 // The given vframes cover one physical frame.
176 static bool deoptimize_objects_internal(JavaThread* thread, GrowableArray<compiledVFrame*>* chunk, bool& realloc_failures);
177
178 public:
179
180 // Support for restoring non-escaping objects
181 static bool realloc_objects(JavaThread* thread, frame* fr, RegisterMap* reg_map, GrowableArray<ScopeValue*>* objects, TRAPS);
182 static void reassign_type_array_elements(frame* fr, RegisterMap* reg_map, ObjectValue* sv, typeArrayOop obj, BasicType type);
183 static void reassign_object_array_elements(frame* fr, RegisterMap* reg_map, ObjectValue* sv, objArrayOop obj);
184 static void reassign_fields(frame* fr, RegisterMap* reg_map, GrowableArray<ScopeValue*>* objects, bool realloc_failures, bool skip_internal);
185 static bool relock_objects(JavaThread* thread, GrowableArray<MonitorInfo*>* monitors,
186 JavaThread* deoptee_thread, frame& fr, int exec_mode, bool realloc_failures);
187 static void pop_frames_failed_reallocs(JavaThread* thread, vframeArray* array);
188 NOT_PRODUCT(static void print_objects(GrowableArray<ScopeValue*>* objects, bool realloc_failures);)
189 #endif // COMPILER2_OR_JVMCI
190
191 public:
192 static vframeArray* create_vframeArray(JavaThread* thread, frame fr, RegisterMap *reg_map, GrowableArray<compiledVFrame*>* chunk, bool realloc_failures);
193
194 // Interface used for unpacking deoptimized frames
195
196 // UnrollBlock is returned by fetch_unroll_info() to the deoptimization handler (blob).
  // This is only a CHeapObj to ease debugging after a deopt failure
198 class UnrollBlock : public CHeapObj<mtCompiler> {
199 friend class VMStructs;
200 friend class JVMCIVMStructs;
201 private:
202 int _size_of_deoptimized_frame; // Size, in bytes, of current deoptimized frame
203 int _caller_adjustment; // Adjustment, in bytes, to caller's SP by initial interpreted frame
204 int _number_of_frames; // Number frames to unroll
205 int _total_frame_sizes; // Total of number*sizes frames
206 intptr_t* _frame_sizes; // Array of frame sizes, in bytes, for unrolling the stack
457 Method* compiled_method,
458 //outputs:
459 uint& ret_this_trap_count,
460 bool& ret_maybe_prior_trap,
461 bool& ret_maybe_prior_recompile);
462 // class loading support for uncommon trap
463 static void load_class_by_index(const constantPoolHandle& constant_pool, int index, TRAPS);
464 static void load_class_by_index(const constantPoolHandle& constant_pool, int index);
465
466 static UnrollBlock* fetch_unroll_info_helper(JavaThread* thread, int exec_mode);
467
468 static DeoptAction _unloaded_action; // == Action_reinterpret;
469 static const char* _trap_reason_name[];
470 static const char* _trap_action_name[];
471
472 static juint _deoptimization_hist[Reason_LIMIT][1+Action_LIMIT][BC_CASE_LIMIT];
473 // Note: Histogram array size is 1-2 Kb.
474
475 public:
476 static void update_method_data_from_interpreter(MethodData* trap_mdo, int trap_bci, int reason);
477 };
478
// EscapeBarriers should be put on execution paths, where JVMTI agents can access object
// references held by java threads.
// They provide means to revert optimizations based on escape analysis in a well synchronized manner
// just before local references escape through JVMTI.
//
// The whole mechanism is compiled in only for builds with a compiler that performs
// escape analysis (C2 or JVMCI); otherwise every member below degrades to a no-op stub.
class EscapeBarrier : StackObj {
#if COMPILER2_OR_JVMCI
  JavaThread* const _calling_thread;  // thread requesting the reversion (e.g. the JVMTI agent thread)
  JavaThread* const _deoptee_thread;  // thread whose objects are deoptimized; NULL means "all threads"
  bool        const _barrier_active;  // false if no EA-based optimizations can exist (barrier is a no-op)

  static bool _deoptimizing_objects_for_all_threads;  // an all-threads barrier is in progress
  static bool _self_deoptimization_in_progress;       // NOTE(review): presumably serializes self-deopts — confirm in .cpp

  // Suspend/resume of the deoptee thread(s); defined in the .cpp, not visible here.
  void sync_and_suspend_one();
  void sync_and_suspend_all();
  void resume_one();
  void resume_all();

  // Deoptimize the given frame and deoptimize objects with optimizations based on escape analysis.
  bool deoptimize_objects_internal(JavaThread* deoptee, intptr_t* fr_id);

 public:
  // Revert ea based optimizations for given deoptee thread.
  // The barrier is only armed if a compiler that can do escape analysis is in use
  // (JVMCI compiler, or C2 with DoEscapeAnalysis).
  EscapeBarrier(JavaThread* calling_thread, JavaThread* deoptee_thread, bool barrier_active)
    : _calling_thread(calling_thread), _deoptee_thread(deoptee_thread),
      _barrier_active(barrier_active && (JVMCI_ONLY(UseJVMCICompiler) NOT_JVMCI(false)
          COMPILER2_PRESENT(|| DoEscapeAnalysis)))
  {
    if (_barrier_active) sync_and_suspend_one();
  }

  // Revert ea based optimizations for all java threads (deoptee thread is NULL).
  EscapeBarrier(JavaThread* calling_thread, bool barrier_active)
    : _calling_thread(calling_thread), _deoptee_thread(NULL),
      _barrier_active(barrier_active && (JVMCI_ONLY(UseJVMCICompiler) NOT_JVMCI(false)
          COMPILER2_PRESENT(|| DoEscapeAnalysis)))
  {
    if (_barrier_active) sync_and_suspend_all();
  }
#else
 public:
  // Stubs for builds without C2/JVMCI: the barrier does nothing.
  EscapeBarrier(JavaThread* calling_thread, JavaThread* deoptee_thread, bool barrier_active) { }
  EscapeBarrier(JavaThread* calling_thread, bool barrier_active) { }
  static bool deoptimizing_objects_for_all_threads() { return false; }
#endif // COMPILER2_OR_JVMCI

  // Deoptimize objects, i.e. reallocate and relock them. The target frames are deoptimized.
  // The methods return false iff at least one reallocation failed.
  // (Without C2/JVMCI the COMPILER2_OR_JVMCI_PRESENT operand vanishes and this is just "return true".)
  bool deoptimize_objects(intptr_t* fr_id) {
    return true COMPILER2_OR_JVMCI_PRESENT(&& deoptimize_objects_internal(deoptee_thread(), fr_id));
  }
  bool deoptimize_objects(int depth) NOT_COMPILER2_OR_JVMCI_RETURN_(true);
  // Find and deoptimize non escaping objects and the holding frames on all stacks.
  bool deoptimize_objects_all_threads() NOT_COMPILER2_OR_JVMCI_RETURN_(true);

  // A java thread was added to the list of threads.
  static void thread_added(JavaThread* jt) NOT_COMPILER2_OR_JVMCI_RETURN;
  // A java thread was removed from the list of threads.
  static void thread_removed(JavaThread* jt) NOT_COMPILER2_OR_JVMCI_RETURN;

#if COMPILER2_OR_JVMCI
  // Returns true iff objects were reallocated and relocked because of access through JVMTI.
  static bool objs_are_deoptimized(JavaThread* thread, intptr_t* fr_id);

  static bool deoptimizing_objects_for_all_threads() { return _deoptimizing_objects_for_all_threads; }

  // Resume whatever the constructor suspended; no-op for an inactive barrier.
  ~EscapeBarrier() {
    if (!barrier_active()) return;
    if (all_threads()) {
      resume_all();
    } else {
      resume_one();
    }
  }


  bool all_threads()    const { return _deoptee_thread == NULL; }            // Should revert optimizations for all threads.
  bool self_deopt()     const { return _calling_thread == _deoptee_thread; } // Current thread deoptimizes its own objects.
  bool barrier_active() const { return _barrier_active; }                    // Inactive barriers are created if no local objects can escape.

  // accessors
  JavaThread* calling_thread() const { return _calling_thread; }
  JavaThread* deoptee_thread() const { return _deoptee_thread; }
#endif // COMPILER2_OR_JVMCI
};
564
565 class DeoptimizationMarker : StackObj { // for profiling
566 static bool _is_active;
567 public:
568 DeoptimizationMarker() { _is_active = true; }
569 ~DeoptimizationMarker() { _is_active = false; }
570 static bool is_active() { return _is_active; }
571 };
572
573 #endif // SHARE_RUNTIME_DEOPTIMIZATION_HPP
|