< prev index next >
src/hotspot/share/runtime/deoptimization.cpp
Print this page
rev 60137 : 8227745: Enable Escape Analysis for Better Performance in the Presence of JVMTI Agents
Reviewed-by: mdoerr, goetz
rev 60138 : 8227745: delta webrev.5 -> webrev.6
*** 283,305 ****
JavaThread* deoptee_thread = chunk->at(0)->thread();
CompiledMethod* cm = deoptee.cb()->as_compiled_method_or_null();
RegisterMap map(chunk->at(0)->register_map());
bool deoptimized_objects = false;
! // Reallocate the non-escaping objects and restore their fields. Then
! // relock objects if synchronization on them was eliminated.
! if (DoEscapeAnalysis && EliminateAllocations) {
realloc_failures = eliminate_allocations(thread, Unpack_none, cm, deoptee, map, chunk, deoptimized_objects);
}
// Revoke biases of objects with eliminated locks in the given frame.
Deoptimization::revoke_for_object_deoptimization(deoptee_thread, deoptee, &map, thread);
// MonitorInfo structures used in eliminate_locks are not GC safe.
NoSafepointVerifier no_safepoint;
! if ((DoEscapeAnalysis || EliminateNestedLocks) && EliminateLocks) {
eliminate_locks(thread, chunk, realloc_failures, deoptee, Unpack_none, deoptimized_objects);
}
return deoptimized_objects;
}
#endif // COMPILER2_OR_JVMCI
--- 283,307 ----
JavaThread* deoptee_thread = chunk->at(0)->thread();
CompiledMethod* cm = deoptee.cb()->as_compiled_method_or_null();
RegisterMap map(chunk->at(0)->register_map());
bool deoptimized_objects = false;
! bool const jvmci_enabled = JVMCI_ONLY(UseJVMCICompiler) NOT_JVMCI(false);
!
! // Reallocate the non-escaping objects and restore their fields.
! if (jvmci_enabled COMPILER2_PRESENT(|| (DoEscapeAnalysis && EliminateAllocations))) {
realloc_failures = eliminate_allocations(thread, Unpack_none, cm, deoptee, map, chunk, deoptimized_objects);
}
// Revoke biases of objects with eliminated locks in the given frame.
Deoptimization::revoke_for_object_deoptimization(deoptee_thread, deoptee, &map, thread);
// MonitorInfo structures used in eliminate_locks are not GC safe.
NoSafepointVerifier no_safepoint;
! // Now relock objects if synchronization on them was eliminated.
! if (jvmci_enabled COMPILER2_PRESENT(|| ((DoEscapeAnalysis || EliminateNestedLocks) && EliminateLocks))) {
eliminate_locks(thread, chunk, realloc_failures, deoptee, Unpack_none, deoptimized_objects);
}
return deoptimized_objects;
}
#endif // COMPILER2_OR_JVMCI
*** 1637,1650 ****
int len = objects_to_revoke->length();
for (int i = 0; i < len; i++) {
oop obj = (objects_to_revoke->at(i))();
markWord mark = obj->mark();
! if (!mark.has_bias_pattern()
! || mark.is_biased_anonymously() // eliminated locking does not bias an object if it wasn't before
! || !obj->klass()->prototype_header().has_bias_pattern() // bulk revoke ignores eliminated monitors
! || (obj->klass()->prototype_header().bias_epoch() != mark.bias_epoch())) { // bulk rebias ignores eliminated monitors
// We reach here regularly if there's just eliminated locking on obj.
// We must not call BiasedLocking::revoke_own_lock() in this case, as we would hit assertions, because it is a
// prerequisite that there has to be non-eliminated locking on obj by deoptee_thread.
// Luckily we don't have to revoke here, because obj has to be a non-escaping obj and can be relocked without
// revoking the bias. See Deoptimization::relock_objects().
--- 1639,1652 ----
int len = objects_to_revoke->length();
for (int i = 0; i < len; i++) {
oop obj = (objects_to_revoke->at(i))();
markWord mark = obj->mark();
! if (!mark.has_bias_pattern() ||
! mark.is_biased_anonymously() || // eliminated locking does not bias an object if it wasn't before
! !obj->klass()->prototype_header().has_bias_pattern() || // bulk revoke ignores eliminated monitors
! (obj->klass()->prototype_header().bias_epoch() != mark.bias_epoch())) { // bulk rebias ignores eliminated monitors
// We reach here regularly if there's just eliminated locking on obj.
// We must not call BiasedLocking::revoke_own_lock() in this case, as we would hit assertions, because it is a
// prerequisite that there has to be non-eliminated locking on obj by deoptee_thread.
// Luckily we don't have to revoke here, because obj has to be a non-escaping obj and can be relocked without
// revoking the bias. See Deoptimization::relock_objects().
*** 2705,2751 ****
#undef PRINT_STAT_LINE
if (xtty != NULL) xtty->tail("statistics");
}
}
- #ifdef ASSERT
- // Revert optimizations based on escape analysis.
- void Deoptimization::deoptimize_objects_alot_loop() {
- JavaThread* ct = JavaThread::current();
- HandleMark hm(ct);
- if (DeoptimizeObjectsALotThreadCount == 1) {
- // Revert everything at once
- while (!ct->is_terminated()) {
- { // Begin new scope for escape barrier
- HandleMarkCleaner hmc(ct);
- ResourceMark rm(ct);
- EscapeBarrier eb(ct, true);
- eb.deoptimize_objects_all_threads();
- }
- // Now sleep after the escape barriers destructor resumed the java threads.
- ct->sleep(DeoptimizeObjectsALotInterval);
- }
- } else {
- // Revert everything for one deoptee_thread which gets selected round robin
- JavaThread* deoptee_thread = NULL;
- while (!ct->is_terminated()) {
- { // Begin new scope for escape barrier
- HandleMarkCleaner hmc(ct);
- ResourceMark rm(ct);
- ThreadsListHandle tlh;
- int idx = (1 + tlh.list()->find_index_of_JavaThread(deoptee_thread)) % tlh.list()->length();
- deoptee_thread = tlh.list()->thread_at(idx);
- EscapeBarrier eb(ct, deoptee_thread, true);
- eb.deoptimize_objects(100);
- }
- // Now sleep after the escape barriers destructor resumed deoptee_thread.
- ct->sleep(DeoptimizeObjectsALotInterval);
- }
- }
- }
- #endif // !ASSERT
-
// Returns true iff objects were reallocated and relocked because of access through JVMTI
bool EscapeBarrier::objs_are_deoptimized(JavaThread* thread, intptr_t* fr_id) {
// first/oldest update holds the flag
GrowableArray<jvmtiDeferredLocalVariableSet*>* list = JvmtiDeferredUpdates::deferred_locals(thread);
bool result = false;
--- 2707,2716 ----
*** 2758,2771 ****
}
}
return result;
}
! // Deoptimize frames with non escaping objects. Deoptimize objects with optimizations based on
! // escape analysis. Do it for all frames within the given depth and continue from there until the
! // entry frame is reached, because thread local objects passed as arguments might escape from callee
! // frames within the given depth.
bool EscapeBarrier::deoptimize_objects(int depth) {
if (barrier_active() && deoptee_thread()->has_last_Java_frame()) {
ResourceMark rm(calling_thread());
HandleMark hm;
RegisterMap reg_map(deoptee_thread());
--- 2723,2740 ----
}
}
return result;
}
! // Object references of frames up to the given depth are about to be accessed. Frames with
! // optimizations based on escape state that is potentially changed by the accesses need to be
! // deoptimized and the referenced objects need to be reallocated and relocked.
! // Up to the given depth this is done for frames with non-escaping objects in scope. For deeper
! // frames it is done only if they pass non-escaping objects as arguments, because those potentially
! // escape from callee frames within the given depth.
! // The search for deeper frames is ended if an entry frame is found, because arguments to
! // native methods are considered to escape globally.
bool EscapeBarrier::deoptimize_objects(int depth) {
if (barrier_active() && deoptee_thread()->has_last_Java_frame()) {
ResourceMark rm(calling_thread());
HandleMark hm;
RegisterMap reg_map(deoptee_thread());
*** 2779,2795 ****
bool should_deopt = cur_depth <= depth ? cvf->not_global_escape_in_scope() : cvf->arg_escape();
if (should_deopt && !deoptimize_objects(cvf->fr().id())) {
// reallocation of scalar replaced objects failed, because heap is exhausted
return false;
}
- }
! // move to next physical frame
while(!vf->is_top()) {
cur_depth++;
vf = vf->sender();
}
cur_depth++;
vf = vf->sender();
}
}
return true;
--- 2748,2766 ----
bool should_deopt = cur_depth <= depth ? cvf->not_global_escape_in_scope() : cvf->arg_escape();
if (should_deopt && !deoptimize_objects(cvf->fr().id())) {
// reallocation of scalar replaced objects failed, because heap is exhausted
return false;
}
! // move to top frame
while(!vf->is_top()) {
cur_depth++;
vf = vf->sender();
}
+ }
+
+ // move to next physical frame
cur_depth++;
vf = vf->sender();
}
}
return true;
*** 2810,2824 ****
compiledVFrame* cvf = compiledVFrame::cast(vf);
if ((cvf->not_global_escape_in_scope() || cvf->arg_escape()) &&
!deoptimize_objects_internal(jt, cvf->fr().id())) {
return false; // reallocation failure
}
! }
! // move to next physical frame
while(!vf->is_top()) {
vf = vf->sender();
}
vf = vf->sender();
}
}
}
return true; // success
--- 2781,2796 ----
compiledVFrame* cvf = compiledVFrame::cast(vf);
if ((cvf->not_global_escape_in_scope() || cvf->arg_escape()) &&
!deoptimize_objects_internal(jt, cvf->fr().id())) {
return false; // reallocation failure
}
! // move to top frame
while(!vf->is_top()) {
vf = vf->sender();
}
+ }
+ // move to next physical frame
vf = vf->sender();
}
}
}
return true; // success
*** 2831,2841 ****
JavaThread* _excluded_thread;
public:
EscapeBarrierSuspendHandshake(JavaThread* excluded_thread, const char* name) : HandshakeClosure(name), _excluded_thread(excluded_thread) { }
void do_thread(Thread* th) {
if (th->is_Java_thread() && !th->is_hidden_from_external_view() && (th != _excluded_thread)) {
! th->set_ea_obj_deopt_flag();
}
}
};
void EscapeBarrier::sync_and_suspend_one() {
--- 2803,2813 ----
JavaThread* _excluded_thread;
public:
EscapeBarrierSuspendHandshake(JavaThread* excluded_thread, const char* name) : HandshakeClosure(name), _excluded_thread(excluded_thread) { }
void do_thread(Thread* th) {
if (th->is_Java_thread() && !th->is_hidden_from_external_view() && (th != _excluded_thread)) {
! th->set_obj_deopt_flag();
}
}
};
void EscapeBarrier::sync_and_suspend_one() {
*** 2846,2874 ****
// Sync with other threads that might be doing deoptimizations
{
// Need to switch to _thread_blocked for the wait() call
ThreadBlockInVM tbivm(_calling_thread);
MonitorLocker ml(_calling_thread, EscapeBarrier_lock, Mutex::_no_safepoint_check_flag);
! while (_self_deoptimization_in_progress || _deoptee_thread->is_ea_obj_deopt_suspend()) {
ml.wait();
}
if (self_deopt()) {
_self_deoptimization_in_progress = true;
return;
}
// set suspend flag for target thread
! _deoptee_thread->set_ea_obj_deopt_flag();
}
// suspend target thread
- uint32_t debug_bits = 0;
- if (!_deoptee_thread->is_thread_fully_suspended(false, &debug_bits)) {
EscapeBarrierSuspendHandshake sh(NULL, "EscapeBarrierSuspendOne");
! Handshake::execute(&sh, _deoptee_thread);
! }
assert(!_deoptee_thread->has_last_Java_frame() || _deoptee_thread->frame_anchor()->walkable(),
"stack should be walkable now");
}
void EscapeBarrier::sync_and_suspend_all() {
--- 2818,2843 ----
// Sync with other threads that might be doing deoptimizations
{
// Need to switch to _thread_blocked for the wait() call
ThreadBlockInVM tbivm(_calling_thread);
MonitorLocker ml(_calling_thread, EscapeBarrier_lock, Mutex::_no_safepoint_check_flag);
! while (_self_deoptimization_in_progress || _deoptee_thread->is_obj_deopt_suspend()) {
ml.wait();
}
if (self_deopt()) {
_self_deoptimization_in_progress = true;
return;
}
// set suspend flag for target thread
! _deoptee_thread->set_obj_deopt_flag();
}
// suspend target thread
EscapeBarrierSuspendHandshake sh(NULL, "EscapeBarrierSuspendOne");
! Handshake::execute_direct(&sh, _deoptee_thread);
assert(!_deoptee_thread->has_last_Java_frame() || _deoptee_thread->frame_anchor()->walkable(),
"stack should be walkable now");
}
void EscapeBarrier::sync_and_suspend_all() {
*** 2884,2894 ****
bool deopt_in_progress;
do {
deopt_in_progress = _self_deoptimization_in_progress;
for (JavaThreadIteratorWithHandle jtiwh; JavaThread *jt = jtiwh.next(); ) {
! deopt_in_progress = (deopt_in_progress || jt->is_ea_obj_deopt_suspend());
if (deopt_in_progress) {
break;
}
}
if (deopt_in_progress) {
--- 2853,2863 ----
bool deopt_in_progress;
do {
deopt_in_progress = _self_deoptimization_in_progress;
for (JavaThreadIteratorWithHandle jtiwh; JavaThread *jt = jtiwh.next(); ) {
! deopt_in_progress = (deopt_in_progress || jt->is_obj_deopt_suspend());
if (deopt_in_progress) {
break;
}
}
if (deopt_in_progress) {
*** 2918,2928 ****
MonitorLocker ml(_calling_thread, EscapeBarrier_lock, Mutex::_no_safepoint_check_flag);
if (self_deopt()) {
assert(_self_deoptimization_in_progress, "incorrect synchronization");
_self_deoptimization_in_progress = false;
} else {
! _deoptee_thread->clear_ea_obj_deopt_flag();
}
ml.notify_all();
}
void EscapeBarrier::resume_all() {
--- 2887,2897 ----
MonitorLocker ml(_calling_thread, EscapeBarrier_lock, Mutex::_no_safepoint_check_flag);
if (self_deopt()) {
assert(_self_deoptimization_in_progress, "incorrect synchronization");
_self_deoptimization_in_progress = false;
} else {
! _deoptee_thread->clear_obj_deopt_flag();
}
ml.notify_all();
}
void EscapeBarrier::resume_all() {
*** 2931,2953 ****
MonitorLocker ml(_calling_thread, EscapeBarrier_lock, Mutex::_no_safepoint_check_flag);
assert(_self_deoptimization_in_progress, "incorrect synchronization");
_deoptimizing_objects_for_all_threads = false;
_self_deoptimization_in_progress = false;
for (JavaThreadIteratorWithHandle jtiwh; JavaThread *jt = jtiwh.next(); ) {
! jt->clear_ea_obj_deopt_flag();
}
ml.notify_all();
}
void EscapeBarrier::thread_added(JavaThread* jt) {
if (!jt->is_hidden_from_external_view()) {
MutexLocker ml(EscapeBarrier_lock, Mutex::_no_safepoint_check_flag);
if (_deoptimizing_objects_for_all_threads) {
! jt->set_ea_obj_deopt_flag();
! } else {
! jt->clear_ea_obj_deopt_flag();
}
}
}
// Remember that objects were reallocated and relocked for the compiled frame with the given id
static void set_objs_are_deoptimized(JavaThread* thread, intptr_t* fr_id) {
--- 2900,2930 ----
MonitorLocker ml(_calling_thread, EscapeBarrier_lock, Mutex::_no_safepoint_check_flag);
assert(_self_deoptimization_in_progress, "incorrect synchronization");
_deoptimizing_objects_for_all_threads = false;
_self_deoptimization_in_progress = false;
for (JavaThreadIteratorWithHandle jtiwh; JavaThread *jt = jtiwh.next(); ) {
! jt->clear_obj_deopt_flag();
}
ml.notify_all();
}
void EscapeBarrier::thread_added(JavaThread* jt) {
if (!jt->is_hidden_from_external_view()) {
MutexLocker ml(EscapeBarrier_lock, Mutex::_no_safepoint_check_flag);
if (_deoptimizing_objects_for_all_threads) {
! jt->set_obj_deopt_flag();
! }
}
+ }
+
+ void EscapeBarrier::thread_removed(JavaThread* jt) {
+ MonitorLocker ml(EscapeBarrier_lock, Mutex::_no_safepoint_check_flag);
+ if (jt->is_obj_deopt_suspend()) {
+ // jt terminated before it self-suspended.
+ // Other threads might be waiting to perform deoptimizations for it.
+ jt->clear_obj_deopt_flag();
+ ml.notify_all();
}
}
// Remember that objects were reallocated and relocked for the compiled frame with the given id
static void set_objs_are_deoptimized(JavaThread* thread, intptr_t* fr_id) {
< prev index next >