      nm->make_not_entrant();
    }
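    // Deoptimize just the caller's frame: when this runtime call returns,
    // execution continues in the interpreter rather than in the compiled
    // code that was made not-entrant above.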
    Deoptimization::deoptimize_frame(thread, caller_frame.id());

    // Return to the now deoptimized frame.
  }

  // Now copy code back
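  // The patch site in the compiled caller still contains the jump that
  // redirects into the PatchingStub; the stub holds a copy of the original
  // instruction bytes (copy_buff below), which are copied back over that
  // jump once patching is complete so later executions run the real
  // instruction directly.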
  {
    MutexLockerEx ml_patch (Patching_lock, Mutex::_no_safepoint_check_flag);
    //
    // Deoptimization may have happened while we waited for the lock.
    // In that case we don't bother to do any patching; we just return
    // and let the deopt happen.
    if (!caller_is_deopted()) {
      NativeGeneralJump* jump = nativeGeneralJump_at(caller_frame.pc());
      address instr_pc = jump->jump_destination();
      NativeInstruction* ni = nativeInstruction_at(instr_pc);
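      // If another thread got here first while we were blocked on
      // Patching_lock, the instruction at the patch site is no longer the
      // jump into the stub and there is nothing left to do.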
      if (NOT_AARCH32(ni->is_jump())
          AARCH32_ONLY(!ni->is_patched_already())) {
        // The jump has not been patched yet.
        // The jump destination is the slow case and therefore not part of the stubs
        // (stubs are only for StaticCalls).

        // format of buffer
        //    ....
        //    instr byte 0     <-- copy_buff
        //    instr byte 1
        //    ..
        //    instr byte n-1
        //     n
        //    ....             <-- call destination

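        // Three metadata bytes stored immediately below stub_location give the
        // number of saved instruction bytes (byte_count), the gap between the
        // end of those saved bytes and stub_location (byte_skip, so
        // copy_buff = stub_location - byte_skip - byte_count), and the offset
        // back to the entry taken while the klass is still being initialized
        // (being_initialized_entry).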
        address stub_location = caller_frame.pc() + PatchingStub::patch_info_offset();
        unsigned char* byte_count = (unsigned char*) (stub_location - 1);
        unsigned char* byte_skip = (unsigned char*) (stub_location - 2);
        unsigned char* being_initialized_entry_offset = (unsigned char*) (stub_location - 3);
        address copy_buff = stub_location - *byte_skip - *byte_count;
        address being_initialized_entry = stub_location - *being_initialized_entry_offset;
        if (TracePatching) {