494 // entries run from -1..x where &monitor[x] ==
495
496 {
497 // Must not attempt to lock method until we enter interpreter as gc won't be able to find the
498 // initial frame. However we allocate a free monitor so we don't have to shuffle the expression stack
499 // immediately.
500
501 // synchronize method
502 const Address access_flags (rbx, methodOopDesc::access_flags_offset());
503 const int entry_size = frame::interpreter_frame_monitor_size() * wordSize;
504 Label not_synced;
505
// Non-synchronized methods skip monitor allocation entirely.
506 __ movl(rax, access_flags);
507 __ testl(rax, JVM_ACC_SYNCHRONIZED);
508 __ jcc(Assembler::zero, not_synced);
509
510 // Allocate initial monitor and pre initialize it
511 // get synchronization object
512
513 Label done;
// Byte offset of the java_mirror field within a klassOop.
514 const int mirror_offset = klassOopDesc::klass_part_offset_in_bytes() + Klass::java_mirror_offset_in_bytes();
515 __ movl(rax, access_flags);
516 __ testl(rax, JVM_ACC_STATIC);
// movptr does not modify the condition codes, so the JVM_ACC_STATIC test
// result is still live at the jcc below; the receiver load is hoisted
// ahead of the branch on the assumption that non-static is the common case.
517 __ movptr(rax, Address(locals, 0)); // get receiver (assume this is frequent case)
518 __ jcc(Assembler::zero, done);
// Static method: synchronize on the class mirror instead of the receiver:
// method->constants()->pool_holder()->java_mirror.
519 __ movptr(rax, Address(rbx, methodOopDesc::constants_offset()));
520 __ movptr(rax, Address(rax, constantPoolOopDesc::pool_holder_offset_in_bytes()));
521 __ movptr(rax, Address(rax, mirror_offset));
522 __ bind(done);
523 // add space for monitor & lock
524 __ subptr(rsp, entry_size); // add space for a monitor entry
// Only the object field is pre-initialized here; NOTE(review): the lock
// itself is presumably acquired later (lock_object) once the interpreter
// frame is fully set up — confirm against the caller.
525 __ movptr(Address(rsp, BasicObjectLock::obj_offset_in_bytes()), rax); // store object
526 __ bind(not_synced);
527 }
528
529 __ movptr(STATE(_stack_base), rsp); // set expression stack base ( == &monitors[-count])
530 if (native) {
// Native frames have no expression stack: tos and limit coincide with the base.
531 __ movptr(STATE(_stack), rsp); // set current expression stack tos
532 __ movptr(STATE(_stack_limit), rsp);
533 } else {
534 __ subptr(rsp, wordSize); // pre-push stack
746 const int entry_size = frame::interpreter_frame_monitor_size() * wordSize;
747 const Address access_flags (rbx, methodOopDesc::access_flags_offset());
748
749 const Register monitor = NOT_LP64(rdx) LP64_ONLY(c_rarg1);
750
751 // find initial monitor i.e. monitors[-1]
752 __ movptr(monitor, STATE(_monitor_base)); // get monitor bottom limit
753 __ subptr(monitor, entry_size); // point to initial monitor
754
755 #ifdef ASSERT
// Debug-only sanity check: this path must only be reached for synchronized methods.
756 { Label L;
757 __ movl(rax, access_flags);
758 __ testl(rax, JVM_ACC_SYNCHRONIZED);
759 __ jcc(Assembler::notZero, L);
760 __ stop("method doesn't need synchronization");
761 __ bind(L);
762 }
763 #endif // ASSERT
764 // get synchronization object
765 { Label done;
// Byte offset of the java_mirror field within a klassOop.
766 const int mirror_offset = klassOopDesc::klass_part_offset_in_bytes() + Klass::java_mirror_offset_in_bytes();
767 __ movl(rax, access_flags);
768 __ movptr(rdi, STATE(_locals)); // prepare to get receiver (assume common case)
769 __ testl(rax, JVM_ACC_STATIC);
// movptr leaves the condition codes intact, so the JVM_ACC_STATIC test is
// still live at the jcc below; zero (non-static) keeps the receiver in rax.
770 __ movptr(rax, Address(rdi, 0)); // get receiver (assume this is frequent case)
771 __ jcc(Assembler::zero, done);
// Static method: synchronize on the class mirror —
// method->constants()->pool_holder()->java_mirror.
772 __ movptr(rax, Address(rbx, methodOopDesc::constants_offset()));
773 __ movptr(rax, Address(rax, constantPoolOopDesc::pool_holder_offset_in_bytes()));
774 __ movptr(rax, Address(rax, mirror_offset));
775 __ bind(done);
776 }
777 #ifdef ASSERT
// Debug-only check that the pre-initialized monitor slot holds exactly the
// object we just recomputed.
778 { Label L;
779 __ cmpptr(rax, Address(monitor, BasicObjectLock::obj_offset_in_bytes())); // correct object?
780 __ jcc(Assembler::equal, L);
781 __ stop("wrong synchronization object"); // fixed diagnostic typo: was "lobject"
782 __ bind(L);
783 }
784 #endif // ASSERT
785 // can destroy rax, rdx|c_rarg1, rcx, and (via call_VM) rdi!
786 __ lock_object(monitor);
1163
1164 // result handler is in rax
1165 // set result handler
1166 __ movptr(STATE(_result_handler), rax);
1167
1168
1169 // get native function entry point
// If the native entry point has not been filled in yet, call into the
// runtime to resolve it, then reload the method oop (call_VM can clobber
// registers) and re-fetch the now-populated function pointer.
1170 { Label L;
1171 __ movptr(rax, Address(method, methodOopDesc::native_function_offset()));
1172 __ testptr(rax, rax);
1173 __ jcc(Assembler::notZero, L);
1174 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::prepare_native_call), method);
1175 __ movptr(method, STATE(_method));
1176 __ verify_oop(method);
1177 __ movptr(rax, Address(method, methodOopDesc::native_function_offset()));
1178 __ bind(L);
1179 }
1180
1181 // pass mirror handle if static call
// For static methods the class mirror is passed indirectly: the mirror oop
// is stored in the frame's _oop_temp slot and the *address* of that slot is
// passed as the second C argument (a handle). NOTE(review): presumably the
// first argument is the JNIEnv* — confirm against the surrounding code.
1182 { Label L;
// Byte offset of the java_mirror field within a klassOop.
1183 const int mirror_offset = klassOopDesc::klass_part_offset_in_bytes() + Klass::java_mirror_offset_in_bytes();
1184 __ movl(t, Address(method, methodOopDesc::access_flags_offset()));
1185 __ testl(t, JVM_ACC_STATIC);
1186 __ jcc(Assembler::zero, L);
1187 // get mirror
// method->constants()->pool_holder()->java_mirror
1188 __ movptr(t, Address(method, methodOopDesc:: constants_offset()));
1189 __ movptr(t, Address(t, constantPoolOopDesc::pool_holder_offset_in_bytes()));
1190 __ movptr(t, Address(t, mirror_offset));
1191 // copy mirror into activation object
1192 __ movptr(STATE(_oop_temp), t);
1193 // pass handle to mirror
1194 #ifdef _LP64
// LP64: second C argument is passed in register c_rarg1.
1195 __ lea(c_rarg1, STATE(_oop_temp));
1196 #else
// 32-bit: second C argument is passed on the stack at rsp + wordSize.
1197 __ lea(t, STATE(_oop_temp));
1198 __ movptr(Address(rsp, wordSize), t);
1199 #endif // _LP64
1200 __ bind(L);
1201 }
1202 #ifdef ASSERT
1203 {
|
494 // entries run from -1..x where &monitor[x] ==
495
496 {
497 // Must not attempt to lock method until we enter interpreter as gc won't be able to find the
498 // initial frame. However we allocate a free monitor so we don't have to shuffle the expression stack
499 // immediately.
500
501 // synchronize method
502 const Address access_flags (rbx, methodOopDesc::access_flags_offset());
503 const int entry_size = frame::interpreter_frame_monitor_size() * wordSize;
504 Label not_synced;
505
// Non-synchronized methods skip monitor allocation entirely.
506 __ movl(rax, access_flags);
507 __ testl(rax, JVM_ACC_SYNCHRONIZED);
508 __ jcc(Assembler::zero, not_synced);
509
510 // Allocate initial monitor and pre initialize it
511 // get synchronization object
512
513 Label done;
// Byte offset of the java_mirror field within a Klass.
514 const int mirror_offset = in_bytes(Klass::java_mirror_offset());
515 __ movl(rax, access_flags);
516 __ testl(rax, JVM_ACC_STATIC);
// movptr does not modify the condition codes, so the JVM_ACC_STATIC test
// result is still live at the jcc below; the receiver load is hoisted
// ahead of the branch on the assumption that non-static is the common case.
517 __ movptr(rax, Address(locals, 0)); // get receiver (assume this is frequent case)
518 __ jcc(Assembler::zero, done);
// Static method: synchronize on the class mirror instead of the receiver:
// method->constants()->pool_holder()->java_mirror.
519 __ movptr(rax, Address(rbx, methodOopDesc::constants_offset()));
520 __ movptr(rax, Address(rax, constantPoolOopDesc::pool_holder_offset_in_bytes()));
521 __ movptr(rax, Address(rax, mirror_offset));
522 __ bind(done);
523 // add space for monitor & lock
524 __ subptr(rsp, entry_size); // add space for a monitor entry
// Only the object field is pre-initialized here; NOTE(review): the lock
// itself is presumably acquired later (lock_object) once the interpreter
// frame is fully set up — confirm against the caller.
525 __ movptr(Address(rsp, BasicObjectLock::obj_offset_in_bytes()), rax); // store object
526 __ bind(not_synced);
527 }
528
529 __ movptr(STATE(_stack_base), rsp); // set expression stack base ( == &monitors[-count])
530 if (native) {
// Native frames have no expression stack: tos and limit coincide with the base.
531 __ movptr(STATE(_stack), rsp); // set current expression stack tos
532 __ movptr(STATE(_stack_limit), rsp);
533 } else {
534 __ subptr(rsp, wordSize); // pre-push stack
746 const int entry_size = frame::interpreter_frame_monitor_size() * wordSize;
747 const Address access_flags (rbx, methodOopDesc::access_flags_offset());
748
749 const Register monitor = NOT_LP64(rdx) LP64_ONLY(c_rarg1);
750
751 // find initial monitor i.e. monitors[-1]
752 __ movptr(monitor, STATE(_monitor_base)); // get monitor bottom limit
753 __ subptr(monitor, entry_size); // point to initial monitor
754
755 #ifdef ASSERT
// Debug-only sanity check: this path must only be reached for synchronized methods.
756 { Label L;
757 __ movl(rax, access_flags);
758 __ testl(rax, JVM_ACC_SYNCHRONIZED);
759 __ jcc(Assembler::notZero, L);
760 __ stop("method doesn't need synchronization");
761 __ bind(L);
762 }
763 #endif // ASSERT
764 // get synchronization object
765 { Label done;
// Byte offset of the java_mirror field within a Klass.
766 const int mirror_offset = in_bytes(Klass::java_mirror_offset());
767 __ movl(rax, access_flags);
768 __ movptr(rdi, STATE(_locals)); // prepare to get receiver (assume common case)
769 __ testl(rax, JVM_ACC_STATIC);
// movptr leaves the condition codes intact, so the JVM_ACC_STATIC test is
// still live at the jcc below; zero (non-static) keeps the receiver in rax.
770 __ movptr(rax, Address(rdi, 0)); // get receiver (assume this is frequent case)
771 __ jcc(Assembler::zero, done);
// Static method: synchronize on the class mirror —
// method->constants()->pool_holder()->java_mirror.
772 __ movptr(rax, Address(rbx, methodOopDesc::constants_offset()));
773 __ movptr(rax, Address(rax, constantPoolOopDesc::pool_holder_offset_in_bytes()));
774 __ movptr(rax, Address(rax, mirror_offset));
775 __ bind(done);
776 }
777 #ifdef ASSERT
// Debug-only check that the pre-initialized monitor slot holds exactly the
// object we just recomputed.
778 { Label L;
779 __ cmpptr(rax, Address(monitor, BasicObjectLock::obj_offset_in_bytes())); // correct object?
780 __ jcc(Assembler::equal, L);
781 __ stop("wrong synchronization object"); // fixed diagnostic typo: was "lobject"
782 __ bind(L);
783 }
784 #endif // ASSERT
785 // can destroy rax, rdx|c_rarg1, rcx, and (via call_VM) rdi!
786 __ lock_object(monitor);
1163
1164 // result handler is in rax
1165 // set result handler
1166 __ movptr(STATE(_result_handler), rax);
1167
1168
1169 // get native function entry point
// If the native entry point has not been filled in yet, call into the
// runtime to resolve it, then reload the method oop (call_VM can clobber
// registers) and re-fetch the now-populated function pointer.
1170 { Label L;
1171 __ movptr(rax, Address(method, methodOopDesc::native_function_offset()));
1172 __ testptr(rax, rax);
1173 __ jcc(Assembler::notZero, L);
1174 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::prepare_native_call), method);
1175 __ movptr(method, STATE(_method));
1176 __ verify_oop(method);
1177 __ movptr(rax, Address(method, methodOopDesc::native_function_offset()));
1178 __ bind(L);
1179 }
1180
1181 // pass mirror handle if static call
// For static methods the class mirror is passed indirectly: the mirror oop
// is stored in the frame's _oop_temp slot and the *address* of that slot is
// passed as the second C argument (a handle). NOTE(review): presumably the
// first argument is the JNIEnv* — confirm against the surrounding code.
1182 { Label L;
// Byte offset of the java_mirror field within a Klass.
1183 const int mirror_offset = in_bytes(Klass::java_mirror_offset());
1184 __ movl(t, Address(method, methodOopDesc::access_flags_offset()));
1185 __ testl(t, JVM_ACC_STATIC);
1186 __ jcc(Assembler::zero, L);
1187 // get mirror
// method->constants()->pool_holder()->java_mirror
1188 __ movptr(t, Address(method, methodOopDesc:: constants_offset()));
1189 __ movptr(t, Address(t, constantPoolOopDesc::pool_holder_offset_in_bytes()));
1190 __ movptr(t, Address(t, mirror_offset));
1191 // copy mirror into activation object
1192 __ movptr(STATE(_oop_temp), t);
1193 // pass handle to mirror
1194 #ifdef _LP64
// LP64: second C argument is passed in register c_rarg1.
1195 __ lea(c_rarg1, STATE(_oop_temp));
1196 #else
// 32-bit: second C argument is passed on the stack at rsp + wordSize.
1197 __ lea(t, STATE(_oop_temp));
1198 __ movptr(Address(rsp, wordSize), t);
1199 #endif // _LP64
1200 __ bind(L);
1201 }
1202 #ifdef ASSERT
1203 {
|