378 __ pop(r12);
379 __ pop(r11);
380 __ pop(r10);
381 __ pop(r9);
382 __ pop(r8);
383 #endif
384 __ pop(rsi);
385 __ pop(rdi);
386 __ pop(rdx);
387 __ pop(rcx);
388
389 if (dst != rax) {
390 __ movptr(dst, rax);
391 __ pop(rax);
392 }
393
394 __ bind(done);
395 __ block_comment("load_reference_barrier_native { ");
396 }
397
#ifdef _LP64
// 64-bit: the generic nmethod entry barrier emitted by BarrierSetAssembler
// is sufficient for c2i adapters; no Shenandoah-specific sequence is needed.
void ShenandoahBarrierSetAssembler::c2i_entry_barrier(MacroAssembler* masm) {
  // Use default version
  BarrierSetAssembler::c2i_entry_barrier(masm);
}
#else
// 32-bit: emitted at the c2i adapter entry. Checks whether the incoming
// Method* (in rbx) belongs to a class loader that is concurrently unloading;
// if so (or if rbx is null), control is redirected to the
// handle_wrong_method stub so the call can be re-resolved.
void ShenandoahBarrierSetAssembler::c2i_entry_barrier(MacroAssembler* masm) {
  BarrierSetNMethod* bs = BarrierSet::barrier_set()->barrier_set_nmethod();
  if (bs == NULL) {
    // No nmethod entry barrier configured: emit nothing.
    return;
  }

  Label bad_call;
  __ cmpptr(rbx, 0); // rbx contains the incoming method for c2i adapters.
  __ jcc(Assembler::equal, bad_call);

  // Scratch registers; saved here and restored on every exit path below
  // so the stack stays balanced.
  Register tmp1 = rax;
  Register tmp2 = rcx;

  __ push(tmp1);
  __ push(tmp2);

  // Pointer chase to the method holder to find out if the method is concurrently unloading.
  Label method_live;
  __ load_method_holder_cld(tmp1, rbx);

  // Is it a strong CLD?
  __ cmpl(Address(tmp1, ClassLoaderData::keep_alive_offset()), 0);
  __ jcc(Assembler::greater, method_live);

  // Is it a weak but alive CLD?
  __ movptr(tmp1, Address(tmp1, ClassLoaderData::holder_offset()));
  __ resolve_weak_handle(tmp1, tmp2);
  __ cmpptr(tmp1, 0);
  __ jcc(Assembler::notEqual, method_live);
  // Holder is dead: restore the scratch registers before bailing out.
  // (The jump from the null-rbx check above reaches bad_call without the
  // pushes, so the stack is balanced on both paths into bad_call.)
  __ pop(tmp2);
  __ pop(tmp1);

  __ bind(bad_call);
  __ jump(RuntimeAddress(SharedRuntime::get_handle_wrong_method_stub()));
  // Method is live: restore scratch registers and fall through into the adapter.
  __ bind(method_live);
  __ pop(tmp2);
  __ pop(tmp1);
}
#endif
443
444 void ShenandoahBarrierSetAssembler::storeval_barrier(MacroAssembler* masm, Register dst, Register tmp) {
445 if (ShenandoahStoreValEnqueueBarrier) {
446 storeval_barrier_impl(masm, dst, tmp);
447 }
448 }
449
450 void ShenandoahBarrierSetAssembler::storeval_barrier_impl(MacroAssembler* masm, Register dst, Register tmp) {
451 assert(ShenandoahStoreValEnqueueBarrier, "should be enabled");
452
453 if (dst == noreg) return;
454
455 if (ShenandoahStoreValEnqueueBarrier) {
456 // The set of registers to be saved+restored is the same as in the write-barrier above.
457 // Those are the commonly used registers in the interpreter.
458 __ pusha();
459 // __ push_callee_saved_registers();
460 __ subptr(rsp, 2 * Interpreter::stackElementSize);
461 __ movdbl(Address(rsp, 0), xmm0);
462
463 #ifdef _LP64
|
378 __ pop(r12);
379 __ pop(r11);
380 __ pop(r10);
381 __ pop(r9);
382 __ pop(r8);
383 #endif
384 __ pop(rsi);
385 __ pop(rdi);
386 __ pop(rdx);
387 __ pop(rcx);
388
389 if (dst != rax) {
390 __ movptr(dst, rax);
391 __ pop(rax);
392 }
393
394 __ bind(done);
395 __ block_comment("load_reference_barrier_native { ");
396 }
397
398 void ShenandoahBarrierSetAssembler::storeval_barrier(MacroAssembler* masm, Register dst, Register tmp) {
399 if (ShenandoahStoreValEnqueueBarrier) {
400 storeval_barrier_impl(masm, dst, tmp);
401 }
402 }
403
404 void ShenandoahBarrierSetAssembler::storeval_barrier_impl(MacroAssembler* masm, Register dst, Register tmp) {
405 assert(ShenandoahStoreValEnqueueBarrier, "should be enabled");
406
407 if (dst == noreg) return;
408
409 if (ShenandoahStoreValEnqueueBarrier) {
410 // The set of registers to be saved+restored is the same as in the write-barrier above.
411 // Those are the commonly used registers in the interpreter.
412 __ pusha();
413 // __ push_callee_saved_registers();
414 __ subptr(rsp, 2 * Interpreter::stackElementSize);
415 __ movdbl(Address(rsp, 0), xmm0);
416
417 #ifdef _LP64
|