< prev index next >

src/hotspot/cpu/x86/stubGenerator_x86_64.cpp

Print this page




 594   //    c_rarg2: compare_value
 595   //
 596   // Result:
 597   //    if ( compare_value == *dest ) {
 598   //       *dest = exchange_value
 599   //       return compare_value;
 600   //    else
 601   //       return *dest;
 602   address generate_atomic_cmpxchg() {
 603     StubCodeMark mark(this, "StubRoutines", "atomic_cmpxchg");
 604     address start = __ pc();
 605 
 606     __ movl(rax, c_rarg2);
 607    if ( os::is_MP() ) __ lock();
 608     __ cmpxchgl(c_rarg0, Address(c_rarg1, 0));
 609     __ ret(0);
 610 
 611     return start;
 612   }
 613 
 614   // Support for jbyte atomic::atomic_cmpxchg(jbyte exchange_value, volatile jbyte* dest,
 615   //                                          jbyte compare_value)
 616   //
 617   // Arguments :
 618   //    c_rarg0: exchange_value
 619   //    c_rarg1: dest
 620   //    c_rarg2: compare_value
 621   //
 622   // Result:
 623   //    if ( compare_value == *dest ) {
 624   //       *dest = exchange_value
 625   //       return compare_value;
 626   //    else
 627   //       return *dest;
 628   address generate_atomic_cmpxchg_byte() {
 629     StubCodeMark mark(this, "StubRoutines", "atomic_cmpxchg_byte");
 630     address start = __ pc();
 631 
 632     __ movsbq(rax, c_rarg2);
 633    if ( os::is_MP() ) __ lock();
 634     __ cmpxchgb(c_rarg0, Address(c_rarg1, 0));
 635     __ ret(0);
 636 
 637     return start;
 638   }
 639 
 640   // Support for jlong atomic::atomic_cmpxchg(jlong exchange_value,
 641   //                                          volatile jlong* dest,
 642   //                                          jlong compare_value)
 643   // Arguments :
 644   //    c_rarg0: exchange_value
 645   //    c_rarg1: dest
 646   //    c_rarg2: compare_value
 647   //
 648   // Result:
 649   //    if ( compare_value == *dest ) {
 650   //       *dest = exchange_value
 651   //       return compare_value;
 652   //    else
 653   //       return *dest;
 654   address generate_atomic_cmpxchg_long() {
 655     StubCodeMark mark(this, "StubRoutines", "atomic_cmpxchg_long");
 656     address start = __ pc();
 657 
 658     __ movq(rax, c_rarg2);
 659    if ( os::is_MP() ) __ lock();
 660     __ cmpxchgq(c_rarg0, Address(c_rarg1, 0));
 661     __ ret(0);
 662 


 677     address start = __ pc();
 678 
 679     __ movl(rax, c_rarg0);
 680    if ( os::is_MP() ) __ lock();
 681     __ xaddl(Address(c_rarg1, 0), c_rarg0);
 682     __ addl(rax, c_rarg0);
 683     __ ret(0);
 684 
 685     return start;
 686   }
 687 
 688   // Support for intptr_t atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest)
 689   //
 690   // Arguments :
 691   //    c_rarg0: add_value
 692   //    c_rarg1: dest
 693   //
 694   // Result:
 695   //    *dest += add_value
 696   //    return *dest;
 697   address generate_atomic_add_ptr() {
 698     StubCodeMark mark(this, "StubRoutines", "atomic_add_ptr");
 699     address start = __ pc();
 700 
 701     __ movptr(rax, c_rarg0); // Copy to eax we need a return value anyhow
 702    if ( os::is_MP() ) __ lock();
 703     __ xaddptr(Address(c_rarg1, 0), c_rarg0);
 704     __ addptr(rax, c_rarg0);
 705     __ ret(0);
 706 
 707     return start;
 708   }
 709 
 710   // Support for intptr_t OrderAccess::fence()
 711   //
 712   // Arguments :
 713   //
 714   // Result:
 715   address generate_orderaccess_fence() {
 716     StubCodeMark mark(this, "StubRoutines", "orderaccess_fence");
 717     address start = __ pc();
 718     __ membar(Assembler::StoreLoad);


5004     // that could be shared among different platforms - however the
5005     // benefit seems to be smaller than the disadvantage of having a
5006     // much more complicated generator structure. See also comment in
5007     // stubRoutines.hpp.
5008 
5009     StubRoutines::_forward_exception_entry = generate_forward_exception();
5010 
5011     StubRoutines::_call_stub_entry =
5012       generate_call_stub(StubRoutines::_call_stub_return_address);
5013 
5014     // is referenced by megamorphic call
5015     StubRoutines::_catch_exception_entry = generate_catch_exception();
5016 
5017     // atomic calls
5018     StubRoutines::_atomic_xchg_entry         = generate_atomic_xchg();
5019     StubRoutines::_atomic_xchg_long_entry    = generate_atomic_xchg_long();
5020     StubRoutines::_atomic_cmpxchg_entry      = generate_atomic_cmpxchg();
5021     StubRoutines::_atomic_cmpxchg_byte_entry = generate_atomic_cmpxchg_byte();
5022     StubRoutines::_atomic_cmpxchg_long_entry = generate_atomic_cmpxchg_long();
5023     StubRoutines::_atomic_add_entry          = generate_atomic_add();
5024     StubRoutines::_atomic_add_ptr_entry      = generate_atomic_add_ptr();
5025     StubRoutines::_fence_entry               = generate_orderaccess_fence();
5026 
5027     // platform dependent
5028     StubRoutines::x86::_get_previous_fp_entry = generate_get_previous_fp();
5029     StubRoutines::x86::_get_previous_sp_entry = generate_get_previous_sp();
5030 
5031     StubRoutines::x86::_verify_mxcsr_entry    = generate_verify_mxcsr();
5032 
5033     // Build this early so it's available for the interpreter.
5034     StubRoutines::_throw_StackOverflowError_entry =
5035       generate_throw_exception("StackOverflowError throw_exception",
5036                                CAST_FROM_FN_PTR(address,
5037                                                 SharedRuntime::
5038                                                 throw_StackOverflowError));
5039     StubRoutines::_throw_delayed_StackOverflowError_entry =
5040       generate_throw_exception("delayed StackOverflowError throw_exception",
5041                                CAST_FROM_FN_PTR(address,
5042                                                 SharedRuntime::
5043                                                 throw_delayed_StackOverflowError));
5044     if (UseCRC32Intrinsics) {




 594   //    c_rarg2: compare_value
 595   //
 596   // Result:
 597   //    if ( compare_value == *dest ) {
 598   //       *dest = exchange_value
 599   //       return compare_value;
 600   //    else
 601   //       return *dest;
 602   address generate_atomic_cmpxchg() {
 603     StubCodeMark mark(this, "StubRoutines", "atomic_cmpxchg");
 604     address start = __ pc();
 605 
 606     __ movl(rax, c_rarg2);
 607    if ( os::is_MP() ) __ lock();
 608     __ cmpxchgl(c_rarg0, Address(c_rarg1, 0));
 609     __ ret(0);
 610 
 611     return start;
 612   }
 613 
 614   // Support for int8_t atomic::atomic_cmpxchg(int8_t exchange_value, volatile int8_t* dest,
 615   //                                           int8_t compare_value)
 616   //
 617   // Arguments :
 618   //    c_rarg0: exchange_value
 619   //    c_rarg1: dest
 620   //    c_rarg2: compare_value
 621   //
 622   // Result:
 623   //    if ( compare_value == *dest ) {
 624   //       *dest = exchange_value
 625   //       return compare_value;
 626   //    else
 627   //       return *dest;
 628   address generate_atomic_cmpxchg_byte() {
 629     StubCodeMark mark(this, "StubRoutines", "atomic_cmpxchg_byte");
 630     address start = __ pc();
 631 
 632     __ movsbq(rax, c_rarg2);
 633    if ( os::is_MP() ) __ lock();
 634     __ cmpxchgb(c_rarg0, Address(c_rarg1, 0));
 635     __ ret(0);
 636 
 637     return start;
 638   }
 639 
 640   // Support for int64_t atomic::atomic_cmpxchg(int64_t exchange_value,
 641   //                                            volatile int64_t* dest,
 642   //                                            int64_t compare_value)
 643   // Arguments :
 644   //    c_rarg0: exchange_value
 645   //    c_rarg1: dest
 646   //    c_rarg2: compare_value
 647   //
 648   // Result:
 649   //    if ( compare_value == *dest ) {
 650   //       *dest = exchange_value
 651   //       return compare_value;
 652   //    else
 653   //       return *dest;
 654   address generate_atomic_cmpxchg_long() {
 655     StubCodeMark mark(this, "StubRoutines", "atomic_cmpxchg_long");
 656     address start = __ pc();
 657 
 658     __ movq(rax, c_rarg2);
 659    if ( os::is_MP() ) __ lock();
 660     __ cmpxchgq(c_rarg0, Address(c_rarg1, 0));
 661     __ ret(0);
 662 


 677     address start = __ pc();
 678 
 679     __ movl(rax, c_rarg0);
 680    if ( os::is_MP() ) __ lock();
 681     __ xaddl(Address(c_rarg1, 0), c_rarg0);
 682     __ addl(rax, c_rarg0);
 683     __ ret(0);
 684 
 685     return start;
 686   }
 687 
  // Support for int64_t atomic::add(int64_t add_value, volatile int64_t* dest)
  // (this stub is registered as StubRoutines::_atomic_add_long_entry; the
  //  comment previously described the retired atomic::add_ptr entry)
  //
  // Arguments :
  //    c_rarg0: add_value
  //    c_rarg1: dest
  //
  // Result:
  //    *dest += add_value
  //    return *dest;
  address generate_atomic_add_long() {
    StubCodeMark mark(this, "StubRoutines", "atomic_add_long");
    address start = __ pc();

    __ movptr(rax, c_rarg0); // Copy to eax we need a return value anyhow
   if ( os::is_MP() ) __ lock();
    __ xaddptr(Address(c_rarg1, 0), c_rarg0);  // c_rarg0 receives the old *dest
    __ addptr(rax, c_rarg0);                   // old *dest + add_value == new *dest
    __ ret(0);

    return start;
  }
 709 
 710   // Support for intptr_t OrderAccess::fence()
 711   //
 712   // Arguments :
 713   //
 714   // Result:
 715   address generate_orderaccess_fence() {
 716     StubCodeMark mark(this, "StubRoutines", "orderaccess_fence");
 717     address start = __ pc();
 718     __ membar(Assembler::StoreLoad);


5004     // that could be shared among different platforms - however the
5005     // benefit seems to be smaller than the disadvantage of having a
5006     // much more complicated generator structure. See also comment in
5007     // stubRoutines.hpp.
5008 
5009     StubRoutines::_forward_exception_entry = generate_forward_exception();
5010 
5011     StubRoutines::_call_stub_entry =
5012       generate_call_stub(StubRoutines::_call_stub_return_address);
5013 
5014     // is referenced by megamorphic call
5015     StubRoutines::_catch_exception_entry = generate_catch_exception();
5016 
5017     // atomic calls
5018     StubRoutines::_atomic_xchg_entry          = generate_atomic_xchg();
5019     StubRoutines::_atomic_xchg_long_entry     = generate_atomic_xchg_long();
5020     StubRoutines::_atomic_cmpxchg_entry       = generate_atomic_cmpxchg();
5021     StubRoutines::_atomic_cmpxchg_byte_entry  = generate_atomic_cmpxchg_byte();
5022     StubRoutines::_atomic_cmpxchg_long_entry  = generate_atomic_cmpxchg_long();
5023     StubRoutines::_atomic_add_entry           = generate_atomic_add();
5024     StubRoutines::_atomic_add_long_entry      = generate_atomic_add_long();
5025     StubRoutines::_fence_entry                = generate_orderaccess_fence();
5026 
5027     // platform dependent
5028     StubRoutines::x86::_get_previous_fp_entry = generate_get_previous_fp();
5029     StubRoutines::x86::_get_previous_sp_entry = generate_get_previous_sp();
5030 
5031     StubRoutines::x86::_verify_mxcsr_entry    = generate_verify_mxcsr();
5032 
5033     // Build this early so it's available for the interpreter.
5034     StubRoutines::_throw_StackOverflowError_entry =
5035       generate_throw_exception("StackOverflowError throw_exception",
5036                                CAST_FROM_FN_PTR(address,
5037                                                 SharedRuntime::
5038                                                 throw_StackOverflowError));
5039     StubRoutines::_throw_delayed_StackOverflowError_entry =
5040       generate_throw_exception("delayed StackOverflowError throw_exception",
5041                                CAST_FROM_FN_PTR(address,
5042                                                 SharedRuntime::
5043                                                 throw_delayed_StackOverflowError));
5044     if (UseCRC32Intrinsics) {


< prev index next >