
src/hotspot/os_cpu/bsd_x86/bsd_x86_32.s

Old:
 618         subl     $4,%esi
 619         subl     $1,%ecx
 620         jnz      2b
 621         addl     %esi,%edi
 622         jmp      4f
 623 3:      rep;     smovl
 624 4:      andl     $1,%eax
 625         je       6f
 626         addl     $2,%esi
 627         addl     $2,%edi
 628 5:      movw     (%esi),%dx
 629         movw     %dx,(%edi)
 630 6:      cld
 631         popl     %edi
 632         popl     %esi
 633         ret
 634 
 635 
 636         # Support for int64_t Atomic::cmpxchg(int64_t exchange_value,
 637         #                                     volatile int64_t* dest,
 638         #                                     int64_t compare_value,
 639         #                                     bool is_MP)
 640         #
 641         .p2align 4,,15
 642         ELF_TYPE(_Atomic_cmpxchg_long,@function)
 643 SYMBOL(_Atomic_cmpxchg_long):
 644                                    #  8(%esp) : return PC
 645         pushl    %ebx              #  4(%esp) : old %ebx
 646         pushl    %edi              #  0(%esp) : old %edi
 647         movl     12(%esp), %ebx    # 12(%esp) : exchange_value (low)
 648         movl     16(%esp), %ecx    # 16(%esp) : exchange_value (high)
 649         movl     24(%esp), %eax    # 24(%esp) : compare_value (low)
 650         movl     28(%esp), %edx    # 28(%esp) : compare_value (high)
 651         movl     20(%esp), %edi    # 20(%esp) : dest
 652         cmpl     $0, 32(%esp)      # 32(%esp) : is_MP
 653         je       1f
 654         lock
 655 1:      cmpxchg8b (%edi)
 656         popl     %edi
 657         popl     %ebx
 658         ret
 659 
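The routine above implements a 64-bit compare-and-swap with cmpxchg8b: the quadword at dest is compared against compare_value and replaced by exchange_value only if they match, and the previous contents are returned in EDX:EAX. The is_MP argument only decides whether the lock prefix is skipped on uniprocessor systems; the data contract is the same either way. A minimal portable sketch of that contract (illustrative C++, not the HotSpot caller):

    #include <atomic>
    #include <cstdint>

    // Illustrative equivalent of _Atomic_cmpxchg_long's contract: atomically
    // replace *dest with exchange_value iff *dest == compare_value, and
    // return whatever value was observed at *dest.
    static int64_t cmpxchg_long_equiv(int64_t exchange_value,
                                      std::atomic<int64_t>* dest,
                                      int64_t compare_value) {
      int64_t observed = compare_value;
      // On failure, compare_exchange_strong writes the current value into
      // 'observed'; on success, 'observed' already equals the old value.
      dest->compare_exchange_strong(observed, exchange_value);
      return observed;
    }
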
 660 
 661         # Support for int64_t Atomic::load and Atomic::store.
 662         # void _Atomic_move_long(const volatile int64_t* src, volatile int64_t* dst)
 663         .p2align 4,,15
 664         ELF_TYPE(_Atomic_move_long,@function)
 665 SYMBOL(_Atomic_move_long):
 666         movl     4(%esp), %eax   # src
 667         fildll    (%eax)
 668         movl     8(%esp), %eax   # dest
 669         fistpll   (%eax)
 670         ret
 671 
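_Atomic_move_long exists because plain 32-bit mov instructions cannot move 64 bits in a single access; fildll performs one 64-bit read and fistpll one 64-bit write through the x87 unit, which is how 32-bit x86 gets atomic int64_t load and store. A hedged sketch of how the C++ side can use the entry point, with the declaration taken from the comment above (the wrapper names are illustrative, not the HotSpot API):

    #include <cstdint>

    // Declaration matching the comment above the assembly entry point.
    extern "C" void _Atomic_move_long(const volatile int64_t* src,
                                      volatile int64_t* dst);

    // Illustrative wrappers: a 64-bit atomic load copies *src into a local,
    // a 64-bit atomic store copies a local into *dst, each as one access.
    inline int64_t load64(const volatile int64_t* src) {
      int64_t dst;
      _Atomic_move_long(src, &dst);
      return dst;
    }

    inline void store64(int64_t value, volatile int64_t* dst) {
      _Atomic_move_long(&value, dst);
    }
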
New:
 618         subl     $4,%esi
 619         subl     $1,%ecx
 620         jnz      2b
 621         addl     %esi,%edi
 622         jmp      4f
 623 3:      rep;     smovl
 624 4:      andl     $1,%eax
 625         je       6f
 626         addl     $2,%esi
 627         addl     $2,%edi
 628 5:      movw     (%esi),%dx
 629         movw     %dx,(%edi)
 630 6:      cld
 631         popl     %edi
 632         popl     %esi
 633         ret
 634 
 635 
 636         # Support for int64_t Atomic::cmpxchg(int64_t exchange_value,
 637         #                                     volatile int64_t* dest,
 638         #                                     int64_t compare_value)

 639         #
 640         .p2align 4,,15
 641         ELF_TYPE(_Atomic_cmpxchg_long,@function)
 642 SYMBOL(_Atomic_cmpxchg_long):
 643                                    #  8(%esp) : return PC
 644         pushl    %ebx              #  4(%esp) : old %ebx
 645         pushl    %edi              #  0(%esp) : old %edi
 646         movl     12(%esp), %ebx    # 12(%esp) : exchange_value (low)
 647         movl     16(%esp), %ecx    # 16(%esp) : exchange_value (high)
 648         movl     24(%esp), %eax    # 24(%esp) : compare_value (low)
 649         movl     28(%esp), %edx    # 28(%esp) : compare_value (high)
 650         movl     20(%esp), %edi    # 20(%esp) : dest


 651         lock
 652         cmpxchg8b (%edi)
 653         popl     %edi
 654         popl     %ebx
 655         ret
 656 
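In the new version the is_MP argument is gone and the lock prefix is emitted unconditionally, so the C-level entry point now takes only the three parameters listed in the comment above. A small usage sketch (illustrative; the HotSpot-side wrapper in the platform atomic header is not reproduced here, and try_claim is a hypothetical helper):

    #include <cstdint>

    // Declaration matching the updated comment above.
    extern "C" int64_t _Atomic_cmpxchg_long(int64_t exchange_value,
                                            volatile int64_t* dest,
                                            int64_t compare_value);

    // Try to advance a 64-bit counter from 0 to 1; the return value is the
    // previous contents of *dest, so success means prev == 0.
    static bool try_claim(volatile int64_t* counter) {
      int64_t prev = _Atomic_cmpxchg_long(1, counter, 0);
      return prev == 0;
    }
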
 657 
 658         # Support for int64_t Atomic::load and Atomic::store.
 659         # void _Atomic_move_long(const volatile int64_t* src, volatile int64_t* dst)
 660         .p2align 4,,15
 661         ELF_TYPE(_Atomic_move_long,@function)
 662 SYMBOL(_Atomic_move_long):
 663         movl     4(%esp), %eax   # src
 664         fildll    (%eax)
 665         movl     8(%esp), %eax   # dest
 666         fistpll   (%eax)
 667         ret
 668 