
src/os_cpu/linux_x86/vm/linux_x86_32.s

Old:

 597         subl     $4,%esi
 598         subl     $1,%ecx
 599         jnz      2b
 600         addl     %esi,%edi
 601         jmp      4f
 602 3:      rep;     smovl
 603 4:      andl     $1,%eax
 604         je       6f
 605         addl     $2,%esi
 606         addl     $2,%edi
 607 5:      movw     (%esi),%dx
 608         movw     %dx,(%edi)
 609 6:      cld
 610         popl     %edi
 611         popl     %esi
 612         ret
 613 
 614 
 615         # Support for jlong Atomic::cmpxchg(jlong exchange_value,
 616         #                                   volatile jlong* dest,
 617         #                                   jlong compare_value,
 618         #                                   bool is_MP)
 619         #
 620         .p2align 4,,15
 621         .type    _Atomic_cmpxchg_long,@function
 622 _Atomic_cmpxchg_long:
 623                                    #  8(%esp) : return PC
 624         pushl    %ebx              #  4(%esp) : old %ebx
 625         pushl    %edi              #  0(%esp) : old %edi
 626         movl     12(%esp), %ebx    # 12(%esp) : exchange_value (low)
 627         movl     16(%esp), %ecx    # 16(%esp) : exchange_value (high)
 628         movl     24(%esp), %eax    # 24(%esp) : compare_value (low)
 629         movl     28(%esp), %edx    # 28(%esp) : compare_value (high)
 630         movl     20(%esp), %edi    # 20(%esp) : dest
 631         cmpl     $0, 32(%esp)      # 32(%esp) : is_MP
 632         je       1f
 633         lock
 634 1:      cmpxchg8b (%edi)
 635         popl     %edi
 636         popl     %ebx
 637         ret
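
        # The stub above implements a 64-bit compare-and-exchange for the
        # 32-bit VM: the caller passes the would-be new value, the destination,
        # the expected value, and (in this old form) an is_MP flag that decides
        # whether the lock prefix is executed. A minimal C++ sketch of a caller,
        # assuming only the cdecl signature documented in the comment above
        # (the jlong typedef and the test harness are illustrative, not HotSpot
        # source; build with -m32 and link against this .s file):
        #
        #     #include <cstdio>
        #
        #     typedef long long jlong;
        #
        #     // Assembly stub from this file; returns the value found at *dest.
        #     extern "C" jlong _Atomic_cmpxchg_long(jlong exchange_value,
        #                                           volatile jlong* dest,
        #                                           jlong compare_value,
        #                                           bool is_MP);
        #
        #     int main() {
        #       volatile jlong value = 42;
        #       // Try to replace 42 with 100; on success the return equals compare_value.
        #       jlong observed = _Atomic_cmpxchg_long(100, &value, 42, /*is_MP=*/true);
        #       std::printf("observed=%lld value=%lld\n", observed, (jlong)value);
        #       return 0;
        #     }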
 638 
 639 
 640         # Support for jlong Atomic::load and Atomic::store.
 641         # void _Atomic_move_long(volatile jlong* src, volatile jlong* dst)
 642         .p2align 4,,15
 643         .type    _Atomic_move_long,@function
 644 _Atomic_move_long:
 645         movl     4(%esp), %eax   # src
 646         fildll    (%eax)
 647         movl     8(%esp), %eax   # dest
 648         fistpll   (%eax)
 649         ret
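
        # The fildll/fistpll pair above performs the 8-byte load and the 8-byte
        # store each as a single FPU instruction, which is what keeps a jlong
        # load or store atomic on 32-bit x86; a pair of 32-bit movs could tear.
        # A minimal C++ sketch of how such a stub can back Atomic::load and
        # Atomic::store for jlong (the wrapper names are illustrative, not
        # HotSpot code; build with -m32 and link against this .s file):
        #
        #     typedef long long jlong;
        #
        #     // Assembly stub from this file: atomically copies 8 bytes from *src to *dst.
        #     extern "C" void _Atomic_move_long(volatile jlong* src, volatile jlong* dst);
        #
        #     static volatile jlong shared_counter;
        #
        #     // Illustrative atomic 64-bit store: stage the value, then move it in one shot.
        #     void store_jlong(jlong v) {
        #       volatile jlong tmp = v;
        #       _Atomic_move_long(&tmp, &shared_counter);
        #     }
        #
        #     // Illustrative atomic 64-bit load: move into a local, then read it.
        #     jlong load_jlong() {
        #       volatile jlong tmp;
        #       _Atomic_move_long(&shared_counter, &tmp);
        #       return tmp;
        #     }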
 650

New:

 597         subl     $4,%esi
 598         subl     $1,%ecx
 599         jnz      2b
 600         addl     %esi,%edi
 601         jmp      4f
 602 3:      rep;     smovl
 603 4:      andl     $1,%eax
 604         je       6f
 605         addl     $2,%esi
 606         addl     $2,%edi
 607 5:      movw     (%esi),%dx
 608         movw     %dx,(%edi)
 609 6:      cld
 610         popl     %edi
 611         popl     %esi
 612         ret
 613 
 614 
 615         # Support for jlong Atomic::cmpxchg(jlong exchange_value,
 616         #                                   volatile jlong* dest,
 617         #                                   jlong compare_value)
 618         #
 619         .p2align 4,,15
 620         .type    _Atomic_cmpxchg_long,@function
 621 _Atomic_cmpxchg_long:
 622                                    #  8(%esp) : return PC
 623         pushl    %ebx              #  4(%esp) : old %ebx
 624         pushl    %edi              #  0(%esp) : old %edi
 625         movl     12(%esp), %ebx    # 12(%esp) : exchange_value (low)
 626         movl     16(%esp), %ecx    # 16(%esp) : exchange_value (high)
 627         movl     24(%esp), %eax    # 24(%esp) : compare_value (low)
 628         movl     28(%esp), %edx    # 28(%esp) : compare_value (high)
 629         movl     20(%esp), %edi    # 20(%esp) : dest
 630         lock cmpxchg8b (%edi)
 631         popl     %edi
 632         popl     %ebx
 633         ret
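
        # In the new version the is_MP argument is gone and the lock prefix is
        # emitted unconditionally (the prefix is also correct, if unnecessary,
        # on a uniprocessor, so the branch is no longer worth keeping). A hedged
        # C++ sketch of a retry loop on top of the new three-argument stub
        # (add_to_counter is an illustrative helper, not HotSpot code):
        #
        #     typedef long long jlong;
        #
        #     // Updated stub: lock prefix is always used, no is_MP parameter.
        #     extern "C" jlong _Atomic_cmpxchg_long(jlong exchange_value,
        #                                           volatile jlong* dest,
        #                                           jlong compare_value);
        #
        #     // Illustrative CAS loop: atomically add delta to a 64-bit counter.
        #     jlong add_to_counter(volatile jlong* counter, jlong delta) {
        #       jlong old_value, new_value;
        #       do {
        #         old_value = *counter;   // may be torn on 32-bit, but the full
        #                                 // 64-bit compare below makes that safe
        #         new_value = old_value + delta;
        #       } while (_Atomic_cmpxchg_long(new_value, counter, old_value) != old_value);
        #       return new_value;
        #     }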
 634 
 635 
 636         # Support for jlong Atomic::load and Atomic::store.
 637         # void _Atomic_move_long(volatile jlong* src, volatile jlong* dst)
 638         .p2align 4,,15
 639         .type    _Atomic_move_long,@function
 640 _Atomic_move_long:
 641         movl     4(%esp), %eax   # src
 642         fildll    (%eax)
 643         movl     8(%esp), %eax   # dest
 644         fistpll   (%eax)
 645         ret
 646 