< prev index next >

src/hotspot/cpu/x86/macroAssembler_x86.hpp

Print this page
rev 48494 : 8195112: x86 (32 bit): implementation for Thread-local handshakes
Reviewed-by:


 639   void bang_stack_with_offset(int offset) {
 640     // Stack grows down: the caller passes a positive offset, which is negated here so the probe store lands below rsp.
 641     assert(offset > 0, "must bang with positive offset");
 642     movl(Address(rsp, (-offset)), rax);
 643   }
 644 
 645   // Writes to stack successive pages until offset reached to check for
 646   // stack overflow + shadow pages.  Also, clobbers tmp
 647   void bang_stack_size(Register size, Register tmp);
 648 
 649   // Check for reserved stack access in method being exited (for JIT)
 650   void reserved_stack_check();
 651 
 652   virtual RegisterOrConstant delayed_value_impl(intptr_t* delayed_value_addr,
 653                                                 Register tmp,
 654                                                 int offset);
 655 
 656   // Support for serializing memory accesses between threads
 657   void serialize_memory(Register thread, Register tmp);
 658 
 659 #ifdef _LP64
 660   void safepoint_poll(Label& slow_path, Register thread_reg, Register temp_reg); // 64-bit: branches to slow_path when a safepoint is pending; presumably polls a thread-local word (thread-local handshakes, see rev note) -- thread_reg/temp_reg are scratch
 661 #else
 662   void safepoint_poll(Label& slow_path); // 32-bit: no thread/temp registers (pre-thread-local-handshake form; removed by this change)
 663 #endif
 664 
 665   void verify_tlab(); // Sanity-checks thread-local allocation buffer invariants; presumably a debug-build check -- TODO confirm
 666 
 667   // Biased locking support
 668   // lock_reg and obj_reg must be loaded up with the appropriate values.
 669   // swap_reg must be rax, and is killed.
 670   // tmp_reg is optional. If it is supplied (i.e., != noreg) it will
 671   // be killed; if not supplied, push/pop will be used internally to
 672   // allocate a temporary (inefficient, avoid if possible).
 673   // Optional slow case is for implementations (interpreter and C1) which branch to
 674   // slow case directly. Leaves condition codes set for C2's Fast_Lock node.
 675   // Returns offset of first potentially-faulting instruction for null
 676   // check info (currently consumed only by C1). If
 677   // swap_reg_contains_mark is true then returns -1 as it is assumed
 678   // the calling code has already passed any potential faults.
 679   int biased_locking_enter(Register lock_reg, Register obj_reg,
 680                            Register swap_reg, Register tmp_reg,
 681                            bool swap_reg_contains_mark,
 682                            Label& done, Label* slow_case = NULL,
 683                            BiasedLockingCounters* counters = NULL);




 639   void bang_stack_with_offset(int offset) {
 640     // Stack grows down: the caller passes a positive offset, which is negated here so the probe store lands below rsp.
 641     assert(offset > 0, "must bang with positive offset");
 642     movl(Address(rsp, (-offset)), rax);
 643   }
 644 
 645   // Writes to stack successive pages until offset reached to check for
 646   // stack overflow + shadow pages.  Also, clobbers tmp
 647   void bang_stack_size(Register size, Register tmp);
 648 
 649   // Check for reserved stack access in method being exited (for JIT)
 650   void reserved_stack_check();
 651 
 652   virtual RegisterOrConstant delayed_value_impl(intptr_t* delayed_value_addr,
 653                                                 Register tmp,
 654                                                 int offset);
 655 
 656   // Support for serializing memory accesses between threads
 657   void serialize_memory(Register thread, Register tmp);
 658 

 659   void safepoint_poll(Label& slow_path, Register thread_reg, Register temp_reg); // Unified for 32- and 64-bit by this rev (drops the _LP64 ifdef); branches to slow_path when a safepoint/handshake is pending -- thread_reg/temp_reg are scratch



 660 
 661   void verify_tlab(); // Sanity-checks thread-local allocation buffer invariants; presumably a debug-build check -- TODO confirm
 662 
 663   // Biased locking support
 664   // lock_reg and obj_reg must be loaded up with the appropriate values.
 665   // swap_reg must be rax, and is killed.
 666   // tmp_reg is optional. If it is supplied (i.e., != noreg) it will
 667   // be killed; if not supplied, push/pop will be used internally to
 668   // allocate a temporary (inefficient, avoid if possible).
 669   // Optional slow case is for implementations (interpreter and C1) which branch to
 670   // slow case directly. Leaves condition codes set for C2's Fast_Lock node.
 671   // Returns offset of first potentially-faulting instruction for null
 672   // check info (currently consumed only by C1). If
 673   // swap_reg_contains_mark is true then returns -1 as it is assumed
 674   // the calling code has already passed any potential faults.
 675   int biased_locking_enter(Register lock_reg, Register obj_reg,
 676                            Register swap_reg, Register tmp_reg,
 677                            bool swap_reg_contains_mark,
 678                            Label& done, Label* slow_case = NULL,
 679                            BiasedLockingCounters* counters = NULL);


< prev index next >