< prev index next >

src/hotspot/cpu/aarch64/macroAssembler_aarch64.hpp

Print this page
rev 48167 : 8193266: AArch64: TestOptionsWithRanges.java SIGSEGV


  67   // If no java_thread register is specified (noreg) then rthread will be used instead. call_VM_base
  68   // returns the register which contains the thread upon return. If a thread register has been
  69   // specified, the return value will correspond to that register. If no last_java_sp is specified
  70   // (noreg) then rsp will be used instead.
  71   virtual void call_VM_base(           // returns the register containing the thread upon return
  72     Register oop_result,               // where an oop-result ends up if any; use noreg otherwise
  73     Register java_thread,              // the thread if computed before     ; use noreg otherwise
  74     Register last_java_sp,             // to set up last_Java_frame in stubs; use noreg otherwise
  75     address  entry_point,              // the entry point
  76     int      number_of_arguments,      // the number of arguments (w/o thread) to pop after the call
  77     bool     check_exceptions          // whether to check for pending exceptions after return
  78   );
  79 
       // Convenience front end for call_VM_base; presumably supplies rthread and
       // a default last_java_sp -- confirm in macroAssembler_aarch64.cpp.
  80   void call_VM_helper(Register oop_result, address entry_point, int number_of_arguments, bool check_exceptions = true);
  81 
  82   // Non-zero when Universe::narrow_klass_base() is encodable as a 64-bit
       // logical immediate and lies above the compressed class space (set once
       // in the constructor below); presumably selects an EOR-based decode of
       // compressed class pointers -- confirm at the use sites.
  83   uint64_t use_XOR_for_compressed_class_base;
  84 
  85  public:
  86   MacroAssembler(CodeBuffer* code) : Assembler(code) {
       // One past the largest offset reachable by an unscaled (shift-0)
       // compressed class pointer: 4G.
       const uint64_t UnscaledClassSpaceMax = (uint64_t(max_juint) + 1);

       // XOR can materialize the narrow-klass base only when the base is a
       // valid 64-bit logical immediate and lies wholly above the class
       // space, so the EOR cannot disturb the offset bits.
       //
       // The shifted one must be 64-bit (1ul): log2_intptr(UnscaledClassSpaceMax)
       // is 32, and shifting a 32-bit 1u by 32 is undefined behavior.  The
       // previous form, (1u << log2_intptr(CompressedClassSpaceSize)), hit
       // exactly that for large class-space sizes and mis-set this flag
       // (JDK-8193266: SIGSEGV in TestOptionsWithRanges).
       use_XOR_for_compressed_class_base
         = (operand_valid_for_logical_immediate(false /*is32*/,
                                                (uint64_t)Universe::narrow_klass_base())
            && ((uint64_t)Universe::narrow_klass_base()
                > (1ul << log2_intptr(UnscaledClassSpaceMax))));
  92   }
  93 
  94  // These routines should emit JVMTI PopFrame and ForceEarlyReturn handling code.
  95  // The implementation is only non-empty for the InterpreterMacroAssembler,
  96  // as only the interpreter handles PopFrame and ForceEarlyReturn requests.
  97  virtual void check_and_handle_popframe(Register java_thread);
  98  virtual void check_and_handle_earlyret(Register java_thread);
  99 
      // Poll the safepoint state and branch to slow_path when a safepoint or
      // handshake is pending; the _acquire variant presumably uses an
      // acquire-ordered load -- confirm in macroAssembler_aarch64.cpp.
 100   void safepoint_poll(Label& slow_path);
 101   void safepoint_poll_acquire(Label& slow_path);
 102 
 103   // Biased locking support
 104   // lock_reg and obj_reg must be loaded up with the appropriate values.
 105   // swap_reg is killed.
 106   // tmp_reg must be supplied and must not be rscratch1 or rscratch2
 107   // Optional slow case is for implementations (interpreter and C1) which branch to
 108   // slow case directly. Leaves condition codes set for C2's Fast_Lock node.
 109   // Returns offset of first potentially-faulting instruction for null
 110   // check info (currently consumed only by C1). If
 111   // swap_reg_contains_mark is true then returns -1 as it is assumed




  67   // If no java_thread register is specified (noreg) then rthread will be used instead. call_VM_base
  68   // returns the register which contains the thread upon return. If a thread register has been
  69   // specified, the return value will correspond to that register. If no last_java_sp is specified
  70   // (noreg) then rsp will be used instead.
  71   virtual void call_VM_base(           // returns the register containing the thread upon return
  72     Register oop_result,               // where an oop-result ends up if any; use noreg otherwise
  73     Register java_thread,              // the thread if computed before     ; use noreg otherwise
  74     Register last_java_sp,             // to set up last_Java_frame in stubs; use noreg otherwise
  75     address  entry_point,              // the entry point
  76     int      number_of_arguments,      // the number of arguments (w/o thread) to pop after the call
  77     bool     check_exceptions          // whether to check for pending exceptions after return
  78   );
  79 
       // Convenience front end for call_VM_base; presumably supplies rthread and
       // a default last_java_sp -- confirm in macroAssembler_aarch64.cpp.
  80   void call_VM_helper(Register oop_result, address entry_point, int number_of_arguments, bool check_exceptions = true);
  81 
  82   // Non-zero when Universe::narrow_klass_base() is encodable as a 64-bit
       // logical immediate and lies above the compressed class space (set once
       // in the constructor below); presumably selects an EOR-based decode of
       // compressed class pointers -- confirm at the use sites.
  83   uint64_t use_XOR_for_compressed_class_base;
  84 
  85  public:
  86   MacroAssembler(CodeBuffer* code) : Assembler(code) {
  87     // One past the largest offset reachable by an unscaled (shift-0)
       // compressed class pointer: 4G.
  88     const uint64_t UnscaledClassSpaceMax = (uint64_t(max_juint) + 1);
  89 
       // XOR can materialize the narrow-klass base only when the base is a
       // valid 64-bit logical immediate and lies wholly above the class
       // space.  The shifted one must be 64-bit (1ul):
       // log2_intptr(UnscaledClassSpaceMax) is 32, and shifting a 32-bit 1u
       // by 32 is undefined behavior (JDK-8193266).
  90     use_XOR_for_compressed_class_base
  91       = (operand_valid_for_logical_immediate(false /*is32*/,
  92                                              (uint64_t)Universe::narrow_klass_base())
  93          && ((uint64_t)Universe::narrow_klass_base()
  94              > (1ul << log2_intptr(UnscaledClassSpaceMax))));
  95   }
  96 
  97  // These routines should emit JVMTI PopFrame and ForceEarlyReturn handling code.
  98  // The implementation is only non-empty for the InterpreterMacroAssembler,
  99  // as only the interpreter handles PopFrame and ForceEarlyReturn requests.
 100  virtual void check_and_handle_popframe(Register java_thread);
 101  virtual void check_and_handle_earlyret(Register java_thread);
 102 
      // Poll the safepoint state and branch to slow_path when a safepoint or
      // handshake is pending; the _acquire variant presumably uses an
      // acquire-ordered load -- confirm in macroAssembler_aarch64.cpp.
 103   void safepoint_poll(Label& slow_path);
 104   void safepoint_poll_acquire(Label& slow_path);
 105 
 106   // Biased locking support
 107   // lock_reg and obj_reg must be loaded up with the appropriate values.
 108   // swap_reg is killed.
 109   // tmp_reg must be supplied and must not be rscratch1 or rscratch2
 110   // Optional slow case is for implementations (interpreter and C1) which branch to
 111   // slow case directly. Leaves condition codes set for C2's Fast_Lock node.
 112   // Returns offset of first potentially-faulting instruction for null
 113   // check info (currently consumed only by C1). If
 114   // swap_reg_contains_mark is true then returns -1 as it is assumed


< prev index next >