< prev index next >

src/hotspot/cpu/aarch64/gc/shared/barrierSetAssembler_aarch64.cpp

Print this page
rev 50098 : 8202714: Create a MacroAssembler::access_load/store_at wrapper for AArch64
rev 50099 : imported patch JDK-8203157.patch
rev 50100 : 8203172: Primitive heap access for interpreter BarrierSetAssembler/aarch64


  39   switch (type) {
  40   case T_OBJECT:
  41   case T_ARRAY: {
  42     if (on_heap) {
  43       if (UseCompressedOops) {
  44         __ ldrw(dst, src);
  45         if (oop_not_null) {
  46           __ decode_heap_oop_not_null(dst);
  47         } else {
  48           __ decode_heap_oop(dst);
  49         }
  50       } else {
  51         __ ldr(dst, src);
  52       }
  53     } else {
  54       assert(on_root, "why else?");
  55       __ ldr(dst, src);
  56     }
  57     break;
  58   }









  59   default: Unimplemented();
  60   }
  61 }
  62 
// Emit a raw (no GC write barrier) store of 'val' to 'dst'.
// Base implementation used when the selected BarrierSet needs no barrier code.
// decorators: IN_HEAP / IN_ROOT select heap vs. root handling; type selects width.
// val == noreg is interpreted as "store null". tmp1/tmp2 are unused here.
  63 void BarrierSetAssembler::store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
  64                                    Address dst, Register val, Register tmp1, Register tmp2) {
  65   bool on_heap = (decorators & IN_HEAP) != 0;    // destination is inside the Java heap
  66   bool on_root = (decorators & IN_ROOT) != 0;    // destination is a GC root outside the heap
  67   switch (type) {
  68   case T_OBJECT:
  69   case T_ARRAY: {
  70     val = val == noreg ? zr : val;               // null store: zr writes a zero word
  71     if (on_heap) {
  72       if (UseCompressedOops) {
  73         assert(!dst.uses(val), "not enough registers");  // encode below clobbers val in place
  74         if (val != zr) {
  75           __ encode_heap_oop(val);               // compress; zr (null) is already 0, skip
  76         }
  77         __ strw(val, dst);                       // 32-bit store of the compressed oop
  78       } else {
  79         __ str(val, dst);                        // full 64-bit oop store
  80       }
  81     } else {
  82       assert(on_root, "why else?");              // only IN_HEAP or IN_ROOT is expected here
  83       __ str(val, dst);                          // roots hold uncompressed oops: full-width store
  84     }
  85     break;
  86   }












  87   default: Unimplemented();                     // this version handles only oop/array stores
  88   }
  89 }
  90 
// Emit an oop comparison of obj1 and obj2, setting the condition flags.
// Base implementation: no barrier-specific normalization is required, so a
// plain register compare suffices. decorators are unused here.
  91 void BarrierSetAssembler::obj_equals(MacroAssembler* masm, DecoratorSet decorators,
  92                                      Register obj1, Register obj2) {
  93   __ cmp(obj1, obj2);
  94 }
  95 
// Resolve a jobject handle in 'obj' to a raw oop, in place, for use from
// native code. Base implementation: strips the weak-handle tag bit and
// dereferences the handle. jni_env, tmp and slowpath are unused here — this
// barrier set needs no slow-path check, so the slow path is never taken.
  96 void BarrierSetAssembler::try_resolve_jobject_in_native(MacroAssembler* masm, Register jni_env,
  97                                                         Register obj, Register tmp, Label& slowpath) {
  98   // If mask changes we need to ensure that the inverse is still encodable as an immediate
  99   STATIC_ASSERT(JNIHandles::weak_tag_mask == 1);
 100   __ andr(obj, obj, ~JNIHandles::weak_tag_mask);  // clear tag bit 0 to get the handle address
 101   __ ldr(obj, Address(obj, 0));             // *obj
 102 }


  39   switch (type) {
  40   case T_OBJECT:
  41   case T_ARRAY: {
  42     if (on_heap) {
  43       if (UseCompressedOops) {
  44         __ ldrw(dst, src);
  45         if (oop_not_null) {
  46           __ decode_heap_oop_not_null(dst);
  47         } else {
  48           __ decode_heap_oop(dst);
  49         }
  50       } else {
  51         __ ldr(dst, src);
  52       }
  53     } else {
  54       assert(on_root, "why else?");
  55       __ ldr(dst, src);
  56     }
  57     break;
  58   }
  59   case T_BOOLEAN: __ load_unsigned_byte (dst, src); break;
  60   case T_BYTE:    __ load_signed_byte   (dst, src); break;
  61   case T_CHAR:    __ load_unsigned_short(dst, src); break;
  62   case T_SHORT:   __ load_signed_short  (dst, src); break;
  63   case T_INT:     __ ldrw               (dst, src); break;
  64   case T_LONG:    __ ldr                (dst, src); break;
  65   case T_ADDRESS: __ ldr                (dst, src); break;
  66   case T_FLOAT:   __ ldrs               (v0, src);  break;
  67   case T_DOUBLE:  __ ldrd               (v0, src);  break;
  68   default: Unimplemented();
  69   }
  70 }
  71 
// Emit a raw (no GC write barrier) store of 'val' to 'dst'.
// Base implementation used when the selected BarrierSet needs no barrier code.
// decorators: IN_HEAP / IN_ROOT select heap vs. root handling; type selects the
// store width. For oops, val == noreg means "store null". For T_FLOAT/T_DOUBLE
// the value is taken from v0 and 'val' is ignored. tmp1/tmp2 are unused here.
  72 void BarrierSetAssembler::store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
  73                                    Address dst, Register val, Register tmp1, Register tmp2) {
  74   bool on_heap = (decorators & IN_HEAP) != 0;    // destination is inside the Java heap
  75   bool on_root = (decorators & IN_ROOT) != 0;    // destination is a GC root outside the heap
  76   switch (type) {
  77   case T_OBJECT:
  78   case T_ARRAY: {
  79     val = val == noreg ? zr : val;               // null store: zr writes a zero word
  80     if (on_heap) {
  81       if (UseCompressedOops) {
  82         assert(!dst.uses(val), "not enough registers");  // encode below clobbers val in place
  83         if (val != zr) {
  84           __ encode_heap_oop(val);               // compress; zr (null) is already 0, skip
  85         }
  86         __ strw(val, dst);                       // 32-bit store of the compressed oop
  87       } else {
  88         __ str(val, dst);                        // full 64-bit oop store
  89       }
  90     } else {
  91       assert(on_root, "why else?");              // only IN_HEAP or IN_ROOT is expected here
  92       __ str(val, dst);                          // roots hold uncompressed oops: full-width store
  93     }
  94     break;
  95   }
  96   case T_BOOLEAN:
  97     __ andw(val, val, 0x1);  // boolean is true if LSB is 1
  98     __ strb(val, dst);
  99     break;
 100   case T_BYTE:    __ strb(val, dst); break;
 101   case T_CHAR:    __ strh(val, dst); break;
 102   case T_SHORT:   __ strh(val, dst); break;
 103   case T_INT:     __ strw(val, dst); break;
 104   case T_LONG:    __ str (val, dst); break;
 105   case T_ADDRESS: __ str (val, dst); break;
 106   case T_FLOAT:   __ strs(v0,  dst); break;      // FP value expected in v0, not 'val'
 107   case T_DOUBLE:  __ strd(v0,  dst); break;      // FP value expected in v0, not 'val'
 108   default: Unimplemented();
 109   }
 110 }
 111 
// Emit an oop comparison of obj1 and obj2, setting the condition flags.
// Base implementation: no barrier-specific normalization is required, so a
// plain register compare suffices. decorators are unused here.
 112 void BarrierSetAssembler::obj_equals(MacroAssembler* masm, DecoratorSet decorators,
 113                                      Register obj1, Register obj2) {
 114   __ cmp(obj1, obj2);
 115 }
 116 
// Resolve a jobject handle in 'obj' to a raw oop, in place, for use from
// native code. Base implementation: strips the weak-handle tag bit and
// dereferences the handle. jni_env, tmp and slowpath are unused here — this
// barrier set needs no slow-path check, so the slow path is never taken.
 117 void BarrierSetAssembler::try_resolve_jobject_in_native(MacroAssembler* masm, Register jni_env,
 118                                                         Register obj, Register tmp, Label& slowpath) {
 119   // If mask changes we need to ensure that the inverse is still encodable as an immediate
 120   STATIC_ASSERT(JNIHandles::weak_tag_mask == 1);
 121   __ andr(obj, obj, ~JNIHandles::weak_tag_mask);  // clear tag bit 0 to get the handle address
 122   __ ldr(obj, Address(obj, 0));             // *obj
 123 }
< prev index next >