< prev index next >

src/hotspot/cpu/aarch64/templateInterpreterGenerator_aarch64.cpp

Print this page




1044       __ ldrw(off, Address(esp, wordSize)); // offset
1045       __ add(buf, buf, off); // + offset
1046       __ ldrw(crc,   Address(esp, 3*wordSize)); // Initial CRC
1047     }
1048     // Can now load 'len' since we're finished with 'off'
1049     __ ldrw(len, Address(esp, 0x0)); // Length
1050 
1051     __ andr(sp, r13, -16); // Restore the caller's SP
1052 
1053     // We are frameless so we can just jump to the stub.
1054     __ b(CAST_FROM_FN_PTR(address, StubRoutines::updateBytesCRC32()));
1055 
1056     // generate a vanilla native entry as the slow path
1057     __ bind(slow_path);
1058     __ jump_to_entry(Interpreter::entry_for_kind(Interpreter::native));
1059     return entry;
1060   }
1061   return NULL;
1062 }
1063 
1064 // Not supported






1065 address TemplateInterpreterGenerator::generate_CRC32C_updateBytes_entry(AbstractInterpreter::MethodKind kind) { // stub: no CRC32C intrinsic entry on this side of the diff






























1066   return NULL; // NULL makes the interpreter fall back to the normal method entry
1067 }
1068 
1069 void TemplateInterpreterGenerator::bang_stack_shadow_pages(bool native_call) {
1070   // Bang each page in the shadow zone. We can't assume it's been done for
1071   // an interpreter frame with greater than a page of locals, so each page
1072   // needs to be checked.  Only true for non-native.
1073   if (UseStackBanging) {
1074     const int n_shadow_pages = JavaThread::stack_shadow_zone_size() / os::vm_page_size(); // shadow zone size expressed in pages
1075     const int start_page = native_call ? n_shadow_pages : 1; // native calls touch only the outermost page; Java calls touch pages 1..n — NOTE(review): presumably the rest is covered for natives elsewhere; confirm
1076     const int page_size = os::vm_page_size();
1077     for (int pages = start_page; pages <= n_shadow_pages ; pages++) {
1078       __ sub(rscratch2, sp, pages*page_size); // rscratch2 = sp - pages*page_size: one address inside each shadow page
1079       __ str(zr, Address(rscratch2)); // store zero to touch the page, so any guard fault happens here, not deep in a later frame
1080     }
1081   }
1082 }
1083 
1084 
1085 // Interpreter stub for calling a native method. (asm interpreter)




1044       __ ldrw(off, Address(esp, wordSize)); // offset
1045       __ add(buf, buf, off); // + offset
1046       __ ldrw(crc,   Address(esp, 3*wordSize)); // Initial CRC
1047     }
1048     // Can now load 'len' since we're finished with 'off'
1049     __ ldrw(len, Address(esp, 0x0)); // Length
1050 
1051     __ andr(sp, r13, -16); // Restore the caller's SP
1052 
1053     // We are frameless so we can just jump to the stub.
1054     __ b(CAST_FROM_FN_PTR(address, StubRoutines::updateBytesCRC32()));
1055 
1056     // generate a vanilla native entry as the slow path
1057     __ bind(slow_path);
1058     __ jump_to_entry(Interpreter::entry_for_kind(Interpreter::native));
1059     return entry;
1060   }
1061   return NULL;
1062 }
1063 
1064 /**
1065  * Method entry for intrinsic-candidate (non-native) methods:
1066  *   int java.util.zip.CRC32C.updateBytes(int crc, byte[] b, int off, int end)
1067  *   int java.util.zip.CRC32C.updateDirectByteBuffer(int crc, long buf, int off, int end)
1068  * Unlike CRC32, CRC32C does not have any methods marked as native
1069  * CRC32C also uses an "end" variable instead of the length variable CRC32 uses
1070  */
1071 address TemplateInterpreterGenerator::generate_CRC32C_updateBytes_entry(AbstractInterpreter::MethodKind kind) {
1072   if (UseCRC32CIntrinsics) { // fix: was UseCRC32Intrinsics — this entry tail-calls the CRC32C stub, so it must be gated on the flag that guards StubRoutines::updateBytesCRC32C() generation
1073     address entry = __ pc();
1074 
1075     // Prepare jump to stub using parameters from the stack
1076     const Register crc = c_rarg0; // initial crc
1077     const Register buf = c_rarg1; // source java byte array address
1078     const Register len = c_rarg2; // len argument to the kernel
1079 
1080     const Register end = len; // index of last element to process
1081     const Register off = crc; // offset
1082 
1083     __ ldrw(end, Address(esp)); // int end
1084     __ ldrw(off, Address(esp, wordSize)); // int offset
1085     __ sub(len, end, off); // len = end - off (CRC32C passes an end index, not a length)
1086     __ ldr(buf, Address(esp, 2*wordSize)); // byte[] buf | long buf
1087     __ add(buf, buf, off); // + offset
1088     if (kind == Interpreter::java_util_zip_CRC32C_updateDirectByteBuffer) {
1089       __ ldrw(crc, Address(esp, 4*wordSize)); // int crc — one slot further out because long buf occupies two slots
1090     } else {
1091       __ add(buf, buf, arrayOopDesc::base_offset_in_bytes(T_BYTE)); // + header size
1092       __ ldrw(crc, Address(esp, 3*wordSize)); // int crc
1093     }
1094 
1095     __ andr(sp, r13, -16); // Restore the caller's SP
1096 
1097     // Jump to the stub.
1098     __ b(CAST_FROM_FN_PTR(address, StubRoutines::updateBytesCRC32C()));
1099 
1100     return entry;
1101   }
1102   return NULL; // flag off: caller uses the normal (non-intrinsic) method entry
1103 }
1104 
1105 void TemplateInterpreterGenerator::bang_stack_shadow_pages(bool native_call) {
1106   // Bang each page in the shadow zone. We can't assume it's been done for
1107   // an interpreter frame with greater than a page of locals, so each page
1108   // needs to be checked.  Only true for non-native.
1109   if (UseStackBanging) {
1110     const int n_shadow_pages = JavaThread::stack_shadow_zone_size() / os::vm_page_size(); // number of pages in the shadow zone
1111     const int start_page = native_call ? n_shadow_pages : 1; // for native calls the loop runs once (outermost page only); Java calls probe every page — NOTE(review): confirm natives need only the single probe
1112     const int page_size = os::vm_page_size();
1113     for (int pages = start_page; pages <= n_shadow_pages ; pages++) {
1114       __ sub(rscratch2, sp, pages*page_size); // compute an address within each successive page below sp
1115       __ str(zr, Address(rscratch2)); // write zero to probe the page so a guard fault triggers at a known point
1116     }
1117   }
1118 }
1119 
1120 
1121 // Interpreter stub for calling a native method. (asm interpreter)


< prev index next >