193 // C++ does not guarantee jlong[] array alignment to 8 bytes.
194 // Use middle of array to check that memory before it is not modified.
195 address buffer = (address) round_to((intptr_t)&lbuffer[4], BytesPerLong);
196 address buffer2 = (address) round_to((intptr_t)&lbuffer2[4], BytesPerLong);
197 // do an aligned copy
198 ((arraycopy_fn)func)(buffer, buffer2, 0);
199 for (i = 0; i < sizeof(lbuffer); i++) {
200 assert(fbuffer[i] == v && fbuffer2[i] == v2, "shouldn't have copied anything");
201 }
202 // adjust destination alignment
203 ((arraycopy_fn)func)(buffer, buffer2 + alignment, 0);
204 for (i = 0; i < sizeof(lbuffer); i++) {
205 assert(fbuffer[i] == v && fbuffer2[i] == v2, "shouldn't have copied anything");
206 }
207 // adjust source alignment
208 ((arraycopy_fn)func)(buffer + alignment, buffer2, 0);
209 for (i = 0; i < sizeof(lbuffer); i++) {
210 assert(fbuffer[i] == v && fbuffer2[i] == v2, "shouldn't have copied anything");
211 }
212 }
213 #endif
214
215
// Second phase of stub routine initialization: allocate the
// "StubRoutines (2)" code blob and generate the remaining stubs into it.
// In debug (ASSERT) builds, the copy runtime routines are then exercised
// with a zero element count to verify they modify no memory.
void StubRoutines::initialize2() {
  if (_code2 == NULL) {
    ResourceMark rm;
    TraceTime timer("StubRoutines generation 2", TraceStartupTime);
    _code2 = BufferBlob::create("StubRoutines (2)", code_size2);
    if (_code2 == NULL) {
      // Fatal: the code cache has no room for the phase-2 stub blob.
      vm_exit_out_of_memory(code_size2, OOM_MALLOC_ERROR, "CodeCache: no room for StubRoutines (2)");
    }
    CodeBuffer buffer(_code2);
    // NOTE(review): 'true' presumably selects the phase-2 stub set (vs.
    // phase 1) — confirm against StubGenerator_generate's signature.
    StubGenerator_generate(&buffer, true);
  }

#ifdef ASSERT

  // Run test_arraycopy_func over a stub arraycopy routine and its
  // disjoint / arrayof variants for the given element type.
  // NOTE(review): no uses or #undef of TEST_ARRAYCOPY are visible in
  // this chunk — confirm against the full file.
#define TEST_ARRAYCOPY(type) \
  test_arraycopy_func( type##_arraycopy(), sizeof(type)); \
  test_arraycopy_func( type##_disjoint_arraycopy(), sizeof(type)); \
  test_arraycopy_func(arrayof_##type##_arraycopy(), sizeof(HeapWord)); \
  test_arraycopy_func(arrayof_##type##_disjoint_arraycopy(), sizeof(HeapWord))

  // Exercise the Copy:: runtime entry points for the given element type.
#define TEST_COPYRTN(type) \
  test_arraycopy_func(CAST_FROM_FN_PTR(address, Copy::conjoint_##type##s_atomic), sizeof(type)); \
  test_arraycopy_func(CAST_FROM_FN_PTR(address, Copy::arrayof_conjoint_##type##s), (int)MAX2(sizeof(HeapWord), sizeof(type)))

  // Make sure all the copy runtime routines properly handle zero count
  TEST_COPYRTN(jbyte);
  TEST_COPYRTN(jshort);
  TEST_COPYRTN(jint);
  TEST_COPYRTN(jlong);

#undef TEST_COPYRTN

  // Word-granularity copy routines, zero-count checks.
  test_arraycopy_func(CAST_FROM_FN_PTR(address, Copy::conjoint_words), sizeof(HeapWord));
  test_arraycopy_func(CAST_FROM_FN_PTR(address, Copy::disjoint_words), sizeof(HeapWord));
  test_arraycopy_func(CAST_FROM_FN_PTR(address, Copy::disjoint_words_atomic), sizeof(HeapWord));
  // Aligned to BytesPerLong
  test_arraycopy_func(CAST_FROM_FN_PTR(address, Copy::aligned_conjoint_words), sizeof(jlong));
  test_arraycopy_func(CAST_FROM_FN_PTR(address, Copy::aligned_disjoint_words), sizeof(jlong));

#endif
}
305
306
// Thin entry points forwarding to the two StubRoutines initialization
// phases.
void stubRoutines_init1() { StubRoutines::initialize1(); }
void stubRoutines_init2() { StubRoutines::initialize2(); }
309
310 //
311 // Default versions of arraycopy functions
312 //
313
314 static void gen_arraycopy_barrier_pre(oop* dest, size_t count, bool dest_uninitialized) {
315 assert(count != 0, "count should be non-zero");
316 assert(count <= (size_t)max_intx, "count too large");
317 BarrierSet* bs = Universe::heap()->barrier_set();
318 assert(bs->has_write_ref_array_pre_opt(), "Must have pre-barrier opt");
319 bs->write_ref_array_pre(dest, (int)count, dest_uninitialized);
320 }
321
322 static void gen_arraycopy_barrier(oop* dest, size_t count) {
|
193 // C++ does not guarantee jlong[] array alignment to 8 bytes.
194 // Use middle of array to check that memory before it is not modified.
195 address buffer = (address) round_to((intptr_t)&lbuffer[4], BytesPerLong);
196 address buffer2 = (address) round_to((intptr_t)&lbuffer2[4], BytesPerLong);
197 // do an aligned copy
198 ((arraycopy_fn)func)(buffer, buffer2, 0);
199 for (i = 0; i < sizeof(lbuffer); i++) {
200 assert(fbuffer[i] == v && fbuffer2[i] == v2, "shouldn't have copied anything");
201 }
202 // adjust destination alignment
203 ((arraycopy_fn)func)(buffer, buffer2 + alignment, 0);
204 for (i = 0; i < sizeof(lbuffer); i++) {
205 assert(fbuffer[i] == v && fbuffer2[i] == v2, "shouldn't have copied anything");
206 }
207 // adjust source alignment
208 ((arraycopy_fn)func)(buffer + alignment, buffer2, 0);
209 for (i = 0; i < sizeof(lbuffer); i++) {
210 assert(fbuffer[i] == v && fbuffer2[i] == v2, "shouldn't have copied anything");
211 }
212 }
213
214 // simple test for SafeFetch32
215 static void test_safefetch32() {
216 int dummy = 17;
217 int* const p_invalid = (int*) get_segfault_address();
218 int* const p_valid = &dummy;
219 int result_invalid = SafeFetch32(p_invalid, 0xABC);
220 assert(result_invalid == 0xABC, "SafeFetch32 error");
221 int result_valid = SafeFetch32(p_valid, 0xABC);
222 assert(result_valid == 17, "SafeFetch32 error");
223 }
224
225 // simple test for SafeFetchN
226 static void test_safefetchN() {
227 #ifdef _LP64
228 const intptr_t v1 = 0xABCD00000000ABCDULL;
229 const intptr_t v2 = 0xDEFD00000000DEFDULL;
230 #else
231 const intptr_t v1 = 0xABCDABCD;
232 const intptr_t v2 = 0xDEFDDEFD;
233 #endif
234 intptr_t dummy = v1;
235 intptr_t* const p_invalid = (intptr_t*) get_segfault_address();
236 intptr_t* const p_valid = &dummy;
237 intptr_t result_invalid = SafeFetchN(p_invalid, v2);
238 assert(result_invalid == v2, "SafeFetchN error");
239 intptr_t result_valid = SafeFetchN(p_valid, v2);
240 assert(result_valid == v1, "SafeFetchN error");
241 }
242 #endif
243
// Second phase of stub routine initialization: allocate the
// "StubRoutines (2)" code blob and generate the remaining stubs into it.
// In debug (ASSERT) builds, the copy runtime routines and the SafeFetch
// routines are then exercised as a sanity check.
void StubRoutines::initialize2() {
  if (_code2 == NULL) {
    ResourceMark rm;
    TraceTime timer("StubRoutines generation 2", TraceStartupTime);
    _code2 = BufferBlob::create("StubRoutines (2)", code_size2);
    if (_code2 == NULL) {
      // Fatal: the code cache has no room for the phase-2 stub blob.
      vm_exit_out_of_memory(code_size2, OOM_MALLOC_ERROR, "CodeCache: no room for StubRoutines (2)");
    }
    CodeBuffer buffer(_code2);
    // NOTE(review): 'true' presumably selects the phase-2 stub set (vs.
    // phase 1) — confirm against StubGenerator_generate's signature.
    StubGenerator_generate(&buffer, true);
  }

#ifdef ASSERT

  // Run test_arraycopy_func over a stub arraycopy routine and its
  // disjoint / arrayof variants for the given element type.
  // NOTE(review): no uses or #undef of TEST_ARRAYCOPY are visible in
  // this chunk — confirm against the full file.
#define TEST_ARRAYCOPY(type) \
  test_arraycopy_func( type##_arraycopy(), sizeof(type)); \
  test_arraycopy_func( type##_disjoint_arraycopy(), sizeof(type)); \
  test_arraycopy_func(arrayof_##type##_arraycopy(), sizeof(HeapWord)); \
  test_arraycopy_func(arrayof_##type##_disjoint_arraycopy(), sizeof(HeapWord))

  // Exercise the Copy:: runtime entry points for the given element type.
#define TEST_COPYRTN(type) \
  test_arraycopy_func(CAST_FROM_FN_PTR(address, Copy::conjoint_##type##s_atomic), sizeof(type)); \
  test_arraycopy_func(CAST_FROM_FN_PTR(address, Copy::arrayof_conjoint_##type##s), (int)MAX2(sizeof(HeapWord), sizeof(type)))

  // Make sure all the copy runtime routines properly handle zero count
  TEST_COPYRTN(jbyte);
  TEST_COPYRTN(jshort);
  TEST_COPYRTN(jint);
  TEST_COPYRTN(jlong);

#undef TEST_COPYRTN

  // Word-granularity copy routines, zero-count checks.
  test_arraycopy_func(CAST_FROM_FN_PTR(address, Copy::conjoint_words), sizeof(HeapWord));
  test_arraycopy_func(CAST_FROM_FN_PTR(address, Copy::disjoint_words), sizeof(HeapWord));
  test_arraycopy_func(CAST_FROM_FN_PTR(address, Copy::disjoint_words_atomic), sizeof(HeapWord));
  // Aligned to BytesPerLong
  test_arraycopy_func(CAST_FROM_FN_PTR(address, Copy::aligned_conjoint_words), sizeof(jlong));
  test_arraycopy_func(CAST_FROM_FN_PTR(address, Copy::aligned_disjoint_words), sizeof(jlong));

  // test safefetch routines
  test_safefetch32();
  test_safefetchN();

#endif
}
337
338
// Thin entry points forwarding to the two StubRoutines initialization
// phases.
void stubRoutines_init1() { StubRoutines::initialize1(); }
void stubRoutines_init2() { StubRoutines::initialize2(); }
341
342 //
343 // Default versions of arraycopy functions
344 //
345
346 static void gen_arraycopy_barrier_pre(oop* dest, size_t count, bool dest_uninitialized) {
347 assert(count != 0, "count should be non-zero");
348 assert(count <= (size_t)max_intx, "count too large");
349 BarrierSet* bs = Universe::heap()->barrier_set();
350 assert(bs->has_write_ref_array_pre_opt(), "Must have pre-barrier opt");
351 bs->write_ref_array_pre(dest, (int)count, dest_uninitialized);
352 }
353
354 static void gen_arraycopy_barrier(oop* dest, size_t count) {
|