// --- First of two near-identical copies of this fragment in the listing.
// --- Appears to be the interior of LIRGenerator::do_StoreIndexed (C1 LIR
// --- generation for an array store) in the PPC port — FrameMap::R4_opr etc.
// --- are PPC register operands. TODO(review): confirm the enclosing
// --- function; its opening and closing braces lie outside this excerpt.
// Force the value operand into a form suitable for storing an element of
// this type — presumably loads it into a register; confirm against
// LIRItem::load_for_store.
347 value.load_for_store(x->elt_type());
348 }
349
// An array store produces no usable result value.
350 set_no_result(x);
351
352 // The CodeEmitInfo must be duplicated for each different
353 // LIR-instruction because spilling can occur anywhere between two
354 // instructions and so the debug information must be different.
355 CodeEmitInfo* range_check_info = state_for(x);
356 CodeEmitInfo* null_check_info = NULL;
357 if (x->needs_null_check()) {
// Separate debug info for the implicit null check (see duplication
// comment above).
358 null_check_info = new CodeEmitInfo(range_check_info);
359 }
360
361 // Emit array address setup early so it schedules better.
362 LIR_Address* array_addr = emit_array_address(array.result(), index.result(), x->elt_type(), obj_store);
363
364 if (GenerateRangeChecks && needs_range_check) {
365 if (use_length) {
// Unsigned compare of length vs. index; NOTE(review): presumably the
// unsigned condition also catches a negative index (it reads as a huge
// unsigned value) — confirm.
366 __ cmp(lir_cond_belowEqual, length.result(), index.result());
// NOTE(review): this RangeCheckStub call passes only (info, index); the
// second copy of this fragment later in the listing also passes
// array.result() — this looks like a before/after pair of the same
// change; verify which revision is the intended one.
367 __ branch(lir_cond_belowEqual, T_INT, new RangeCheckStub(range_check_info, index.result()));
368 } else {
369 array_range_check(array.result(), index.result(), null_check_info, range_check_info);
370 // Range_check also does the null check.
371 null_check_info = NULL;
372 }
373 }
374
375 if (GenerateArrayStoreCheck && needs_store_check) {
376 // Following registers are used by slow_subtype_check:
377 LIR_Opr tmp1 = FrameMap::R4_opr; // super_klass
378 LIR_Opr tmp2 = FrameMap::R5_opr; // sub_klass
379 LIR_Opr tmp3 = FrameMap::R6_opr; // temp
380
// The store (sub-type) check gets its own duplicated debug info as well.
381 CodeEmitInfo* store_check_info = new CodeEmitInfo(range_check_info);
382 __ store_check(value.result(), array.result(), tmp1, tmp2, tmp3,
383 store_check_info, x->profiled_method(), x->profiled_bci());
384 }
385
386 if (obj_store) {
387 // Needs GC write barriers.
// (Excerpt truncated here — the write-barrier emission and the actual
// store through array_addr follow outside this view.)
|
// --- Second of two near-identical copies of this fragment in the listing.
// --- Appears to be the interior of LIRGenerator::do_StoreIndexed (C1 LIR
// --- generation for an array store) in the PPC port — FrameMap::R4_opr etc.
// --- are PPC register operands. TODO(review): confirm the enclosing
// --- function; its opening and closing braces lie outside this excerpt.
// Force the value operand into a form suitable for storing an element of
// this type — presumably loads it into a register; confirm against
// LIRItem::load_for_store.
347 value.load_for_store(x->elt_type());
348 }
349
// An array store produces no usable result value.
350 set_no_result(x);
351
352 // The CodeEmitInfo must be duplicated for each different
353 // LIR-instruction because spilling can occur anywhere between two
354 // instructions and so the debug information must be different.
355 CodeEmitInfo* range_check_info = state_for(x);
356 CodeEmitInfo* null_check_info = NULL;
357 if (x->needs_null_check()) {
// Separate debug info for the implicit null check (see duplication
// comment above).
358 null_check_info = new CodeEmitInfo(range_check_info);
359 }
360
361 // Emit array address setup early so it schedules better.
362 LIR_Address* array_addr = emit_array_address(array.result(), index.result(), x->elt_type(), obj_store);
363
364 if (GenerateRangeChecks && needs_range_check) {
365 if (use_length) {
// Unsigned compare of length vs. index; NOTE(review): presumably the
// unsigned condition also catches a negative index (it reads as a huge
// unsigned value) — confirm.
366 __ cmp(lir_cond_belowEqual, length.result(), index.result());
// NOTE(review): unlike the first copy of this fragment earlier in the
// listing, this RangeCheckStub also receives array.result() — consistent
// with the upstream change that added the array operand to the stub;
// verify which revision is the intended one.
367 __ branch(lir_cond_belowEqual, T_INT, new RangeCheckStub(range_check_info, index.result(), array.result()));
368 } else {
369 array_range_check(array.result(), index.result(), null_check_info, range_check_info);
370 // Range_check also does the null check.
371 null_check_info = NULL;
372 }
373 }
374
375 if (GenerateArrayStoreCheck && needs_store_check) {
376 // Following registers are used by slow_subtype_check:
377 LIR_Opr tmp1 = FrameMap::R4_opr; // super_klass
378 LIR_Opr tmp2 = FrameMap::R5_opr; // sub_klass
379 LIR_Opr tmp3 = FrameMap::R6_opr; // temp
380
// The store (sub-type) check gets its own duplicated debug info as well.
381 CodeEmitInfo* store_check_info = new CodeEmitInfo(range_check_info);
382 __ store_check(value.result(), array.result(), tmp1, tmp2, tmp3,
383 store_check_info, x->profiled_method(), x->profiled_bci());
384 }
385
386 if (obj_store) {
387 // Needs GC write barriers.
// (Excerpt truncated here — the write-barrier emission and the actual
// store through array_addr follow outside this view.)
|