
src/hotspot/cpu/aarch64/c1_LIRGenerator_aarch64.cpp

rev 49827 : 8201593: Print array length in ArrayIndexOutOfBoundsException.
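The only change in this hunk is at line 359: the range-check branch now passes array.result() as a third argument to the RangeCheckStub constructor, so the slow path knows which array was being indexed and can include its length in the ArrayIndexOutOfBoundsException message. A rough sketch of what the extended stub declaration might look like follows; the three-argument constructor is the one invoked at line 359 below, while the field names and the index-only overload are assumptions for illustration, not the actual c1_CodeStubs.hpp contents.

// Illustrative sketch only -- not the real c1_CodeStubs.hpp declaration.
// The three-argument constructor is the one called at line 359 below;
// the field names and the index-only overload are assumed.
class RangeCheckStub: public CodeStub {
 private:
  CodeEmitInfo* _info;
  LIR_Opr       _index;
  LIR_Opr       _array;   // new: carries the array so its length can be reported

 public:
  // for ArrayIndexOutOfBoundsException: the failing index plus the array accessed
  RangeCheckStub(CodeEmitInfo* info, LIR_Opr index, LIR_Opr array)
    : _info(info), _index(index), _array(array) {}

  // for cases where no array operand is available
  RangeCheckStub(CodeEmitInfo* info, LIR_Opr index)
    : _info(info), _index(index), _array(LIR_OprFact::illegalOpr) {}
  ...
};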


--- old/src/hotspot/cpu/aarch64/c1_LIRGenerator_aarch64.cpp

 339   }
 340 
 341   set_no_result(x);
 342 
 343   // the CodeEmitInfo must be duplicated for each different
 344   // LIR-instruction because spilling can occur anywhere between two
 345   // instructions and so the debug information must be different
 346   CodeEmitInfo* range_check_info = state_for(x);
 347   CodeEmitInfo* null_check_info = NULL;
 348   if (x->needs_null_check()) {
 349     null_check_info = new CodeEmitInfo(range_check_info);
 350   }
 351 
 352   // emit array address setup early so it schedules better
 353   // FIXME?  No harm in this on aarch64, and it might help
 354   LIR_Address* array_addr = emit_array_address(array.result(), index.result(), x->elt_type(), obj_store);
 355 
 356   if (GenerateRangeChecks && needs_range_check) {
 357     if (use_length) {
 358       __ cmp(lir_cond_belowEqual, length.result(), index.result());
 359       __ branch(lir_cond_belowEqual, T_INT, new RangeCheckStub(range_check_info, index.result()));
 360     } else {
 361       array_range_check(array.result(), index.result(), null_check_info, range_check_info);
 362       // range_check also does the null check
 363       null_check_info = NULL;
 364     }
 365   }
 366 
 367   if (GenerateArrayStoreCheck && needs_store_check) {
 368     LIR_Opr tmp1 = new_register(objectType);
 369     LIR_Opr tmp2 = new_register(objectType);
 370     LIR_Opr tmp3 = new_register(objectType);
 371 
 372     CodeEmitInfo* store_check_info = new CodeEmitInfo(range_check_info);
 373     __ store_check(value.result(), array.result(), tmp1, tmp2, tmp3, store_check_info, x->profiled_method(), x->profiled_bci());
 374   }
 375 
 376   if (obj_store) {
 377     // Needs GC write barriers.
 378     pre_barrier(LIR_OprFact::address(array_addr), LIR_OprFact::illegalOpr /* pre_val */,
 379                 true /* do_load */, false /* patch */, NULL);




+++ new/src/hotspot/cpu/aarch64/c1_LIRGenerator_aarch64.cpp

 339   }
 340 
 341   set_no_result(x);
 342 
 343   // the CodeEmitInfo must be duplicated for each different
 344   // LIR-instruction because spilling can occur anywhere between two
 345   // instructions and so the debug information must be different
 346   CodeEmitInfo* range_check_info = state_for(x);
 347   CodeEmitInfo* null_check_info = NULL;
 348   if (x->needs_null_check()) {
 349     null_check_info = new CodeEmitInfo(range_check_info);
 350   }
 351 
 352   // emit array address setup early so it schedules better
 353   // FIXME?  No harm in this on aarch64, and it might help
 354   LIR_Address* array_addr = emit_array_address(array.result(), index.result(), x->elt_type(), obj_store);
 355 
 356   if (GenerateRangeChecks && needs_range_check) {
 357     if (use_length) {
 358       __ cmp(lir_cond_belowEqual, length.result(), index.result());
 359       __ branch(lir_cond_belowEqual, T_INT, new RangeCheckStub(range_check_info, index.result(), array.result()));
 360     } else {
 361       array_range_check(array.result(), index.result(), null_check_info, range_check_info);
 362       // range_check also does the null check
 363       null_check_info = NULL;
 364     }
 365   }
 366 
 367   if (GenerateArrayStoreCheck && needs_store_check) {
 368     LIR_Opr tmp1 = new_register(objectType);
 369     LIR_Opr tmp2 = new_register(objectType);
 370     LIR_Opr tmp3 = new_register(objectType);
 371 
 372     CodeEmitInfo* store_check_info = new CodeEmitInfo(range_check_info);
 373     __ store_check(value.result(), array.result(), tmp1, tmp2, tmp3, store_check_info, x->profiled_method(), x->profiled_bci());
 374   }
 375 
 376   if (obj_store) {
 377     // Needs GC write barriers.
 378     pre_barrier(LIR_OprFact::address(array_addr), LIR_OprFact::illegalOpr /* pre_val */,
 379                 true /* do_load */, false /* patch */, NULL);
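For context on the user-visible effect (not part of this diff): once the stub has both the failing index and the array, the runtime can report the array length alongside the index, which is the point of 8201593. The snippet below is a standalone illustration of the resulting message shape; it is not HotSpot code, and the format string is an approximation of the post-change exception text.

#include <cstdio>

// Standalone illustration of the improved exception text; the format string
// approximates the post-8201593 message and is not taken from HotSpot sources.
static void report_out_of_bounds(int index, int length) {
  std::printf("java.lang.ArrayIndexOutOfBoundsException: "
              "Index %d out of bounds for length %d\n", index, length);
}

int main() {
  report_out_of_bounds(5, 3);   // e.g. a[5] on an array of length 3
  return 0;
}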

