
src/cpu/x86/vm/c1_LIRGenerator_x86.cpp

rev 13055 : Implement barriers for maintaining connection matrix.
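For context: Shenandoah's connection matrix tracks, for each pair of heap regions, whether one region may hold references into the other, and the barriers introduced by this change keep that matrix up to date on reference stores. Below is a minimal conceptual sketch of that bookkeeping, assuming a flat one-byte-per-pair matrix over a contiguous heap of power-of-two-sized regions; all names and the indexing order are illustrative, not HotSpot's.

#include <cstddef>
#include <cstdint>

// Conceptual sketch only -- not the code under review.
struct ConnectionMatrixSketch {
  uint8_t* _matrix;        // _stride * _stride bytes, one byte per (from, to) region pair
  size_t   _stride;        // number of heap regions
  char*    _heap_base;     // start of the contiguous heap
  int      _region_shift;  // log2(region size in bytes)

  size_t region_index(const void* p) const {
    return (size_t)((const char*)p - _heap_base) >> _region_shift;
  }

  // Called (conceptually) after a reference store a[i] = val or obj.f = val:
  // record that the region holding the slot may now point into the region
  // holding the stored object.
  void record_connection(const void* slot_addr, const void* stored_obj) {
    if (stored_obj == NULL) return;    // storing null adds no edge
    size_t from = region_index(slot_addr);
    size_t to   = region_index(stored_obj);
    _matrix[to * _stride + from] = 1;  // indexing order illustrative; an unconditional byte store keeps the barrier cheap
  }
};

In the hunk below, pre_barrier()/post_barrier() are the C1 hooks where the LIR for this kind of bookkeeping, alongside the usual SATB and card-mark work, is emitted for the aastore path.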


 324       // range_check also does the null check
 325       null_check_info = NULL;
 326     }
 327   }
 328 
 329   if (GenerateArrayStoreCheck && needs_store_check) {
 330     LIR_Opr tmp1 = new_register(objectType);
 331     LIR_Opr tmp2 = new_register(objectType);
 332     LIR_Opr tmp3 = new_register(objectType);
 333 
 334     CodeEmitInfo* store_check_info = new CodeEmitInfo(range_check_info);
 335     __ store_check(val, ary, tmp1, tmp2, tmp3, store_check_info, x->profiled_method(), x->profiled_bci());
 336   }
 337 
 338   if (obj_store) {
 339     // Needs GC write barriers.
 340     pre_barrier(LIR_OprFact::address(array_addr), LIR_OprFact::illegalOpr /* pre_val */,
 341                 true /* do_load */, false /* patch */, NULL);
 342     __ move(val, array_addr, null_check_info);
 343     // Seems to be a precise store
 344     post_barrier(LIR_OprFact::address(array_addr), value.result());
 345   } else {
 346     LIR_Opr result = maybe_mask_boolean(x, ary, val, null_check_info);
 347     __ move(result, array_addr, null_check_info);
 348   }
 349 }
 350 
 351 
 352 void LIRGenerator::do_MonitorEnter(MonitorEnter* x) {
 353   assert(x->is_pinned(),"");
 354   LIRItem obj(x->obj(), this);
 355   obj.load_item();
 356 
 357   set_no_result(x);
 358 
 359   // "lock" stores the address of the monitor stack slot, so this is not an oop
 360   LIR_Opr lock = new_register(T_INT);
 361   // Need a scratch register for biased locking on x86
 362   LIR_Opr scratch = LIR_OprFact::illegalOpr;
 363   if (UseBiasedLocking) {
 364     scratch = new_register(T_INT);




 324       // range_check also does the null check
 325       null_check_info = NULL;
 326     }
 327   }
 328 
 329   if (GenerateArrayStoreCheck && needs_store_check) {
 330     LIR_Opr tmp1 = new_register(objectType);
 331     LIR_Opr tmp2 = new_register(objectType);
 332     LIR_Opr tmp3 = new_register(objectType);
 333 
 334     CodeEmitInfo* store_check_info = new CodeEmitInfo(range_check_info);
 335     __ store_check(val, ary, tmp1, tmp2, tmp3, store_check_info, x->profiled_method(), x->profiled_bci());
 336   }
 337 
 338   if (obj_store) {
 339     // Needs GC write barriers.
 340     pre_barrier(LIR_OprFact::address(array_addr), LIR_OprFact::illegalOpr /* pre_val */,
 341                 true /* do_load */, false /* patch */, NULL);
 342     __ move(val, array_addr, null_check_info);
 343     // Seems to be a precise store
 344     post_barrier(LIR_OprFact::address(array_addr), val);
 345   } else {
 346     LIR_Opr result = maybe_mask_boolean(x, ary, val, null_check_info);
 347     __ move(result, array_addr, null_check_info);
 348   }
 349 }
 350 
 351 
 352 void LIRGenerator::do_MonitorEnter(MonitorEnter* x) {
 353   assert(x->is_pinned(),"");
 354   LIRItem obj(x->obj(), this);
 355   obj.load_item();
 356 
 357   set_no_result(x);
 358 
 359   // "lock" stores the address of the monitor stack slot, so this is not an oop
 360   LIR_Opr lock = new_register(T_INT);
 361   // Need a scratch register for biased locking on x86
 362   LIR_Opr scratch = LIR_OprFact::illegalOpr;
 363   if (UseBiasedLocking) {
 364     scratch = new_register(T_INT);
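The two copies of the hunk differ only at line 344: post_barrier() receives value.result() in one version and val in the other. val is the operand that the preceding __ move() actually writes into the array slot, so passing it lets a region-connectivity post barrier derive the "to" region from the reference the slot really holds. A straight-line sketch of the obj_store branch under that reading, with assumed helper names standing in for the emitted barrier code:

#include <cstddef>

// Assumed helpers, stubbed out so the sketch compiles; the real barriers are
// emitted as LIR by pre_barrier()/post_barrier() in the code above.
static void satb_enqueue_previous_value(void* /*old_ref*/) {}
static void record_region_connection(void* /*slot*/, void* /*new_ref*/) {}

// Straight-line mirror of the obj_store branch in the hunk above.
static void aastore_with_barriers(void** slot, void* new_val) {
  satb_enqueue_previous_value(*slot);       // pre_barrier: SATB marking needs the value being overwritten
  *slot = new_val;                          // the store itself (__ move(val, array_addr, null_check_info))
  record_region_connection(slot, new_val);  // post_barrier: slot's region may now point into new_val's region
}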

