311 LIR_Opr sp_opr = FrameMap::SP_opr;
312 if ((t == T_LONG || t == T_DOUBLE) &&
313 ((in_bytes(offset_from_sp) - STACK_BIAS) % 8 != 0)) {
314 __ unaligned_move(item, new LIR_Address(sp_opr, in_bytes(offset_from_sp), t));
315 } else {
316 __ move(item, new LIR_Address(sp_opr, in_bytes(offset_from_sp), t));
317 }
318 }
319
320 //----------------------------------------------------------------------
321 // visitor functions
322 //----------------------------------------------------------------------
323
324
// Visitor for an array-element store (a[i] = v): builds the LIR for the store
// plus, as required, the range check, implicit null check, array store check
// (covariance check for object arrays), and GC write barriers.
// NOTE(review): every line below carries an embedded listing number (e.g. "325 ")
// from the paste this chunk came from; it is not part of the code.
325 void LIRGenerator::do_StoreIndexed(StoreIndexed* x) {
326 assert(x->is_pinned(),"");
327 bool needs_range_check = true;
328 bool use_length = x->length() != NULL;
329 bool obj_store = x->elt_type() == T_ARRAY || x->elt_type() == T_OBJECT;
// A store check is only needed for object stores, and can be skipped when the
// stored value is the constant null (null is assignable to any array type).
330 bool needs_store_check = obj_store && (x->value()->as_Constant() == NULL ||
331 !get_jobject_constant(x->value())->is_null_object());
332
333 LIRItem array(x->array(), this);
334 LIRItem index(x->index(), this);
335 LIRItem value(x->value(), this);
336 LIRItem length(this);
337
338 array.load_item();
// Index may stay a constant operand; only force it into a register if needed.
339 index.load_nonconstant();
340
341 if (use_length) {
// With an explicit length operand, the range check may be provably redundant.
342 needs_range_check = x->compute_needs_range_check();
343 if (needs_range_check) {
344 length.set_instruction(x->length());
345 length.load_item();
346 }
347 }
348 if (needs_store_check) {
349 value.load_item();
350 } else {
351 value.load_for_store(x->elt_type());
// NOTE(review): the embedded listing numbering jumps from 351 to 365 here —
// the else-block's closing brace and the declarations of range_check_info /
// null_check_info (used below) appear elided from this view.
365 // emit array address setup early so it schedules better
366 LIR_Address* array_addr = emit_array_address(array.result(), index.result(), x->elt_type(), obj_store);
367
368 if (GenerateRangeChecks && needs_range_check) {
369 if (use_length) {
// Unsigned compare (belowEqual) catches both index < 0 and index >= length.
370 __ cmp(lir_cond_belowEqual, length.result(), index.result());
371 __ branch(lir_cond_belowEqual, T_INT, new RangeCheckStub(range_check_info, index.result()));
372 } else {
373 array_range_check(array.result(), index.result(), null_check_info, range_check_info);
374 // range_check also does the null check
375 null_check_info = NULL;
376 }
377 }
378
379 if (GenerateArrayStoreCheck && needs_store_check) {
// Scratch registers for the store check — SPARC globals G1/G3/G5.
380 LIR_Opr tmp1 = FrameMap::G1_opr;
381 LIR_Opr tmp2 = FrameMap::G3_opr;
382 LIR_Opr tmp3 = FrameMap::G5_opr;
383
384 CodeEmitInfo* store_check_info = new CodeEmitInfo(range_check_info);
385 __ store_check(value.result(), array.result(), tmp1, tmp2, tmp3, store_check_info);
386 }
387
388 if (obj_store) {
389 // Needs GC write barriers.
390 pre_barrier(LIR_OprFact::address(array_addr), LIR_OprFact::illegalOpr /* pre_val */,
391 true /* do_load */, false /* patch */, NULL);
392 }
// The store itself; null_check_info (if still set) makes it an implicit null check.
393 __ move(value.result(), array_addr, null_check_info);
394 if (obj_store) {
395 // Precise card mark
396 post_barrier(LIR_OprFact::address(array_addr), value.result());
397 }
398 }
399
400
401 void LIRGenerator::do_MonitorEnter(MonitorEnter* x) {
402 assert(x->is_pinned(),"");
403 LIRItem obj(x->obj(), this);
404 obj.load_item();
405
|
311 LIR_Opr sp_opr = FrameMap::SP_opr;
312 if ((t == T_LONG || t == T_DOUBLE) &&
313 ((in_bytes(offset_from_sp) - STACK_BIAS) % 8 != 0)) {
314 __ unaligned_move(item, new LIR_Address(sp_opr, in_bytes(offset_from_sp), t));
315 } else {
316 __ move(item, new LIR_Address(sp_opr, in_bytes(offset_from_sp), t));
317 }
318 }
319
320 //----------------------------------------------------------------------
321 // visitor functions
322 //----------------------------------------------------------------------
323
324
// Visitor for an array-element store (a[i] = v) — updated variant of the listing
// above: it additionally forces the store check when profiling is requested
// (x->should_profile()) and threads profiled_method()/profiled_bci() into
// store_check so type-profile data can be collected there.
// NOTE(review): every line below carries an embedded listing number (e.g. "325 ")
// from the paste this chunk came from; it is not part of the code.
325 void LIRGenerator::do_StoreIndexed(StoreIndexed* x) {
326 assert(x->is_pinned(),"");
327 bool needs_range_check = true;
328 bool use_length = x->length() != NULL;
329 bool obj_store = x->elt_type() == T_ARRAY || x->elt_type() == T_OBJECT;
// Store check needed for object stores unless the value is constant null —
// but profiling (should_profile) keeps the check so the type profile is recorded.
330 bool needs_store_check = obj_store && (x->value()->as_Constant() == NULL ||
331 !get_jobject_constant(x->value())->is_null_object() ||
332 x->should_profile());
333
334 LIRItem array(x->array(), this);
335 LIRItem index(x->index(), this);
336 LIRItem value(x->value(), this);
337 LIRItem length(this);
338
339 array.load_item();
// Index may stay a constant operand; only force it into a register if needed.
340 index.load_nonconstant();
341
342 if (use_length) {
// With an explicit length operand, the range check may be provably redundant.
343 needs_range_check = x->compute_needs_range_check();
344 if (needs_range_check) {
345 length.set_instruction(x->length());
346 length.load_item();
347 }
348 }
349 if (needs_store_check) {
350 value.load_item();
351 } else {
352 value.load_for_store(x->elt_type());
// NOTE(review): the embedded listing numbering jumps from 352 to 366 here —
// the else-block's closing brace and the declarations of range_check_info /
// null_check_info (used below) appear elided from this view.
366 // emit array address setup early so it schedules better
367 LIR_Address* array_addr = emit_array_address(array.result(), index.result(), x->elt_type(), obj_store);
368
369 if (GenerateRangeChecks && needs_range_check) {
370 if (use_length) {
// Unsigned compare (belowEqual) catches both index < 0 and index >= length.
371 __ cmp(lir_cond_belowEqual, length.result(), index.result());
372 __ branch(lir_cond_belowEqual, T_INT, new RangeCheckStub(range_check_info, index.result()));
373 } else {
374 array_range_check(array.result(), index.result(), null_check_info, range_check_info);
375 // range_check also does the null check
376 null_check_info = NULL;
377 }
378 }
379
380 if (GenerateArrayStoreCheck && needs_store_check) {
// Scratch registers for the store check — SPARC globals G1/G3/G5.
381 LIR_Opr tmp1 = FrameMap::G1_opr;
382 LIR_Opr tmp2 = FrameMap::G3_opr;
383 LIR_Opr tmp3 = FrameMap::G5_opr;
384
385 CodeEmitInfo* store_check_info = new CodeEmitInfo(range_check_info);
// Passes profiled_method/profiled_bci so the store check can update the type profile.
386 __ store_check(value.result(), array.result(), tmp1, tmp2, tmp3, store_check_info, x->profiled_method(), x->profiled_bci());
387 }
388
389 if (obj_store) {
390 // Needs GC write barriers.
391 pre_barrier(LIR_OprFact::address(array_addr), LIR_OprFact::illegalOpr /* pre_val */,
392 true /* do_load */, false /* patch */, NULL);
393 }
// The store itself; null_check_info (if still set) makes it an implicit null check.
394 __ move(value.result(), array_addr, null_check_info);
395 if (obj_store) {
396 // Precise card mark
397 post_barrier(LIR_OprFact::address(array_addr), value.result());
398 }
399 }
400
401
402 void LIRGenerator::do_MonitorEnter(MonitorEnter* x) {
403 assert(x->is_pinned(),"");
404 LIRItem obj(x->obj(), this);
405 obj.load_item();
406
|