void GraphKit::pre_barrier(bool do_load,
                           Node* ctl,
                           Node* obj,
                           Node* adr,
                           uint  adr_idx,
                           Node* val,
                           const TypeOopPtr* val_type,
                           Node* pre_val,
                           BasicType bt) {
  BarrierSet* bs = Universe::heap()->barrier_set();
  set_control(ctl);
  switch (bs->kind()) {
    case BarrierSet::G1SATBCT:
    case BarrierSet::G1SATBCTLogging:
      g1_write_barrier_pre(do_load, obj, adr, adr_idx, val, val_type, pre_val, bt);
      break;

    case BarrierSet::CardTableModRef:
    case BarrierSet::CardTableExtension:
    case BarrierSet::ModRef:
      break;

    case BarrierSet::Other:
    default:
      ShouldNotReachHere();
  }
}

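// Can the pre-barrier be emitted at a different point than the store it
// guards?  True for the card-table collectors (they have no pre-barrier)
// and for G1, provided no safepoint can occur in between.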
bool GraphKit::can_move_pre_barrier() const {
  BarrierSet* bs = Universe::heap()->barrier_set();
  switch (bs->kind()) {
    case BarrierSet::G1SATBCT:
    case BarrierSet::G1SATBCTLogging:
      return true; // Can move it if no safepoint

    case BarrierSet::CardTableModRef:
    case BarrierSet::CardTableExtension:
    case BarrierSet::ModRef:
      return true; // There is no pre-barrier

    case BarrierSet::Other:
    default:
      ShouldNotReachHere();
  }
  return false;
}

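// Insert the collector-specific post-barrier after a store: G1 records the
// card in its dirty-card queue, while the card-table collectors dirty the
// card directly via write_barrier_post().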
void GraphKit::post_barrier(Node* ctl,
                            Node* store,
                            Node* obj,
                            Node* adr,
                            uint adr_idx,
                            Node* val,
                            BasicType bt,
                            bool use_precise) {
  BarrierSet* bs = Universe::heap()->barrier_set();
  set_control(ctl);
  switch (bs->kind()) {
    case BarrierSet::G1SATBCT:
    case BarrierSet::G1SATBCTLogging:
      g1_write_barrier_post(store, obj, adr, adr_idx, val, bt, use_precise);
      break;

    case BarrierSet::CardTableModRef:
    case BarrierSet::CardTableExtension:
      write_barrier_post(store, obj, adr, adr_idx, val, use_precise);
      break;

    case BarrierSet::ModRef:
      break;

    case BarrierSet::Other:
    default:
      ShouldNotReachHere();
  }
}

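// G1 SATB pre-barrier: remember the value that is about to be overwritten
// so the concurrent marker still visits it (snapshot-at-the-beginning).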
void GraphKit::g1_write_barrier_pre(bool do_load,
                                    Node* obj,
                                    Node* adr,
                                    uint alias_idx,
                                    Node* val,
                                    const TypeOopPtr* val_type,
                                    Node* pre_val,
                                    BasicType bt) {
  // Some sanity checks
  // Note: val is unused in this routine.

  if (do_load) {
    // We need to generate the load of the previous value
    assert(obj != NULL, "must have a base");
    assert(adr != NULL, "where are we loading from?");
    assert(pre_val == NULL, "loaded already?");
    assert(val_type != NULL, "need a type");
  } else {
    // In this case both val_type and alias_idx are unused.
    assert(pre_val != NULL, "must be loaded already");
    // Nothing to be done if pre_val is null.
    if (pre_val->bottom_type() == TypePtr::NULL_PTR) return;
    assert(pre_val->bottom_type()->basic_type() == T_OBJECT, "or we shouldn't be here");
  }
  assert(bt == T_OBJECT, "or we shouldn't be here");

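  // Build the barrier subgraph with IdealKit, a helper for constructing
  // control flow (tests, branches) directly in the ideal graph.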
  IdealKit ideal(this, true);

  Node* tls = __ thread(); // ThreadLocalStorage

  Node* no_ctrl = NULL;
  Node* no_base = __ top();
  Node* zero  = __ ConI(0);
  Node* zeroX = __ ConX(0);

  float likely   = PROB_LIKELY(0.999);
  float unlikely = PROB_UNLIKELY(0.999);

  BasicType active_type = in_bytes(PtrQueue::byte_width_of_active()) == 4 ? T_INT : T_BYTE;
  assert(in_bytes(PtrQueue::byte_width_of_active()) == 4 || in_bytes(PtrQueue::byte_width_of_active()) == 1, "flag width");

  // Offsets into the thread