    // addition to memory membars when is_volatile. This is a little
    // too strong, but avoids the need to insert per-alias-type
    // volatile membars (for stores; compare Parse::do_put_xxx), which
    // we cannot do effectively here because we probably only have a
    // rough approximation of type.
    need_mem_bar = true;
    // For Stores, place a memory ordering barrier now.
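    // The release barrier keeps earlier loads and stores from being
    // reordered below the upcoming volatile store.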
    if (is_store) {
      insert_mem_bar(Op_MemBarRelease);
    } else {
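      // On a CPU that is not multiple-copy atomic (e.g. PPC64), a
      // volatile load needs a preceding MemBarVolatile so that writes
      // made by independent processors are observed in a single
      // global order (the IRIW litmus test).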
      if (support_IRIW_for_not_multiple_copy_atomic_cpu) {
        insert_mem_bar(Op_MemBarVolatile);
      }
    }
  }

  // Memory barrier to prevent normal and 'unsafe' accesses from
  // bypassing each other. Happens after the null checks, so the
  // exception paths do not take memory state from the memory barrier,
  // and there is therefore no problem making a strong assert about
  // mixing users of safe & unsafe memory. Otherwise this fails in a
  // CTW of rt.jar around 5701, class
  // sun/reflect/UnsafeBooleanFieldAccessorImpl.
  if (need_mem_bar) insert_mem_bar(Op_MemBarCPUOrder);

  if (!is_store) {
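    // A volatile load must have acquire semantics so that later
    // accesses cannot be reordered ahead of it; a plain unsafe load
    // is left unordered.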
    MemNode::MemOrd mo = is_volatile ? MemNode::acquire : MemNode::unordered;
    Node* p = make_load(control(), adr, value_type, type, adr_type, mo, is_volatile);
    // load value
    switch (type) {
    case T_BOOLEAN:
    case T_CHAR:
    case T_BYTE:
    case T_SHORT:
    case T_INT:
    case T_LONG:
    case T_FLOAT:
    case T_DOUBLE:
      break;
    case T_OBJECT:
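      // An unsafe oop load might be reading the referent field of a
      // java.lang.ref.Reference; with G1 the loaded referent must be
      // recorded in an SATB log buffer, so insert_pre_barrier() emits
      // the offset check plus the conditional pre-barrier.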
      if (need_read_barrier) {
        insert_pre_barrier(heap_base_oop, offset, p, !(is_volatile || need_mem_bar));
      }