
src/share/vm/opto/library_call.cpp

(old version)




2629     // rough approximation of type.
2630     need_mem_bar = true;
2631     // For Stores, place a memory ordering barrier now.
2632     if (is_store) {
2633       insert_mem_bar(Op_MemBarRelease);
2634     } else {
2635       if (support_IRIW_for_not_multiple_copy_atomic_cpu) {
2636         insert_mem_bar(Op_MemBarVolatile);
2637       }
2638     }
2639   }
2640 
2641   // Memory barrier to prevent normal and 'unsafe' accesses from
2642   // bypassing each other.  Happens after null checks, so the
2643   // exception paths do not take memory state from the memory barrier,
2644   // so there are no problems making a strong assert about mixing users
2645   // of safe & unsafe memory.
2646   if (need_mem_bar) insert_mem_bar(Op_MemBarCPUOrder);
2647 
2648   if (!is_store) {
2649     MemNode::MemOrd mo = is_volatile ? MemNode::acquire : MemNode::unordered;
2650     // To be valid, unsafe loads may depend on other conditions than
2651     // the one that guards them: pin the Load node
2652     Node* p = make_load(control(), adr, value_type, type, adr_type, mo, LoadNode::Pinned, is_volatile);
2653     // load value
2654     switch (type) {
2655     case T_BOOLEAN:
2656     case T_CHAR:
2657     case T_BYTE:
2658     case T_SHORT:
2659     case T_INT:
2660     case T_LONG:
2661     case T_FLOAT:
2662     case T_DOUBLE:
2663       break;
2664     case T_OBJECT:
2665       if (need_read_barrier) {
2666         insert_pre_barrier(heap_base_oop, offset, p, !(is_volatile || need_mem_bar));
2667       }
2668       break;
2669     case T_ADDRESS:
2670       // Cast to an int type.
2671       p = _gvn.transform(new CastP2XNode(NULL, p));
2672       p = ConvX2UL(p);
2673       break;
2674     default:
2675       fatal(err_msg_res("unexpected type %d: %s", type, type2name(type)));
2676       break;
2677     }

2678     // The load node has the control of the preceding MemBarCPUOrder.  All
2679     // following nodes will have the control of the MemBarCPUOrder inserted at
2680     // the end of this method.  So, pushing the load onto the stack at a later
2681     // point is fine.
2682     set_result(p);
2683   } else {
2684     // place effect of store into memory
2685     switch (type) {
2686     case T_DOUBLE:
2687       val = dstore_rounding(val);
2688       break;
2689     case T_ADDRESS:
2690       // Repackage the long as a pointer.
2691       val = ConvL2X(val);
2692       val = _gvn.transform(new CastX2PNode(val));
2693       break;
2694     }
2695 
2696     MemNode::MemOrd mo = is_volatile ? MemNode::release : MemNode::unordered;
2697     if (type != T_OBJECT ) {

src/share/vm/opto/library_call.cpp (new version)

2629     // rough approximation of type.
2630     need_mem_bar = true;
2631     // For Stores, place a memory ordering barrier now.
2632     if (is_store) {
2633       insert_mem_bar(Op_MemBarRelease);
2634     } else {
2635       if (support_IRIW_for_not_multiple_copy_atomic_cpu) {
2636         insert_mem_bar(Op_MemBarVolatile);
2637       }
2638     }
2639   }
2640 
2641   // Memory barrier to prevent normal and 'unsafe' accesses from
2642   // bypassing each other.  Happens after null checks, so the
2643   // exception paths do not take memory state from the memory barrier,
2644   // so there are no problems making a strong assert about mixing users
2645   // of safe & unsafe memory.
2646   if (need_mem_bar) insert_mem_bar(Op_MemBarCPUOrder);
2647 
2648   if (!is_store) {
2649     Node* p = NULL;
2650     // Try to constant fold a load from a constant field
2651     ciField* field = alias_type->field();
2652     if (heap_base_oop != top() &&
2653         field != NULL && field->is_constant() && field->layout_type() == type) {
2654       // final or stable field
2655       const Type* con_type = Type::make_constant(alias_type->field(), heap_base_oop);
2656       if (con_type != NULL) {
2657         p = makecon(con_type);
2658       }
2659     }
2660     if (p == NULL) {
2661       MemNode::MemOrd mo = is_volatile ? MemNode::acquire : MemNode::unordered;
2662       // To be valid, unsafe loads may depend on other conditions than
2663       // the one that guards them: pin the Load node
2664       p = make_load(control(), adr, value_type, type, adr_type, mo, LoadNode::Pinned, is_volatile);
2665       // load value
2666       switch (type) {
2667       case T_BOOLEAN:
2668       case T_CHAR:
2669       case T_BYTE:
2670       case T_SHORT:
2671       case T_INT:
2672       case T_LONG:
2673       case T_FLOAT:
2674       case T_DOUBLE:
2675         break;
2676       case T_OBJECT:
2677         if (need_read_barrier) {
2678           insert_pre_barrier(heap_base_oop, offset, p, !(is_volatile || need_mem_bar));
2679         }
2680         break;
2681       case T_ADDRESS:
2682         // Cast to an int type.
2683         p = _gvn.transform(new CastP2XNode(NULL, p));
2684         p = ConvX2UL(p);
2685         break;
2686       default:
2687         fatal(err_msg_res("unexpected type %d: %s", type, type2name(type)));
2688         break;
2689       }
2690     }
2691     // The load node has the control of the preceding MemBarCPUOrder.  All
2692     // following nodes will have the control of the MemBarCPUOrder inserted at
2693     // the end of this method.  So, pushing the load onto the stack at a later
2694     // point is fine.
2695     set_result(p);
2696   } else {
2697     // place effect of store into memory
2698     switch (type) {
2699     case T_DOUBLE:
2700       val = dstore_rounding(val);
2701       break;
2702     case T_ADDRESS:
2703       // Repackage the long as a pointer.
2704       val = ConvL2X(val);
2705       val = _gvn.transform(new CastX2PNode(val));
2706       break;
2707     }
2708 
2709     MemNode::MemOrd mo = is_volatile ? MemNode::release : MemNode::unordered;
2710     if (type != T_OBJECT ) {


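For context, here is a minimal Java-level sketch (not part of this webrev; the class and field names below are hypothetical, made up for illustration) of the kind of access the new branch can fold: an Unsafe load from a constant field of a constant base object. Whether C2 actually folds such a load depends on ciField::is_constant() treating the field as constant (for example a static final field of an initialized class, or a @Stable field) and on the access type matching the field's layout type, which is what the new check above verifies before calling Type::make_constant().

import java.lang.reflect.Field;
import sun.misc.Unsafe;

// Illustrative class; names are hypothetical, not taken from the webrev.
public class UnsafeConstantLoad {
    private static final Unsafe U;
    static {
        try {
            Field f = Unsafe.class.getDeclaredField("theUnsafe");
            f.setAccessible(true);
            U = (Unsafe) f.get(null);
        } catch (ReflectiveOperationException e) {
            throw new Error(e);
        }
    }

    // A static final int: its holder class is initialized by the time
    // readConst() compiles, its layout type is T_INT, and it never changes.
    static final int CONST = 42;

    private static final Object BASE;   // constant base oop (the class mirror)
    private static final long   OFFSET; // constant offset of CONST
    static {
        try {
            Field cf = UnsafeConstantLoad.class.getDeclaredField("CONST");
            BASE   = U.staticFieldBase(cf);
            OFFSET = U.staticFieldOffset(cf);
        } catch (NoSuchFieldException e) {
            throw new Error(e);
        }
    }

    // With the patched code above, C2 can replace this unsafe load with the
    // constant 42 (makecon of the folded type) instead of emitting a LoadI,
    // because the base oop, the offset, and the field value are all constant.
    static int readConst() {
        return U.getInt(BASE, OFFSET);
    }

    public static void main(String[] args) {
        System.out.println(readConst());
    }
}

A static final field is used here because final instance fields are only trusted as constants under additional conditions (for example @Stable fields in JDK internals, or the experimental -XX:+TrustFinalNonStaticFields flag), whereas static finals of an initialized class are generally treated as constants by default.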