2652 // For Stores, place a memory ordering barrier now.
2653 if (is_store) {
2654 insert_mem_bar(Op_MemBarRelease);
2655 } else {
2656 if (support_IRIW_for_not_multiple_copy_atomic_cpu) {
2657 insert_mem_bar(Op_MemBarVolatile);
2658 }
2659 }
2660 }
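For context, the barrier choice above mirrors the Java memory model mapping: a volatile store is preceded by a release barrier, while on CPUs that are not multiple-copy atomic (e.g. PowerPC), IRIW consistency additionally requires a full fence on the volatile-load side. A minimal C++11 analogy, not HotSpot code, with std::atomic standing in for a Java volatile field and illustrative function names:

#include <atomic>

std::atomic<int> v{0};

void volatile_store(int x) {
  // MemBarRelease before the store: earlier accesses may not sink below it.
  v.store(x, std::memory_order_release);
}

int volatile_load() {
  // seq_cst emits the full fence that Op_MemBarVolatile provides above,
  // which non-multiple-copy-atomic CPUs need for IRIW consistency.
  return v.load(std::memory_order_seq_cst);
}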
2661
2662 // Memory barrier to prevent normal and 'unsafe' accesses from
2663 // bypassing each other. Happens after null checks, so the
2664 // exception paths do not take memory state from the memory barrier,
2665 // so there is no problem making a strong assert about mixing users
2666 // of safe & unsafe memory. Otherwise this fails in a CTW of rt.jar
2667 // around class 5701, sun/reflect/UnsafeBooleanFieldAccessorImpl.
2668 if (need_mem_bar) insert_mem_bar(Op_MemBarCPUOrder);
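Op_MemBarCPUOrder here is a compiler-level ordering constraint: it keeps the optimizer from reordering normal and raw ('unsafe') accesses across each other, without emitting a hardware fence on most platforms. A rough C++ analogy, not HotSpot code, using a compiler-only fence:

#include <atomic>

int normal_field;

void mixed_access(char* raw) {
  normal_field = 1;                                    // normal access
  std::atomic_signal_fence(std::memory_order_seq_cst); // compiler-only reordering barrier
  raw[0] = 2;                                          // raw ('unsafe') access
}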
2669
2670 if (!is_store) {
2671 MemNode::MemOrd mo = is_volatile ? MemNode::acquire : MemNode::unordered;
2672 Node* p = make_load(control(), adr, value_type, type, adr_type, mo, is_volatile);
2673 // Adjust the loaded value by type, where necessary.
2674 switch (type) {
2675 case T_BOOLEAN:
2676 case T_CHAR:
2677 case T_BYTE:
2678 case T_SHORT:
2679 case T_INT:
2680 case T_LONG:
2681 case T_FLOAT:
2682 case T_DOUBLE:
2683 break;
2684 case T_OBJECT:
2685 if (need_read_barrier) {
2686 insert_pre_barrier(heap_base_oop, offset, p, !(is_volatile || need_mem_bar));
2687 }
2688 break;
2689 case T_ADDRESS:
2690 // Cast to an int type.
2691 p = _gvn.transform(new (C) CastP2XNode(NULL, p));
2692 p = ConvX2UL(p);
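The T_ADDRESS case above reinterprets the loaded pointer as a machine word (CastP2X) and then widens it to an unsigned Java long (ConvX2UL). A sketch of the same computation in plain C++, with an illustrative function name:

#include <cstdint>

uint64_t address_as_long(void* p) {
  uintptr_t word = reinterpret_cast<uintptr_t>(p); // CastP2X: pointer to machine word
  return static_cast<uint64_t>(word);              // ConvX2UL: zero-extend to 64 bits
}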
6021 bool is_vol = field->is_volatile();
6022 ciType* field_klass = field->type();
6023 assert(field_klass->is_loaded(), "should be loaded");
6024 const TypePtr* adr_type = C->alias_type(field)->adr_type();
6025 Node *adr = basic_plus_adr(fromObj, fromObj, offset);
6026 BasicType bt = field->layout_type();
6027
6028 // Build the resultant type of the load
6029 const Type *type;
6030 if (bt == T_OBJECT) {
6031 type = TypeOopPtr::make_from_klass(field_klass->as_klass());
6032 } else {
6033 type = Type::get_const_basic_type(bt);
6034 }
6035
6036 if (support_IRIW_for_not_multiple_copy_atomic_cpu && is_vol) {
6037 insert_mem_bar(Op_MemBarVolatile); // StoreLoad barrier
6038 }
6039 // Build the load.
6040 MemNode::MemOrd mo = is_vol ? MemNode::acquire : MemNode::unordered;
6041 Node* loadedField = make_load(NULL, adr, type, bt, adr_type, mo, is_vol);
6042 // If the reference is volatile, prevent following memory ops from
6043 // floating up past the volatile read. This also prevents commoning
6044 // with another volatile read.
6045 if (is_vol) {
6046 // Memory barrier includes bogus read of value to force load BEFORE membar
6047 insert_mem_bar(Op_MemBarAcquire, loadedField);
6048 }
6049 return loadedField;
6050 }
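The load-then-MemBarAcquire pairing gives the volatile read acquire semantics: no later memory operation may float above it, and a second volatile read cannot be commoned with it. A C++ sketch of the same guarantee, not HotSpot code, with illustrative names:

#include <atomic>

std::atomic<int> flag{0};
int payload;

int reader() {
  // Volatile read plus acquire barrier: later loads/stores stay below it,
  // so observing flag == 1 guarantees we see the writer's earlier stores.
  int f = flag.load(std::memory_order_acquire);
  return (f == 1) ? payload : -1;
}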
6051
6052
6053 //------------------------------inline_aescrypt_Block-----------------------
6054 bool LibraryCallKit::inline_aescrypt_Block(vmIntrinsics::ID id) {
6055 address stubAddr;
6056 const char *stubName;
6057 assert(UseAES, "need AES instruction support");
6058
6059 switch(id) {
6060 case vmIntrinsics::_aescrypt_encryptBlock:
6061 stubAddr = StubRoutines::aescrypt_encryptBlock();
|
2652 // For Stores, place a memory ordering barrier now.
2653 if (is_store) {
2654 insert_mem_bar(Op_MemBarRelease);
2655 } else {
2656 if (support_IRIW_for_not_multiple_copy_atomic_cpu) {
2657 insert_mem_bar(Op_MemBarVolatile);
2658 }
2659 }
2660 }
2661
2662 // Memory barrier to prevent normal and 'unsafe' accesses from
2663 // bypassing each other. Happens after null checks, so the
2664 // exception paths do not take memory state from the memory barrier,
2665 // so there is no problem making a strong assert about mixing users
2666 // of safe & unsafe memory. Otherwise this fails in a CTW of rt.jar
2667 // around class 5701, sun/reflect/UnsafeBooleanFieldAccessorImpl.
2668 if (need_mem_bar) insert_mem_bar(Op_MemBarCPUOrder);
2669
2670 if (!is_store) {
2671 MemNode::MemOrd mo = is_volatile ? MemNode::acquire : MemNode::unordered;
2672 // To be valid, an unsafe load may depend on conditions other than
2673 // the one that guards it: pin the Load node to its control input
2674 Node* p = make_load(control(), adr, value_type, type, adr_type, mo, LoadNode::Pinned, is_volatile);
2675 // Adjust the loaded value by type, where necessary.
2676 switch (type) {
2677 case T_BOOLEAN:
2678 case T_CHAR:
2679 case T_BYTE:
2680 case T_SHORT:
2681 case T_INT:
2682 case T_LONG:
2683 case T_FLOAT:
2684 case T_DOUBLE:
2685 break;
2686 case T_OBJECT:
2687 if (need_read_barrier) {
2688 insert_pre_barrier(heap_base_oop, offset, p, !(is_volatile || need_mem_bar));
2689 }
2690 break;
2691 case T_ADDRESS:
2692 // Cast to an int type.
2693 p = _gvn.transform(new (C) CastP2XNode(NULL, p));
2694 p = ConvX2UL(p);
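The pinning added above (LoadNode::Pinned) exists because an unsafe load can be made valid by a caller-side condition that the compiler cannot recover from the null check alone; if the load were free to float above that condition, it could dereference an invalid address. A C++ sketch of the hazard, with an illustrative function:

long guarded_peek(bool valid, long* addr) {
  if (valid) {     // the test that actually makes 'addr' dereferenceable
    return *addr;  // must stay control-dependent on the test, i.e. pinned
  }
  return 0;
}

Hoisting the dereference above 'if (valid)' would be exactly the miscompile that pinning the Load node to its control input forbids.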
6023 bool is_vol = field->is_volatile();
6024 ciType* field_klass = field->type();
6025 assert(field_klass->is_loaded(), "should be loaded");
6026 const TypePtr* adr_type = C->alias_type(field)->adr_type();
6027 Node *adr = basic_plus_adr(fromObj, fromObj, offset);
6028 BasicType bt = field->layout_type();
6029
6030 // Build the resultant type of the load
6031 const Type *type;
6032 if (bt == T_OBJECT) {
6033 type = TypeOopPtr::make_from_klass(field_klass->as_klass());
6034 } else {
6035 type = Type::get_const_basic_type(bt);
6036 }
6037
6038 if (support_IRIW_for_not_multiple_copy_atomic_cpu && is_vol) {
6039 insert_mem_bar(Op_MemBarVolatile); // StoreLoad barrier
6040 }
6041 // Build the load.
6042 MemNode::MemOrd mo = is_vol ? MemNode::acquire : MemNode::unordered;
6043 Node* loadedField = make_load(NULL, adr, type, bt, adr_type, mo, LoadNode::DependsOnlyOnTest, is_vol);
6044 // If the reference is volatile, prevent following memory ops from
6045 // floating up past the volatile read. This also prevents commoning
6046 // with another volatile read.
6047 if (is_vol) {
6048 // Memory barrier includes bogus read of value to force load BEFORE membar
6049 insert_mem_bar(Op_MemBarAcquire, loadedField);
6050 }
6051 return loadedField;
6052 }
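By contrast, the field load above keeps LoadNode::DependsOnlyOnTest: a field at a fixed offset is readable whenever the object is non-null, so the load depends only on a dominating null check and may legally be hoisted to, or commoned with, any equivalent check. A C++ sketch of the motion this permits, with illustrative names:

struct Obj { long f; };

long read_twice(Obj* o) {
  long a = (o != nullptr) ? o->f : 0; // load guarded by a null test
  long b = (o != nullptr) ? o->f : 0; // may be commoned with the first load:
  return a + b;                       // only the null test matters for validity
}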
6053
6054
6055 //------------------------------inline_aescrypt_Block-----------------------
6056 bool LibraryCallKit::inline_aescrypt_Block(vmIntrinsics::ID id) {
6057 address stubAddr;
6058 const char *stubName;
6059 assert(UseAES, "need AES instruction support");
6060
6061 switch(id) {
6062 case vmIntrinsics::_aescrypt_encryptBlock:
6063 stubAddr = StubRoutines::aescrypt_encryptBlock();