src/share/vm/opto/memnode.cpp (Sdiff for bug 8036851)
#ifdef _LP64
    if (adr->bottom_type()->is_ptr_to_narrowoop()) {
      // With compressed oops, load the narrow value and decode it back
      // to a full-width pointer.
      Node* load  = gvn.transform(new (C) LoadNNode(ctl, mem, adr, adr_type, rt->make_narrowoop(), mo));
      return new (C) DecodeNNode(load, load->bottom_type()->make_ptr());
    } else
#endif
    {
      assert(!adr->bottom_type()->is_ptr_to_narrowoop() && !adr->bottom_type()->is_ptr_to_narrowklass(), "should have got back a narrow oop");
      return new (C) LoadPNode(ctl, mem, adr, adr_type, rt->is_oopptr(), mo);
    }
  }
  ShouldNotReachHere();
  return (LoadNode*)NULL;
}
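For context on the narrow-oop branch above: with compressed oops, a managed pointer is stored in 32 bits as a shifted offset from a heap base, and the DecodeN node recomputes the full pointer. Below is a minimal standalone sketch of that arithmetic, not HotSpot code; the base address and 3-bit shift are assumptions matching the common default configuration.

#include <cstdint>
#include <cstdio>

// Hypothetical values, not from this patch: a heap base and the 3-bit
// shift implied by 8-byte object alignment. Assumes a 64-bit host.
static const uint64_t kHeapBase = 0x800000000ULL;
static const unsigned kShift    = 3;

// Roughly what DecodeN computes: 32-bit narrow oop -> full pointer.
static uint64_t decode(uint32_t narrow) {
  return kHeapBase + ((uint64_t)narrow << kShift);
}

int main() {
  printf("0x%llx\n", (unsigned long long)decode(0x12345678u));
  return 0;
}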

LoadLNode* LoadLNode::make_atomic(Compile *C, Node* ctl, Node* mem, Node* adr, const TypePtr* adr_type, const Type* rt, MemOrd mo) {
  bool require_atomic = true;
  return new (C) LoadLNode(ctl, mem, adr, adr_type, rt->is_long(), mo, require_atomic);
}

LoadDNode* LoadDNode::make_atomic(Compile *C, Node* ctl, Node* mem, Node* adr, const TypePtr* adr_type, const Type* rt, MemOrd mo) {
  bool require_atomic = true;
  return new (C) LoadDNode(ctl, mem, adr, adr_type, rt, mo, require_atomic);
}
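The make_atomic constructors exist because the Java memory model (JLS 17.7) lets plain long and double accesses tear on 32-bit platforms, while volatile ones must be indivisible; require_atomic tells the backend to emit a single 64-bit access. This change adds the double variant next to the long one that was already there, presumably so volatile jdouble gets the same guarantee as volatile jlong. A standalone C++ sketch of the property being enforced (std::atomic stands in for the generated code; this is not HotSpot code):

#include <atomic>
#include <cstdint>
#include <cstdio>

// What a volatile jlong/jdouble access must behave like: the whole
// 64-bit value is read and written indivisibly. On a 32-bit target a
// plain int64_t access may be compiled as two 32-bit halves, letting a
// concurrent reader observe a torn value (half old, half new).
std::atomic<int64_t> field(0);

int main() {
  field.store(0x1122334455667788LL);         // indivisible 64-bit write
  int64_t v = field.load();                  // indivisible 64-bit read
  printf("0x%llx\n", (unsigned long long)v);
  return 0;
}

The matching StoreLNode::make_atomic and StoreDNode::make_atomic constructors appear in the second hunk below.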


//------------------------------hash-------------------------------------------
uint LoadNode::hash() const {
  // unroll addition of interesting fields
  return (uintptr_t)in(Control) + (uintptr_t)in(Memory) + (uintptr_t)in(Address);
}
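LoadNode::hash folds the control, memory, and address inputs into one value so that identical loads land in the same GVN hash bucket and can be commoned into a single node. A toy standalone sketch of that idea; the Node struct here is invented for illustration, not HotSpot's type:

#include <cstdint>
#include <cstdio>

// A toy "node": identity is (opcode, inputs), just as LoadNode treats
// two loads with equal control/memory/address as the same value.
struct Node {
  int   opcode;
  Node* in[3];  // control, memory, address
};

static uintptr_t hash_node(const Node* n) {
  // Mirrors the "unroll addition of interesting fields" trick above.
  return (uintptr_t)n->in[0] + (uintptr_t)n->in[1] + (uintptr_t)n->in[2] + n->opcode;
}

int main() {
  Node c = {1, {0, 0, 0}}, m = {2, {0, 0, 0}}, a = {3, {0, 0, 0}};
  Node load1 = {42, {&c, &m, &a}};
  Node load2 = {42, {&c, &m, &a}};  // same inputs => same hash => commoned
  printf("%d\n", hash_node(&load1) == hash_node(&load2));  // prints 1
  return 0;
}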

static bool skip_through_membars(Compile::AliasType* atp, const TypeInstPtr* tp, bool eliminate_boxing) {
  if ((atp != NULL) && (atp->index() >= Compile::AliasIdxRaw)) {
    // True when the load reads a non-volatile field (relevant while
    // eliminating autoboxing) or a stable array element; such loads may
    // look through intervening membars.
    bool non_volatile = (atp->field() != NULL) && !atp->field()->is_volatile();
    bool is_stable_ary = FoldStableValues &&
                         (tp != NULL) && (tp->isa_aryptr() != NULL) &&
                         tp->isa_aryptr()->is_stable();

    return (eliminate_boxing && non_volatile) || is_stable_ary;
  }

  return false;
}


      val = gvn.transform(new (C) EncodePNode(val, val->bottom_type()->make_narrowoop()));
      return new (C) StoreNNode(ctl, mem, adr, adr_type, val, mo);
    } else if (adr->bottom_type()->is_ptr_to_narrowklass() ||
               (UseCompressedClassPointers && val->bottom_type()->isa_klassptr() &&
                adr->bottom_type()->isa_rawptr())) {
      // Compressed class pointers: encode the Klass* and emit a narrow
      // klass store.
      val = gvn.transform(new (C) EncodePKlassNode(val, val->bottom_type()->make_narrowklass()));
      return new (C) StoreNKlassNode(ctl, mem, adr, adr_type, val, mo);
    }
#endif
    {
      return new (C) StorePNode(ctl, mem, adr, adr_type, val, mo);
    }
  }
  ShouldNotReachHere();
  return (StoreNode*)NULL;
}
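The store side mirrors the load side: EncodeP compresses the full pointer before StoreN writes 32 bits, and EncodePKlass/StoreNKlass do the same for class pointers against the class-space base. A standalone sketch of the encode direction, using the same hypothetical base and shift as the decode sketch earlier; again, not HotSpot code:

#include <cstdint>
#include <cstdio>

// Same hypothetical base/shift as the decode sketch above.
static const uint64_t kHeapBase = 0x800000000ULL;
static const unsigned kShift    = 3;

// Roughly what EncodeP computes before a StoreN: full pointer ->
// 32-bit shifted heap offset.
static uint32_t encode(uint64_t p) {
  return (uint32_t)((p - kHeapBase) >> kShift);
}

int main() {
  uint64_t oop = kHeapBase + (0x1234ULL << kShift);
  printf("0x%x\n", encode(oop));  // prints 0x1234: the offset round-trips
  return 0;
}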

StoreLNode* StoreLNode::make_atomic(Compile *C, Node* ctl, Node* mem, Node* adr, const TypePtr* adr_type, Node* val, MemOrd mo) {
  bool require_atomic = true;
  return new (C) StoreLNode(ctl, mem, adr, adr_type, val, mo, require_atomic);
}

StoreDNode* StoreDNode::make_atomic(Compile *C, Node* ctl, Node* mem, Node* adr, const TypePtr* adr_type, Node* val, MemOrd mo) {
  bool require_atomic = true;
  return new (C) StoreDNode(ctl, mem, adr, adr_type, val, mo, require_atomic);
}


//--------------------------bottom_type----------------------------------------
const Type *StoreNode::bottom_type() const {
  return Type::MEMORY;
}

//------------------------------hash-------------------------------------------
uint StoreNode::hash() const {
  // unroll addition of interesting fields
  //return (uintptr_t)in(Control) + (uintptr_t)in(Memory) + (uintptr_t)in(Address) + (uintptr_t)in(ValueIn);

  // Since they are not commoned, do not hash them:
  // (each store produces a distinct memory state, so merging two
  // identical-looking stores would be wrong)
  return NO_HASH;
}

//------------------------------Ideal------------------------------------------
// Change back-to-back Store(, p, x) -> Store(m, p, y) to Store(m, p, x).
// (The empty slot is the memory edge: when the second store's memory input
// is an earlier store to the same address, the earlier store is dead and
// can be bypassed.)
// When a store immediately follows a relevant allocation/initialization,
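In source terms, the back-to-back-store rewrite described above deletes a store that is overwritten before it can be observed. A small hypothetical example (the struct and field are invented for illustration):

#include <cstdio>

struct Obj { int f; };

int main() {
  Obj o = {0};
  // Back-to-back stores to the same address: in C2's graph the second
  // store's memory input is the first, i.e. Store(Store(m, p, y), p, x).
  o.f = 1;  // y: dead, overwritten before any load
  o.f = 2;  // x: StoreNode::Ideal rewrites the pair to Store(m, p, x)
  printf("%d\n", o.f);  // prints 2 either way; the first store can vanish
  return 0;
}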

