src/share/vm/opto/graphKit.hpp

rev 5661 : 8024921: PPC64 (part 113): Extend Load and Store nodes to know about memory ordering.

*** 508,548 ****
  // Create a memory projection from the call, then set_all_memory.
  void set_all_memory_call(Node* call, bool separate_io_proj = false);

  // Create a LoadNode, reading from the parser's memory state.
  // (Note: require_atomic_access is useful only with T_LONG.)
  Node* make_load(Node* ctl, Node* adr, const Type* t, BasicType bt,
!                 bool require_atomic_access = false) {
    // This version computes alias_index from bottom_type
    return make_load(ctl, adr, t, bt, adr->bottom_type()->is_ptr(),
!                    require_atomic_access);
  }
! Node* make_load(Node* ctl, Node* adr, const Type* t, BasicType bt, const TypePtr* adr_type,
                  bool require_atomic_access = false) {
    // This version computes alias_index from an address type
    assert(adr_type != NULL, "use other make_load factory");
    return make_load(ctl, adr, t, bt, C->get_alias_index(adr_type),
!                    require_atomic_access);
  }
  // This is the base version which is given an alias index.
! Node* make_load(Node* ctl, Node* adr, const Type* t, BasicType bt, int adr_idx,
                  bool require_atomic_access = false);

  // Create & transform a StoreNode and store the effect into the
  // parser's memory state.
  Node* store_to_memory(Node* ctl, Node* adr, Node* val, BasicType bt,
                        const TypePtr* adr_type,
!                       bool require_atomic_access = false) {
    // This version computes alias_index from an address type
    assert(adr_type != NULL, "use other store_to_memory factory");
    return store_to_memory(ctl, adr, val, bt,
                           C->get_alias_index(adr_type),
!                          require_atomic_access);
  }
  // This is the base version which is given alias index
  // Return the new StoreXNode
  Node* store_to_memory(Node* ctl, Node* adr, Node* val, BasicType bt,
                        int adr_idx,
!                       bool require_atomic_access = false);

  // All in one pre-barrier, store, post_barrier
  // Insert a write-barrier'd store.  This is to let generational GC
  // work; we have to flag all oop-stores before the next GC point.
--- 508,562 ----
  // Create a memory projection from the call, then set_all_memory.
  void set_all_memory_call(Node* call, bool separate_io_proj = false);

  // Create a LoadNode, reading from the parser's memory state.
  // (Note: require_atomic_access is useful only with T_LONG.)
+ //
+ // We choose the unordered semantics by default because we have
+ // adapted the `do_put_xxx' and `do_get_xxx' procedures for the case
+ // of volatile fields.
  Node* make_load(Node* ctl, Node* adr, const Type* t, BasicType bt,
!                 bool require_atomic_access, LoadNode::Sem sem) {
    // This version computes alias_index from bottom_type
    return make_load(ctl, adr, t, bt, adr->bottom_type()->is_ptr(),
!                    require_atomic_access, sem);
  }
! Node* make_load(Node* ctl, Node* adr, const Type* t, BasicType bt, const TypePtr* adr_type,
!                 bool require_atomic_access, LoadNode::Sem sem) {
    // This version computes alias_index from an address type
    assert(adr_type != NULL, "use other make_load factory");
    return make_load(ctl, adr, t, bt, C->get_alias_index(adr_type),
!                    require_atomic_access, sem);
  }
  // This is the base version which is given an alias index.
! Node* make_load(Node* ctl, Node* adr, const Type* t, BasicType bt, int adr_idx,
!                 bool require_atomic_access, LoadNode::Sem sem);

  // Create & transform a StoreNode and store the effect into the
  // parser's memory state.
+ //
+ // We must ensure that stores of object references will be visible
+ // only after the object's initialization.  So the clients of this
+ // procedure must indicate that the store requires `release'
+ // semantics, if the stored value is an object reference that might
+ // point to a new object and may become externally visible.
  Node* store_to_memory(Node* ctl, Node* adr, Node* val, BasicType bt,
                        const TypePtr* adr_type,
!                       bool require_atomic_access,
!                       StoreNode::Sem sem) {
    // This version computes alias_index from an address type
    assert(adr_type != NULL, "use other store_to_memory factory");
    return store_to_memory(ctl, adr, val, bt,
                           C->get_alias_index(adr_type),
!                          require_atomic_access, sem);
  }
  // This is the base version which is given alias index
  // Return the new StoreXNode
  Node* store_to_memory(Node* ctl, Node* adr, Node* val, BasicType bt,
                        int adr_idx,
!                       bool require_atomic_access,
!                       StoreNode::Sem);

  // All in one pre-barrier, store, post_barrier
  // Insert a write-barrier'd store.  This is to let generational GC
  // work; we have to flag all oop-stores before the next GC point.
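As context for the hunk above (not part of the webrev itself), a minimal sketch of how a parser client such as the `do_get_xxx'/`do_put_xxx' paths mentioned in the new comment might choose the extra memory-ordering argument. The enumerator names LoadNode::acquire, LoadNode::unordered, StoreNode::release and StoreNode::unordered are assumed here for illustration; they are not shown in this hunk.

  // Sketch only; assumes the Sem enums expose unordered/acquire/release values.
  bool is_vol = field->is_volatile();                    // hypothetical field descriptor
  LoadNode::Sem load_sem = is_vol ? LoadNode::acquire    // volatile read: acquire (assumed name)
                                  : LoadNode::unordered; // default: unordered (assumed name)
  Node* ld = make_load(control(), adr, value_type, bt, adr_type,
                       /*require_atomic_access=*/ is_vol && bt == T_LONG,
                       load_sem);

  StoreNode::Sem store_sem = is_vol ? StoreNode::release // volatile write: release (assumed name)
                                    : StoreNode::unordered;
  store_to_memory(control(), adr, val, bt, adr_type,
                  /*require_atomic_access=*/ is_vol && bt == T_LONG,
                  store_sem);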
*** 560,598 ****
                  Node* adr,   // actual adress to store val at
                  const TypePtr* adr_type,
                  Node* val,
                  const TypeOopPtr* val_type,
                  BasicType bt,
!                 bool use_precise);

  Node* store_oop_to_object(Node* ctl,
                            Node* obj,   // containing obj
                            Node* adr,   // actual adress to store val at
                            const TypePtr* adr_type,
                            Node* val,
                            const TypeOopPtr* val_type,
!                           BasicType bt) {
!   return store_oop(ctl, obj, adr, adr_type, val, val_type, bt, false);
  }

  Node* store_oop_to_array(Node* ctl,
                           Node* obj,   // containing obj
                           Node* adr,   // actual adress to store val at
                           const TypePtr* adr_type,
                           Node* val,
                           const TypeOopPtr* val_type,
!                          BasicType bt) {
!   return store_oop(ctl, obj, adr, adr_type, val, val_type, bt, true);
  }

  // Could be an array or object we don't know at compile time (unsafe ref.)
  Node* store_oop_to_unknown(Node* ctl,
                             Node* obj,   // containing obj
                             Node* adr,   // actual adress to store val at
                             const TypePtr* adr_type,
                             Node* val,
!                            BasicType bt);

  // For the few case where the barriers need special help
  void pre_barrier(bool do_load, Node* ctl,
                   Node* obj, Node* adr, uint adr_idx, Node* val, const TypeOopPtr* val_type,
                   Node* pre_val,
--- 574,616 ----
                  Node* adr,   // actual adress to store val at
                  const TypePtr* adr_type,
                  Node* val,
                  const TypeOopPtr* val_type,
                  BasicType bt,
!                 bool use_precise,
!                 StoreNode::Sem sem);

  Node* store_oop_to_object(Node* ctl,
                            Node* obj,   // containing obj
                            Node* adr,   // actual adress to store val at
                            const TypePtr* adr_type,
                            Node* val,
                            const TypeOopPtr* val_type,
!                           BasicType bt,
!                           StoreNode::Sem sem) {
!   return store_oop(ctl, obj, adr, adr_type, val, val_type, bt, false, sem);
  }

  Node* store_oop_to_array(Node* ctl,
                           Node* obj,   // containing obj
                           Node* adr,   // actual adress to store val at
                           const TypePtr* adr_type,
                           Node* val,
                           const TypeOopPtr* val_type,
!                          BasicType bt,
!                          StoreNode::Sem sem) {
!   return store_oop(ctl, obj, adr, adr_type, val, val_type, bt, true, sem);
  }

  // Could be an array or object we don't know at compile time (unsafe ref.)
  Node* store_oop_to_unknown(Node* ctl,
                             Node* obj,   // containing obj
                             Node* adr,   // actual adress to store val at
                             const TypePtr* adr_type,
                             Node* val,
!                            BasicType bt,
!                            StoreNode::Sem sem);

  // For the few case where the barriers need special help
  void pre_barrier(bool do_load, Node* ctl,
                   Node* obj, Node* adr, uint adr_idx, Node* val, const TypeOopPtr* val_type,
                   Node* pre_val,
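And a small sketch (again, not part of the webrev) of how a caller might use the widened store_oop_to_object signature, following the `release'-semantics rule quoted in the first hunk; StoreNode::release and StoreNode::unordered are assumed enumerator names used only for illustration.

  // Sketch only: publishing an object reference into a field of `obj'.
  // The stored oop may refer to a freshly allocated object that becomes
  // reachable through `obj', so per the comment above store_to_memory the
  // client requests release semantics (enumerator name assumed).
  bool may_publish_new_object = true;   // conservative choice in this sketch
  StoreNode::Sem sem = may_publish_new_object ? StoreNode::release
                                              : StoreNode::unordered;
  Node* st = store_oop_to_object(control(), obj, adr, adr_type,
                                 val, val_type, T_OBJECT, sem);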