100
101 // Construct a node which can be used to get incoming state for
102 // on-stack replacement (OSR).
103 Node *Parse::fetch_interpreter_state(int index,
104 BasicType bt,
105 Node *local_addrs,
106 Node *local_addrs_base) {
107 Node *mem = memory(Compile::AliasIdxRaw);
108 Node *adr = basic_plus_adr( local_addrs_base, local_addrs, -index*wordSize );
109 Node *ctl = control();
110
111 // Very similar to LoadNode::make, except we handle un-aligned longs and
112 // doubles on Sparc. Intel can handle them just fine directly.
113 Node *l = NULL;
114 switch (bt) { // Signature is flattened
115 case T_INT: l = new LoadINode(ctl, mem, adr, TypeRawPtr::BOTTOM, TypeInt::INT, MemNode::unordered); break;
116 case T_FLOAT: l = new LoadFNode(ctl, mem, adr, TypeRawPtr::BOTTOM, Type::FLOAT, MemNode::unordered); break;
117 case T_ADDRESS: l = new LoadPNode(ctl, mem, adr, TypeRawPtr::BOTTOM, TypeRawPtr::BOTTOM, MemNode::unordered); break;
118 case T_OBJECT: {
119 l = new LoadPNode(ctl, mem, adr, TypeRawPtr::BOTTOM, TypeInstPtr::BOTTOM, MemNode::unordered);
120 if (UseLoadBarrier) {
121 l = _gvn.transform(l);
122 l = load_barrier(l, adr);
123 return l;
124 }
125 break;
126 }
127 case T_LONG:
128 case T_DOUBLE: {
129 // Since arguments are in reverse order, the argument address 'adr'
130 // refers to the back half of the long/double. Recompute adr.
131 adr = basic_plus_adr(local_addrs_base, local_addrs, -(index+1)*wordSize);
132 if (Matcher::misaligned_doubles_ok) {
133 l = (bt == T_DOUBLE)
134 ? (Node*)new LoadDNode(ctl, mem, adr, TypeRawPtr::BOTTOM, Type::DOUBLE, MemNode::unordered)
135 : (Node*)new LoadLNode(ctl, mem, adr, TypeRawPtr::BOTTOM, TypeLong::LONG, MemNode::unordered);
136 } else {
137 l = (bt == T_DOUBLE)
138 ? (Node*)new LoadD_unalignedNode(ctl, mem, adr, TypeRawPtr::BOTTOM, MemNode::unordered)
139 : (Node*)new LoadL_unalignedNode(ctl, mem, adr, TypeRawPtr::BOTTOM, MemNode::unordered);
140 }
|
100
101 // Construct a node which can be used to get incoming state for
102 // on-stack replacement (OSR).
103 Node *Parse::fetch_interpreter_state(int index,
104 BasicType bt,
105 Node *local_addrs,
106 Node *local_addrs_base) {
107 Node *mem = memory(Compile::AliasIdxRaw);
108 Node *adr = basic_plus_adr( local_addrs_base, local_addrs, -index*wordSize );
109 Node *ctl = control();
110
111 // Very similar to LoadNode::make, except we handle un-aligned longs and
112 // doubles on Sparc. Intel can handle them just fine directly.
113 Node *l = NULL;
114 switch (bt) { // Signature is flattened
115 case T_INT: l = new LoadINode(ctl, mem, adr, TypeRawPtr::BOTTOM, TypeInt::INT, MemNode::unordered); break;
116 case T_FLOAT: l = new LoadFNode(ctl, mem, adr, TypeRawPtr::BOTTOM, Type::FLOAT, MemNode::unordered); break;
117 case T_ADDRESS: l = new LoadPNode(ctl, mem, adr, TypeRawPtr::BOTTOM, TypeRawPtr::BOTTOM, MemNode::unordered); break;
118 case T_OBJECT: {
119 l = new LoadPNode(ctl, mem, adr, TypeRawPtr::BOTTOM, TypeInstPtr::BOTTOM, MemNode::unordered);
120 if (UseZGC) {
121 l = _gvn.transform(l);
122 l = load_barrier(l, adr);
123 return l;
124 }
125 break;
126 }
127 case T_LONG:
128 case T_DOUBLE: {
129 // Since arguments are in reverse order, the argument address 'adr'
130 // refers to the back half of the long/double. Recompute adr.
131 adr = basic_plus_adr(local_addrs_base, local_addrs, -(index+1)*wordSize);
132 if (Matcher::misaligned_doubles_ok) {
133 l = (bt == T_DOUBLE)
134 ? (Node*)new LoadDNode(ctl, mem, adr, TypeRawPtr::BOTTOM, Type::DOUBLE, MemNode::unordered)
135 : (Node*)new LoadLNode(ctl, mem, adr, TypeRawPtr::BOTTOM, TypeLong::LONG, MemNode::unordered);
136 } else {
137 l = (bt == T_DOUBLE)
138 ? (Node*)new LoadD_unalignedNode(ctl, mem, adr, TypeRawPtr::BOTTOM, MemNode::unordered)
139 : (Node*)new LoadL_unalignedNode(ctl, mem, adr, TypeRawPtr::BOTTOM, MemNode::unordered);
140 }
|