122 }
123 }
124
// Dynamic-type query used by the compiler framework: unlike the base-class
// version, this override identifies the receiver as a LibraryCallKit.
// Use const_cast instead of the original C-style cast, which silently
// stripped const; the named cast makes the const-removal explicit and greppable.
125 virtual LibraryCallKit* is_LibraryCallKit() const { return const_cast<LibraryCallKit*>(this); }
126
// Accessors for the JVM state at the call site and for the intrinsic
// currently being expanded.
127 ciMethod* caller() const { return jvms()->method(); } // method containing the call site
128 int bci() const { return jvms()->bci(); } // bytecode index of the call site
129 LibraryIntrinsic* intrinsic() const { return _intrinsic; } // the intrinsic descriptor
130 vmIntrinsics::ID intrinsic_id() const { return _intrinsic->intrinsic_id(); }
131 ciMethod* callee() const { return _intrinsic->method(); } // the intrinsic method itself
132
// Entry points for expanding the intrinsic (defined elsewhere).
// 'predicate' presumably selects among alternative expansions when an
// intrinsic has more than one — TODO confirm against the definitions.
133 bool try_to_inline(int predicate);
134 Node* try_to_predicate(int predicate);
135
// Push the intrinsic's result (if any) onto the expression stack, unless
// control flow has already stopped. Inline-type results are first boxed to
// an oop when inlining incrementally.
136 void push_result() {
137 // Push the result onto the stack.
138 Node* res = result();
139 if (!stopped() && res != NULL) {
140 BasicType bt = res->bottom_type()->basic_type();
141 if (C->inlining_incrementally() && res->is_ValueType()) {
142 // The caller expects an oop when incrementally inlining an intrinsic that returns an
143 // inline type. Make sure the call is re-executed if the allocation triggers a deoptimization.
144 PreserveReexecuteState preexecs(this);
145 jvms()->set_should_reexecute(true);
146 res = ValueTypePtrNode::make_from_value_type(this, res->as_ValueType());
147 }
148 push_node(bt, res);
149 }
150 }
151
152 private:
// Abort the VM with a diagnostic naming the intrinsic ID the expander
// does not know how to handle.
153 void fatal_unexpected_iid(vmIntrinsics::ID iid) {
154 fatal("unexpected intrinsic %d: %s", iid, vmIntrinsics::name_at(iid));
155 }
156
// Record the expansion's result node; the assert enforces set-at-most-once.
157 void set_result(Node* n) { assert(_result == NULL, "only set once"); _result = n; }
// Overload taking a control region and a merged phi value (defined elsewhere).
158 void set_result(RegionNode* region, PhiNode* value);
159 Node* result() { return _result; }
160
// Stack pointer to restore when the intrinsic call must be re-executed.
161 virtual int reexecute_sp() { return _reexecute_sp; }
162
|
122 }
123 }
124
// Dynamic-type query used by the compiler framework: unlike the base-class
// version, this override identifies the receiver as a LibraryCallKit.
// Use const_cast instead of the original C-style cast, which silently
// stripped const; the named cast makes the const-removal explicit and greppable.
125 virtual LibraryCallKit* is_LibraryCallKit() const { return const_cast<LibraryCallKit*>(this); }
126
// Accessors for the JVM state at the call site and for the intrinsic
// currently being expanded.
127 ciMethod* caller() const { return jvms()->method(); } // method containing the call site
128 int bci() const { return jvms()->bci(); } // bytecode index of the call site
129 LibraryIntrinsic* intrinsic() const { return _intrinsic; } // the intrinsic descriptor
130 vmIntrinsics::ID intrinsic_id() const { return _intrinsic->intrinsic_id(); }
131 ciMethod* callee() const { return _intrinsic->method(); } // the intrinsic method itself
132
// Entry points for expanding the intrinsic (defined elsewhere).
// 'predicate' presumably selects among alternative expansions when an
// intrinsic has more than one — TODO confirm against the definitions.
133 bool try_to_inline(int predicate);
134 Node* try_to_predicate(int predicate);
135
// Push the intrinsic's result (if any) onto the expression stack, unless
// control flow has already stopped. Inline-type results are first boxed to
// an oop when inlining incrementally.
136 void push_result() {
137 // Push the result onto the stack.
138 Node* res = result();
139 if (!stopped() && res != NULL) {
140 BasicType bt = res->bottom_type()->basic_type();
141 if (C->inlining_incrementally() && res->is_ValueType()) {
142 // The caller expects an oop when incrementally inlining an intrinsic that returns an
143 // inline type. Make sure the call is re-executed if the allocation triggers a deoptimization.
144 PreserveReexecuteState preexecs(this);
145 jvms()->set_should_reexecute(true);
146 res = ValueTypePtrNode::make_from_value_type(this, res->as_ValueType());
147 }
148 push_node(bt, res);
149 }
150 }
151
152 private:
// Abort the VM with a diagnostic naming the intrinsic ID the expander
// does not know how to handle.
153 void fatal_unexpected_iid(vmIntrinsics::ID iid) {
154 fatal("unexpected intrinsic %d: %s", iid, vmIntrinsics::name_at(iid));
155 }
156
// Record the expansion's result node; the assert enforces set-at-most-once.
157 void set_result(Node* n) { assert(_result == NULL, "only set once"); _result = n; }
// Overload taking a control region and a merged phi value (defined elsewhere).
158 void set_result(RegionNode* region, PhiNode* value);
159 Node* result() { return _result; }
160
// Stack pointer to restore when the intrinsic call must be re-executed.
161 virtual int reexecute_sp() { return _reexecute_sp; }
162
|