/*
 * Copyright (c) 1997, 2016, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_VM_CODE_COMPILEDIC_HPP
#define SHARE_VM_CODE_COMPILEDIC_HPP

#include "code/nativeInst.hpp"
#include "interpreter/linkResolver.hpp"
#include "oops/compiledICHolder.hpp"

//-----------------------------------------------------------------------------
// The CompiledIC represents a compiled inline cache.
//
// In order to make patching of the inline cache MT-safe, we only allow the following
// transitions (when not at a safepoint):
//
//
//           [1]  --<--  Clean  -->--  [1]
//          /            (null)          \
//         /                              \       /-<-\
//        /         [2]                    \     /     \
//  Interpreted  --------->  Monomorphic       |  [3]
//  (CompiledICHolder*)        (Klass*)        |
//        \                        /            \    /
//    [4]  \                      /  [4]         \->-/
//          \->-  Megamorphic -<-/
//                 (Method*)
//
// The text in parentheses () refers to the value of the inline cache receiver (mov instruction)
//
// The numbers in square brackets refer to the kind of transition:
// [1]: Initial fixup. The receiver is found from debug information
// [2]: Compilation of a method
// [3]: Recompilation of a method (note: only the entry is changed. The Klass* must stay the same)
// [4]: Inline cache miss. We go directly to a megamorphic call.
//
// The class automatically inserts transition stubs (using the InlineCacheBuffer) when a transition
// that would otherwise be MT-unsafe has to be made; the transition then goes through the stub.
//
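// A typical use of the public API below, e.g. when cleaning a call site, might
// look like the following sketch. The surrounding locking and the variable
// names (cm, call_site_addr) are illustrative assumptions, not part of this
// header:
//
//   MutexLocker ml(CompiledIC_lock);
//   CompiledIC* ic = CompiledIC_at(cm, call_site_addr);  // cm is the owning CompiledMethod
//   if (!ic->is_clean() && !ic->is_call_to_compiled()) {
//     ic->set_to_clean();   // forces re-resolution on the next invocation
//   }
//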
class CompiledIC;
class ICStub;

class CompiledICInfo : public StackObj {
 private:
  address _entry;              // entry point for call
  void*   _cached_value;       // Value of cached_value (either in stub or inline cache)
  bool    _is_icholder;        // Is the cached value a CompiledICHolder*
  bool    _is_optimized;       // it is an optimized virtual call (i.e., can be statically bound)
  bool    _to_interpreter;     // Call goes to the interpreter (rather than compiled code)
  bool    _to_aot;             // Call goes to aot code
  bool    _release_icholder;   // The CompiledICHolder* must be released if this info goes unused
 public:
  address entry() const { return _entry; }
  Metadata* cached_metadata() const { assert(!_is_icholder, ""); return (Metadata*)_cached_value; }
  CompiledICHolder* claim_cached_icholder() {
    assert(_is_icholder, "");
    assert(_cached_value != NULL, "must be non-NULL");
    _release_icholder = false;
    CompiledICHolder* icholder = (CompiledICHolder*)_cached_value;
    icholder->claim();
    return icholder;
  }
  bool is_optimized() const { return _is_optimized; }
  bool to_interpreter() const { return _to_interpreter; }
  bool to_aot() const { return _to_aot; }

  void set_compiled_entry(address entry, Klass* klass, bool is_optimized) {
    _entry = entry;
    _cached_value = (void*)klass;
    _to_interpreter = false;
    _to_aot = false;
    _is_icholder = false;
    _is_optimized = is_optimized;
    _release_icholder = false;
  }

  void set_interpreter_entry(address entry, Method* method) {
    _entry = entry;
    _cached_value = (void*)method;
    _to_interpreter = true;
    _to_aot = false;
    _is_icholder = false;
    _is_optimized = true;
    _release_icholder = false;
  }

  void set_aot_entry(address entry, Method* method) {
    _entry = entry;
    _cached_value = (void*)method;
    _to_interpreter = false;
    _to_aot = true;
    _is_icholder = false;
    _is_optimized = true;
    _release_icholder = false;
  }

  void set_icholder_entry(address entry, CompiledICHolder* icholder) {
    _entry = entry;
    _cached_value = (void*)icholder;
    _to_interpreter = true;
    _to_aot = false;
    _is_icholder = true;
    _is_optimized = false;
    _release_icholder = true;
  }

  CompiledICInfo(): _entry(NULL), _cached_value(NULL), _is_icholder(false),
                    _to_interpreter(false), _to_aot(false), _is_optimized(false), _release_icholder(false) {
  }
  ~CompiledICInfo() {
    // In rare cases the info is computed but not used, so release any
    // CompiledICHolder* that was created
    if (_release_icholder) {
      assert(_is_icholder, "must be");
      CompiledICHolder* icholder = (CompiledICHolder*)_cached_value;
      icholder->claim();
      delete icholder;
    }
  }
};
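
// Typical flow (sketch): a CompiledICInfo is filled in by
// CompiledIC::compute_monomorphic_entry() and then handed to
// CompiledIC::set_to_monomorphic() to patch the call site. For example
// (simplified; the surrounding variable names are assumptions for illustration):
//
//   CompiledICInfo info;
//   CompiledIC::compute_monomorphic_entry(callee_method, receiver_klass,
//                                         ic->is_optimized(), static_bound,
//                                         caller_is_nmethod, info, CHECK);
//   ic->set_to_monomorphic(info);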

class NativeCallWrapper: public ResourceObj {
 public:
  virtual address destination() const = 0;
  virtual address instruction_address() const = 0;
  virtual address next_instruction_address() const = 0;
  virtual address return_address() const = 0;
  virtual address get_resolve_call_stub(bool is_optimized) const = 0;
  virtual void set_destination_mt_safe(address dest) = 0;
  virtual void set_to_interpreted(const methodHandle& method, CompiledICInfo& info) = 0;
  virtual void verify() const = 0;
  virtual void verify_resolve_call(address dest) const = 0;

  virtual bool is_call_to_interpreted(address dest) const = 0;
  virtual bool is_safe_for_patching() const = 0;

  virtual NativeInstruction* get_load_instruction(virtual_call_Relocation* r) const = 0;

  virtual void *get_data(NativeInstruction* instruction) const = 0;
  virtual void set_data(NativeInstruction* instruction, intptr_t data) = 0;
};
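
// Concrete wrappers adapt a platform call instruction to this interface; for
// direct calls this is done by DirectNativeCallWrapper (see compiledIC.cpp).
// A minimal sketch of such an adapter, showing only the simple delegating
// accessors (the class name is hypothetical and the remaining pure virtuals
// are omitted; this is an illustration, not the actual implementation):
//
//   class MyCallWrapper : public NativeCallWrapper {
//     NativeCall* _call;
//    public:
//     MyCallWrapper(NativeCall* call) : _call(call) {}
//     address destination() const              { return _call->destination(); }
//     address instruction_address() const      { return _call->instruction_address(); }
//     address next_instruction_address() const { return _call->next_instruction_address(); }
//     address return_address() const           { return _call->return_address(); }
//     void set_destination_mt_safe(address dest) { _call->set_destination_mt_safe(dest); }
//     // ... remaining overrides elided ...
//   };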

class CompiledIC: public ResourceObj {
  friend class InlineCacheBuffer;
  friend class ICStub;

 private:
  NativeCallWrapper* _call;
  NativeInstruction* _value;        // patchable value cell for this IC
  bool               _is_optimized; // an optimized virtual call (i.e., no compiled IC)
  CompiledMethod*    _method;

  CompiledIC(CompiledMethod* cm, NativeCall* ic_call);
  CompiledIC(RelocIterator* iter);

  void initialize_from_iter(RelocIterator* iter);

  static bool is_icholder_entry(address entry);

  // low-level inline-cache manipulation. Cannot be accessed directly, since it might not be MT-safe
  // to change an inline-cache. These change the underlying inline-cache directly. They *never* make
  // changes to a transition stub.
  void internal_set_ic_destination(address entry_point, bool is_icstub, void* cache, bool is_icholder);
  void set_ic_destination(ICStub* stub);
  void set_ic_destination(address entry_point) {
    assert(_is_optimized, "use set_ic_destination_and_value instead");
    internal_set_ic_destination(entry_point, false, NULL, false);
  }
  // This is only for use by ICStubs, where the type of the value isn't known
  void set_ic_destination_and_value(address entry_point, void* value) {
    internal_set_ic_destination(entry_point, false, value, is_icholder_entry(entry_point));
  }
  void set_ic_destination_and_value(address entry_point, Metadata* value) {
    internal_set_ic_destination(entry_point, false, value, false);
  }
  void set_ic_destination_and_value(address entry_point, CompiledICHolder* value) {
    internal_set_ic_destination(entry_point, false, value, true);
  }

  // Reads the location of the transition stub. This will fail with an assertion if no transition stub
  // is associated with the inline cache.
  address stub_address() const;
  bool is_in_transition_state() const;  // Use InlineCacheBuffer

 public:
  // conversion (machine PC to CompiledIC*)
  friend CompiledIC* CompiledIC_before(CompiledMethod* nm, address return_addr);
  friend CompiledIC* CompiledIC_at(CompiledMethod* nm, address call_site);
  friend CompiledIC* CompiledIC_at(Relocation* call_site);
  friend CompiledIC* CompiledIC_at(RelocIterator* reloc_iter);

  // This is used to release CompiledICHolder*s from nmethods that
  // are about to be freed. The call site might contain stale values
  // of other kinds, so it must be handled carefully.
  static void cleanup_call_site(virtual_call_Relocation* call_site, const CompiledMethod* cm);
  static bool is_icholder_call_site(virtual_call_Relocation* call_site, const CompiledMethod* cm);

  // Return the cached_metadata/destination associated with this inline cache. If the cache currently points
  // to a transition stub, it will read the values from the transition stub.
  void* cached_value() const;
  CompiledICHolder* cached_icholder() const {
    assert(is_icholder_call(), "must be");
    return (CompiledICHolder*) cached_value();
  }
  Metadata* cached_metadata() const {
    assert(!is_icholder_call(), "must be");
    return (Metadata*) cached_value();
  }

  void* get_data() const {
    return _call->get_data(_value);
  }

  void set_data(intptr_t data) {
    _call->set_data(_value, data);
  }

  address ic_destination() const;

  bool is_optimized() const { return _is_optimized; }

  // State
  bool is_clean() const;
  bool is_megamorphic() const;
  bool is_call_to_compiled() const;
  bool is_call_to_interpreted() const;

  bool is_icholder_call() const;

  address end_of_call() { return _call->return_address(); }

  // MT-safe patching of inline caches. Note: It is only safe to call is_xxx while holding the CompiledIC_lock,
  // so that you are guaranteed that no patching takes place. The same goes for verify.
  //
  // Note: We do not provide any direct access to the stub code, to prevent parts of the code
  // from manipulating the inline cache in MT-unsafe ways.
  //
  // They all take a TRAPS argument, since they can cause a GC if the inline-cache buffer is full.
  //
  void set_to_clean(bool in_use = true);
  void set_to_monomorphic(CompiledICInfo& info);
  void clear_ic_stub();

  // Returns true if successful and false otherwise. The call can fail if memory
  // allocation in the code cache fails.
  bool set_to_megamorphic(CallInfo* call_info, Bytecodes::Code bytecode, TRAPS);

  static void compute_monomorphic_entry(const methodHandle& method, Klass* receiver_klass,
                                        bool is_optimized, bool static_bound, bool caller_is_nmethod,
                                        CompiledICInfo& info, TRAPS);

  // Location
  address instruction_address() const { return _call->instruction_address(); }

  // Misc
  void print() PRODUCT_RETURN;
  void print_compiled_ic() PRODUCT_RETURN;
  void verify() PRODUCT_RETURN;
};

inline CompiledIC* CompiledIC_before(CompiledMethod* nm, address return_addr) {
  CompiledIC* c_ic = new CompiledIC(nm, nativeCall_before(return_addr));
  c_ic->verify();
  return c_ic;
}

inline CompiledIC* CompiledIC_at(CompiledMethod* nm, address call_site) {
  CompiledIC* c_ic = new CompiledIC(nm, nativeCall_at(call_site));
  c_ic->verify();
  return c_ic;
}

inline CompiledIC* CompiledIC_at(Relocation* call_site) {
  assert(call_site->type() == relocInfo::virtual_call_type ||
         call_site->type() == relocInfo::opt_virtual_call_type, "wrong reloc. info");
  CompiledIC* c_ic = new CompiledIC(call_site->code(), nativeCall_at(call_site->addr()));
  c_ic->verify();
  return c_ic;
}

inline CompiledIC* CompiledIC_at(RelocIterator* reloc_iter) {
  assert(reloc_iter->type() == relocInfo::virtual_call_type ||
         reloc_iter->type() == relocInfo::opt_virtual_call_type, "wrong reloc. info");
  CompiledIC* c_ic = new CompiledIC(reloc_iter);
  c_ic->verify();
  return c_ic;
}
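
// Example (sketch): walking the virtual call sites of a CompiledMethod and
// converting each relocation into a CompiledIC. The iteration shown here and
// the variable name cm are illustrative assumptions; see the comments on
// CompiledIC above for the actual locking requirements.
//
//   ResourceMark rm;
//   RelocIterator iter(cm);                       // cm is a CompiledMethod*
//   while (iter.next()) {
//     if (iter.type() == relocInfo::virtual_call_type ||
//         iter.type() == relocInfo::opt_virtual_call_type) {
//       CompiledIC* ic = CompiledIC_at(&iter);
//       if (!ic->is_clean()) {
//         ic->print();                            // or inspect / clean the site
//       }
//     }
//   }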

//-----------------------------------------------------------------------------
// The CompiledStaticCall represents a call to a static method in the compiled code.
//
// The transition diagram of a static call site is somewhat simpler than for an inline cache:
//
//
//        -----<----- Clean ----->-----
//       /                             \
//      /                               \
//  compiled code <------------> interpreted code
//
// Clean:            Calls directly to runtime method for fixup
// Compiled code:    Calls directly to compiled code
// Interpreted code: Calls to a stub that sets the Method* reference
//
//
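// A resolution sketch using the classes below (simplified; in the VM this logic
// lives in the runtime resolution code, and the variable names here are
// assumptions for illustration):
//
//   StaticCallInfo info;
//   CompiledStaticCall::compute_entry(callee_method, caller_is_nmethod, info);
//   static_call->set(info);     // patch the call site to the computed entry
//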

class StaticCallInfo {
 private:
  address      _entry;          // Entrypoint
  methodHandle _callee;         // Callee (used when calling interpreter)
  bool         _to_interpreter; // call to interpreted method (otherwise compiled)
  bool         _to_aot;         // call to aot method (otherwise compiled)

  friend class CompiledStaticCall;
  friend class CompiledDirectStaticCall;
  friend class CompiledPltStaticCall;
 public:
  address      entry() const  { return _entry;  }
  methodHandle callee() const { return _callee; }
};

class CompiledStaticCall : public ResourceObj {
 public:
  // Code
  static address emit_to_interp_stub(CodeBuffer &cbuf, address mark = NULL);
  static int to_interp_stub_size();
  static int reloc_to_interp_stub();
  static void emit_to_aot_stub(CodeBuffer &cbuf, address mark = NULL);
  static int to_aot_stub_size();
  static int reloc_to_aot_stub();

  // Compute entry point given a method
  static void compute_entry(const methodHandle& m, bool caller_is_nmethod, StaticCallInfo& info);

 public:
  // Destination address of the call
  virtual address destination() const = 0;

  // Clean static call (will force resolving on next use)
  void set_to_clean();

  // Set state. The entry must be the same as computed by compute_entry.
  // Computation and setting are split up, since the actions are separate during
  // an OptoRuntime::resolve_xxx.
  void set(const StaticCallInfo& info);

  // State
  bool is_clean() const;
  bool is_call_to_compiled() const;
  virtual bool is_call_to_interpreted() const = 0;

  virtual address instruction_address() const = 0;
 protected:
  virtual address resolve_call_stub() const = 0;
  virtual void set_destination_mt_safe(address dest) = 0;
#if INCLUDE_AOT
  virtual void set_to_far(const methodHandle& callee, address entry) = 0;
#endif
  virtual void set_to_interpreted(const methodHandle& callee, address entry) = 0;
  virtual const char* name() const = 0;

  void set_to_compiled(address entry);
};

class CompiledDirectStaticCall : public CompiledStaticCall {
 private:
  friend class CompiledIC;
  friend class DirectNativeCallWrapper;

  // Also used by CompiledIC
  void set_to_interpreted(const methodHandle& callee, address entry);
#if INCLUDE_AOT
  void set_to_far(const methodHandle& callee, address entry);
#endif
  address instruction_address() const { return _call->instruction_address(); }
  void set_destination_mt_safe(address dest) { _call->set_destination_mt_safe(dest); }

  NativeCall* _call;

  CompiledDirectStaticCall(NativeCall* call) : _call(call) {}

 public:
  static inline CompiledDirectStaticCall* before(address return_addr) {
    CompiledDirectStaticCall* st = new CompiledDirectStaticCall(nativeCall_before(return_addr));
    st->verify();
    return st;
  }

  static inline CompiledDirectStaticCall* at(address native_call) {
    CompiledDirectStaticCall* st = new CompiledDirectStaticCall(nativeCall_at(native_call));
    st->verify();
    return st;
  }

  static inline CompiledDirectStaticCall* at(Relocation* call_site) {
    return at(call_site->addr());
  }

  // Delegation
  address destination() const { return _call->destination(); }

  // State
  virtual bool is_call_to_interpreted() const;
  bool is_call_to_far() const;

  // Stub support
  static address find_stub_for(address instruction, bool is_aot);
  address find_stub(bool is_aot);
  static void set_stub_to_clean(static_stub_Relocation* static_stub);

  // Misc.
  void print() PRODUCT_RETURN;
  void verify() PRODUCT_RETURN;

 protected:
  virtual address resolve_call_stub() const;
  virtual const char* name() const { return "CompiledDirectStaticCall"; }
};
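
// Example (sketch): cleaning the direct static call sites of a CompiledMethod,
// analogous to the inline-cache walk shown earlier. Lock/safepoint discipline
// is the caller's responsibility; the iteration and the variable name cm are
// illustrative assumptions.
//
//   ResourceMark rm;
//   RelocIterator iter(cm);
//   while (iter.next()) {
//     if (iter.type() == relocInfo::static_call_type) {
//       CompiledDirectStaticCall* csc = CompiledDirectStaticCall::at(iter.reloc());
//       if (!csc->is_clean()) {
//         csc->set_to_clean();
//       }
//     }
//   }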

#endif // SHARE_VM_CODE_COMPILEDIC_HPP