Print this page
rev 6900 : 8048169: Change 8037816 breaks HS build on PPC64 and CPP-Interpreter platforms
Summary: Fix the matching of format string parameter types to the actual argument types for the PPC64 and CPP-Interpreter files in the same way as 8037816 already did it for all the other files
Reviewed-by: stefank, coleenp, dholmes
Split |
Split |
Close |
Expand all |
Collapse all |
--- old/hotspot/src/cpu/ppc/vm/nativeInst_ppc.cpp
+++ new/hotspot/src/cpu/ppc/vm/nativeInst_ppc.cpp
1 1 /*
2 - * Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
3 - * Copyright 2012, 2013 SAP AG. All rights reserved.
2 + * Copyright (c) 1997, 2014, Oracle and/or its affiliates. All rights reserved.
3 + * Copyright 2012, 2014 SAP AG. All rights reserved.
4 4 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
5 5 *
6 6 * This code is free software; you can redistribute it and/or modify it
7 7 * under the terms of the GNU General Public License version 2 only, as
8 8 * published by the Free Software Foundation.
9 9 *
10 10 * This code is distributed in the hope that it will be useful, but WITHOUT
11 11 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
12 12 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
13 13 * version 2 for more details (a copy is included in the LICENSE file that
14 14 * accompanied this code).
15 15 *
16 16 * You should have received a copy of the GNU General Public License version
17 17 * 2 along with this work; if not, write to the Free Software Foundation,
18 18 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
19 19 *
20 20 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
21 21 * or visit www.oracle.com if you need additional information or have any
22 22 * questions.
23 23 *
24 24 */
25 25
26 26 #include "precompiled.hpp"
27 27 #include "asm/macroAssembler.inline.hpp"
28 28 #include "memory/resourceArea.hpp"
29 29 #include "nativeInst_ppc.hpp"
30 30 #include "oops/oop.inline.hpp"
31 31 #include "runtime/handles.hpp"
32 32 #include "runtime/sharedRuntime.hpp"
33 33 #include "runtime/stubRoutines.hpp"
34 34 #include "utilities/ostream.hpp"
35 35 #ifdef COMPILER1
36 36 #include "c1/c1_Runtime1.hpp"
37 37 #endif
38 38
39 39 // We use an illtrap for marking a method as not_entrant or zombie iff !UseSIGTRAP
40 40 // Work around a C++ compiler bug which changes 'this'
41 41 bool NativeInstruction::is_sigill_zombie_not_entrant_at(address addr) {
42 42 assert(!UseSIGTRAP, "precondition");
43 43 if (*(int*)addr != 0 /*illtrap*/) return false;
44 44 CodeBlob* cb = CodeCache::find_blob_unsafe(addr);
45 45 if (cb == NULL || !cb->is_nmethod()) return false;
46 46 nmethod *nm = (nmethod *)cb;
47 47 // This method is not_entrant or zombie iff the illtrap instruction is
48 48 // located at the verified entry point.
49 49 return nm->verified_entry_point() == addr;
50 50 }
51 51
#ifdef ASSERT
void NativeInstruction::verify() {
  // Make sure code pattern is actually an instruction address:
  // it must be non-NULL and word (4-byte) aligned.
  const address here = addr_at(0);
  if (here == 0 || ((intptr_t)here & 3) != 0) {
    fatal("not an instruction address");
  }
}
#endif // ASSERT
61 61
62 62 // Extract call destination from a NativeCall. The call might use a trampoline stub.
63 63 address NativeCall::destination() const {
64 64 address addr = (address)this;
65 65 address destination = Assembler::bxx_destination(addr);
66 66
67 67 // Do we use a trampoline stub for this call?
68 68 CodeBlob* cb = CodeCache::find_blob_unsafe(addr); // Else we get assertion if nmethod is zombie.
69 69 assert(cb && cb->is_nmethod(), "sanity");
70 70 nmethod *nm = (nmethod *)cb;
71 71 if (nm->stub_contains(destination) && is_NativeCallTrampolineStub_at(destination)) {
72 72 // Yes we do, so get the destination from the trampoline stub.
73 73 const address trampoline_stub_addr = destination;
74 74 destination = NativeCallTrampolineStub_at(trampoline_stub_addr)->destination(nm);
75 75 }
76 76
77 77 return destination;
78 78 }
79 79
// Similar to replace_mt_safe, but just changes the destination. The
// important thing is that free-running threads are able to execute this
// call instruction at all times. Thus, the displacement field must be
// instruction-word-aligned.
//
// Used in the runtime linkage of calls; see class CompiledIC.
//
// Add parameter assert_lock to switch off assertion
// during code generation, where no patching lock is needed.
void NativeCall::set_destination_mt_safe(address dest, bool assert_lock) {
  assert(!assert_lock ||
         (Patching_lock->is_locked() || SafepointSynchronize::is_at_safepoint()),
         "concurrent code patching");

  ResourceMark rm;
  // Only the single branch-and-link word at the call site is rewritten.
  int code_size = 1 * BytesPerInstWord;
  address addr_call = addr_at(0);
  assert(MacroAssembler::is_bl(*(int*)addr_call), "unexpected code at call-site");

  CodeBuffer cb(addr_call, code_size + 1);
  MacroAssembler* a = new MacroAssembler(&cb);

  // Patch the call.
  if (ReoptimizeCallSequences &&
      a->is_within_range_of_b(dest, addr_call)) {
    // Destination is reachable with a direct branch: emit 'bl dest' in place.
    a->bl(dest);
  } else {
    address trampoline_stub_addr = get_trampoline();

    // We did not find a trampoline stub because the current codeblob
    // does not provide this information. The branch will be patched
    // later during a final fixup, when all necessary information is
    // available.
    if (trampoline_stub_addr == 0)
      return;

    // Patch the constant in the call's trampoline stub.
    NativeCallTrampolineStub_at(trampoline_stub_addr)->set_destination(dest);

    // Branch to the trampoline, which loads the target and jumps to it.
    a->bl(trampoline_stub_addr);
  }
  // Make the patched instruction visible to other threads' instruction fetch.
  ICache::ppc64_flush_icache_bytes(addr_call, code_size);
}
123 123
// Find the trampoline stub associated with this call site, or NULL
// when that information is not (yet) available.
address NativeCall::get_trampoline() {
  address call_addr = addr_at(0);

  CodeBlob *code = CodeCache::find_blob(call_addr);
  assert(code != NULL, "Could not find the containing code blob");

  // There are no relocations available when the code gets relocated
  // because of CodeBuffer expansion.
  if (code->relocation_size() == 0)
    return NULL;

  // Fast path: the branch already targets a trampoline stub inside this blob.
  address bl_destination = Assembler::bxx_destination(call_addr);
  if (code->content_contains(bl_destination) &&
      is_NativeCallTrampolineStub_at(bl_destination))
    return bl_destination;

  // If the codeBlob is not a nmethod, this is because we get here from the
  // CodeBlob constructor, which is called within the nmethod constructor.
  return trampoline_stub_Relocation::get_trampoline_for(call_addr, (nmethod*)code);
}
144 144
#ifdef ASSERT
void NativeCall::verify() {
  address addr = addr_at(0);

  if (!NativeCall::is_call_at(addr)) {
    // Pass the pointer through p2i() so the vararg type matches
    // PTR_FORMAT on all platforms (JDK-8048169 / JDK-8037816).
    tty->print_cr("not a NativeCall at " PTR_FORMAT, p2i(addr));
    // TODO: PPC port: Disassembler::decode(addr - 20, addr + 20, tty);
    fatal(err_msg("not a NativeCall at " PTR_FORMAT, p2i(addr)));
  }
}
#endif // ASSERT
156 156
#ifdef ASSERT
void NativeFarCall::verify() {
  address addr = addr_at(0);

  NativeInstruction::verify();
  if (!NativeFarCall::is_far_call_at(addr)) {
    // Pass the pointer through p2i() so the vararg type matches
    // PTR_FORMAT on all platforms (JDK-8048169 / JDK-8037816).
    tty->print_cr("not a NativeFarCall at " PTR_FORMAT, p2i(addr));
    // TODO: PPC port: Disassembler::decode(addr, 20, 20, tty);
    fatal(err_msg("not a NativeFarCall at " PTR_FORMAT, p2i(addr)));
  }
}
#endif // ASSERT
169 169
170 170 address NativeMovConstReg::next_instruction_address() const {
171 171 #ifdef ASSERT
172 172 CodeBlob* nm = CodeCache::find_blob(instruction_address());
173 173 assert(!MacroAssembler::is_set_narrow_oop(addr_at(0), nm->content_begin()), "Should not patch narrow oop here");
174 174 #endif
175 175
176 176 if (MacroAssembler::is_load_const_from_method_toc_at(addr_at(0))) {
177 177 return addr_at(load_const_from_method_toc_instruction_size);
178 178 } else {
179 179 return addr_at(load_const_instruction_size);
180 180 }
181 181 }
182 182
// Read back the constant materialized by this instruction sequence.
intptr_t NativeMovConstReg::data() const {
  address addr = addr_at(0);

  // Case 1: a plain load_const sequence carrying the value inline.
  if (MacroAssembler::is_load_const_at(addr)) {
    return MacroAssembler::get_const(addr);
  }

  CodeBlob* cb = CodeCache::find_blob_unsafe(addr);
  if (MacroAssembler::is_set_narrow_oop(addr, cb->content_begin())) {
    // Case 2: a narrow-oop set; decode the compressed oop to a full value.
    narrowOop no = (narrowOop)MacroAssembler::get_narrow_oop(addr, cb->content_begin());
    return cast_from_oop<intptr_t>(oopDesc::decode_heap_oop(no));
  } else {
    // Case 3: a load from the method's TOC (constant table).
    assert(MacroAssembler::is_load_const_from_method_toc_at(addr), "must be load_const_from_pool");

    address ctable = cb->content_begin();
    int offset = MacroAssembler::get_offset_of_load_const_from_method_toc_at(addr);
    return *(intptr_t *)(ctable + offset);
  }
}
202 202
// Store 'data' into whichever materialization pattern is found at this
// instruction, flushing the icache as needed. Returns the address just
// past the patched sequence (used by set_data to bound reloc iteration).
address NativeMovConstReg::set_data_plain(intptr_t data, CodeBlob *cb) {
  address addr = instruction_address();
  address next_address = NULL;
  if (!cb) cb = CodeCache::find_blob(addr);

  if (cb != NULL && MacroAssembler::is_load_const_from_method_toc_at(addr)) {
    // A load from the method's TOC (ctable).
    assert(cb->is_nmethod(), "must be nmethod");
    const address ctable = cb->content_begin();
    const int toc_offset = MacroAssembler::get_offset_of_load_const_from_method_toc_at(addr);
    // Only the TOC slot is rewritten; the instructions are unchanged,
    // so no icache flush is required here.
    *(intptr_t *)(ctable + toc_offset) = data;
    next_address = addr + BytesPerInstWord;
  } else if (cb != NULL &&
             MacroAssembler::is_calculate_address_from_global_toc_at(addr, cb->content_begin())) {
    // A calculation relative to the global TOC.
    if (MacroAssembler::get_address_of_calculate_address_from_global_toc_at(addr, cb->content_begin()) !=
        (address)data) {
      // patch_... reports the invalidated range relative to 'addr';
      // a negative value means instructions before 'addr' changed too.
      const int invalidated_range =
        MacroAssembler::patch_calculate_address_from_global_toc_at(addr, cb->content_begin(),
                                                                   (address)data);
      const address start = invalidated_range < 0 ? addr + invalidated_range : addr;
      // FIXME:
      const int range = invalidated_range < 0 ? 4 - invalidated_range : 8;
      ICache::ppc64_flush_icache_bytes(start, range);
    }
    next_address = addr + 1 * BytesPerInstWord;
  } else if (MacroAssembler::is_load_const_at(addr)) {
    // A normal 5 instruction load_const code sequence.
    if (MacroAssembler::get_const(addr) != (long)data) {
      // This is not mt safe, ok in methods like CodeBuffer::copy_code().
      MacroAssembler::patch_const(addr, (long)data);
      ICache::ppc64_flush_icache_bytes(addr, load_const_instruction_size);
    }
    next_address = addr + 5 * BytesPerInstWord;
  } else if (MacroAssembler::is_bl(* (int*) addr)) {
    // A single branch-and-link instruction: re-emit it with the new target.
    ResourceMark rm;
    const int code_size = 1 * BytesPerInstWord;
    CodeBuffer cb(addr, code_size + 1);
    MacroAssembler* a = new MacroAssembler(&cb);
    a->bl((address) data);
    ICache::ppc64_flush_icache_bytes(addr, code_size);
    next_address = addr + code_size;
  } else {
    ShouldNotReachHere();
  }

  return next_address;
}
252 252
// Patch the constant in the instruction stream, and keep any associated
// oop/metadata relocation cell in sync with the new value.
void NativeMovConstReg::set_data(intptr_t data) {
  // Store the value into the instruction stream.
  CodeBlob *cb = CodeCache::find_blob(instruction_address());
  address next_address = set_data_plain(data, cb);

  // Also store the value into an oop_Relocation cell, if any.
  if (cb && cb->is_nmethod()) {
    // Walk only the relocations covering the just-patched sequence.
    RelocIterator iter((nmethod *) cb, instruction_address(), next_address);
    oop* oop_addr = NULL;
    Metadata** metadata_addr = NULL;
    while (iter.next()) {
      if (iter.type() == relocInfo::oop_type) {
        oop_Relocation *r = iter.oop_reloc();
        if (oop_addr == NULL) {
          oop_addr = r->oop_addr();
          *oop_addr = cast_to_oop(data);
        } else {
          // Multiple oop relocations must all refer to the same cell.
          assert(oop_addr == r->oop_addr(), "must be only one set-oop here") ;
        }
      }
      if (iter.type() == relocInfo::metadata_type) {
        metadata_Relocation *r = iter.metadata_reloc();
        if (metadata_addr == NULL) {
          metadata_addr = r->metadata_addr();
          *metadata_addr = (Metadata*)data;
        } else {
          assert(metadata_addr == r->metadata_addr(), "must be only one set-metadata here");
        }
      }
    }
  }
}
285 285
// Patch a set_narrow_oop sequence to load the given compressed oop,
// flushing the icache over the instructions that changed.
void NativeMovConstReg::set_narrow_oop(narrowOop data, CodeBlob *code /* = NULL */) {
  address addr = addr_at(0);
  CodeBlob* cb = (code) ? code : CodeCache::find_blob(instruction_address());
  // Nothing to do when the sequence already encodes this value.
  if (MacroAssembler::get_narrow_oop(addr, cb->content_begin()) == (long)data) return;
  // patch_... reports the invalidated range relative to 'addr';
  // a negative value means instructions before 'addr' changed too.
  const int invalidated_range =
    MacroAssembler::patch_set_narrow_oop(addr, cb->content_begin(), (long)data);
  const address start = invalidated_range < 0 ? addr + invalidated_range : addr;
  // FIXME:
  const int range = invalidated_range < 0 ? 4 - invalidated_range : 8;
  ICache::ppc64_flush_icache_bytes(start, range);
}
297 297
// Do not use an assertion here. Let clients decide whether they only
// want this when assertions are enabled.
#ifdef ASSERT
void NativeMovConstReg::verify() {
  address addr = addr_at(0);
  if (! MacroAssembler::is_load_const_at(addr) &&
      ! MacroAssembler::is_load_const_from_method_toc_at(addr)) {
    CodeBlob* cb = CodeCache::find_blob_unsafe(addr); // find_nmethod() asserts if nmethod is zombie.
    if (! (cb != NULL && MacroAssembler::is_calculate_address_from_global_toc_at(addr, cb->content_begin())) &&
        ! (cb != NULL && MacroAssembler::is_set_narrow_oop(addr, cb->content_begin())) &&
        ! MacroAssembler::is_bl(*((int*) addr))) {
      // Pass the pointer through p2i() so the vararg type matches
      // PTR_FORMAT on all platforms (JDK-8048169 / JDK-8037816).
      tty->print_cr("not a NativeMovConstReg at " PTR_FORMAT, p2i(addr));
      // TODO: PPC port: Disassembler::decode(addr, 20, 20, tty);
      fatal(err_msg("not a NativeMovConstReg at " PTR_FORMAT, p2i(addr)));
    }
  }
}
#endif // ASSERT
316 316
// Overwrite the instruction at the verified entry point so that the
// method traps (or branches to 'dest') instead of being entered;
// used to mark an nmethod not_entrant/zombie.
void NativeJump::patch_verified_entry(address entry, address verified_entry, address dest) {
  ResourceMark rm;
  // Exactly one instruction word is replaced, so the patch is atomic.
  int code_size = 1 * BytesPerInstWord;
  CodeBuffer cb(verified_entry, code_size + 1);
  MacroAssembler* a = new MacroAssembler(&cb);
#ifdef COMPILER2
  assert(dest == SharedRuntime::get_handle_wrong_method_stub(), "expected fixed destination of patch");
#endif
  // Patch this nmethod atomically. Always use illtrap/trap in debug build.
  if (DEBUG_ONLY(false &&) a->is_within_range_of_b(dest, a->pc())) {
    a->b(dest);
  } else {
    // The signal handler will continue at dest=OptoRuntime::handle_wrong_method_stub().
    if (TrapBasedNotEntrantChecks) {
      // We use a special trap for marking a method as not_entrant or zombie.
      a->trap_zombie_not_entrant();
    } else {
      // We use an illtrap for marking a method as not_entrant or zombie.
      a->illtrap();
    }
  }
  // Make the patched word visible to other threads' instruction fetch.
  ICache::ppc64_flush_icache_bytes(verified_entry, code_size);
}
340 340
#ifdef ASSERT
void NativeJump::verify() {
  address addr = addr_at(0);

  NativeInstruction::verify();
  if (!NativeJump::is_jump_at(addr)) {
    // Pass the pointer through p2i() so the vararg type matches
    // PTR_FORMAT on all platforms (JDK-8048169 / JDK-8037816).
    tty->print_cr("not a NativeJump at " PTR_FORMAT, p2i(addr));
    // TODO: PPC port: Disassembler::decode(addr, 20, 20, tty);
    fatal(err_msg("not a NativeJump at " PTR_FORMAT, p2i(addr)));
  }
}
#endif // ASSERT
353 353
354 354 //-------------------------------------------------------------------
355 355
356 356 // Call trampoline stubs.
357 357 //
358 358 // Layout and instructions of a call trampoline stub:
359 359 // 0: load the TOC (part 1)
360 360 // 4: load the TOC (part 2)
361 361 // 8: load the call target from the constant pool (part 1)
362 362 // [12: load the call target from the constant pool (part 2, optional)]
363 363 // ..: branch via CTR
364 364 //
365 365
366 366 address NativeCallTrampolineStub::encoded_destination_addr() const {
367 367 address instruction_addr = addr_at(2 * BytesPerInstWord);
368 368 assert(MacroAssembler::is_ld_largeoffset(instruction_addr),
369 369 "must be a ld with large offset (from the constant pool)");
370 370
371 371 return instruction_addr;
372 372 }
373 373
374 374 address NativeCallTrampolineStub::destination(nmethod *nm) const {
375 375 CodeBlob* cb = nm ? nm : CodeCache::find_blob_unsafe(addr_at(0));
376 376 address ctable = cb->content_begin();
377 377
378 378 return *(address*)(ctable + destination_toc_offset());
379 379 }
380 380
// Offset of this stub's call-target slot within the constant table.
int NativeCallTrampolineStub::destination_toc_offset() const {
  return MacroAssembler::get_ld_largeoffset_offset(encoded_destination_addr());
}
384 384
385 385 void NativeCallTrampolineStub::set_destination(address new_destination) {
386 386 CodeBlob* cb = CodeCache::find_blob(addr_at(0));
387 387 address ctable = cb->content_begin();
388 388
389 389 *(address*)(ctable + destination_toc_offset()) = new_destination;
390 390 }
391 391
↓ open down ↓ |
32 lines elided |
↑ open up ↑ |
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX