    fatal("not a NativeJump at " PTR_FORMAT, p2i(addr));
  }
}
#endif // ASSERT


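// Insert an unconditional branch to 'entry' at 'code_pos' and flush the
// instruction cache.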
void NativeGeneralJump::insert_unconditional(address code_pos, address entry) {
  CodeBuffer cb(code_pos, BytesPerInstWord + 1);
  MacroAssembler a(&cb);
  a.b(entry);
  ICache::ppc64_flush_icache_bytes(code_pos, NativeGeneralJump::instruction_size);
}

// MT-safe patching of a jump instruction.
void NativeGeneralJump::replace_mt_safe(address instr_addr, address code_buffer) {
  // Bytes beyond offset NativeGeneralJump::instruction_size are copied by the caller.

  // Finally patch out the jump.
  volatile juint *jump_addr = (volatile juint*)instr_addr;
  // Release not needed because the caller uses invalidate_range after copying the remaining bytes.
  //Atomic::release_store(jump_addr, *((juint*)code_buffer));
  *jump_addr = *((juint*)code_buffer); // Atomically store code over the branch instruction.
  ICache::ppc64_flush_icache_bytes(instr_addr, NativeGeneralJump::instruction_size);
}
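
// Caller-side sketch of the full patching protocol implied by the comments
// above (illustrative only; 'remaining_bytes' and the explicit memcpy are
// assumptions, not part of this file):
//
//   // 1. Copy everything beyond the first instruction word. These bytes are
//   //    not reachable until the branch below is patched in.
//   memcpy(instr_addr + NativeGeneralJump::instruction_size,
//          code_buffer + NativeGeneralJump::instruction_size,
//          remaining_bytes);
//   ICache::invalidate_range(instr_addr + NativeGeneralJump::instruction_size,
//                            remaining_bytes);
//   // 2. Atomically store the new first instruction word over the old branch.
//   NativeGeneralJump::replace_mt_safe(instr_addr, code_buffer);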


//-------------------------------------------------------------------

// Call trampoline stubs.
//
// Layout and instructions of a call trampoline stub:
//    0: load the TOC (part 1)
//    4: load the TOC (part 2)
//    8: load the call target from the constant pool (part 1)
//  [12: load the call target from the constant pool (part 2, optional)]
//   ..: branch via CTR
//
address NativeCallTrampolineStub::encoded_destination_addr() const {
  address instruction_addr = addr_at(0 * BytesPerInstWord);
  if (!MacroAssembler::is_ld_largeoffset(instruction_addr)) {