 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "asm/macroAssembler.hpp"
#include "c1/c1_CodeStubs.hpp"
#include "c1/c1_FrameMap.hpp"
#include "c1/c1_LIRAssembler.hpp"
#include "c1/c1_MacroAssembler.hpp"
#include "c1/c1_Runtime1.hpp"
#include "nativeInst_arm.hpp"
#include "runtime/sharedRuntime.hpp"
#include "utilities/macros.hpp"
#include "vmreg_arm.inline.hpp"
#if INCLUDE_ALL_GCS
#include "gc/g1/g1BarrierSet.hpp"
#endif // INCLUDE_ALL_GCS

#define __ ce->masm()->

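// Slow path taken when an invocation/backedge counter overflows: pass the
// bci and the method to the counter_overflow runtime entry, record debug
// info for the call, and branch back to _continuation.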
void CounterOverflowStub::emit_code(LIR_Assembler* ce) {
  __ bind(_entry);
  ce->store_parameter(_bci, 0);
  ce->store_parameter(_method->as_constant_ptr()->as_metadata(), 1);
  __ call(Runtime1::entry_for(Runtime1::counter_overflow_id), relocInfo::runtime_call_type);
  ce->add_call_info_here(_info);
  ce->verify_oop_map(_info);

  __ b(_continuation);
}


// TODO: ARM - is it possible to inline these stubs into the main code stream?

RangeCheckStub::RangeCheckStub(CodeEmitInfo* info, LIR_Opr index,
                               bool throw_index_out_of_bounds_exception)
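
  // The five outgoing argument values are held in r[0..4]; any argument that
  // the calling convention assigns to a stack slot is stored there, the rest
  // must already be in the expected registers.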
  for (int i = 0; i < 5; i++) {
    VMReg arg = args[i].first();
    if (arg->is_stack()) {
      __ str(r[i], Address(SP, arg->reg2stack() * VMRegImpl::stack_slot_size));
    } else {
      assert(r[i] == arg->as_Register(), "Calling conventions must match");
    }
  }

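  // Emit the static call stub needed for call resolution; if it does not fit,
  // the compilation has bailed out and there is nothing left to emit.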
  ce->emit_static_call_stub();
  if (ce->compilation()->bailed_out()) {
    return; // CodeCache is full
  }
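  // patchable_call returns the offset of the call's return address; it must
  // be the current code offset so that the debug info recorded by
  // add_call_info_here() refers to that return address.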
  int ret_addr_offset = __ patchable_call(SharedRuntime::get_resolve_static_call_stub(), relocInfo::static_call_type);
  assert(ret_addr_offset == __ offset(), "embedded return address not allowed");
  ce->add_call_info_here(info());
  ce->verify_oop_map(info());
  __ b(_continuation);
}

/////////////////////////////////////////////////////////////////////////////
#if INCLUDE_ALL_GCS
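
// Slow-path code for the G1 SATB pre-barrier and the G1 post-barrier.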
void G1PreBarrierStub::emit_code(LIR_Assembler* ce) {
  // At this point we know that marking is in progress.
  // If do_load() is true then we have to emit the
  // load of the previous value; otherwise it has already
  // been loaded into _pre_val.

  __ bind(_entry);
  assert(pre_val()->is_register(), "Precondition.");

  Register pre_val_reg = pre_val()->as_register();

  if (do_load()) {
    ce->mem2reg(addr(), pre_val(), T_OBJECT, patch_code(), info(), false /*wide*/, false /*unaligned*/);
  }

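  // A null previous value need not be recorded; otherwise pass it to the
  // runtime barrier through the single reserved outgoing argument slot.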
  __ cbz(pre_val_reg, _continuation);
  ce->verify_reserved_argument_area_size(1);
  __ str(pre_val_reg, Address(SP));
  __ call(Runtime1::entry_for(Runtime1::g1_pre_barrier_slow_id), relocInfo::runtime_call_type);

  __ b(_continuation);
}

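// Storing a null new value cannot create a cross-region reference, so the
// slow path is skipped; otherwise the address of the updated field is passed
// to the runtime stub in the reserved argument slot.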
void G1PostBarrierStub::emit_code(LIR_Assembler* ce) {
  __ bind(_entry);
  assert(addr()->is_register(), "Precondition.");
  assert(new_val()->is_register(), "Precondition.");
  Register new_val_reg = new_val()->as_register();
  __ cbz(new_val_reg, _continuation);
  ce->verify_reserved_argument_area_size(1);
  __ str(addr()->as_pointer_register(), Address(SP));
  __ call(Runtime1::entry_for(Runtime1::g1_post_barrier_slow_id), relocInfo::runtime_call_type);
  __ b(_continuation);
}

#endif // INCLUDE_ALL_GCS
/////////////////////////////////////////////////////////////////////////////

#undef __