16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "c1/c1_CodeStubs.hpp"
27 #include "c1/c1_FrameMap.hpp"
28 #include "c1/c1_LIRAssembler.hpp"
29 #include "c1/c1_MacroAssembler.hpp"
30 #include "c1/c1_Runtime1.hpp"
31 #include "nativeInst_s390.hpp"
32 #include "runtime/sharedRuntime.hpp"
33 #include "utilities/align.hpp"
34 #include "utilities/macros.hpp"
35 #include "vmreg_s390.inline.hpp"
36 #if INCLUDE_ALL_GCS
37 #include "gc/g1/g1BarrierSet.hpp"
38 #endif // INCLUDE_ALL_GCS
39
40 #define __ ce->masm()->
41 #undef CHECK_BAILOUT
42 #define CHECK_BAILOUT() { if (ce->compilation()->bailed_out()) return; }
43
// Slow-path stub taken when an array range check fails.
//
// info  - debug state at the failing access; must not be NULL. It is
//         cloned below, so the stub owns its own copy independent of
//         the caller's lifetime.
// index - LIR operand holding the offending index.
// throw_index_out_of_bounds_exception - presumably selects which
//         out-of-bounds runtime entry emit_code() targets — confirm
//         against RangeCheckStub::emit_code.
RangeCheckStub::RangeCheckStub(CodeEmitInfo* info, LIR_Opr index,
                               bool throw_index_out_of_bounds_exception) :
  _throw_index_out_of_bounds_exception(throw_index_out_of_bounds_exception),
  _index(index) {
  assert(info != NULL, "must have info");
  // Clone the debug info so the caller may reuse or free its copy.
  _info = new CodeEmitInfo(info);
}
51
52 void RangeCheckStub::emit_code(LIR_Assembler* ce) {
53 __ bind(_entry);
54 if (_info->deoptimize_on_exception()) {
55 address a = Runtime1::entry_for (Runtime1::predicate_failed_trap_id);
56 ce->emit_call_c(a);
57 CHECK_BAILOUT();
|
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "c1/c1_CodeStubs.hpp"
27 #include "c1/c1_FrameMap.hpp"
28 #include "c1/c1_LIRAssembler.hpp"
29 #include "c1/c1_MacroAssembler.hpp"
30 #include "c1/c1_Runtime1.hpp"
31 #include "nativeInst_s390.hpp"
32 #include "runtime/sharedRuntime.hpp"
33 #include "utilities/align.hpp"
34 #include "utilities/macros.hpp"
35 #include "vmreg_s390.inline.hpp"
36
37 #define __ ce->masm()->
38 #undef CHECK_BAILOUT
39 #define CHECK_BAILOUT() { if (ce->compilation()->bailed_out()) return; }
40
// Slow-path stub taken when an array range check fails.
//
// info  - debug state at the failing access; must not be NULL. It is
//         cloned below, so the stub owns its own copy independent of
//         the caller's lifetime.
// index - LIR operand holding the offending index.
// throw_index_out_of_bounds_exception - presumably selects which
//         out-of-bounds runtime entry emit_code() targets — confirm
//         against RangeCheckStub::emit_code.
RangeCheckStub::RangeCheckStub(CodeEmitInfo* info, LIR_Opr index,
                               bool throw_index_out_of_bounds_exception) :
  _throw_index_out_of_bounds_exception(throw_index_out_of_bounds_exception),
  _index(index) {
  assert(info != NULL, "must have info");
  // Clone the debug info so the caller may reuse or free its copy.
  _info = new CodeEmitInfo(info);
}
48
49 void RangeCheckStub::emit_code(LIR_Assembler* ce) {
50 __ bind(_entry);
51 if (_info->deoptimize_on_exception()) {
52 address a = Runtime1::entry_for (Runtime1::predicate_failed_trap_id);
53 ce->emit_call_c(a);
54 CHECK_BAILOUT();
|
434
435 assert((__ offset() + NativeCall::call_far_pcrelative_displacement_offset) % NativeCall::call_far_pcrelative_displacement_alignment
436 "must be aligned");
437
438 ce->emit_static_call_stub();
439
440 // Prepend each BRASL with a nop.
441 __ relocate(relocInfo::static_call_type);
442 __ z_nop();
443 __ z_brasl(Z_R14, SharedRuntime::get_resolve_static_call_stub());
444 ce->add_call_info_here(info());
445 ce->verify_oop_map(info());
446
447 #ifndef PRODUCT
448 __ load_const_optimized(Z_R1_scratch, (address)&Runtime1::_arraycopy_slowcase_cnt);
449 __ add2mem_32(Address(Z_R1_scratch), 1, Z_R0_scratch);
450 #endif
451
452 __ branch_optimized(Assembler::bcondAlways, _continuation);
453 }
454
455
456 ///////////////////////////////////////////////////////////////////////////////////
457 #if INCLUDE_ALL_GCS
458
// Emit the G1 SATB pre-barrier slow path: hand the previous field value
// (an oop) to the g1_pre_barrier_slow_id runtime stub, unless it is NULL.
void G1PreBarrierStub::emit_code(LIR_Assembler* ce) {
  // At this point we know that marking is in progress.
  // If do_load() is true then we have to emit the
  // load of the previous value; otherwise it has already
  // been loaded into _pre_val.
  __ bind(_entry);
  ce->check_reserved_argument_area(16); // RT stub needs 2 spill slots.
  assert(pre_val()->is_register(), "Precondition.");

  Register pre_val_reg = pre_val()->as_register();

  if (do_load()) {
    // Re-load the previous value from the field address; patching and
    // debug info come from the stub's stored state.
    ce->mem2reg(addr(), pre_val(), T_OBJECT, patch_code(), info(), false /*wide*/, false /*unaligned*/);
  }

  // LTGR copies pre_val into the argument register and sets the condition
  // code, so a NULL previous value skips the runtime call entirely.
  __ z_ltgr(Z_R1_scratch, pre_val_reg); // Pass oop in Z_R1_scratch to Runtime1::g1_pre_barrier_slow_id.
  __ branch_optimized(Assembler::bcondZero, _continuation);
  ce->emit_call_c(Runtime1::entry_for (Runtime1::g1_pre_barrier_slow_id));
  CHECK_BAILOUT(); // emit_call_c may bail out the compilation; stop emitting if so.
  __ branch_optimized(Assembler::bcondAlways, _continuation);
}
480
// Emit the G1 post-barrier slow path: pass the updated field's address to
// the g1_post_barrier_slow_id runtime stub (card marking), unless the
// newly stored value is NULL.
void G1PostBarrierStub::emit_code(LIR_Assembler* ce) {
  __ bind(_entry);
  ce->check_reserved_argument_area(16); // RT stub needs 2 spill slots.
  assert(addr()->is_register(), "Precondition.");
  assert(new_val()->is_register(), "Precondition.");
  Register new_val_reg = new_val()->as_register();
  // Load-and-test: storing NULL needs no post barrier, skip the call.
  __ z_ltgr(new_val_reg, new_val_reg);
  __ branch_optimized(Assembler::bcondZero, _continuation);
  // The runtime stub takes the field address in Z_R1_scratch.
  __ z_lgr(Z_R1_scratch, addr()->as_pointer_register());
  ce->emit_call_c(Runtime1::entry_for (Runtime1::g1_post_barrier_slow_id));
  CHECK_BAILOUT(); // emit_call_c may bail out the compilation; stop emitting if so.
  __ branch_optimized(Assembler::bcondAlways, _continuation);
}
494
495 #endif // INCLUDE_ALL_GCS
496
497 #undef __
|
431
432 assert((__ offset() + NativeCall::call_far_pcrelative_displacement_offset) % NativeCall::call_far_pcrelative_displacement_alignment
433 "must be aligned");
434
435 ce->emit_static_call_stub();
436
437 // Prepend each BRASL with a nop.
438 __ relocate(relocInfo::static_call_type);
439 __ z_nop();
440 __ z_brasl(Z_R14, SharedRuntime::get_resolve_static_call_stub());
441 ce->add_call_info_here(info());
442 ce->verify_oop_map(info());
443
444 #ifndef PRODUCT
445 __ load_const_optimized(Z_R1_scratch, (address)&Runtime1::_arraycopy_slowcase_cnt);
446 __ add2mem_32(Address(Z_R1_scratch), 1, Z_R0_scratch);
447 #endif
448
449 __ branch_optimized(Assembler::bcondAlways, _continuation);
450 }
451
452 #undef __
|