11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "asm/macroAssembler.inline.hpp"
27 #include "registerSaver_s390.hpp"
28 #include "gc/g1/g1CardTable.hpp"
29 #include "gc/g1/g1BarrierSet.hpp"
30 #include "gc/g1/g1BarrierSetAssembler.hpp"
31 #include "gc/g1/g1ThreadLocalData.hpp"
32 #include "gc/g1/heapRegion.hpp"
33 #include "interpreter/interp_masm.hpp"
34 #include "runtime/sharedRuntime.hpp"
35 #ifdef COMPILER1
36 #include "c1/c1_LIRAssembler.hpp"
37 #include "c1/c1_MacroAssembler.hpp"
38 #include "gc/g1/c1/g1BarrierSetC1.hpp"
39 #endif
40
41 #define __ masm->
42
43 #define BLOCK_COMMENT(str) if (PrintAssembly) __ block_comment(str)
44
// Generate the G1 SATB (snapshot-at-the-beginning) pre-barrier for a
// reference-array bulk store (e.g. arraycopy): if concurrent marking is
// active, call into the runtime so the old values in the destination range
// can be recorded before they are overwritten.
//   addr  - base address of the destination range (must survive push_frame,
//           hence the Z_R0_scratch assertions below)
//   count - number of elements in the range
// No code is emitted when the destination is statically known to be
// uninitialized (there are no old values worth recording).
45 void G1BarrierSetAssembler::gen_write_ref_array_pre_barrier(MacroAssembler* masm, DecoratorSet decorators,
46 Register addr, Register count) {
47 bool dest_uninitialized = (decorators & AS_DEST_NOT_INITIALIZED) != 0;
48
49 // With G1, don't generate the call if we statically know that the target is uninitialized.
50 if (!dest_uninitialized) {
51 // Is marking active?
52 Label filtered;
53 assert_different_registers(addr, Z_R0_scratch); // would be destroyed by push_frame()
54 assert_different_registers(count, Z_R0_scratch); // would be destroyed by push_frame()
55 Register Rtmp1 = Z_R0_scratch;
56 const int active_offset = in_bytes(G1ThreadLocalData::satb_mark_queue_active_offset());
// The width of the per-thread "marking active" flag is platform-dependent;
// pick the matching load-and-test instruction (sets the condition code).
57 if (in_bytes(SATBMarkQueue::byte_width_of_active()) == 4) {
58 __ load_and_test_int(Rtmp1, Address(Z_thread, active_offset));
59 } else {
60 guarantee(in_bytes(SATBMarkQueue::byte_width_of_active()) == 1, "Assumption");
61 __ load_and_test_byte(Rtmp1, Address(Z_thread, active_offset));
62 }
63 __ z_bre(filtered); // Activity indicator is zero, so there is no marking going on currently.
64
// Preserve the argument registers across the leaf call; this also pushes a frame.
65 RegisterSaver::save_live_registers(masm, RegisterSaver::arg_registers); // Creates frame.
66
// Select the runtime entry matching the element encoding (narrow vs. full oops).
67 if (UseCompressedOops) {
68 __ call_VM_leaf(CAST_FROM_FN_PTR(address, G1BarrierSet::write_ref_array_pre_narrow_oop_entry), addr, count);
69 } else {
70 __ call_VM_leaf(CAST_FROM_FN_PTR(address, G1BarrierSet::write_ref_array_pre_oop_entry), addr, count);
71 }
72
73 RegisterSaver::restore_live_registers(masm, RegisterSaver::arg_registers);
74
75 __ bind(filtered);
76 }
77 }
78
// Generate the G1 post-barrier for a reference-array bulk store: call the
// runtime to process the cards covering [addr, addr+count).
//   addr      - base address of the written range
//   count     - number of elements
//   do_return - if true, emit a tail call: the runtime entry returns
//               directly to the stub's caller instead of back here.
79 void G1BarrierSetAssembler::gen_write_ref_array_post_barrier(MacroAssembler* masm, DecoratorSet decorators,
80 Register addr, Register count, bool do_return) {
81 address entry_point = CAST_FROM_FN_PTR(address, G1BarrierSet::write_ref_array_post_entry);
82 if (!do_return) {
83 assert_different_registers(addr, Z_R0_scratch); // would be destroyed by push_frame()
84 assert_different_registers(count, Z_R0_scratch); // would be destroyed by push_frame()
// Preserve the argument registers across the leaf call; this also pushes a frame.
85 RegisterSaver::save_live_registers(masm, RegisterSaver::arg_registers); // Creates frame.
86 __ call_VM_leaf(entry_point, addr, count);
87 RegisterSaver::restore_live_registers(masm, RegisterSaver::arg_registers);
88 } else {
89 // Tail call: call c and return to stub caller.
// No frame is pushed here: the callee reuses the caller's return address.
90 __ lgr_if_needed(Z_ARG1, addr);
91 __ lgr_if_needed(Z_ARG2, count);
92 __ load_const(Z_R1, entry_point);
93 __ z_br(Z_R1); // Branch without linking, callee will return to stub caller.
94 }
95 }
96
97 void G1BarrierSetAssembler::load_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
98 const Address& src, Register dst, Register tmp1, Register tmp2, Label *L_handle_null) {
99 bool on_oop = type == T_OBJECT || type == T_ARRAY;
100 bool on_weak = (decorators & ON_WEAK_OOP_REF) != 0;
|
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "precompiled.hpp"
26 #include "asm/macroAssembler.inline.hpp"
27 #include "registerSaver_s390.hpp"
28 #include "gc/g1/g1CardTable.hpp"
29 #include "gc/g1/g1BarrierSet.hpp"
30 #include "gc/g1/g1BarrierSetAssembler.hpp"
31 #include "gc/g1/g1BarrierSetRuntime.hpp"
32 #include "gc/g1/g1ThreadLocalData.hpp"
33 #include "gc/g1/heapRegion.hpp"
34 #include "interpreter/interp_masm.hpp"
35 #include "runtime/sharedRuntime.hpp"
36 #ifdef COMPILER1
37 #include "c1/c1_LIRAssembler.hpp"
38 #include "c1/c1_MacroAssembler.hpp"
39 #include "gc/g1/c1/g1BarrierSetC1.hpp"
40 #endif
41
42 #define __ masm->
43
44 #define BLOCK_COMMENT(str) if (PrintAssembly) __ block_comment(str)
45
// Generate the G1 SATB (snapshot-at-the-beginning) pre-barrier for a
// reference-array bulk store (e.g. arraycopy): if concurrent marking is
// active, call into the runtime so the old values in the destination range
// can be recorded before they are overwritten.
//   addr  - base address of the destination range (must survive push_frame,
//           hence the Z_R0_scratch assertions below)
//   count - number of elements in the range
// No code is emitted when the destination is statically known to be
// uninitialized (there are no old values worth recording).
46 void G1BarrierSetAssembler::gen_write_ref_array_pre_barrier(MacroAssembler* masm, DecoratorSet decorators,
47 Register addr, Register count) {
48 bool dest_uninitialized = (decorators & AS_DEST_NOT_INITIALIZED) != 0;
49
50 // With G1, don't generate the call if we statically know that the target is uninitialized.
51 if (!dest_uninitialized) {
52 // Is marking active?
53 Label filtered;
54 assert_different_registers(addr, Z_R0_scratch); // would be destroyed by push_frame()
55 assert_different_registers(count, Z_R0_scratch); // would be destroyed by push_frame()
56 Register Rtmp1 = Z_R0_scratch;
57 const int active_offset = in_bytes(G1ThreadLocalData::satb_mark_queue_active_offset());
// The width of the per-thread "marking active" flag is platform-dependent;
// pick the matching load-and-test instruction (sets the condition code).
58 if (in_bytes(SATBMarkQueue::byte_width_of_active()) == 4) {
59 __ load_and_test_int(Rtmp1, Address(Z_thread, active_offset));
60 } else {
61 guarantee(in_bytes(SATBMarkQueue::byte_width_of_active()) == 1, "Assumption");
62 __ load_and_test_byte(Rtmp1, Address(Z_thread, active_offset));
63 }
64 __ z_bre(filtered); // Activity indicator is zero, so there is no marking going on currently.
65
// Preserve the argument registers across the leaf call; this also pushes a frame.
66 RegisterSaver::save_live_registers(masm, RegisterSaver::arg_registers); // Creates frame.
67
// Select the runtime entry matching the element encoding (narrow vs. full oops).
68 if (UseCompressedOops) {
69 __ call_VM_leaf(CAST_FROM_FN_PTR(address, G1BarrierSetRuntime::write_ref_array_pre_narrow_oop_entry), addr, count);
70 } else {
71 __ call_VM_leaf(CAST_FROM_FN_PTR(address, G1BarrierSetRuntime::write_ref_array_pre_oop_entry), addr, count);
72 }
73
74 RegisterSaver::restore_live_registers(masm, RegisterSaver::arg_registers);
75
76 __ bind(filtered);
77 }
78 }
79
// Generate the G1 post-barrier for a reference-array bulk store: call the
// runtime to process the cards covering [addr, addr+count).
//   addr      - base address of the written range
//   count     - number of elements
//   do_return - if true, emit a tail call: the runtime entry returns
//               directly to the stub's caller instead of back here.
80 void G1BarrierSetAssembler::gen_write_ref_array_post_barrier(MacroAssembler* masm, DecoratorSet decorators,
81 Register addr, Register count, bool do_return) {
82 address entry_point = CAST_FROM_FN_PTR(address, G1BarrierSetRuntime::write_ref_array_post_entry);
83 if (!do_return) {
84 assert_different_registers(addr, Z_R0_scratch); // would be destroyed by push_frame()
85 assert_different_registers(count, Z_R0_scratch); // would be destroyed by push_frame()
// Preserve the argument registers across the leaf call; this also pushes a frame.
86 RegisterSaver::save_live_registers(masm, RegisterSaver::arg_registers); // Creates frame.
87 __ call_VM_leaf(entry_point, addr, count);
88 RegisterSaver::restore_live_registers(masm, RegisterSaver::arg_registers);
89 } else {
90 // Tail call: call c and return to stub caller.
// No frame is pushed here: the callee reuses the caller's return address.
91 __ lgr_if_needed(Z_ARG1, addr);
92 __ lgr_if_needed(Z_ARG2, count);
93 __ load_const(Z_R1, entry_point);
94 __ z_br(Z_R1); // Branch without linking, callee will return to stub caller.
95 }
96 }
97
98 void G1BarrierSetAssembler::load_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
99 const Address& src, Register dst, Register tmp1, Register tmp2, Label *L_handle_null) {
100 bool on_oop = type == T_OBJECT || type == T_ARRAY;
101 bool on_weak = (decorators & ON_WEAK_OOP_REF) != 0;
|
216 if (Roff != noreg && Roff->is_volatile()) {
217 __ z_stg(Roff, Roff->encoding()*BytesPerWord, Z_SP);
218 }
219 if (Rval != noreg && Rval->is_volatile()) {
220 __ z_stg(Rval, Rval->encoding()*BytesPerWord, Z_SP);
221 }
222
223 // Save Rpre_val (result) over runtime call.
224 Register Rpre_save = Rpre_val;
225 if ((Rpre_val == Z_R0_scratch) || (pre_val_needed && Rpre_val->is_volatile())) {
226 guarantee(!Rtmp1->is_volatile() || !Rtmp2->is_volatile(), "oops!");
227 Rpre_save = !Rtmp1->is_volatile() ? Rtmp1 : Rtmp2;
228 }
229 __ lgr_if_needed(Rpre_save, Rpre_val);
230
231 // Push frame to protect top frame with return pc and spilled register values.
232 __ save_return_pc();
233 __ push_frame_abi160(0); // Will use Z_R0 as tmp.
234
235 // Rpre_val may be destroyed by push_frame().
236 __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::g1_wb_pre), Rpre_save, Z_thread);
237
238 __ pop_frame();
239 __ restore_return_pc();
240
241 // Restore spilled values.
242 if (Robj != noreg && Robj->is_volatile()) {
243 __ z_lg(Robj, Robj->encoding()*BytesPerWord, Z_SP);
244 }
245 if (Roff != noreg && Roff->is_volatile()) {
246 __ z_lg(Roff, Roff->encoding()*BytesPerWord, Z_SP);
247 }
248 if (Rval != noreg && Rval->is_volatile()) {
249 __ z_lg(Rval, Rval->encoding()*BytesPerWord, Z_SP);
250 }
251 if (pre_val_needed && Rpre_val->is_volatile()) {
252 __ lgr_if_needed(Rpre_val, Rpre_save);
253 }
254
255 __ bind(filtered);
|
217 if (Roff != noreg && Roff->is_volatile()) {
218 __ z_stg(Roff, Roff->encoding()*BytesPerWord, Z_SP);
219 }
220 if (Rval != noreg && Rval->is_volatile()) {
221 __ z_stg(Rval, Rval->encoding()*BytesPerWord, Z_SP);
222 }
223
224 // Save Rpre_val (result) over runtime call.
225 Register Rpre_save = Rpre_val;
226 if ((Rpre_val == Z_R0_scratch) || (pre_val_needed && Rpre_val->is_volatile())) {
227 guarantee(!Rtmp1->is_volatile() || !Rtmp2->is_volatile(), "oops!");
228 Rpre_save = !Rtmp1->is_volatile() ? Rtmp1 : Rtmp2;
229 }
230 __ lgr_if_needed(Rpre_save, Rpre_val);
231
232 // Push frame to protect top frame with return pc and spilled register values.
233 __ save_return_pc();
234 __ push_frame_abi160(0); // Will use Z_R0 as tmp.
235
236 // Rpre_val may be destroyed by push_frame().
237 __ call_VM_leaf(CAST_FROM_FN_PTR(address, G1BarrierSetRuntime::write_ref_field_pre_entry), Rpre_save, Z_thread);
238
239 __ pop_frame();
240 __ restore_return_pc();
241
242 // Restore spilled values.
243 if (Robj != noreg && Robj->is_volatile()) {
244 __ z_lg(Robj, Robj->encoding()*BytesPerWord, Z_SP);
245 }
246 if (Roff != noreg && Roff->is_volatile()) {
247 __ z_lg(Roff, Roff->encoding()*BytesPerWord, Z_SP);
248 }
249 if (Rval != noreg && Rval->is_volatile()) {
250 __ z_lg(Rval, Rval->encoding()*BytesPerWord, Z_SP);
251 }
252 if (pre_val_needed && Rpre_val->is_volatile()) {
253 __ lgr_if_needed(Rpre_val, Rpre_save);
254 }
255
256 __ bind(filtered);
|
341
342 __ add2reg(Rqueue_index, -wordSize); // Decrement index.
343 __ z_stg(Rqueue_index, qidx_off, Z_thread);
344
345 __ z_stg(Rcard_addr_x, 0, Rqueue_index, Rqueue_buf); // Store card.
346 __ z_bru(filtered);
347
348 __ bind(callRuntime);
349
350 // TODO: do we need a frame? Introduced to be on the safe side.
351 bool needs_frame = true;
352 __ lgr_if_needed(Rcard_addr, Rcard_addr_x); // copy back asap. push_frame will destroy Z_R0_scratch!
353
354 // VM call need frame to access(write) O register.
355 if (needs_frame) {
356 __ save_return_pc();
357 __ push_frame_abi160(0); // Will use Z_R0 as tmp on old CPUs.
358 }
359
360 // Save the live input values.
361 __ call_VM_leaf(CAST_FROM_FN_PTR(address, SharedRuntime::g1_wb_post), Rcard_addr, Z_thread);
362
363 if (needs_frame) {
364 __ pop_frame();
365 __ restore_return_pc();
366 }
367
368 __ bind(filtered);
369
370 BLOCK_COMMENT("} g1_write_barrier_post");
371 }
372
373 void G1BarrierSetAssembler::oop_store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
374 const Address& dst, Register val, Register tmp1, Register tmp2, Register tmp3) {
375 bool on_array = (decorators & IN_HEAP_ARRAY) != 0;
376 bool on_anonymous = (decorators & ON_UNKNOWN_OOP_REF) != 0;
377 bool precise = on_array || on_anonymous;
378 // Load and record the previous value.
379 g1_write_barrier_pre(masm, decorators, &dst, tmp3, val, tmp1, tmp2, false);
380
|
342
343 __ add2reg(Rqueue_index, -wordSize); // Decrement index.
344 __ z_stg(Rqueue_index, qidx_off, Z_thread);
345
346 __ z_stg(Rcard_addr_x, 0, Rqueue_index, Rqueue_buf); // Store card.
347 __ z_bru(filtered);
348
349 __ bind(callRuntime);
350
351 // TODO: do we need a frame? Introduced to be on the safe side.
352 bool needs_frame = true;
353 __ lgr_if_needed(Rcard_addr, Rcard_addr_x); // copy back asap. push_frame will destroy Z_R0_scratch!
354
355 // VM call need frame to access(write) O register.
356 if (needs_frame) {
357 __ save_return_pc();
358 __ push_frame_abi160(0); // Will use Z_R0 as tmp on old CPUs.
359 }
360
361 // Save the live input values.
362 __ call_VM_leaf(CAST_FROM_FN_PTR(address, G1BarrierSetRuntime::write_ref_field_post_entry), Rcard_addr, Z_thread);
363
364 if (needs_frame) {
365 __ pop_frame();
366 __ restore_return_pc();
367 }
368
369 __ bind(filtered);
370
371 BLOCK_COMMENT("} g1_write_barrier_post");
372 }
373
374 void G1BarrierSetAssembler::oop_store_at(MacroAssembler* masm, DecoratorSet decorators, BasicType type,
375 const Address& dst, Register val, Register tmp1, Register tmp2, Register tmp3) {
376 bool on_array = (decorators & IN_HEAP_ARRAY) != 0;
377 bool on_anonymous = (decorators & ON_UNKNOWN_OOP_REF) != 0;
378 bool precise = on_array || on_anonymous;
379 // Load and record the previous value.
380 g1_write_barrier_pre(masm, decorators, &dst, tmp3, val, tmp1, tmp2, false);
381
|