/*
 * Copyright (c) 2008, 2017, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */
  address fast_entry;

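  // Assemble the stub into a fresh buffer blob; fast_entry records the
  // entry PC of the generated accessor.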
  ResourceMark rm;
  BufferBlob* blob = BufferBlob::create(name, BUFFER_SIZE);
  CodeBuffer cbuf(blob);
  MacroAssembler* masm = new MacroAssembler(&cbuf);
  fast_entry = __ pc();

  // Safepoint check
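  // The counter at SafepointSynchronize::safepoint_counter_addr() is
  // incremented when a safepoint begins and again when it ends, so an odd
  // value means a safepoint is in progress. Roughly (a sketch, not the
  // generated code), the fast path is:
  //
  //   cnt = *safepoint_counter;                       // sampled into Rsafept_cnt below
  //   if (cnt & 1) goto slow_case;                    // safepoint in progress
  //   value = obj->field;                             // speculative fast load
  //   if (*safepoint_counter != cnt) goto slow_case;  // raced with a safepoint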
  InlinedAddress safepoint_counter_addr(SafepointSynchronize::safepoint_counter_addr());
  Label slow_case;
  __ ldr_literal(Rsafepoint_counter_addr, safepoint_counter_addr);

#ifndef AARCH64
  __ push(RegisterSet(R0, R3)); // save incoming arguments for slow case
#endif // !AARCH64

  __ ldr_s32(Rsafept_cnt, Address(Rsafepoint_counter_addr));
  __ tbnz(Rsafept_cnt, 0, slow_case);

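  // R1 holds the jobject handle. Weak global handles are tagged in the low
  // bit (JNIHandles::weak_tag_mask), so clear it before dereferencing.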
#ifdef AARCH64
  // If the mask changes we need to ensure that the inverse is still encodable as an immediate
  STATIC_ASSERT(JNIHandles::weak_tag_mask == 1);
  __ andr(R1, R1, ~JNIHandles::weak_tag_mask);
#else
  __ bic(R1, R1, JNIHandles::weak_tag_mask);
#endif
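  // ~1 (all ones except bit 0) is a valid AArch64 logical immediate, so the
  // andr below still encodes as a single instruction.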

  if (os::is_MP()) {
    // Address dependency restricts memory access ordering. It's cheaper than an explicit LoadLoad barrier.
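    // Rtmp1 is known to be zero here (bit 0 of the counter was tested above),
    // so the address below is just R1; the AND only forges a register
    // dependency from the counter load to the handle load.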
    __ andr(Rtmp1, Rsafept_cnt, (unsigned)1);
    __ ldr(Robj, Address(R1, Rtmp1));
  } else {
    __ ldr(Robj, Address(R1));
  }

#ifdef AARCH64
  __ add(Robj, Robj, AsmOperand(R2, lsr, 2));
  Address field_addr = Address(Robj);
#else
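  // R2 holds the jfieldID; the field's byte offset is recovered by shifting
  // it right by 2 (dropping the low jfieldID tag bits) and is folded into
  // the oop to form the field address.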
  Address field_addr;
  if (type != T_BOOLEAN
      && type != T_INT
#ifndef __ABI_HARD__
      && type != T_FLOAT
#endif // !__ABI_HARD__
      ) {
    // Only ldr and ldrb support an embedded shift; other loads do not,
    // so materialize the field address up front.
    __ add(Robj, Robj, AsmOperand(R2, lsr, 2));
    field_addr = Address(Robj);
  } else {
    field_addr = Address(Robj, R2, lsr, 2);
  }
#endif // AARCH64