// size and initialize its code.

#define __ masm->

#ifndef PRODUCT
extern "C" void bad_compiled_vtable_index(JavaThread* thread, oop receiver, int index);
#endif

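// A vtable stub is the target of a megamorphic virtual call: it loads
// the receiver's klass, indexes that klass' vtable at a compile-time
// constant slot, and tail-jumps to the selected method's entry point.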
// Used by compiler only; may use only caller saved, non-argument registers.
VtableStub* VtableStubs::create_vtable_stub(int vtable_index) {

  const int code_length = VtableStub::pd_code_size_limit(true);
  VtableStub* s = new(code_length) VtableStub(true, vtable_index);
  if (s == NULL) { // Indicates OOM in the code cache.
    return NULL;
  }

  ResourceMark rm;
  CodeBuffer cb(s->entry_point(), code_length);
  MacroAssembler* masm = new MacroAssembler(&cb);

#if (!defined(PRODUCT) && defined(COMPILER2))
  if (CountCompiledCalls) {
    // Load the address of the megamorphic call counter.
    __ load_const_optimized_rtn_len(Z_R1_scratch, (long)SharedRuntime::nof_megamorphic_calls_addr(), true);

    // Use generic emitter for direct memory increment.
    // Abuse Z_method as scratch register for generic emitter.
    // It is loaded further down anyway before it is first used.
    __ add2mem_32(Address(Z_R1_scratch), 1, Z_method);
  }
#endif

  assert(VtableStub::receiver_location() == Z_R2->as_VMReg(), "receiver expected in Z_ARG1");

  // Get receiver klass.
  // Must do an explicit check if implicit checks are disabled.
  address npe_addr = __ pc(); // npe == NULL ptr exception
  __ null_check(Z_ARG1, Z_R1_scratch, oopDesc::klass_offset_in_bytes());
  const Register rcvr_klass = Z_R1_scratch;
  __ load_klass(rcvr_klass, Z_ARG1);

  // Set method (in case of interpreted method), and destination address.
  int entry_offset = in_bytes(Klass::vtable_start_offset()) +
                     vtable_index * vtableEntry::size_in_bytes();
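  // The vtable is embedded at a fixed offset in the Klass object itself,
  // one vtableEntry (a Method* slot) per virtual method, which is why
  // entry_offset can be applied directly to the receiver's klass below.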

#ifndef PRODUCT
  if (DebugVtables) {
    Label L;
    // Check offset vs vtable length.
    const Register vtable_idx = Z_R0_scratch;

    __ load_const_optimized_rtn_len(vtable_idx, vtable_index * vtableEntry::size_in_bytes(), true);

    assert(Immediate::is_uimm12(in_bytes(Klass::vtable_length_offset())), "disp too large");
    __ z_cl(vtable_idx, in_bytes(Klass::vtable_length_offset()), rcvr_klass);
    __ z_brl(L);
    __ z_lghi(Z_ARG3, vtable_index); // Debug code, don't optimize.
    __ call_VM(noreg, CAST_FROM_FN_PTR(address, bad_compiled_vtable_index), Z_ARG1, Z_ARG3, false);
    __ bind(L);
  }
#endif

  int v_off = entry_offset + vtableEntry::method_offset_in_bytes();

  // Duplicate safety code from enc_class Java_Dynamic_Call_dynTOC.
  if (Displacement::is_validDisp(v_off)) {
    __ z_lg(Z_method/*method oop*/, v_off, rcvr_klass/*class oop*/);
  } else {
    // Worst case, offset does not fit in displacement field.
    __ load_const(Z_method, v_off); // Z_method temporarily holds the offset value.
    __ z_lg(Z_method/*method oop*/, 0, Z_method/*method offset*/, rcvr_klass/*class oop*/);
  }
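  // z/Architecture base+displacement addressing covers only a limited
  // range (12-bit unsigned, or 20-bit signed with long displacement), so
  // an oversized vtable offset must take the load_const path above.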

#ifndef PRODUCT
  if (DebugVtables) {
    Label L;
    __ z_ltgr(Z_method, Z_method);
    __ z_brne(L);
    __ stop("Vtable entry is ZERO", 102);
    __ bind(L);
  }
#endif

  address ame_addr = __ pc(); // ame = abstract method error

  // Must do an explicit check if implicit checks are disabled.
  __ null_check(Z_method, Z_R1_scratch, in_bytes(Method::from_compiled_offset()));
  __ z_lg(Z_R1_scratch, in_bytes(Method::from_compiled_offset()), Z_method);
  __ z_br(Z_R1_scratch);
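  // The stub ends in a tail-jump through Method::_from_compiled_entry;
  // the callee returns directly to the original caller, never to the stub.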

  masm->flush();

  s->set_exception_points(npe_addr, ame_addr);

  return s;
}

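// An itable stub performs the dispatch for an interface call. Unlike the
// vtable case there is no fixed slot to index: the stub must first locate
// the receiver klass' itable entry for the interface, then load the
// Method* from the method table that the entry refers to.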
VtableStub* VtableStubs::create_itable_stub(int itable_index) {
  const int code_length = VtableStub::pd_code_size_limit(false);
  VtableStub* s = new(code_length) VtableStub(false, itable_index);
  if (s == NULL) { // Indicates OOM in the code cache.
    return NULL;
  }

  ResourceMark rm;
  CodeBuffer cb(s->entry_point(), code_length);
  MacroAssembler* masm = new MacroAssembler(&cb);

#if (!defined(PRODUCT) && defined(COMPILER2))
  if (CountCompiledCalls) {
    // Load the address of the megamorphic call counter.
    __ load_const_optimized_rtn_len(Z_R1_scratch, (long)SharedRuntime::nof_megamorphic_calls_addr(), true);

    // Use generic emitter for direct memory increment.
    // Use Z_tmp_1 as scratch register for generic emitter.
    __ add2mem_32(Address(Z_R1_scratch), 1, Z_tmp_1);
  }
#endif

  assert(VtableStub::receiver_location() == Z_R2->as_VMReg(), "receiver expected in Z_ARG1");

  // Entry arguments:
  //  Z_method: CompiledICHolder (interface and method to call)
  //  Z_ARG1:   Receiver
  NearLabel no_such_interface;
  const Register rcvr_klass = Z_tmp_1,
                 interface  = Z_tmp_2;

  // Get receiver klass.
  // Must do an explicit check if implicit checks are disabled.
  address npe_addr = __ pc(); // npe == NULL ptr exception
  __ null_check(Z_ARG1, Z_R1_scratch, oopDesc::klass_offset_in_bytes());
  __ load_klass(rcvr_klass, Z_ARG1);

  // Receiver subtype check against REFC.
  __ z_lg(interface, Address(Z_method, CompiledICHolder::holder_klass_offset()));
  __ lookup_interface_method(rcvr_klass, interface, noreg,
                             noreg, Z_R1, no_such_interface,
                             /*return_method=*/ false);

  // Get Method* and entry point for the compiler.
  __ z_lg(interface, Address(Z_method, CompiledICHolder::holder_metadata_offset()));
  __ lookup_interface_method(rcvr_klass, interface, itable_index,
                             Z_method, Z_R1, no_such_interface,
                             /*return_method=*/ true);
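  // At this point Z_method holds the resolved Method*; a receiver that
  // does not implement the interface has branched to no_such_interface.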

#ifndef PRODUCT
  if (DebugVtables) {
    Label ok1;
    __ z_ltgr(Z_method, Z_method);
    __ z_brne(ok1);
    __ stop("method is null", 103);
    __ bind(ok1);
  }
#endif

  address ame_addr = __ pc();
  // Must do an explicit check if implicit checks are disabled.
  if (!ImplicitNullChecks) {
    __ compare64_and_branch(Z_method, (intptr_t) 0, Assembler::bcondEqual, no_such_interface);
  }
  __ z_lg(Z_R1_scratch, in_bytes(Method::from_compiled_offset()), Z_method);
  __ z_br(Z_R1_scratch);

  // Handle IncompatibleClassChangeError in itable stubs.
  __ bind(no_such_interface);
  // We force resolving of the call site by jumping to the "handle
  // wrong method" stub, and so let the interpreter runtime do all the
  // dirty work; this also yields a more detailed error message.
  __ load_const_optimized_rtn_len(Z_R1_scratch, (long)SharedRuntime::get_handle_wrong_method_stub(), true);
  __ z_br(Z_R1_scratch);

  masm->flush();

  s->set_exception_points(npe_addr, ame_addr);
  return s;
}

// In order to tune these parameters, run the JVM with VM options
// -XX:+PrintMiscellaneous and -XX:+WizardMode to see information about
// actual itable stubs. Run it with -Xmx31G -XX:+UseCompressedOops.
int VtableStub::pd_code_size_limit(bool is_vtable_stub) {
  int size = DebugVtables ? 216 : 0;
  if (CountCompiledCalls) {
    size += 6 * 4;
  }
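  // Itable stubs need far more room than vtable stubs: the itable scan
  // is a loop, not the single indexed load a vtable stub performs.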
  size += is_vtable_stub ? 36 : 140;
  if (UseCompressedClassPointers) {
    size += MacroAssembler::instr_size_for_decode_klass_not_null();
  }
  return size;
}