
src/hotspot/cpu/s390/vtableStubs_s390.cpp

rev 49017 : 8197405: Improve messages of AbstractMethodErrors and IncompatibleClassChangeErrors.
Reviewed-by: coleenp, dholmes
rev 49018 : [mq]: EditsDavidII.patch

@@ -56,15 +56,17 @@
   }
 
   ResourceMark    rm;
   CodeBuffer      cb(s->entry_point(), code_length);
   MacroAssembler *masm = new MacroAssembler(&cb);
+  int     padding_bytes = 0;
 
 #if (!defined(PRODUCT) && defined(COMPILER2))
   if (CountCompiledCalls) {
     // Count unused bytes
-    __ load_const_optimized_rtn_len(Z_R1_scratch, (long)SharedRuntime::nof_megamorphic_calls_addr(), true);
+    //                  worst case             actual size
+    padding_bytes += __ load_const_size() - __ load_const_optimized_rtn_len(Z_R1_scratch, (long)SharedRuntime::nof_megamorphic_calls_addr(), true);
 
     // Use generic emitter for direct memory increment.
     // Abuse Z_method as scratch register for generic emitter.
     // It is loaded further down anyway before it is first used.
     __ add2mem_32(Address(Z_R1_scratch), 1, Z_method);
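(Editorial note on the pattern introduced in this hunk: the change keeps a running padding_bytes count. Each time the optimized constant-load emitter produces a shorter encoding than the fixed worst case returned by load_const_size(), the difference is recorded, presumably so the stub still fits the precomputed code_length budget used for the CodeBuffer above. A minimal standalone sketch of that accounting idea follows; the ToyEmitter type and its members are invented for illustration and are not HotSpot APIs.)

    #include <cstddef>

    // Illustration only: a stand-in for an assembler that can emit a constant
    // load either in its full worst-case form or in a shorter optimized form.
    struct ToyEmitter {
      static constexpr size_t worst_case_load_size = 12;  // bytes, fixed upper bound
      // Emits the load and returns the number of bytes actually produced.
      size_t emit_load_optimized(long value) {
        return (value >= -0x8000 && value < 0x8000) ? 4 : worst_case_load_size;
      }
    };

    // Mirrors the pattern in the stub generator:
    //   padding_bytes += load_const_size() - load_const_optimized_rtn_len(...);
    size_t emit_and_count_padding(ToyEmitter& e, long value, size_t& padding_bytes) {
      size_t actual = e.emit_load_optimized(value);
      padding_bytes += ToyEmitter::worst_case_load_size - actual;
      return actual;
    }

(At the end of generation, padding_bytes then says how many of the reserved bytes were left unused by the shorter encodings.)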

@@ -88,26 +90,32 @@
   if (DebugVtables) {
     Label L;
     // Check offset vs vtable length.
     const Register vtable_idx = Z_R0_scratch;
 
-    __ load_const_optimized_rtn_len(vtable_idx, vtable_index*vtableEntry::size_in_bytes(), true);
+    // Count unused bytes.
+    //                  worst case             actual size
+    padding_bytes += __ load_const_size() - __ load_const_optimized_rtn_len(vtable_idx, vtable_index*vtableEntry::size_in_bytes(), true);
 
    assert(Immediate::is_uimm12(in_bytes(Klass::vtable_length_offset())), "disp too large");
     __ z_cl(vtable_idx, in_bytes(Klass::vtable_length_offset()), rcvr_klass);
     __ z_brl(L);
     __ z_lghi(Z_ARG3, vtable_index);  // Debug code, don't optimize.
     __ call_VM(noreg, CAST_FROM_FN_PTR(address, bad_compiled_vtable_index), Z_ARG1, Z_ARG3, false);
+    // Count unused bytes (assume worst case here).
+    padding_bytes += 12;
     __ bind(L);
   }
 #endif
 
   int v_off = entry_offset + vtableEntry::method_offset_in_bytes();
 
   // Duplicate safety code from enc_class Java_Dynamic_Call_dynTOC.
   if (Displacement::is_validDisp(v_off)) {
     __ z_lg(Z_method/*method oop*/, v_off, rcvr_klass/*class oop*/);
+    // Account for the load_const in the else path.
+    padding_bytes += __ load_const_size();
   } else {
    // Worst case, offset does not fit in displacement field.
     __ load_const(Z_method, v_off); // Z_method temporarily holds the offset value.
     __ z_lg(Z_method/*method oop*/, 0, Z_method/*method offset*/, rcvr_klass/*class oop*/);
   }
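(Editorial note on the branch above: when the vtable offset v_off fits the instruction's displacement field, it is folded straight into the z_lg memory operand and the else path's load_const is never emitted; the added padding_bytes += __ load_const_size(); charges those saved bytes so both paths account for the same worst-case size. The is_uimm12 assert a few lines earlier checks the 12-bit unsigned displacement used by z_cl; a standalone sketch of such a range check, not the real HotSpot Immediate class, might look like this.)

    #include <cstdint>

    // Illustration of a 12-bit unsigned displacement check in the spirit of
    // Immediate::is_uimm12; the real helper lives in HotSpot's s390 code.
    static inline bool fits_uimm12(int64_t disp) {
      return disp >= 0 && disp <= 0xFFF;   // 0 <= disp < 4096
    }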

@@ -144,14 +152,17 @@
   }
 
   ResourceMark    rm;
   CodeBuffer      cb(s->entry_point(), code_length);
   MacroAssembler *masm = new MacroAssembler(&cb);
+  int     padding_bytes = 0;
 
 #if (!defined(PRODUCT) && defined(COMPILER2))
   if (CountCompiledCalls) {
-    __ load_const_optimized_rtn_len(Z_R1_scratch, (long)SharedRuntime::nof_megamorphic_calls_addr(), true);
+    // Count unused bytes
+    //                  worst case             actual size
+    padding_bytes += __ load_const_size() - __ load_const_optimized_rtn_len(Z_R1_scratch, (long)SharedRuntime::nof_megamorphic_calls_addr(), true);
 
     // Use generic emitter for direct memory increment.
     // Use Z_tmp_1 as scratch register for generic emitter.
     __ add2mem_32((Z_R1_scratch), 1, Z_tmp_1);
   }

@@ -198,17 +209,18 @@
     __ compare64_and_branch(Z_method, (intptr_t) 0, Assembler::bcondEqual, no_such_interface);
   }
   __ z_lg(Z_R1_scratch, in_bytes(Method::from_compiled_offset()), Z_method);
   __ z_br(Z_R1_scratch);
 
-  __ bind(no_such_interface);
   // Handle IncompatibleClassChangeError in itable stubs.
-  // More detailed error message.
-  // We force resolving of the call site by jumping to the "handle
-  // wrong method" stub, and so let the interpreter runtime do all the
-  // dirty work.
-  __ load_const_optimized_rtn_len(Z_R1_scratch, (long)SharedRuntime::get_handle_wrong_method_stub(), true);
+  __ bind(no_such_interface);
+  // We force resolving of the call site by jumping to
+  // the "handle wrong method" stub, and so let the
+  // interpreter runtime do all the dirty work.
+  // Count unused bytes.
+  //                  worst case          actual size
+  padding_bytes += __ load_const_size() - __ load_const_optimized_rtn_len(Z_R1_scratch, (long)SharedRuntime::get_handle_wrong_method_stub(), true);
   __ z_br(Z_R1_scratch);
 
   masm->flush();
 
   s->set_exception_points(npe_addr, ame_addr);
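
(Editorial note on the previous hunk, which carries the point of JDK-8197405: on an itable miss the stub no longer reports the error itself; it tail-jumps to SharedRuntime::get_handle_wrong_method_stub(), forcing the call site to be re-resolved so the interpreter runtime can raise the IncompatibleClassChangeError with the improved message. A rough C++ rendering of that dispatch decision, using simplified stand-in types rather than the real HotSpot declarations:)

    #include <cstddef>

    // Simplified stand-ins for illustration; not the real HotSpot declarations.
    struct Method { void* from_compiled_entry; };
    using StubEntry = void*;

    // Sketch of the itable stub's miss handling: a successful lookup jumps to the
    // resolved method's compiled entry, a miss jumps to the shared "handle wrong
    // method" stub so the runtime re-resolves the call and throws the detailed
    // IncompatibleClassChangeError.
    StubEntry select_itable_target(Method* resolved, StubEntry handle_wrong_method_stub) {
      if (resolved != nullptr) {
        return resolved->from_compiled_entry;  // normal interface dispatch
      }
      return handle_wrong_method_stub;         // no_such_interface path
    }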