--- old/src/hotspot/cpu/s390/vtableStubs_s390.cpp	2018-02-27 10:50:31.293655000 +0100
+++ new/src/hotspot/cpu/s390/vtableStubs_s390.cpp	2018-02-27 10:50:30.910624000 +0100
@@ -58,11 +58,13 @@
   ResourceMark    rm;
   CodeBuffer      cb(s->entry_point(), code_length);
   MacroAssembler *masm = new MacroAssembler(&cb);
+  int padding_bytes = 0;
 
 #if (!defined(PRODUCT) && defined(COMPILER2))
   if (CountCompiledCalls) {
     // Count unused bytes
-    __ load_const_optimized_rtn_len(Z_R1_scratch, (long)SharedRuntime::nof_megamorphic_calls_addr(), true);
+    //                  worst case             actual size
+    padding_bytes += __ load_const_size() - __ load_const_optimized_rtn_len(Z_R1_scratch, (long)SharedRuntime::nof_megamorphic_calls_addr(), true);
 
     // Use generic emitter for direct memory increment.
     // Abuse Z_method as scratch register for generic emitter.
@@ -90,13 +92,17 @@
     // Check offset vs vtable length.
     const Register vtable_idx = Z_R0_scratch;
 
-    __ load_const_optimized_rtn_len(vtable_idx, vtable_index*vtableEntry::size_in_bytes(), true);
+    // Count unused bytes.
+    //                  worst case             actual size
+    padding_bytes += __ load_const_size() - __ load_const_optimized_rtn_len(vtable_idx, vtable_index*vtableEntry::size_in_bytes(), true);
 
     assert(Immediate::is_uimm12(in_bytes(Klass::vtable_length_offset())), "disp to large");
     __ z_cl(vtable_idx, in_bytes(Klass::vtable_length_offset()), rcvr_klass);
     __ z_brl(L);
     __ z_lghi(Z_ARG3, vtable_index);  // Debug code, don't optimize.
     __ call_VM(noreg, CAST_FROM_FN_PTR(address, bad_compiled_vtable_index), Z_ARG1, Z_ARG3, false);
+    // Count unused bytes (assume worst case here).
+    padding_bytes += 12;
     __ bind(L);
   }
 #endif
@@ -106,6 +112,8 @@
   // Duplicate safety code from enc_class Java_Dynamic_Call_dynTOC.
   if (Displacement::is_validDisp(v_off)) {
     __ z_lg(Z_method/*method oop*/, v_off, rcvr_klass/*class oop*/);
+    // Account for the load_const in the else path.
+    padding_bytes += __ load_const_size();
   } else {
     // Worse case, offset does not fit in displacement field.
     __ load_const(Z_method, v_off); // Z_method temporarily holds the offset value.
@@ -146,10 +154,13 @@
   ResourceMark    rm;
   CodeBuffer      cb(s->entry_point(), code_length);
   MacroAssembler *masm = new MacroAssembler(&cb);
+  int padding_bytes = 0;
 
 #if (!defined(PRODUCT) && defined(COMPILER2))
   if (CountCompiledCalls) {
-    __ load_const_optimized_rtn_len(Z_R1_scratch, (long)SharedRuntime::nof_megamorphic_calls_addr(), true);
+    // Count unused bytes
+    //                  worst case             actual size
+    padding_bytes += __ load_const_size() - __ load_const_optimized_rtn_len(Z_R1_scratch, (long)SharedRuntime::nof_megamorphic_calls_addr(), true);
 
     // Use generic emitter for direct memory increment.
     // Use Z_tmp_1 as scratch register for generic emitter.
@@ -200,13 +211,14 @@
   __ z_lg(Z_R1_scratch, in_bytes(Method::from_compiled_offset()), Z_method);
   __ z_br(Z_R1_scratch);
 
-  __ bind(no_such_interface);
   // Handle IncompatibleClassChangeError in itable stubs.
-  // More detailed error message.
-  // We force resolving of the call site by jumping to the "handle
-  // wrong method" stub, and so let the interpreter runtime do all the
-  // dirty work.
-  __ load_const_optimized_rtn_len(Z_R1_scratch, (long)SharedRuntime::get_handle_wrong_method_stub(), true);
+  __ bind(no_such_interface);
+  // Count unused bytes
+  //                  worst case             actual size
+  // We force resolving of the call site by jumping to
+  // the "handle wrong method" stub, and so let the
+  // interpreter runtime do all the dirty work.
+  padding_bytes += __ load_const_size() - __ load_const_optimized_rtn_len(Z_R1_scratch, (long)SharedRuntime::get_handle_wrong_method_stub(), true);
   __ z_br(Z_R1_scratch);
 
   masm->flush();
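
The pattern added throughout the patch subtracts the actual emitted length, returned by load_const_optimized_rtn_len(), from the worst-case length returned by load_const_size(), and accumulates the difference in padding_bytes so the stub generator knows how much of its pessimistic code-size estimate went unused. Below is a minimal standalone sketch of that bookkeeping; worst_case_size() and emit_optimized() are hypothetical stand-ins for the MacroAssembler calls, and the byte counts are illustrative only.

#include <cassert>
#include <cstdio>

// Hypothetical stand-in for MacroAssembler::load_const_size(): the fixed,
// worst-case length of an unoptimized 64-bit constant load (e.g. IIHF + IILF).
static int worst_case_size() { return 12; }

// Hypothetical stand-in for load_const_optimized_rtn_len(..., true): emit the
// shortest sequence for the value and return the number of bytes produced.
static int emit_optimized(long value) {
  if (value >= -32768 && value < 32768)  { return 4; }   // e.g. a single LGHI
  if (value >= 0 && value <= 0xffffffffL) { return 6; }  // e.g. a single LLILF
  return 12;                                             // full 64-bit load
}

int main() {
  int padding_bytes = 0;  // bytes of the pessimistic size estimate left unused

  // Same bookkeeping as the patch:  worst case  -  actual size.
  padding_bytes += worst_case_size() - emit_optimized(40);        // +8
  padding_bytes += worst_case_size() - emit_optimized(1L << 40);  // +0

  assert(padding_bytes >= 0);
  printf("padding_bytes = %d\n", padding_bytes);  // 8 unused bytes to pad out
  return 0;
}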