80
81 #if (!defined(PRODUCT) && defined(COMPILER2))
82 if (CountCompiledCalls) {
83 __ incrementl(ExternalAddress((address) SharedRuntime::nof_megamorphic_calls_addr()));
84 }
85 #endif
86
87 // get receiver (need to skip return address on top of stack)
88 assert(VtableStub::receiver_location() == rcx->as_VMReg(), "receiver expected in rcx");
89
90 // get receiver klass
91 address npe_addr = __ pc();
92 __ movptr(rax, Address(rcx, oopDesc::klass_offset_in_bytes()));
93
94 #ifndef PRODUCT
95 if (DebugVtables) {
96 Label L;
97 start_pc = __ pc();
98 // check offset vs vtable length
99 __ cmpl(Address(rax, Klass::vtable_length_offset()), vtable_index*vtableEntry::size());
100 slop_delta = 6 - (__ pc() - start_pc); // cmpl varies in length, depending on data
101 slop_bytes += slop_delta;
102 assert(slop_delta >= 0, "negative slop(%d) encountered, adjust code size estimate!", slop_delta);
103
104 __ jcc(Assembler::greater, L);
105 __ movl(rbx, vtable_index);
106 // VTABLE TODO: find upper bound for call_VM length.
107 start_pc = __ pc();
108 __ call_VM(noreg, CAST_FROM_FN_PTR(address, bad_compiled_vtable_index), rcx, rbx);
109 slop_delta = 500 - (__ pc() - start_pc);
110 slop_bytes += slop_delta;
111 assert(slop_delta >= 0, "negative slop(%d) encountered, adjust code size estimate!", slop_delta);
112 __ bind(L);
113 }
114 #endif // PRODUCT
115
116 const Register method = rbx;
117
118 // load Method* and target address
119 start_pc = __ pc();
120 __ lookup_virtual_method(rax, vtable_index, method);
|
80
// NOTE(review): second copy of the vtable-stub generator fragment; identical
// to the first except the cmpl slop budget is 10 bytes (likely the other
// word-size variant of this platform file). Enclosing function is out of view.

// Optionally count megamorphic call-site hits (C2, non-product builds only).
81 #if (!defined(PRODUCT) && defined(COMPILER2))
82 if (CountCompiledCalls) {
83 __ incrementl(ExternalAddress((address) SharedRuntime::nof_megamorphic_calls_addr()));
84 }
85 #endif
86
// get receiver (need to skip return address on top of stack)
// Calling convention contract: the receiver arrives in rcx.
87
88 assert(VtableStub::receiver_location() == rcx->as_VMReg(), "receiver expected in rcx");
89
// get receiver klass
// npe_addr marks the pc of the first receiver dereference; this load faults
// if the receiver is null (presumably registered as an implicit null check —
// the consumer of npe_addr is outside this fragment, confirm there).
90
91 address npe_addr = __ pc();
92 __ movptr(rax, Address(rcx, oopDesc::klass_offset_in_bytes()));
93
// Debug-only sanity check: verify vtable_index is within the receiver
// klass's vtable length; on failure, report via bad_compiled_vtable_index.
94 #ifndef PRODUCT
95 if (DebugVtables) {
96 Label L;
97 start_pc = __ pc();
// check offset vs vtable length
98
99 __ cmpl(Address(rax, Klass::vtable_length_offset()), vtable_index*vtableEntry::size());
// Slop bookkeeping: the stub's code-size estimate budgets 10 bytes for this
// cmpl; record the unused remainder and assert the budget was not exceeded.
100 slop_delta = 10 - (__ pc() - start_pc); // cmpl varies in length, depending on data
101 slop_bytes += slop_delta;
102 assert(slop_delta >= 0, "negative slop(%d) encountered, adjust code size estimate!", slop_delta);
103
// In-bounds: skip the error path. Otherwise pass index (rbx) and receiver
// (rcx) to the VM error reporter.
104 __ jcc(Assembler::greater, L);
105 __ movl(rbx, vtable_index);
// VTABLE TODO: find upper bound for call_VM length.
106
107 start_pc = __ pc();
108 __ call_VM(noreg, CAST_FROM_FN_PTR(address, bad_compiled_vtable_index), rcx, rbx);
// 500 bytes is a generous placeholder budget for call_VM (see TODO above).
109 slop_delta = 500 - (__ pc() - start_pc);
110 slop_bytes += slop_delta;
111 assert(slop_delta >= 0, "negative slop(%d) encountered, adjust code size estimate!", slop_delta);
112 __ bind(L);
113 }
114 #endif // PRODUCT
115
// The resolved Method* will be materialized in rbx.
116 const Register method = rbx;
117
// load Method* and target address
// Index into the vtable embedded in the klass (rax) to fetch the Method*.
118
119 start_pc = __ pc();
120 __ lookup_virtual_method(rax, vtable_index, method);
|