src/hotspot/cpu/ppc/c1_LIRAssembler_ppc.cpp

*** 2772,2788 ****
      // that the ld can use simm16s to reference the slots of the data.
      mdo_offset_bias = md->byte_offset_of_slot(data, CounterData::count_offset());
      __ add_const_optimized(mdo, mdo, mdo_offset_bias, R0);
    }
  
-   Bytecodes::Code bc = method->java_code_at_bci(bci);
-   const bool callee_is_static = callee->is_loaded() && callee->is_static();
    // Perform additional virtual call profiling for invokevirtual and
!   // invokeinterface bytecodes.
!   if ((bc == Bytecodes::_invokevirtual || bc == Bytecodes::_invokeinterface) &&
!       !callee_is_static &&  // Required for optimized MH invokes.
!       C1ProfileVirtualCalls) {
      assert(op->recv()->is_single_cpu(), "recv must be allocated");
      Register recv = op->recv()->as_register();
      assert_different_registers(mdo, tmp1, recv);
      assert(data->is_VirtualCallData(), "need VirtualCallData for virtual calls");
      ciKlass* known_klass = op->known_holder();
--- 2772,2784 ----
      // that the ld can use simm16s to reference the slots of the data.
      mdo_offset_bias = md->byte_offset_of_slot(data, CounterData::count_offset());
      __ add_const_optimized(mdo, mdo, mdo_offset_bias, R0);
    }
  
    // Perform additional virtual call profiling for invokevirtual and
!   // invokeinterface bytecodes
!   if (op->should_profile_receiver_type()) {
      assert(op->recv()->is_single_cpu(), "recv must be allocated");
      Register recv = op->recv()->as_register();
      assert_different_registers(mdo, tmp1, recv);
      assert(data->is_VirtualCallData(), "need VirtualCallData for virtual calls");
      ciKlass* known_klass = op->known_holder();
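
For context: the conditions dropped from this platform file are exactly what a shared predicate on the profile-call op can report instead. Below is a minimal sketch of such a predicate, assuming fields _profiled_method, _profiled_bci, and _profiled_callee on LIR_OpProfileCall; it only restates the checks visible in the removed lines and is not necessarily the exact shared-code implementation.

    // Sketch only: mirrors the platform checks removed above; the field names
    // are assumptions, not confirmed against the shared c1_LIR code.
    bool LIR_OpProfileCall::should_profile_receiver_type() const {
      if (!C1ProfileVirtualCalls) {
        return false;
      }
      Bytecodes::Code bc = _profiled_method->java_code_at_bci(_profiled_bci);
      // Excluding static callees is also what keeps optimized MethodHandle
      // invokes out of receiver-type profiling (see the removed comment).
      bool callee_is_static = _profiled_callee->is_loaded() && _profiled_callee->is_static();
      return (bc == Bytecodes::_invokevirtual || bc == Bytecodes::_invokeinterface) &&
             !callee_is_static;
    }

Keeping this decision in one place means each backend (ppc, x86, aarch64, ...) asks the op whether receiver-type profiling applies, rather than re-deriving the bytecode and callee checks locally.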