1 /*
   2  * Copyright 1997-2009 Sun Microsystems, Inc.  All Rights Reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
  20  * CA 95054 USA or visit www.sun.com if you need additional information or
  21  * have any questions.
  22  *
  23  */
  24 
  25 #include "incls/_precompiled.incl"
  26 #include "incls/_vtableStubs_sparc.cpp.incl"
  27 
  28 // machine-dependent part of VtableStubs: create vtableStub of correct size and
  29 // initialize its code
  30 
  31 #define __ masm->
  32 
  33 
  34 #ifndef PRODUCT
  35 extern "C" void bad_compiled_vtable_index(JavaThread* thread, oopDesc* receiver, int index);
  36 #endif
  37 
  38 
  39 // Used by compiler only; may use only caller saved, non-argument registers
  40 // NOTE:  %%%% if any change is made to this stub make sure that the function
  41 //             pd_code_size_limit is changed to ensure the correct size for VtableStub
  42 VtableStub* VtableStubs::create_vtable_stub(int vtable_index) {
  43   const int sparc_code_length = VtableStub::pd_code_size_limit(true);
  44   VtableStub* s = new(sparc_code_length) VtableStub(true, vtable_index);
  45   ResourceMark rm;
  46   CodeBuffer cb(s->entry_point(), sparc_code_length);
  47   MacroAssembler* masm = new MacroAssembler(&cb);
  48 
  49 #ifndef PRODUCT
  50   if (CountCompiledCalls) {
  51     Address ctr(G5, SharedRuntime::nof_megamorphic_calls_addr());
  52     __ sethi(ctr);
  53     __ ld(ctr, G3_scratch);
  54     __ inc(G3_scratch);
  55     __ st(G3_scratch, ctr);
  56   }
  57 #endif /* PRODUCT */
  58 
  59   assert(VtableStub::receiver_location() == O0->as_VMReg(), "receiver expected in O0");
  60 
  61   // get receiver klass
  62   address npe_addr = __ pc();
  63   __ load_klass(O0, G3_scratch);
  64 
  65   // set methodOop (in case of interpreted method), and destination address
  66   int entry_offset = instanceKlass::vtable_start_offset() + vtable_index*vtableEntry::size();
  67 #ifndef PRODUCT
  68   if (DebugVtables) {
  69     Label L;
  70     // check offset vs vtable length
  71     __ ld(G3_scratch, instanceKlass::vtable_length_offset()*wordSize, G5);
  72     __ cmp(G5, vtable_index*vtableEntry::size());
  73     __ br(Assembler::greaterUnsigned, false, Assembler::pt, L);
  74     __ delayed()->nop();
  75     __ set(vtable_index, O2);
  76     __ call_VM(noreg, CAST_FROM_FN_PTR(address, bad_compiled_vtable_index), O0, O2);
  77     __ bind(L);
  78   }
  79 #endif
  80   int v_off = entry_offset*wordSize + vtableEntry::method_offset_in_bytes();
  81   if( __ is_simm13(v_off) ) {
  82     __ ld_ptr(G3, v_off, G5_method);
  83   } else {
  84     __ set(v_off,G5);
  85     __ ld_ptr(G3, G5, G5_method);
  86   }
  87 
  88 #ifndef PRODUCT
  89   if (DebugVtables) {
  90     Label L;
  91     __ br_notnull(G5_method, false, Assembler::pt, L);
  92     __ delayed()->nop();
  93     __ stop("Vtable entry is ZERO");
  94     __ bind(L);
  95   }
  96 #endif
  97 
  98   address ame_addr = __ pc();  // if the vtable entry is null, the method is abstract
  99                                // NOTE: for vtable dispatches, the vtable entry will never be null.
 100 
 101   __ ld_ptr(G5_method, in_bytes(methodOopDesc::from_compiled_offset()), G3_scratch);
 102 
 103   // jump to target (either compiled code or c2iadapter)
 104   __ JMP(G3_scratch, 0);
 105   // load methodOop (in case we call c2iadapter)
 106   __ delayed()->nop();
 107 
 108   masm->flush();
 109   s->set_exception_points(npe_addr, ame_addr);
 110   return s;
 111 }
 112 
 113 
 114 // NOTE:  %%%% if any change is made to this stub make sure that the function
 115 //             pd_code_size_limit is changed to ensure the correct size for VtableStub
VtableStub* VtableStubs::create_itable_stub(int vtable_index) {
  // Buffer sized by pd_code_size_limit(false); overflow is checked by the
  // guarantee after flush().
  const int sparc_code_length = VtableStub::pd_code_size_limit(false);
  VtableStub* s = new(sparc_code_length) VtableStub(false, vtable_index);
  ResourceMark rm;
  CodeBuffer cb(s->entry_point(), sparc_code_length);
  MacroAssembler* masm = new MacroAssembler(&cb);

  Register G3_klassOop = G3_scratch;
  Register G5_interface = G5;  // Passed in as an argument
  Label search;

  // Entry arguments:
  //  G5_interface: Interface
  //  O0:           Receiver
  assert(VtableStub::receiver_location() == O0->as_VMReg(), "receiver expected in O0");

  // get receiver klass (also an implicit null-check)
  address npe_addr = __ pc();
  __ load_klass(O0, G3_klassOop);
  __ verify_oop(G3_klassOop);

  // Push a new window to get some temp registers.  This chops the head of all
  // my 64-bit %o registers in the LION build, but this is OK because no longs
  // are passed in the %o registers.  Instead, longs are passed in G1 and G4
  // and so those registers are not available here.
  __ save(SP,-frame::register_save_words*wordSize,SP);
  Register I0_receiver = I0;    // Location of receiver after save

#ifndef PRODUCT
  if (CountCompiledCalls) {
    // Bump the megamorphic-call counter using window-local L0/L1 temps.
    Address ctr(L0, SharedRuntime::nof_megamorphic_calls_addr());
    __ sethi(ctr);
    __ ld(ctr, L1);
    __ inc(L1);
    __ st(L1, ctr);
  }
#endif /* PRODUCT */

  // load start of itable entries into L0 register
  const int base = instanceKlass::vtable_start_offset() * wordSize;
  // vtable length (in entries) -> L0; the itable starts right after the vtable.
  __ ld(Address(G3_klassOop, 0, instanceKlass::vtable_length_offset() * wordSize), L0);

  // %%% Could store the aligned, prescaled offset in the klassoop.
  // Scale the entry count into a byte offset past the vtable.
  __ sll(L0, exact_log2(vtableEntry::size() * wordSize), L0);
  // see code for instanceKlass::start_of_itable!
  const int vtable_alignment = align_object_offset(1);
  assert(vtable_alignment == 1 || vtable_alignment == 2, "");
  const int odd_bit = vtableEntry::size() * wordSize;
  if (vtable_alignment == 2) {
    __ and3(L0, odd_bit, L1);   // isolate the odd bit
  }
  __ add(G3_klassOop, L0, L0);
  if (vtable_alignment == 2) {
    __ add(L0, L1, L0);         // double the odd bit, to align up
  }

  // Loop over all itable entries until desired interfaceOop (G5_interface) found
  __ bind(search);

  // %%%% Could load both offset and interface in one ldx, if they were
  // in the opposite order.  This would save a load.
  __ ld_ptr(L0, base + itableOffsetEntry::interface_offset_in_bytes(), L1);

  // If the entry is NULL then we've reached the end of the table
  // without finding the expected interface, so throw an exception
  Label throw_icce;
  __ bpr(Assembler::rc_z, false, Assembler::pn, L1, throw_icce);
  // Delay slot: compare runs whether or not we branch to throw_icce.
  __ delayed()->cmp(G5_interface, L1);
  // Annulled branch: the delay-slot add only executes when looping back.
  __ brx(Assembler::notEqual, true, Assembler::pn, search);
  __ delayed()->add(L0, itableOffsetEntry::size() * wordSize, L0);

  // entry found and L0 points to it, move offset of vtable for interface into L0
  __ ld(L0, base + itableOffsetEntry::offset_offset_in_bytes(), L0);

  // Compute itableMethodEntry and get methodOop(G5_method) and entrypoint(L0) for compiler
  const int method_offset = (itableMethodEntry::size() * wordSize * vtable_index) + itableMethodEntry::method_offset_in_bytes();
  __ add(G3_klassOop, L0, L1);
  if (__ is_simm13(method_offset)) {
    __ ld_ptr(L1, method_offset, G5_method);
  } else {
    // Offset too large for a 13-bit immediate; materialize it first.
    __ set(method_offset, G5_method);
    __ ld_ptr(L1, G5_method, G5_method);
  }

#ifndef PRODUCT
  if (DebugVtables) {
    Label L01;
    __ bpr(Assembler::rc_nz, false, Assembler::pt, G5_method, L01);
    __ delayed()->nop();
    __ stop("methodOop is null");
    __ bind(L01);
    __ verify_oop(G5_method);
  }
#endif

  // If the following load is through a NULL pointer, we'll take an OS
  // exception that should translate into an AbstractMethodError.  We need the
  // window count to be correct at that time.
  __ restore();                 // Restore registers BEFORE the AME point

  address ame_addr = __ pc();   // if the vtable entry is null, the method is abstract
  __ ld_ptr(G5_method, in_bytes(methodOopDesc::from_compiled_offset()), G3_scratch);

  // G5_method:  methodOop
  // O0:         Receiver
  // G3_scratch: entry point
  __ JMP(G3_scratch, 0);
  __ delayed()->nop();

  __ bind(throw_icce);
  // Tail-call the shared ICCE-throwing stub; pop our window in the delay slot.
  Address icce(G3_scratch, StubRoutines::throw_IncompatibleClassChangeError_entry());
  __ jump_to(icce, 0);
  __ delayed()->restore();

  masm->flush();

  guarantee(__ pc() <= s->code_end(), "overflowed buffer");

  s->set_exception_points(npe_addr, ame_addr);
  return s;
}
 237 
 238 
 239 int VtableStub::pd_code_size_limit(bool is_vtable_stub) {
 240   if (TraceJumps || DebugVtables || CountCompiledCalls || VerifyOops) return 1000;
 241   else {
 242     const int slop = 2*BytesPerInstWord; // sethi;add  (needed for long offsets)
 243     if (is_vtable_stub) {
 244       // ld;ld;ld,jmp,nop
 245       const int basic = 5*BytesPerInstWord +
 246                         // shift;add for load_klass
 247                         (UseCompressedOops ? 2*BytesPerInstWord : 0);
 248       return basic + slop;
 249     } else {
 250       // save, ld, ld, sll, and, add, add, ld, cmp, br, add, ld, add, ld, ld, jmp, restore, sethi, jmpl, restore
 251       const int basic = (22 LP64_ONLY(+ 12)) * BytesPerInstWord +
 252                         // shift;add for load_klass
 253                         (UseCompressedOops ? 2*BytesPerInstWord : 0);
 254       return (basic + slop);
 255     }
 256   }
 257 }
 258 
 259 
 260 int VtableStub::pd_code_alignment() {
 261   // UltraSPARC cache line size is 8 instructions:
 262   const unsigned int icache_line_size = 32;
 263   return icache_line_size;
 264 }