src/cpu/sparc/vm/templateTable_sparc.cpp (Sdiff for bug 6829193)

Old version:

   1 /*
   2  * Copyright 1997-2009 Sun Microsystems, Inc.  All Rights Reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
  20  * CA 95054 USA or visit www.sun.com if you need additional information or
  21  * have any questions.
  22  *


1946 // non-volatile memory refs that happen after the volatile write to float up
1947 // before it.
1948 //
1949 // We only put in barriers around volatile refs (they are expensive), not
1950 // _between_ memory refs (that would require us to track the flavor of the
1951 // previous memory refs).  Requirements (2) and (3) require some barriers
1952 // before volatile stores and after volatile loads.  These nearly cover
1953 // requirement (1) but miss the volatile-store-volatile-load case.  This final
1954 // case is placed after volatile-stores although it could just as well go
1955 // before volatile-loads.
1956 void TemplateTable::volatile_barrier(Assembler::Membar_mask_bits order_constraint) {
1957   // Helper function to insert an is-volatile test and memory barrier
1958   // All current sparc implementations run in TSO, needing only StoreLoad
1959   if ((order_constraint & Assembler::StoreLoad) == 0) return;
1960   __ membar( order_constraint );
1961 }
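
As a side note on the placement policy described in the comment above, here is
a minimal C++11 sketch of the same scheme (illustrative only, not HotSpot
code). Under TSO the hardware can only reorder a later load ahead of an
earlier store, so of the four barrier flavors only StoreLoad costs a real
fence; the rest are no-ops, which is exactly what volatile_barrier() encodes.

  #include <atomic>

  std::atomic<int> vfield;  // stands in for a volatile Java field

  void volatile_store(int v) {
    // membar(StoreStore | LoadStore) before the store would be a no-op on
    // TSO, so it is omitted entirely.
    vfield.store(v, std::memory_order_relaxed);
    // The one case TSO can break: a later load passing this store.  This is
    // the StoreLoad barrier placed after volatile stores.
    std::atomic_thread_fence(std::memory_order_seq_cst);
  }

  int volatile_load() {
    int v = vfield.load(std::memory_order_relaxed);
    // membar(LoadLoad | LoadStore) after the load would likewise be a no-op
    // on TSO.
    return v;
  }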
1962 
1963 // ----------------------------------------------------------------------------
1964 void TemplateTable::resolve_cache_and_index(int byte_no, Register Rcache, Register index) {
1965   assert(byte_no == 1 || byte_no == 2, "byte_no out of range");
1966   // Depends on cpCacheOop layout!
1967   const int shift_count = (1 + byte_no)*BitsPerByte;
1968   Label resolved;
1969 
1970   __ get_cache_and_index_at_bcp(Rcache, index, 1);
1971   __ ld_ptr(Rcache, constantPoolCacheOopDesc::base_offset() +
1972                     ConstantPoolCacheEntry::indices_offset(), Lbyte_code);
1973 
1974   __ srl(  Lbyte_code, shift_count, Lbyte_code );
1975   __ and3( Lbyte_code,        0xFF, Lbyte_code );
1976   __ cmp(  Lbyte_code, (int)bytecode());
1977   __ br(   Assembler::equal, false, Assembler::pt, resolved);
1978   __ delayed()->set((int)bytecode(), O1);
1979 
1980   address entry;
1981   switch (bytecode()) {
1982     case Bytecodes::_getstatic      : // fall through
1983     case Bytecodes::_putstatic      : // fall through
1984     case Bytecodes::_getfield       : // fall through
1985     case Bytecodes::_putfield       : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_get_put); break;
1986     case Bytecodes::_invokevirtual  : // fall through
1987     case Bytecodes::_invokespecial  : // fall through
1988     case Bytecodes::_invokestatic   : // fall through
1989     case Bytecodes::_invokeinterface: entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invoke);  break;
1990     default                         : ShouldNotReachHere();                                 break;
1991   }
1992   // first time invocation - must resolve first
1993   __ call_VM(noreg, entry, O1);
1994   // Update registers with resolved info
1995   __ get_cache_and_index_at_bcp(Rcache, index, 1);
1996   __ bind(resolved);
1997 }
1998 
1999 void TemplateTable::load_invoke_cp_cache_entry(int byte_no,
2000                                                Register Rmethod,
2001                                                Register Ritable_index,
2002                                                Register Rflags,
2003                                                bool is_invokevirtual,
2004                                                bool is_invokevfinal) {
2005   // Uses both G3_scratch and G4_scratch
2006   Register Rcache = G3_scratch;
2007   Register Rscratch = G4_scratch;
2008   assert_different_registers(Rcache, Rmethod, Ritable_index);
2009 
2010   ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset();
2011 
2012   // determine constant pool cache field offsets
2013   const int method_offset = in_bytes(
2014     cp_base_offset +
2015       (is_invokevirtual


3113   __ verify_oop(G5_method);
3114   __ call_from_interpreter(Rcall, Gargs, Rret);
3115 
3116 }
3117 
3118 
3119 void TemplateTable::invokedynamic(int byte_no) {
3120   transition(vtos, vtos);
3121 
3122   if (!EnableInvokeDynamic) {
3123     // We should not encounter this bytecode if !EnableInvokeDynamic.
3124     // The verifier will stop it.  However, if we get past the verifier,
3125     // this will stop the thread in a reasonable way, without crashing the JVM.
3126     __ call_VM(noreg, CAST_FROM_FN_PTR(address,
3127                      InterpreterRuntime::throw_IncompatibleClassChangeError));
3128     // the call_VM checks for exceptions, so we should never return here.
3129     __ should_not_reach_here();
3130     return;
3131   }
3132 
3133   __ stop("invokedynamic NYI");//6815692//
3134 }
3135 
3136 
3137 //----------------------------------------------------------------------------------------------------
3138 // Allocation
3139 
3140 void TemplateTable::_new() {
3141   transition(vtos, atos);
3142 
3143   Label slow_case;
3144   Label done;
3145   Label initialize_header;
3146   Label initialize_object;  // including clearing the fields
3147 
3148   Register RallocatedObject = Otos_i;
3149   Register RinstanceKlass = O1;
3150   Register Roffset = O3;
3151   Register Rscratch = O4;
3152 
3153   __ get_2_byte_integer_at_bcp(1, Rscratch, Roffset, InterpreterMacroAssembler::Unsigned);

New version:

   1 /*
   2  * Copyright 1997-2010 Sun Microsystems, Inc.  All Rights Reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
  20  * CA 95054 USA or visit www.sun.com if you need additional information or
  21  * have any questions.
  22  *


1946 // non-volatile memory refs that happen after the volatile write to float up
1947 // before it.
1948 //
1949 // We only put in barriers around volatile refs (they are expensive), not
1950 // _between_ memory refs (that would require us to track the flavor of the
1951 // previous memory refs).  Requirements (2) and (3) require some barriers
1952 // before volatile stores and after volatile loads.  These nearly cover
1953 // requirement (1) but miss the volatile-store-volatile-load case.  This final
1954 // case is placed after volatile-stores although it could just as well go
1955 // before volatile-loads.
1956 void TemplateTable::volatile_barrier(Assembler::Membar_mask_bits order_constraint) {
1957   // Helper function to insert an is-volatile test and memory barrier
1958   // All current sparc implementations run in TSO, needing only StoreLoad
1959   if ((order_constraint & Assembler::StoreLoad) == 0) return;
1960   __ membar( order_constraint );
1961 }
1962 
1963 // ----------------------------------------------------------------------------
1964 void TemplateTable::resolve_cache_and_index(int byte_no, Register Rcache, Register index) {
1965   assert(byte_no == 1 || byte_no == 2, "byte_no out of range");
1966   bool is_invokedynamic = (bytecode() == Bytecodes::_invokedynamic);
1967 
1968   // Depends on cpCacheOop layout!
1969   const int shift_count = (1 + byte_no)*BitsPerByte;
1970   Label resolved;
1971 
1972   __ get_cache_and_index_at_bcp(Rcache, index, 1, is_invokedynamic);
1973   if (is_invokedynamic) {
1974     // We are resolved if the f1 field contains a non-null CallSite object.
1975     __ ld_ptr(Rcache, constantPoolCacheOopDesc::base_offset() +
1976               ConstantPoolCacheEntry::f1_offset(), Lbyte_code);
1977     __ tst(Lbyte_code);
1978     __ br(Assembler::notEqual, false, Assembler::pt, resolved);
1979     __ delayed()->set((int)bytecode(), O1);
1980   } else {
1981     __ ld_ptr(Rcache, constantPoolCacheOopDesc::base_offset() +
1982               ConstantPoolCacheEntry::indices_offset(), Lbyte_code);
1983 
1984     __ srl(  Lbyte_code, shift_count, Lbyte_code );
1985     __ and3( Lbyte_code,        0xFF, Lbyte_code );
1986     __ cmp(  Lbyte_code, (int)bytecode());
1987     __ br(   Assembler::equal, false, Assembler::pt, resolved);
1988     __ delayed()->set((int)bytecode(), O1);
1989   }
1990 
1991   address entry;
1992   switch (bytecode()) {
1993     case Bytecodes::_getstatic      : // fall through
1994     case Bytecodes::_putstatic      : // fall through
1995     case Bytecodes::_getfield       : // fall through
1996     case Bytecodes::_putfield       : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_get_put); break;
1997     case Bytecodes::_invokevirtual  : // fall through
1998     case Bytecodes::_invokespecial  : // fall through
1999     case Bytecodes::_invokestatic   : // fall through
2000     case Bytecodes::_invokeinterface: entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invoke);  break;
2001     case Bytecodes::_invokedynamic  : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokedynamic);  break;
2002     default                         : ShouldNotReachHere();                                 break;
2003   }
2004   // first time invocation - must resolve first
2005   __ call_VM(noreg, entry, O1);
2006   // Update registers with resolved info
2007   __ get_cache_and_index_at_bcp(Rcache, index, 1, is_invokedynamic);
2008   __ bind(resolved);
2009 }
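
For readers not fluent in SPARC assembly, the fast-path test the new version
emits is, in effect, the following (a hedged C++ sketch; the CacheEntry
struct is a hypothetical stand-in, not the real cpCacheOop layout):

  #include <cstdint>

  struct CacheEntry {          // hypothetical stand-in for a cp-cache entry
    void*     f1;              // invokedynamic: resolved CallSite oop
    uintptr_t indices;         // other bytecodes are stamped here on resolve
  };

  bool is_resolved(const CacheEntry* e, int bytecode, int byte_no,
                   bool is_invokedynamic) {
    if (is_invokedynamic)
      return e->f1 != nullptr;             // non-null CallSite => resolved
    const int shift = (1 + byte_no) * 8;   // BitsPerByte == 8
    return ((e->indices >> shift) & 0xFF) == (uintptr_t)bytecode;
  }

If the test fails, the stub falls through to call_VM with the appropriate
InterpreterRuntime resolver and then reloads the cache entry.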
2010 
2011 void TemplateTable::load_invoke_cp_cache_entry(int byte_no,
2012                                                Register Rmethod,
2013                                                Register Ritable_index,
2014                                                Register Rflags,
2015                                                bool is_invokevirtual,
2016                                                bool is_invokevfinal) {
2017   // Uses both G3_scratch and G4_scratch
2018   Register Rcache = G3_scratch;
2019   Register Rscratch = G4_scratch;
2020   assert_different_registers(Rcache, Rmethod, Ritable_index);
2021 
2022   ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset();
2023 
2024   // determine constant pool cache field offsets
2025   const int method_offset = in_bytes(
2026     cp_base_offset +
2027       (is_invokevirtual


3125   __ verify_oop(G5_method);
3126   __ call_from_interpreter(Rcall, Gargs, Rret);
3127 
3128 }
3129 
3130 
3131 void TemplateTable::invokedynamic(int byte_no) {
3132   transition(vtos, vtos);
3133 
3134   if (!EnableInvokeDynamic) {
3135     // We should not encounter this bytecode if !EnableInvokeDynamic.
3136     // The verifier will stop it.  However, if we get past the verifier,
3137     // this will stop the thread in a reasonable way, without crashing the JVM.
3138     __ call_VM(noreg, CAST_FROM_FN_PTR(address,
3139                      InterpreterRuntime::throw_IncompatibleClassChangeError));
3140     // the call_VM checks for exceptions, so we should never return here.
3141     __ should_not_reach_here();
3142     return;
3143   }
3144 
3145   // G5: CallSite object (f1)
3146   // XX: unused (f2)
3147   // G3: receiver address
3148   // XX: flags (unused)
3149 
3150   Register G5_callsite = G5_method;
3151   Register Rscratch    = G3_scratch;
3152   Register Rtemp       = G1_scratch;
3153   Register Rret        = Lscratch;
3154 
3155   load_invoke_cp_cache_entry(byte_no, G5_callsite, noreg, Rret, false);
3156   __ mov(SP, O5_savedSP);  // record SP that we wanted the callee to restore
3157 
3158   __ verify_oop(G5_callsite);
3159 
3160   // profile this call
3161   __ profile_call(O4);
3162 
3163   // get return address
3164   AddressLiteral table(Interpreter::return_5_addrs_by_index_table());
3165   __ set(table, Rtemp);
3166   __ srl(Rret, ConstantPoolCacheEntry::tosBits, Rret);  // get return type
3167   // Make sure we don't need to mask Rret for tosBits after the above shift
3168   ConstantPoolCacheEntry::verify_tosBits();
3169   __ sll(Rret, LogBytesPerWord, Rret);
3170   __ ld_ptr(Rtemp, Rret, Rret);  // get return address
3171 
3172   __ ld_ptr(G5_callsite, __ delayed_value(java_dyn_CallSite::target_offset_in_bytes, Rscratch), G3_method_handle);
3173   __ null_check(G3_method_handle);
3174 
3175   // Adjust Rret first so Llast_SP can be same as Rret
3176   __ add(Rret, -frame::pc_return_offset, O7);
3177   __ add(Lesp, BytesPerWord, Gargs);  // setup parameter pointer
3178   __ jump_to_method_handle_entry(G3_method_handle, Rtemp, /* emit_delayed_nop */ false);
3179   // Record SP so we can remove any stack space allocated by adapter transition
3180   __ delayed()->mov(SP, Llast_SP);
3181 }
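
At runtime the code emitted above amounts to the following flow (a
hypothetical C++ sketch; every name here is illustrative, not HotSpot API):

  struct MethodHandleOop;                    // opaque for this sketch
  struct CallSiteOop { MethodHandleOop* target; };

  [[noreturn]] void throw_NullPointerException();               // stand-in
  void jump_to_mh_entry(MethodHandleOop* mh, void* args, void* ret_pc);

  void interp_invokedynamic(CallSiteOop* site, int tos_type,
                            void* const* return_table, void* args_base) {
    // Pick the interpreter return handler by the result type encoded in
    // the flags word (the Rret shift and table lookup above).
    void* ret_pc = return_table[tos_type];
    // CallSite.target is the MethodHandle to invoke; it must be bound.
    MethodHandleOop* handle = site->target;  // ld_ptr -> G3_method_handle
    if (handle == nullptr)
      throw_NullPointerException();          // __ null_check(G3_method_handle)
    // Tail-call the method-handle entry with the argument base (Gargs);
    // no new interpreter frame is pushed.
    jump_to_mh_entry(handle, args_base, ret_pc);
  }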
3182 
3183 
3184 //----------------------------------------------------------------------------------------------------
3185 // Allocation
3186 
3187 void TemplateTable::_new() {
3188   transition(vtos, atos);
3189 
3190   Label slow_case;
3191   Label done;
3192   Label initialize_header;
3193   Label initialize_object;  // including clearing the fields
3194 
3195   Register RallocatedObject = Otos_i;
3196   Register RinstanceKlass = O1;
3197   Register Roffset = O3;
3198   Register Rscratch = O4;
3199 
3200   __ get_2_byte_integer_at_bcp(1, Rscratch, Roffset, InterpreterMacroAssembler::Unsigned);

