src/cpu/sparc/vm/templateTable_sparc.cpp

 142 //----------------------------------------------------------------------------------------------------
 143 // Miscellaneous helper routines
 144 
 145 
 146 Address TemplateTable::at_bcp(int offset) {
 147   assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
 148   return Address(Lbcp, offset);
 149 }
 150 
 151 
 152 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register Rbyte_code,
 153                                    Register Rscratch,
 154                                    bool load_bc_into_scratch /*=true*/) {
 155   // With sharing on, may need to test methodOop flag.
 156   if (!RewriteBytecodes) return;
 157   if (load_bc_into_scratch) __ set(bc, Rbyte_code);
 158   Label patch_done;
 159   if (JvmtiExport::can_post_breakpoint()) {
 160     Label fast_patch;
 161     __ ldub(at_bcp(0), Rscratch);
 162     __ cmp(Rscratch, Bytecodes::_breakpoint);
 163     __ br(Assembler::notEqual, false, Assembler::pt, fast_patch);
 164     __ delayed()->nop();  // don't bother to hoist the stb here
 165     // perform the quickening, slowly, in the bowels of the breakpoint table
 166     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::set_original_bytecode_at), Lmethod, Lbcp, Rbyte_code);
 167     __ ba(false, patch_done);
 168     __ delayed()->nop();
 169     __ bind(fast_patch);
 170   }
 171 #ifdef ASSERT
 172   Bytecodes::Code orig_bytecode =  Bytecodes::java_code(bc);
 173   Label okay;
 174   __ ldub(at_bcp(0), Rscratch);
 175   __ cmp(Rscratch, orig_bytecode);
 176   __ br(Assembler::equal, false, Assembler::pt, okay);
 177   __ delayed() ->cmp(Rscratch, Rbyte_code);
 178   __ br(Assembler::equal, false, Assembler::pt, okay);
 179   __ delayed()->nop();
 180   __ stop("Rewriting wrong bytecode location");
 181   __ bind(okay);
 182 #endif
 183   __ stb(Rbyte_code, at_bcp(0));
 184   __ bind(patch_done);
 185 }
 186 
 187 //----------------------------------------------------------------------------------------------------
 188 // Individual instructions


 264   __ get_2_byte_integer_at_bcp(1, G3_scratch, Otos_i, InterpreterMacroAssembler::Signed);
 265 }
 266 
 267 void TemplateTable::ldc(bool wide) {
 268   transition(vtos, vtos);
 269   Label call_ldc, notInt, isString, notString, notClass, exit;
 270 
 271   if (wide) {
 272     __ get_2_byte_integer_at_bcp(1, G3_scratch, O1, InterpreterMacroAssembler::Unsigned);
 273   } else {
 274     __ ldub(Lbcp, 1, O1);
 275   }
 276   __ get_cpool_and_tags(O0, O2);
 277 
 278   const int base_offset = constantPoolOopDesc::header_size() * wordSize;
 279   const int tags_offset = typeArrayOopDesc::header_size(T_BYTE) * wordSize;
 280 
 281   // get type from tags
 282   __ add(O2, tags_offset, O2);
 283   __ ldub(O2, O1, O2);
 284   __ cmp(O2, JVM_CONSTANT_UnresolvedString);    // unresolved string? If so, must resolve
 285   __ brx(Assembler::equal, true, Assembler::pt, call_ldc);
 286   __ delayed()->nop();
 287 
 288   __ cmp(O2, JVM_CONSTANT_UnresolvedClass);     // unresolved class? If so, must resolve
 289   __ brx(Assembler::equal, true, Assembler::pt, call_ldc);
 290   __ delayed()->nop();
 291 
 292   __ cmp(O2, JVM_CONSTANT_UnresolvedClassInError);     // unresolved class in error state
 293   __ brx(Assembler::equal, true, Assembler::pn, call_ldc);
 294   __ delayed()->nop();
 295 
 296   __ cmp(O2, JVM_CONSTANT_Class);      // need to call vm to get java mirror of the class
 297   __ brx(Assembler::notEqual, true, Assembler::pt, notClass);
 298   __ delayed()->add(O0, base_offset, O0);
 299 
 300   __ bind(call_ldc);
 301   __ set(wide, O1);
 302   call_VM(Otos_i, CAST_FROM_FN_PTR(address, InterpreterRuntime::ldc), O1);
 303   __ push(atos);
 304   __ ba(false, exit);
 305   __ delayed()->nop();
 306 
 307   __ bind(notClass);
 308  // __ add(O0, base_offset, O0);
 309   __ sll(O1, LogBytesPerWord, O1);
 310   __ cmp(O2, JVM_CONSTANT_Integer);
 311   __ brx(Assembler::notEqual, true, Assembler::pt, notInt);
 312   __ delayed()->cmp(O2, JVM_CONSTANT_String);
 313   __ ld(O0, O1, Otos_i);
 314   __ push(itos);
 315   __ ba(false, exit);
 316   __ delayed()->nop();
 317 
 318   __ bind(notInt);
 319  // __ cmp(O2, JVM_CONSTANT_String);
 320   __ brx(Assembler::equal, true, Assembler::pt, isString);
 321   __ delayed()->cmp(O2, JVM_CONSTANT_Object);
 322   __ brx(Assembler::notEqual, true, Assembler::pt, notString);
 323   __ delayed()->ldf(FloatRegisterImpl::S, O0, O1, Ftos_f);
 324   __ bind(isString);
 325   __ ld_ptr(O0, O1, Otos_i);
 326   __ verify_oop(Otos_i);
 327   __ push(atos);
 328   __ ba(false, exit);
 329   __ delayed()->nop();
 330 
 331   __ bind(notString);
 332  // __ ldf(FloatRegisterImpl::S, O0, O1, Ftos_f);
 333   __ push(ftos);
 334 
 335   __ bind(exit);
 336 }
 337 
 338 // Fast path for caching oop constants.
 339 // %%% We should use this to handle Class and String constants also.
 340 // %%% It will simplify the ldc/primitive path considerably.
 341 void TemplateTable::fast_aldc(bool wide) {
 342   transition(vtos, atos);
 343 
 344   if (!EnableInvokeDynamic) {
 345     // We should not encounter this bytecode if !EnableInvokeDynamic.
 346     // The verifier will stop it.  However, if we get past the verifier,
 347     // this will stop the thread in a reasonable way, without crashing the JVM.
 348     __ call_VM(noreg, CAST_FROM_FN_PTR(address,
 349                      InterpreterRuntime::throw_IncompatibleClassChangeError));
 350     // the call_VM checks for exception, so we should never return here.
 351     __ should_not_reach_here();
 352     return;
 353   }
 354 
 355   Register Rcache = G3_scratch;
 356   Register Rscratch = G4_scratch;
 357 
 358   resolve_cache_and_index(f1_oop, Otos_i, Rcache, Rscratch, wide ? sizeof(u2) : sizeof(u1));
 359 
 360   __ verify_oop(Otos_i);
 361 
 362   Label L_done;
 363   const Register Rcon_klass = G3_scratch;  // same as Rcache
 364   const Register Rarray_klass = G4_scratch;  // same as Rscratch
 365   __ load_klass(Otos_i, Rcon_klass);
 366   AddressLiteral array_klass_addr((address)Universe::systemObjArrayKlassObj_addr());
 367   __ load_contents(array_klass_addr, Rarray_klass);
 368   __ cmp(Rarray_klass, Rcon_klass);
 369   __ brx(Assembler::notEqual, false, Assembler::pt, L_done);
 370   __ delayed()->nop();
 371   __ ld(Address(Otos_i, arrayOopDesc::length_offset_in_bytes()), Rcon_klass);
 372   __ tst(Rcon_klass);
 373   __ brx(Assembler::zero, true, Assembler::pt, L_done);
 374   __ delayed()->clr(Otos_i);    // executed only if branch is taken
 375 
 376   // Load the exception from the system-array which wraps it:
 377   __ load_heap_oop(Otos_i, arrayOopDesc::base_offset_in_bytes(T_OBJECT), Otos_i);
 378   __ throw_if_not_x(Assembler::never, Interpreter::throw_exception_entry(), G3_scratch);
 379 
 380   __ bind(L_done);
 381 }
 382 
 383 void TemplateTable::ldc2_w() {
 384   transition(vtos, vtos);
 385   Label retry, resolved, Long, exit;
 386 
 387   __ bind(retry);
 388   __ get_2_byte_integer_at_bcp(1, G3_scratch, O1, InterpreterMacroAssembler::Unsigned);
 389   __ get_cpool_and_tags(O0, O2);
 390 
 391   const int base_offset = constantPoolOopDesc::header_size() * wordSize;
 392   const int tags_offset = typeArrayOopDesc::header_size(T_BYTE) * wordSize;
 393   // get type from tags
 394   __ add(O2, tags_offset, O2);
 395   __ ldub(O2, O1, O2);
 396 
 397   __ sll(O1, LogBytesPerWord, O1);
 398   __ add(O0, O1, G3_scratch);
 399 
 400   __ cmp(O2, JVM_CONSTANT_Double);
 401   __ brx(Assembler::notEqual, false, Assembler::pt, Long);
 402   __ delayed()->nop();
 403   // A double can be placed at word-aligned locations in the constant pool.
 404   // Check out Conversions.java for an example.
 405   // Also constantPoolOopDesc::header_size() is 20, which makes it very difficult
 406   // to double-align a double in the constant pool.  SG, 11/7/97
 407 #ifdef _LP64
 408   __ ldf(FloatRegisterImpl::D, G3_scratch, base_offset, Ftos_d);
 409 #else
 410   FloatRegister f = Ftos_d;
 411   __ ldf(FloatRegisterImpl::S, G3_scratch, base_offset, f);
 412   __ ldf(FloatRegisterImpl::S, G3_scratch, base_offset + sizeof(jdouble)/2,
 413          f->successor());
 414 #endif
 415   __ push(dtos);
 416   __ ba(false, exit);
 417   __ delayed()->nop();
 418 
 419   __ bind(Long);
 420 #ifdef _LP64
 421   __ ldx(G3_scratch, base_offset, Otos_l);
 422 #else
 423   __ ld(G3_scratch, base_offset, Otos_l);
 424   __ ld(G3_scratch, base_offset + sizeof(jlong)/2, Otos_l->successor());
 425 #endif
 426   __ push(ltos);
 427 
 428   __ bind(exit);
 429 }
 430 
 431 
 432 void TemplateTable::locals_index(Register reg, int offset) {
 433   __ ldub( at_bcp(offset), reg );
 434 }
 435 
 436 
 437 void TemplateTable::locals_index_wide(Register reg) {
 438   // offset is 2, not 1, because Lbcp points to wide prefix code
 439   __ get_2_byte_integer_at_bcp(2, G4_scratch, reg, InterpreterMacroAssembler::Unsigned);
 440 }
 441 
 442 void TemplateTable::iload() {
 443   transition(vtos, itos);
 444   // Rewrite iload,iload  pair into fast_iload2
 445   //         iload,caload pair into fast_icaload
 446   if (RewriteFrequentPairs) {
 447     Label rewrite, done;
 448 
 449     // get next byte
 450     __ ldub(at_bcp(Bytecodes::length_for(Bytecodes::_iload)), G3_scratch);
 451 
 452     // if _iload, wait to rewrite to _fast_iload2.  We only want to rewrite the
 453     // last two iloads in a pair.  Comparing against fast_iload means that
 454     // the next bytecode is neither an iload nor a caload, and therefore
 455     // an iload pair.
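         // In pseudo-code, the rewrite decision encoded below is roughly
         // (a sketch, not generated code; 'next' is the byte in G3_scratch):
         //   if      (next == _iload)      ; // defer; only the last iload of a run is patched
         //   else if (next == _fast_iload) patch to _fast_iload2;
         //   else if (next == _caload)     patch to _fast_icaload;
         //   else                          patch to _fast_iload;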
 456     __ cmp(G3_scratch, (int)Bytecodes::_iload);
 457     __ br(Assembler::equal, false, Assembler::pn, done);
 458     __ delayed()->nop();
 459 
 460     __ cmp(G3_scratch, (int)Bytecodes::_fast_iload);
 461     __ br(Assembler::equal, false, Assembler::pn, rewrite);
 462     __ delayed()->set(Bytecodes::_fast_iload2, G4_scratch);
 463 
 464     __ cmp(G3_scratch, (int)Bytecodes::_caload);
 465     __ br(Assembler::equal, false, Assembler::pn, rewrite);
 466     __ delayed()->set(Bytecodes::_fast_icaload, G4_scratch);
 467 
 468     __ set(Bytecodes::_fast_iload, G4_scratch);  // don't check again
 469     // rewrite
 470     // G4_scratch: fast bytecode
 471     __ bind(rewrite);
 472     patch_bytecode(Bytecodes::_iload, G4_scratch, G3_scratch, false);
 473     __ bind(done);
 474   }
 475 
 476   // Get the local value into tos
 477   locals_index(G3_scratch);
 478   __ access_local_int( G3_scratch, Otos_i );


 680   //
 681   // _aload_0, _fast_igetfield (itos)
 682   // _aload_0, _fast_agetfield (atos)
 683   // _aload_0, _fast_fgetfield (ftos)
 684   //
 685   // occur frequently. If RewriteFrequentPairs is set, the (slow) _aload_0
 686   // bytecode checks the next bytecode and then rewrites the current
 687   // bytecode into a pair bytecode; otherwise it rewrites the current
 688   // bytecode into _fast_aload_0 that doesn't do the pair check anymore.
 689   //
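       // In pseudo-code, the rewrite choice made below is roughly (a sketch,
       // not generated code; 'next' is the byte loaded into G3_scratch):
       //   if      (next == _getfield)       ; // defer until the pair is quickened
       //   else if (next == _fast_igetfield) patch to _fast_iaccess_0;
       //   else if (next == _fast_agetfield) patch to _fast_aaccess_0;
       //   else if (next == _fast_fgetfield) patch to _fast_faccess_0;
       //   else                              patch to _fast_aload_0;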
 690   if (RewriteFrequentPairs) {
 691     Label rewrite, done;
 692 
 693     // get next byte
 694     __ ldub(at_bcp(Bytecodes::length_for(Bytecodes::_aload_0)), G3_scratch);
 695 
 696     // do actual aload_0
 697     aload(0);
 698 
 699     // if _getfield then wait to rewrite
 700     __ cmp(G3_scratch, (int)Bytecodes::_getfield);
 701     __ br(Assembler::equal, false, Assembler::pn, done);
 702     __ delayed()->nop();
 703 
 704     // if _igetfield then rewrite to _fast_iaccess_0
 705     assert(Bytecodes::java_code(Bytecodes::_fast_iaccess_0) == Bytecodes::_aload_0, "adjust fast bytecode def");
 706     __ cmp(G3_scratch, (int)Bytecodes::_fast_igetfield);
 707     __ br(Assembler::equal, false, Assembler::pn, rewrite);
 708     __ delayed()->set(Bytecodes::_fast_iaccess_0, G4_scratch);
 709 
 710     // if _agetfield then rewrite to _fast_aaccess_0
 711     assert(Bytecodes::java_code(Bytecodes::_fast_aaccess_0) == Bytecodes::_aload_0, "adjust fast bytecode def");
 712     __ cmp(G3_scratch, (int)Bytecodes::_fast_agetfield);
 713     __ br(Assembler::equal, false, Assembler::pn, rewrite);
 714     __ delayed()->set(Bytecodes::_fast_aaccess_0, G4_scratch);
 715 
 716     // if _fgetfield then rewrite to _fast_faccess_0
 717     assert(Bytecodes::java_code(Bytecodes::_fast_faccess_0) == Bytecodes::_aload_0, "adjust fast bytecode def");
 718     __ cmp(G3_scratch, (int)Bytecodes::_fast_fgetfield);
 719     __ br(Assembler::equal, false, Assembler::pn, rewrite);
 720     __ delayed()->set(Bytecodes::_fast_faccess_0, G4_scratch);
 721 
 722     // else rewrite to _fast_aload_0


 850   // Ftos_d: val
 851   // O3: array
 852   __ index_check(O3, O2, LogBytesPerLong, G3_scratch, O2);
 853   __ stf(FloatRegisterImpl::D, Ftos_d, O2, arrayOopDesc::base_offset_in_bytes(T_DOUBLE));
 854 }
 855 
 856 
 857 void TemplateTable::aastore() {
 858   Label store_ok, is_null, done;
 859   transition(vtos, vtos);
 860   __ ld_ptr(Lesp, Interpreter::expr_offset_in_bytes(0), Otos_i);
 861   __ ld(Lesp, Interpreter::expr_offset_in_bytes(1), O2);         // get index
 862   __ ld_ptr(Lesp, Interpreter::expr_offset_in_bytes(2), O3);     // get array
 863   // Otos_i: val
 864   // O2: index
 865   // O3: array
 866   __ verify_oop(Otos_i);
 867   __ index_check_without_pop(O3, O2, UseCompressedOops ? 2 : LogBytesPerWord, G3_scratch, O1);
 868 
 869   // do array store check - check for NULL value first
 870   __ br_null( Otos_i, false, Assembler::pn, is_null );
 871   __ delayed()->nop();
 872 
 873   __ load_klass(O3, O4); // get array klass
 874   __ load_klass(Otos_i, O5); // get value klass
 875 
 876   // do fast instanceof cache test
 877 
 878   __ ld_ptr(O4,     sizeof(oopDesc) + objArrayKlass::element_klass_offset_in_bytes(),  O4);
 879 
 880   assert(Otos_i == O0, "just checking");
 881 
 882   // Otos_i:    value
 883   // O1:        addr - offset
 884   // O2:        index
 885   // O3:        array
 886   // O4:        array element klass
 887   // O5:        value klass
 888 
 889   // Address element(O1, 0, arrayOopDesc::base_offset_in_bytes(T_OBJECT));
 890 
 891   // Generate a fast subtype check.  Branch to store_ok if no
 892   // failure.  Throw if failure.
 893   __ gen_subtype_check( O5, O4, G3_scratch, G4_scratch, G1_scratch, store_ok );
 894 
 895   // Not a subtype; so must throw exception
 896   __ throw_if_not_x( Assembler::never, Interpreter::_throw_ArrayStoreException_entry, G3_scratch );
 897 
 898   // Store is OK.
 899   __ bind(store_ok);
 900   do_oop_store(_masm, O1, noreg, arrayOopDesc::base_offset_in_bytes(T_OBJECT), Otos_i, G3_scratch, _bs->kind(), true);
 901 
 902   __ ba(false,done);
 903   __ delayed()->inc(Lesp, 3* Interpreter::stackElementSize); // adj sp (pops array, index and value)
 904 
 905   __ bind(is_null);
 906   do_oop_store(_masm, O1, noreg, arrayOopDesc::base_offset_in_bytes(T_OBJECT), G0, G4_scratch, _bs->kind(), true);
 907 
 908   __ profile_null_seen(G3_scratch);
 909   __ inc(Lesp, 3* Interpreter::stackElementSize);     // adj sp (pops array, index and value)
 910   __ bind(done);
 911 }
 912 
 913 
 914 void TemplateTable::bastore() {
 915   transition(itos, vtos);
 916   __ pop_i(O2); // index
 917   // Otos_i: val
 918   // O3: array
 919   __ index_check(O3, O2, 0, G3_scratch, O2);
 920   __ stb(Otos_i, O2, arrayOopDesc::base_offset_in_bytes(T_BYTE));
 921 }
 922 


1616   // Save the current Lbcp
1617   const Register O0_cur_bcp = O0;
1618   __ mov( Lbcp, O0_cur_bcp );
1619 
1620 
1621   bool increment_invocation_counter_for_backward_branches = UseCompiler && UseLoopCounter;
1622   if ( increment_invocation_counter_for_backward_branches ) {
1623     Label Lforward;
1624     // check branch direction
1625     __ br( Assembler::positive, false,  Assembler::pn, Lforward );
1626     // Bump bytecode pointer by displacement (take the branch)
1627     __ delayed()->add( O1_disp, Lbcp, Lbcp );     // add to bc addr
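         // Rough flow of the backedge path that follows (a sketch, not generated code):
         //   Lbcp += O1_disp;                               // take the branch
         //   bump the backedge counter (in the MDO if present, else in the methodOop);
         //   if the masked counter wraps to zero:
         //     O0 = InterpreterRuntime::frequency_counter_overflow(branch_bcp);
         //     if (O0 != NULL && O0->entry_bci != InvalidOSREntryBci)
         //       migrate the frame and jump to the OSR nmethod;
         //   otherwise continue at Lforward and dispatch normally.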
1628 
1629     if (TieredCompilation) {
1630       Label Lno_mdo, Loverflow;
1631       int increment = InvocationCounter::count_increment;
1632       int mask = ((1 << Tier0BackedgeNotifyFreqLog) - 1) << InvocationCounter::count_shift;
1633       if (ProfileInterpreter) {
1634         // If no method data exists, go to profile_continue.
1635         __ ld_ptr(Lmethod, methodOopDesc::method_data_offset(), G4_scratch);
1636         __ br_null(G4_scratch, false, Assembler::pn, Lno_mdo);
1637         __ delayed()->nop();
1638 
1639         // Increment backedge counter in the MDO
1640         Address mdo_backedge_counter(G4_scratch, in_bytes(methodDataOopDesc::backedge_counter_offset()) +
1641                                                  in_bytes(InvocationCounter::counter_offset()));
1642         __ increment_mask_and_jump(mdo_backedge_counter, increment, mask, G3_scratch, Lscratch,
1643                                    Assembler::notZero, &Lforward);
1644         __ ba(false, Loverflow);
1645         __ delayed()->nop();
1646       }
1647 
1648       // If there's no MDO, increment counter in methodOop
1649       __ bind(Lno_mdo);
1650       Address backedge_counter(Lmethod, in_bytes(methodOopDesc::backedge_counter_offset()) +
1651                                         in_bytes(InvocationCounter::counter_offset()));
1652       __ increment_mask_and_jump(backedge_counter, increment, mask, G3_scratch, Lscratch,
1653                                  Assembler::notZero, &Lforward);
1654       __ bind(Loverflow);
1655 
1656       // notify point for loop, pass branch bytecode
1657       __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::frequency_counter_overflow), O0_cur_bcp);
1658 
1659       // Was an OSR adapter generated?
1660       // O0 = osr nmethod
1661       __ br_null(O0, false, Assembler::pn, Lforward);
1662       __ delayed()->nop();
1663 
1664       // Has the nmethod been invalidated already?
1665       __ ld(O0, nmethod::entry_bci_offset(), O2);
1666       __ cmp(O2, InvalidOSREntryBci);
1667       __ br(Assembler::equal, false, Assembler::pn, Lforward);
1668       __ delayed()->nop();
1669 
1670       // migrate the interpreter frame off of the stack
1671 
1672       __ mov(G2_thread, L7);
1673       // save nmethod
1674       __ mov(O0, L6);
1675       __ set_last_Java_frame(SP, noreg);
1676       __ call_VM_leaf(noreg, CAST_FROM_FN_PTR(address, SharedRuntime::OSR_migration_begin), L7);
1677       __ reset_last_Java_frame();
1678       __ mov(L7, G2_thread);
1679 
1680       // move OSR nmethod to I1
1681       __ mov(L6, I1);
1682 
1683       // OSR buffer to I0
1684       __ mov(O0, I0);
1685 
1686       // remove the interpreter frame
1687       __ restore(I5_savedSP, 0, SP);
1688 


1813   __ add(Lbcp, BytesPerInt, O1);
1814   __ and3(O1, -BytesPerInt, O1);
1815   // load lo, hi
1816   __ ld(O1, 1 * BytesPerInt, O2);       // low bound
1817   __ ld(O1, 2 * BytesPerInt, O3);       // high bound
1818 #ifdef _LP64
1819   // Sign extend the 32 bits
1820   __ sra ( Otos_i, 0, Otos_i );
1821 #endif /* _LP64 */
1822 
1823   // check against lo & hi
1824   __ cmp( Otos_i, O2);
1825   __ br( Assembler::less, false, Assembler::pn, default_case);
1826   __ delayed()->cmp( Otos_i, O3 );
1827   __ br( Assembler::greater, false, Assembler::pn, default_case);
1828   // lookup dispatch offset
1829   __ delayed()->sub(Otos_i, O2, O2);
1830   __ profile_switch_case(O2, O3, G3_scratch, G4_scratch);
1831   __ sll(O2, LogBytesPerInt, O2);
1832   __ add(O2, 3 * BytesPerInt, O2);
1833   __ ba(false, continue_execution);
1834   __ delayed()->ld(O1, O2, O2);
1835   // handle default
1836   __ bind(default_case);
1837   __ profile_switch_default(O3);
1838   __ ld(O1, 0, O2); // get default offset
1839   // continue execution
1840   __ bind(continue_execution);
1841   __ add(Lbcp, O2, Lbcp);
1842   __ dispatch_next(vtos);
1843 }
1844 
1845 
1846 void TemplateTable::lookupswitch() {
1847   transition(itos, itos);
1848   __ stop("lookupswitch bytecode should have been rewritten");
1849 }
1850 
1851 void TemplateTable::fast_linearswitch() {
1852   transition(itos, vtos);
1853   Label loop_entry, loop, found, continue_execution;
1854   // align bcp
1855   __ add(Lbcp, BytesPerInt, O1);
1856   __ and3(O1, -BytesPerInt, O1);
1857   // set counter
1858   __ ld(O1, BytesPerInt, O2);
1859   __ sll(O2, LogBytesPerInt + 1, O2); // in word-pairs
1860   __ add(O1, 2 * BytesPerInt, O3); // set first pair addr
1861   __ ba(false, loop_entry);
1862   __ delayed()->add(O3, O2, O2); // counter now points past last pair
1863 
1864   // table search
1865   __ bind(loop);
1866   __ cmp(O4, Otos_i);
1867   __ br(Assembler::equal, true, Assembler::pn, found);
1868   __ delayed()->ld(O3, BytesPerInt, O4); // offset -> O4
1869   __ inc(O3, 2 * BytesPerInt);
1870 
1871   __ bind(loop_entry);
1872   __ cmp(O2, O3);
1873   __ brx(Assembler::greaterUnsigned, true, Assembler::pt, loop);
1874   __ delayed()->ld(O3, 0, O4);
1875 
1876   // default case
1877   __ ld(O1, 0, O4); // get default offset
1878   if (ProfileInterpreter) {
1879     __ profile_switch_default(O3);
1880     __ ba(false, continue_execution);
1881     __ delayed()->nop();
1882   }
1883 
1884   // entry found -> get offset
1885   __ bind(found);
1886   if (ProfileInterpreter) {
1887     __ sub(O3, O1, O3);
1888     __ sub(O3, 2*BytesPerInt, O3);
1889     __ srl(O3, LogBytesPerInt + 1, O3); // in word-pairs
1890     __ profile_switch_case(O3, O1, O2, G3_scratch);
1891 
1892     __ bind(continue_execution);
1893   }
1894   __ add(Lbcp, O4, Lbcp);
1895   __ dispatch_next(vtos);
1896 }
1897 
1898 
1899 void TemplateTable::fast_binaryswitch() {
1900   transition(itos, vtos);
1901   // Implementation using the following core algorithm: (copied from Intel)
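       // In outline, the search emitted below is (a sketch of the loop that follows):
       //   int i = 0, j = n;
       //   while (i + 1 < j) {
       //     int h = (i + j) >> 1;
       //     if (key < array[h].fast_match()) j = h; else i = h;
       //   }
       //   // on exit, i is the candidate index and must be re-checked against key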


1927   assert(Otos_i == O0, "alias checking");
1928   const Register Rkey     = Otos_i;                    // already set (tosca)
1929   const Register Rarray   = O1;
1930   const Register Ri       = O2;
1931   const Register Rj       = O3;
1932   const Register Rh       = O4;
1933   const Register Rscratch = O5;
1934 
1935   const int log_entry_size = 3;
1936   const int entry_size = 1 << log_entry_size;
1937 
1938   Label found;
1939   // Find Array start
1940   __ add(Lbcp, 3 * BytesPerInt, Rarray);
1941   __ and3(Rarray, -BytesPerInt, Rarray);
1942   // initialize i & j (in delay slot)
1943   __ clr( Ri );
1944 
1945   // and start
1946   Label entry;
1947   __ ba(false, entry);
1948   __ delayed()->ld( Rarray, -BytesPerInt, Rj);
1949   // (Rj is already in the native byte-ordering.)
1950 
1951   // binary search loop
1952   { Label loop;
1953     __ bind( loop );
1954     // int h = (i + j) >> 1;
1955     __ sra( Rh, 1, Rh );
1956     // if (key < array[h].fast_match()) {
1957     //   j = h;
1958     // } else {
1959     //   i = h;
1960     // }
1961     __ sll( Rh, log_entry_size, Rscratch );
1962     __ ld( Rarray, Rscratch, Rscratch );
1963     // (Rscratch is already in the native byte-ordering.)
1964     __ cmp( Rkey, Rscratch );
1965     if ( VM_Version::v9_instructions_work() ) {
1966       __ movcc( Assembler::less,         false, Assembler::icc, Rh, Rj );  // j = h if (key <  array[h].fast_match())
1967       __ movcc( Assembler::greaterEqual, false, Assembler::icc, Rh, Ri );  // i = h if (key >= array[h].fast_match())


1985   // end of binary search, result index is i (must check again!)
1986   Label default_case;
1987   Label continue_execution;
1988   if (ProfileInterpreter) {
1989     __ mov( Ri, Rh );              // Save index in i for profiling
1990   }
1991   __ sll( Ri, log_entry_size, Ri );
1992   __ ld( Rarray, Ri, Rscratch );
1993   // (Rscratch is already in the native byte-ordering.)
1994   __ cmp( Rkey, Rscratch );
1995   __ br( Assembler::notEqual, true, Assembler::pn, default_case );
1996   __ delayed()->ld( Rarray, -2 * BytesPerInt, Rj ); // load default offset -> j
1997 
1998   // entry found -> j = offset
1999   __ inc( Ri, BytesPerInt );
2000   __ profile_switch_case(Rh, Rj, Rscratch, Rkey);
2001   __ ld( Rarray, Ri, Rj );
2002   // (Rj is already in the native byte-ordering.)
2003 
2004   if (ProfileInterpreter) {
2005     __ ba(false, continue_execution);
2006     __ delayed()->nop();
2007   }
2008 
2009   __ bind(default_case); // fall through (if not profiling)
2010   __ profile_switch_default(Ri);
2011 
2012   __ bind(continue_execution);
2013   __ add( Lbcp, Rj, Lbcp );
2014   __ dispatch_next( vtos );
2015 }
2016 
2017 
2018 void TemplateTable::_return(TosState state) {
2019   transition(state, state);
2020   assert(_desc->calls_vm(), "inconsistent calls_vm information");
2021 
2022   if (_desc->bytecode() == Bytecodes::_return_register_finalizer) {
2023     assert(state == vtos, "only valid state");
2024     __ mov(G0, G3_scratch);
2025     __ access_local_ptr(G3_scratch, Otos_i);
2026     __ load_klass(Otos_i, O2);


2199   if (is_static) {
2200     __ ld_ptr(Rcache, cp_base_offset + ConstantPoolCacheEntry::f1_offset(), Robj);
2201   }
2202 }
2203 
2204 // The registers Rcache and index are expected to be set before the call.
2205 // Correct values of the Rcache and index registers are preserved.
2206 void TemplateTable::jvmti_post_field_access(Register Rcache,
2207                                             Register index,
2208                                             bool is_static,
2209                                             bool has_tos) {
2210   ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset();
2211 
2212   if (JvmtiExport::can_post_field_access()) {
2213     // Check to see if a field access watch has been set before we take
2214     // the time to call into the VM.
2215     Label Label1;
2216     assert_different_registers(Rcache, index, G1_scratch);
2217     AddressLiteral get_field_access_count_addr(JvmtiExport::get_field_access_count_addr());
2218     __ load_contents(get_field_access_count_addr, G1_scratch);
2219     __ tst(G1_scratch);
2220     __ br(Assembler::zero, false, Assembler::pt, Label1);
2221     __ delayed()->nop();
2222 
2223     __ add(Rcache, in_bytes(cp_base_offset), Rcache);
2224 
2225     if (is_static) {
2226       __ clr(Otos_i);
2227     } else {
2228       if (has_tos) {
2229         // save object pointer before call_VM() clobbers it
2230         __ push_ptr(Otos_i);  // put object on tos where GC wants it.
2231       } else {
2232         // Load top of stack (do not pop the value off the stack);
2233         __ ld_ptr(Lesp, Interpreter::expr_offset_in_bytes(0), Otos_i);
2234       }
2235       __ verify_oop(Otos_i);
2236     }
2237     // Otos_i: object pointer or NULL if static
2238     // Rcache: cache entry pointer
2239     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access),
2240                Otos_i, Rcache);
2241     if (!is_static && has_tos) {


2281   Label checkVolatile;
2282 
2283   // compute field type
2284   Label notByte, notInt, notShort, notChar, notLong, notFloat, notObj;
2285   __ srl(Rflags, ConstantPoolCacheEntry::tosBits, Rflags);
2286   // Make sure we don't need to mask Rflags for tosBits after the above shift
2287   ConstantPoolCacheEntry::verify_tosBits();
2288 
2289   // Check atos before itos for getstatic, more likely (in Queens at least)
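       // The compare chain below amounts to (sketch):
       //   switch (Rflags /* tos state of the field */) {
       //     case atos: load_heap_oop; push(atos); maybe patch _fast_agetfield; break;
       //     case itos: ld;            push(itos); maybe patch _fast_igetfield; break;
       //     case ltos: ld_long;       push(ltos); maybe patch _fast_lgetfield; break;
       //     case btos: ldsb;          push(itos); maybe patch _fast_bgetfield; break;
       //     case ctos: lduh;          push(itos); maybe patch _fast_cgetfield; break;
       //     case stos: ldsh;          push(itos); maybe patch _fast_sgetfield; break;
       //     case ftos: ldf(S);        push(ftos); maybe patch _fast_fgetfield; break;
       //     default /*dtos*/: ldf(D); push(dtos); maybe patch _fast_dgetfield; break;
       //   }
       //   // each arm falls into checkVolatile; "maybe patch" = only if !is_static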
2290   __ cmp(Rflags, atos);
2291   __ br(Assembler::notEqual, false, Assembler::pt, notObj);
2292   __ delayed() ->cmp(Rflags, itos);
2293 
2294   // atos
2295   __ load_heap_oop(Rclass, Roffset, Otos_i);
2296   __ verify_oop(Otos_i);
2297   __ push(atos);
2298   if (!is_static) {
2299     patch_bytecode(Bytecodes::_fast_agetfield, G3_scratch, G4_scratch);
2300   }
2301   __ ba(false, checkVolatile);
2302   __ delayed()->tst(Lscratch);
2303 
2304   __ bind(notObj);
2305 
2306   // cmp(Rflags, itos);
2307   __ br(Assembler::notEqual, false, Assembler::pt, notInt);
2308   __ delayed() ->cmp(Rflags, ltos);
2309 
2310   // itos
2311   __ ld(Rclass, Roffset, Otos_i);
2312   __ push(itos);
2313   if (!is_static) {
2314     patch_bytecode(Bytecodes::_fast_igetfield, G3_scratch, G4_scratch);
2315   }
2316   __ ba(false, checkVolatile);
2317   __ delayed()->tst(Lscratch);
2318 
2319   __ bind(notInt);
2320 
2321   // cmp(Rflags, ltos);
2322   __ br(Assembler::notEqual, false, Assembler::pt, notLong);
2323   __ delayed() ->cmp(Rflags, btos);
2324 
2325   // ltos
2326   // load must be atomic
2327   __ ld_long(Rclass, Roffset, Otos_l);
2328   __ push(ltos);
2329   if (!is_static) {
2330     patch_bytecode(Bytecodes::_fast_lgetfield, G3_scratch, G4_scratch);
2331   }
2332   __ ba(false, checkVolatile);
2333   __ delayed()->tst(Lscratch);
2334 
2335   __ bind(notLong);
2336 
2337   // cmp(Rflags, btos);
2338   __ br(Assembler::notEqual, false, Assembler::pt, notByte);
2339   __ delayed() ->cmp(Rflags, ctos);
2340 
2341   // btos
2342   __ ldsb(Rclass, Roffset, Otos_i);
2343   __ push(itos);
2344   if (!is_static) {
2345     patch_bytecode(Bytecodes::_fast_bgetfield, G3_scratch, G4_scratch);
2346   }
2347   __ ba(false, checkVolatile);
2348   __ delayed()->tst(Lscratch);
2349 
2350   __ bind(notByte);
2351 
2352   // cmp(Rflags, ctos);
2353   __ br(Assembler::notEqual, false, Assembler::pt, notChar);
2354   __ delayed() ->cmp(Rflags, stos);
2355 
2356   // ctos
2357   __ lduh(Rclass, Roffset, Otos_i);
2358   __ push(itos);
2359   if (!is_static) {
2360     patch_bytecode(Bytecodes::_fast_cgetfield, G3_scratch, G4_scratch);
2361   }
2362   __ ba(false, checkVolatile);
2363   __ delayed()->tst(Lscratch);
2364 
2365   __ bind(notChar);
2366 
2367   // cmp(Rflags, stos);
2368   __ br(Assembler::notEqual, false, Assembler::pt, notShort);
2369   __ delayed() ->cmp(Rflags, ftos);
2370 
2371   // stos
2372   __ ldsh(Rclass, Roffset, Otos_i);
2373   __ push(itos);
2374   if (!is_static) {
2375     patch_bytecode(Bytecodes::_fast_sgetfield, G3_scratch, G4_scratch);
2376   }
2377   __ ba(false, checkVolatile);
2378   __ delayed()->tst(Lscratch);
2379 
2380   __ bind(notShort);
2381 
2382 
2383   // cmp(Rflags, ftos);
2384   __ br(Assembler::notEqual, false, Assembler::pt, notFloat);
2385   __ delayed() ->tst(Lscratch);
2386 
2387   // ftos
2388   __ ldf(FloatRegisterImpl::S, Rclass, Roffset, Ftos_f);
2389   __ push(ftos);
2390   if (!is_static) {
2391     patch_bytecode(Bytecodes::_fast_fgetfield, G3_scratch, G4_scratch);
2392   }
2393   __ ba(false, checkVolatile);
2394   __ delayed()->tst(Lscratch);
2395 
2396   __ bind(notFloat);
2397 
2398 
2399   // dtos
2400   __ ldf(FloatRegisterImpl::D, Rclass, Roffset, Ftos_d);
2401   __ push(dtos);
2402   if (!is_static) {
2403     patch_bytecode(Bytecodes::_fast_dgetfield, G3_scratch, G4_scratch);
2404   }
2405 
2406   __ bind(checkVolatile);
2407   if (__ membar_has_effect(membar_bits)) {
2408     // __ tst(Lscratch); executed in delay slot
2409     __ br(Assembler::zero, false, Assembler::pt, exit);
2410     __ delayed()->nop();
2411     volatile_barrier(membar_bits);
2412   }
2413 


2482   if (__ membar_has_effect(membar_bits)) {
2483     __ btst(Lscratch, Rflags);
2484     __ br(Assembler::zero, false, Assembler::pt, exit);
2485     __ delayed()->nop();
2486     volatile_barrier(membar_bits);
2487     __ bind(exit);
2488   }
2489 
2490   if (state == atos) {
2491     __ verify_oop(Otos_i);    // does not blow flags!
2492   }
2493 }
2494 
2495 void TemplateTable::jvmti_post_fast_field_mod() {
2496   if (JvmtiExport::can_post_field_modification()) {
2497     // Check to see if a field modification watch has been set before we take
2498     // the time to call into the VM.
2499     Label done;
2500     AddressLiteral get_field_modification_count_addr(JvmtiExport::get_field_modification_count_addr());
2501     __ load_contents(get_field_modification_count_addr, G4_scratch);
2502     __ tst(G4_scratch);
2503     __ br(Assembler::zero, false, Assembler::pt, done);
2504     __ delayed()->nop();
2505     __ pop_ptr(G4_scratch);     // copy the object pointer from tos
2506     __ verify_oop(G4_scratch);
2507     __ push_ptr(G4_scratch);    // put the object pointer back on tos
2508     __ get_cache_entry_pointer_at_bcp(G1_scratch, G3_scratch, 1);
2509     // Save tos values before call_VM() clobbers them. Since we have
2510     // to do it for every data type, we use the saved values as the
2511     // jvalue object.
2512     switch (bytecode()) {  // save tos values before call_VM() clobbers them
2513     case Bytecodes::_fast_aputfield: __ push_ptr(Otos_i); break;
2514     case Bytecodes::_fast_bputfield: // fall through
2515     case Bytecodes::_fast_sputfield: // fall through
2516     case Bytecodes::_fast_cputfield: // fall through
2517     case Bytecodes::_fast_iputfield: __ push_i(Otos_i); break;
2518     case Bytecodes::_fast_dputfield: __ push_d(Ftos_d); break;
2519     case Bytecodes::_fast_fputfield: __ push_f(Ftos_f); break;
2520     // get words in right order for use as jvalue object
2521     case Bytecodes::_fast_lputfield: __ push_l(Otos_l); break;
2522     }
2523     // setup pointer to jvalue object
2524     __ mov(Lesp, G3_scratch);  __ inc(G3_scratch, wordSize);


2535     case Bytecodes::_fast_dputfield: __ pop_d(Ftos_d); break;
2536     case Bytecodes::_fast_fputfield: __ pop_f(Ftos_f); break;
2537     case Bytecodes::_fast_lputfield: __ pop_l(Otos_l); break;
2538     }
2539     __ bind(done);
2540   }
2541 }
2542 
2543 // The registers Rcache and index are expected to be set before the call.
2544 // The function may destroy various registers, just not the Rcache and index registers.
2545 void TemplateTable::jvmti_post_field_mod(Register Rcache, Register index, bool is_static) {
2546   ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset();
2547 
2548   if (JvmtiExport::can_post_field_modification()) {
2549     // Check to see if a field modification watch has been set before we take
2550     // the time to call into the VM.
2551     Label Label1;
2552     assert_different_registers(Rcache, index, G1_scratch);
2553     AddressLiteral get_field_modification_count_addr(JvmtiExport::get_field_modification_count_addr());
2554     __ load_contents(get_field_modification_count_addr, G1_scratch);
2555     __ tst(G1_scratch);
2556     __ br(Assembler::zero, false, Assembler::pt, Label1);
2557     __ delayed()->nop();
2558 
2559     // The Rcache and index registers have already been set.
2560     // This allows us to eliminate this call but the Rcache and index
2561     // registers must be used accordingly after this line.
2562     __ get_cache_and_index_at_bcp(G1_scratch, G4_scratch, 1);
2563 
2564     __ add(G1_scratch, in_bytes(cp_base_offset), G3_scratch);
2565     if (is_static) {
2566       // Life is simple.  Null out the object pointer.
2567       __ clr(G4_scratch);
2568     } else {
2569       Register Rflags = G1_scratch;
2570       // Life is harder. The stack holds the value on top, followed by the
2571       // object.  We don't know the size of the value, though; it could be
2572       // one or two words depending on its type. As a result, we must find
2573       // the type to determine where the object is.
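             // Expression stack layout at this point (sketch):
             //   Lesp + expr_offset(0)        : value (one slot, or two for ltos/dtos)
             //   Lesp + expr_offset(1) or (2) : object reference
             // hence G4_scratch is advanced by expr_offset_in_bytes(1) for one-word
             // values and by expr_offset_in_bytes(2) for long/double below.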
2574 
2575       Label two_word, valsizeknown;
2576       __ ld_ptr(G1_scratch, cp_base_offset + ConstantPoolCacheEntry::flags_offset(), Rflags);
2577       __ mov(Lesp, G4_scratch);
2578       __ srl(Rflags, ConstantPoolCacheEntry::tosBits, Rflags);
2579       // Make sure we don't need to mask Rflags for tosBits after the above shift
2580       ConstantPoolCacheEntry::verify_tosBits();
2581       __ cmp(Rflags, ltos);
2582       __ br(Assembler::equal, false, Assembler::pt, two_word);
2583       __ delayed()->cmp(Rflags, dtos);
2584       __ br(Assembler::equal, false, Assembler::pt, two_word);
2585       __ delayed()->nop();
2586       __ inc(G4_scratch, Interpreter::expr_offset_in_bytes(1));
2587       __ br(Assembler::always, false, Assembler::pt, valsizeknown);
2588       __ delayed()->nop();
2589       __ bind(two_word);
2590 
2591       __ inc(G4_scratch, Interpreter::expr_offset_in_bytes(2));
2592 
2593       __ bind(valsizeknown);
2594       // setup object pointer
2595       __ ld_ptr(G4_scratch, 0, G4_scratch);
2596       __ verify_oop(G4_scratch);
2597     }
2598     // setup pointer to jvalue object
2599     __ mov(Lesp, G1_scratch);  __ inc(G1_scratch, wordSize);
2600     // G4_scratch:  object pointer or NULL if static
2601     // G3_scratch: cache entry pointer
2602     // G1_scratch: jvalue object on the stack
2603     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification),
2604                G4_scratch, G3_scratch, G1_scratch);
2605     __ get_cache_and_index_at_bcp(Rcache, index, 1);
2606     __ bind(Label1);
2607   }
2608 }


2619   Register index  = G4_scratch;
2620   Register Rclass = Rcache;
2621   Register Roffset= G4_scratch;
2622   Register Rflags = G1_scratch;
2623   ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset();
2624 
2625   resolve_cache_and_index(byte_no, noreg, Rcache, index, sizeof(u2));
2626   jvmti_post_field_mod(Rcache, index, is_static);
2627   load_field_cp_cache_entry(Rclass, Rcache, index, Roffset, Rflags, is_static);
2628 
2629   Assembler::Membar_mask_bits read_bits =
2630     Assembler::Membar_mask_bits(Assembler::LoadStore | Assembler::StoreStore);
2631   Assembler::Membar_mask_bits write_bits = Assembler::StoreLoad;
2632 
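       // Volatile-field store protocol emitted below when the field is volatile
       // (sketch):
       //   membar(LoadStore | StoreStore);   // read_bits, before the store
       //   <store the field>;
       //   membar(StoreLoad);                // write_bits, after the store
       // Non-volatile stores skip both barriers via the Lscratch test.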
2633   Label notVolatile, checkVolatile, exit;
2634   if (__ membar_has_effect(read_bits) || __ membar_has_effect(write_bits)) {
2635     __ set((1 << ConstantPoolCacheEntry::volatileField), Lscratch);
2636     __ and3(Rflags, Lscratch, Lscratch);
2637 
2638     if (__ membar_has_effect(read_bits)) {
2639       __ tst(Lscratch);
2640       __ br(Assembler::zero, false, Assembler::pt, notVolatile);
2641       __ delayed()->nop();
2642       volatile_barrier(read_bits);
2643       __ bind(notVolatile);
2644     }
2645   }
2646 
2647   __ srl(Rflags, ConstantPoolCacheEntry::tosBits, Rflags);
2648   // Make sure we don't need to mask Rflags for tosBits after the above shift
2649   ConstantPoolCacheEntry::verify_tosBits();
2650 
2651   // compute field type
2652   Label notInt, notShort, notChar, notObj, notByte, notLong, notFloat;
2653 
2654   if (is_static) {
2655     // putstatic with object type most likely, check that first
2656     __ cmp(Rflags, atos );
2657     __ br(Assembler::notEqual, false, Assembler::pt, notObj);
2658     __ delayed() ->cmp(Rflags, itos );
2659 
2660     // atos
2661     __ pop_ptr();
2662     __ verify_oop(Otos_i);
2663 
2664     do_oop_store(_masm, Rclass, Roffset, 0, Otos_i, G1_scratch, _bs->kind(), false);
2665 
2666     __ ba(false, checkVolatile);
2667     __ delayed()->tst(Lscratch);
2668 
2669     __ bind(notObj);
2670 
2671     // cmp(Rflags, itos );
2672     __ br(Assembler::notEqual, false, Assembler::pt, notInt);
2673     __ delayed() ->cmp(Rflags, btos );
2674 
2675     // itos
2676     __ pop_i();
2677     __ st(Otos_i, Rclass, Roffset);
2678     __ ba(false, checkVolatile);
2679     __ delayed()->tst(Lscratch);
2680 
2681     __ bind(notInt);
2682 
2683   } else {
2684     // putfield with int type most likely, check that first
2685     __ cmp(Rflags, itos );
2686     __ br(Assembler::notEqual, false, Assembler::pt, notInt);
2687     __ delayed() ->cmp(Rflags, atos );
2688 
2689     // itos
2690     __ pop_i();
2691     pop_and_check_object(Rclass);
2692     __ st(Otos_i, Rclass, Roffset);
2693     patch_bytecode(Bytecodes::_fast_iputfield, G3_scratch, G4_scratch);
2694     __ ba(false, checkVolatile);
2695     __ delayed()->tst(Lscratch);
2696 
2697     __ bind(notInt);
2698     // cmp(Rflags, atos );
2699     __ br(Assembler::notEqual, false, Assembler::pt, notObj);
2700     __ delayed() ->cmp(Rflags, btos );
2701 
2702     // atos
2703     __ pop_ptr();
2704     pop_and_check_object(Rclass);
2705     __ verify_oop(Otos_i);
2706 
2707     do_oop_store(_masm, Rclass, Roffset, 0, Otos_i, G1_scratch, _bs->kind(), false);
2708 
2709     patch_bytecode(Bytecodes::_fast_aputfield, G3_scratch, G4_scratch);
2710     __ ba(false, checkVolatile);
2711     __ delayed()->tst(Lscratch);
2712 
2713     __ bind(notObj);
2714   }
2715 
2716   // cmp(Rflags, btos );
2717   __ br(Assembler::notEqual, false, Assembler::pt, notByte);
2718   __ delayed() ->cmp(Rflags, ltos );
2719 
2720   // btos
2721   __ pop_i();
2722   if (!is_static) pop_and_check_object(Rclass);
2723   __ stb(Otos_i, Rclass, Roffset);
2724   if (!is_static) {
2725     patch_bytecode(Bytecodes::_fast_bputfield, G3_scratch, G4_scratch);
2726   }
2727   __ ba(false, checkVolatile);
2728   __ delayed()->tst(Lscratch);
2729 
2730   __ bind(notByte);
2731 
2732   // cmp(Rflags, ltos );
2733   __ br(Assembler::notEqual, false, Assembler::pt, notLong);
2734   __ delayed() ->cmp(Rflags, ctos );
2735 
2736   // ltos
2737   __ pop_l();
2738   if (!is_static) pop_and_check_object(Rclass);
2739   __ st_long(Otos_l, Rclass, Roffset);
2740   if (!is_static) {
2741     patch_bytecode(Bytecodes::_fast_lputfield, G3_scratch, G4_scratch);
2742   }
2743   __ ba(false, checkVolatile);
2744   __ delayed()->tst(Lscratch);
2745 
2746   __ bind(notLong);
2747 
2748   // cmp(Rflags, ctos );
2749   __ br(Assembler::notEqual, false, Assembler::pt, notChar);
2750   __ delayed() ->cmp(Rflags, stos );
2751 
2752   // ctos (char)
2753   __ pop_i();
2754   if (!is_static) pop_and_check_object(Rclass);
2755   __ sth(Otos_i, Rclass, Roffset);
2756   if (!is_static) {
2757     patch_bytecode(Bytecodes::_fast_cputfield, G3_scratch, G4_scratch);
2758   }
2759   __ ba(false, checkVolatile);
2760   __ delayed()->tst(Lscratch);
2761 
2762   __ bind(notChar);
2763   // cmp(Rflags, stos );
2764   __ br(Assembler::notEqual, false, Assembler::pt, notShort);
2765   __ delayed() ->cmp(Rflags, ftos );
2766 
2767   // stos (short)
2768   __ pop_i();
2769   if (!is_static) pop_and_check_object(Rclass);
2770   __ sth(Otos_i, Rclass, Roffset);
2771   if (!is_static) {
2772     patch_bytecode(Bytecodes::_fast_sputfield, G3_scratch, G4_scratch);
2773   }
2774   __ ba(false, checkVolatile);
2775   __ delayed()->tst(Lscratch);
2776 
2777   __ bind(notShort);
2778   // cmp(Rflags, ftos );
2779   __ br(Assembler::notZero, false, Assembler::pt, notFloat);
2780   __ delayed()->nop();
2781 
2782   // ftos
2783   __ pop_f();
2784   if (!is_static) pop_and_check_object(Rclass);
2785   __ stf(FloatRegisterImpl::S, Ftos_f, Rclass, Roffset);
2786   if (!is_static) {
2787     patch_bytecode(Bytecodes::_fast_fputfield, G3_scratch, G4_scratch);
2788   }
2789   __ ba(false, checkVolatile);
2790   __ delayed()->tst(Lscratch);
2791 
2792   __ bind(notFloat);
2793 
2794   // dtos
2795   __ pop_d();
2796   if (!is_static) pop_and_check_object(Rclass);
2797   __ stf(FloatRegisterImpl::D, Ftos_d, Rclass, Roffset);
2798   if (!is_static) {
2799     patch_bytecode(Bytecodes::_fast_dputfield, G3_scratch, G4_scratch);
2800   }
2801 
2802   __ bind(checkVolatile);
2803   __ tst(Lscratch);
2804 
2805   if (__ membar_has_effect(write_bits)) {
2806     // __ tst(Lscratch); in delay slot
2807     __ br(Assembler::zero, false, Assembler::pt, exit);
2808     __ delayed()->nop();
2809     volatile_barrier(Assembler::StoreLoad);


2816   Register Rcache = G3_scratch;
2817   Register Rclass = Rcache;
2818   Register Roffset= G4_scratch;
2819   Register Rflags = G1_scratch;
2820   ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset();
2821 
2822   jvmti_post_fast_field_mod();
2823 
2824   __ get_cache_and_index_at_bcp(Rcache, G4_scratch, 1);
2825 
2826   Assembler::Membar_mask_bits read_bits =
2827     Assembler::Membar_mask_bits(Assembler::LoadStore | Assembler::StoreStore);
2828   Assembler::Membar_mask_bits write_bits = Assembler::StoreLoad;
2829 
2830   Label notVolatile, checkVolatile, exit;
2831   if (__ membar_has_effect(read_bits) || __ membar_has_effect(write_bits)) {
2832     __ ld_ptr(Rcache, cp_base_offset + ConstantPoolCacheEntry::flags_offset(), Rflags);
2833     __ set((1 << ConstantPoolCacheEntry::volatileField), Lscratch);
2834     __ and3(Rflags, Lscratch, Lscratch);
2835     if (__ membar_has_effect(read_bits)) {
2836       __ tst(Lscratch);
2837       __ br(Assembler::zero, false, Assembler::pt, notVolatile);
2838       __ delayed()->nop();
2839       volatile_barrier(read_bits);
2840       __ bind(notVolatile);
2841     }
2842   }
2843 
2844   __ ld_ptr(Rcache, cp_base_offset + ConstantPoolCacheEntry::f2_offset(), Roffset);
2845   pop_and_check_object(Rclass);
2846 
2847   switch (bytecode()) {
2848     case Bytecodes::_fast_bputfield: __ stb(Otos_i, Rclass, Roffset); break;
2849     case Bytecodes::_fast_cputfield: /* fall through */
2850     case Bytecodes::_fast_sputfield: __ sth(Otos_i, Rclass, Roffset); break;
2851     case Bytecodes::_fast_iputfield: __ st(Otos_i, Rclass, Roffset);  break;
2852     case Bytecodes::_fast_lputfield: __ st_long(Otos_l, Rclass, Roffset); break;
2853     case Bytecodes::_fast_fputfield:
2854       __ stf(FloatRegisterImpl::S, Ftos_f, Rclass, Roffset);
2855       break;
2856     case Bytecodes::_fast_dputfield:
2857       __ stf(FloatRegisterImpl::D, Ftos_d, Rclass, Roffset);
2858       break;
2859     case Bytecodes::_fast_aputfield:
2860       do_oop_store(_masm, Rclass, Roffset, 0, Otos_i, G1_scratch, _bs->kind(), false);
2861       break;
2862     default:
2863       ShouldNotReachHere();
2864   }
2865 
2866   if (__ membar_has_effect(write_bits)) {
2867     __ tst(Lscratch);
2868     __ br(Assembler::zero, false, Assembler::pt, exit);
2869     __ delayed()->nop();
2870     volatile_barrier(Assembler::StoreLoad);
2871     __ bind(exit);
2872   }
2873 }
2874 
2875 
2876 void TemplateTable::putfield(int byte_no) {
2877   putfield_or_static(byte_no, false);
2878 }
2879 
2880 void TemplateTable::putstatic(int byte_no) {
2881   putfield_or_static(byte_no, true);
2882 }
2883 
2884 
2885 void TemplateTable::fast_xaccess(TosState state) {
2886   transition(vtos, state);
2887   Register Rcache = G3_scratch;
2888   Register Roffset = G4_scratch;
2889   Register Rflags  = G4_scratch;


3209   __ sll(Rtemp, LogBytesPerWord, Rtemp);   // Rtemp *= wordSize;
3210   if (Assembler::is_simm13(base)) {
3211     __ add(Rtemp, base, Rtemp);
3212   } else {
3213     __ set(base, Rscratch);
3214     __ add(Rscratch, Rtemp, Rtemp);
3215   }
3216   __ add(RklassOop, Rtemp, Rscratch);
3217 
3218   __ bind(search);
3219 
3220   __ ld_ptr(Rscratch, itableOffsetEntry::interface_offset_in_bytes(), Rtemp);
3221   {
3222     Label ok;
3223 
3224     // Check that entry is non-null.  Null entries are probably a bytecode
3225     // problem.  If the interface isn't implemented by the receiver class,
3226     // the VM should throw IncompatibleClassChangeError.  linkResolver checks
3227     // this too but that's only if the entry isn't already resolved, so we
3228     // need to check again.
3229     __ br_notnull( Rtemp, false, Assembler::pt, ok);
3230     __ delayed()->nop();
3231     call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_IncompatibleClassChangeError));
3232     __ should_not_reach_here();
3233     __ bind(ok);
3234     __ verify_oop(Rtemp);
3235   }
3236 
3237   __ verify_oop(Rinterface);
3238 
3239   __ cmp(Rinterface, Rtemp);
3240   __ brx(Assembler::notEqual, true, Assembler::pn, search);
3241   __ delayed()->add(Rscratch, itableOffsetEntry::size() * wordSize, Rscratch);
3242 
3243   // entry found and Rscratch points to it
3244   __ ld(Rscratch, itableOffsetEntry::offset_offset_in_bytes(), Rscratch);
3245 
3246   assert(itableMethodEntry::method_offset_in_bytes() == 0, "adjust instruction below");
3247   __ sll(Rindex, exact_log2(itableMethodEntry::size() * wordSize), Rindex);       // Rindex *= 8;
3248   __ add(Rscratch, Rindex, Rscratch);
3249   __ ld_ptr(RklassOop, Rscratch, G5_method);
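       // itable dispatch in outline (sketch of the lookup just emitted): walk the
       // itableOffsetEntry records in the receiver's klassOop until
       // entry->interface() == Rinterface (a NULL entry means the interface is not
       // implemented), then
       //   G5_method = *(RklassOop + entry->offset() + Rindex * sizeof(itableMethodEntry));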
3250 
3251   // Check for abstract method error.
3252   {
3253     Label ok;
3254     __ tst(G5_method);
3255     __ brx(Assembler::notZero, false, Assembler::pt, ok);
3256     __ delayed()->nop();
3257     call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_AbstractMethodError));
3258     __ should_not_reach_here();
3259     __ bind(ok);
3260   }
3261 
3262   Register Rcall = Rinterface;
3263   assert_different_registers(Rcall, G5_method, Gargs, Rret);
3264 
3265   __ verify_oop(G5_method);
3266   __ call_from_interpreter(Rcall, Gargs, Rret);
3267 
3268 }
3269 
3270 
3271 void TemplateTable::invokedynamic(int byte_no) {
3272   transition(vtos, vtos);
3273   assert(byte_no == f1_oop, "use this argument");
3274 
3275   if (!EnableInvokeDynamic) {
3276     // We should not encounter this bytecode if !EnableInvokeDynamic.


3391     // if there is enough space, we do not CAS and do not clear
3392     __ cmp(RnewTopValue, RendValue);
3393     if(ZeroTLAB) {
3394       // the fields have already been cleared
3395       __ brx(Assembler::lessEqualUnsigned, true, Assembler::pt, initialize_header);
3396     } else {
3397       // initialize both the header and fields
3398       __ brx(Assembler::lessEqualUnsigned, true, Assembler::pt, initialize_object);
3399     }
3400     __ delayed()->st_ptr(RnewTopValue, G2_thread, in_bytes(JavaThread::tlab_top_offset()));
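         // TLAB bump-pointer fast path in outline (sketch):
         //   new_top = tlab_top + size;
         //   if (new_top <= tlab_end) { tlab_top = new_top; object = old_top; }
         //   else decide, based on refill_waste_limit vs. remaining free space,
         //        whether to take the slow case (refill) or allocate in the shared eden.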
3401 
3402     if (allow_shared_alloc) {
3403       // Check if tlab should be discarded (refill_waste_limit >= free)
3404       __ ld_ptr(G2_thread, in_bytes(JavaThread::tlab_refill_waste_limit_offset()), RtlabWasteLimitValue);
3405       __ sub(RendValue, RoldTopValue, RfreeValue);
3406 #ifdef _LP64
3407       __ srlx(RfreeValue, LogHeapWordSize, RfreeValue);
3408 #else
3409       __ srl(RfreeValue, LogHeapWordSize, RfreeValue);
3410 #endif
3411       __ cmp(RtlabWasteLimitValue, RfreeValue);
3412       __ brx(Assembler::greaterEqualUnsigned, false, Assembler::pt, slow_case); // tlab waste is small
3413       __ delayed()->nop();
3414 
3415       // increment waste limit to prevent getting stuck on this slow path
3416       __ add(RtlabWasteLimitValue, ThreadLocalAllocBuffer::refill_waste_limit_increment(), RtlabWasteLimitValue);
3417       __ st_ptr(RtlabWasteLimitValue, G2_thread, in_bytes(JavaThread::tlab_refill_waste_limit_offset()));
3418     } else {
3419       // No allocation in the shared eden.
3420       __ br(Assembler::always, false, Assembler::pt, slow_case);
3421       __ delayed()->nop();
3422     }
3423   }
3424 
3425   // Allocation in the shared Eden
3426   if (allow_shared_alloc) {
3427     Register RoldTopValue = G1_scratch;
3428     Register RtopAddr = G3_scratch;
3429     Register RnewTopValue = RallocatedObject;
3430     Register RendValue = Rscratch;
3431 
3432     __ set((intptr_t)Universe::heap()->top_addr(), RtopAddr);
3433 
3434     Label retry;
3435     __ bind(retry);
3436     __ set((intptr_t)Universe::heap()->end_addr(), RendValue);
3437     __ ld_ptr(RendValue, 0, RendValue);
3438     __ ld_ptr(RtopAddr, 0, RoldTopValue);
3439     __ add(RoldTopValue, Roffset, RnewTopValue);
3440 
3441     // RnewTopValue contains the top address after the new object
3442     // has been allocated.
3443     __ cmp(RnewTopValue, RendValue);
3444     __ brx(Assembler::greaterUnsigned, false, Assembler::pn, slow_case);
3445     __ delayed()->nop();
3446 
3447     __ casx_under_lock(RtopAddr, RoldTopValue, RnewTopValue,
3448       VM_Version::v9_instructions_work() ? NULL :
3449       (address)StubRoutines::Sparc::atomic_memory_operation_lock_addr());
3450 
3451     // if someone beat us on the allocation, try again, otherwise continue
3452     __ cmp(RoldTopValue, RnewTopValue);
3453     __ brx(Assembler::notEqual, false, Assembler::pn, retry);
3454     __ delayed()->nop();
3455 
3456     // bump total bytes allocated by this thread
3457     // RoldTopValue and RtopAddr are dead, so can use G1 and G3
3458     __ incr_allocated_bytes(Roffset, G1_scratch, G3_scratch);
3459   }
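     // Shared-eden allocation above in outline (sketch of the CAS retry loop):
     //   do {
     //     old_top = *heap_top;
     //     new_top = old_top + size;
     //     if (new_top > *heap_end) goto slow_case;
     //   } while (CAS(heap_top, old_top, new_top) failed);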
3460 
3461   if (UseTLAB || Universe::heap()->supports_inline_contig_alloc()) {
3462     // clear object fields
3463     __ bind(initialize_object);
3464     __ deccc(Roffset, sizeof(oopDesc));
3465     __ br(Assembler::zero, false, Assembler::pt, initialize_header);
3466     __ delayed()->add(RallocatedObject, sizeof(oopDesc), G3_scratch);
3467 
3468     // initialize remaining object fields
3469     { Label loop;
3470       __ subcc(Roffset, wordSize, Roffset);
3471       __ bind(loop);
3472       //__ subcc(Roffset, wordSize, Roffset);      // executed above loop or in delay slot
3473       __ st_ptr(G0, G3_scratch, Roffset);
3474       __ br(Assembler::notEqual, false, Assembler::pt, loop);
3475       __ delayed()->subcc(Roffset, wordSize, Roffset);
3476     }
3477     __ br(Assembler::always, false, Assembler::pt, initialize_header);
3478     __ delayed()->nop();
3479   }
3480 
3481   // slow case
3482   __ bind(slow_case);
3483   __ get_2_byte_integer_at_bcp(1, G3_scratch, O2, InterpreterMacroAssembler::Unsigned);
3484   __ get_constant_pool(O1);
3485 
3486   call_VM(Otos_i, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), O1, O2);
3487 
3488   __ ba(false, done);
3489   __ delayed()->nop();
3490 
3491   // Initialize the header: mark, klass
3492   __ bind(initialize_header);
3493 
3494   if (UseBiasedLocking) {
3495     __ ld_ptr(RinstanceKlass, Klass::prototype_header_offset_in_bytes() + sizeof(oopDesc), G4_scratch);
3496   } else {
3497     __ set((intptr_t)markOopDesc::prototype(), G4_scratch);
3498   }
3499   __ st_ptr(G4_scratch, RallocatedObject, oopDesc::mark_offset_in_bytes());       // mark
3500   __ store_klass_gap(G0, RallocatedObject);         // klass gap if compressed
3501   __ store_klass(RinstanceKlass, RallocatedObject); // klass (last for cms)
3502 
3503   {
3504     SkipIfEqual skip_if(
3505       _masm, G4_scratch, &DTraceAllocProbes, Assembler::zero);
3506     // Trigger dtrace event
3507     __ push(atos);
3508     __ call_VM_leaf(noreg,
3509        CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_object_alloc), O0);


3533 
3534 void TemplateTable::arraylength() {
3535   transition(atos, itos);
3536   Label ok;
3537   __ verify_oop(Otos_i);
3538   __ tst(Otos_i);
3539   __ throw_if_not_1_x( Assembler::notZero, ok );
3540   __ delayed()->ld(Otos_i, arrayOopDesc::length_offset_in_bytes(), Otos_i);
3541   __ throw_if_not_2( Interpreter::_throw_NullPointerException_entry, G3_scratch, ok);
3542 }
3543 
3544 
3545 void TemplateTable::checkcast() {
3546   transition(atos, atos);
3547   Label done, is_null, quicked, cast_ok, resolved;
3548   Register Roffset = G1_scratch;
3549   Register RobjKlass = O5;
3550   Register RspecifiedKlass = O4;
3551 
3552   // Check for casting a NULL
3553   __ br_null(Otos_i, false, Assembler::pn, is_null);
3554   __ delayed()->nop();
3555 
3556   // Get value klass in RobjKlass
3557   __ load_klass(Otos_i, RobjKlass); // get value klass
3558 
3559   // Get constant pool tag
3560   __ get_2_byte_integer_at_bcp(1, Lscratch, Roffset, InterpreterMacroAssembler::Unsigned);
3561 
3562   // See if the checkcast has been quickened
3563   __ get_cpool_and_tags(Lscratch, G3_scratch);
3564   __ add(G3_scratch, typeArrayOopDesc::header_size(T_BYTE) * wordSize, G3_scratch);
3565   __ ldub(G3_scratch, Roffset, G3_scratch);
3566   __ cmp(G3_scratch, JVM_CONSTANT_Class);
3567   __ br(Assembler::equal, true, Assembler::pt, quicked);
3568   __ delayed()->sll(Roffset, LogBytesPerWord, Roffset);
3569 
3570   __ push_ptr(); // save receiver for result, and for GC
3571   call_VM(RspecifiedKlass, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc) );
3572   __ pop_ptr(Otos_i, G3_scratch); // restore receiver
3573 
3574   __ br(Assembler::always, false, Assembler::pt, resolved);
3575   __ delayed()->nop();
3576 
3577   // Extract target class from constant pool
3578   __ bind(quicked);
3579   __ add(Roffset, sizeof(constantPoolOopDesc), Roffset);
3580   __ ld_ptr(Lscratch, Roffset, RspecifiedKlass);
3581   __ bind(resolved);
3582   __ load_klass(Otos_i, RobjKlass); // get value klass
3583 
3584   // Generate a fast subtype check.  Branch to cast_ok if no
3585   // failure.  Throw exception if failure.
3586   __ gen_subtype_check( RobjKlass, RspecifiedKlass, G3_scratch, G4_scratch, G1_scratch, cast_ok );
3587 
3588   // Not a subtype; so must throw exception
3589   __ throw_if_not_x( Assembler::never, Interpreter::_throw_ClassCastException_entry, G3_scratch );
3590 
3591   __ bind(cast_ok);
3592 
3593   if (ProfileInterpreter) {
3594     __ ba(false, done);
3595     __ delayed()->nop();
3596   }
3597   __ bind(is_null);
3598   __ profile_null_seen(G3_scratch);
3599   __ bind(done);
3600 }
3601 
3602 
3603 void TemplateTable::instanceof() {
3604   Label done, is_null, quicked, resolved;
3605   transition(atos, itos);
3606   Register Roffset = G1_scratch;
3607   Register RobjKlass = O5;
3608   Register RspecifiedKlass = O4;
3609 
3610   // Check for a NULL object
3611   __ br_null(Otos_i, false, Assembler::pt, is_null);
3612   __ delayed()->nop();
3613 
3614   // Get value klass in RobjKlass
3615   __ load_klass(Otos_i, RobjKlass); // get value klass
3616 
3617   // Get constant pool tag
3618   __ get_2_byte_integer_at_bcp(1, Lscratch, Roffset, InterpreterMacroAssembler::Unsigned);
3619 
3620   // See if the instanceof has been quickened
3621   __ get_cpool_and_tags(Lscratch, G3_scratch);
3622   __ add(G3_scratch, typeArrayOopDesc::header_size(T_BYTE) * wordSize, G3_scratch);
3623   __ ldub(G3_scratch, Roffset, G3_scratch);
3624   __ cmp(G3_scratch, JVM_CONSTANT_Class);
3625   __ br(Assembler::equal, true, Assembler::pt, quicked);
3626   __ delayed()->sll(Roffset, LogBytesPerWord, Roffset);
3627 
3628   __ push_ptr(); // save receiver for result, and for GC
3629   call_VM(RspecifiedKlass, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc) );
3630   __ pop_ptr(Otos_i, G3_scratch); // restore receiver
3631 
3632   __ br(Assembler::always, false, Assembler::pt, resolved);
3633   __ delayed()->nop();
3634 
3635 
3636   // Extract target class from constant pool
3637   __ bind(quicked);
3638   __ add(Roffset, sizeof(constantPoolOopDesc), Roffset);
3639   __ get_constant_pool(Lscratch);
3640   __ ld_ptr(Lscratch, Roffset, RspecifiedKlass);
3641   __ bind(resolved);
3642   __ load_klass(Otos_i, RobjKlass); // get value klass
3643 
3644   // Generate a fast subtype check.  Branch to done if no
3645   // failure.  Return 0 if failure.
3646   __ or3(G0, 1, Otos_i);      // set result assuming quick tests succeed
3647   __ gen_subtype_check( RobjKlass, RspecifiedKlass, G3_scratch, G4_scratch, G1_scratch, done );
3648   // Not a subtype; return 0;
3649   __ clr( Otos_i );
3650 
3651   if (ProfileInterpreter) {
3652     __ ba(false, done);
3653     __ delayed()->nop();
3654   }
3655   __ bind(is_null);
3656   __ profile_null_seen(G3_scratch);
3657   __ bind(done);
3658 }
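// --- Illustrative sketch, not part of this file: instanceof differs from checkcast only
// --- in producing an int result instead of throwing.  The template presets Otos_i to 1
// --- and clears it if the subtype check fails; a null receiver yields 0.  IKlass/IObj
// --- are hypothetical stand-ins, as in the checkcast sketch above.
struct IKlass { const IKlass* super; };
struct IObj   { const IKlass* klass; };

static int instanceof_sketch(const IObj* obj, const IKlass* specified) {
  if (obj == nullptr) return 0;                      // is_null path: profile and push 0
  for (const IKlass* k = obj->klass; k != nullptr; k = k->super)
    if (k == specified) return 1;                    // gen_subtype_check succeeded
  return 0;                                          // Otos_i cleared on failure
}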
3659 
3660 void TemplateTable::_breakpoint() {
3661 
3662    // Note: We get here even if we are single stepping.
3663    // jbug insists on setting breakpoints at every bytecode
3664    // even if we are in single step mode.
3665 
3666    transition(vtos, vtos);
3667    // get the unpatched byte code
3668    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::get_original_bytecode_at), Lmethod, Lbcp);
3669    __ mov(O0, Lbyte_code);
3670 
3671    // post the breakpoint event
3672    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::_breakpoint), Lmethod, Lbcp);
3673 


3707   // monitorenter returns true).
3708 
3709   {   Label ok;
3710     __ tst(Otos_i);
3711     __ throw_if_not_1_x( Assembler::notZero,  ok);
3712     __ delayed()->mov(Otos_i, Lscratch); // save obj
3713     __ throw_if_not_2( Interpreter::_throw_NullPointerException_entry, G3_scratch, ok);
3714   }
3715 
3716   assert(O0 == Otos_i, "Be sure where the object to lock is");
3717 
3718   // find a free slot in the monitor block
3719 
3720 
3721   // initialize entry pointer
3722   __ clr(O1); // points to free slot or NULL
3723 
3724   {
3725     Label entry, loop, exit;
3726     __ add( __ top_most_monitor(), O2 ); // last one to check
3727     __ ba( false, entry );
3728     __ delayed()->mov( Lmonitors, O3 ); // first one to check
3729 
3730 
3731     __ bind( loop );
3732 
3733     __ verify_oop(O4);          // verify each monitor's oop
3734     __ tst(O4); // is this entry unused?
3735     if (VM_Version::v9_instructions_work())
3736       __ movcc( Assembler::zero, false, Assembler::ptr_cc, O3, O1);
3737     else {
3738       Label L;
3739       __ br( Assembler::zero, true, Assembler::pn, L );
3740       __ delayed()->mov(O3, O1); // remember this one if match
3741       __ bind(L);
3742     }
3743 
3744     __ cmp(O4, O0); // check if current entry is for same object
3745     __ brx( Assembler::equal, false, Assembler::pn, exit );
3746     __ delayed()->inc( O3, frame::interpreter_frame_monitor_size() * wordSize ); // check next one
3747 
3748     __ bind( entry );
3749 
3750     __ cmp( O3, O2 );
3751     __ brx( Assembler::lessEqualUnsigned, true, Assembler::pt, loop );
3752     __ delayed()->ld_ptr(O3, BasicObjectLock::obj_offset_in_bytes(), O4);
3753 
3754     __ bind( exit );
3755   }
3756 
3757   { Label allocated;
3758 
3759     // found free slot?
3760     __ br_notnull(O1, false, Assembler::pn, allocated);
3761     __ delayed()->nop();
3762 
3763     __ add_monitor_to_stack( false, O2, O3 );
3764     __ mov(Lmonitors, O1);
3765 
3766     __ bind(allocated);
3767   }
3768 
3769   // Increment bcp to point to the next bytecode, so exception handling for async. exceptions works correctly.
3770   // The object has already been popped from the stack, so the expression stack looks correct.
3771   __ inc(Lbcp);
3772 
3773   __ st_ptr(O0, O1, BasicObjectLock::obj_offset_in_bytes()); // store object
3774   __ lock_object(O1, O0);
3775 
3776   // check if there's enough space on the stack for the monitors after locking
3777   __ generate_stack_overflow_check(0);
3778 
3779   // The bcp has already been incremented. Just need to dispatch to next instruction.
3780   __ dispatch_next(vtos);
3781 }
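// --- Illustrative sketch, not part of this file: the free-slot scan in the monitor block
// --- that monitorenter emits above.  MonitorSlot is a hypothetical stand-in for
// --- BasicObjectLock; 'first' corresponds to Lmonitors (checked first) and 'last' to
// --- top_most_monitor() (checked last).
struct MonitorSlot { void* obj; };            // obj == nullptr marks an unused slot

static MonitorSlot* find_monitor_slot(MonitorSlot* first, MonitorSlot* last, void* obj) {
  MonitorSlot* free_slot = nullptr;           // O1: points to free slot or NULL
  for (MonitorSlot* cur = first; cur <= last; ++cur) {
    if (cur->obj == nullptr) free_slot = cur; // remember an unused entry
    if (cur->obj == obj) break;               // entry for the same object: stop searching
  }
  return free_slot;  // nullptr => caller grows the monitor block (add_monitor_to_stack)
}
// After a slot is chosen, the object is stored into it and lock_object() does the actual
// locking; Lbcp is bumped first so async exceptions unwind to the next bytecode.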
3782 
3783 
3784 void TemplateTable::monitorexit() {
3785   transition(atos, vtos);
3786   __ verify_oop(Otos_i);
3787   __ tst(Otos_i);
3788   __ throw_if_not_x( Assembler::notZero, Interpreter::_throw_NullPointerException_entry, G3_scratch );
3789 
3790   assert(O0 == Otos_i, "just checking");
3791 
3792   { Label entry, loop, found;
3793     __ add( __ top_most_monitor(), O2 ); // last one to check
3794     __ ba(false, entry );
3795     // use Lscratch to hold the monitor element to check; start with the most recent monitor.
3796     // By using a local it survives the call to the C routine.
3797     __ delayed()->mov( Lmonitors, Lscratch );
3798 
3799     __ bind( loop );
3800 
3801     __ verify_oop(O4);          // verify each monitor's oop
3802     __ cmp(O4, O0); // check if current entry is for desired object
3803     __ brx( Assembler::equal, true, Assembler::pt, found );
3804     __ delayed()->mov(Lscratch, O1); // pass found entry as argument to monitorexit
3805 
3806     __ inc( Lscratch, frame::interpreter_frame_monitor_size() * wordSize ); // advance to next
3807 
3808     __ bind( entry );
3809 
3810     __ cmp( Lscratch, O2 );
3811     __ brx( Assembler::lessEqualUnsigned, true, Assembler::pt, loop );
3812     __ delayed()->ld_ptr(Lscratch, BasicObjectLock::obj_offset_in_bytes(), O4);
3813 
3814     call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_illegal_monitor_state_exception));




 142 //----------------------------------------------------------------------------------------------------
 143 // Miscellaneous helper routines
 144 
 145 
 146 Address TemplateTable::at_bcp(int offset) {
 147   assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
 148   return Address(Lbcp, offset);
 149 }
 150 
 151 
 152 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register Rbyte_code,
 153                                    Register Rscratch,
 154                                    bool load_bc_into_scratch /*=true*/) {
 155   // With sharing on, may need to test methodOop flag.
 156   if (!RewriteBytecodes) return;
 157   if (load_bc_into_scratch) __ set(bc, Rbyte_code);
 158   Label patch_done;
 159   if (JvmtiExport::can_post_breakpoint()) {
 160     Label fast_patch;
 161     __ ldub(at_bcp(0), Rscratch);
 162     __ cmp_and_br_short(Rscratch, Bytecodes::_breakpoint, Assembler::notEqual, Assembler::pt, fast_patch);


 163     // perform the quickening, slowly, in the bowels of the breakpoint table
 164     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::set_original_bytecode_at), Lmethod, Lbcp, Rbyte_code);
 165     __ ba_short(patch_done);

 166     __ bind(fast_patch);
 167   }
 168 #ifdef ASSERT
 169   Bytecodes::Code orig_bytecode =  Bytecodes::java_code(bc);
 170   Label okay;
 171   __ ldub(at_bcp(0), Rscratch);
 172   __ cmp(Rscratch, orig_bytecode);
 173   __ br(Assembler::equal, false, Assembler::pt, okay);
 174   __ delayed() ->cmp(Rscratch, Rbyte_code);
 175   __ br(Assembler::equal, false, Assembler::pt, okay);
 176   __ delayed()->nop();
 177   __ stop("Rewriting wrong bytecode location");
 178   __ bind(okay);
 179 #endif
 180   __ stb(Rbyte_code, at_bcp(0));
 181   __ bind(patch_done);
 182 }
 183 
 184 //----------------------------------------------------------------------------------------------------
 185 // Individual instructions


 261   __ get_2_byte_integer_at_bcp(1, G3_scratch, Otos_i, InterpreterMacroAssembler::Signed);
 262 }
 263 
 264 void TemplateTable::ldc(bool wide) {
 265   transition(vtos, vtos);
 266   Label call_ldc, notInt, isString, notString, notClass, exit;
 267 
 268   if (wide) {
 269     __ get_2_byte_integer_at_bcp(1, G3_scratch, O1, InterpreterMacroAssembler::Unsigned);
 270   } else {
 271     __ ldub(Lbcp, 1, O1);
 272   }
 273   __ get_cpool_and_tags(O0, O2);
 274 
 275   const int base_offset = constantPoolOopDesc::header_size() * wordSize;
 276   const int tags_offset = typeArrayOopDesc::header_size(T_BYTE) * wordSize;
 277 
 278   // get type from tags
 279   __ add(O2, tags_offset, O2);
 280   __ ldub(O2, O1, O2);
 281   // unresolved string? If so, must resolve
 282   __ cmp_and_brx_short(O2, JVM_CONSTANT_UnresolvedString, Assembler::equal, Assembler::pt, call_ldc);

 283 
 284   // unresolved class? If so, must resolve
 285   __ cmp_and_brx_short(O2, JVM_CONSTANT_UnresolvedClass, Assembler::equal, Assembler::pt, call_ldc);

 286 
 287   // unresolved class in error state
 288   __ cmp_and_brx_short(O2, JVM_CONSTANT_UnresolvedClassInError, Assembler::equal, Assembler::pn, call_ldc);

 289 
 290   __ cmp(O2, JVM_CONSTANT_Class);      // need to call vm to get java mirror of the class
 291   __ brx(Assembler::notEqual, true, Assembler::pt, notClass);
 292   __ delayed()->add(O0, base_offset, O0);
 293 
 294   __ bind(call_ldc);
 295   __ set(wide, O1);
 296   call_VM(Otos_i, CAST_FROM_FN_PTR(address, InterpreterRuntime::ldc), O1);
 297   __ push(atos);
 298   __ ba_short(exit);

 299 
 300   __ bind(notClass);
 301  // __ add(O0, base_offset, O0);
 302   __ sll(O1, LogBytesPerWord, O1);
 303   __ cmp(O2, JVM_CONSTANT_Integer);
 304   __ brx(Assembler::notEqual, true, Assembler::pt, notInt);
 305   __ delayed()->cmp(O2, JVM_CONSTANT_String);
 306   __ ld(O0, O1, Otos_i);
 307   __ push(itos);
 308   __ ba_short(exit);

 309 
 310   __ bind(notInt);
 311  // __ cmp(O2, JVM_CONSTANT_String);
 312   __ brx(Assembler::equal, true, Assembler::pt, isString);
 313   __ delayed()->cmp(O2, JVM_CONSTANT_Object);
 314   __ brx(Assembler::notEqual, true, Assembler::pt, notString);
 315   __ delayed()->ldf(FloatRegisterImpl::S, O0, O1, Ftos_f);
 316   __ bind(isString);
 317   __ ld_ptr(O0, O1, Otos_i);
 318   __ verify_oop(Otos_i);
 319   __ push(atos);
 320   __ ba_short(exit);

 321 
 322   __ bind(notString);
 323  // __ ldf(FloatRegisterImpl::S, O0, O1, Ftos_f);
 324   __ push(ftos);
 325 
 326   __ bind(exit);
 327 }
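// --- Illustrative sketch, not part of this file: the tag dispatch ldc performs above.
// --- CpTag is a hypothetical stand-in; only the split between slow-path and
// --- inline-loaded tags is intended to match the generated code.
#include <cstdint>
enum class CpTag : uint8_t { UnresolvedString, UnresolvedClass, UnresolvedClassInError,
                             Class, Integer, Float, String, Object };

// Unresolved entries and Class entries (which need the java mirror) go through the
// call_VM slow path (InterpreterRuntime::ldc); everything else is loaded straight from
// the constant pool: int -> itos, float -> ftos, String/Object -> atos.
static bool ldc_takes_slow_path(CpTag tag) {
  switch (tag) {
    case CpTag::UnresolvedString:
    case CpTag::UnresolvedClass:
    case CpTag::UnresolvedClassInError:
    case CpTag::Class:
      return true;
    default:
      return false;
  }
}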
 328 
 329 // Fast path for caching oop constants.
 330 // %%% We should use this to handle Class and String constants also.
 331 // %%% It will simplify the ldc/primitive path considerably.
 332 void TemplateTable::fast_aldc(bool wide) {
 333   transition(vtos, atos);
 334 
 335   if (!EnableInvokeDynamic) {
 336     // We should not encounter this bytecode if !EnableInvokeDynamic.
 337     // The verifier will stop it.  However, if we get past the verifier,
 338     // this will stop the thread in a reasonable way, without crashing the JVM.
 339     __ call_VM(noreg, CAST_FROM_FN_PTR(address,
 340                      InterpreterRuntime::throw_IncompatibleClassChangeError));
 341     // the call_VM checks for exception, so we should never return here.
 342     __ should_not_reach_here();
 343     return;
 344   }
 345 
 346   Register Rcache = G3_scratch;
 347   Register Rscratch = G4_scratch;
 348 
 349   resolve_cache_and_index(f1_oop, Otos_i, Rcache, Rscratch, wide ? sizeof(u2) : sizeof(u1));
 350 
 351   __ verify_oop(Otos_i);
 352 
 353   Label L_done;
 354   const Register Rcon_klass = G3_scratch;  // same as Rcache
 355   const Register Rarray_klass = G4_scratch;  // same as Rscratch
 356   __ load_klass(Otos_i, Rcon_klass);
 357   AddressLiteral array_klass_addr((address)Universe::systemObjArrayKlassObj_addr());
 358   __ load_contents(array_klass_addr, Rarray_klass);
 359   __ cmp_and_brx_short(Rarray_klass, Rcon_klass, Assembler::notEqual, Assembler::pt, L_done);


 360   __ ld(Address(Otos_i, arrayOopDesc::length_offset_in_bytes()), Rcon_klass);
 361   __ tst(Rcon_klass);
 362   __ brx(Assembler::zero, true, Assembler::pt, L_done);
 363   __ delayed()->clr(Otos_i);    // executed only if branch is taken
 364 
 365   // Load the exception from the system-array which wraps it:
 366   __ load_heap_oop(Otos_i, arrayOopDesc::base_offset_in_bytes(T_OBJECT), Otos_i);
 367   __ throw_if_not_x(Assembler::never, Interpreter::throw_exception_entry(), G3_scratch);
 368 
 369   __ bind(L_done);
 370 }
 371 
 372 void TemplateTable::ldc2_w() {
 373   transition(vtos, vtos);
 374   Label retry, resolved, Long, exit;
 375 
 376   __ bind(retry);
 377   __ get_2_byte_integer_at_bcp(1, G3_scratch, O1, InterpreterMacroAssembler::Unsigned);
 378   __ get_cpool_and_tags(O0, O2);
 379 
 380   const int base_offset = constantPoolOopDesc::header_size() * wordSize;
 381   const int tags_offset = typeArrayOopDesc::header_size(T_BYTE) * wordSize;
 382   // get type from tags
 383   __ add(O2, tags_offset, O2);
 384   __ ldub(O2, O1, O2);
 385 
 386   __ sll(O1, LogBytesPerWord, O1);
 387   __ add(O0, O1, G3_scratch);
 388 
 389   __ cmp_and_brx_short(O2, JVM_CONSTANT_Double, Assembler::notEqual, Assembler::pt, Long);


 390   // A double can be placed at word-aligned locations in the constant pool.
 391   // Check out Conversions.java for an example.
 392   // Also constantPoolOopDesc::header_size() is 20, which makes it very difficult
 393   // to double-align a double in the constant pool.  SG, 11/7/97
 394 #ifdef _LP64
 395   __ ldf(FloatRegisterImpl::D, G3_scratch, base_offset, Ftos_d);
 396 #else
 397   FloatRegister f = Ftos_d;
 398   __ ldf(FloatRegisterImpl::S, G3_scratch, base_offset, f);
 399   __ ldf(FloatRegisterImpl::S, G3_scratch, base_offset + sizeof(jdouble)/2,
 400          f->successor());
 401 #endif
 402   __ push(dtos);
 403   __ ba_short(exit);

 404 
 405   __ bind(Long);
 406 #ifdef _LP64
 407   __ ldx(G3_scratch, base_offset, Otos_l);
 408 #else
 409   __ ld(G3_scratch, base_offset, Otos_l);
 410   __ ld(G3_scratch, base_offset + sizeof(jlong)/2, Otos_l->successor());
 411 #endif
 412   __ push(ltos);
 413 
 414   __ bind(exit);
 415 }
 416 
 417 
 418 void TemplateTable::locals_index(Register reg, int offset) {
 419   __ ldub( at_bcp(offset), reg );
 420 }
 421 
 422 
 423 void TemplateTable::locals_index_wide(Register reg) {
 424   // offset is 2, not 1, because Lbcp points to wide prefix code
 425   __ get_2_byte_integer_at_bcp(2, G4_scratch, reg, InterpreterMacroAssembler::Unsigned);
 426 }
 427 
 428 void TemplateTable::iload() {
 429   transition(vtos, itos);
 430   // Rewrite iload,iload  pair into fast_iload2
 431   //         iload,caload pair into fast_icaload
 432   if (RewriteFrequentPairs) {
 433     Label rewrite, done;
 434 
 435     // get next byte
 436     __ ldub(at_bcp(Bytecodes::length_for(Bytecodes::_iload)), G3_scratch);
 437 
 438     // if the next bytecode is _iload, wait to rewrite to _fast_iload2.  We only want
 439     // to rewrite the last two iloads in a pair.  Seeing _fast_iload instead means that
 440     // the following bytecode is neither an iload nor a caload, so the current iload
 441     // and the next one form an iload pair.
 442     __ cmp_and_br_short(G3_scratch, (int)Bytecodes::_iload, Assembler::equal, Assembler::pn, done);


 443 
 444     __ cmp(G3_scratch, (int)Bytecodes::_fast_iload);
 445     __ br(Assembler::equal, false, Assembler::pn, rewrite);
 446     __ delayed()->set(Bytecodes::_fast_iload2, G4_scratch);
 447 
 448     __ cmp(G3_scratch, (int)Bytecodes::_caload);
 449     __ br(Assembler::equal, false, Assembler::pn, rewrite);
 450     __ delayed()->set(Bytecodes::_fast_icaload, G4_scratch);
 451 
 452     __ set(Bytecodes::_fast_iload, G4_scratch);  // don't check again
 453     // rewrite
 454     // G4_scratch: fast bytecode
 455     __ bind(rewrite);
 456     patch_bytecode(Bytecodes::_iload, G4_scratch, G3_scratch, false);
 457     __ bind(done);
 458   }
 459 
 460   // Get the local value into tos
 461   locals_index(G3_scratch);
 462   __ access_local_int( G3_scratch, Otos_i );


 664   //
 665   // _aload_0, _fast_igetfield (itos)
 666   // _aload_0, _fast_agetfield (atos)
 667   // _aload_0, _fast_fgetfield (ftos)
 668   //
 669   // occur frequently. If RewriteFrequentPairs is set, the (slow) _aload_0
 670   // bytecode checks the next bytecode and then rewrites the current
 671   // bytecode into a pair bytecode; otherwise it rewrites the current
 672   // bytecode into _fast_aload_0, which doesn't do the pair check anymore.
 673   //
 674   if (RewriteFrequentPairs) {
 675     Label rewrite, done;
 676 
 677     // get next byte
 678     __ ldub(at_bcp(Bytecodes::length_for(Bytecodes::_aload_0)), G3_scratch);
 679 
 680     // do actual aload_0
 681     aload(0);
 682 
 683     // if _getfield then wait with rewrite
 684     __ cmp_and_br_short(G3_scratch, (int)Bytecodes::_getfield, Assembler::equal, Assembler::pn, done);


 685 
 686     // if _igetfield then rewrite to _fast_iaccess_0
 687     assert(Bytecodes::java_code(Bytecodes::_fast_iaccess_0) == Bytecodes::_aload_0, "adjust fast bytecode def");
 688     __ cmp(G3_scratch, (int)Bytecodes::_fast_igetfield);
 689     __ br(Assembler::equal, false, Assembler::pn, rewrite);
 690     __ delayed()->set(Bytecodes::_fast_iaccess_0, G4_scratch);
 691 
 692     // if _agetfield then rewrite to _fast_aaccess_0
 693     assert(Bytecodes::java_code(Bytecodes::_fast_aaccess_0) == Bytecodes::_aload_0, "adjust fast bytecode def");
 694     __ cmp(G3_scratch, (int)Bytecodes::_fast_agetfield);
 695     __ br(Assembler::equal, false, Assembler::pn, rewrite);
 696     __ delayed()->set(Bytecodes::_fast_aaccess_0, G4_scratch);
 697 
 698     // if _fgetfield then rewrite to _fast_faccess_0
 699     assert(Bytecodes::java_code(Bytecodes::_fast_faccess_0) == Bytecodes::_aload_0, "adjust fast bytecode def");
 700     __ cmp(G3_scratch, (int)Bytecodes::_fast_fgetfield);
 701     __ br(Assembler::equal, false, Assembler::pn, rewrite);
 702     __ delayed()->set(Bytecodes::_fast_faccess_0, G4_scratch);
 703 
 704     // else rewrite to _fast_aload_0


 832   // Ftos_d: val
 833   // O3: array
 834   __ index_check(O3, O2, LogBytesPerLong, G3_scratch, O2);
 835   __ stf(FloatRegisterImpl::D, Ftos_d, O2, arrayOopDesc::base_offset_in_bytes(T_DOUBLE));
 836 }
 837 
 838 
 839 void TemplateTable::aastore() {
 840   Label store_ok, is_null, done;
 841   transition(vtos, vtos);
 842   __ ld_ptr(Lesp, Interpreter::expr_offset_in_bytes(0), Otos_i);
 843   __ ld(Lesp, Interpreter::expr_offset_in_bytes(1), O2);         // get index
 844   __ ld_ptr(Lesp, Interpreter::expr_offset_in_bytes(2), O3);     // get array
 845   // Otos_i: val
 846   // O2: index
 847   // O3: array
 848   __ verify_oop(Otos_i);
 849   __ index_check_without_pop(O3, O2, UseCompressedOops ? 2 : LogBytesPerWord, G3_scratch, O1);
 850 
 851   // do array store check - check for NULL value first
 852   __ br_null_short( Otos_i, Assembler::pn, is_null );

 853 
 854   __ load_klass(O3, O4); // get array klass
 855   __ load_klass(Otos_i, O5); // get value klass
 856 
 857   // do fast instanceof cache test
 858 
 859   __ ld_ptr(O4,     sizeof(oopDesc) + objArrayKlass::element_klass_offset_in_bytes(),  O4);
 860 
 861   assert(Otos_i == O0, "just checking");
 862 
 863   // Otos_i:    value
 864   // O1:        addr - offset
 865   // O2:        index
 866   // O3:        array
 867   // O4:        array element klass
 868   // O5:        value klass
 869 
 870   // Address element(O1, 0, arrayOopDesc::base_offset_in_bytes(T_OBJECT));
 871 
 872   // Generate a fast subtype check.  Branch to store_ok if no
 873   // failure.  Throw if failure.
 874   __ gen_subtype_check( O5, O4, G3_scratch, G4_scratch, G1_scratch, store_ok );
 875 
 876   // Not a subtype; so must throw exception
 877   __ throw_if_not_x( Assembler::never, Interpreter::_throw_ArrayStoreException_entry, G3_scratch );
 878 
 879   // Store is OK.
 880   __ bind(store_ok);
 881   do_oop_store(_masm, O1, noreg, arrayOopDesc::base_offset_in_bytes(T_OBJECT), Otos_i, G3_scratch, _bs->kind(), true);
 882 
 883   __ ba(done);
 884   __ delayed()->inc(Lesp, 3* Interpreter::stackElementSize); // adj sp (pops array, index and value)
 885 
 886   __ bind(is_null);
 887   do_oop_store(_masm, O1, noreg, arrayOopDesc::base_offset_in_bytes(T_OBJECT), G0, G4_scratch, _bs->kind(), true);
 888 
 889   __ profile_null_seen(G3_scratch);
 890   __ inc(Lesp, 3* Interpreter::stackElementSize);     // adj sp (pops array, index and value)
 891   __ bind(done);
 892 }
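// --- Illustrative sketch, not part of this file: the array store check aastore performs
// --- above.  AKlass is a hypothetical single-inheritance stand-in; value_klass is null
// --- when the stored value itself is null (the is_null path, which skips the check).
struct AKlass { const AKlass* super; };

// Returns true when the store is legal; the generated code instead jumps to
// Interpreter::_throw_ArrayStoreException_entry when the subtype check fails.
static bool aastore_check(const AKlass* value_klass, const AKlass* element_klass) {
  if (value_klass == nullptr) return true;       // storing null: no type check needed
  for (const AKlass* k = value_klass; k != nullptr; k = k->super)
    if (k == element_klass) return true;         // gen_subtype_check(O5, O4, ...)
  return false;
}
// On success the oop is written through do_oop_store, which applies the GC write barrier.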
 893 
 894 
 895 void TemplateTable::bastore() {
 896   transition(itos, vtos);
 897   __ pop_i(O2); // index
 898   // Otos_i: val
 899   // O3: array
 900   __ index_check(O3, O2, 0, G3_scratch, O2);
 901   __ stb(Otos_i, O2, arrayOopDesc::base_offset_in_bytes(T_BYTE));
 902 }
 903 


1597   // Save the current Lbcp
1598   const Register O0_cur_bcp = O0;
1599   __ mov( Lbcp, O0_cur_bcp );
1600 
1601 
1602   bool increment_invocation_counter_for_backward_branches = UseCompiler && UseLoopCounter;
1603   if ( increment_invocation_counter_for_backward_branches ) {
1604     Label Lforward;
1605     // check branch direction
1606     __ br( Assembler::positive, false,  Assembler::pn, Lforward );
1607     // Bump bytecode pointer by displacement (take the branch)
1608     __ delayed()->add( O1_disp, Lbcp, Lbcp );     // add to bc addr
1609 
1610     if (TieredCompilation) {
1611       Label Lno_mdo, Loverflow;
1612       int increment = InvocationCounter::count_increment;
1613       int mask = ((1 << Tier0BackedgeNotifyFreqLog) - 1) << InvocationCounter::count_shift;
1614       if (ProfileInterpreter) {
1615         // If no method data exists, go to profile_continue.
1616         __ ld_ptr(Lmethod, methodOopDesc::method_data_offset(), G4_scratch);
1617         __ br_null_short(G4_scratch, Assembler::pn, Lno_mdo);

1618 
1619         // Increment backedge counter in the MDO
1620         Address mdo_backedge_counter(G4_scratch, in_bytes(methodDataOopDesc::backedge_counter_offset()) +
1621                                                  in_bytes(InvocationCounter::counter_offset()));
1622         __ increment_mask_and_jump(mdo_backedge_counter, increment, mask, G3_scratch, Lscratch,
1623                                    Assembler::notZero, &Lforward);
1624         __ ba_short(Loverflow);

1625       }
1626 
1627       // If there's no MDO, increment counter in methodOop
1628       __ bind(Lno_mdo);
1629       Address backedge_counter(Lmethod, in_bytes(methodOopDesc::backedge_counter_offset()) +
1630                                         in_bytes(InvocationCounter::counter_offset()));
1631       __ increment_mask_and_jump(backedge_counter, increment, mask, G3_scratch, Lscratch,
1632                                  Assembler::notZero, &Lforward);
1633       __ bind(Loverflow);
1634 
1635       // notify point for loop, pass branch bytecode
1636       __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::frequency_counter_overflow), O0_cur_bcp);
1637 
1638       // Was an OSR adapter generated?
1639       // O0 = osr nmethod
1640       __ br_null_short(O0, Assembler::pn, Lforward);

1641 
1642       // Has the nmethod been invalidated already?
1643       __ ld(O0, nmethod::entry_bci_offset(), O2);
1644       __ cmp_and_br_short(O2, InvalidOSREntryBci, Assembler::equal, Assembler::pn, Lforward);


1645 
1646       // migrate the interpreter frame off of the stack
1647 
1648       __ mov(G2_thread, L7);
1649       // save nmethod
1650       __ mov(O0, L6);
1651       __ set_last_Java_frame(SP, noreg);
1652       __ call_VM_leaf(noreg, CAST_FROM_FN_PTR(address, SharedRuntime::OSR_migration_begin), L7);
1653       __ reset_last_Java_frame();
1654       __ mov(L7, G2_thread);
1655 
1656       // move OSR nmethod to I1
1657       __ mov(L6, I1);
1658 
1659       // OSR buffer to I0
1660       __ mov(O0, I0);
1661 
1662       // remove the interpreter frame
1663       __ restore(I5_savedSP, 0, SP);
1664 


1789   __ add(Lbcp, BytesPerInt, O1);
1790   __ and3(O1, -BytesPerInt, O1);
1791   // load lo, hi
1792   __ ld(O1, 1 * BytesPerInt, O2);       // Low Bound
1793   __ ld(O1, 2 * BytesPerInt, O3);       // High Bound
1794 #ifdef _LP64
1795   // Sign extend the 32 bits
1796   __ sra ( Otos_i, 0, Otos_i );
1797 #endif /* _LP64 */
1798 
1799   // check against lo & hi
1800   __ cmp( Otos_i, O2);
1801   __ br( Assembler::less, false, Assembler::pn, default_case);
1802   __ delayed()->cmp( Otos_i, O3 );
1803   __ br( Assembler::greater, false, Assembler::pn, default_case);
1804   // lookup dispatch offset
1805   __ delayed()->sub(Otos_i, O2, O2);
1806   __ profile_switch_case(O2, O3, G3_scratch, G4_scratch);
1807   __ sll(O2, LogBytesPerInt, O2);
1808   __ add(O2, 3 * BytesPerInt, O2);
1809   __ ba(continue_execution);
1810   __ delayed()->ld(O1, O2, O2);
1811   // handle default
1812   __ bind(default_case);
1813   __ profile_switch_default(O3);
1814   __ ld(O1, 0, O2); // get default offset
1815   // continue execution
1816   __ bind(continue_execution);
1817   __ add(Lbcp, O2, Lbcp);
1818   __ dispatch_next(vtos);
1819 }
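// --- Illustrative sketch, not part of this file: the lookup the tableswitch template
// --- performs above.  'table' stands for the word-aligned jump table following the
// --- bytecode: [default, lo, hi, offset(lo), offset(lo+1), ...], all offsets relative
// --- to the tableswitch bcp (assumed already in native byte order here).
#include <cstdint>
static int32_t tableswitch_offset(const int32_t* table, int32_t key) {
  int32_t lo = table[1];                        // ld(O1, 1 * BytesPerInt, O2)
  int32_t hi = table[2];                        // ld(O1, 2 * BytesPerInt, O3)
  if (key < lo || key > hi)
    return table[0];                            // default_case: offset at the table base
  return table[3 + (key - lo)];                 // case offsets start 3 words in
}
// The interpreter then does Lbcp += offset and re-dispatches (dispatch_next).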
1820 
1821 
1822 void TemplateTable::lookupswitch() {
1823   transition(itos, itos);
1824   __ stop("lookupswitch bytecode should have been rewritten");
1825 }
1826 
1827 void TemplateTable::fast_linearswitch() {
1828   transition(itos, vtos);
1829   Label loop_entry, loop, found, continue_execution;
1830   // align bcp
1831   __ add(Lbcp, BytesPerInt, O1);
1832   __ and3(O1, -BytesPerInt, O1);
1833   // set counter
1834   __ ld(O1, BytesPerInt, O2);
1835   __ sll(O2, LogBytesPerInt + 1, O2); // in word-pairs
1836   __ add(O1, 2 * BytesPerInt, O3); // set first pair addr
1837   __ ba(loop_entry);
1838   __ delayed()->add(O3, O2, O2); // counter now points past last pair
1839 
1840   // table search
1841   __ bind(loop);
1842   __ cmp(O4, Otos_i);
1843   __ br(Assembler::equal, true, Assembler::pn, found);
1844   __ delayed()->ld(O3, BytesPerInt, O4); // offset -> O4
1845   __ inc(O3, 2 * BytesPerInt);
1846 
1847   __ bind(loop_entry);
1848   __ cmp(O2, O3);
1849   __ brx(Assembler::greaterUnsigned, true, Assembler::pt, loop);
1850   __ delayed()->ld(O3, 0, O4);
1851 
1852   // default case
1853   __ ld(O1, 0, O4); // get default offset
1854   if (ProfileInterpreter) {
1855     __ profile_switch_default(O3);
1856     __ ba_short(continue_execution);

1857   }
1858 
1859   // entry found -> get offset
1860   __ bind(found);
1861   if (ProfileInterpreter) {
1862     __ sub(O3, O1, O3);
1863     __ sub(O3, 2*BytesPerInt, O3);
1864     __ srl(O3, LogBytesPerInt + 1, O3); // in word-pairs
1865     __ profile_switch_case(O3, O1, O2, G3_scratch);
1866 
1867     __ bind(continue_execution);
1868   }
1869   __ add(Lbcp, O4, Lbcp);
1870   __ dispatch_next(vtos);
1871 }
1872 
1873 
1874 void TemplateTable::fast_binaryswitch() {
1875   transition(itos, vtos);
1876   // Implementation using the following core algorithm: (copied from Intel)


1902   assert(Otos_i == O0, "alias checking");
1903   const Register Rkey     = Otos_i;                    // already set (tosca)
1904   const Register Rarray   = O1;
1905   const Register Ri       = O2;
1906   const Register Rj       = O3;
1907   const Register Rh       = O4;
1908   const Register Rscratch = O5;
1909 
1910   const int log_entry_size = 3;
1911   const int entry_size = 1 << log_entry_size;
1912 
1913   Label found;
1914   // Find Array start
1915   __ add(Lbcp, 3 * BytesPerInt, Rarray);
1916   __ and3(Rarray, -BytesPerInt, Rarray);
1917   // initialize i & j (in delay slot)
1918   __ clr( Ri );
1919 
1920   // and start
1921   Label entry;
1922   __ ba(entry);
1923   __ delayed()->ld( Rarray, -BytesPerInt, Rj);
1924   // (Rj is already in the native byte-ordering.)
1925 
1926   // binary search loop
1927   { Label loop;
1928     __ bind( loop );
1929     // int h = (i + j) >> 1;
1930     __ sra( Rh, 1, Rh );
1931     // if (key < array[h].fast_match()) {
1932     //   j = h;
1933     // } else {
1934     //   i = h;
1935     // }
1936     __ sll( Rh, log_entry_size, Rscratch );
1937     __ ld( Rarray, Rscratch, Rscratch );
1938     // (Rscratch is already in the native byte-ordering.)
1939     __ cmp( Rkey, Rscratch );
1940     if ( VM_Version::v9_instructions_work() ) {
1941       __ movcc( Assembler::less,         false, Assembler::icc, Rh, Rj );  // j = h if (key <  array[h].fast_match())
1942       __ movcc( Assembler::greaterEqual, false, Assembler::icc, Rh, Ri );  // i = h if (key >= array[h].fast_match())


1960   // end of binary search, result index is i (must check again!)
1961   Label default_case;
1962   Label continue_execution;
1963   if (ProfileInterpreter) {
1964     __ mov( Ri, Rh );              // Save index in i for profiling
1965   }
1966   __ sll( Ri, log_entry_size, Ri );
1967   __ ld( Rarray, Ri, Rscratch );
1968   // (Rscratch is already in the native byte-ordering.)
1969   __ cmp( Rkey, Rscratch );
1970   __ br( Assembler::notEqual, true, Assembler::pn, default_case );
1971   __ delayed()->ld( Rarray, -2 * BytesPerInt, Rj ); // load default offset -> j
1972 
1973   // entry found -> j = offset
1974   __ inc( Ri, BytesPerInt );
1975   __ profile_switch_case(Rh, Rj, Rscratch, Rkey);
1976   __ ld( Rarray, Ri, Rj );
1977   // (Rj is already in the native byte-ordering.)
1978 
1979   if (ProfileInterpreter) {
1980     __ ba_short(continue_execution);

1981   }
1982 
1983   __ bind(default_case); // fall through (if not profiling)
1984   __ profile_switch_default(Ri);
1985 
1986   __ bind(continue_execution);
1987   __ add( Lbcp, Rj, Lbcp );
1988   __ dispatch_next( vtos );
1989 }
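// --- Illustrative sketch, not part of this file: the binary search encoded above (the
// --- "core algorithm copied from Intel").  MatchPair stands for one sorted
// --- (match, offset) pair of a lookupswitch; npairs and default_offset come from the
// --- header in front of the pair array.
#include <cstdint>
struct MatchPair { int32_t match; int32_t offset; };

static int32_t binaryswitch_offset(const MatchPair* pairs, int32_t npairs,
                                   int32_t default_offset, int32_t key) {
  if (npairs == 0) return default_offset;
  int32_t i = 0, j = npairs;                 // same i/j bracketing as the pseudo-code above
  while (i + 1 < j) {
    int32_t h = (i + j) >> 1;                // int h = (i + j) >> 1;
    if (key < pairs[h].match) j = h;         // j = h;
    else                      i = h;         // i = h;
  }
  // end of search: result index i must be checked again, exactly as the code above does
  return (pairs[i].match == key) ? pairs[i].offset : default_offset;
}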
1990 
1991 
1992 void TemplateTable::_return(TosState state) {
1993   transition(state, state);
1994   assert(_desc->calls_vm(), "inconsistent calls_vm information");
1995 
1996   if (_desc->bytecode() == Bytecodes::_return_register_finalizer) {
1997     assert(state == vtos, "only valid state");
1998     __ mov(G0, G3_scratch);
1999     __ access_local_ptr(G3_scratch, Otos_i);
2000     __ load_klass(Otos_i, O2);


2173   if (is_static) {
2174     __ ld_ptr(Rcache, cp_base_offset + ConstantPoolCacheEntry::f1_offset(), Robj);
2175   }
2176 }
2177 
2178 // The registers Rcache and index are expected to be set before the call.
2179 // Correct values of the Rcache and index registers are preserved.
2180 void TemplateTable::jvmti_post_field_access(Register Rcache,
2181                                             Register index,
2182                                             bool is_static,
2183                                             bool has_tos) {
2184   ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset();
2185 
2186   if (JvmtiExport::can_post_field_access()) {
2187     // Check to see if a field access watch has been set before we take
2188     // the time to call into the VM.
2189     Label Label1;
2190     assert_different_registers(Rcache, index, G1_scratch);
2191     AddressLiteral get_field_access_count_addr(JvmtiExport::get_field_access_count_addr());
2192     __ load_contents(get_field_access_count_addr, G1_scratch);
2193     __ cmp_and_br_short(G1_scratch, 0, Assembler::equal, Assembler::pt, Label1);


2194 
2195     __ add(Rcache, in_bytes(cp_base_offset), Rcache);
2196 
2197     if (is_static) {
2198       __ clr(Otos_i);
2199     } else {
2200       if (has_tos) {
2201         // save object pointer before call_VM() clobbers it
2202         __ push_ptr(Otos_i);  // put object on tos where GC wants it.
2203       } else {
2204         // Load top of stack (do not pop the value off the stack);
2205         __ ld_ptr(Lesp, Interpreter::expr_offset_in_bytes(0), Otos_i);
2206       }
2207       __ verify_oop(Otos_i);
2208     }
2209     // Otos_i: object pointer or NULL if static
2210     // Rcache: cache entry pointer
2211     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access),
2212                Otos_i, Rcache);
2213     if (!is_static && has_tos) {


2253   Label checkVolatile;
2254 
2255   // compute field type
2256   Label notByte, notInt, notShort, notChar, notLong, notFloat, notObj;
2257   __ srl(Rflags, ConstantPoolCacheEntry::tosBits, Rflags);
2258   // Make sure we don't need to mask Rflags for tosBits after the above shift
2259   ConstantPoolCacheEntry::verify_tosBits();
2260 
2261   // Check atos before itos for getstatic, more likely (in Queens at least)
2262   __ cmp(Rflags, atos);
2263   __ br(Assembler::notEqual, false, Assembler::pt, notObj);
2264   __ delayed() ->cmp(Rflags, itos);
2265 
2266   // atos
2267   __ load_heap_oop(Rclass, Roffset, Otos_i);
2268   __ verify_oop(Otos_i);
2269   __ push(atos);
2270   if (!is_static) {
2271     patch_bytecode(Bytecodes::_fast_agetfield, G3_scratch, G4_scratch);
2272   }
2273   __ ba(checkVolatile);
2274   __ delayed()->tst(Lscratch);
2275 
2276   __ bind(notObj);
2277 
2278   // cmp(Rflags, itos);
2279   __ br(Assembler::notEqual, false, Assembler::pt, notInt);
2280   __ delayed() ->cmp(Rflags, ltos);
2281 
2282   // itos
2283   __ ld(Rclass, Roffset, Otos_i);
2284   __ push(itos);
2285   if (!is_static) {
2286     patch_bytecode(Bytecodes::_fast_igetfield, G3_scratch, G4_scratch);
2287   }
2288   __ ba(checkVolatile);
2289   __ delayed()->tst(Lscratch);
2290 
2291   __ bind(notInt);
2292 
2293   // cmp(Rflags, ltos);
2294   __ br(Assembler::notEqual, false, Assembler::pt, notLong);
2295   __ delayed() ->cmp(Rflags, btos);
2296 
2297   // ltos
2298   // load must be atomic
2299   __ ld_long(Rclass, Roffset, Otos_l);
2300   __ push(ltos);
2301   if (!is_static) {
2302     patch_bytecode(Bytecodes::_fast_lgetfield, G3_scratch, G4_scratch);
2303   }
2304   __ ba(checkVolatile);
2305   __ delayed()->tst(Lscratch);
2306 
2307   __ bind(notLong);
2308 
2309   // cmp(Rflags, btos);
2310   __ br(Assembler::notEqual, false, Assembler::pt, notByte);
2311   __ delayed() ->cmp(Rflags, ctos);
2312 
2313   // btos
2314   __ ldsb(Rclass, Roffset, Otos_i);
2315   __ push(itos);
2316   if (!is_static) {
2317     patch_bytecode(Bytecodes::_fast_bgetfield, G3_scratch, G4_scratch);
2318   }
2319   __ ba(checkVolatile);
2320   __ delayed()->tst(Lscratch);
2321 
2322   __ bind(notByte);
2323 
2324   // cmp(Rflags, ctos);
2325   __ br(Assembler::notEqual, false, Assembler::pt, notChar);
2326   __ delayed() ->cmp(Rflags, stos);
2327 
2328   // ctos
2329   __ lduh(Rclass, Roffset, Otos_i);
2330   __ push(itos);
2331   if (!is_static) {
2332     patch_bytecode(Bytecodes::_fast_cgetfield, G3_scratch, G4_scratch);
2333   }
2334   __ ba(checkVolatile);
2335   __ delayed()->tst(Lscratch);
2336 
2337   __ bind(notChar);
2338 
2339   // cmp(Rflags, stos);
2340   __ br(Assembler::notEqual, false, Assembler::pt, notShort);
2341   __ delayed() ->cmp(Rflags, ftos);
2342 
2343   // stos
2344   __ ldsh(Rclass, Roffset, Otos_i);
2345   __ push(itos);
2346   if (!is_static) {
2347     patch_bytecode(Bytecodes::_fast_sgetfield, G3_scratch, G4_scratch);
2348   }
2349   __ ba(checkVolatile);
2350   __ delayed()->tst(Lscratch);
2351 
2352   __ bind(notShort);
2353 
2354 
2355   // cmp(Rflags, ftos);
2356   __ br(Assembler::notEqual, false, Assembler::pt, notFloat);
2357   __ delayed() ->tst(Lscratch);
2358 
2359   // ftos
2360   __ ldf(FloatRegisterImpl::S, Rclass, Roffset, Ftos_f);
2361   __ push(ftos);
2362   if (!is_static) {
2363     patch_bytecode(Bytecodes::_fast_fgetfield, G3_scratch, G4_scratch);
2364   }
2365   __ ba(checkVolatile);
2366   __ delayed()->tst(Lscratch);
2367 
2368   __ bind(notFloat);
2369 
2370 
2371   // dtos
2372   __ ldf(FloatRegisterImpl::D, Rclass, Roffset, Ftos_d);
2373   __ push(dtos);
2374   if (!is_static) {
2375     patch_bytecode(Bytecodes::_fast_dgetfield, G3_scratch, G4_scratch);
2376   }
2377 
2378   __ bind(checkVolatile);
2379   if (__ membar_has_effect(membar_bits)) {
2380     // __ tst(Lscratch); executed in delay slot
2381     __ br(Assembler::zero, false, Assembler::pt, exit);
2382     __ delayed()->nop();
2383     volatile_barrier(membar_bits);
2384   }
2385 


2454   if (__ membar_has_effect(membar_bits)) {
2455     __ btst(Lscratch, Rflags);
2456     __ br(Assembler::zero, false, Assembler::pt, exit);
2457     __ delayed()->nop();
2458     volatile_barrier(membar_bits);
2459     __ bind(exit);
2460   }
2461 
2462   if (state == atos) {
2463     __ verify_oop(Otos_i);    // does not blow flags!
2464   }
2465 }
2466 
2467 void TemplateTable::jvmti_post_fast_field_mod() {
2468   if (JvmtiExport::can_post_field_modification()) {
2469     // Check to see if a field modification watch has been set before we take
2470     // the time to call into the VM.
2471     Label done;
2472     AddressLiteral get_field_modification_count_addr(JvmtiExport::get_field_modification_count_addr());
2473     __ load_contents(get_field_modification_count_addr, G4_scratch);
2474     __ cmp_and_br_short(G4_scratch, 0, Assembler::equal, Assembler::pt, done);


2475     __ pop_ptr(G4_scratch);     // copy the object pointer from tos
2476     __ verify_oop(G4_scratch);
2477     __ push_ptr(G4_scratch);    // put the object pointer back on tos
2478     __ get_cache_entry_pointer_at_bcp(G1_scratch, G3_scratch, 1);
2479     // Save tos values before call_VM() clobbers them. Since we have
2480     // to do it for every data type, we use the saved values as the
2481     // jvalue object.
2482     switch (bytecode()) {  // save tos values before call_VM() clobbers them
2483     case Bytecodes::_fast_aputfield: __ push_ptr(Otos_i); break;
2484     case Bytecodes::_fast_bputfield: // fall through
2485     case Bytecodes::_fast_sputfield: // fall through
2486     case Bytecodes::_fast_cputfield: // fall through
2487     case Bytecodes::_fast_iputfield: __ push_i(Otos_i); break;
2488     case Bytecodes::_fast_dputfield: __ push_d(Ftos_d); break;
2489     case Bytecodes::_fast_fputfield: __ push_f(Ftos_f); break;
2490     // get words in right order for use as jvalue object
2491     case Bytecodes::_fast_lputfield: __ push_l(Otos_l); break;
2492     }
2493     // setup pointer to jvalue object
2494     __ mov(Lesp, G3_scratch);  __ inc(G3_scratch, wordSize);


2505     case Bytecodes::_fast_dputfield: __ pop_d(Ftos_d); break;
2506     case Bytecodes::_fast_fputfield: __ pop_f(Ftos_f); break;
2507     case Bytecodes::_fast_lputfield: __ pop_l(Otos_l); break;
2508     }
2509     __ bind(done);
2510   }
2511 }
2512 
2513 // The registers Rcache and index are expected to be set before the call.
2514 // The function may destroy various registers, just not the Rcache and index registers.
2515 void TemplateTable::jvmti_post_field_mod(Register Rcache, Register index, bool is_static) {
2516   ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset();
2517 
2518   if (JvmtiExport::can_post_field_modification()) {
2519     // Check to see if a field modification watch has been set before we take
2520     // the time to call into the VM.
2521     Label Label1;
2522     assert_different_registers(Rcache, index, G1_scratch);
2523     AddressLiteral get_field_modification_count_addr(JvmtiExport::get_field_modification_count_addr());
2524     __ load_contents(get_field_modification_count_addr, G1_scratch);
2525     __ cmp_and_br_short(G1_scratch, 0, Assembler::zero, Assembler::pt, Label1);


2526 
2527     // The Rcache and index registers have already been set.
2528     // That would allow this call to be eliminated, but the Rcache and index
2529     // registers must then be used consistently after this line.
2530     __ get_cache_and_index_at_bcp(G1_scratch, G4_scratch, 1);
2531 
2532     __ add(G1_scratch, in_bytes(cp_base_offset), G3_scratch);
2533     if (is_static) {
2534       // Life is simple.  Null out the object pointer.
2535       __ clr(G4_scratch);
2536     } else {
2537       Register Rflags = G1_scratch;
2538       // Life is harder. The stack holds the value on top, followed by the
2539       // object.  We don't know the size of the value, though; it could be
2540       // one or two words depending on its type. As a result, we must find
2541       // the type to determine where the object is.
2542 
2543       Label two_word, valsizeknown;
2544       __ ld_ptr(G1_scratch, cp_base_offset + ConstantPoolCacheEntry::flags_offset(), Rflags);
2545       __ mov(Lesp, G4_scratch);
2546       __ srl(Rflags, ConstantPoolCacheEntry::tosBits, Rflags);
2547       // Make sure we don't need to mask Rflags for tosBits after the above shift
2548       ConstantPoolCacheEntry::verify_tosBits();
2549       __ cmp(Rflags, ltos);
2550       __ br(Assembler::equal, false, Assembler::pt, two_word);
2551       __ delayed()->cmp(Rflags, dtos);
2552       __ br(Assembler::equal, false, Assembler::pt, two_word);
2553       __ delayed()->nop();
2554       __ inc(G4_scratch, Interpreter::expr_offset_in_bytes(1));
2555       __ ba_short(valsizeknown);

2556       __ bind(two_word);
2557 
2558       __ inc(G4_scratch, Interpreter::expr_offset_in_bytes(2));
2559 
2560       __ bind(valsizeknown);
2561       // setup object pointer
2562       __ ld_ptr(G4_scratch, 0, G4_scratch);
2563       __ verify_oop(G4_scratch);
2564     }
2565     // setup pointer to jvalue object
2566     __ mov(Lesp, G1_scratch);  __ inc(G1_scratch, wordSize);
2567     // G4_scratch:  object pointer or NULL if static
2568     // G3_scratch: cache entry pointer
2569     // G1_scratch: jvalue object on the stack
2570     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification),
2571                G4_scratch, G3_scratch, G1_scratch);
2572     __ get_cache_and_index_at_bcp(Rcache, index, 1);
2573     __ bind(Label1);
2574   }
2575 }
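// --- Illustrative sketch, not part of this file: how jvmti_post_field_mod locates the
// --- object pointer underneath the value that is about to be stored.  'tos' stands for
// --- the expression-stack slot Lesp points at; slot indexing follows
// --- Interpreter::expr_offset_in_bytes, with ltos/dtos values occupying two slots.
#include <cstdint>
static intptr_t* object_slot(intptr_t* tos, bool value_is_two_words) {
  // one-word value: the object sits in the next slot; two-word value: one slot further
  return tos + (value_is_two_words ? 2 : 1);
}
// For a static field there is no object at all, so the generated code just passes NULL.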


2586   Register index  = G4_scratch;
2587   Register Rclass = Rcache;
2588   Register Roffset= G4_scratch;
2589   Register Rflags = G1_scratch;
2590   ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset();
2591 
2592   resolve_cache_and_index(byte_no, noreg, Rcache, index, sizeof(u2));
2593   jvmti_post_field_mod(Rcache, index, is_static);
2594   load_field_cp_cache_entry(Rclass, Rcache, index, Roffset, Rflags, is_static);
2595 
2596   Assembler::Membar_mask_bits read_bits =
2597     Assembler::Membar_mask_bits(Assembler::LoadStore | Assembler::StoreStore);
2598   Assembler::Membar_mask_bits write_bits = Assembler::StoreLoad;
2599 
2600   Label notVolatile, checkVolatile, exit;
2601   if (__ membar_has_effect(read_bits) || __ membar_has_effect(write_bits)) {
2602     __ set((1 << ConstantPoolCacheEntry::volatileField), Lscratch);
2603     __ and3(Rflags, Lscratch, Lscratch);
2604 
2605     if (__ membar_has_effect(read_bits)) {
2606       __ cmp_and_br_short(Lscratch, 0, Assembler::equal, Assembler::pt, notVolatile);


2607       volatile_barrier(read_bits);
2608       __ bind(notVolatile);
2609     }
2610   }
2611 
2612   __ srl(Rflags, ConstantPoolCacheEntry::tosBits, Rflags);
2613   // Make sure we don't need to mask Rflags for tosBits after the above shift
2614   ConstantPoolCacheEntry::verify_tosBits();
2615 
2616   // compute field type
2617   Label notInt, notShort, notChar, notObj, notByte, notLong, notFloat;
2618 
2619   if (is_static) {
2620     // putstatic with object type most likely, check that first
2621     __ cmp(Rflags, atos );
2622     __ br(Assembler::notEqual, false, Assembler::pt, notObj);
2623     __ delayed() ->cmp(Rflags, itos );
2624 
2625     // atos
2626     __ pop_ptr();
2627     __ verify_oop(Otos_i);
2628 
2629     do_oop_store(_masm, Rclass, Roffset, 0, Otos_i, G1_scratch, _bs->kind(), false);
2630 
2631     __ ba(checkVolatile);
2632     __ delayed()->tst(Lscratch);
2633 
2634     __ bind(notObj);
2635 
2636     // cmp(Rflags, itos );
2637     __ br(Assembler::notEqual, false, Assembler::pt, notInt);
2638     __ delayed() ->cmp(Rflags, btos );
2639 
2640     // itos
2641     __ pop_i();
2642     __ st(Otos_i, Rclass, Roffset);
2643     __ ba(checkVolatile);
2644     __ delayed()->tst(Lscratch);
2645 
2646     __ bind(notInt);
2647 
2648   } else {
2649     // putfield with int type most likely, check that first
2650     __ cmp(Rflags, itos );
2651     __ br(Assembler::notEqual, false, Assembler::pt, notInt);
2652     __ delayed() ->cmp(Rflags, atos );
2653 
2654     // itos
2655     __ pop_i();
2656     pop_and_check_object(Rclass);
2657     __ st(Otos_i, Rclass, Roffset);
2658     patch_bytecode(Bytecodes::_fast_iputfield, G3_scratch, G4_scratch);
2659     __ ba(checkVolatile);
2660     __ delayed()->tst(Lscratch);
2661 
2662     __ bind(notInt);
2663     // cmp(Rflags, atos );
2664     __ br(Assembler::notEqual, false, Assembler::pt, notObj);
2665     __ delayed() ->cmp(Rflags, btos );
2666 
2667     // atos
2668     __ pop_ptr();
2669     pop_and_check_object(Rclass);
2670     __ verify_oop(Otos_i);
2671 
2672     do_oop_store(_masm, Rclass, Roffset, 0, Otos_i, G1_scratch, _bs->kind(), false);
2673 
2674     patch_bytecode(Bytecodes::_fast_aputfield, G3_scratch, G4_scratch);
2675     __ ba(checkVolatile);
2676     __ delayed()->tst(Lscratch);
2677 
2678     __ bind(notObj);
2679   }
2680 
2681   // cmp(Rflags, btos );
2682   __ br(Assembler::notEqual, false, Assembler::pt, notByte);
2683   __ delayed() ->cmp(Rflags, ltos );
2684 
2685   // btos
2686   __ pop_i();
2687   if (!is_static) pop_and_check_object(Rclass);
2688   __ stb(Otos_i, Rclass, Roffset);
2689   if (!is_static) {
2690     patch_bytecode(Bytecodes::_fast_bputfield, G3_scratch, G4_scratch);
2691   }
2692   __ ba(checkVolatile);
2693   __ delayed()->tst(Lscratch);
2694 
2695   __ bind(notByte);
2696 
2697   // cmp(Rflags, ltos );
2698   __ br(Assembler::notEqual, false, Assembler::pt, notLong);
2699   __ delayed() ->cmp(Rflags, ctos );
2700 
2701   // ltos
2702   __ pop_l();
2703   if (!is_static) pop_and_check_object(Rclass);
2704   __ st_long(Otos_l, Rclass, Roffset);
2705   if (!is_static) {
2706     patch_bytecode(Bytecodes::_fast_lputfield, G3_scratch, G4_scratch);
2707   }
2708   __ ba(checkVolatile);
2709   __ delayed()->tst(Lscratch);
2710 
2711   __ bind(notLong);
2712 
2713   // cmp(Rflags, ctos );
2714   __ br(Assembler::notEqual, false, Assembler::pt, notChar);
2715   __ delayed() ->cmp(Rflags, stos );
2716 
2717   // ctos (char)
2718   __ pop_i();
2719   if (!is_static) pop_and_check_object(Rclass);
2720   __ sth(Otos_i, Rclass, Roffset);
2721   if (!is_static) {
2722     patch_bytecode(Bytecodes::_fast_cputfield, G3_scratch, G4_scratch);
2723   }
2724   __ ba(checkVolatile);
2725   __ delayed()->tst(Lscratch);
2726 
2727   __ bind(notChar);
2728   // cmp(Rflags, stos );
2729   __ br(Assembler::notEqual, false, Assembler::pt, notShort);
2730   __ delayed() ->cmp(Rflags, ftos );
2731 
2732   // stos (short)
2733   __ pop_i();
2734   if (!is_static) pop_and_check_object(Rclass);
2735   __ sth(Otos_i, Rclass, Roffset);
2736   if (!is_static) {
2737     patch_bytecode(Bytecodes::_fast_sputfield, G3_scratch, G4_scratch);
2738   }
2739   __ ba(checkVolatile);
2740   __ delayed()->tst(Lscratch);
2741 
2742   __ bind(notShort);
2743   // cmp(Rflags, ftos );
2744   __ br(Assembler::notZero, false, Assembler::pt, notFloat);
2745   __ delayed()->nop();
2746 
2747   // ftos
2748   __ pop_f();
2749   if (!is_static) pop_and_check_object(Rclass);
2750   __ stf(FloatRegisterImpl::S, Ftos_f, Rclass, Roffset);
2751   if (!is_static) {
2752     patch_bytecode(Bytecodes::_fast_fputfield, G3_scratch, G4_scratch);
2753   }
2754   __ ba(checkVolatile);
2755   __ delayed()->tst(Lscratch);
2756 
2757   __ bind(notFloat);
2758 
2759   // dtos
2760   __ pop_d();
2761   if (!is_static) pop_and_check_object(Rclass);
2762   __ stf(FloatRegisterImpl::D, Ftos_d, Rclass, Roffset);
2763   if (!is_static) {
2764     patch_bytecode(Bytecodes::_fast_dputfield, G3_scratch, G4_scratch);
2765   }
2766 
2767   __ bind(checkVolatile);
2768   __ tst(Lscratch);
2769 
2770   if (__ membar_has_effect(write_bits)) {
2771     // __ tst(Lscratch); in delay slot
2772     __ br(Assembler::zero, false, Assembler::pt, exit);
2773     __ delayed()->nop();
2774     volatile_barrier(Assembler::StoreLoad);


2781   Register Rcache = G3_scratch;
2782   Register Rclass = Rcache;
2783   Register Roffset= G4_scratch;
2784   Register Rflags = G1_scratch;
2785   ByteSize cp_base_offset = constantPoolCacheOopDesc::base_offset();
2786 
2787   jvmti_post_fast_field_mod();
2788 
2789   __ get_cache_and_index_at_bcp(Rcache, G4_scratch, 1);
2790 
2791   Assembler::Membar_mask_bits read_bits =
2792     Assembler::Membar_mask_bits(Assembler::LoadStore | Assembler::StoreStore);
2793   Assembler::Membar_mask_bits write_bits = Assembler::StoreLoad;
2794 
2795   Label notVolatile, checkVolatile, exit;
2796   if (__ membar_has_effect(read_bits) || __ membar_has_effect(write_bits)) {
2797     __ ld_ptr(Rcache, cp_base_offset + ConstantPoolCacheEntry::flags_offset(), Rflags);
2798     __ set((1 << ConstantPoolCacheEntry::volatileField), Lscratch);
2799     __ and3(Rflags, Lscratch, Lscratch);
2800     if (__ membar_has_effect(read_bits)) {
2801       __ cmp_and_br_short(Lscratch, 0, Assembler::equal, Assembler::pt, notVolatile);


2802       volatile_barrier(read_bits);
2803       __ bind(notVolatile);
2804     }
2805   }
2806 
2807   __ ld_ptr(Rcache, cp_base_offset + ConstantPoolCacheEntry::f2_offset(), Roffset);
2808   pop_and_check_object(Rclass);
2809 
2810   switch (bytecode()) {
2811     case Bytecodes::_fast_bputfield: __ stb(Otos_i, Rclass, Roffset); break;
2812     case Bytecodes::_fast_cputfield: /* fall through */
2813     case Bytecodes::_fast_sputfield: __ sth(Otos_i, Rclass, Roffset); break;
2814     case Bytecodes::_fast_iputfield: __ st(Otos_i, Rclass, Roffset);  break;
2815     case Bytecodes::_fast_lputfield: __ st_long(Otos_l, Rclass, Roffset); break;
2816     case Bytecodes::_fast_fputfield:
2817       __ stf(FloatRegisterImpl::S, Ftos_f, Rclass, Roffset);
2818       break;
2819     case Bytecodes::_fast_dputfield:
2820       __ stf(FloatRegisterImpl::D, Ftos_d, Rclass, Roffset);
2821       break;
2822     case Bytecodes::_fast_aputfield:
2823       do_oop_store(_masm, Rclass, Roffset, 0, Otos_i, G1_scratch, _bs->kind(), false);
2824       break;
2825     default:
2826       ShouldNotReachHere();
2827   }
2828 
2829   if (__ membar_has_effect(write_bits)) {
2830     __ cmp_and_br_short(Lscratch, 0, Assembler::equal, Assembler::pt, exit);


2831     volatile_barrier(Assembler::StoreLoad);
2832     __ bind(exit);
2833   }
2834 }
2835 
2836 
2837 void TemplateTable::putfield(int byte_no) {
2838   putfield_or_static(byte_no, false);
2839 }
2840 
2841 void TemplateTable::putstatic(int byte_no) {
2842   putfield_or_static(byte_no, true);
2843 }
2844 
2845 
2846 void TemplateTable::fast_xaccess(TosState state) {
2847   transition(vtos, state);
2848   Register Rcache = G3_scratch;
2849   Register Roffset = G4_scratch;
2850   Register Rflags  = G4_scratch;


3170   __ sll(Rtemp, LogBytesPerWord, Rtemp);   // Rtemp *= wordSize;
3171   if (Assembler::is_simm13(base)) {
3172     __ add(Rtemp, base, Rtemp);
3173   } else {
3174     __ set(base, Rscratch);
3175     __ add(Rscratch, Rtemp, Rtemp);
3176   }
3177   __ add(RklassOop, Rtemp, Rscratch);
3178 
3179   __ bind(search);
3180 
3181   __ ld_ptr(Rscratch, itableOffsetEntry::interface_offset_in_bytes(), Rtemp);
3182   {
3183     Label ok;
3184 
3185     // Check that entry is non-null.  Null entries are probably a bytecode
3186     // problem.  If the interface isn't implemented by the receiver class,
3187     // the VM should throw IncompatibleClassChangeError.  linkResolver checks
3188     // this too but that's only if the entry isn't already resolved, so we
3189     // need to check again.
3190     __ br_notnull_short( Rtemp, Assembler::pt, ok);

3191     call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_IncompatibleClassChangeError));
3192     __ should_not_reach_here();
3193     __ bind(ok);
3194     __ verify_oop(Rtemp);
3195   }
3196 
3197   __ verify_oop(Rinterface);
3198 
3199   __ cmp(Rinterface, Rtemp);
3200   __ brx(Assembler::notEqual, true, Assembler::pn, search);
3201   __ delayed()->add(Rscratch, itableOffsetEntry::size() * wordSize, Rscratch);
3202 
3203   // entry found and Rscratch points to it
3204   __ ld(Rscratch, itableOffsetEntry::offset_offset_in_bytes(), Rscratch);
3205 
3206   assert(itableMethodEntry::method_offset_in_bytes() == 0, "adjust instruction below");
3207   __ sll(Rindex, exact_log2(itableMethodEntry::size() * wordSize), Rindex);       // Rindex *= 8;
3208   __ add(Rscratch, Rindex, Rscratch);
3209   __ ld_ptr(RklassOop, Rscratch, G5_method);
3210 
3211   // Check for abstract method error.
3212   {
3213     Label ok;
3214     __ br_notnull_short(G5_method, Assembler::pt, ok);


3215     call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_AbstractMethodError));
3216     __ should_not_reach_here();
3217     __ bind(ok);
3218   }
3219 
3220   Register Rcall = Rinterface;
3221   assert_different_registers(Rcall, G5_method, Gargs, Rret);
3222 
3223   __ verify_oop(G5_method);
3224   __ call_from_interpreter(Rcall, Gargs, Rret);
3225 
3226 }
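// --- Illustrative sketch, not part of this file: the itable walk the invokeinterface
// --- template emits above.  ItableOffsetEntry/Method are hypothetical stand-ins for the
// --- two itable sections hanging off the receiver klass: an (interface, offset) list
// --- terminated by a null interface, followed by per-interface method tables.
struct Method;
struct ItableOffsetEntry { const void* interface; int offset; };  // offset from the klass base

static const Method* itable_lookup(const char* klass_base, const ItableOffsetEntry* entries,
                                   const void* interface, int itable_index) {
  for (const ItableOffsetEntry* e = entries; ; ++e) {
    if (e->interface == nullptr)
      return nullptr;                        // not implemented: IncompatibleClassChangeError
    if (e->interface == interface) {
      const Method* const* methods =
          reinterpret_cast<const Method* const*>(klass_base + e->offset);
      return methods[itable_index];          // a null method here means AbstractMethodError
    }
  }
}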
3227 
3228 
3229 void TemplateTable::invokedynamic(int byte_no) {
3230   transition(vtos, vtos);
3231   assert(byte_no == f1_oop, "use this argument");
3232 
3233   if (!EnableInvokeDynamic) {
3234     // We should not encounter this bytecode if !EnableInvokeDynamic.


3349     // if there is enough space, we do not CAS and do not clear
3350     __ cmp(RnewTopValue, RendValue);
3351     if (ZeroTLAB) {
3352       // the fields have already been cleared
3353       __ brx(Assembler::lessEqualUnsigned, true, Assembler::pt, initialize_header);
3354     } else {
3355       // initialize both the header and fields
3356       __ brx(Assembler::lessEqualUnsigned, true, Assembler::pt, initialize_object);
3357     }
3358     __ delayed()->st_ptr(RnewTopValue, G2_thread, in_bytes(JavaThread::tlab_top_offset()));
3359 
3360     if (allow_shared_alloc) {
3361       // Check if tlab should be discarded (refill_waste_limit >= free)
3362       __ ld_ptr(G2_thread, in_bytes(JavaThread::tlab_refill_waste_limit_offset()), RtlabWasteLimitValue);
3363       __ sub(RendValue, RoldTopValue, RfreeValue);
3364 #ifdef _LP64
3365       __ srlx(RfreeValue, LogHeapWordSize, RfreeValue);
3366 #else
3367       __ srl(RfreeValue, LogHeapWordSize, RfreeValue);
3368 #endif
3369       __ cmp_and_brx_short(RtlabWasteLimitValue, RfreeValue, Assembler::greaterEqualUnsigned, Assembler::pt, slow_case); // tlab waste is small


3370 
3371       // increment waste limit to prevent getting stuck on this slow path
3372       __ add(RtlabWasteLimitValue, ThreadLocalAllocBuffer::refill_waste_limit_increment(), RtlabWasteLimitValue);
3373       __ st_ptr(RtlabWasteLimitValue, G2_thread, in_bytes(JavaThread::tlab_refill_waste_limit_offset()));
3374     } else {
3375       // No allocation in the shared eden.
3376       __ ba_short(slow_case);

3377     }
3378   }
3379 
3380   // Allocation in the shared Eden
3381   if (allow_shared_alloc) {
3382     Register RoldTopValue = G1_scratch;
3383     Register RtopAddr = G3_scratch;
3384     Register RnewTopValue = RallocatedObject;
3385     Register RendValue = Rscratch;
3386 
3387     __ set((intptr_t)Universe::heap()->top_addr(), RtopAddr);
3388 
3389     Label retry;
3390     __ bind(retry);
3391     __ set((intptr_t)Universe::heap()->end_addr(), RendValue);
3392     __ ld_ptr(RendValue, 0, RendValue);
3393     __ ld_ptr(RtopAddr, 0, RoldTopValue);
3394     __ add(RoldTopValue, Roffset, RnewTopValue);
3395 
3396     // RnewTopValue contains the top address after the new object
3397     // has been allocated.
3398     __ cmp_and_brx_short(RnewTopValue, RendValue, Assembler::greaterUnsigned, Assembler::pn, slow_case);


3399 
3400     __ casx_under_lock(RtopAddr, RoldTopValue, RnewTopValue,
3401       VM_Version::v9_instructions_work() ? NULL :
3402       (address)StubRoutines::Sparc::atomic_memory_operation_lock_addr());
3403 
3404     // if someone beat us on the allocation, try again, otherwise continue
3405     __ cmp_and_brx_short(RoldTopValue, RnewTopValue, Assembler::notEqual, Assembler::pn, retry);


3406 
3407     // bump total bytes allocated by this thread
3408     // RoldTopValue and RtopAddr are dead, so can use G1 and G3
3409     __ incr_allocated_bytes(Roffset, G1_scratch, G3_scratch);
3410   }
3411 
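The shared-eden path is a classic compare-and-swap retry loop over the heap's top pointer (casx_under_lock falls back to a stub lock only on pre-V9 hardware). A rough equivalent using std::atomic, with hypothetical eden_top/eden_end globals standing in for the values behind Universe::heap()->top_addr() and end_addr():

#include <atomic>
#include <cstddef>

static std::atomic<char*> eden_top;   // hypothetical stand-in for *Universe::heap()->top_addr()
static char*              eden_end;   // hypothetical stand-in for *Universe::heap()->end_addr()

// Returns the start of the new object, or NULL when eden is full and the slow case must run.
static char* eden_allocate(size_t size_in_bytes) {
  for (;;) {                                          // the 'retry' label above
    char* old_top = eden_top.load();
    char* new_top = old_top + size_in_bytes;
    if (new_top > eden_end) {
      return NULL;                                    // no room: take the slow case
    }
    // If another thread beat us to the allocation, loop and retry with the fresh top.
    if (eden_top.compare_exchange_strong(old_top, new_top)) {
      return old_top;                                 // success; caller bumps allocated-bytes stats
    }
  }
}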
3412   if (UseTLAB || Universe::heap()->supports_inline_contig_alloc()) {
3413     // clear object fields
3414     __ bind(initialize_object);
3415     __ deccc(Roffset, sizeof(oopDesc));
3416     __ br(Assembler::zero, false, Assembler::pt, initialize_header);
3417     __ delayed()->add(RallocatedObject, sizeof(oopDesc), G3_scratch);
3418 
3419     // initialize remaining object fields
3420     { Label loop;
3421       __ subcc(Roffset, wordSize, Roffset);
3422       __ bind(loop);
3423       //__ subcc(Roffset, wordSize, Roffset);      // executed above loop or in delay slot
3424       __ st_ptr(G0, G3_scratch, Roffset);
3425       __ br(Assembler::notEqual, false, Assembler::pt, loop);
3426       __ delayed()->subcc(Roffset, wordSize, Roffset);
3427     }
3428     __ ba_short(initialize_header);

3429   }
3430 
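The loop just above zeroes the instance fields word by word, counting Roffset down from the instance size toward the header (the header itself is written separately below). The net effect, in a hedged sketch with a hypothetical header_size argument in place of sizeof(oopDesc):

#include <cstddef>
#include <cstring>

// obj is the newly allocated instance, size_in_bytes its full size including the header.
static void clear_fields(char* obj, size_t size_in_bytes, size_t header_size) {
  if (size_in_bytes == header_size) return;          // no fields beyond the header
  // The generated code stores word-sized zeros from the top of the object downwards;
  // memset yields the same zeroed field area.
  memset(obj + header_size, 0, size_in_bytes - header_size);
}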
3431   // slow case
3432   __ bind(slow_case);
3433   __ get_2_byte_integer_at_bcp(1, G3_scratch, O2, InterpreterMacroAssembler::Unsigned);
3434   __ get_constant_pool(O1);
3435 
3436   call_VM(Otos_i, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), O1, O2);
3437 
3438   __ ba_short(done);

3439 
3440   // Initialize the header: mark, klass
3441   __ bind(initialize_header);
3442 
3443   if (UseBiasedLocking) {
3444     __ ld_ptr(RinstanceKlass, Klass::prototype_header_offset_in_bytes() + sizeof(oopDesc), G4_scratch);
3445   } else {
3446     __ set((intptr_t)markOopDesc::prototype(), G4_scratch);
3447   }
3448   __ st_ptr(G4_scratch, RallocatedObject, oopDesc::mark_offset_in_bytes());       // mark
3449   __ store_klass_gap(G0, RallocatedObject);         // klass gap if compressed
3450   __ store_klass(RinstanceKlass, RallocatedObject); // klass (last for cms)
3451 
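Header initialization stores the mark word (either the klass's biased-locking prototype header or the neutral prototype) and then the klass pointer, with the klass written last so a concurrent collector never observes a half-built object. A sketch over a hypothetical uncompressed two-word header:

#include <cstdint>

// Hypothetical uncompressed layout: mark word followed by the klass pointer.
struct ObjectHeader {
  uintptr_t mark;
  void*     klass;
};

static void initialize_header(ObjectHeader* obj, uintptr_t prototype_mark, void* klass) {
  obj->mark  = prototype_mark;   // biased prototype header or the neutral prototype mark
  obj->klass = klass;            // stored last (matters for CMS, as the comment above notes)
}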
3452   {
3453     SkipIfEqual skip_if(
3454       _masm, G4_scratch, &DTraceAllocProbes, Assembler::zero);
3455     // Trigger dtrace event
3456     __ push(atos);
3457     __ call_VM_leaf(noreg,
3458        CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_object_alloc), O0);


3482 
3483 void TemplateTable::arraylength() {
3484   transition(atos, itos);
3485   Label ok;
3486   __ verify_oop(Otos_i);
3487   __ tst(Otos_i);
3488   __ throw_if_not_1_x( Assembler::notZero, ok );
3489   __ delayed()->ld(Otos_i, arrayOopDesc::length_offset_in_bytes(), Otos_i);
3490   __ throw_if_not_2( Interpreter::_throw_NullPointerException_entry, G3_scratch, ok);
3491 }
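arraylength reduces to a null check plus a load of the length field at arrayOopDesc::length_offset_in_bytes(); the load is scheduled in the delay slot of the check. A small sketch with a hypothetical ArrayStub type:

#include <cstddef>
#include <stdexcept>

struct ArrayStub { int length; };   // hypothetical stand-in for arrayOopDesc

static int array_length(const ArrayStub* a) {
  if (a == NULL) {
    throw std::runtime_error("NullPointerException");   // _throw_NullPointerException_entry
  }
  return a->length;   // the ld from arrayOopDesc::length_offset_in_bytes()
}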
3492 
3493 
3494 void TemplateTable::checkcast() {
3495   transition(atos, atos);
3496   Label done, is_null, quicked, cast_ok, resolved;
3497   Register Roffset = G1_scratch;
3498   Register RobjKlass = O5;
3499   Register RspecifiedKlass = O4;
3500 
3501   // Check for casting a NULL
3502   __ br_null_short(Otos_i, Assembler::pn, is_null);

3503 
3504   // Get value klass in RobjKlass
3505   __ load_klass(Otos_i, RobjKlass); // get value klass
3506 
3507   // Get constant pool tag
3508   __ get_2_byte_integer_at_bcp(1, Lscratch, Roffset, InterpreterMacroAssembler::Unsigned);
3509 
3510   // See if the checkcast has been quickened
3511   __ get_cpool_and_tags(Lscratch, G3_scratch);
3512   __ add(G3_scratch, typeArrayOopDesc::header_size(T_BYTE) * wordSize, G3_scratch);
3513   __ ldub(G3_scratch, Roffset, G3_scratch);
3514   __ cmp(G3_scratch, JVM_CONSTANT_Class);
3515   __ br(Assembler::equal, true, Assembler::pt, quicked);
3516   __ delayed()->sll(Roffset, LogBytesPerWord, Roffset);
3517 
3518   __ push_ptr(); // save receiver for result, and for GC
3519   call_VM(RspecifiedKlass, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc) );
3520   __ pop_ptr(Otos_i, G3_scratch); // restore receiver
3521 
3522   __ ba_short(resolved);

3523 
3524   // Extract target class from constant pool
3525   __ bind(quicked);
3526   __ add(Roffset, sizeof(constantPoolOopDesc), Roffset);
3527   __ ld_ptr(Lscratch, Roffset, RspecifiedKlass);
3528   __ bind(resolved);
3529   __ load_klass(Otos_i, RobjKlass); // get value klass
3530 
3531   // Generate a fast subtype check.  Branch to cast_ok if no
3532   // failure.  Throw exception if failure.
3533   __ gen_subtype_check( RobjKlass, RspecifiedKlass, G3_scratch, G4_scratch, G1_scratch, cast_ok );
3534 
3535   // Not a subtype; so must throw exception
3536   __ throw_if_not_x( Assembler::never, Interpreter::_throw_ClassCastException_entry, G3_scratch );
3537 
3538   __ bind(cast_ok);
3539 
3540   if (ProfileInterpreter) {
3541     __ ba_short(done);

3542   }
3543   __ bind(is_null);
3544   __ profile_null_seen(G3_scratch);
3545   __ bind(done);
3546 }
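Behaviorally, checkcast lets a null reference pass untouched, obtains the target class (from the quickened constant-pool entry or via quicken_io_cc), and throws ClassCastException when the operand's klass is not a subtype. A hedged sketch, with a hypothetical single-super-chain is_subtype_of standing in for the generated gen_subtype_check (which also consults the secondary supers):

#include <cstddef>
#include <stdexcept>

struct KlassStub { const KlassStub* super; };   // hypothetical: primary super chain only
struct ObjStub   { const KlassStub* klass;  };

static bool is_subtype_of(const KlassStub* sub, const KlassStub* super_k) {
  for (const KlassStub* k = sub; k != NULL; k = k->super) {
    if (k == super_k) return true;
  }
  return false;   // arrays and interfaces omitted for brevity
}

static ObjStub* check_cast(ObjStub* obj, const KlassStub* target) {
  if (obj == NULL) return obj;                   // null always passes checkcast
  if (!is_subtype_of(obj->klass, target)) {
    throw std::runtime_error("ClassCastException");
  }
  return obj;                                    // operand left on the stack unchanged
}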
3547 
3548 
3549 void TemplateTable::instanceof() {
3550   Label done, is_null, quicked, resolved;
3551   transition(atos, itos);
3552   Register Roffset = G1_scratch;
3553   Register RobjKlass = O5;
3554   Register RspecifiedKlass = O4;
3555 
3556   // Check for casting a NULL
3557   __ br_null_short(Otos_i, Assembler::pt, is_null);

3558 
3559   // Get value klass in RobjKlass
3560   __ load_klass(Otos_i, RobjKlass); // get value klass
3561 
3562   // Get constant pool tag
3563   __ get_2_byte_integer_at_bcp(1, Lscratch, Roffset, InterpreterMacroAssembler::Unsigned);
3564 
3565   // See if the checkcast has been quickened
3566   __ get_cpool_and_tags(Lscratch, G3_scratch);
3567   __ add(G3_scratch, typeArrayOopDesc::header_size(T_BYTE) * wordSize, G3_scratch);
3568   __ ldub(G3_scratch, Roffset, G3_scratch);
3569   __ cmp(G3_scratch, JVM_CONSTANT_Class);
3570   __ br(Assembler::equal, true, Assembler::pt, quicked);
3571   __ delayed()->sll(Roffset, LogBytesPerWord, Roffset);
3572 
3573   __ push_ptr(); // save receiver for result, and for GC
3574   call_VM(RspecifiedKlass, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc) );
3575   __ pop_ptr(Otos_i, G3_scratch); // restore receiver
3576 
3577   __ ba_short(resolved);

3578 

3579   // Extract target class from constant pool
3580   __ bind(quicked);
3581   __ add(Roffset, sizeof(constantPoolOopDesc), Roffset);
3582   __ get_constant_pool(Lscratch);
3583   __ ld_ptr(Lscratch, Roffset, RspecifiedKlass);
3584   __ bind(resolved);
3585   __ load_klass(Otos_i, RobjKlass); // get value klass
3586 
3587   // Generate a fast subtype check.  Branch to cast_ok if no
3588   // failure.  Return 0 if failure.
3589   __ or3(G0, 1, Otos_i);      // set result assuming quick tests succeed
3590   __ gen_subtype_check( RobjKlass, RspecifiedKlass, G3_scratch, G4_scratch, G1_scratch, done );
3591   // Not a subtype; return 0;
3592   __ clr( Otos_i );
3593 
3594   if (ProfileInterpreter) {
3595     __ ba_short(done);

3596   }
3597   __ bind(is_null);
3598   __ profile_null_seen(G3_scratch);
3599   __ bind(done);
3600 }
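instanceof resolves the target class the same way but produces an int instead of throwing: a null reference yields 0, otherwise the result of the subtype check. Reusing the hypothetical types from the checkcast sketch above:

static int instance_of(const ObjStub* obj, const KlassStub* target) {
  if (obj == NULL) return 0;                     // null is not an instance of anything
  return is_subtype_of(obj->klass, target) ? 1 : 0;
}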
3601 
3602 void TemplateTable::_breakpoint() {
3603 
3604    // Note: We get here even if we are single stepping.
3605    // jbug insists on setting breakpoints at every bytecode
3606    // even if we are in single step mode.
3607 
3608    transition(vtos, vtos);
3609    // get the unpatched byte code
3610    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::get_original_bytecode_at), Lmethod, Lbcp);
3611    __ mov(O0, Lbyte_code);
3612 
3613    // post the breakpoint event
3614    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::_breakpoint), Lmethod, Lbcp);
3615 


3649   // monitorenter returns true).
3650 
3651   {   Label ok;
3652     __ tst(Otos_i);
3653     __ throw_if_not_1_x( Assembler::notZero,  ok);
3654     __ delayed()->mov(Otos_i, Lscratch); // save obj
3655     __ throw_if_not_2( Interpreter::_throw_NullPointerException_entry, G3_scratch, ok);
3656   }
3657 
3658   assert(O0 == Otos_i, "Be sure where the object to lock is");
3659 
3660   // find a free slot in the monitor block
3661 
3662 
3663   // initialize entry pointer
3664   __ clr(O1); // points to free slot or NULL
3665 
3666   {
3667     Label entry, loop, exit;
3668     __ add( __ top_most_monitor(), O2 ); // last one to check
3669     __ ba( entry );
3670     __ delayed()->mov( Lmonitors, O3 ); // first one to check
3671 
3672 
3673     __ bind( loop );
3674 
3675     __ verify_oop(O4);          // verify each monitor's oop
3676     __ tst(O4); // is this entry unused?
3677     if (VM_Version::v9_instructions_work())
3678       __ movcc( Assembler::zero, false, Assembler::ptr_cc, O3, O1);
3679     else {
3680       Label L;
3681       __ br( Assembler::zero, true, Assembler::pn, L );
3682       __ delayed()->mov(O3, O1); // remember this one if it matches
3683       __ bind(L);
3684     }
3685 
3686     __ cmp(O4, O0); // check if current entry is for same object
3687     __ brx( Assembler::equal, false, Assembler::pn, exit );
3688     __ delayed()->inc( O3, frame::interpreter_frame_monitor_size() * wordSize ); // check next one
3689 
3690     __ bind( entry );
3691 
3692     __ cmp( O3, O2 );
3693     __ brx( Assembler::lessEqualUnsigned, true, Assembler::pt, loop );
3694     __ delayed()->ld_ptr(O3, BasicObjectLock::obj_offset_in_bytes(), O4);
3695 
3696     __ bind( exit );
3697   }
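The scan above walks the frame's monitor block from the innermost entry (Lmonitors) to the top-most one, remembering an unused slot as it goes and stopping as soon as it sees an entry for the same object. A sketch over a hypothetical flat array of entries:

#include <cstddef>

// Hypothetical flat view of the interpreter frame's monitor block, innermost entry first.
struct MonitorEntry { void* obj; };   // obj == NULL marks an unused slot

// Returns a reusable free slot, or NULL when a fresh entry must be added to the block.
static MonitorEntry* find_monitor_slot(MonitorEntry* entries, size_t count, void* obj) {
  MonitorEntry* free_slot = NULL;
  for (size_t i = 0; i < count; i++) {
    if (entries[i].obj == NULL) free_slot = &entries[i];   // remember an unused entry
    if (entries[i].obj == obj)  break;                      // same object found: stop the scan
  }
  return free_slot;
}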
3698 
3699   { Label allocated;
3700 
3701     // found free slot?
3702     __ br_notnull_short(O1, Assembler::pn, allocated);

3703 
3704     __ add_monitor_to_stack( false, O2, O3 );
3705     __ mov(Lmonitors, O1);
3706 
3707     __ bind(allocated);
3708   }
3709 
3710   // Increment bcp to point to the next bytecode, so exception handling for asynchronous exceptions works correctly.
3711   // The object has already been popped from the stack, so the expression stack looks correct.
3712   __ inc(Lbcp);
3713 
3714   __ st_ptr(O0, O1, BasicObjectLock::obj_offset_in_bytes()); // store object
3715   __ lock_object(O1, O0);
3716 
3717   // check if there's enough space on the stack for the monitors after locking
3718   __ generate_stack_overflow_check(0);
3719 
3720   // The bcp has already been incremented. Just need to dispatch to next instruction.
3721   __ dispatch_next(vtos);
3722 }
3723 
3724 
3725 void TemplateTable::monitorexit() {
3726   transition(atos, vtos);
3727   __ verify_oop(Otos_i);
3728   __ tst(Otos_i);
3729   __ throw_if_not_x( Assembler::notZero, Interpreter::_throw_NullPointerException_entry, G3_scratch );
3730 
3731   assert(O0 == Otos_i, "just checking");
3732 
3733   { Label entry, loop, found;
3734     __ add( __ top_most_monitor(), O2 ); // last one to check
3735     __ ba(entry);
3736     // Use Lscratch to hold the monitor entry to check; start with the most recent monitor.
3737     // By using a local it survives the call to the C routine.
3738     __ delayed()->mov( Lmonitors, Lscratch );
3739 
3740     __ bind( loop );
3741 
3742     __ verify_oop(O4);          // verify each monitor's oop
3743     __ cmp(O4, O0); // check if current entry is for desired object
3744     __ brx( Assembler::equal, true, Assembler::pt, found );
3745     __ delayed()->mov(Lscratch, O1); // pass found entry as argument to monitorexit
3746 
3747     __ inc( Lscratch, frame::interpreter_frame_monitor_size() * wordSize ); // advance to next
3748 
3749     __ bind( entry );
3750 
3751     __ cmp( Lscratch, O2 );
3752     __ brx( Assembler::lessEqualUnsigned, true, Assembler::pt, loop );
3753     __ delayed()->ld_ptr(Lscratch, BasicObjectLock::obj_offset_in_bytes(), O4);
3754 
3755     call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_illegal_monitor_state_exception));

