src/cpu/sparc/vm/templateTable_sparc.cpp
Context diff (Cdiff) from the webrev for bug 7063628.


*** 157,173 ****
  if (load_bc_into_scratch) __ set(bc, Rbyte_code);
  Label patch_done;
  if (JvmtiExport::can_post_breakpoint()) {
  Label fast_patch;
  __ ldub(at_bcp(0), Rscratch);
! __ cmp(Rscratch, Bytecodes::_breakpoint);
! __ br(Assembler::notEqual, false, Assembler::pt, fast_patch);
! __ delayed()->nop();  // don't bother to hoist the stb here
  // perform the quickening, slowly, in the bowels of the breakpoint table
  __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::set_original_bytecode_at), Lmethod, Lbcp, Rbyte_code);
! __ ba(false, patch_done);
! __ delayed()->nop();
  __ bind(fast_patch);
  }
  #ifdef ASSERT
  Bytecodes::Code orig_bytecode = Bytecodes::java_code(bc);
  Label okay;
--- 157,170 ----
  if (load_bc_into_scratch) __ set(bc, Rbyte_code);
  Label patch_done;
  if (JvmtiExport::can_post_breakpoint()) {
  Label fast_patch;
  __ ldub(at_bcp(0), Rscratch);
! __ cmp_and_br_short(Rscratch, Bytecodes::_breakpoint, Assembler::notEqual, Assembler::pt, fast_patch);
  // perform the quickening, slowly, in the bowels of the breakpoint table
  __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::set_original_bytecode_at), Lmethod, Lbcp, Rbyte_code);
! __ ba_short(patch_done);
  __ bind(fast_patch);
  }
  #ifdef ASSERT
  Bytecodes::Code orig_bytecode = Bytecodes::java_code(bc);
  Label okay;
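The pattern repeated throughout this webrev is visible in the first hunk above: an explicit compare, a conditional branch, and an empty delay slot (a delayed()->nop()) are collapsed into a single MacroAssembler helper such as cmp_and_br_short or ba_short, which emits the compare and the branch and handles the delay slot itself. A minimal before/after sketch, using statements taken from the hunk above (surrounding method context omitted):

  // Before: three separate emissions, with the delay slot wasted on a nop.
  __ cmp(Rscratch, Bytecodes::_breakpoint);
  __ br(Assembler::notEqual, false, Assembler::pt, fast_patch);
  __ delayed()->nop();

  // After: one fused helper; condition, prediction hint and target are passed together.
  __ cmp_and_br_short(Rscratch, Bytecodes::_breakpoint, Assembler::notEqual, Assembler::pt, fast_patch);

  // Unconditional branches with an empty delay slot get the same treatment
  // (the extra bool argument is gone in the new call):
  __ ba(false, patch_done);   // before
  __ delayed()->nop();
  __ ba_short(patch_done);    // after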
*** 279,321 ****
  const int tags_offset = typeArrayOopDesc::header_size(T_BYTE) * wordSize;
  // get type from tags
  __ add(O2, tags_offset, O2);
  __ ldub(O2, O1, O2);
! __ cmp(O2, JVM_CONSTANT_UnresolvedString); // unresolved string? If so, must resolve
! __ brx(Assembler::equal, true, Assembler::pt, call_ldc);
! __ delayed()->nop();
! __ cmp(O2, JVM_CONSTANT_UnresolvedClass); // unresolved class? If so, must resolve
! __ brx(Assembler::equal, true, Assembler::pt, call_ldc);
! __ delayed()->nop();
! __ cmp(O2, JVM_CONSTANT_UnresolvedClassInError); // unresolved class in error state
! __ brx(Assembler::equal, true, Assembler::pn, call_ldc);
! __ delayed()->nop();
  __ cmp(O2, JVM_CONSTANT_Class); // need to call vm to get java mirror of the class
  __ brx(Assembler::notEqual, true, Assembler::pt, notClass);
  __ delayed()->add(O0, base_offset, O0);
  __ bind(call_ldc);
  __ set(wide, O1);
  call_VM(Otos_i, CAST_FROM_FN_PTR(address, InterpreterRuntime::ldc), O1);
  __ push(atos);
! __ ba(false, exit);
! __ delayed()->nop();
  __ bind(notClass);
  // __ add(O0, base_offset, O0);
  __ sll(O1, LogBytesPerWord, O1);
  __ cmp(O2, JVM_CONSTANT_Integer);
  __ brx(Assembler::notEqual, true, Assembler::pt, notInt);
  __ delayed()->cmp(O2, JVM_CONSTANT_String);
  __ ld(O0, O1, Otos_i);
  __ push(itos);
! __ ba(false, exit);
! __ delayed()->nop();
  __ bind(notInt);
  // __ cmp(O2, JVM_CONSTANT_String);
  __ brx(Assembler::equal, true, Assembler::pt, isString);
  __ delayed()->cmp(O2, JVM_CONSTANT_Object);
--- 276,313 ----
  const int tags_offset = typeArrayOopDesc::header_size(T_BYTE) * wordSize;
  // get type from tags
  __ add(O2, tags_offset, O2);
  __ ldub(O2, O1, O2);
! // unresolved string? If so, must resolve
! __ cmp_and_brx_short(O2, JVM_CONSTANT_UnresolvedString, Assembler::equal, Assembler::pt, call_ldc);
! // unresolved class? If so, must resolve
! __ cmp_and_brx_short(O2, JVM_CONSTANT_UnresolvedClass, Assembler::equal, Assembler::pt, call_ldc);
! // unresolved class in error state
! __ cmp_and_brx_short(O2, JVM_CONSTANT_UnresolvedClassInError, Assembler::equal, Assembler::pn, call_ldc);
  __ cmp(O2, JVM_CONSTANT_Class); // need to call vm to get java mirror of the class
  __ brx(Assembler::notEqual, true, Assembler::pt, notClass);
  __ delayed()->add(O0, base_offset, O0);
  __ bind(call_ldc);
  __ set(wide, O1);
  call_VM(Otos_i, CAST_FROM_FN_PTR(address, InterpreterRuntime::ldc), O1);
  __ push(atos);
! __ ba_short(exit);
  __ bind(notClass);
  // __ add(O0, base_offset, O0);
  __ sll(O1, LogBytesPerWord, O1);
  __ cmp(O2, JVM_CONSTANT_Integer);
  __ brx(Assembler::notEqual, true, Assembler::pt, notInt);
  __ delayed()->cmp(O2, JVM_CONSTANT_String);
  __ ld(O0, O1, Otos_i);
  __ push(itos);
! __ ba_short(exit);
  __ bind(notInt);
  // __ cmp(O2, JVM_CONSTANT_String);
  __ brx(Assembler::equal, true, Assembler::pt, isString);
  __ delayed()->cmp(O2, JVM_CONSTANT_Object);
*** 323,334 ****
  __ delayed()->ldf(FloatRegisterImpl::S, O0, O1, Ftos_f);
  __ bind(isString);
  __ ld_ptr(O0, O1, Otos_i);
  __ verify_oop(Otos_i);
  __ push(atos);
! __ ba(false, exit);
! __ delayed()->nop();
  __ bind(notString);
  // __ ldf(FloatRegisterImpl::S, O0, O1, Ftos_f);
  __ push(ftos);
--- 315,325 ----
  __ delayed()->ldf(FloatRegisterImpl::S, O0, O1, Ftos_f);
  __ bind(isString);
  __ ld_ptr(O0, O1, Otos_i);
  __ verify_oop(Otos_i);
  __ push(atos);
! __ ba_short(exit);
  __ bind(notString);
  // __ ldf(FloatRegisterImpl::S, O0, O1, Ftos_f);
  __ push(ftos);
*** 363,375 ****
  const Register Rcon_klass = G3_scratch; // same as Rcache
  const Register Rarray_klass = G4_scratch; // same as Rscratch
  __ load_klass(Otos_i, Rcon_klass);
  AddressLiteral array_klass_addr((address)Universe::systemObjArrayKlassObj_addr());
  __ load_contents(array_klass_addr, Rarray_klass);
! __ cmp(Rarray_klass, Rcon_klass);
! __ brx(Assembler::notEqual, false, Assembler::pt, L_done);
! __ delayed()->nop();
  __ ld(Address(Otos_i, arrayOopDesc::length_offset_in_bytes()), Rcon_klass);
  __ tst(Rcon_klass);
  __ brx(Assembler::zero, true, Assembler::pt, L_done);
  __ delayed()->clr(Otos_i); // executed only if branch is taken
--- 354,364 ----
  const Register Rcon_klass = G3_scratch; // same as Rcache
  const Register Rarray_klass = G4_scratch; // same as Rscratch
  __ load_klass(Otos_i, Rcon_klass);
  AddressLiteral array_klass_addr((address)Universe::systemObjArrayKlassObj_addr());
  __ load_contents(array_klass_addr, Rarray_klass);
! __ cmp_and_brx_short(Rarray_klass, Rcon_klass, Assembler::notEqual, Assembler::pt, L_done);
  __ ld(Address(Otos_i, arrayOopDesc::length_offset_in_bytes()), Rcon_klass);
  __ tst(Rcon_klass);
  __ brx(Assembler::zero, true, Assembler::pt, L_done);
  __ delayed()->clr(Otos_i); // executed only if branch is taken
*** 395,407 ****
  __ ldub(O2, O1, O2);
  __ sll(O1, LogBytesPerWord, O1);
  __ add(O0, O1, G3_scratch);
! __ cmp(O2, JVM_CONSTANT_Double);
! __ brx(Assembler::notEqual, false, Assembler::pt, Long);
! __ delayed()->nop();
  // A double can be placed at word-aligned locations in the constant pool.
  // Check out Conversions.java for an example.
  // Also constantPoolOopDesc::header_size() is 20, which makes it very difficult
  // to double-align double on the constant pool. SG, 11/7/97
  #ifdef _LP64
--- 384,394 ----
  __ ldub(O2, O1, O2);
  __ sll(O1, LogBytesPerWord, O1);
  __ add(O0, O1, G3_scratch);
! __ cmp_and_brx_short(O2, JVM_CONSTANT_Double, Assembler::notEqual, Assembler::pt, Long);
  // A double can be placed at word-aligned locations in the constant pool.
  // Check out Conversions.java for an example.
  // Also constantPoolOopDesc::header_size() is 20, which makes it very difficult
  // to double-align double on the constant pool. SG, 11/7/97
  #ifdef _LP64
*** 411,422 ****
  __ ldf(FloatRegisterImpl::S, G3_scratch, base_offset, f);
  __ ldf(FloatRegisterImpl::S, G3_scratch, base_offset + sizeof(jdouble)/2, f->successor());
  #endif
  __ push(dtos);
! __ ba(false, exit);
! __ delayed()->nop();
  __ bind(Long);
  #ifdef _LP64
  __ ldx(G3_scratch, base_offset, Otos_l);
  #else
--- 398,408 ----
  __ ldf(FloatRegisterImpl::S, G3_scratch, base_offset, f);
  __ ldf(FloatRegisterImpl::S, G3_scratch, base_offset + sizeof(jdouble)/2, f->successor());
  #endif
  __ push(dtos);
! __ ba_short(exit);
  __ bind(Long);
  #ifdef _LP64
  __ ldx(G3_scratch, base_offset, Otos_l);
  #else
*** 451,463 ****
  // if _iload, wait to rewrite to iload2. We only want to rewrite the
  // last two iloads in a pair. Comparing against fast_iload means that
  // the next bytecode is neither an iload or a caload, and therefore
  // an iload pair.
! __ cmp(G3_scratch, (int)Bytecodes::_iload);
! __ br(Assembler::equal, false, Assembler::pn, done);
! __ delayed()->nop();
  __ cmp(G3_scratch, (int)Bytecodes::_fast_iload);
  __ br(Assembler::equal, false, Assembler::pn, rewrite);
  __ delayed()->set(Bytecodes::_fast_iload2, G4_scratch);
--- 437,447 ----
  // if _iload, wait to rewrite to iload2. We only want to rewrite the
  // last two iloads in a pair. Comparing against fast_iload means that
  // the next bytecode is neither an iload or a caload, and therefore
  // an iload pair.
! __ cmp_and_br_short(G3_scratch, (int)Bytecodes::_iload, Assembler::equal, Assembler::pn, done);
  __ cmp(G3_scratch, (int)Bytecodes::_fast_iload);
  __ br(Assembler::equal, false, Assembler::pn, rewrite);
  __ delayed()->set(Bytecodes::_fast_iload2, G4_scratch);
*** 695,707 ****
  // do actual aload_0
  aload(0);
  // if _getfield then wait with rewrite
! __ cmp(G3_scratch, (int)Bytecodes::_getfield);
! __ br(Assembler::equal, false, Assembler::pn, done);
! __ delayed()->nop();
  // if _igetfield then rewrite to _fast_iaccess_0
  assert(Bytecodes::java_code(Bytecodes::_fast_iaccess_0) == Bytecodes::_aload_0, "adjust fast bytecode def");
  __ cmp(G3_scratch, (int)Bytecodes::_fast_igetfield);
  __ br(Assembler::equal, false, Assembler::pn, rewrite);
--- 679,689 ----
  // do actual aload_0
  aload(0);
  // if _getfield then wait with rewrite
! __ cmp_and_br_short(G3_scratch, (int)Bytecodes::_getfield, Assembler::equal, Assembler::pn, done);
  // if _igetfield then rewrite to _fast_iaccess_0
  assert(Bytecodes::java_code(Bytecodes::_fast_iaccess_0) == Bytecodes::_aload_0, "adjust fast bytecode def");
  __ cmp(G3_scratch, (int)Bytecodes::_fast_igetfield);
  __ br(Assembler::equal, false, Assembler::pn, rewrite);
*** 865,876 ****
  // O3: array
  __ verify_oop(Otos_i);
  __ index_check_without_pop(O3, O2, UseCompressedOops ? 2 : LogBytesPerWord, G3_scratch, O1);
  // do array store check - check for NULL value first
! __ br_null( Otos_i, false, Assembler::pn, is_null );
! __ delayed()->nop();
  __ load_klass(O3, O4); // get array klass
  __ load_klass(Otos_i, O5); // get value klass
  // do fast instanceof cache test
--- 847,857 ----
  // O3: array
  __ verify_oop(Otos_i);
  __ index_check_without_pop(O3, O2, UseCompressedOops ? 2 : LogBytesPerWord, G3_scratch, O1);
  // do array store check - check for NULL value first
! __ br_null_short( Otos_i, Assembler::pn, is_null );
  __ load_klass(O3, O4); // get array klass
  __ load_klass(Otos_i, O5); // get value klass
  // do fast instanceof cache test
*** 897,907 ****
  // Store is OK.
  __ bind(store_ok);
  do_oop_store(_masm, O1, noreg, arrayOopDesc::base_offset_in_bytes(T_OBJECT), Otos_i, G3_scratch, _bs->kind(), true);
! __ ba(false,done);
  __ delayed()->inc(Lesp, 3* Interpreter::stackElementSize); // adj sp (pops array, index and value)
  __ bind(is_null);
  do_oop_store(_masm, O1, noreg, arrayOopDesc::base_offset_in_bytes(T_OBJECT), G0, G4_scratch, _bs->kind(), true);
--- 878,888 ----
  // Store is OK.
  __ bind(store_ok);
  do_oop_store(_masm, O1, noreg, arrayOopDesc::base_offset_in_bytes(T_OBJECT), Otos_i, G3_scratch, _bs->kind(), true);
! __ ba(done);
  __ delayed()->inc(Lesp, 3* Interpreter::stackElementSize); // adj sp (pops array, index and value)
  __ bind(is_null);
  do_oop_store(_masm, O1, noreg, arrayOopDesc::base_offset_in_bytes(T_OBJECT), G0, G4_scratch, _bs->kind(), true);
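Note the contrast with the earlier hunks: where the branch's delay slot already does useful work, the code keeps its explicit delayed() instruction and the branch only loses the extra bool argument (ba(false, done) becomes ba(done)); the _short forms are used only where the delay slot held a nop. A sketch of the two cases, using statements from this hunk and the one just above it:

  // Delay slot is useful: keep it, drop only the bool argument.
  __ ba(done);
  __ delayed()->inc(Lesp, 3* Interpreter::stackElementSize); // adj sp (pops array, index and value)

  // Delay slot held only a nop: collapse to the short form.
  __ br_null_short( Otos_i, Assembler::pn, is_null );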
*** 1631,1650 ****
  int increment = InvocationCounter::count_increment;
  int mask = ((1 << Tier0BackedgeNotifyFreqLog) - 1) << InvocationCounter::count_shift;
  if (ProfileInterpreter) {
  // If no method data exists, go to profile_continue.
  __ ld_ptr(Lmethod, methodOopDesc::method_data_offset(), G4_scratch);
! __ br_null(G4_scratch, false, Assembler::pn, Lno_mdo);
! __ delayed()->nop();
  // Increment backedge counter in the MDO
  Address mdo_backedge_counter(G4_scratch, in_bytes(methodDataOopDesc::backedge_counter_offset()) + in_bytes(InvocationCounter::counter_offset()));
  __ increment_mask_and_jump(mdo_backedge_counter, increment, mask, G3_scratch, Lscratch, Assembler::notZero, &Lforward);
! __ ba(false, Loverflow);
! __ delayed()->nop();
  }
  // If there's no MDO, increment counter in methodOop
  __ bind(Lno_mdo);
  Address backedge_counter(Lmethod, in_bytes(methodOopDesc::backedge_counter_offset()) +
--- 1612,1629 ----
  int increment = InvocationCounter::count_increment;
  int mask = ((1 << Tier0BackedgeNotifyFreqLog) - 1) << InvocationCounter::count_shift;
  if (ProfileInterpreter) {
  // If no method data exists, go to profile_continue.
  __ ld_ptr(Lmethod, methodOopDesc::method_data_offset(), G4_scratch);
! __ br_null_short(G4_scratch, Assembler::pn, Lno_mdo);
  // Increment backedge counter in the MDO
  Address mdo_backedge_counter(G4_scratch, in_bytes(methodDataOopDesc::backedge_counter_offset()) + in_bytes(InvocationCounter::counter_offset()));
  __ increment_mask_and_jump(mdo_backedge_counter, increment, mask, G3_scratch, Lscratch, Assembler::notZero, &Lforward);
! __ ba_short(Loverflow);
  }
  // If there's no MDO, increment counter in methodOop
  __ bind(Lno_mdo);
  Address backedge_counter(Lmethod, in_bytes(methodOopDesc::backedge_counter_offset()) +
*** 1656,1673 ****
  // notify point for loop, pass branch bytecode
  __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::frequency_counter_overflow), O0_cur_bcp);
  // Was an OSR adapter generated?
  // O0 = osr nmethod
! __ br_null(O0, false, Assembler::pn, Lforward);
! __ delayed()->nop();
  // Has the nmethod been invalidated already?
  __ ld(O0, nmethod::entry_bci_offset(), O2);
! __ cmp(O2, InvalidOSREntryBci);
! __ br(Assembler::equal, false, Assembler::pn, Lforward);
! __ delayed()->nop();
  // migrate the interpreter frame off of the stack
  __ mov(G2_thread, L7); // save nmethod
--- 1635,1649 ----
  // notify point for loop, pass branch bytecode
  __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::frequency_counter_overflow), O0_cur_bcp);
  // Was an OSR adapter generated?
  // O0 = osr nmethod
! __ br_null_short(O0, Assembler::pn, Lforward);
  // Has the nmethod been invalidated already?
  __ ld(O0, nmethod::entry_bci_offset(), O2);
! __ cmp_and_br_short(O2, InvalidOSREntryBci, Assembler::equal, Assembler::pn, Lforward);
  // migrate the interpreter frame off of the stack
  __ mov(G2_thread, L7); // save nmethod
*** 1828,1838 ****
  // lookup dispatch offset
  __ delayed()->sub(Otos_i, O2, O2);
  __ profile_switch_case(O2, O3, G3_scratch, G4_scratch);
  __ sll(O2, LogBytesPerInt, O2);
  __ add(O2, 3 * BytesPerInt, O2);
! __ ba(false, continue_execution);
  __ delayed()->ld(O1, O2, O2);
  // handle default
  __ bind(default_case);
  __ profile_switch_default(O3);
  __ ld(O1, 0, O2); // get default offset
--- 1804,1814 ----
  // lookup dispatch offset
  __ delayed()->sub(Otos_i, O2, O2);
  __ profile_switch_case(O2, O3, G3_scratch, G4_scratch);
  __ sll(O2, LogBytesPerInt, O2);
  __ add(O2, 3 * BytesPerInt, O2);
! __ ba(continue_execution);
  __ delayed()->ld(O1, O2, O2);
  // handle default
  __ bind(default_case);
  __ profile_switch_default(O3);
  __ ld(O1, 0, O2); // get default offset
*** 1856,1866 ****
  __ and3(O1, -BytesPerInt, O1); // set counter
  __ ld(O1, BytesPerInt, O2);
  __ sll(O2, LogBytesPerInt + 1, O2); // in word-pairs
  __ add(O1, 2 * BytesPerInt, O3); // set first pair addr
! __ ba(false, loop_entry);
  __ delayed()->add(O3, O2, O2); // counter now points past last pair
  // table search
  __ bind(loop);
  __ cmp(O4, Otos_i);
--- 1832,1842 ----
  __ and3(O1, -BytesPerInt, O1); // set counter
  __ ld(O1, BytesPerInt, O2);
  __ sll(O2, LogBytesPerInt + 1, O2); // in word-pairs
  __ add(O1, 2 * BytesPerInt, O3); // set first pair addr
! __ ba(loop_entry);
  __ delayed()->add(O3, O2, O2); // counter now points past last pair
  // table search
  __ bind(loop);
  __ cmp(O4, Otos_i);
*** 1875,1886 ****
  // default case
  __ ld(O1, 0, O4); // get default offset
  if (ProfileInterpreter) {
  __ profile_switch_default(O3);
! __ ba(false, continue_execution);
! __ delayed()->nop();
  }
  // entry found -> get offset
  __ bind(found);
  if (ProfileInterpreter) {
--- 1851,1861 ----
  // default case
  __ ld(O1, 0, O4); // get default offset
  if (ProfileInterpreter) {
  __ profile_switch_default(O3);
! __ ba_short(continue_execution);
  }
  // entry found -> get offset
  __ bind(found);
  if (ProfileInterpreter) {
*** 1942,1952 ****
  // initialize i & j (in delay slot)
  __ clr( Ri );
  // and start
  Label entry;
! __ ba(false, entry);
  __ delayed()->ld( Rarray, -BytesPerInt, Rj); // (Rj is already in the native byte-ordering.)
  // binary search loop
  {
  Label loop;
--- 1917,1927 ----
  // initialize i & j (in delay slot)
  __ clr( Ri );
  // and start
  Label entry;
! __ ba(entry);
  __ delayed()->ld( Rarray, -BytesPerInt, Rj); // (Rj is already in the native byte-ordering.)
  // binary search loop
  {
  Label loop;
*** 2000,2011 ****
  __ profile_switch_case(Rh, Rj, Rscratch, Rkey);
  __ ld( Rarray, Ri, Rj ); // (Rj is already in the native byte-ordering.)
  if (ProfileInterpreter) {
! __ ba(false, continue_execution);
! __ delayed()->nop();
  }
  __ bind(default_case); // fall through (if not profiling)
  __ profile_switch_default(Ri);
--- 1975,1985 ----
  __ profile_switch_case(Rh, Rj, Rscratch, Rkey);
  __ ld( Rarray, Ri, Rj ); // (Rj is already in the native byte-ordering.)
  if (ProfileInterpreter) {
! __ ba_short(continue_execution);
  }
  __ bind(default_case); // fall through (if not profiling)
  __ profile_switch_default(Ri);
*** 2214,2226 ****
  // the time to call into the VM.
  Label Label1;
  assert_different_registers(Rcache, index, G1_scratch);
  AddressLiteral get_field_access_count_addr(JvmtiExport::get_field_access_count_addr());
  __ load_contents(get_field_access_count_addr, G1_scratch);
! __ tst(G1_scratch);
! __ br(Assembler::zero, false, Assembler::pt, Label1);
! __ delayed()->nop();
  __ add(Rcache, in_bytes(cp_base_offset), Rcache);
  if (is_static) {
  __ clr(Otos_i);
--- 2188,2198 ----
  // the time to call into the VM.
  Label Label1;
  assert_different_registers(Rcache, index, G1_scratch);
  AddressLiteral get_field_access_count_addr(JvmtiExport::get_field_access_count_addr());
  __ load_contents(get_field_access_count_addr, G1_scratch);
! __ cmp_and_br_short(G1_scratch, 0, Assembler::equal, Assembler::pt, Label1);
  __ add(Rcache, in_bytes(cp_base_offset), Rcache);
  if (is_static) {
  __ clr(Otos_i);
*** 2296,2306 ****
  __ verify_oop(Otos_i);
  __ push(atos);
  if (!is_static) {
  patch_bytecode(Bytecodes::_fast_agetfield, G3_scratch, G4_scratch);
  }
! __ ba(false, checkVolatile);
  __ delayed()->tst(Lscratch);
  __ bind(notObj);
  // cmp(Rflags, itos);
--- 2268,2278 ----
  __ verify_oop(Otos_i);
  __ push(atos);
  if (!is_static) {
  patch_bytecode(Bytecodes::_fast_agetfield, G3_scratch, G4_scratch);
  }
! __ ba(checkVolatile);
  __ delayed()->tst(Lscratch);
  __ bind(notObj);
  // cmp(Rflags, itos);
*** 2311,2321 ****
  __ ld(Rclass, Roffset, Otos_i);
  __ push(itos);
  if (!is_static) {
  patch_bytecode(Bytecodes::_fast_igetfield, G3_scratch, G4_scratch);
  }
! __ ba(false, checkVolatile);
  __ delayed()->tst(Lscratch);
  __ bind(notInt);
  // cmp(Rflags, ltos);
--- 2283,2293 ----
  __ ld(Rclass, Roffset, Otos_i);
  __ push(itos);
  if (!is_static) {
  patch_bytecode(Bytecodes::_fast_igetfield, G3_scratch, G4_scratch);
  }
! __ ba(checkVolatile);
  __ delayed()->tst(Lscratch);
  __ bind(notInt);
  // cmp(Rflags, ltos);
*** 2327,2337 ****
  __ ld_long(Rclass, Roffset, Otos_l);
  __ push(ltos);
  if (!is_static) {
  patch_bytecode(Bytecodes::_fast_lgetfield, G3_scratch, G4_scratch);
  }
! __ ba(false, checkVolatile);
  __ delayed()->tst(Lscratch);
  __ bind(notLong);
  // cmp(Rflags, btos);
--- 2299,2309 ----
  __ ld_long(Rclass, Roffset, Otos_l);
  __ push(ltos);
  if (!is_static) {
  patch_bytecode(Bytecodes::_fast_lgetfield, G3_scratch, G4_scratch);
  }
! __ ba(checkVolatile);
  __ delayed()->tst(Lscratch);
  __ bind(notLong);
  // cmp(Rflags, btos);
*** 2342,2352 ****
  __ ldsb(Rclass, Roffset, Otos_i);
  __ push(itos);
  if (!is_static) {
  patch_bytecode(Bytecodes::_fast_bgetfield, G3_scratch, G4_scratch);
  }
! __ ba(false, checkVolatile);
  __ delayed()->tst(Lscratch);
  __ bind(notByte);
  // cmp(Rflags, ctos);
--- 2314,2324 ----
  __ ldsb(Rclass, Roffset, Otos_i);
  __ push(itos);
  if (!is_static) {
  patch_bytecode(Bytecodes::_fast_bgetfield, G3_scratch, G4_scratch);
  }
! __ ba(checkVolatile);
  __ delayed()->tst(Lscratch);
  __ bind(notByte);
  // cmp(Rflags, ctos);
*** 2357,2367 ****
  __ lduh(Rclass, Roffset, Otos_i);
  __ push(itos);
  if (!is_static) {
  patch_bytecode(Bytecodes::_fast_cgetfield, G3_scratch, G4_scratch);
  }
! __ ba(false, checkVolatile);
  __ delayed()->tst(Lscratch);
  __ bind(notChar);
  // cmp(Rflags, stos);
--- 2329,2339 ----
  __ lduh(Rclass, Roffset, Otos_i);
  __ push(itos);
  if (!is_static) {
  patch_bytecode(Bytecodes::_fast_cgetfield, G3_scratch, G4_scratch);
  }
! __ ba(checkVolatile);
  __ delayed()->tst(Lscratch);
  __ bind(notChar);
  // cmp(Rflags, stos);
*** 2372,2382 ****
  __ ldsh(Rclass, Roffset, Otos_i);
  __ push(itos);
  if (!is_static) {
  patch_bytecode(Bytecodes::_fast_sgetfield, G3_scratch, G4_scratch);
  }
! __ ba(false, checkVolatile);
  __ delayed()->tst(Lscratch);
  __ bind(notShort);
--- 2344,2354 ----
  __ ldsh(Rclass, Roffset, Otos_i);
  __ push(itos);
  if (!is_static) {
  patch_bytecode(Bytecodes::_fast_sgetfield, G3_scratch, G4_scratch);
  }
! __ ba(checkVolatile);
  __ delayed()->tst(Lscratch);
  __ bind(notShort);
*** 2388,2398 ****
  __ ldf(FloatRegisterImpl::S, Rclass, Roffset, Ftos_f);
  __ push(ftos);
  if (!is_static) {
  patch_bytecode(Bytecodes::_fast_fgetfield, G3_scratch, G4_scratch);
  }
! __ ba(false, checkVolatile);
  __ delayed()->tst(Lscratch);
  __ bind(notFloat);
--- 2360,2370 ----
  __ ldf(FloatRegisterImpl::S, Rclass, Roffset, Ftos_f);
  __ push(ftos);
  if (!is_static) {
  patch_bytecode(Bytecodes::_fast_fgetfield, G3_scratch, G4_scratch);
  }
! __ ba(checkVolatile);
  __ delayed()->tst(Lscratch);
  __ bind(notFloat);
*** 2497,2509 ****
  // Check to see if a field modification watch has been set before we take
  // the time to call into the VM.
  Label done;
  AddressLiteral get_field_modification_count_addr(JvmtiExport::get_field_modification_count_addr());
  __ load_contents(get_field_modification_count_addr, G4_scratch);
! __ tst(G4_scratch);
! __ br(Assembler::zero, false, Assembler::pt, done);
! __ delayed()->nop();
  __ pop_ptr(G4_scratch); // copy the object pointer from tos
  __ verify_oop(G4_scratch);
  __ push_ptr(G4_scratch); // put the object pointer back on tos
  __ get_cache_entry_pointer_at_bcp(G1_scratch, G3_scratch, 1);
  // Save tos values before call_VM() clobbers them. Since we have
--- 2469,2479 ----
  // Check to see if a field modification watch has been set before we take
  // the time to call into the VM.
  Label done;
  AddressLiteral get_field_modification_count_addr(JvmtiExport::get_field_modification_count_addr());
  __ load_contents(get_field_modification_count_addr, G4_scratch);
! __ cmp_and_br_short(G4_scratch, 0, Assembler::equal, Assembler::pt, done);
  __ pop_ptr(G4_scratch); // copy the object pointer from tos
  __ verify_oop(G4_scratch);
  __ push_ptr(G4_scratch); // put the object pointer back on tos
  __ get_cache_entry_pointer_at_bcp(G1_scratch, G3_scratch, 1);
  // Save tos values before call_VM() clobbers them. Since we have
*** 2550,2562 ****
  // the time to call into the VM.
  Label Label1;
  assert_different_registers(Rcache, index, G1_scratch);
  AddressLiteral get_field_modification_count_addr(JvmtiExport::get_field_modification_count_addr());
  __ load_contents(get_field_modification_count_addr, G1_scratch);
! __ tst(G1_scratch);
! __ br(Assembler::zero, false, Assembler::pt, Label1);
! __ delayed()->nop();
  // The Rcache and index registers have been already set.
  // This allows to eliminate this call but the Rcache and index
  // registers must be correspondingly used after this line.
  __ get_cache_and_index_at_bcp(G1_scratch, G4_scratch, 1);
--- 2520,2530 ----
  // the time to call into the VM.
  Label Label1;
  assert_different_registers(Rcache, index, G1_scratch);
  AddressLiteral get_field_modification_count_addr(JvmtiExport::get_field_modification_count_addr());
  __ load_contents(get_field_modification_count_addr, G1_scratch);
! __ cmp_and_br_short(G1_scratch, 0, Assembler::zero, Assembler::pt, Label1);
  // The Rcache and index registers have been already set.
  // This allows to eliminate this call but the Rcache and index
  // registers must be correspondingly used after this line.
  __ get_cache_and_index_at_bcp(G1_scratch, G4_scratch, 1);
*** 2582,2593 ****
  __ br(Assembler::equal, false, Assembler::pt, two_word);
  __ delayed()->cmp(Rflags, dtos);
  __ br(Assembler::equal, false, Assembler::pt, two_word);
  __ delayed()->nop();
  __ inc(G4_scratch, Interpreter::expr_offset_in_bytes(1));
! __ br(Assembler::always, false, Assembler::pt, valsizeknown);
! __ delayed()->nop();
  __ bind(two_word);
  __ inc(G4_scratch, Interpreter::expr_offset_in_bytes(2));
  __ bind(valsizeknown);
--- 2550,2560 ----
  __ br(Assembler::equal, false, Assembler::pt, two_word);
  __ delayed()->cmp(Rflags, dtos);
  __ br(Assembler::equal, false, Assembler::pt, two_word);
  __ delayed()->nop();
  __ inc(G4_scratch, Interpreter::expr_offset_in_bytes(1));
! __ ba_short(valsizeknown);
  __ bind(two_word);
  __ inc(G4_scratch, Interpreter::expr_offset_in_bytes(2));
  __ bind(valsizeknown);
*** 2634,2646 ****
  if (__ membar_has_effect(read_bits) || __ membar_has_effect(write_bits)) {
  __ set((1 << ConstantPoolCacheEntry::volatileField), Lscratch);
  __ and3(Rflags, Lscratch, Lscratch);
  if (__ membar_has_effect(read_bits)) {
! __ tst(Lscratch);
! __ br(Assembler::zero, false, Assembler::pt, notVolatile);
! __ delayed()->nop();
  volatile_barrier(read_bits);
  __ bind(notVolatile);
  }
  }
--- 2601,2611 ----
  if (__ membar_has_effect(read_bits) || __ membar_has_effect(write_bits)) {
  __ set((1 << ConstantPoolCacheEntry::volatileField), Lscratch);
  __ and3(Rflags, Lscratch, Lscratch);
  if (__ membar_has_effect(read_bits)) {
! __ cmp_and_br_short(Lscratch, 0, Assembler::equal, Assembler::pt, notVolatile);
  volatile_barrier(read_bits);
  __ bind(notVolatile);
  }
  }
*** 2661,2671 ****
  __ pop_ptr();
  __ verify_oop(Otos_i);
  do_oop_store(_masm, Rclass, Roffset, 0, Otos_i, G1_scratch, _bs->kind(), false);
! __ ba(false, checkVolatile);
  __ delayed()->tst(Lscratch);
  __ bind(notObj);
  // cmp(Rflags, itos );
--- 2626,2636 ----
  __ pop_ptr();
  __ verify_oop(Otos_i);
  do_oop_store(_masm, Rclass, Roffset, 0, Otos_i, G1_scratch, _bs->kind(), false);
! __ ba(checkVolatile);
  __ delayed()->tst(Lscratch);
  __ bind(notObj);
  // cmp(Rflags, itos );
*** 2673,2683 ****
  __ delayed() ->cmp(Rflags, btos );
  // itos
  __ pop_i();
  __ st(Otos_i, Rclass, Roffset);
! __ ba(false, checkVolatile);
  __ delayed()->tst(Lscratch);
  __ bind(notInt);
  } else {
--- 2638,2648 ----
  __ delayed() ->cmp(Rflags, btos );
  // itos
  __ pop_i();
  __ st(Otos_i, Rclass, Roffset);
! __ ba(checkVolatile);
  __ delayed()->tst(Lscratch);
  __ bind(notInt);
  } else {
*** 2689,2699 ****
  // itos
  __ pop_i();
  pop_and_check_object(Rclass);
  __ st(Otos_i, Rclass, Roffset);
  patch_bytecode(Bytecodes::_fast_iputfield, G3_scratch, G4_scratch);
! __ ba(false, checkVolatile);
  __ delayed()->tst(Lscratch);
  __ bind(notInt);
  // cmp(Rflags, atos );
  __ br(Assembler::notEqual, false, Assembler::pt, notObj);
--- 2654,2664 ----
  // itos
  __ pop_i();
  pop_and_check_object(Rclass);
  __ st(Otos_i, Rclass, Roffset);
  patch_bytecode(Bytecodes::_fast_iputfield, G3_scratch, G4_scratch);
! __ ba(checkVolatile);
  __ delayed()->tst(Lscratch);
  __ bind(notInt);
  // cmp(Rflags, atos );
  __ br(Assembler::notEqual, false, Assembler::pt, notObj);
*** 2705,2715 ****
  __ verify_oop(Otos_i);
  do_oop_store(_masm, Rclass, Roffset, 0, Otos_i, G1_scratch, _bs->kind(), false);
  patch_bytecode(Bytecodes::_fast_aputfield, G3_scratch, G4_scratch);
! __ ba(false, checkVolatile);
  __ delayed()->tst(Lscratch);
  __ bind(notObj);
  }
--- 2670,2680 ----
  __ verify_oop(Otos_i);
  do_oop_store(_masm, Rclass, Roffset, 0, Otos_i, G1_scratch, _bs->kind(), false);
  patch_bytecode(Bytecodes::_fast_aputfield, G3_scratch, G4_scratch);
! __ ba(checkVolatile);
  __ delayed()->tst(Lscratch);
  __ bind(notObj);
  }
*** 2722,2732 ****
  if (!is_static) pop_and_check_object(Rclass);
  __ stb(Otos_i, Rclass, Roffset);
  if (!is_static) {
  patch_bytecode(Bytecodes::_fast_bputfield, G3_scratch, G4_scratch);
  }
! __ ba(false, checkVolatile);
  __ delayed()->tst(Lscratch);
  __ bind(notByte);
  // cmp(Rflags, ltos );
--- 2687,2697 ----
  if (!is_static) pop_and_check_object(Rclass);
  __ stb(Otos_i, Rclass, Roffset);
  if (!is_static) {
  patch_bytecode(Bytecodes::_fast_bputfield, G3_scratch, G4_scratch);
  }
! __ ba(checkVolatile);
  __ delayed()->tst(Lscratch);
  __ bind(notByte);
  // cmp(Rflags, ltos );
*** 2738,2748 ****
  if (!is_static) pop_and_check_object(Rclass);
  __ st_long(Otos_l, Rclass, Roffset);
  if (!is_static) {
  patch_bytecode(Bytecodes::_fast_lputfield, G3_scratch, G4_scratch);
  }
! __ ba(false, checkVolatile);
  __ delayed()->tst(Lscratch);
  __ bind(notLong);
  // cmp(Rflags, ctos );
--- 2703,2713 ----
  if (!is_static) pop_and_check_object(Rclass);
  __ st_long(Otos_l, Rclass, Roffset);
  if (!is_static) {
  patch_bytecode(Bytecodes::_fast_lputfield, G3_scratch, G4_scratch);
  }
! __ ba(checkVolatile);
  __ delayed()->tst(Lscratch);
  __ bind(notLong);
  // cmp(Rflags, ctos );
*** 2754,2764 ****
  if (!is_static) pop_and_check_object(Rclass);
  __ sth(Otos_i, Rclass, Roffset);
  if (!is_static) {
  patch_bytecode(Bytecodes::_fast_cputfield, G3_scratch, G4_scratch);
  }
! __ ba(false, checkVolatile);
  __ delayed()->tst(Lscratch);
  __ bind(notChar);
  // cmp(Rflags, stos );
  __ br(Assembler::notEqual, false, Assembler::pt, notShort);
--- 2719,2729 ----
  if (!is_static) pop_and_check_object(Rclass);
  __ sth(Otos_i, Rclass, Roffset);
  if (!is_static) {
  patch_bytecode(Bytecodes::_fast_cputfield, G3_scratch, G4_scratch);
  }
! __ ba(checkVolatile);
  __ delayed()->tst(Lscratch);
  __ bind(notChar);
  // cmp(Rflags, stos );
  __ br(Assembler::notEqual, false, Assembler::pt, notShort);
*** 2769,2779 ****
  if (!is_static) pop_and_check_object(Rclass);
  __ sth(Otos_i, Rclass, Roffset);
  if (!is_static) {
  patch_bytecode(Bytecodes::_fast_sputfield, G3_scratch, G4_scratch);
  }
! __ ba(false, checkVolatile);
  __ delayed()->tst(Lscratch);
  __ bind(notShort);
  // cmp(Rflags, ftos );
  __ br(Assembler::notZero, false, Assembler::pt, notFloat);
--- 2734,2744 ----
  if (!is_static) pop_and_check_object(Rclass);
  __ sth(Otos_i, Rclass, Roffset);
  if (!is_static) {
  patch_bytecode(Bytecodes::_fast_sputfield, G3_scratch, G4_scratch);
  }
! __ ba(checkVolatile);
  __ delayed()->tst(Lscratch);
  __ bind(notShort);
  // cmp(Rflags, ftos );
  __ br(Assembler::notZero, false, Assembler::pt, notFloat);
*** 2784,2794 ****
  if (!is_static) pop_and_check_object(Rclass);
  __ stf(FloatRegisterImpl::S, Ftos_f, Rclass, Roffset);
  if (!is_static) {
  patch_bytecode(Bytecodes::_fast_fputfield, G3_scratch, G4_scratch);
  }
! __ ba(false, checkVolatile);
  __ delayed()->tst(Lscratch);
  __ bind(notFloat);
  // dtos
--- 2749,2759 ----
  if (!is_static) pop_and_check_object(Rclass);
  __ stf(FloatRegisterImpl::S, Ftos_f, Rclass, Roffset);
  if (!is_static) {
  patch_bytecode(Bytecodes::_fast_fputfield, G3_scratch, G4_scratch);
  }
! __ ba(checkVolatile);
  __ delayed()->tst(Lscratch);
  __ bind(notFloat);
  // dtos
*** 2831,2843 ****
  if (__ membar_has_effect(read_bits) || __ membar_has_effect(write_bits)) {
  __ ld_ptr(Rcache, cp_base_offset + ConstantPoolCacheEntry::flags_offset(), Rflags);
  __ set((1 << ConstantPoolCacheEntry::volatileField), Lscratch);
  __ and3(Rflags, Lscratch, Lscratch);
  if (__ membar_has_effect(read_bits)) {
! __ tst(Lscratch);
! __ br(Assembler::zero, false, Assembler::pt, notVolatile);
! __ delayed()->nop();
  volatile_barrier(read_bits);
  __ bind(notVolatile);
  }
  }
--- 2796,2806 ----
  if (__ membar_has_effect(read_bits) || __ membar_has_effect(write_bits)) {
  __ ld_ptr(Rcache, cp_base_offset + ConstantPoolCacheEntry::flags_offset(), Rflags);
  __ set((1 << ConstantPoolCacheEntry::volatileField), Lscratch);
  __ and3(Rflags, Lscratch, Lscratch);
  if (__ membar_has_effect(read_bits)) {
! __ cmp_and_br_short(Lscratch, 0, Assembler::equal, Assembler::pt, notVolatile);
  volatile_barrier(read_bits);
  __ bind(notVolatile);
  }
  }
*** 2862,2874 ****
  default:
  ShouldNotReachHere();
  }
  if (__ membar_has_effect(write_bits)) {
! __ tst(Lscratch);
! __ br(Assembler::zero, false, Assembler::pt, exit);
! __ delayed()->nop();
  volatile_barrier(Assembler::StoreLoad);
  __ bind(exit);
  }
  }
--- 2825,2835 ----
  default:
  ShouldNotReachHere();
  }
  if (__ membar_has_effect(write_bits)) {
! __ cmp_and_br_short(Lscratch, 0, Assembler::equal, Assembler::pt, exit);
  volatile_barrier(Assembler::StoreLoad);
  __ bind(exit);
  }
  }
*** 3224,3235 ****
  // Check that entry is non-null. Null entries are probably a bytecode
  // problem. If the interface isn't implemented by the receiver class,
  // the VM should throw IncompatibleClassChangeError. linkResolver checks
  // this too but that's only if the entry isn't already resolved, so we
  // need to check again.
! __ br_notnull( Rtemp, false, Assembler::pt, ok);
! __ delayed()->nop();
  call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_IncompatibleClassChangeError));
  __ should_not_reach_here();
  __ bind(ok);
  __ verify_oop(Rtemp);
  }
--- 3185,3195 ----
  // Check that entry is non-null. Null entries are probably a bytecode
  // problem. If the interface isn't implemented by the receiver class,
  // the VM should throw IncompatibleClassChangeError. linkResolver checks
  // this too but that's only if the entry isn't already resolved, so we
  // need to check again.
! __ br_notnull_short( Rtemp, Assembler::pt, ok);
  call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_IncompatibleClassChangeError));
  __ should_not_reach_here();
  __ bind(ok);
  __ verify_oop(Rtemp);
  }
*** 3249,3261 ****
  __ ld_ptr(RklassOop, Rscratch, G5_method);
  // Check for abstract method error.
  {
  Label ok;
! __ tst(G5_method);
! __ brx(Assembler::notZero, false, Assembler::pt, ok);
! __ delayed()->nop();
  call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_AbstractMethodError));
  __ should_not_reach_here();
  __ bind(ok);
  }
--- 3209,3219 ----
  __ ld_ptr(RklassOop, Rscratch, G5_method);
  // Check for abstract method error.
  {
  Label ok;
! __ br_notnull_short(G5_method, Assembler::pt, ok);
  call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_AbstractMethodError));
  __ should_not_reach_here();
  __ bind(ok);
  }
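In this hunk the rewrite is slightly less mechanical: an explicit tst of the register followed by a brx on notZero, that is, a hand-written null check, is folded into br_notnull_short, which performs the test against zero and the branch in one call. Sketch, using the statements from the hunk above:

  // Before: explicit test, branch, and an empty delay slot.
  __ tst(G5_method);
  __ brx(Assembler::notZero, false, Assembler::pt, ok);
  __ delayed()->nop();

  // After: a single helper doing the null test and the branch.
  __ br_notnull_short(G5_method, Assembler::pt, ok);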
*** 3406,3426 ****
  #ifdef _LP64
  __ srlx(RfreeValue, LogHeapWordSize, RfreeValue);
  #else
  __ srl(RfreeValue, LogHeapWordSize, RfreeValue);
  #endif
! __ cmp(RtlabWasteLimitValue, RfreeValue);
! __ brx(Assembler::greaterEqualUnsigned, false, Assembler::pt, slow_case); // tlab waste is small
! __ delayed()->nop();
  // increment waste limit to prevent getting stuck on this slow path
  __ add(RtlabWasteLimitValue, ThreadLocalAllocBuffer::refill_waste_limit_increment(), RtlabWasteLimitValue);
  __ st_ptr(RtlabWasteLimitValue, G2_thread, in_bytes(JavaThread::tlab_refill_waste_limit_offset()));
  } else {
  // No allocation in the shared eden.
! __ br(Assembler::always, false, Assembler::pt, slow_case);
! __ delayed()->nop();
  }
  }
  // Allocation in the shared Eden
  if (allow_shared_alloc) {
--- 3364,3381 ----
  #ifdef _LP64
  __ srlx(RfreeValue, LogHeapWordSize, RfreeValue);
  #else
  __ srl(RfreeValue, LogHeapWordSize, RfreeValue);
  #endif
! __ cmp_and_brx_short(RtlabWasteLimitValue, RfreeValue, Assembler::greaterEqualUnsigned, Assembler::pt, slow_case); // tlab waste is small
  // increment waste limit to prevent getting stuck on this slow path
  __ add(RtlabWasteLimitValue, ThreadLocalAllocBuffer::refill_waste_limit_increment(), RtlabWasteLimitValue);
  __ st_ptr(RtlabWasteLimitValue, G2_thread, in_bytes(JavaThread::tlab_refill_waste_limit_offset()));
  } else {
  // No allocation in the shared eden.
! __ ba_short(slow_case);
  }
  }
  // Allocation in the shared Eden
  if (allow_shared_alloc) {
*** 3438,3459 ****
  __ ld_ptr(RtopAddr, 0, RoldTopValue);
  __ add(RoldTopValue, Roffset, RnewTopValue);
  // RnewTopValue contains the top address after the new object
  // has been allocated.
! __ cmp(RnewTopValue, RendValue);
! __ brx(Assembler::greaterUnsigned, false, Assembler::pn, slow_case);
! __ delayed()->nop();
  __ casx_under_lock(RtopAddr, RoldTopValue, RnewTopValue, VM_Version::v9_instructions_work() ? NULL : (address)StubRoutines::Sparc::atomic_memory_operation_lock_addr());
  // if someone beat us on the allocation, try again, otherwise continue
! __ cmp(RoldTopValue, RnewTopValue);
! __ brx(Assembler::notEqual, false, Assembler::pn, retry);
! __ delayed()->nop();
  // bump total bytes allocated by this thread
  // RoldTopValue and RtopAddr are dead, so can use G1 and G3
  __ incr_allocated_bytes(Roffset, G1_scratch, G3_scratch);
  }
--- 3393,3410 ----
  __ ld_ptr(RtopAddr, 0, RoldTopValue);
  __ add(RoldTopValue, Roffset, RnewTopValue);
  // RnewTopValue contains the top address after the new object
  // has been allocated.
! __ cmp_and_brx_short(RnewTopValue, RendValue, Assembler::greaterUnsigned, Assembler::pn, slow_case);
  __ casx_under_lock(RtopAddr, RoldTopValue, RnewTopValue, VM_Version::v9_instructions_work() ? NULL : (address)StubRoutines::Sparc::atomic_memory_operation_lock_addr());
  // if someone beat us on the allocation, try again, otherwise continue
! __ cmp_and_brx_short(RoldTopValue, RnewTopValue, Assembler::notEqual, Assembler::pn, retry);
  // bump total bytes allocated by this thread
  // RoldTopValue and RtopAddr are dead, so can use G1 and G3
  __ incr_allocated_bytes(Roffset, G1_scratch, G3_scratch);
  }
*** 3472,3494 ****
  //__ subcc(Roffset, wordSize, Roffset); // executed above loop or in delay slot
  __ st_ptr(G0, G3_scratch, Roffset);
  __ br(Assembler::notEqual, false, Assembler::pt, loop);
  __ delayed()->subcc(Roffset, wordSize, Roffset);
  }
! __ br(Assembler::always, false, Assembler::pt, initialize_header);
! __ delayed()->nop();
  }
  // slow case
  __ bind(slow_case);
  __ get_2_byte_integer_at_bcp(1, G3_scratch, O2, InterpreterMacroAssembler::Unsigned);
  __ get_constant_pool(O1);
  call_VM(Otos_i, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), O1, O2);
! __ ba(false, done);
! __ delayed()->nop();
  // Initialize the header: mark, klass
  __ bind(initialize_header);
  if (UseBiasedLocking) {
--- 3423,3443 ----
  //__ subcc(Roffset, wordSize, Roffset); // executed above loop or in delay slot
  __ st_ptr(G0, G3_scratch, Roffset);
  __ br(Assembler::notEqual, false, Assembler::pt, loop);
  __ delayed()->subcc(Roffset, wordSize, Roffset);
  }
! __ ba_short(initialize_header);
  }
  // slow case
  __ bind(slow_case);
  __ get_2_byte_integer_at_bcp(1, G3_scratch, O2, InterpreterMacroAssembler::Unsigned);
  __ get_constant_pool(O1);
  call_VM(Otos_i, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), O1, O2);
! __ ba_short(done);
  // Initialize the header: mark, klass
  __ bind(initialize_header);
  if (UseBiasedLocking) {
*** 3548,3559 ****
  Register Roffset = G1_scratch;
  Register RobjKlass = O5;
  Register RspecifiedKlass = O4;
  // Check for casting a NULL
! __ br_null(Otos_i, false, Assembler::pn, is_null);
! __ delayed()->nop();
  // Get value klass in RobjKlass
  __ load_klass(Otos_i, RobjKlass); // get value klass
  // Get constant pool tag
--- 3497,3507 ----
  Register Roffset = G1_scratch;
  Register RobjKlass = O5;
  Register RspecifiedKlass = O4;
  // Check for casting a NULL
! __ br_null_short(Otos_i, Assembler::pn, is_null);
  // Get value klass in RobjKlass
  __ load_klass(Otos_i, RobjKlass); // get value klass
  // Get constant pool tag
*** 3569,3580 ****
  __ push_ptr(); // save receiver for result, and for GC
  call_VM(RspecifiedKlass, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc) );
  __ pop_ptr(Otos_i, G3_scratch); // restore receiver
! __ br(Assembler::always, false, Assembler::pt, resolved);
! __ delayed()->nop();
  // Extract target class from constant pool
  __ bind(quicked);
  __ add(Roffset, sizeof(constantPoolOopDesc), Roffset);
  __ ld_ptr(Lscratch, Roffset, RspecifiedKlass);
--- 3517,3527 ----
  __ push_ptr(); // save receiver for result, and for GC
  call_VM(RspecifiedKlass, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc) );
  __ pop_ptr(Otos_i, G3_scratch); // restore receiver
! __ ba_short(resolved);
  // Extract target class from constant pool
  __ bind(quicked);
  __ add(Roffset, sizeof(constantPoolOopDesc), Roffset);
  __ ld_ptr(Lscratch, Roffset, RspecifiedKlass);
*** 3589,3600 ****
  __ throw_if_not_x( Assembler::never, Interpreter::_throw_ClassCastException_entry, G3_scratch );
  __ bind(cast_ok);
  if (ProfileInterpreter) {
! __ ba(false, done);
! __ delayed()->nop();
  }
  __ bind(is_null);
  __ profile_null_seen(G3_scratch);
  __ bind(done);
  }
--- 3536,3546 ----
  __ throw_if_not_x( Assembler::never, Interpreter::_throw_ClassCastException_entry, G3_scratch );
  __ bind(cast_ok);
  if (ProfileInterpreter) {
! __ ba_short(done);
  }
  __ bind(is_null);
  __ profile_null_seen(G3_scratch);
  __ bind(done);
  }
*** 3606,3617 ****
  Register Roffset = G1_scratch;
  Register RobjKlass = O5;
  Register RspecifiedKlass = O4;
  // Check for casting a NULL
! __ br_null(Otos_i, false, Assembler::pt, is_null);
! __ delayed()->nop();
  // Get value klass in RobjKlass
  __ load_klass(Otos_i, RobjKlass); // get value klass
  // Get constant pool tag
--- 3552,3562 ----
  Register Roffset = G1_scratch;
  Register RobjKlass = O5;
  Register RspecifiedKlass = O4;
  // Check for casting a NULL
! __ br_null_short(Otos_i, Assembler::pt, is_null);
  // Get value klass in RobjKlass
  __ load_klass(Otos_i, RobjKlass); // get value klass
  // Get constant pool tag
*** 3627,3640 ****
  __ push_ptr(); // save receiver for result, and for GC
  call_VM(RspecifiedKlass, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc) );
  __ pop_ptr(Otos_i, G3_scratch); // restore receiver
! __ br(Assembler::always, false, Assembler::pt, resolved);
! __ delayed()->nop();
-
  // Extract target class from constant pool
  __ bind(quicked);
  __ add(Roffset, sizeof(constantPoolOopDesc), Roffset);
  __ get_constant_pool(Lscratch);
  __ ld_ptr(Lscratch, Roffset, RspecifiedKlass);
--- 3572,3583 ----
  __ push_ptr(); // save receiver for result, and for GC
  call_VM(RspecifiedKlass, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc) );
  __ pop_ptr(Otos_i, G3_scratch); // restore receiver
! __ ba_short(resolved);
  // Extract target class from constant pool
  __ bind(quicked);
  __ add(Roffset, sizeof(constantPoolOopDesc), Roffset);
  __ get_constant_pool(Lscratch);
  __ ld_ptr(Lscratch, Roffset, RspecifiedKlass);
*** 3647,3658 ****
  __ gen_subtype_check( RobjKlass, RspecifiedKlass, G3_scratch, G4_scratch, G1_scratch, done );
  // Not a subtype; return 0;
  __ clr( Otos_i );
  if (ProfileInterpreter) {
! __ ba(false, done);
! __ delayed()->nop();
  }
  __ bind(is_null);
  __ profile_null_seen(G3_scratch);
  __ bind(done);
  }
--- 3590,3600 ----
  __ gen_subtype_check( RobjKlass, RspecifiedKlass, G3_scratch, G4_scratch, G1_scratch, done );
  // Not a subtype; return 0;
  __ clr( Otos_i );
  if (ProfileInterpreter) {
! __ ba_short(done);
  }
  __ bind(is_null);
  __ profile_null_seen(G3_scratch);
  __ bind(done);
  }
*** 3722,3732 ****
  __ clr(O1); // points to free slot or NULL
  {
  Label entry, loop, exit;
  __ add( __ top_most_monitor(), O2 ); // last one to check
! __ ba( false, entry );
  __ delayed()->mov( Lmonitors, O3 ); // first one to check
  __ bind( loop );
--- 3664,3674 ----
  __ clr(O1); // points to free slot or NULL
  {
  Label entry, loop, exit;
  __ add( __ top_most_monitor(), O2 ); // last one to check
! __ ba( entry );
  __ delayed()->mov( Lmonitors, O3 ); // first one to check
  __ bind( loop );
*** 3755,3766 ****
  }
  {
  Label allocated;
  // found free slot?
! __ br_notnull(O1, false, Assembler::pn, allocated);
! __ delayed()->nop();
  __ add_monitor_to_stack( false, O2, O3 );
  __ mov(Lmonitors, O1);
  __ bind(allocated);
--- 3697,3707 ----
  }
  {
  Label allocated;
  // found free slot?
! __ br_notnull_short(O1, Assembler::pn, allocated);
  __ add_monitor_to_stack( false, O2, O3 );
  __ mov(Lmonitors, O1);
  __ bind(allocated);
*** 3789,3799 ****
  assert(O0 == Otos_i, "just checking");
  {
  Label entry, loop, found;
  __ add( __ top_most_monitor(), O2 ); // last one to check
! __ ba(false, entry );
  // use Lscratch to hold monitor elem to check, start with most recent monitor,
  // By using a local it survives the call to the C routine.
  __ delayed()->mov( Lmonitors, Lscratch );
  __ bind( loop );
--- 3730,3740 ----
  assert(O0 == Otos_i, "just checking");
  {
  Label entry, loop, found;
  __ add( __ top_most_monitor(), O2 ); // last one to check
! __ ba(entry);
  // use Lscratch to hold monitor elem to check, start with most recent monitor,
  // By using a local it survives the call to the C routine.
  __ delayed()->mov( Lmonitors, Lscratch );
  __ bind( loop );