369 int index_size = wide ? sizeof(u2) : sizeof(u1);
370 Label resolved;
371
372 // We are resolved if the resolved reference cache entry contains a
373 // non-null object (CallSite, etc.)
374 assert_different_registers(Otos_i, G3_scratch);
375 __ get_cache_index_at_bcp(Otos_i, G3_scratch, 1, index_size); // load index => G3_scratch
376 __ load_resolved_reference_at_index(Otos_i, G3_scratch);
377 __ tst(Otos_i);
378 __ br(Assembler::notEqual, false, Assembler::pt, resolved);
379 __ delayed()->set((int)bytecode(), O1);
380
381 address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_ldc);
382
383 // first time invocation - must resolve first
384 __ call_VM(Otos_i, entry, O1);
385 __ bind(resolved);
386 __ verify_oop(Otos_i);
387 }
388
389
// ldc2_w: push a two-word constant (long or double) from the constant pool.
// Reads the unsigned 2-byte CP index at bcp+1, fetches the tag byte for that
// index, and dispatches on JVM_CONSTANT_Double vs. everything else (long).
// G3_scratch ends up holding cpool_base + index*wordSize.
390 void TemplateTable::ldc2_w() {
391 transition(vtos, vtos);
392 Label Long, exit;
393
394 __ get_2_byte_integer_at_bcp(1, G3_scratch, O1, InterpreterMacroAssembler::Unsigned);
395 __ get_cpool_and_tags(O0, O2);
396
397 const int base_offset = ConstantPool::header_size() * wordSize;
398 const int tags_offset = Array<u1>::base_offset_in_bytes();
399 // get type from tags
400 __ add(O2, tags_offset, O2);
401 __ ldub(O2, O1, O2);
402
// O1 = index * wordSize; G3_scratch = address of the cpool entry.
403 __ sll(O1, LogBytesPerWord, O1);
404 __ add(O0, O1, G3_scratch);
405
406 __ cmp_and_brx_short(O2, JVM_CONSTANT_Double, Assembler::notEqual, Assembler::pt, Long);
407 // A double can be placed at word-aligned locations in the constant pool.
408 // Check out Conversions.java for an example.
409 // Also ConstantPool::header_size() is 20, which makes it very difficult
// NOTE(review): original lines 410-413 are elided in this excerpt; the
// matching #ifdef for the #endif below (presumably an _LP64 branch loading
// the double in one go) is not visible here -- confirm against full source.
// Non-LP64 path: load the double as two single-word float loads into the
// even/odd halves of the Ftos_d register pair.
414 FloatRegister f = Ftos_d;
415 __ ldf(FloatRegisterImpl::S, G3_scratch, base_offset, f);
416 __ ldf(FloatRegisterImpl::S, G3_scratch, base_offset + sizeof(jdouble)/2,
417 f->successor());
418 #endif
419 __ push(dtos);
420 __ ba_short(exit);
421
// Long path: one 64-bit load on LP64, two 32-bit loads otherwise.
422 __ bind(Long);
423 #ifdef _LP64
424 __ ldx(G3_scratch, base_offset, Otos_l);
425 #else
426 __ ld(G3_scratch, base_offset, Otos_l);
427 __ ld(G3_scratch, base_offset + sizeof(jlong)/2, Otos_l->successor());
428 #endif
429 __ push(ltos);
430
431 __ bind(exit);
432 }
433
434
// Load the unsigned one-byte local-variable index located at bcp+offset
// into 'reg' (offset defaults elsewhere; callers pass the operand position).
435 void TemplateTable::locals_index(Register reg, int offset) {
436 __ ldub( at_bcp(offset), reg );
437 }
438
439
// Load the unsigned two-byte local-variable index of a 'wide'-prefixed
// bytecode into 'reg'. Operand starts at bcp+2 (bcp points at the wide
// prefix, +1 is the wrapped opcode).
440 void TemplateTable::locals_index_wide(Register reg) {
441 // offset is 2, not 1, because Lbcp points to wide prefix code
442 __ get_2_byte_integer_at_bcp(2, G4_scratch, reg, InterpreterMacroAssembler::Unsigned);
443 }
444
445 void TemplateTable::iload() {
446 transition(vtos, itos);
447 // Rewrite iload,iload pair into fast_iload2
448 // iload,caload pair into fast_icaload
449 if (RewriteFrequentPairs) {
450 Label rewrite, done;
451
452 // get next byte
453 __ ldub(at_bcp(Bytecodes::length_for(Bytecodes::_iload)), G3_scratch);
454
455 // if _iload, wait to rewrite to iload2. We only want to rewrite the
456 // last two iloads in a pair. Comparing against fast_iload means that
457 // the next bytecode is neither an iload or a caload, and therefore
458 // an iload pair.
459 __ cmp_and_br_short(G3_scratch, (int)Bytecodes::_iload, Assembler::equal, Assembler::pn, done);
460
461 __ cmp(G3_scratch, (int)Bytecodes::_fast_iload);
462 __ br(Assembler::equal, false, Assembler::pn, rewrite);
463 __ delayed()->set(Bytecodes::_fast_iload2, G4_scratch);
464
465 __ cmp(G3_scratch, (int)Bytecodes::_caload);
466 __ br(Assembler::equal, false, Assembler::pn, rewrite);
467 __ delayed()->set(Bytecodes::_fast_icaload, G4_scratch);
468
469 __ set(Bytecodes::_fast_iload, G4_scratch); // don't check again
656
// fload_<n>: load the float in local slot n into Ftos_f.
// The assert bounds n because the same small-n assumption is shared with
// register-parameter handling elsewhere (per the message).
657 void TemplateTable::fload(int n) {
658 transition(vtos, ftos);
659 assert(n < Argument::n_register_parameters, "would need more code");
660 __ ldf( FloatRegisterImpl::S, Llocals, Interpreter::local_offset_in_bytes(n), Ftos_f );
661 }
662
663
// dload_<n>: load the double occupying local slots n/n+1 into Ftos_d.
// Uses load_unaligned_double since locals are only word-aligned; the n+1
// offset addresses the second slot of the two-slot long/double pair
// (locals grow toward lower addresses -- TODO confirm slot layout).
664 void TemplateTable::dload(int n) {
665 transition(vtos, dtos);
666 FloatRegister dst = Ftos_d;
667 __ load_unaligned_double(Llocals, Interpreter::local_offset_in_bytes(n+1), dst);
668 }
669
670
// aload_<n>: load the reference in local slot n into Otos_i.
671 void TemplateTable::aload(int n) {
672 transition(vtos, atos);
673 __ ld_ptr( Llocals, Interpreter::local_offset_in_bytes(n), Otos_i );
674 }
675
676
// aload_0: load local 0 (the receiver) and, when RewriteFrequentPairs is
// enabled, peek at the NEXT bytecode to rewrite the (aload_0, fast_Xgetfield)
// pair into a single fused fast bytecode, or plain aload_0 into _fast_aload_0.
677 void TemplateTable::aload_0() {
678 transition(vtos, atos);
679
680 // According to bytecode histograms, the pairs:
681 //
682 // _aload_0, _fast_igetfield (itos)
683 // _aload_0, _fast_agetfield (atos)
684 // _aload_0, _fast_fgetfield (ftos)
685 //
686 // occur frequently. If RewriteFrequentPairs is set, the (slow) _aload_0
687 // bytecode checks the next bytecode and then rewrites the current
688 // bytecode into a pair bytecode; otherwise it rewrites the current
689 // bytecode into _fast_aload_0 that doesn't do the pair check anymore.
690 //
691 if (RewriteFrequentPairs) {
692 Label rewrite, done;
693
694 // get next byte
695 __ ldub(at_bcp(Bytecodes::length_for(Bytecodes::_aload_0)), G3_scratch);
696
697 // do actual aload_0
698 aload(0);
699
700 // if _getfield then wait with rewrite
// (the follower is still the slow _getfield: it has not been rewritten to a
// fast form yet, so the pair cannot be fused on this pass)
701 __ cmp_and_br_short(G3_scratch, (int)Bytecodes::_getfield, Assembler::equal, Assembler::pn, done);
702
703 // if _igetfield then rewrite to _fast_iaccess_0
704 assert(Bytecodes::java_code(Bytecodes::_fast_iaccess_0) == Bytecodes::_aload_0, "adjust fast bytecode def");
705 __ cmp(G3_scratch, (int)Bytecodes::_fast_igetfield);
706 __ br(Assembler::equal, false, Assembler::pn, rewrite);
707 __ delayed()->set(Bytecodes::_fast_iaccess_0, G4_scratch);
708
709 // if _agetfield then rewrite to _fast_aaccess_0
710 assert(Bytecodes::java_code(Bytecodes::_fast_aaccess_0) == Bytecodes::_aload_0, "adjust fast bytecode def");
711 __ cmp(G3_scratch, (int)Bytecodes::_fast_agetfield);
// NOTE(review): original lines 712-714 (the br/delayed()->set for the
// _fast_aaccess_0 case) are elided in this excerpt -- confirm against the
// full source before relying on this listing.
715 // if _fgetfield then rewrite to _fast_faccess_0
716 assert(Bytecodes::java_code(Bytecodes::_fast_faccess_0) == Bytecodes::_aload_0, "adjust fast bytecode def");
717 __ cmp(G3_scratch, (int)Bytecodes::_fast_fgetfield);
718 __ br(Assembler::equal, false, Assembler::pn, rewrite);
719 __ delayed()->set(Bytecodes::_fast_faccess_0, G4_scratch);
720
721 // else rewrite to _fast_aload0
722 assert(Bytecodes::java_code(Bytecodes::_fast_aload_0) == Bytecodes::_aload_0, "adjust fast bytecode def");
723 __ set(Bytecodes::_fast_aload_0, G4_scratch);
724
725 // rewrite
726 // G4_scratch: fast bytecode
727 __ bind(rewrite);
728 patch_bytecode(Bytecodes::_aload_0, G4_scratch, G3_scratch, false);
729 __ bind(done);
730 } else {
731 aload(0);
732 }
733 }
734
735
// istore: pop the int on tos (Otos_i) into the local whose one-byte index
// follows the opcode.
736 void TemplateTable::istore() {
737 transition(itos, vtos);
738 locals_index(G3_scratch);
739 __ store_local_int( G3_scratch, Otos_i );
740 }
741
742
// lstore: pop the long on tos (Otos_l) into the indexed local pair.
743 void TemplateTable::lstore() {
744 transition(ltos, vtos);
745 locals_index(G3_scratch);
746 __ store_local_long( G3_scratch, Otos_l );
747 }
748
749
// fstore: pop the float on tos (Ftos_f) into the indexed local.
750 void TemplateTable::fstore() {
751 transition(ftos, vtos);
752 locals_index(G3_scratch);
753 __ store_local_float( G3_scratch, Ftos_f );
754 }
755
2029 // We only put in barriers around volatile refs (they are expensive), not
2030 // _between_ memory refs (that would require us to track the flavor of the
2031 // previous memory refs). Requirements (2) and (3) require some barriers
2032 // before volatile stores and after volatile loads. These nearly cover
2033 // requirement (1) but miss the volatile-store-volatile-load case. This final
2034 // case is placed after volatile-stores although it could just as well go
2035 // before volatile-loads.
// Emit a memory barrier for volatile accesses, but only when the requested
// constraint actually needs one on this platform: SPARC TSO already orders
// everything except Store->Load, so any mask without StoreLoad is a no-op.
2036 void TemplateTable::volatile_barrier(Assembler::Membar_mask_bits order_constraint) {
2037 // Helper function to insert a is-volatile test and memory barrier
2038 // All current sparc implementations run in TSO, needing only StoreLoad
2039 if ((order_constraint & Assembler::StoreLoad) == 0) return;
2040 __ membar( order_constraint );
2041 }
2042
2043 // ----------------------------------------------------------------------------
// Ensure the constant-pool-cache entry for the current bytecode is resolved.
// Fast path: the cached bytecode in the cp-cache entry already equals this
// bytecode -> already resolved, fall through. Slow path: call the matching
// InterpreterRuntime resolver (selected by bytecode kind), then re-load
// Rcache/index since the VM call clobbers them.
2044 void TemplateTable::resolve_cache_and_index(int byte_no,
2045 Register Rcache,
2046 Register index,
2047 size_t index_size) {
2048 // Depends on cpCacheOop layout!
2049 Label resolved;
2050
2051 assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
2052 __ get_cache_and_index_and_bytecode_at_bcp(Rcache, index, Lbyte_code, byte_no, 1, index_size);
2053 __ cmp(Lbyte_code, (int) bytecode()); // have we resolved this bytecode?
2054 __ br(Assembler::equal, false, Assembler::pt, resolved);
// Delay slot also preloads O1 with the bytecode, which is the argument the
// runtime resolver expects on the slow path.
2055 __ delayed()->set((int)bytecode(), O1);
2056
2057 address entry;
2058 switch (bytecode()) {
2059 case Bytecodes::_getstatic : // fall through
2060 case Bytecodes::_putstatic : // fall through
2061 case Bytecodes::_getfield : // fall through
2062 case Bytecodes::_putfield : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_get_put); break;
2063 case Bytecodes::_invokevirtual : // fall through
2064 case Bytecodes::_invokespecial : // fall through
2065 case Bytecodes::_invokestatic : // fall through
2066 case Bytecodes::_invokeinterface: entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invoke); break;
2067 case Bytecodes::_invokehandle : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokehandle); break;
2068 case Bytecodes::_invokedynamic : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokedynamic); break;
2069 default:
// fatal() does not return, so 'entry' cannot be used uninitialized below.
2070 fatal(err_msg("unexpected bytecode: %s", Bytecodes::name(bytecode())));
2071 break;
2072 }
2073 // first time invocation - must resolve first
2074 __ call_VM(noreg, entry, O1);
2075 // Update registers with resolved info
2076 __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size);
2077 __ bind(resolved);
2078 }
2079
2080 void TemplateTable::load_invoke_cp_cache_entry(int byte_no,
2081 Register method,
2082 Register itable_index,
2083 Register flags,
2084 bool is_invokevirtual,
2085 bool is_invokevfinal,
2086 bool is_invokedynamic) {
2087 // Uses both G3_scratch and G4_scratch
2088 Register cache = G3_scratch;
2089 Register index = G4_scratch;
2090 assert_different_registers(cache, method, itable_index);
2167 __ push_ptr(Otos_i); // put object on tos where GC wants it.
2168 } else {
2169 // Load top of stack (do not pop the value off the stack);
2170 __ ld_ptr(Lesp, Interpreter::expr_offset_in_bytes(0), Otos_i);
2171 }
2172 __ verify_oop(Otos_i);
2173 }
2174 // Otos_i: object pointer or NULL if static
2175 // Rcache: cache entry pointer
2176 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access),
2177 Otos_i, Rcache);
2178 if (!is_static && has_tos) {
2179 __ pop_ptr(Otos_i); // restore object pointer
2180 __ verify_oop(Otos_i);
2181 }
2182 __ get_cache_and_index_at_bcp(Rcache, index, 1);
2183 __ bind(Label1);
2184 }
2185 }
2186
// Shared template for getfield (is_static=false) and getstatic (true).
// Resolves the cp-cache entry, optionally posts a JVMTI field-access event,
// loads the field offset and flags, then dispatches on the tos-state bits of
// Rflags to load/push the field with the right width. Each non-static case
// also patches the bytecode to its _fast_Xgetfield form. Every taken branch
// to checkVolatile carries tst(Lscratch) in its delay slot so the volatile
// flag test is already done on arrival.
2187 void TemplateTable::getfield_or_static(int byte_no, bool is_static) {
2188 transition(vtos, vtos);
2189
2190 Register Rcache = G3_scratch;
2191 Register index = G4_scratch;
2192 Register Rclass = Rcache;
2193 Register Roffset= G4_scratch;
2194 Register Rflags = G1_scratch;
2195 ByteSize cp_base_offset = ConstantPoolCache::base_offset();
2196
2197 resolve_cache_and_index(byte_no, Rcache, index, sizeof(u2));
2198 jvmti_post_field_access(Rcache, index, is_static, false);
2199 load_field_cp_cache_entry(Rclass, Rcache, index, Roffset, Rflags, is_static);
2200
2201 if (!is_static) {
2202 pop_and_check_object(Rclass);
2203 } else {
2204 __ verify_oop(Rclass);
2205 }
2206
2207 Label exit;
// NOTE(review): original lines 2208-2214 are elided in this excerpt; they
// presumably declare 'membar_bits' (used at the checkVolatile epilogue) and
// begin the membar_has_effect guard that this and3 / closing brace belong
// to -- confirm against the full source.
2215 __ and3(Rflags, Lscratch, Lscratch);
2216 }
2217
2218 Label checkVolatile;
2219
2220 // compute field type
2221 Label notByte, notInt, notShort, notChar, notLong, notFloat, notObj;
2222 __ srl(Rflags, ConstantPoolCacheEntry::tos_state_shift, Rflags);
2223 // Make sure we don't need to mask Rflags after the above shift
2224 ConstantPoolCacheEntry::verify_tos_state_shift();
2225
2226 // Check atos before itos for getstatic, more likely (in Queens at least)
2227 __ cmp(Rflags, atos);
2228 __ br(Assembler::notEqual, false, Assembler::pt, notObj);
// Each dispatch branch preloads the NEXT comparison in its delay slot.
2229 __ delayed() ->cmp(Rflags, itos);
2230
2231 // atos
2232 __ load_heap_oop(Rclass, Roffset, Otos_i);
2233 __ verify_oop(Otos_i);
2234 __ push(atos);
2235 if (!is_static) {
2236 patch_bytecode(Bytecodes::_fast_agetfield, G3_scratch, G4_scratch);
2237 }
2238 __ ba(checkVolatile);
2239 __ delayed()->tst(Lscratch);
2240
2241 __ bind(notObj);
2242
2243 // cmp(Rflags, itos);
2244 __ br(Assembler::notEqual, false, Assembler::pt, notInt);
2245 __ delayed() ->cmp(Rflags, ltos);
2246
2247 // itos
2248 __ ld(Rclass, Roffset, Otos_i);
2249 __ push(itos);
2250 if (!is_static) {
2251 patch_bytecode(Bytecodes::_fast_igetfield, G3_scratch, G4_scratch);
2252 }
2253 __ ba(checkVolatile);
2254 __ delayed()->tst(Lscratch);
2255
2256 __ bind(notInt);
2257
2258 // cmp(Rflags, ltos);
2259 __ br(Assembler::notEqual, false, Assembler::pt, notLong);
2260 __ delayed() ->cmp(Rflags, btos);
2261
2262 // ltos
2263 // load must be atomic
2264 __ ld_long(Rclass, Roffset, Otos_l);
2265 __ push(ltos);
2266 if (!is_static) {
2267 patch_bytecode(Bytecodes::_fast_lgetfield, G3_scratch, G4_scratch);
2268 }
2269 __ ba(checkVolatile);
2270 __ delayed()->tst(Lscratch);
2271
2272 __ bind(notLong);
2273
2274 // cmp(Rflags, btos);
2275 __ br(Assembler::notEqual, false, Assembler::pt, notByte);
2276 __ delayed() ->cmp(Rflags, ctos);
2277
2278 // btos
// sign-extending byte load (Java byte is signed)
2279 __ ldsb(Rclass, Roffset, Otos_i);
2280 __ push(itos);
2281 if (!is_static) {
2282 patch_bytecode(Bytecodes::_fast_bgetfield, G3_scratch, G4_scratch);
2283 }
2284 __ ba(checkVolatile);
2285 __ delayed()->tst(Lscratch);
2286
2287 __ bind(notByte);
2288
2289 // cmp(Rflags, ctos);
2290 __ br(Assembler::notEqual, false, Assembler::pt, notChar);
2291 __ delayed() ->cmp(Rflags, stos);
2292
2293 // ctos
// zero-extending half load (Java char is unsigned 16-bit)
2294 __ lduh(Rclass, Roffset, Otos_i);
2295 __ push(itos);
2296 if (!is_static) {
2297 patch_bytecode(Bytecodes::_fast_cgetfield, G3_scratch, G4_scratch);
2298 }
2299 __ ba(checkVolatile);
2300 __ delayed()->tst(Lscratch);
2301
2302 __ bind(notChar);
2303
2304 // cmp(Rflags, stos);
2305 __ br(Assembler::notEqual, false, Assembler::pt, notShort);
2306 __ delayed() ->cmp(Rflags, ftos);
2307
2308 // stos
// sign-extending half load (Java short is signed 16-bit)
2309 __ ldsh(Rclass, Roffset, Otos_i);
2310 __ push(itos);
2311 if (!is_static) {
2312 patch_bytecode(Bytecodes::_fast_sgetfield, G3_scratch, G4_scratch);
2313 }
2314 __ ba(checkVolatile);
2315 __ delayed()->tst(Lscratch);
2316
2317 __ bind(notShort);
2318
2319
2320 // cmp(Rflags, ftos);
2321 __ br(Assembler::notEqual, false, Assembler::pt, notFloat);
// Last dispatch: delay slot does the volatile test instead of another cmp,
// since the fall-through (dtos) is the only remaining case.
2322 __ delayed() ->tst(Lscratch);
2323
2324 // ftos
2325 __ ldf(FloatRegisterImpl::S, Rclass, Roffset, Ftos_f);
2326 __ push(ftos);
2327 if (!is_static) {
2328 patch_bytecode(Bytecodes::_fast_fgetfield, G3_scratch, G4_scratch);
2329 }
2330 __ ba(checkVolatile);
2331 __ delayed()->tst(Lscratch);
2332
2333 __ bind(notFloat);
2334
2335
2336 // dtos
2337 __ ldf(FloatRegisterImpl::D, Rclass, Roffset, Ftos_d);
2338 __ push(dtos);
2339 if (!is_static) {
2340 patch_bytecode(Bytecodes::_fast_dgetfield, G3_scratch, G4_scratch);
2341 }
2342
2343 __ bind(checkVolatile);
2344 if (__ membar_has_effect(membar_bits)) {
2345 // __ tst(Lscratch); executed in delay slot
2346 __ br(Assembler::zero, false, Assembler::pt, exit);
2347 __ delayed()->nop();
// Field is volatile: emit the post-load barrier.
2348 volatile_barrier(membar_bits);
2349 }
2350
2351 __ bind(exit);
2352 }
2353
2354
// getfield: instance-field load; delegates to the shared template.
2355 void TemplateTable::getfield(int byte_no) {
2356 getfield_or_static(byte_no, false);
2357 }
2358
// getstatic: static-field load; delegates to the shared template.
2359 void TemplateTable::getstatic(int byte_no) {
2360 getfield_or_static(byte_no, true);
2361 }
2362
2363
2364 void TemplateTable::fast_accessfield(TosState state) {
2365 transition(atos, state);
2366 Register Rcache = G3_scratch;
2367 Register index = G4_scratch;
2368 Register Roffset = G4_scratch;
2369 Register Rflags = Rcache;
2370 ByteSize cp_base_offset = ConstantPoolCache::base_offset();
2371
2372 __ get_cache_and_index_at_bcp(Rcache, index, 1);
2373 jvmti_post_field_access(Rcache, index, /*is_static*/false, /*has_tos*/true);
2374
2375 __ ld_ptr(Rcache, cp_base_offset + ConstantPoolCacheEntry::f2_offset(), Roffset);
2376
2377 __ null_check(Otos_i);
2378 __ verify_oop(Otos_i);
2379
2380 Label exit;
2381
2382 Assembler::Membar_mask_bits membar_bits =
2383 Assembler::Membar_mask_bits(Assembler::LoadLoad | Assembler::LoadStore);
2528 __ verify_oop(G4_scratch);
2529 }
2530 // setup pointer to jvalue object
2531 __ mov(Lesp, G1_scratch); __ inc(G1_scratch, wordSize);
2532 // G4_scratch: object pointer or NULL if static
2533 // G3_scratch: cache entry pointer
2534 // G1_scratch: jvalue object on the stack
2535 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification),
2536 G4_scratch, G3_scratch, G1_scratch);
2537 __ get_cache_and_index_at_bcp(Rcache, index, 1);
2538 __ bind(Label1);
2539 }
2540 }
2541
// Pop the object operand from the expression stack into r, then null-check
// it (field access requires a non-null receiver) and verify it is a valid
// oop in debug builds. Order matters: pop first, then check.
2542 void TemplateTable::pop_and_check_object(Register r) {
2543 __ pop_ptr(r);
2544 __ null_check(r); // for field access must check obj.
2545 __ verify_oop(r);
2546 }
2547
// Shared template for putfield (is_static=false) and putstatic (true).
// Resolves the cp-cache entry, optionally posts a JVMTI field-modification
// event, loads offset/flags, then dispatches on the tos-state bits of Rflags
// to pop and store the value with the right width. Non-static cases patch
// the bytecode to its _fast_Xputfield form. Branches into checkVolatile
// carry tst(Lscratch) (the volatile-flag test) in their delay slots.
2548 void TemplateTable::putfield_or_static(int byte_no, bool is_static) {
2549 transition(vtos, vtos);
2550 Register Rcache = G3_scratch;
2551 Register index = G4_scratch;
2552 Register Rclass = Rcache;
2553 Register Roffset= G4_scratch;
2554 Register Rflags = G1_scratch;
2555 ByteSize cp_base_offset = ConstantPoolCache::base_offset();
2556
2557 resolve_cache_and_index(byte_no, Rcache, index, sizeof(u2));
2558 jvmti_post_field_mod(Rcache, index, is_static);
2559 load_field_cp_cache_entry(Rclass, Rcache, index, Roffset, Rflags, is_static);
2560
// read_bits/write_bits: barriers needed before (LoadStore|StoreStore) and
// after (StoreLoad) a volatile store, respectively.
2561 Assembler::Membar_mask_bits read_bits =
2562 Assembler::Membar_mask_bits(Assembler::LoadStore | Assembler::StoreStore);
2563 Assembler::Membar_mask_bits write_bits = Assembler::StoreLoad;
2564
2565 Label notVolatile, checkVolatile, exit;
2566 if (__ membar_has_effect(read_bits) || __ membar_has_effect(write_bits)) {
// Lscratch = Rflags & is_volatile bit; tested later in delay slots.
2567 __ set((1 << ConstantPoolCacheEntry::is_volatile_shift), Lscratch);
2568 __ and3(Rflags, Lscratch, Lscratch);
// NOTE(review): original lines 2569-2603 are elided in this excerpt; they
// contain the pre-store volatile barrier, the tos-state dispatch header for
// this branch of the if, and the start of the static itos case -- confirm
// against the full source.
2604 // itos
2605 {
2606 __ pop_i();
2607 __ st(Otos_i, Rclass, Roffset);
2608 __ ba(checkVolatile);
2609 __ delayed()->tst(Lscratch);
2610 }
2611
2612 __ bind(notInt);
2613 } else {
// No barriers needed at all on this configuration: simpler dispatch,
// int checked first as the statistically most common putfield type.
2614 // putfield with int type most likely, check that first
2615 __ cmp(Rflags, itos);
2616 __ br(Assembler::notEqual, false, Assembler::pt, notInt);
2617 __ delayed()->cmp(Rflags, atos);
2618
2619 // itos
2620 {
2621 __ pop_i();
2622 pop_and_check_object(Rclass);
2623 __ st(Otos_i, Rclass, Roffset);
2624 patch_bytecode(Bytecodes::_fast_iputfield, G3_scratch, G4_scratch, true, byte_no);
2625 __ ba(checkVolatile);
2626 __ delayed()->tst(Lscratch);
2627 }
2628
2629 __ bind(notInt);
2630 // cmp(Rflags, atos);
2631 __ br(Assembler::notEqual, false, Assembler::pt, notObj);
2632 __ delayed()->cmp(Rflags, btos);
2633
2634 // atos
2635 {
2636 __ pop_ptr();
2637 pop_and_check_object(Rclass);
2638 __ verify_oop(Otos_i);
// Reference stores go through the GC barrier-set helper, not a raw store.
2639 do_oop_store(_masm, Rclass, Roffset, 0, Otos_i, G1_scratch, _bs->kind(), false);
2640 patch_bytecode(Bytecodes::_fast_aputfield, G3_scratch, G4_scratch, true, byte_no);
2641 __ ba(checkVolatile);
2642 __ delayed()->tst(Lscratch);
2643 }
2644
2645 __ bind(notObj);
2646 }
2647
// From here on the two branches above share the remaining type dispatch.
2648 // cmp(Rflags, btos);
2649 __ br(Assembler::notEqual, false, Assembler::pt, notByte);
2650 __ delayed()->cmp(Rflags, ltos);
2651
2652 // btos
2653 {
2654 __ pop_i();
2655 if (!is_static) pop_and_check_object(Rclass);
2656 __ stb(Otos_i, Rclass, Roffset);
2657 if (!is_static) {
2658 patch_bytecode(Bytecodes::_fast_bputfield, G3_scratch, G4_scratch, true, byte_no);
2659 }
2660 __ ba(checkVolatile);
2661 __ delayed()->tst(Lscratch);
2662 }
2663
2664 __ bind(notByte);
2665 // cmp(Rflags, ltos);
2666 __ br(Assembler::notEqual, false, Assembler::pt, notLong);
2667 __ delayed()->cmp(Rflags, ctos);
2668
2669 // ltos
2670 {
2671 __ pop_l();
2672 if (!is_static) pop_and_check_object(Rclass);
2673 __ st_long(Otos_l, Rclass, Roffset);
2674 if (!is_static) {
2675 patch_bytecode(Bytecodes::_fast_lputfield, G3_scratch, G4_scratch, true, byte_no);
2676 }
2677 __ ba(checkVolatile);
2678 __ delayed()->tst(Lscratch);
2679 }
2680
2681 __ bind(notLong);
2682 // cmp(Rflags, ctos);
2683 __ br(Assembler::notEqual, false, Assembler::pt, notChar);
2684 __ delayed()->cmp(Rflags, stos);
2685
2686 // ctos (char)
2687 {
2688 __ pop_i();
2689 if (!is_static) pop_and_check_object(Rclass);
// char and short share sth (16-bit store); signedness only matters on load.
2690 __ sth(Otos_i, Rclass, Roffset);
2691 if (!is_static) {
2692 patch_bytecode(Bytecodes::_fast_cputfield, G3_scratch, G4_scratch, true, byte_no);
2693 }
2694 __ ba(checkVolatile);
2695 __ delayed()->tst(Lscratch);
2696 }
2697
2698 __ bind(notChar);
2699 // cmp(Rflags, stos);
2700 __ br(Assembler::notEqual, false, Assembler::pt, notShort);
2701 __ delayed()->cmp(Rflags, ftos);
2702
2703 // stos (short)
2704 {
2705 __ pop_i();
2706 if (!is_static) pop_and_check_object(Rclass);
2707 __ sth(Otos_i, Rclass, Roffset);
2708 if (!is_static) {
2709 patch_bytecode(Bytecodes::_fast_sputfield, G3_scratch, G4_scratch, true, byte_no);
2710 }
2711 __ ba(checkVolatile);
2712 __ delayed()->tst(Lscratch);
2713 }
2714
2715 __ bind(notShort);
2716 // cmp(Rflags, ftos);
2717 __ br(Assembler::notZero, false, Assembler::pt, notFloat);
2718 __ delayed()->nop();
2719
2720 // ftos
2721 {
2722 __ pop_f();
2723 if (!is_static) pop_and_check_object(Rclass);
2724 __ stf(FloatRegisterImpl::S, Ftos_f, Rclass, Roffset);
2725 if (!is_static) {
2726 patch_bytecode(Bytecodes::_fast_fputfield, G3_scratch, G4_scratch, true, byte_no);
2727 }
2728 __ ba(checkVolatile);
2729 __ delayed()->tst(Lscratch);
2730 }
2731
2732 __ bind(notFloat);
2733
2734 // dtos
2735 {
2736 __ pop_d();
2737 if (!is_static) pop_and_check_object(Rclass);
2738 __ stf(FloatRegisterImpl::D, Ftos_d, Rclass, Roffset);
2739 if (!is_static) {
2740 patch_bytecode(Bytecodes::_fast_dputfield, G3_scratch, G4_scratch, true, byte_no);
2741 }
2742 }
2743
2744 __ bind(checkVolatile);
// Explicit tst here because the dtos case above falls through (no delay-slot
// tst on that path); redundant-but-harmless for the branched-in paths.
2745 __ tst(Lscratch);
2746
2747 if (__ membar_has_effect(write_bits)) {
2748 // __ tst(Lscratch); in delay slot
2749 __ br(Assembler::zero, false, Assembler::pt, exit);
2750 __ delayed()->nop();
// Volatile store: emit the trailing StoreLoad barrier.
2751 volatile_barrier(Assembler::StoreLoad);
2752 __ bind(exit);
2753 }
2754 }
2755
2756 void TemplateTable::fast_storefield(TosState state) {
2757 transition(state, vtos);
2758 Register Rcache = G3_scratch;
2759 Register Rclass = Rcache;
2793 case Bytecodes::_fast_fputfield:
2794 __ stf(FloatRegisterImpl::S, Ftos_f, Rclass, Roffset);
2795 break;
2796 case Bytecodes::_fast_dputfield:
2797 __ stf(FloatRegisterImpl::D, Ftos_d, Rclass, Roffset);
2798 break;
2799 case Bytecodes::_fast_aputfield:
2800 do_oop_store(_masm, Rclass, Roffset, 0, Otos_i, G1_scratch, _bs->kind(), false);
2801 break;
2802 default:
2803 ShouldNotReachHere();
2804 }
2805
2806 if (__ membar_has_effect(write_bits)) {
2807 __ cmp_and_br_short(Lscratch, 0, Assembler::equal, Assembler::pt, exit);
2808 volatile_barrier(Assembler::StoreLoad);
2809 __ bind(exit);
2810 }
2811 }
2812
2813
// putfield: instance-field store; delegates to the shared template.
2814 void TemplateTable::putfield(int byte_no) {
2815 putfield_or_static(byte_no, false);
2816 }
2817
// putstatic: static-field store; delegates to the shared template.
2818 void TemplateTable::putstatic(int byte_no) {
2819 putfield_or_static(byte_no, true);
2820 }
2821
2822
2823 void TemplateTable::fast_xaccess(TosState state) {
2824 transition(vtos, state);
2825 Register Rcache = G3_scratch;
2826 Register Roffset = G4_scratch;
2827 Register Rflags = G4_scratch;
2828 Register Rreceiver = Lscratch;
2829
2830 __ ld_ptr(Llocals, 0, Rreceiver);
2831
2832 // access constant pool cache (is resolved)
2833 __ get_cache_and_index_at_bcp(Rcache, G4_scratch, 2);
2834 __ ld_ptr(Rcache, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::f2_offset(), Roffset);
2835 __ add(Lbcp, 1, Lbcp); // needed to report exception at the correct bcp
2836
2837 __ verify_oop(Rreceiver);
2838 __ null_check(Rreceiver);
2839 if (state == atos) {
2840 __ load_heap_oop(Rreceiver, Roffset, Otos_i);
2841 } else if (state == itos) {
2842 __ ld (Rreceiver, Roffset, Otos_i) ;
2955
2956 void TemplateTable::invokevirtual(int byte_no) {
2957 transition(vtos, vtos);
2958 assert(byte_no == f2_byte, "use this argument");
2959
2960 Register Rscratch = G3_scratch;
2961 Register Rtemp = G4_scratch;
2962 Register Rret = Lscratch;
2963 Register O0_recv = O0;
2964 Label notFinal;
2965
2966 load_invoke_cp_cache_entry(byte_no, G5_method, noreg, Rret, true, false, false);
2967 __ mov(SP, O5_savedSP); // record SP that we wanted the callee to restore
2968
2969 // Check for vfinal
2970 __ set((1 << ConstantPoolCacheEntry::is_vfinal_shift), G4_scratch);
2971 __ btst(Rret, G4_scratch);
2972 __ br(Assembler::zero, false, Assembler::pt, notFinal);
2973 __ delayed()->and3(Rret, 0xFF, G4_scratch); // gets number of parameters
2974
2975 patch_bytecode(Bytecodes::_fast_invokevfinal, Rscratch, Rtemp);
2976
2977 invokevfinal_helper(Rscratch, Rret);
2978
2979 __ bind(notFinal);
2980
2981 __ mov(G5_method, Rscratch); // better scratch register
2982 __ load_receiver(G4_scratch, O0_recv); // gets receiverOop
2983 // receiver is in O0_recv
2984 __ verify_oop(O0_recv);
2985
2986 // get return address
2987 AddressLiteral table(Interpreter::invoke_return_entry_table());
2988 __ set(table, Rtemp);
2989 __ srl(Rret, ConstantPoolCacheEntry::tos_state_shift, Rret); // get return type
2990 // Make sure we don't need to mask Rret after the above shift
2991 ConstantPoolCacheEntry::verify_tos_state_shift();
2992 __ sll(Rret, LogBytesPerWord, Rret);
2993 __ ld_ptr(Rtemp, Rret, Rret); // get return address
2994
2995 // get receiver klass
|
369 int index_size = wide ? sizeof(u2) : sizeof(u1);
370 Label resolved;
371
372 // We are resolved if the resolved reference cache entry contains a
373 // non-null object (CallSite, etc.)
374 assert_different_registers(Otos_i, G3_scratch);
375 __ get_cache_index_at_bcp(Otos_i, G3_scratch, 1, index_size); // load index => G3_scratch
376 __ load_resolved_reference_at_index(Otos_i, G3_scratch);
377 __ tst(Otos_i);
378 __ br(Assembler::notEqual, false, Assembler::pt, resolved);
379 __ delayed()->set((int)bytecode(), O1);
380
381 address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_ldc);
382
383 // first time invocation - must resolve first
384 __ call_VM(Otos_i, entry, O1);
385 __ bind(resolved);
386 __ verify_oop(Otos_i);
387 }
388
// ldc2_w: push a two-word constant (long or double) from the constant pool.
// Reads the unsigned 2-byte CP index at bcp+1, fetches the tag byte for that
// index, and dispatches on JVM_CONSTANT_Double vs. everything else (long).
// G3_scratch ends up holding cpool_base + index*wordSize.
389 void TemplateTable::ldc2_w() {
390 transition(vtos, vtos);
391 Label Long, exit;
392
393 __ get_2_byte_integer_at_bcp(1, G3_scratch, O1, InterpreterMacroAssembler::Unsigned);
394 __ get_cpool_and_tags(O0, O2);
395
396 const int base_offset = ConstantPool::header_size() * wordSize;
397 const int tags_offset = Array<u1>::base_offset_in_bytes();
398 // get type from tags
399 __ add(O2, tags_offset, O2);
400 __ ldub(O2, O1, O2);
401
// O1 = index * wordSize; G3_scratch = address of the cpool entry.
402 __ sll(O1, LogBytesPerWord, O1);
403 __ add(O0, O1, G3_scratch);
404
405 __ cmp_and_brx_short(O2, JVM_CONSTANT_Double, Assembler::notEqual, Assembler::pt, Long);
406 // A double can be placed at word-aligned locations in the constant pool.
407 // Check out Conversions.java for an example.
408 // Also ConstantPool::header_size() is 20, which makes it very difficult
// NOTE(review): original lines 409-412 are elided in this excerpt; the
// matching #ifdef for the #endif below is not visible here -- confirm
// against the full source.
// Non-LP64 path: load the double as two single-word float loads into the
// even/odd halves of the Ftos_d register pair.
413 FloatRegister f = Ftos_d;
414 __ ldf(FloatRegisterImpl::S, G3_scratch, base_offset, f);
415 __ ldf(FloatRegisterImpl::S, G3_scratch, base_offset + sizeof(jdouble)/2,
416 f->successor());
417 #endif
418 __ push(dtos);
419 __ ba_short(exit);
420
// Long path: one 64-bit load on LP64, two 32-bit loads otherwise.
421 __ bind(Long);
422 #ifdef _LP64
423 __ ldx(G3_scratch, base_offset, Otos_l);
424 #else
425 __ ld(G3_scratch, base_offset, Otos_l);
426 __ ld(G3_scratch, base_offset + sizeof(jlong)/2, Otos_l->successor());
427 #endif
428 __ push(ltos);
429
430 __ bind(exit);
431 }
432
// Load the unsigned one-byte local-variable index located at bcp+offset
// into 'reg'.
433 void TemplateTable::locals_index(Register reg, int offset) {
434 __ ldub( at_bcp(offset), reg );
435 }
436
// Load the unsigned two-byte local-variable index of a 'wide'-prefixed
// bytecode into 'reg'. Operand starts at bcp+2 (bcp points at the prefix).
437 void TemplateTable::locals_index_wide(Register reg) {
438 // offset is 2, not 1, because Lbcp points to wide prefix code
439 __ get_2_byte_integer_at_bcp(2, G4_scratch, reg, InterpreterMacroAssembler::Unsigned);
440 }
441
// iload: rewriting-allowed entry point; delegates to iload_internal with the
// default RewriteControl.
442 void TemplateTable::iload() {
443 iload_internal();
444 }
445
// nofast_iload: variant that must not rewrite the bytecode (may_not_rewrite).
446 void TemplateTable::nofast_iload() {
447 iload_internal(may_not_rewrite);
448 }
449
450 void TemplateTable::iload_internal(RewriteControl rc) {
451 transition(vtos, itos);
452 // Rewrite iload,iload pair into fast_iload2
453 // iload,caload pair into fast_icaload
454 if (RewriteFrequentPairs && rc == may_rewrite) {
455 Label rewrite, done;
456
457 // get next byte
458 __ ldub(at_bcp(Bytecodes::length_for(Bytecodes::_iload)), G3_scratch);
459
460 // if _iload, wait to rewrite to iload2. We only want to rewrite the
461 // last two iloads in a pair. Comparing against fast_iload means that
462 // the next bytecode is neither an iload or a caload, and therefore
463 // an iload pair.
464 __ cmp_and_br_short(G3_scratch, (int)Bytecodes::_iload, Assembler::equal, Assembler::pn, done);
465
466 __ cmp(G3_scratch, (int)Bytecodes::_fast_iload);
467 __ br(Assembler::equal, false, Assembler::pn, rewrite);
468 __ delayed()->set(Bytecodes::_fast_iload2, G4_scratch);
469
470 __ cmp(G3_scratch, (int)Bytecodes::_caload);
471 __ br(Assembler::equal, false, Assembler::pn, rewrite);
472 __ delayed()->set(Bytecodes::_fast_icaload, G4_scratch);
473
474 __ set(Bytecodes::_fast_iload, G4_scratch); // don't check again
661
// fload_<n>: load the float in local slot n into Ftos_f.
// The assert bounds n because the same small-n assumption is shared with
// register-parameter handling elsewhere (per the message).
662 void TemplateTable::fload(int n) {
663 transition(vtos, ftos);
664 assert(n < Argument::n_register_parameters, "would need more code");
665 __ ldf( FloatRegisterImpl::S, Llocals, Interpreter::local_offset_in_bytes(n), Ftos_f );
666 }
667
668
// dload_<n>: load the double occupying local slots n/n+1 into Ftos_d.
// Uses load_unaligned_double since locals are only word-aligned; the n+1
// offset addresses the second slot of the two-slot pair (locals grow toward
// lower addresses -- TODO confirm slot layout).
669 void TemplateTable::dload(int n) {
670 transition(vtos, dtos);
671 FloatRegister dst = Ftos_d;
672 __ load_unaligned_double(Llocals, Interpreter::local_offset_in_bytes(n+1), dst);
673 }
674
675
// aload_<n>: load the reference in local slot n into Otos_i.
676 void TemplateTable::aload(int n) {
677 transition(vtos, atos);
678 __ ld_ptr( Llocals, Interpreter::local_offset_in_bytes(n), Otos_i );
679 }
680
// aload_0: rewriting-allowed entry point; delegates to aload_0_internal with
// the default RewriteControl.
681 void TemplateTable::aload_0() {
682 aload_0_internal();
683 }
684
// nofast_aload_0: variant that must not rewrite the bytecode
// (may_not_rewrite).
685 void TemplateTable::nofast_aload_0() {
686 aload_0_internal(may_not_rewrite);
687 }
688
// aload_0_internal: load local 0 (the receiver); when RewriteFrequentPairs
// is enabled AND rc == may_rewrite, peek at the NEXT bytecode to rewrite the
// (aload_0, fast_Xgetfield) pair into a single fused fast bytecode, or plain
// aload_0 into _fast_aload_0.
689 void TemplateTable::aload_0_internal(RewriteControl rc) {
690 transition(vtos, atos);
691
692 // According to bytecode histograms, the pairs:
693 //
694 // _aload_0, _fast_igetfield (itos)
695 // _aload_0, _fast_agetfield (atos)
696 // _aload_0, _fast_fgetfield (ftos)
697 //
698 // occur frequently. If RewriteFrequentPairs is set, the (slow) _aload_0
699 // bytecode checks the next bytecode and then rewrites the current
700 // bytecode into a pair bytecode; otherwise it rewrites the current
701 // bytecode into _fast_aload_0 that doesn't do the pair check anymore.
702 //
703 if (RewriteFrequentPairs && rc == may_rewrite) {
704 Label rewrite, done;
705
706 // get next byte
707 __ ldub(at_bcp(Bytecodes::length_for(Bytecodes::_aload_0)), G3_scratch);
708
709 // do actual aload_0
710 aload(0);
711
712 // if _getfield then wait with rewrite
// (the follower is still the slow _getfield: it has not been rewritten to a
// fast form yet, so the pair cannot be fused on this pass)
713 __ cmp_and_br_short(G3_scratch, (int)Bytecodes::_getfield, Assembler::equal, Assembler::pn, done);
714
715 // if _igetfield then rewrite to _fast_iaccess_0
716 assert(Bytecodes::java_code(Bytecodes::_fast_iaccess_0) == Bytecodes::_aload_0, "adjust fast bytecode def");
717 __ cmp(G3_scratch, (int)Bytecodes::_fast_igetfield);
718 __ br(Assembler::equal, false, Assembler::pn, rewrite);
719 __ delayed()->set(Bytecodes::_fast_iaccess_0, G4_scratch);
720
721 // if _agetfield then rewrite to _fast_aaccess_0
722 assert(Bytecodes::java_code(Bytecodes::_fast_aaccess_0) == Bytecodes::_aload_0, "adjust fast bytecode def");
723 __ cmp(G3_scratch, (int)Bytecodes::_fast_agetfield);
// NOTE(review): original lines 724-726 (the br/delayed()->set for the
// _fast_aaccess_0 case) are elided in this excerpt -- confirm against the
// full source before relying on this listing.
727 // if _fgetfield then rewrite to _fast_faccess_0
728 assert(Bytecodes::java_code(Bytecodes::_fast_faccess_0) == Bytecodes::_aload_0, "adjust fast bytecode def");
729 __ cmp(G3_scratch, (int)Bytecodes::_fast_fgetfield);
730 __ br(Assembler::equal, false, Assembler::pn, rewrite);
731 __ delayed()->set(Bytecodes::_fast_faccess_0, G4_scratch);
732
733 // else rewrite to _fast_aload0
734 assert(Bytecodes::java_code(Bytecodes::_fast_aload_0) == Bytecodes::_aload_0, "adjust fast bytecode def");
735 __ set(Bytecodes::_fast_aload_0, G4_scratch);
736
737 // rewrite
738 // G4_scratch: fast bytecode
739 __ bind(rewrite);
740 patch_bytecode(Bytecodes::_aload_0, G4_scratch, G3_scratch, false);
741 __ bind(done);
742 } else {
743 aload(0);
744 }
745 }
746
// istore bytecode: pop an int off the expression stack and store it into
// the local variable slot named by the bytecode's index operand.
void TemplateTable::istore() {
  transition(itos, vtos);
  locals_index(G3_scratch);                  // local slot index -> G3_scratch
  __ store_local_int( G3_scratch, Otos_i );  // Otos_i -> locals[index]
}
752
753
// lstore bytecode: pop a long off the expression stack and store it into
// the local variable slot named by the bytecode's index operand.
void TemplateTable::lstore() {
  transition(ltos, vtos);
  locals_index(G3_scratch);                   // local slot index -> G3_scratch
  __ store_local_long( G3_scratch, Otos_l );  // Otos_l -> locals[index]
}
759
760
// fstore bytecode: pop a float off the expression stack and store it into
// the local variable slot named by the bytecode's index operand.
void TemplateTable::fstore() {
  transition(ftos, vtos);
  locals_index(G3_scratch);                    // local slot index -> G3_scratch
  __ store_local_float( G3_scratch, Ftos_f );  // Ftos_f -> locals[index]
}
766
2040 // We only put in barriers around volatile refs (they are expensive), not
2041 // _between_ memory refs (that would require us to track the flavor of the
2042 // previous memory refs). Requirements (2) and (3) require some barriers
2043 // before volatile stores and after volatile loads. These nearly cover
2044 // requirement (1) but miss the volatile-store-volatile-load case. This final
2045 // case is placed after volatile-stores although it could just as well go
2046 // before volatile-loads.
2047 void TemplateTable::volatile_barrier(Assembler::Membar_mask_bits order_constraint) {
2048 // Helper function to insert a is-volatile test and memory barrier
2049 // All current sparc implementations run in TSO, needing only StoreLoad
2050 if ((order_constraint & Assembler::StoreLoad) == 0) return;
2051 __ membar( order_constraint );
2052 }
2053
2054 // ----------------------------------------------------------------------------
2055 void TemplateTable::resolve_cache_and_index(int byte_no,
2056 Register Rcache,
2057 Register index,
2058 size_t index_size) {
2059 // Depends on cpCacheOop layout!
2060
2061 Label resolved;
2062 Bytecodes::Code code = bytecode();
2063 switch (code) {
2064 case Bytecodes::_nofast_getfield: code = Bytecodes::_getfield; break;
2065 case Bytecodes::_nofast_putfield: code = Bytecodes::_putfield; break;
2066 }
2067
2068 assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
2069 __ get_cache_and_index_and_bytecode_at_bcp(Rcache, index, Lbyte_code, byte_no, 1, index_size);
2070 __ cmp(Lbyte_code, code); // have we resolved this bytecode?
2071 __ br(Assembler::equal, false, Assembler::pt, resolved);
2072 __ delayed()->set(code, O1);
2073
2074 address entry;
2075
2076 switch (code) {
2077 case Bytecodes::_getstatic : // fall through
2078 case Bytecodes::_putstatic : // fall through
2079 case Bytecodes::_getfield : // fall through
2080 case Bytecodes::_putfield : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_get_put); break;
2081 case Bytecodes::_invokevirtual : // fall through
2082 case Bytecodes::_invokespecial : // fall through
2083 case Bytecodes::_invokestatic : // fall through
2084 case Bytecodes::_invokeinterface: entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invoke); break;
2085 case Bytecodes::_invokehandle : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokehandle); break;
2086 case Bytecodes::_invokedynamic : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokedynamic); break;
2087 default:
2088 fatal(err_msg("unexpected bytecode: %s", Bytecodes::name(code)));
2089 break;
2090 }
2091 // first time invocation - must resolve first
2092 __ call_VM(noreg, entry, O1);
2093 // Update registers with resolved info
2094 __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size);
2095 __ bind(resolved);
2096 }
2097
2098 void TemplateTable::load_invoke_cp_cache_entry(int byte_no,
2099 Register method,
2100 Register itable_index,
2101 Register flags,
2102 bool is_invokevirtual,
2103 bool is_invokevfinal,
2104 bool is_invokedynamic) {
2105 // Uses both G3_scratch and G4_scratch
2106 Register cache = G3_scratch;
2107 Register index = G4_scratch;
2108 assert_different_registers(cache, method, itable_index);
2185 __ push_ptr(Otos_i); // put object on tos where GC wants it.
2186 } else {
2187 // Load top of stack (do not pop the value off the stack);
2188 __ ld_ptr(Lesp, Interpreter::expr_offset_in_bytes(0), Otos_i);
2189 }
2190 __ verify_oop(Otos_i);
2191 }
2192 // Otos_i: object pointer or NULL if static
2193 // Rcache: cache entry pointer
2194 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access),
2195 Otos_i, Rcache);
2196 if (!is_static && has_tos) {
2197 __ pop_ptr(Otos_i); // restore object pointer
2198 __ verify_oop(Otos_i);
2199 }
2200 __ get_cache_and_index_at_bcp(Rcache, index, 1);
2201 __ bind(Label1);
2202 }
2203 }
2204
2205 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2206 transition(vtos, vtos);
2207
2208 Register Rcache = G3_scratch;
2209 Register index = G4_scratch;
2210 Register Rclass = Rcache;
2211 Register Roffset= G4_scratch;
2212 Register Rflags = G1_scratch;
2213 ByteSize cp_base_offset = ConstantPoolCache::base_offset();
2214
2215 resolve_cache_and_index(byte_no, Rcache, index, sizeof(u2));
2216 jvmti_post_field_access(Rcache, index, is_static, false);
2217 load_field_cp_cache_entry(Rclass, Rcache, index, Roffset, Rflags, is_static);
2218
2219 if (!is_static) {
2220 pop_and_check_object(Rclass);
2221 } else {
2222 __ verify_oop(Rclass);
2223 }
2224
2225 Label exit;
2233 __ and3(Rflags, Lscratch, Lscratch);
2234 }
2235
2236 Label checkVolatile;
2237
2238 // compute field type
2239 Label notByte, notInt, notShort, notChar, notLong, notFloat, notObj;
2240 __ srl(Rflags, ConstantPoolCacheEntry::tos_state_shift, Rflags);
2241 // Make sure we don't need to mask Rflags after the above shift
2242 ConstantPoolCacheEntry::verify_tos_state_shift();
2243
2244 // Check atos before itos for getstatic, more likely (in Queens at least)
2245 __ cmp(Rflags, atos);
2246 __ br(Assembler::notEqual, false, Assembler::pt, notObj);
2247 __ delayed() ->cmp(Rflags, itos);
2248
2249 // atos
2250 __ load_heap_oop(Rclass, Roffset, Otos_i);
2251 __ verify_oop(Otos_i);
2252 __ push(atos);
2253 if (!is_static && rc == may_rewrite) {
2254 patch_bytecode(Bytecodes::_fast_agetfield, G3_scratch, G4_scratch);
2255 }
2256 __ ba(checkVolatile);
2257 __ delayed()->tst(Lscratch);
2258
2259 __ bind(notObj);
2260
2261 // cmp(Rflags, itos);
2262 __ br(Assembler::notEqual, false, Assembler::pt, notInt);
2263 __ delayed() ->cmp(Rflags, ltos);
2264
2265 // itos
2266 __ ld(Rclass, Roffset, Otos_i);
2267 __ push(itos);
2268 if (!is_static && rc == may_rewrite) {
2269 patch_bytecode(Bytecodes::_fast_igetfield, G3_scratch, G4_scratch);
2270 }
2271 __ ba(checkVolatile);
2272 __ delayed()->tst(Lscratch);
2273
2274 __ bind(notInt);
2275
2276 // cmp(Rflags, ltos);
2277 __ br(Assembler::notEqual, false, Assembler::pt, notLong);
2278 __ delayed() ->cmp(Rflags, btos);
2279
2280 // ltos
2281 // load must be atomic
2282 __ ld_long(Rclass, Roffset, Otos_l);
2283 __ push(ltos);
2284 if (!is_static && rc == may_rewrite) {
2285 patch_bytecode(Bytecodes::_fast_lgetfield, G3_scratch, G4_scratch);
2286 }
2287 __ ba(checkVolatile);
2288 __ delayed()->tst(Lscratch);
2289
2290 __ bind(notLong);
2291
2292 // cmp(Rflags, btos);
2293 __ br(Assembler::notEqual, false, Assembler::pt, notByte);
2294 __ delayed() ->cmp(Rflags, ctos);
2295
2296 // btos
2297 __ ldsb(Rclass, Roffset, Otos_i);
2298 __ push(itos);
2299 if (!is_static && rc == may_rewrite) {
2300 patch_bytecode(Bytecodes::_fast_bgetfield, G3_scratch, G4_scratch);
2301 }
2302 __ ba(checkVolatile);
2303 __ delayed()->tst(Lscratch);
2304
2305 __ bind(notByte);
2306
2307 // cmp(Rflags, ctos);
2308 __ br(Assembler::notEqual, false, Assembler::pt, notChar);
2309 __ delayed() ->cmp(Rflags, stos);
2310
2311 // ctos
2312 __ lduh(Rclass, Roffset, Otos_i);
2313 __ push(itos);
2314 if (!is_static && rc == may_rewrite) {
2315 patch_bytecode(Bytecodes::_fast_cgetfield, G3_scratch, G4_scratch);
2316 }
2317 __ ba(checkVolatile);
2318 __ delayed()->tst(Lscratch);
2319
2320 __ bind(notChar);
2321
2322 // cmp(Rflags, stos);
2323 __ br(Assembler::notEqual, false, Assembler::pt, notShort);
2324 __ delayed() ->cmp(Rflags, ftos);
2325
2326 // stos
2327 __ ldsh(Rclass, Roffset, Otos_i);
2328 __ push(itos);
2329 if (!is_static && rc == may_rewrite) {
2330 patch_bytecode(Bytecodes::_fast_sgetfield, G3_scratch, G4_scratch);
2331 }
2332 __ ba(checkVolatile);
2333 __ delayed()->tst(Lscratch);
2334
2335 __ bind(notShort);
2336
2337
2338 // cmp(Rflags, ftos);
2339 __ br(Assembler::notEqual, false, Assembler::pt, notFloat);
2340 __ delayed() ->tst(Lscratch);
2341
2342 // ftos
2343 __ ldf(FloatRegisterImpl::S, Rclass, Roffset, Ftos_f);
2344 __ push(ftos);
2345 if (!is_static && rc == may_rewrite) {
2346 patch_bytecode(Bytecodes::_fast_fgetfield, G3_scratch, G4_scratch);
2347 }
2348 __ ba(checkVolatile);
2349 __ delayed()->tst(Lscratch);
2350
2351 __ bind(notFloat);
2352
2353
2354 // dtos
2355 __ ldf(FloatRegisterImpl::D, Rclass, Roffset, Ftos_d);
2356 __ push(dtos);
2357 if (!is_static && rc == may_rewrite) {
2358 patch_bytecode(Bytecodes::_fast_dgetfield, G3_scratch, G4_scratch);
2359 }
2360
2361 __ bind(checkVolatile);
2362 if (__ membar_has_effect(membar_bits)) {
2363 // __ tst(Lscratch); executed in delay slot
2364 __ br(Assembler::zero, false, Assembler::pt, exit);
2365 __ delayed()->nop();
2366 volatile_barrier(membar_bits);
2367 }
2368
2369 __ bind(exit);
2370 }
2371
// getfield bytecode: instance field read, bytecode rewriting allowed
// (rc defaults to may_rewrite in getfield_or_static).
void TemplateTable::getfield(int byte_no) {
  getfield_or_static(byte_no, false);
}
2375
// _nofast_getfield bytecode: same as getfield, but must never be rewritten
// to a fast variant, so may_not_rewrite is passed explicitly.
void TemplateTable::nofast_getfield(int byte_no) {
  getfield_or_static(byte_no, false, may_not_rewrite);
}
2379
// getstatic bytecode: static field read (is_static == true).
void TemplateTable::getstatic(int byte_no) {
  getfield_or_static(byte_no, true);
}
2383
2384 void TemplateTable::fast_accessfield(TosState state) {
2385 transition(atos, state);
2386 Register Rcache = G3_scratch;
2387 Register index = G4_scratch;
2388 Register Roffset = G4_scratch;
2389 Register Rflags = Rcache;
2390 ByteSize cp_base_offset = ConstantPoolCache::base_offset();
2391
2392 __ get_cache_and_index_at_bcp(Rcache, index, 1);
2393 jvmti_post_field_access(Rcache, index, /*is_static*/false, /*has_tos*/true);
2394
2395 __ ld_ptr(Rcache, cp_base_offset + ConstantPoolCacheEntry::f2_offset(), Roffset);
2396
2397 __ null_check(Otos_i);
2398 __ verify_oop(Otos_i);
2399
2400 Label exit;
2401
2402 Assembler::Membar_mask_bits membar_bits =
2403 Assembler::Membar_mask_bits(Assembler::LoadLoad | Assembler::LoadStore);
2548 __ verify_oop(G4_scratch);
2549 }
2550 // setup pointer to jvalue object
2551 __ mov(Lesp, G1_scratch); __ inc(G1_scratch, wordSize);
2552 // G4_scratch: object pointer or NULL if static
2553 // G3_scratch: cache entry pointer
2554 // G1_scratch: jvalue object on the stack
2555 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification),
2556 G4_scratch, G3_scratch, G1_scratch);
2557 __ get_cache_and_index_at_bcp(Rcache, index, 1);
2558 __ bind(Label1);
2559 }
2560 }
2561
// Pop an object reference off the expression stack into r, null-check it
// (a field access on a null receiver must not proceed), and verify the oop.
void TemplateTable::pop_and_check_object(Register r) {
  __ pop_ptr(r);
  __ null_check(r);  // for field access must check obj.
  __ verify_oop(r);
}
2567
2568 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2569 transition(vtos, vtos);
2570 Register Rcache = G3_scratch;
2571 Register index = G4_scratch;
2572 Register Rclass = Rcache;
2573 Register Roffset= G4_scratch;
2574 Register Rflags = G1_scratch;
2575 ByteSize cp_base_offset = ConstantPoolCache::base_offset();
2576
2577 resolve_cache_and_index(byte_no, Rcache, index, sizeof(u2));
2578 jvmti_post_field_mod(Rcache, index, is_static);
2579 load_field_cp_cache_entry(Rclass, Rcache, index, Roffset, Rflags, is_static);
2580
2581 Assembler::Membar_mask_bits read_bits =
2582 Assembler::Membar_mask_bits(Assembler::LoadStore | Assembler::StoreStore);
2583 Assembler::Membar_mask_bits write_bits = Assembler::StoreLoad;
2584
2585 Label notVolatile, checkVolatile, exit;
2586 if (__ membar_has_effect(read_bits) || __ membar_has_effect(write_bits)) {
2587 __ set((1 << ConstantPoolCacheEntry::is_volatile_shift), Lscratch);
2588 __ and3(Rflags, Lscratch, Lscratch);
2624 // itos
2625 {
2626 __ pop_i();
2627 __ st(Otos_i, Rclass, Roffset);
2628 __ ba(checkVolatile);
2629 __ delayed()->tst(Lscratch);
2630 }
2631
2632 __ bind(notInt);
2633 } else {
2634 // putfield with int type most likely, check that first
2635 __ cmp(Rflags, itos);
2636 __ br(Assembler::notEqual, false, Assembler::pt, notInt);
2637 __ delayed()->cmp(Rflags, atos);
2638
2639 // itos
2640 {
2641 __ pop_i();
2642 pop_and_check_object(Rclass);
2643 __ st(Otos_i, Rclass, Roffset);
2644 if (rc == may_rewrite) patch_bytecode(Bytecodes::_fast_iputfield, G3_scratch, G4_scratch, true, byte_no);
2645 __ ba(checkVolatile);
2646 __ delayed()->tst(Lscratch);
2647 }
2648
2649 __ bind(notInt);
2650 // cmp(Rflags, atos);
2651 __ br(Assembler::notEqual, false, Assembler::pt, notObj);
2652 __ delayed()->cmp(Rflags, btos);
2653
2654 // atos
2655 {
2656 __ pop_ptr();
2657 pop_and_check_object(Rclass);
2658 __ verify_oop(Otos_i);
2659 do_oop_store(_masm, Rclass, Roffset, 0, Otos_i, G1_scratch, _bs->kind(), false);
2660 if (rc == may_rewrite) patch_bytecode(Bytecodes::_fast_aputfield, G3_scratch, G4_scratch, true, byte_no);
2661 __ ba(checkVolatile);
2662 __ delayed()->tst(Lscratch);
2663 }
2664
2665 __ bind(notObj);
2666 }
2667
2668 // cmp(Rflags, btos);
2669 __ br(Assembler::notEqual, false, Assembler::pt, notByte);
2670 __ delayed()->cmp(Rflags, ltos);
2671
2672 // btos
2673 {
2674 __ pop_i();
2675 if (!is_static) pop_and_check_object(Rclass);
2676 __ stb(Otos_i, Rclass, Roffset);
2677 if (!is_static && rc == may_rewrite) {
2678 patch_bytecode(Bytecodes::_fast_bputfield, G3_scratch, G4_scratch, true, byte_no);
2679 }
2680 __ ba(checkVolatile);
2681 __ delayed()->tst(Lscratch);
2682 }
2683
2684 __ bind(notByte);
2685 // cmp(Rflags, ltos);
2686 __ br(Assembler::notEqual, false, Assembler::pt, notLong);
2687 __ delayed()->cmp(Rflags, ctos);
2688
2689 // ltos
2690 {
2691 __ pop_l();
2692 if (!is_static) pop_and_check_object(Rclass);
2693 __ st_long(Otos_l, Rclass, Roffset);
2694 if (!is_static && rc == may_rewrite) {
2695 patch_bytecode(Bytecodes::_fast_lputfield, G3_scratch, G4_scratch, true, byte_no);
2696 }
2697 __ ba(checkVolatile);
2698 __ delayed()->tst(Lscratch);
2699 }
2700
2701 __ bind(notLong);
2702 // cmp(Rflags, ctos);
2703 __ br(Assembler::notEqual, false, Assembler::pt, notChar);
2704 __ delayed()->cmp(Rflags, stos);
2705
2706 // ctos (char)
2707 {
2708 __ pop_i();
2709 if (!is_static) pop_and_check_object(Rclass);
2710 __ sth(Otos_i, Rclass, Roffset);
2711 if (!is_static && rc == may_rewrite) {
2712 patch_bytecode(Bytecodes::_fast_cputfield, G3_scratch, G4_scratch, true, byte_no);
2713 }
2714 __ ba(checkVolatile);
2715 __ delayed()->tst(Lscratch);
2716 }
2717
2718 __ bind(notChar);
2719 // cmp(Rflags, stos);
2720 __ br(Assembler::notEqual, false, Assembler::pt, notShort);
2721 __ delayed()->cmp(Rflags, ftos);
2722
2723 // stos (short)
2724 {
2725 __ pop_i();
2726 if (!is_static) pop_and_check_object(Rclass);
2727 __ sth(Otos_i, Rclass, Roffset);
2728 if (!is_static && rc == may_rewrite) {
2729 patch_bytecode(Bytecodes::_fast_sputfield, G3_scratch, G4_scratch, true, byte_no);
2730 }
2731 __ ba(checkVolatile);
2732 __ delayed()->tst(Lscratch);
2733 }
2734
2735 __ bind(notShort);
2736 // cmp(Rflags, ftos);
2737 __ br(Assembler::notZero, false, Assembler::pt, notFloat);
2738 __ delayed()->nop();
2739
2740 // ftos
2741 {
2742 __ pop_f();
2743 if (!is_static) pop_and_check_object(Rclass);
2744 __ stf(FloatRegisterImpl::S, Ftos_f, Rclass, Roffset);
2745 if (!is_static && rc == may_rewrite) {
2746 patch_bytecode(Bytecodes::_fast_fputfield, G3_scratch, G4_scratch, true, byte_no);
2747 }
2748 __ ba(checkVolatile);
2749 __ delayed()->tst(Lscratch);
2750 }
2751
2752 __ bind(notFloat);
2753
2754 // dtos
2755 {
2756 __ pop_d();
2757 if (!is_static) pop_and_check_object(Rclass);
2758 __ stf(FloatRegisterImpl::D, Ftos_d, Rclass, Roffset);
2759 if (!is_static && rc == may_rewrite) {
2760 patch_bytecode(Bytecodes::_fast_dputfield, G3_scratch, G4_scratch, true, byte_no);
2761 }
2762 }
2763
2764 __ bind(checkVolatile);
2765 __ tst(Lscratch);
2766
2767 if (__ membar_has_effect(write_bits)) {
2768 // __ tst(Lscratch); in delay slot
2769 __ br(Assembler::zero, false, Assembler::pt, exit);
2770 __ delayed()->nop();
2771 volatile_barrier(Assembler::StoreLoad);
2772 __ bind(exit);
2773 }
2774 }
2775
2776 void TemplateTable::fast_storefield(TosState state) {
2777 transition(state, vtos);
2778 Register Rcache = G3_scratch;
2779 Register Rclass = Rcache;
2813 case Bytecodes::_fast_fputfield:
2814 __ stf(FloatRegisterImpl::S, Ftos_f, Rclass, Roffset);
2815 break;
2816 case Bytecodes::_fast_dputfield:
2817 __ stf(FloatRegisterImpl::D, Ftos_d, Rclass, Roffset);
2818 break;
2819 case Bytecodes::_fast_aputfield:
2820 do_oop_store(_masm, Rclass, Roffset, 0, Otos_i, G1_scratch, _bs->kind(), false);
2821 break;
2822 default:
2823 ShouldNotReachHere();
2824 }
2825
2826 if (__ membar_has_effect(write_bits)) {
2827 __ cmp_and_br_short(Lscratch, 0, Assembler::equal, Assembler::pt, exit);
2828 volatile_barrier(Assembler::StoreLoad);
2829 __ bind(exit);
2830 }
2831 }
2832
// putfield bytecode: instance field write, bytecode rewriting allowed
// (rc defaults to may_rewrite in putfield_or_static).
void TemplateTable::putfield(int byte_no) {
  putfield_or_static(byte_no, false);
}
2836
// _nofast_putfield bytecode: same as putfield, but must never be rewritten
// to a fast variant, so may_not_rewrite is passed explicitly.
void TemplateTable::nofast_putfield(int byte_no) {
  putfield_or_static(byte_no, false, may_not_rewrite);
}
2840
// putstatic bytecode: static field write (is_static == true).
void TemplateTable::putstatic(int byte_no) {
  putfield_or_static(byte_no, true);
}
2844
2845 void TemplateTable::fast_xaccess(TosState state) {
2846 transition(vtos, state);
2847 Register Rcache = G3_scratch;
2848 Register Roffset = G4_scratch;
2849 Register Rflags = G4_scratch;
2850 Register Rreceiver = Lscratch;
2851
2852 __ ld_ptr(Llocals, 0, Rreceiver);
2853
2854 // access constant pool cache (is resolved)
2855 __ get_cache_and_index_at_bcp(Rcache, G4_scratch, 2);
2856 __ ld_ptr(Rcache, ConstantPoolCache::base_offset() + ConstantPoolCacheEntry::f2_offset(), Roffset);
2857 __ add(Lbcp, 1, Lbcp); // needed to report exception at the correct bcp
2858
2859 __ verify_oop(Rreceiver);
2860 __ null_check(Rreceiver);
2861 if (state == atos) {
2862 __ load_heap_oop(Rreceiver, Roffset, Otos_i);
2863 } else if (state == itos) {
2864 __ ld (Rreceiver, Roffset, Otos_i) ;
2977
2978 void TemplateTable::invokevirtual(int byte_no) {
2979 transition(vtos, vtos);
2980 assert(byte_no == f2_byte, "use this argument");
2981
2982 Register Rscratch = G3_scratch;
2983 Register Rtemp = G4_scratch;
2984 Register Rret = Lscratch;
2985 Register O0_recv = O0;
2986 Label notFinal;
2987
2988 load_invoke_cp_cache_entry(byte_no, G5_method, noreg, Rret, true, false, false);
2989 __ mov(SP, O5_savedSP); // record SP that we wanted the callee to restore
2990
2991 // Check for vfinal
2992 __ set((1 << ConstantPoolCacheEntry::is_vfinal_shift), G4_scratch);
2993 __ btst(Rret, G4_scratch);
2994 __ br(Assembler::zero, false, Assembler::pt, notFinal);
2995 __ delayed()->and3(Rret, 0xFF, G4_scratch); // gets number of parameters
2996
2997 if (RewriteBytecodes && !UseSharedSpaces) {
2998 patch_bytecode(Bytecodes::_fast_invokevfinal, Rscratch, Rtemp);
2999 }
3000
3001 invokevfinal_helper(Rscratch, Rret);
3002
3003 __ bind(notFinal);
3004
3005 __ mov(G5_method, Rscratch); // better scratch register
3006 __ load_receiver(G4_scratch, O0_recv); // gets receiverOop
3007 // receiver is in O0_recv
3008 __ verify_oop(O0_recv);
3009
3010 // get return address
3011 AddressLiteral table(Interpreter::invoke_return_entry_table());
3012 __ set(table, Rtemp);
3013 __ srl(Rret, ConstantPoolCacheEntry::tos_state_shift, Rret); // get return type
3014 // Make sure we don't need to mask Rret after the above shift
3015 ConstantPoolCacheEntry::verify_tos_state_shift();
3016 __ sll(Rret, LogBytesPerWord, Rret);
3017 __ ld_ptr(Rtemp, Rret, Rret); // get return address
3018
3019 // get receiver klass
|