src/cpu/x86/vm/templateTable_x86_64.cpp

Print this page




 466           JVM_CONSTANT_Double);
 467   __ jccb(Assembler::notEqual, Long);
 468   // dtos
 469   __ movdbl(xmm0, Address(rcx, rbx, Address::times_8, base_offset));
 470   __ push_d();
 471   __ jmpb(Done);
 472 
 473   __ bind(Long);
 474   // ltos
 475   __ movq(rax, Address(rcx, rbx, Address::times_8, base_offset));
 476   __ push_l();
 477 
 478   __ bind(Done);
 479 }
 480 
// Loads the one-byte local-variable index operand at bcp + offset into
// 'reg' (zero-extended), then negates it.  The negated value feeds the
// iaddress(reg)/laddress(reg) forms used by the load templates below,
// which presumably address locals at negative slot offsets from the
// locals base register -- confirm against the iaddress() definition.
void TemplateTable::locals_index(Register reg, int offset) {
  __ load_unsigned_byte(reg, at_bcp(offset));
  __ negptr(reg);
}
 485 
// Template for the (rewriting) _iload bytecode.  When RewriteFrequentPairs
// is enabled, peeks at the following bytecode and patches this _iload into
// a faster variant (_fast_iload2, _fast_icaload, or _fast_iload) before
// performing the actual local load into tos (rax).
void TemplateTable::iload() {
  transition(vtos, itos);
  if (RewriteFrequentPairs) {
    Label rewrite, done;
    const Register bc = c_rarg3;
    assert(rbx != bc, "register damaged");

    // get next byte
    __ load_unsigned_byte(rbx,
                          at_bcp(Bytecodes::length_for(Bytecodes::_iload)));
    // if _iload, wait to rewrite to iload2.  We only want to rewrite the
    // last two iloads in a pair.  Comparing against fast_iload means that
    // the next bytecode is neither an iload nor a caload, and therefore
    // an iload pair.
    __ cmpl(rbx, Bytecodes::_iload);
    __ jcc(Assembler::equal, done);

    // if the next bytecode is already _fast_iload, fuse both into _fast_iload2
    __ cmpl(rbx, Bytecodes::_fast_iload);
    __ movl(bc, Bytecodes::_fast_iload2);
    __ jccb(Assembler::equal, rewrite);

    // if _caload, rewrite to fast_icaload
    __ cmpl(rbx, Bytecodes::_caload);
    __ movl(bc, Bytecodes::_fast_icaload);
    __ jccb(Assembler::equal, rewrite);

    // otherwise rewrite to _fast_iload so iload doesn't check again.
    __ movl(bc, Bytecodes::_fast_iload);

    // rewrite
    // bc: fast bytecode
    __ bind(rewrite);
    patch_bytecode(Bytecodes::_iload, bc, rbx, false);

    __ bind(done);
  }

  // Get the local value into tos
  locals_index(rbx);
  __ movl(rax, iaddress(rbx));
}
 526 








// Template for _fast_iload2: two consecutive iloads fused into one bytecode.
// Loads and pushes the first local, then loads the second local (operand
// byte at bcp + 3) into tos (rax).
void TemplateTable::fast_iload2() {
  transition(vtos, itos);
  locals_index(rbx);
  __ movl(rax, iaddress(rbx));
  __ push(itos);
  locals_index(rbx, 3);  // second index operand lives at bcp + 3
  __ movl(rax, iaddress(rbx));
}
 535 
// Template for _fast_iload: like iload() but with no pair-rewrite check.
// Loads the indexed int local into tos (rax).
void TemplateTable::fast_iload() {
  transition(vtos, itos);
  locals_index(rbx);
  __ movl(rax, iaddress(rbx));
}
 541 
// Template for _lload: loads the indexed long local into tos (rax).
void TemplateTable::lload() {
  transition(vtos, ltos);
  locals_index(rbx);
  __ movq(rax, laddress(rbx));
}


 734   transition(vtos, ltos);
 735   __ movq(rax, laddress(n));
 736 }
 737 
// Template for _fload_<n>: loads float local #n into tos (xmm0).
void TemplateTable::fload(int n) {
  transition(vtos, ftos);
  __ movflt(xmm0, faddress(n));
}
 742 
// Template for _dload_<n>: loads double local #n into tos (xmm0).
void TemplateTable::dload(int n) {
  transition(vtos, dtos);
  __ movdbl(xmm0, daddress(n));
}
 747 
// Template for _aload_<n>: loads reference local #n into tos (rax).
void TemplateTable::aload(int n) {
  transition(vtos, atos);
  __ movptr(rax, aaddress(n));
}
 752 
 753 void TemplateTable::aload_0() {








 754   transition(vtos, atos);
 755   // According to bytecode histograms, the pairs:
 756   //
 757   // _aload_0, _fast_igetfield
 758   // _aload_0, _fast_agetfield
 759   // _aload_0, _fast_fgetfield
 760   //
 761   // occur frequently. If RewriteFrequentPairs is set, the (slow)
 762   // _aload_0 bytecode checks if the next bytecode is either
 763   // _fast_igetfield, _fast_agetfield or _fast_fgetfield and then
 764   // rewrites the current bytecode into a pair bytecode; otherwise it
 765   // rewrites the current bytecode into _fast_aload_0 that doesn't do
 766   // the pair check anymore.
 767   //
 768   // Note: If the next bytecode is _getfield, the rewrite must be
 769   //       delayed, otherwise we may miss an opportunity for a pair.
 770   //
 771   // Also rewrite frequent pairs
 772   //   aload_0, aload_1
 773   //   aload_0, iload_1
 774   // These bytecodes with a small amount of code are most profitable
 775   // to rewrite
 776   if (RewriteFrequentPairs) {
 777     Label rewrite, done;
 778     const Register bc = c_rarg3;
 779     assert(rbx != bc, "register damaged");
 780     // get next byte
 781     __ load_unsigned_byte(rbx,
 782                           at_bcp(Bytecodes::length_for(Bytecodes::_aload_0)));
 783 
 784     // do actual aload_0
 785     aload(0);
 786 
 787     // if _getfield then wait with rewrite
 788     __ cmpl(rbx, Bytecodes::_getfield);
 789     __ jcc(Assembler::equal, done);
 790 
 791     // if _igetfield then rewrite to _fast_iaccess_0
 792     assert(Bytecodes::java_code(Bytecodes::_fast_iaccess_0) ==
 793            Bytecodes::_aload_0,
 794            "fix bytecode definition");
 795     __ cmpl(rbx, Bytecodes::_fast_igetfield);
 796     __ movl(bc, Bytecodes::_fast_iaccess_0);


2092 // not _between_ memory refs (that would require us to track the
2093 // flavor of the previous memory refs).  Requirements (2) and (3)
2094 // require some barriers before volatile stores and after volatile
2095 // loads.  These nearly cover requirement (1) but miss the
2096 // volatile-store-volatile-load case.  This final case is placed after
2097 // volatile-stores although it could just as well go before
2098 // volatile-loads.
// Emits a memory barrier with the given ordering constraint; elided
// entirely on uniprocessor systems, where no fence is required.
void TemplateTable::volatile_barrier(Assembler::Membar_mask_bits
                                     order_constraint) {
  // Helper function to insert a is-volatile test and memory barrier
  if (os::is_MP()) { // Not needed on single CPU
    __ membar(order_constraint);
  }
}
2106 
// Ensures the ConstantPoolCache entry for the current bytecode is resolved,
// calling into the runtime on the first execution.  On exit Rcache/index
// address the (now resolved) cache entry.  Clobbers rbx (temp).
void TemplateTable::resolve_cache_and_index(int byte_no,
                                            Register Rcache,
                                            Register index,
                                            size_t index_size) {
  const Register temp = rbx;
  assert_different_registers(Rcache, index, temp);

  Label resolved;
    assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
    // Fast path: if the indicator byte in the cache entry already equals
    // this bytecode, resolution has already happened.
    __ get_cache_and_index_and_bytecode_at_bcp(Rcache, index, temp, byte_no, 1, index_size);
    __ cmpl(temp, (int) bytecode());  // have we resolved this bytecode?
    __ jcc(Assembler::equal, resolved);

  // resolve first time through: select the runtime entry point matching
  // the family of the current bytecode.
  address entry;
  switch (bytecode()) {
  case Bytecodes::_getstatic:
  case Bytecodes::_putstatic:
  case Bytecodes::_getfield:
  case Bytecodes::_putfield:
    entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_get_put);
    break;
  case Bytecodes::_invokevirtual:
  case Bytecodes::_invokespecial:
  case Bytecodes::_invokestatic:
  case Bytecodes::_invokeinterface:
    entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invoke);
    break;
  case Bytecodes::_invokehandle:
    entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokehandle);
    break;
  case Bytecodes::_invokedynamic:
    entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokedynamic);
    break;
  default:
    fatal(err_msg("unexpected bytecode: %s", Bytecodes::name(bytecode())));
    break;
  }
  // Pass the bytecode being resolved as the runtime call argument.
  __ movl(temp, (int) bytecode());
  __ call_VM(noreg, entry, temp);

  // Update registers with resolved info
  __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size);
  __ bind(resolved);
}
2152 
2153 // The cache and index registers must be set before call
2154 void TemplateTable::load_field_cp_cache_entry(Register obj,
2155                                               Register cache,
2156                                               Register index,
2157                                               Register off,
2158                                               Register flags,
2159                                               bool is_static = false) {
2160   assert_different_registers(cache, index, flags, off);
2161 
2162   ByteSize cp_base_offset = ConstantPoolCache::base_offset();
2163   // Field offset
2164   __ movptr(off, Address(cache, index, Address::times_ptr,
2165                          in_bytes(cp_base_offset +


2244       __ movptr(c_rarg1, at_tos()); // get object pointer without popping it
2245       __ verify_oop(c_rarg1);
2246     }
2247     // c_rarg1: object pointer or NULL
2248     // c_rarg2: cache entry pointer
2249     // c_rarg3: jvalue object on the stack
2250     __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2251                                        InterpreterRuntime::post_field_access),
2252                c_rarg1, c_rarg2, c_rarg3);
2253     __ get_cache_and_index_at_bcp(cache, index, 1);
2254     __ bind(L1);
2255   }
2256 }
2257 
// Pops the receiver object off the expression stack into r, null-checks it
// (field access on null must trap), and verifies the oop in debug builds.
void TemplateTable::pop_and_check_object(Register r) {
  __ pop_ptr(r);
  __ null_check(r);  // for field access must check obj.
  __ verify_oop(r);
}
2263 
// Shared template generator for _getfield/_getstatic.  Resolves the
// cp-cache entry, posts a JVMTI field-access event if requested, loads the
// field value of the appropriate tos type, and (non-static case only)
// patches the bytecode to its _fast_Xgetfield variant.  Dispatches on the
// tos state extracted from the cache-entry flags word.
void TemplateTable::getfield_or_static(int byte_no, bool is_static) {
  transition(vtos, vtos);

  const Register cache = rcx;
  const Register index = rdx;
  const Register obj   = c_rarg3;
  const Register off   = rbx;
  const Register flags = rax;
  const Register bc = c_rarg3; // uses same reg as obj, so don't mix them

  resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
  jvmti_post_field_access(cache, index, is_static, false);
  load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);

  if (!is_static) {
    // obj is on the stack
    pop_and_check_object(obj);
  }

  const Address field(obj, off, Address::times_1);

  Label Done, notByte, notInt, notShort, notChar,
              notLong, notFloat, notObj, notDouble;

  // Extract the field's tos state (type tag) from the flags word.
  __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
  // Make sure we don't need to mask edx after the above shift
  assert(btos == 0, "change code, btos != 0");

  __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
  __ jcc(Assembler::notZero, notByte);
  // btos
  __ load_signed_byte(rax, field);
  __ push(btos);
  // Rewrite bytecode to be faster
  if (!is_static) {
    patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
  }
  __ jmp(Done);

  __ bind(notByte);
  __ cmpl(flags, atos);
  __ jcc(Assembler::notEqual, notObj);
  // atos
  __ load_heap_oop(rax, field);
  __ push(atos);
  if (!is_static) {
    patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
  }
  __ jmp(Done);

  __ bind(notObj);
  __ cmpl(flags, itos);
  __ jcc(Assembler::notEqual, notInt);
  // itos
  __ movl(rax, field);
  __ push(itos);
  // Rewrite bytecode to be faster
  if (!is_static) {
    patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
  }
  __ jmp(Done);

  __ bind(notInt);
  __ cmpl(flags, ctos);
  __ jcc(Assembler::notEqual, notChar);
  // ctos
  __ load_unsigned_short(rax, field);
  __ push(ctos);
  // Rewrite bytecode to be faster
  if (!is_static) {
    patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);
  }
  __ jmp(Done);

  __ bind(notChar);
  __ cmpl(flags, stos);
  __ jcc(Assembler::notEqual, notShort);
  // stos
  __ load_signed_short(rax, field);
  __ push(stos);
  // Rewrite bytecode to be faster
  if (!is_static) {
    patch_bytecode(Bytecodes::_fast_sgetfield, bc, rbx);
  }
  __ jmp(Done);

  __ bind(notShort);
  __ cmpl(flags, ltos);
  __ jcc(Assembler::notEqual, notLong);
  // ltos
  __ movq(rax, field);
  __ push(ltos);
  // Rewrite bytecode to be faster
  if (!is_static) {
    patch_bytecode(Bytecodes::_fast_lgetfield, bc, rbx);
  }
  __ jmp(Done);

  __ bind(notLong);
  __ cmpl(flags, ftos);
  __ jcc(Assembler::notEqual, notFloat);
  // ftos
  __ movflt(xmm0, field);
  __ push(ftos);
  // Rewrite bytecode to be faster
  if (!is_static) {
    patch_bytecode(Bytecodes::_fast_fgetfield, bc, rbx);
  }
  __ jmp(Done);

  __ bind(notFloat);
#ifdef ASSERT
  // dtos is the only remaining state; verify that in debug builds.
  __ cmpl(flags, dtos);
  __ jcc(Assembler::notEqual, notDouble);
#endif
  // dtos
  __ movdbl(xmm0, field);
  __ push(dtos);
  // Rewrite bytecode to be faster
  if (!is_static) {
    patch_bytecode(Bytecodes::_fast_dgetfield, bc, rbx);
  }
#ifdef ASSERT
  __ jmp(Done);

  __ bind(notDouble);
  __ stop("Bad state");
#endif

  __ bind(Done);
  // [jk] not needed currently
  // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
  //                                              Assembler::LoadStore));
}
2398 
2399 
// Template for _getfield: shared generator, instance-field flavor.
void TemplateTable::getfield(int byte_no) {
  getfield_or_static(byte_no, false);
}
2403 




// Template for _getstatic: shared generator, static-field flavor.
void TemplateTable::getstatic(int byte_no) {
  getfield_or_static(byte_no, true);
}
2407 
2408 // The registers cache and index expected to be set before call.
2409 // The function may destroy various registers, just not the cache and index registers.
2410 void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is_static) {
2411   transition(vtos, vtos);
2412 
2413   ByteSize cp_base_offset = ConstantPoolCache::base_offset();
2414 
2415   if (JvmtiExport::can_post_field_modification()) {
2416     // Check to see if a field modification watch has been set before
2417     // we take the time to call into the VM.
2418     Label L1;
2419     assert_different_registers(cache, index, rax);
2420     __ mov32(rax, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
2421     __ testl(rax, rax);
2422     __ jcc(Assembler::zero, L1);
2423 


2447                  c_rarg1, at_tos_p2()); // dtos (two word jvalue)
2448     }
2449     // cache entry pointer
2450     __ addptr(c_rarg2, in_bytes(cp_base_offset));
2451     __ shll(rscratch1, LogBytesPerWord);
2452     __ addptr(c_rarg2, rscratch1);
2453     // object (tos)
2454     __ mov(c_rarg3, rsp);
2455     // c_rarg1: object pointer set up above (NULL if static)
2456     // c_rarg2: cache entry pointer
2457     // c_rarg3: jvalue object on the stack
2458     __ call_VM(noreg,
2459                CAST_FROM_FN_PTR(address,
2460                                 InterpreterRuntime::post_field_modification),
2461                c_rarg1, c_rarg2, c_rarg3);
2462     __ get_cache_and_index_at_bcp(cache, index, 1);
2463     __ bind(L1);
2464   }
2465 }
2466 
// Shared template generator for _putfield/_putstatic.  Resolves the
// cp-cache entry, posts a JVMTI field-modification event if requested,
// pops the value (and, non-static, the receiver) off the stack, stores it
// into the field, patches the bytecode to its _fast_Xputfield variant
// (non-static only), and emits a StoreLoad|StoreStore barrier after the
// store when the field is volatile.
void TemplateTable::putfield_or_static(int byte_no, bool is_static) {
  transition(vtos, vtos);

  const Register cache = rcx;
  const Register index = rdx;
  const Register obj   = rcx;
  const Register off   = rbx;
  const Register flags = rax;
  const Register bc    = c_rarg3;

  resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
  jvmti_post_field_mod(cache, index, is_static);
  load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);

  // [jk] not needed currently
  // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
  //                                              Assembler::StoreStore));

  Label notVolatile, Done;
  // Save the is-volatile bit in rdx for the post-store barrier decision.
  __ movl(rdx, flags);
  __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
  __ andl(rdx, 0x1);

  // field address
  const Address field(obj, off, Address::times_1);

  Label notByte, notInt, notShort, notChar,
        notLong, notFloat, notObj, notDouble;

  // Extract the field's tos state (type tag) from the flags word.
  __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);

  assert(btos == 0, "change code, btos != 0");
  __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
  __ jcc(Assembler::notZero, notByte);

  // btos
  {
    __ pop(btos);
    if (!is_static) pop_and_check_object(obj);
    __ movb(field, rax);
    if (!is_static) {
      patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
    }
    __ jmp(Done);
  }

  __ bind(notByte);
  __ cmpl(flags, atos);
  __ jcc(Assembler::notEqual, notObj);

  // atos
  {
    __ pop(atos);
    if (!is_static) pop_and_check_object(obj);
    // Store into the field
    do_oop_store(_masm, field, rax, _bs->kind(), false);
    if (!is_static) {
      patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
    }
    __ jmp(Done);
  }

  __ bind(notObj);
  __ cmpl(flags, itos);
  __ jcc(Assembler::notEqual, notInt);

  // itos
  {
    __ pop(itos);
    if (!is_static) pop_and_check_object(obj);
    __ movl(field, rax);
    if (!is_static) {
      patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
    }
    __ jmp(Done);
  }

  __ bind(notInt);
  __ cmpl(flags, ctos);
  __ jcc(Assembler::notEqual, notChar);

  // ctos
  {
    __ pop(ctos);
    if (!is_static) pop_and_check_object(obj);
    __ movw(field, rax);
    if (!is_static) {
      patch_bytecode(Bytecodes::_fast_cputfield, bc, rbx, true, byte_no);
    }
    __ jmp(Done);
  }

  __ bind(notChar);
  __ cmpl(flags, stos);
  __ jcc(Assembler::notEqual, notShort);

  // stos
  {
    __ pop(stos);
    if (!is_static) pop_and_check_object(obj);
    __ movw(field, rax);
    if (!is_static) {
      patch_bytecode(Bytecodes::_fast_sputfield, bc, rbx, true, byte_no);
    }
    __ jmp(Done);
  }

  __ bind(notShort);
  __ cmpl(flags, ltos);
  __ jcc(Assembler::notEqual, notLong);

  // ltos
  {
    __ pop(ltos);
    if (!is_static) pop_and_check_object(obj);
    __ movq(field, rax);
    if (!is_static) {
      patch_bytecode(Bytecodes::_fast_lputfield, bc, rbx, true, byte_no);
    }
    __ jmp(Done);
  }

  __ bind(notLong);
  __ cmpl(flags, ftos);
  __ jcc(Assembler::notEqual, notFloat);

  // ftos
  {
    __ pop(ftos);
    if (!is_static) pop_and_check_object(obj);
    __ movflt(field, xmm0);
    if (!is_static) {
      patch_bytecode(Bytecodes::_fast_fputfield, bc, rbx, true, byte_no);
    }
    __ jmp(Done);
  }

  __ bind(notFloat);
#ifdef ASSERT
  // dtos is the only remaining state; verify that in debug builds.
  __ cmpl(flags, dtos);
  __ jcc(Assembler::notEqual, notDouble);
#endif

  // dtos
  {
    __ pop(dtos);
    if (!is_static) pop_and_check_object(obj);
    __ movdbl(field, xmm0);
    if (!is_static) {
      patch_bytecode(Bytecodes::_fast_dputfield, bc, rbx, true, byte_no);
    }
  }

#ifdef ASSERT
  __ jmp(Done);

  __ bind(notDouble);
  __ stop("Bad state");
#endif

  __ bind(Done);

  // Check for volatile store
  __ testl(rdx, rdx);
  __ jcc(Assembler::zero, notVolatile);
  volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
                                               Assembler::StoreStore));
  __ bind(notVolatile);
}
2636 
// Template for _putfield: shared generator, instance-field flavor.
void TemplateTable::putfield(int byte_no) {
  putfield_or_static(byte_no, false);
}
2640 




// Template for _putstatic: shared generator, static-field flavor.
void TemplateTable::putstatic(int byte_no) {
  putfield_or_static(byte_no, true);
}
2644 
2645 void TemplateTable::jvmti_post_fast_field_mod() {
2646   if (JvmtiExport::can_post_field_modification()) {
2647     // Check to see if a field modification watch has been set before
2648     // we take the time to call into the VM.
2649     Label L2;
2650     __ mov32(c_rarg3, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
2651     __ testl(c_rarg3, c_rarg3);
2652     __ jcc(Assembler::zero, L2);
2653     __ pop_ptr(rbx);                  // copy the object pointer from tos
2654     __ verify_oop(rbx);
2655     __ push_ptr(rbx);                 // put the object pointer back on tos
2656     // Save tos values before call_VM() clobbers them. Since we have
2657     // to do it for every data type, we use the saved values as the
2658     // jvalue object.
2659     switch (bytecode()) {          // load values into the jvalue object
2660     case Bytecodes::_fast_aputfield: __ push_ptr(rax); break;


2898   __ decrement(r13);
2899 }
2900 
2901 
2902 
2903 //-----------------------------------------------------------------------------
2904 // Calls
2905 
// Call-count profiling is implemented elsewhere on this platform; this
// template must never be reached at code-generation time.
void TemplateTable::count_calls(Register method, Register temp) {
  // implemented elsewhere
  ShouldNotReachHere();
}
2910 
2911 void TemplateTable::prepare_invoke(int byte_no,
2912                                    Register method,  // linked method (or i-klass)
2913                                    Register index,   // itable index, MethodType, etc.
2914                                    Register recv,    // if caller wants to see it
2915                                    Register flags    // if caller wants to test it
2916                                    ) {
2917   // determine flags
2918   const Bytecodes::Code code = bytecode();
2919   const bool is_invokeinterface  = code == Bytecodes::_invokeinterface;
2920   const bool is_invokedynamic    = code == Bytecodes::_invokedynamic;
2921   const bool is_invokehandle     = code == Bytecodes::_invokehandle;
2922   const bool is_invokevirtual    = code == Bytecodes::_invokevirtual;
2923   const bool is_invokespecial    = code == Bytecodes::_invokespecial;
2924   const bool load_receiver       = (recv  != noreg);
2925   const bool save_flags          = (flags != noreg);
2926   assert(load_receiver == (code != Bytecodes::_invokestatic && code != Bytecodes::_invokedynamic), "");
2927   assert(save_flags    == (is_invokeinterface || is_invokevirtual), "need flags for vfinal");
2928   assert(flags == noreg || flags == rdx, "");
2929   assert(recv  == noreg || recv  == rcx, "");
2930 
2931   // setup registers & access constant pool cache
2932   if (recv  == noreg)  recv  = rcx;
2933   if (flags == noreg)  flags = rdx;
2934   assert_different_registers(method, index, recv, flags);
2935 
2936   // save 'interpreter return address'
2937   __ save_bcp();
2938 


3023   __ profile_final_call(rax);
3024   __ profile_arguments_type(rax, method, r13, true);
3025 
3026   __ jump_from_interpreted(method, rax);
3027 
3028   __ bind(notFinal);
3029 
3030   // get receiver klass
3031   __ null_check(recv, oopDesc::klass_offset_in_bytes());
3032   __ load_klass(rax, recv);
3033 
3034   // profile this call
3035   __ profile_virtual_call(rax, r14, rdx);
3036 
3037   // get target Method* & entry point
3038   __ lookup_virtual_method(rax, index, method);
3039   __ profile_arguments_type(rdx, method, r13, true);
3040   __ jump_from_interpreted(method, rdx);
3041 }
3042 
3043 
// Template for _invokevirtual: resolves via the f2 cache entry, then
// dispatches (vfinal or vtable path) in invokevirtual_helper.
void TemplateTable::invokevirtual(int byte_no) {
  transition(vtos, vtos);
  assert(byte_no == f2_byte, "use this argument");
  prepare_invoke(byte_no,
                 rbx,    // method or vtable index
                 noreg,  // unused itable index
                 rcx, rdx); // recv, flags

  // rbx: index
  // rcx: receiver
  // rdx: flags

  invokevirtual_helper(rbx, rcx, rdx);
}
3058 
3059 
3060 void TemplateTable::invokespecial(int byte_no) {
3061   transition(vtos, vtos);
3062   assert(byte_no == f1_byte, "use this argument");
3063   prepare_invoke(byte_no, rbx, noreg,  // get f1 Method*
3064                  rcx);  // get receiver also for null check




 466           JVM_CONSTANT_Double);
 467   __ jccb(Assembler::notEqual, Long);
 468   // dtos
 469   __ movdbl(xmm0, Address(rcx, rbx, Address::times_8, base_offset));
 470   __ push_d();
 471   __ jmpb(Done);
 472 
 473   __ bind(Long);
 474   // ltos
 475   __ movq(rax, Address(rcx, rbx, Address::times_8, base_offset));
 476   __ push_l();
 477 
 478   __ bind(Done);
 479 }
 480 
// Loads the one-byte local-variable index operand at bcp + offset into
// 'reg' (zero-extended), then negates it for use with the
// iaddress(reg)/laddress(reg) addressing forms used by the load templates
// below -- presumably locals sit at negative slot offsets from the locals
// base register; confirm against the iaddress() definition.
void TemplateTable::locals_index(Register reg, int offset) {
  __ load_unsigned_byte(reg, at_bcp(offset));
  __ negptr(reg);
}
 485 
// Shared generator for _iload and _nofast_iload.  When rc == MAY_REWRITE
// and RewriteFrequentPairs is enabled, peeks at the next bytecode and
// patches this _iload into _fast_iload2 / _fast_icaload / _fast_iload.
// The MAY_NOT_REWRITE path (used by nofast_iload) skips all patching so
// the bytecode stream stays unmodified.
void TemplateTable::iload_internal(RewriteControl rc) {
  transition(vtos, itos);
  if (RewriteFrequentPairs && rc == MAY_REWRITE) {
    Label rewrite, done;
    const Register bc = c_rarg3;
    assert(rbx != bc, "register damaged");

    // get next byte
    __ load_unsigned_byte(rbx,
                          at_bcp(Bytecodes::length_for(Bytecodes::_iload)));
    // if _iload, wait to rewrite to iload2.  We only want to rewrite the
    // last two iloads in a pair.  Comparing against fast_iload means that
    // the next bytecode is neither an iload nor a caload, and therefore
    // an iload pair.
    __ cmpl(rbx, Bytecodes::_iload);
    __ jcc(Assembler::equal, done);

    // if the next bytecode is already _fast_iload, fuse both into _fast_iload2
    __ cmpl(rbx, Bytecodes::_fast_iload);
    __ movl(bc, Bytecodes::_fast_iload2);
    __ jccb(Assembler::equal, rewrite);

    // if _caload, rewrite to fast_icaload
    __ cmpl(rbx, Bytecodes::_caload);
    __ movl(bc, Bytecodes::_fast_icaload);
    __ jccb(Assembler::equal, rewrite);

    // otherwise rewrite to _fast_iload so iload doesn't check again.
    __ movl(bc, Bytecodes::_fast_iload);

    // rewrite
    // bc: fast bytecode
    __ bind(rewrite);
    patch_bytecode(Bytecodes::_iload, bc, rbx, false);

    __ bind(done);
  }

  // Get the local value into tos
  locals_index(rbx);
  __ movl(rax, iaddress(rbx));
}
 527 
// Template for _iload: rewriting variant (default RewriteControl value --
// presumably MAY_REWRITE, mirroring nofast_iload below; confirm against
// the declaration).
void TemplateTable::iload() {
  iload_internal();
}
 531 
// Template for _nofast_iload: same load, but never patches the bytecode.
void TemplateTable::nofast_iload() {
  iload_internal(MAY_NOT_REWRITE);
}
 535 
// Template for _fast_iload2: two consecutive iloads fused into one bytecode.
// Loads and pushes the first local, then loads the second local (operand
// byte at bcp + 3) into tos (rax).
void TemplateTable::fast_iload2() {
  transition(vtos, itos);
  locals_index(rbx);
  __ movl(rax, iaddress(rbx));
  __ push(itos);
  locals_index(rbx, 3);  // second index operand lives at bcp + 3
  __ movl(rax, iaddress(rbx));
}
 544 
// Template for _fast_iload: like iload() but with no pair-rewrite check.
// Loads the indexed int local into tos (rax).
void TemplateTable::fast_iload() {
  transition(vtos, itos);
  locals_index(rbx);
  __ movl(rax, iaddress(rbx));
}
 550 
// Template for _lload: loads the indexed long local into tos (rax).
void TemplateTable::lload() {
  transition(vtos, ltos);
  locals_index(rbx);
  __ movq(rax, laddress(rbx));
}


 743   transition(vtos, ltos);
 744   __ movq(rax, laddress(n));
 745 }
 746 
// Template for _fload_<n>: loads float local #n into tos (xmm0).
void TemplateTable::fload(int n) {
  transition(vtos, ftos);
  __ movflt(xmm0, faddress(n));
}
 751 
// Template for _dload_<n>: loads double local #n into tos (xmm0).
void TemplateTable::dload(int n) {
  transition(vtos, dtos);
  __ movdbl(xmm0, daddress(n));
}
 756 
// Template for _aload_<n>: loads reference local #n into tos (rax).
void TemplateTable::aload(int n) {
  transition(vtos, atos);
  __ movptr(rax, aaddress(n));
}
 761 
// Template for _aload_0: rewriting variant (default RewriteControl value --
// presumably MAY_REWRITE, mirroring nofast_aload_0 below; confirm against
// the declaration).
void TemplateTable::aload_0() {
  aload_0_internal();
}
 765 
// Template for _nofast_aload_0: same load, but never patches the bytecode.
void TemplateTable::nofast_aload_0() {
  aload_0_internal(MAY_NOT_REWRITE);
}
 769 
 770 void TemplateTable::aload_0_internal(RewriteControl rc) {
 771   transition(vtos, atos);
 772   // According to bytecode histograms, the pairs:
 773   //
 774   // _aload_0, _fast_igetfield
 775   // _aload_0, _fast_agetfield
 776   // _aload_0, _fast_fgetfield
 777   //
 778   // occur frequently. If RewriteFrequentPairs is set, the (slow)
 779   // _aload_0 bytecode checks if the next bytecode is either
 780   // _fast_igetfield, _fast_agetfield or _fast_fgetfield and then
 781   // rewrites the current bytecode into a pair bytecode; otherwise it
 782   // rewrites the current bytecode into _fast_aload_0 that doesn't do
 783   // the pair check anymore.
 784   //
 785   // Note: If the next bytecode is _getfield, the rewrite must be
 786   //       delayed, otherwise we may miss an opportunity for a pair.
 787   //
 788   // Also rewrite frequent pairs
 789   //   aload_0, aload_1
 790   //   aload_0, iload_1
 791   // These bytecodes with a small amount of code are most profitable
 792   // to rewrite
 793   if (RewriteFrequentPairs && rc == MAY_REWRITE) {
 794     Label rewrite, done;
 795     const Register bc = c_rarg3;
 796     assert(rbx != bc, "register damaged");
 797     // get next byte
 798     __ load_unsigned_byte(rbx,
 799                           at_bcp(Bytecodes::length_for(Bytecodes::_aload_0)));
 800 
 801     // do actual aload_0
 802     aload(0);
 803 
 804     // if _getfield then wait with rewrite
 805     __ cmpl(rbx, Bytecodes::_getfield);
 806     __ jcc(Assembler::equal, done);
 807 
 808     // if _igetfield then rewrite to _fast_iaccess_0
 809     assert(Bytecodes::java_code(Bytecodes::_fast_iaccess_0) ==
 810            Bytecodes::_aload_0,
 811            "fix bytecode definition");
 812     __ cmpl(rbx, Bytecodes::_fast_igetfield);
 813     __ movl(bc, Bytecodes::_fast_iaccess_0);


2109 // not _between_ memory refs (that would require us to track the
2110 // flavor of the previous memory refs).  Requirements (2) and (3)
2111 // require some barriers before volatile stores and after volatile
2112 // loads.  These nearly cover requirement (1) but miss the
2113 // volatile-store-volatile-load case.  This final case is placed after
2114 // volatile-stores although it could just as well go before
2115 // volatile-loads.
// Emits a memory barrier with the given ordering constraint; elided
// entirely on uniprocessor systems, where no fence is required.
void TemplateTable::volatile_barrier(Assembler::Membar_mask_bits
                                     order_constraint) {
  // Helper function to insert a is-volatile test and memory barrier
  if (os::is_MP()) { // Not needed on single CPU
    __ membar(order_constraint);
  }
}
2123 
2124 void TemplateTable::resolve_cache_and_index(int byte_no,
2125                                             Register Rcache,
2126                                             Register index,
2127                                             size_t index_size) {
2128   const Register temp = rbx;
2129   Bytecodes::Code code = bytecode();
2130   switch (code) {
2131   case Bytecodes::_nofast_getfield: code = Bytecodes::_getfield; break;
2132   case Bytecodes::_nofast_putfield: code = Bytecodes::_putfield; break;
2133   case Bytecodes::_nofast_invokevirtual: code = Bytecodes::_invokevirtual;
2134   }
2135 
2136   assert_different_registers(Rcache, index, temp);
2137 
2138   Label resolved;
2139     assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
2140     __ get_cache_and_index_and_bytecode_at_bcp(Rcache, index, temp, byte_no, 1, index_size);
2141     __ cmpl(temp, code);  // have we resolved this bytecode?
2142     __ jcc(Assembler::equal, resolved);
2143 
2144   // resolve first time through
2145   address entry;
2146   switch (code) {
2147   case Bytecodes::_getstatic:
2148   case Bytecodes::_putstatic:
2149   case Bytecodes::_getfield:
2150   case Bytecodes::_putfield:
2151     entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_get_put);
2152     break;
2153   case Bytecodes::_invokevirtual:
2154   case Bytecodes::_invokespecial:
2155   case Bytecodes::_invokestatic:
2156   case Bytecodes::_invokeinterface:
2157     entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invoke);
2158     break;
2159   case Bytecodes::_invokehandle:
2160     entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokehandle);
2161     break;
2162   case Bytecodes::_invokedynamic:
2163     entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokedynamic);
2164     break;
2165   default:
2166     fatal(err_msg("unexpected bytecode: %s", Bytecodes::name(code)));
2167     break;
2168   }
2169   __ movl(temp, code);
2170   __ call_VM(noreg, entry, temp);
2171 
2172   // Update registers with resolved info
2173   __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size);
2174   __ bind(resolved);
2175 }
2176 
2177 // The cache and index registers must be set before call
2178 void TemplateTable::load_field_cp_cache_entry(Register obj,
2179                                               Register cache,
2180                                               Register index,
2181                                               Register off,
2182                                               Register flags,
2183                                               bool is_static = false) {
2184   assert_different_registers(cache, index, flags, off);
2185 
2186   ByteSize cp_base_offset = ConstantPoolCache::base_offset();
2187   // Field offset
2188   __ movptr(off, Address(cache, index, Address::times_ptr,
2189                          in_bytes(cp_base_offset +


2268       __ movptr(c_rarg1, at_tos()); // get object pointer without popping it
2269       __ verify_oop(c_rarg1);
2270     }
2271     // c_rarg1: object pointer or NULL
2272     // c_rarg2: cache entry pointer
2273     // c_rarg3: jvalue object on the stack
2274     __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2275                                        InterpreterRuntime::post_field_access),
2276                c_rarg1, c_rarg2, c_rarg3);
2277     __ get_cache_and_index_at_bcp(cache, index, 1);
2278     __ bind(L1);
2279   }
2280 }
2281 
     // Pop the receiver for a field access off the expression stack into r,
     // emit an implicit null check against it, and (in debug builds) verify
     // that r holds a valid oop.
2282 void TemplateTable::pop_and_check_object(Register r) {
2283   __ pop_ptr(r);
2284   __ null_check(r);  // for field access must check obj.
2285   __ verify_oop(r);
2286 }
2287 
     // Generate the template for getfield/getstatic (and, via rc, their
     // _nofast variants).
     //
     //   byte_no   - cp-cache bytecode slot used for the resolved check
     //   is_static - true for getstatic (no receiver on the stack)
     //   rc        - MAY_REWRITE permits patching the bytecode to the
     //               corresponding _fast_Xgetfield form; MAY_NOT_REWRITE
     //               suppresses rewriting (used by the _nofast variants)
     //
     // The field's tos state (its type tag) is extracted from the cp-cache
     // flags word and dispatched on; each arm loads the field value with the
     // correctly-sized/-signed load and pushes it with the matching tos.
2288 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2289   transition(vtos, vtos);
2290 
2291   const Register cache = rcx;
2292   const Register index = rdx;
2293   const Register obj   = c_rarg3;
2294   const Register off   = rbx;
2295   const Register flags = rax;
2296   const Register bc = c_rarg3; // uses same reg as obj, so don't mix them
2297 
2298   resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
2299   jvmti_post_field_access(cache, index, is_static, false);
2300   load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
2301 
2302   if (!is_static) {
2303     // obj is on the stack
2304     pop_and_check_object(obj);
2305   }
2306 
     // field address = obj + byte offset (for statics, obj is the base set
     // up by load_field_cp_cache_entry)
2307   const Address field(obj, off, Address::times_1);
2308 
2309   Label Done, notByte, notInt, notShort, notChar,
2310               notLong, notFloat, notObj, notDouble;
2311 
2312   __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
2313   // Make sure we don't need to mask edx after the above shift
2314   assert(btos == 0, "change code, btos != 0");
2315 
     // btos is 0, so testing the masked tos state for zero selects the
     // byte case without an explicit compare.
2316   __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
2317   __ jcc(Assembler::notZero, notByte);
2318   // btos
2319   __ load_signed_byte(rax, field);
2320   __ push(btos);
2321   // Rewrite bytecode to be faster
2322   if (!is_static && rc == MAY_REWRITE) {
2323     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2324   }
2325   __ jmp(Done);
2326 
2327   __ bind(notByte);
2328   __ cmpl(flags, atos);
2329   __ jcc(Assembler::notEqual, notObj);
2330   // atos
2331   __ load_heap_oop(rax, field);
2332   __ push(atos);
2333   if (!is_static && rc == MAY_REWRITE) {
2334     patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
2335   }
2336   __ jmp(Done);
2337 
2338   __ bind(notObj);
2339   __ cmpl(flags, itos);
2340   __ jcc(Assembler::notEqual, notInt);
2341   // itos
2342   __ movl(rax, field);
2343   __ push(itos);
2344   // Rewrite bytecode to be faster
2345   if (!is_static && rc == MAY_REWRITE) {
2346     patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
2347   }
2348   __ jmp(Done);
2349 
2350   __ bind(notInt);
2351   __ cmpl(flags, ctos);
2352   __ jcc(Assembler::notEqual, notChar);
2353   // ctos
2354   __ load_unsigned_short(rax, field);
2355   __ push(ctos);
2356   // Rewrite bytecode to be faster
2357   if (!is_static && rc == MAY_REWRITE) {
2358     patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);
2359   }
2360   __ jmp(Done);
2361 
2362   __ bind(notChar);
2363   __ cmpl(flags, stos);
2364   __ jcc(Assembler::notEqual, notShort);
2365   // stos
2366   __ load_signed_short(rax, field);
2367   __ push(stos);
2368   // Rewrite bytecode to be faster
2369   if (!is_static && rc == MAY_REWRITE) {
2370     patch_bytecode(Bytecodes::_fast_sgetfield, bc, rbx);
2371   }
2372   __ jmp(Done);
2373 
2374   __ bind(notShort);
2375   __ cmpl(flags, ltos);
2376   __ jcc(Assembler::notEqual, notLong);
2377   // ltos
2378   __ movq(rax, field);
2379   __ push(ltos);
2380   // Rewrite bytecode to be faster
2381   if (!is_static && rc == MAY_REWRITE) {
2382     patch_bytecode(Bytecodes::_fast_lgetfield, bc, rbx);
2383   }
2384   __ jmp(Done);
2385 
2386   __ bind(notLong);
2387   __ cmpl(flags, ftos);
2388   __ jcc(Assembler::notEqual, notFloat);
2389   // ftos
2390   __ movflt(xmm0, field);
2391   __ push(ftos);
2392   // Rewrite bytecode to be faster
2393   if (!is_static && rc == MAY_REWRITE) {
2394     patch_bytecode(Bytecodes::_fast_fgetfield, bc, rbx);
2395   }
2396   __ jmp(Done);
2397 
2398   __ bind(notFloat);
     // dtos is the only remaining state; the compare is debug-only.
2399 #ifdef ASSERT
2400   __ cmpl(flags, dtos);
2401   __ jcc(Assembler::notEqual, notDouble);
2402 #endif
2403   // dtos
2404   __ movdbl(xmm0, field);
2405   __ push(dtos);
2406   // Rewrite bytecode to be faster
2407   if (!is_static && rc == MAY_REWRITE) {
2408     patch_bytecode(Bytecodes::_fast_dgetfield, bc, rbx);
2409   }
2410 #ifdef ASSERT
2411   __ jmp(Done);
2412 
2413   __ bind(notDouble);
2414   __ stop("Bad state");
2415 #endif
2416 
2417   __ bind(Done);
2418   // [jk] not needed currently
2419   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
2420   //                                              Assembler::LoadStore));
2421 }
2422 
2423 
     // getfield: rewriting to the fast forms is permitted via the default
     // RewriteControl argument (declared in the header, outside this view).
2424 void TemplateTable::getfield(int byte_no) {
2425   getfield_or_static(byte_no, false);
2426 }
2427 
     // _nofast_getfield: identical to getfield except the bytecode is never
     // rewritten to a _fast_Xgetfield form.
2428 void TemplateTable::nofast_getfield(int byte_no) {
2429   getfield_or_static(byte_no, false, MAY_NOT_REWRITE);
2430 }
2431 
     // getstatic: same template as getfield but with no receiver to pop.
2432 void TemplateTable::getstatic(int byte_no) {
2433   getfield_or_static(byte_no, true);
2434 }
2435 
2436 // The registers cache and index expected to be set before call.
2437 // The function may destroy various registers, just not the cache and index registers.
2438 void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is_static) {
2439   transition(vtos, vtos);
2440 
2441   ByteSize cp_base_offset = ConstantPoolCache::base_offset();
2442 
2443   if (JvmtiExport::can_post_field_modification()) {
2444     // Check to see if a field modification watch has been set before
2445     // we take the time to call into the VM.
2446     Label L1;
2447     assert_different_registers(cache, index, rax);
2448     __ mov32(rax, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
2449     __ testl(rax, rax);
2450     __ jcc(Assembler::zero, L1);
2451 


2475                  c_rarg1, at_tos_p2()); // dtos (two word jvalue)
2476     }
2477     // cache entry pointer
2478     __ addptr(c_rarg2, in_bytes(cp_base_offset));
2479     __ shll(rscratch1, LogBytesPerWord);
2480     __ addptr(c_rarg2, rscratch1);
2481     // object (tos)
2482     __ mov(c_rarg3, rsp);
2483     // c_rarg1: object pointer set up above (NULL if static)
2484     // c_rarg2: cache entry pointer
2485     // c_rarg3: jvalue object on the stack
2486     __ call_VM(noreg,
2487                CAST_FROM_FN_PTR(address,
2488                                 InterpreterRuntime::post_field_modification),
2489                c_rarg1, c_rarg2, c_rarg3);
2490     __ get_cache_and_index_at_bcp(cache, index, 1);
2491     __ bind(L1);
2492   }
2493 }
2494 
     // Generate the template for putfield/putstatic (and, via rc, their
     // _nofast variants).
     //
     //   byte_no   - cp-cache bytecode slot used for the resolved check
     //   is_static - true for putstatic (no receiver on the stack)
     //   rc        - MAY_REWRITE permits patching the bytecode to the
     //               corresponding _fast_Xputfield form; MAY_NOT_REWRITE
     //               suppresses rewriting (used by the _nofast variants)
     //
     // Dispatches on the field's tos state from the cp-cache flags word,
     // pops the value (and receiver, for instance fields) and stores it with
     // the correctly-sized store.  A StoreLoad|StoreStore barrier is emitted
     // after volatile stores.
2495 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2496   transition(vtos, vtos);
2497 
2498   const Register cache = rcx;
2499   const Register index = rdx;
2500   const Register obj   = rcx;
2501   const Register off   = rbx;
2502   const Register flags = rax;
2503   const Register bc    = c_rarg3;
2504 
2505   resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
2506   jvmti_post_field_mod(cache, index, is_static);
2507   load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
2508 
2509   // [jk] not needed currently
2510   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
2511   //                                              Assembler::StoreStore));
2512 
     // Extract the is_volatile bit into rdx; tested after the store to
     // decide whether a memory barrier is required.
2513   Label notVolatile, Done;
2514   __ movl(rdx, flags);
2515   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
2516   __ andl(rdx, 0x1);
2517 
2518   // field address
2519   const Address field(obj, off, Address::times_1);
2520 
2521   Label notByte, notInt, notShort, notChar,
2522         notLong, notFloat, notObj, notDouble;
2523 
2524   __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
2525 
     // btos is 0, so a zero test on the masked tos state selects the byte
     // case without an explicit compare.
2526   assert(btos == 0, "change code, btos != 0");
2527   __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
2528   __ jcc(Assembler::notZero, notByte);
2529 
2530   // btos
2531   {
2532     __ pop(btos);
2533     if (!is_static) pop_and_check_object(obj);
2534     __ movb(field, rax);
2535     if (!is_static && rc == MAY_REWRITE) {
2536       patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
2537     }
2538     __ jmp(Done);
2539   }
2540 
2541   __ bind(notByte);
2542   __ cmpl(flags, atos);
2543   __ jcc(Assembler::notEqual, notObj);
2544 
2545   // atos
2546   {
2547     __ pop(atos);
2548     if (!is_static) pop_and_check_object(obj);
     // Store into the field via the GC barrier-aware helper.
2549     // Store into the field
2550     do_oop_store(_masm, field, rax, _bs->kind(), false);
2551     if (!is_static && rc == MAY_REWRITE) {
2552       patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
2553     }
2554     __ jmp(Done);
2555   }
2556 
2557   __ bind(notObj);
2558   __ cmpl(flags, itos);
2559   __ jcc(Assembler::notEqual, notInt);
2560 
2561   // itos
2562   {
2563     __ pop(itos);
2564     if (!is_static) pop_and_check_object(obj);
2565     __ movl(field, rax);
2566     if (!is_static && rc == MAY_REWRITE) {
2567       patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
2568     }
2569     __ jmp(Done);
2570   }
2571 
2572   __ bind(notInt);
2573   __ cmpl(flags, ctos);
2574   __ jcc(Assembler::notEqual, notChar);
2575 
2576   // ctos
2577   {
2578     __ pop(ctos);
2579     if (!is_static) pop_and_check_object(obj);
2580     __ movw(field, rax);
2581     if (!is_static && rc == MAY_REWRITE) {
2582       patch_bytecode(Bytecodes::_fast_cputfield, bc, rbx, true, byte_no);
2583     }
2584     __ jmp(Done);
2585   }
2586 
2587   __ bind(notChar);
2588   __ cmpl(flags, stos);
2589   __ jcc(Assembler::notEqual, notShort);
2590 
2591   // stos
2592   {
2593     __ pop(stos);
2594     if (!is_static) pop_and_check_object(obj);
2595     __ movw(field, rax);
2596     if (!is_static && rc == MAY_REWRITE) {
2597       patch_bytecode(Bytecodes::_fast_sputfield, bc, rbx, true, byte_no);
2598     }
2599     __ jmp(Done);
2600   }
2601 
2602   __ bind(notShort);
2603   __ cmpl(flags, ltos);
2604   __ jcc(Assembler::notEqual, notLong);
2605 
2606   // ltos
2607   {
2608     __ pop(ltos);
2609     if (!is_static) pop_and_check_object(obj);
2610     __ movq(field, rax);
2611     if (!is_static && rc == MAY_REWRITE) {
2612       patch_bytecode(Bytecodes::_fast_lputfield, bc, rbx, true, byte_no);
2613     }
2614     __ jmp(Done);
2615   }
2616 
2617   __ bind(notLong);
2618   __ cmpl(flags, ftos);
2619   __ jcc(Assembler::notEqual, notFloat);
2620 
2621   // ftos
2622   {
2623     __ pop(ftos);
2624     if (!is_static) pop_and_check_object(obj);
2625     __ movflt(field, xmm0);
2626     if (!is_static && rc == MAY_REWRITE) {
2627       patch_bytecode(Bytecodes::_fast_fputfield, bc, rbx, true, byte_no);
2628     }
2629     __ jmp(Done);
2630   }
2631 
2632   __ bind(notFloat);
     // dtos is the only remaining state; the compare is debug-only.
2633 #ifdef ASSERT
2634   __ cmpl(flags, dtos);
2635   __ jcc(Assembler::notEqual, notDouble);
2636 #endif
2637 
2638   // dtos
2639   {
2640     __ pop(dtos);
2641     if (!is_static) pop_and_check_object(obj);
2642     __ movdbl(field, xmm0);
2643     if (!is_static && rc == MAY_REWRITE) {
2644       patch_bytecode(Bytecodes::_fast_dputfield, bc, rbx, true, byte_no);
2645     }
2646   }
2647 
2648 #ifdef ASSERT
2649   __ jmp(Done);
2650 
2651   __ bind(notDouble);
2652   __ stop("Bad state");
2653 #endif
2654 
2655   __ bind(Done);
2656 
2657   // Check for volatile store
2658   __ testl(rdx, rdx);
2659   __ jcc(Assembler::zero, notVolatile);
2660   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
2661                                                Assembler::StoreStore));
2662   __ bind(notVolatile);
2663 }
2664 
     // putfield: rewriting to the fast forms is permitted via the default
     // RewriteControl argument (declared in the header, outside this view).
2665 void TemplateTable::putfield(int byte_no) {
2666   putfield_or_static(byte_no, false);
2667 }
2668 
     // _nofast_putfield: identical to putfield except the bytecode is never
     // rewritten to a _fast_Xputfield form.
2669 void TemplateTable::nofast_putfield(int byte_no) {
2670   putfield_or_static(byte_no, false, MAY_NOT_REWRITE);
2671 }
2672 
     // putstatic: same template as putfield but with no receiver to pop.
2673 void TemplateTable::putstatic(int byte_no) {
2674   putfield_or_static(byte_no, true);
2675 }
2676 
2677 void TemplateTable::jvmti_post_fast_field_mod() {
2678   if (JvmtiExport::can_post_field_modification()) {
2679     // Check to see if a field modification watch has been set before
2680     // we take the time to call into the VM.
2681     Label L2;
2682     __ mov32(c_rarg3, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
2683     __ testl(c_rarg3, c_rarg3);
2684     __ jcc(Assembler::zero, L2);
2685     __ pop_ptr(rbx);                  // copy the object pointer from tos
2686     __ verify_oop(rbx);
2687     __ push_ptr(rbx);                 // put the object pointer back on tos
2688     // Save tos values before call_VM() clobbers them. Since we have
2689     // to do it for every data type, we use the saved values as the
2690     // jvalue object.
2691     switch (bytecode()) {          // load values into the jvalue object
2692     case Bytecodes::_fast_aputfield: __ push_ptr(rax); break;


2930   __ decrement(r13);
2931 }
2932 
2933 
2934 
2935 //-----------------------------------------------------------------------------
2936 // Calls
2937 
     // Call counting is handled elsewhere on this platform; this template
     // hook must never be reached at code-generation time.
2938 void TemplateTable::count_calls(Register method, Register temp) {
2939   // implemented elsewhere
2940   ShouldNotReachHere();
2941 }
2942 
2943 void TemplateTable::prepare_invoke(int byte_no,
2944                                    Register method,  // linked method (or i-klass)
2945                                    Register index,   // itable index, MethodType, etc.
2946                                    Register recv,    // if caller wants to see it
2947                                    Register flags    // if caller wants to test it
2948                                    ) {
2949   // determine flags
2950   const Bytecodes::Code code = bytecode() == Bytecodes::_nofast_invokevirtual ? Bytecodes::_invokevirtual : bytecode();
2951   const bool is_invokeinterface  = code == Bytecodes::_invokeinterface;
2952   const bool is_invokedynamic    = code == Bytecodes::_invokedynamic;
2953   const bool is_invokehandle     = code == Bytecodes::_invokehandle;
2954   const bool is_invokevirtual    = code == Bytecodes::_invokevirtual;
2955   const bool is_invokespecial    = code == Bytecodes::_invokespecial;
2956   const bool load_receiver       = (recv  != noreg);
2957   const bool save_flags          = (flags != noreg);
2958   assert(load_receiver == (code != Bytecodes::_invokestatic && code != Bytecodes::_invokedynamic), "");
2959   assert(save_flags    == (is_invokeinterface || is_invokevirtual), "need flags for vfinal");
2960   assert(flags == noreg || flags == rdx, "");
2961   assert(recv  == noreg || recv  == rcx, "");
2962 
2963   // setup registers & access constant pool cache
2964   if (recv  == noreg)  recv  = rcx;
2965   if (flags == noreg)  flags = rdx;
2966   assert_different_registers(method, index, recv, flags);
2967 
2968   // save 'interpreter return address'
2969   __ save_bcp();
2970 


3055   __ profile_final_call(rax);
3056   __ profile_arguments_type(rax, method, r13, true);
3057 
3058   __ jump_from_interpreted(method, rax);
3059 
3060   __ bind(notFinal);
3061 
3062   // get receiver klass
3063   __ null_check(recv, oopDesc::klass_offset_in_bytes());
3064   __ load_klass(rax, recv);
3065 
3066   // profile this call
3067   __ profile_virtual_call(rax, r14, rdx);
3068 
3069   // get target Method* & entry point
3070   __ lookup_virtual_method(rax, index, method);
3071   __ profile_arguments_type(rdx, method, r13, true);
3072   __ jump_from_interpreted(method, rdx);
3073 }
3074 

     // invokevirtual: rewriting is permitted via the default RewriteControl
     // argument of invokevirtual_internal (declared in the header).
3075 void TemplateTable::invokevirtual(int byte_no) {
3076   invokevirtual_internal(byte_no);
3077 }
3078 
     // _nofast_invokevirtual: invokevirtual with bytecode rewriting
     // suppressed.
3079 void TemplateTable::nofast_invokevirtual(int byte_no) {
3080   invokevirtual_internal(byte_no, MAY_NOT_REWRITE);
3081 }
3082 
     // Shared body for invokevirtual and _nofast_invokevirtual.  Resolves
     // the call site (prepare_invoke) and dispatches through
     // invokevirtual_helper.
     //
     // NOTE(review): rc is not consulted in this body — the nofast
     // distinction is presumably handled downstream via bytecode() during
     // resolution; confirm against prepare_invoke/resolve_cache_and_index.
3083 void TemplateTable::invokevirtual_internal(int byte_no, RewriteControl rc) {
3084   transition(vtos, vtos);
3085   assert(byte_no == f2_byte, "use this argument");
3086   prepare_invoke(byte_no,
3087                  rbx,    // method or vtable index
3088                  noreg,  // unused itable index
3089                  rcx, rdx); // recv, flags
3090 
3091   // rbx: index
3092   // rcx: receiver
3093   // rdx: flags
3094 
3095   invokevirtual_helper(rbx, rcx, rdx);
3096 }
3097 
3098 
3099 void TemplateTable::invokespecial(int byte_no) {
3100   transition(vtos, vtos);
3101   assert(byte_no == f1_byte, "use this argument");
3102   prepare_invoke(byte_no, rbx, noreg,  // get f1 Method*
3103                  rcx);  // get receiver also for null check