src/cpu/x86/vm/templateTable_x86.cpp

 527   NOT_LP64(__ fld_d(    Address(rcx, rbx, Address::times_ptr, base_offset)));
 528   __ push(dtos);
 529 
 530   __ jmpb(Done);
 531   __ bind(Long);
 532 
 533   // ltos
 534   __ movptr(rax, Address(rcx, rbx, Address::times_ptr, base_offset + 0 * wordSize));
 535   NOT_LP64(__ movptr(rdx, Address(rcx, rbx, Address::times_ptr, base_offset + 1 * wordSize)));
 536   __ push(ltos);
 537 
 538   __ bind(Done);
 539 }
 540 
 541 void TemplateTable::locals_index(Register reg, int offset) {
 542   __ load_unsigned_byte(reg, at_bcp(offset));
 543   __ negptr(reg);
 544 }
 545 
 546 void TemplateTable::iload() {








 547   transition(vtos, itos);
 548   if (RewriteFrequentPairs) {
 549     Label rewrite, done;
 550     const Register bc = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
 551     LP64_ONLY(assert(rbx != bc, "register damaged"));
 552 
 553     // get next byte
 554     __ load_unsigned_byte(rbx,
 555                           at_bcp(Bytecodes::length_for(Bytecodes::_iload)));
 556     // if _iload, wait to rewrite to _fast_iload2.  We only want to rewrite the
 557     // last two iloads in a pair.  Comparing against _fast_iload means that
 558     // the next bytecode is neither an iload nor a caload, and therefore
 559     // an iload pair.
 560     __ cmpl(rbx, Bytecodes::_iload);
 561     __ jcc(Assembler::equal, done);
 562 
 563     __ cmpl(rbx, Bytecodes::_fast_iload);
 564     __ movl(bc, Bytecodes::_fast_iload2);
 565 
 566     __ jccb(Assembler::equal, rewrite);
 567 
 568     // if _caload, rewrite to fast_icaload


 799 }
 800 
 801 void TemplateTable::fload(int n) {
 802   transition(vtos, ftos);
 803   LP64_ONLY(__ movflt(xmm0, faddress(n)));
 804   NOT_LP64(__ fld_s(faddress(n)));
 805 }
 806 
 807 void TemplateTable::dload(int n) {
 808   transition(vtos, dtos);
 809   LP64_ONLY(__ movdbl(xmm0, daddress(n)));
 810   NOT_LP64(__ fld_d(daddress(n)));
 811 }
 812 
 813 void TemplateTable::aload(int n) {
 814   transition(vtos, atos);
 815   __ movptr(rax, aaddress(n));
 816 }
 817 
 818 void TemplateTable::aload_0() {








 819   transition(vtos, atos);
 820   // According to bytecode histograms, the pairs:
 821   //
 822   // _aload_0, _fast_igetfield
 823   // _aload_0, _fast_agetfield
 824   // _aload_0, _fast_fgetfield
 825   //
 826   // occur frequently. If RewriteFrequentPairs is set, the (slow)
 827   // _aload_0 bytecode checks if the next bytecode is either
 828   // _fast_igetfield, _fast_agetfield or _fast_fgetfield and then
 829   // rewrites the current bytecode into a pair bytecode; otherwise it
 830   // rewrites the current bytecode into _fast_aload_0 that doesn't do
 831   // the pair check anymore.
 832   //
 833   // Note: If the next bytecode is _getfield, the rewrite must be
 834   //       delayed, otherwise we may miss an opportunity for a pair.
 835   //
 836   // Also rewrite frequent pairs
 837   //   aload_0, aload_1
 838   //   aload_0, iload_1
 839   // These bytecodes require only a small amount of code, so they are the
 840   // most profitable to rewrite.
 841   if (RewriteFrequentPairs) {
 842     Label rewrite, done;
 843 
 844     const Register bc = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
 845     LP64_ONLY(assert(rbx != bc, "register damaged"));
 846 
 847     // get next byte
 848     __ load_unsigned_byte(rbx, at_bcp(Bytecodes::length_for(Bytecodes::_aload_0)));
 849 
 850     // do actual aload_0
 851     aload(0);
 852 
 853     // if _getfield then delay the rewrite
 854     __ cmpl(rbx, Bytecodes::_getfield);
 855     __ jcc(Assembler::equal, done);
 856 
 857     // if _igetfield then rewrite to _fast_iaccess_0
 858     assert(Bytecodes::java_code(Bytecodes::_fast_iaccess_0) == Bytecodes::_aload_0, "fix bytecode definition");
 859     __ cmpl(rbx, Bytecodes::_fast_igetfield);
 860     __ movl(bc, Bytecodes::_fast_iaccess_0);
 861     __ jccb(Assembler::equal, rewrite);


2475 // require some barriers before volatile stores and after volatile
2476 // loads.  These nearly cover requirement (1) but miss the
2477 // volatile-store-volatile-load case.  This final case is placed after
2478 // volatile-stores although it could just as well go before
2479 // volatile-loads.
2480 
2481 void TemplateTable::volatile_barrier(Assembler::Membar_mask_bits order_constraint ) {
2482   // Helper function to insert an is-volatile test and memory barrier
2483   if(!os::is_MP()) return;    // Not needed on single CPU
2484   __ membar(order_constraint);
2485 }
2486 
2487 void TemplateTable::resolve_cache_and_index(int byte_no,
2488                                             Register Rcache,
2489                                             Register index,
2490                                             size_t index_size) {
2491   const Register temp = rbx;
2492   assert_different_registers(Rcache, index, temp);
2493 
2494   Label resolved;







2495     assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
2496     __ get_cache_and_index_and_bytecode_at_bcp(Rcache, index, temp, byte_no, 1, index_size);
2497     __ cmpl(temp, (int) bytecode());  // have we resolved this bytecode?
2498     __ jcc(Assembler::equal, resolved);
2499 
2500   // resolve first time through
2501   address entry;
2502   switch (bytecode()) {
2503     case Bytecodes::_getstatic      : // fall through
2504     case Bytecodes::_putstatic      : // fall through
2505     case Bytecodes::_getfield       : // fall through
2506     case Bytecodes::_putfield       : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_get_put);        break;
2507     case Bytecodes::_invokevirtual  : // fall through
2508     case Bytecodes::_invokespecial  : // fall through
2509     case Bytecodes::_invokestatic   : // fall through
2510     case Bytecodes::_invokeinterface: entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invoke);         break;
2511     case Bytecodes::_invokehandle   : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokehandle);   break;
2512     case Bytecodes::_invokedynamic  : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokedynamic);  break;
2513     default:
2514       fatal(err_msg("unexpected bytecode: %s", Bytecodes::name(bytecode())));
2515       break;
2516   }
2517   __ movl(temp, (int)bytecode());
2518   __ call_VM(noreg, entry, temp);
2519   // Update registers with resolved info
2520   __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size);
2521   __ bind(resolved);
2522 }
2523 
2524 // The cache and index registers must be set before the call
2525 void TemplateTable::load_field_cp_cache_entry(Register obj,
2526                                               Register cache,
2527                                               Register index,
2528                                               Register off,
2529                                               Register flags,
2530                                               bool is_static = false) {
2531   assert_different_registers(cache, index, flags, off);
2532 
2533   ByteSize cp_base_offset = ConstantPoolCache::base_offset();
2534   // Field offset
2535   __ movptr(off, Address(cache, index, Address::times_ptr,
2536                          in_bytes(cp_base_offset +
2537                                   ConstantPoolCacheEntry::f2_offset())));


2612     } else {
2613       __ pop(atos);         // Get the object
2614       __ verify_oop(rax);
2615       __ push(atos);        // Restore stack state
2616     }
2617     // rax:    object pointer or NULL
2618     // cache: cache entry pointer
2619     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access),
2620                rax, cache);
2621     __ get_cache_and_index_at_bcp(cache, index, 1);
2622     __ bind(L1);
2623   }
2624 }
2625 
2626 void TemplateTable::pop_and_check_object(Register r) {
2627   __ pop_ptr(r);
2628   __ null_check(r);  // for field access must check obj.
2629   __ verify_oop(r);
2630 }
2631 
2632 void TemplateTable::getfield_or_static(int byte_no, bool is_static) {
2633   transition(vtos, vtos);
2634 
2635   const Register cache = rcx;
2636   const Register index = rdx;
2637   const Register obj   = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
2638   const Register off   = rbx;
2639   const Register flags = rax;
2640   const Register bc    = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // uses same reg as obj, so don't mix them
2641 
2642   resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
2643   jvmti_post_field_access(cache, index, is_static, false);
2644   load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
2645 
2646   if (!is_static) pop_and_check_object(obj);
2647 
2648   const Address field(obj, off, Address::times_1, 0*wordSize);
2649   NOT_LP64(const Address hi(obj, off, Address::times_1, 1*wordSize));
2650 
2651   Label Done, notByte, notInt, notShort, notChar, notLong, notFloat, notObj, notDouble;
2652 
2653   __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
2654   // Make sure we don't need to mask edx after the above shift
2655   assert(btos == 0, "change code, btos != 0");
2656 
2657   __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
2658 
2659   __ jcc(Assembler::notZero, notByte);
2660   // btos
2661   __ load_signed_byte(rax, field);
2662   __ push(btos);
2663   // Rewrite bytecode to be faster
2664   if (!is_static) {
2665     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2666   }
2667   __ jmp(Done);
2668 
2669   __ bind(notByte);
2670   __ cmpl(flags, atos);
2671   __ jcc(Assembler::notEqual, notObj);
2672   // atos
2673   __ load_heap_oop(rax, field);
2674   __ push(atos);
2675   if (!is_static) {
2676     patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
2677   }
2678   __ jmp(Done);
2679 
2680   __ bind(notObj);
2681   __ cmpl(flags, itos);
2682   __ jcc(Assembler::notEqual, notInt);
2683   // itos
2684   __ movl(rax, field);
2685   __ push(itos);
2686   // Rewrite bytecode to be faster
2687   if (!is_static) {
2688     patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
2689   }
2690   __ jmp(Done);
2691 
2692   __ bind(notInt);
2693   __ cmpl(flags, ctos);
2694   __ jcc(Assembler::notEqual, notChar);
2695   // ctos
2696   __ load_unsigned_short(rax, field);
2697   __ push(ctos);
2698   // Rewrite bytecode to be faster
2699   if (!is_static) {
2700     patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);
2701   }
2702   __ jmp(Done);
2703 
2704   __ bind(notChar);
2705   __ cmpl(flags, stos);
2706   __ jcc(Assembler::notEqual, notShort);
2707   // stos
2708   __ load_signed_short(rax, field);
2709   __ push(stos);
2710   // Rewrite bytecode to be faster
2711   if (!is_static) {
2712     patch_bytecode(Bytecodes::_fast_sgetfield, bc, rbx);
2713   }
2714   __ jmp(Done);
2715 
2716   __ bind(notShort);
2717   __ cmpl(flags, ltos);
2718   __ jcc(Assembler::notEqual, notLong);
2719   // ltos
2720 
2721 #ifndef _LP64
2722   // Generate code as if volatile.  There just aren't enough registers to
2723   // save that information and this code is faster than the test.
2724   __ fild_d(field);                // Must load atomically
2725   __ subptr(rsp,2*wordSize);    // Make space for store
2726   __ fistp_d(Address(rsp,0));
2727   __ pop(rax);
2728   __ pop(rdx);
2729 #else
2730   __ movq(rax, field);
2731 #endif
2732 
2733   __ push(ltos);
2734   // Rewrite bytecode to be faster
2735   LP64_ONLY(if (!is_static) patch_bytecode(Bytecodes::_fast_lgetfield, bc, rbx));
2736   __ jmp(Done);
2737 
2738   __ bind(notLong);
2739   __ cmpl(flags, ftos);
2740   __ jcc(Assembler::notEqual, notFloat);
2741   // ftos
2742 
2743   LP64_ONLY(__ movflt(xmm0, field));
2744   NOT_LP64(__ fld_s(field));
2745   __ push(ftos);
2746   // Rewrite bytecode to be faster
2747   if (!is_static) {
2748     patch_bytecode(Bytecodes::_fast_fgetfield, bc, rbx);
2749   }
2750   __ jmp(Done);
2751 
2752   __ bind(notFloat);
2753 #ifdef ASSERT
2754   __ cmpl(flags, dtos);
2755   __ jcc(Assembler::notEqual, notDouble);
2756 #endif
2757   // dtos
2758   LP64_ONLY(__ movdbl(xmm0, field));
2759   NOT_LP64(__ fld_d(field));
2760   __ push(dtos);
2761   // Rewrite bytecode to be faster
2762   if (!is_static) {
2763     patch_bytecode(Bytecodes::_fast_dgetfield, bc, rbx);
2764   }
2765 #ifdef ASSERT
2766   __ jmp(Done);
2767 
2768 
2769   __ bind(notDouble);
2770   __ stop("Bad state");
2771 #endif
2772 
2773   __ bind(Done);
2774   // [jk] not needed currently
2775   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
2776   //                                              Assembler::LoadStore));
2777 }
2778 
2779 void TemplateTable::getfield(int byte_no) {
2780   getfield_or_static(byte_no, false);
2781 }
2782 




2783 void TemplateTable::getstatic(int byte_no) {
2784   getfield_or_static(byte_no, true);
2785 }
2786 
2787 
2788 // The registers cache and index are expected to be set before the call.
2789 // The function may destroy various registers, just not the cache and index registers.
2790 void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is_static) {
2791 
2792   const Register robj = LP64_ONLY(c_rarg2)   NOT_LP64(rax);
2793   const Register RBX  = LP64_ONLY(c_rarg1)   NOT_LP64(rbx);
2794   const Register RCX  = LP64_ONLY(c_rarg3)   NOT_LP64(rcx);
2795   const Register RDX  = LP64_ONLY(rscratch1) NOT_LP64(rdx);
2796 
2797   ByteSize cp_base_offset = ConstantPoolCache::base_offset();
2798 
2799   if (JvmtiExport::can_post_field_modification()) {
2800     // Check to see if a field modification watch has been set before
2801     // we take the time to call into the VM.
2802     Label L1;


2854 #endif
2855     }
2856     // cache entry pointer
2857     __ addptr(robj, in_bytes(cp_base_offset));
2858     __ shll(RDX, LogBytesPerWord);
2859     __ addptr(robj, RDX);
2860     // object (tos)
2861     __ mov(RCX, rsp);
2862     // c_rarg1: object pointer set up above (NULL if static)
2863     // c_rarg2: cache entry pointer
2864     // c_rarg3: jvalue object on the stack
2865     __ call_VM(noreg,
2866                CAST_FROM_FN_PTR(address,
2867                                 InterpreterRuntime::post_field_modification),
2868                RBX, robj, RCX);
2869     __ get_cache_and_index_at_bcp(cache, index, 1);
2870     __ bind(L1);
2871   }
2872 }
2873 
2874 void TemplateTable::putfield_or_static(int byte_no, bool is_static) {
2875   transition(vtos, vtos);
2876 
2877   const Register cache = rcx;
2878   const Register index = rdx;
2879   const Register obj   = rcx;
2880   const Register off   = rbx;
2881   const Register flags = rax;
2882   const Register bc    = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
2883 
2884   resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
2885   jvmti_post_field_mod(cache, index, is_static);
2886   load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
2887 
2888   // [jk] not needed currently
2889   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
2890   //                                              Assembler::StoreStore));
2891 
2892   Label notVolatile, Done;
2893   __ movl(rdx, flags);
2894   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
2895   __ andl(rdx, 0x1);
2896 
2897   // field addresses
2898   const Address field(obj, off, Address::times_1, 0*wordSize);
2899   NOT_LP64( const Address hi(obj, off, Address::times_1, 1*wordSize);)
2900 
2901   Label notByte, notInt, notShort, notChar,
2902         notLong, notFloat, notObj, notDouble;
2903 
2904   __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
2905 
2906   assert(btos == 0, "change code, btos != 0");
2907   __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
2908   __ jcc(Assembler::notZero, notByte);
2909 
2910   // btos
2911   {
2912     __ pop(btos);
2913     if (!is_static) pop_and_check_object(obj);
2914     __ movb(field, rax);
2915     if (!is_static) {
2916       patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
2917     }
2918     __ jmp(Done);
2919   }
2920 
2921   __ bind(notByte);
2922   __ cmpl(flags, atos);
2923   __ jcc(Assembler::notEqual, notObj);
2924 
2925   // atos
2926   {
2927     __ pop(atos);
2928     if (!is_static) pop_and_check_object(obj);
2929     // Store into the field
2930     do_oop_store(_masm, field, rax, _bs->kind(), false);
2931     if (!is_static) {
2932       patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
2933     }
2934     __ jmp(Done);
2935   }
2936 
2937   __ bind(notObj);
2938   __ cmpl(flags, itos);
2939   __ jcc(Assembler::notEqual, notInt);
2940 
2941   // itos
2942   {
2943     __ pop(itos);
2944     if (!is_static) pop_and_check_object(obj);
2945     __ movl(field, rax);
2946     if (!is_static) {
2947       patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
2948     }
2949     __ jmp(Done);
2950   }
2951 
2952   __ bind(notInt);
2953   __ cmpl(flags, ctos);
2954   __ jcc(Assembler::notEqual, notChar);
2955 
2956   // ctos
2957   {
2958     __ pop(ctos);
2959     if (!is_static) pop_and_check_object(obj);
2960     __ movw(field, rax);
2961     if (!is_static) {
2962       patch_bytecode(Bytecodes::_fast_cputfield, bc, rbx, true, byte_no);
2963     }
2964     __ jmp(Done);
2965   }
2966 
2967   __ bind(notChar);
2968   __ cmpl(flags, stos);
2969   __ jcc(Assembler::notEqual, notShort);
2970 
2971   // stos
2972   {
2973     __ pop(stos);
2974     if (!is_static) pop_and_check_object(obj);
2975     __ movw(field, rax);
2976     if (!is_static) {
2977       patch_bytecode(Bytecodes::_fast_sputfield, bc, rbx, true, byte_no);
2978     }
2979     __ jmp(Done);
2980   }
2981 
2982   __ bind(notShort);
2983   __ cmpl(flags, ltos);
2984   __ jcc(Assembler::notEqual, notLong);
2985 
2986   // ltos
2987 #ifdef _LP64
2988   {
2989     __ pop(ltos);
2990     if (!is_static) pop_and_check_object(obj);
2991     __ movq(field, rax);
2992     if (!is_static) {
2993       patch_bytecode(Bytecodes::_fast_lputfield, bc, rbx, true, byte_no);
2994     }
2995     __ jmp(Done);
2996   }
2997 #else
2998   {
2999     Label notVolatileLong;
3000     __ testl(rdx, rdx);
3001     __ jcc(Assembler::zero, notVolatileLong);
3002 
3003     __ pop(ltos);  // overwrites rdx, do this after testing volatile.
3004     if (!is_static) pop_and_check_object(obj);
3005 
3006     // Replace with real volatile test
3007     __ push(rdx);
3008     __ push(rax);                 // Must update atomically with FIST
3009     __ fild_d(Address(rsp,0));    // So load into FPU register
3010     __ fistp_d(field);            // and put into memory atomically
3011     __ addptr(rsp, 2*wordSize);
3012     // volatile_barrier();


3019 
3020     __ pop(ltos);  // overwrites rdx
3021     if (!is_static) pop_and_check_object(obj);
3022     __ movptr(hi, rdx);
3023     __ movptr(field, rax);
3024     // Don't rewrite to _fast_lputfield for potential volatile case.
3025     __ jmp(notVolatile);
3026   }
3027 #endif // _LP64
3028 
3029   __ bind(notLong);
3030   __ cmpl(flags, ftos);
3031   __ jcc(Assembler::notEqual, notFloat);
3032 
3033   // ftos
3034   {
3035     __ pop(ftos);
3036     if (!is_static) pop_and_check_object(obj);
3037     NOT_LP64( __ fstp_s(field);)
3038     LP64_ONLY( __ movflt(field, xmm0);)
3039     if (!is_static) {
3040       patch_bytecode(Bytecodes::_fast_fputfield, bc, rbx, true, byte_no);
3041     }
3042     __ jmp(Done);
3043   }
3044 
3045   __ bind(notFloat);
3046 #ifdef ASSERT
3047   __ cmpl(flags, dtos);
3048   __ jcc(Assembler::notEqual, notDouble);
3049 #endif
3050 
3051   // dtos
3052   {
3053     __ pop(dtos);
3054     if (!is_static) pop_and_check_object(obj);
3055     NOT_LP64( __ fstp_d(field);)
3056     LP64_ONLY( __ movdbl(field, xmm0);)
3057     if (!is_static) {
3058       patch_bytecode(Bytecodes::_fast_dputfield, bc, rbx, true, byte_no);
3059     }
3060   }
3061 
3062 #ifdef ASSERT
3063   __ jmp(Done);
3064 
3065   __ bind(notDouble);
3066   __ stop("Bad state");
3067 #endif
3068 
3069   __ bind(Done);
3070 
3071   // Check for volatile store
3072   __ testl(rdx, rdx);
3073   __ jcc(Assembler::zero, notVolatile);
3074   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3075                                                Assembler::StoreStore));
3076   __ bind(notVolatile);
3077 }
3078 
3079 void TemplateTable::putfield(int byte_no) {
3080   putfield_or_static(byte_no, false);
3081 }
3082 




3083 void TemplateTable::putstatic(int byte_no) {
3084   putfield_or_static(byte_no, true);
3085 }
3086 
3087 void TemplateTable::jvmti_post_fast_field_mod() {
3088 
3089   const Register scratch = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3090 
3091   if (JvmtiExport::can_post_field_modification()) {
3092     // Check to see if a field modification watch has been set before
3093     // we take the time to call into the VM.
3094     Label L2;
3095     __ mov32(scratch, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3096     __ testl(scratch, scratch);
3097     __ jcc(Assembler::zero, L2);
3098     __ pop_ptr(rbx);                  // copy the object pointer from tos
3099     __ verify_oop(rbx);
3100     __ push_ptr(rbx);                 // put the object pointer back on tos
3101     // Save tos values before call_VM() clobbers them. Since we have
3102     // to do it for every data type, we use the saved values as the




 527   NOT_LP64(__ fld_d(    Address(rcx, rbx, Address::times_ptr, base_offset)));
 528   __ push(dtos);
 529 
 530   __ jmpb(Done);
 531   __ bind(Long);
 532 
 533   // ltos
 534   __ movptr(rax, Address(rcx, rbx, Address::times_ptr, base_offset + 0 * wordSize));
 535   NOT_LP64(__ movptr(rdx, Address(rcx, rbx, Address::times_ptr, base_offset + 1 * wordSize)));
 536   __ push(ltos);
 537 
 538   __ bind(Done);
 539 }
 540 
 541 void TemplateTable::locals_index(Register reg, int offset) {
 542   __ load_unsigned_byte(reg, at_bcp(offset));
 543   __ negptr(reg);
 544 }
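
A note on locals_index(): the byte index is negated because local slots sit at
decreasing addresses from the locals pointer, so the negated value can feed a
scaled-index address directly. A minimal sketch of the kind of helper that
consumes it (the rlocals base-register name is an assumption here; the real
address helpers are defined elsewhere in this file):

  // Sketch, assuming rlocals holds the locals base pointer (rdi on 32-bit, r14 on 64-bit):
  static inline Address iaddress(Register r) {
    return Address(rlocals, r, Address::times_ptr);  // base + (-index) * wordSize
  }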
 545 
 546 void TemplateTable::iload() {
 547   iload_internal();
 548 }
 549 
 550 void TemplateTable::nofast_iload() {
 551   iload_internal(may_not_rewrite);
 552 }
 553 
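iload() and nofast_iload() are thin wrappers around a single generator; the
RewriteControl argument decides whether the template may patch the bytecode.
Since iload() calls iload_internal() with no argument, may_rewrite is
presumably the default. A sketch of the assumed declarations (they would live
in templateTable.hpp, not in this hunk):

  // Assumed declarations, shown for context only:
  enum RewriteControl { may_rewrite, may_not_rewrite };
  static void iload_internal(RewriteControl rc = may_rewrite);
  static void aload_0_internal(RewriteControl rc = may_rewrite);
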
 554 void TemplateTable::iload_internal(RewriteControl rc) {
 555   transition(vtos, itos);
 556   if (RewriteFrequentPairs && rc == may_rewrite) {
 557     Label rewrite, done;
 558     const Register bc = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
 559     LP64_ONLY(assert(rbx != bc, "register damaged"));
 560 
 561     // get next byte
 562     __ load_unsigned_byte(rbx,
 563                           at_bcp(Bytecodes::length_for(Bytecodes::_iload)));
 564     // if _iload, wait to rewrite to _fast_iload2.  We only want to rewrite the
 565     // last two iloads in a pair.  Comparing against _fast_iload means that
 566     // the next bytecode is neither an iload nor a caload, and therefore
 567     // an iload pair.
 568     __ cmpl(rbx, Bytecodes::_iload);
 569     __ jcc(Assembler::equal, done);
 570 
 571     __ cmpl(rbx, Bytecodes::_fast_iload);
 572     __ movl(bc, Bytecodes::_fast_iload2);
 573 
 574     __ jccb(Assembler::equal, rewrite);
 575 
 576     // if _caload, rewrite to fast_icaload


 807 }
 808 
 809 void TemplateTable::fload(int n) {
 810   transition(vtos, ftos);
 811   LP64_ONLY(__ movflt(xmm0, faddress(n)));
 812   NOT_LP64(__ fld_s(faddress(n)));
 813 }
 814 
 815 void TemplateTable::dload(int n) {
 816   transition(vtos, dtos);
 817   LP64_ONLY(__ movdbl(xmm0, daddress(n)));
 818   NOT_LP64(__ fld_d(daddress(n)));
 819 }
 820 
 821 void TemplateTable::aload(int n) {
 822   transition(vtos, atos);
 823   __ movptr(rax, aaddress(n));
 824 }
 825 
 826 void TemplateTable::aload_0() {
 827   aload_0_internal();
 828 }
 829 
 830 void TemplateTable::nofast_aload_0() {
 831   aload_0_internal(may_not_rewrite);
 832 }
 833 
 834 void TemplateTable::aload_0_internal(RewriteControl rc) {
 835   transition(vtos, atos);
 836   // According to bytecode histograms, the pairs:
 837   //
 838   // _aload_0, _fast_igetfield
 839   // _aload_0, _fast_agetfield
 840   // _aload_0, _fast_fgetfield
 841   //
 842   // occur frequently. If RewriteFrequentPairs is set, the (slow)
 843   // _aload_0 bytecode checks if the next bytecode is either
 844   // _fast_igetfield, _fast_agetfield or _fast_fgetfield and then
 845   // rewrites the current bytecode into a pair bytecode; otherwise it
 846   // rewrites the current bytecode into _fast_aload_0 that doesn't do
 847   // the pair check anymore.
 848   //
 849   // Note: If the next bytecode is _getfield, the rewrite must be
 850   //       delayed, otherwise we may miss an opportunity for a pair.
 851   //
 852   // Also rewrite frequent pairs
 853   //   aload_0, aload_1
 854   //   aload_0, iload_1
 855   // These bytecodes require only a small amount of code, so they are the
 856   // most profitable to rewrite.
 857   if (RewriteFrequentPairs && rc == may_rewrite) {
 858     Label rewrite, done;
 859 
 860     const Register bc = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
 861     LP64_ONLY(assert(rbx != bc, "register damaged"));
 862 
 863     // get next byte
 864     __ load_unsigned_byte(rbx, at_bcp(Bytecodes::length_for(Bytecodes::_aload_0)));
 865 
 866     // do actual aload_0
 867     aload(0);
 868 
 869     // if _getfield then delay the rewrite
 870     __ cmpl(rbx, Bytecodes::_getfield);
 871     __ jcc(Assembler::equal, done);
 872 
 873     // if _igetfield then rewrite to _fast_iaccess_0
 874     assert(Bytecodes::java_code(Bytecodes::_fast_iaccess_0) == Bytecodes::_aload_0, "fix bytecode definition");
 875     __ cmpl(rbx, Bytecodes::_fast_igetfield);
 876     __ movl(bc, Bytecodes::_fast_iaccess_0);
 877     __ jccb(Assembler::equal, rewrite);


2491 // require some barriers before volatile stores and after volatile
2492 // loads.  These nearly cover requirement (1) but miss the
2493 // volatile-store-volatile-load case.  This final case is placed after
2494 // volatile-stores although it could just as well go before
2495 // volatile-loads.
2496 
2497 void TemplateTable::volatile_barrier(Assembler::Membar_mask_bits order_constraint ) {
2498   // Helper function to insert an is-volatile test and memory barrier
2499   if(!os::is_MP()) return;    // Not needed on single CPU
2500   __ membar(order_constraint);
2501 }
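
A usage sketch matching the pattern that appears in putfield_or_static() further
down in this listing: the barrier is emitted only when the field's is-volatile
bit (kept in rdx there) is set, which covers the volatile-store / volatile-load
case discussed above:

  Label notVolatile;
  __ testl(rdx, rdx);                  // rdx == 1 iff the field is volatile
  __ jcc(Assembler::zero, notVolatile);
  volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
                                               Assembler::StoreStore));
  __ bind(notVolatile);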
2502 
2503 void TemplateTable::resolve_cache_and_index(int byte_no,
2504                                             Register Rcache,
2505                                             Register index,
2506                                             size_t index_size) {
2507   const Register temp = rbx;
2508   assert_different_registers(Rcache, index, temp);
2509 
2510   Label resolved;
2511 
2512   Bytecodes::Code code = bytecode();
2513   switch (code) {
2514   case Bytecodes::_nofast_getfield: code = Bytecodes::_getfield; break;
2515   case Bytecodes::_nofast_putfield: code = Bytecodes::_putfield; break;
2516   }
2517 
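        // Rationale sketch (an assumption, not stated in this hunk): the constant pool
        // cache records the resolved bytecode in its plain _getfield/_putfield form,
        // so the nofast variants are folded back above before the
        // "have we resolved this bytecode?" comparison below.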
2518   assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
2519   __ get_cache_and_index_and_bytecode_at_bcp(Rcache, index, temp, byte_no, 1, index_size);
2520   __ cmpl(temp, code);  // have we resolved this bytecode?
2521   __ jcc(Assembler::equal, resolved);
2522 
2523   // resolve first time through
2524   address entry;
2525   switch (code) {
2526     case Bytecodes::_getstatic      : // fall through
2527     case Bytecodes::_putstatic      : // fall through
2528     case Bytecodes::_getfield       : // fall through
2529     case Bytecodes::_putfield       : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_get_put);        break;
2530     case Bytecodes::_invokevirtual  : // fall through
2531     case Bytecodes::_invokespecial  : // fall through
2532     case Bytecodes::_invokestatic   : // fall through
2533     case Bytecodes::_invokeinterface: entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invoke);         break;
2534     case Bytecodes::_invokehandle   : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokehandle);   break;
2535     case Bytecodes::_invokedynamic  : entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokedynamic);  break;
2536     default:
2537       fatal(err_msg("unexpected bytecode: %s", Bytecodes::name(code)));
2538       break;
2539   }
2540   __ movl(temp, code);
2541   __ call_VM(noreg, entry, temp);
2542   // Update registers with resolved info
2543   __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size);
2544   __ bind(resolved);
2545 }
2546 
2547 // The cache and index registers must be set before the call
2548 void TemplateTable::load_field_cp_cache_entry(Register obj,
2549                                               Register cache,
2550                                               Register index,
2551                                               Register off,
2552                                               Register flags,
2553                                               bool is_static = false) {
2554   assert_different_registers(cache, index, flags, off);
2555 
2556   ByteSize cp_base_offset = ConstantPoolCache::base_offset();
2557   // Field offset
2558   __ movptr(off, Address(cache, index, Address::times_ptr,
2559                          in_bytes(cp_base_offset +
2560                                   ConstantPoolCacheEntry::f2_offset())));


2635     } else {
2636       __ pop(atos);         // Get the object
2637       __ verify_oop(rax);
2638       __ push(atos);        // Restore stack state
2639     }
2640     // rax:    object pointer or NULL
2641     // cache: cache entry pointer
2642     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access),
2643                rax, cache);
2644     __ get_cache_and_index_at_bcp(cache, index, 1);
2645     __ bind(L1);
2646   }
2647 }
2648 
2649 void TemplateTable::pop_and_check_object(Register r) {
2650   __ pop_ptr(r);
2651   __ null_check(r);  // for field access must check obj.
2652   __ verify_oop(r);
2653 }
2654 
2655 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2656   transition(vtos, vtos);
2657 
2658   const Register cache = rcx;
2659   const Register index = rdx;
2660   const Register obj   = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
2661   const Register off   = rbx;
2662   const Register flags = rax;
2663   const Register bc    = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // uses same reg as obj, so don't mix them
2664 
2665   resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
2666   jvmti_post_field_access(cache, index, is_static, false);
2667   load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
2668 
2669   if (!is_static) pop_and_check_object(obj);
2670 
2671   const Address field(obj, off, Address::times_1, 0*wordSize);
2672   NOT_LP64(const Address hi(obj, off, Address::times_1, 1*wordSize));
2673 
2674   Label Done, notByte, notInt, notShort, notChar, notLong, notFloat, notObj, notDouble;
2675 
2676   __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
2677   // Make sure we don't need to mask edx after the above shift
2678   assert(btos == 0, "change code, btos != 0");
2679 
2680   __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
2681 
2682   __ jcc(Assembler::notZero, notByte);
2683   // btos
2684   __ load_signed_byte(rax, field);
2685   __ push(btos);
2686   // Rewrite bytecode to be faster
2687   if (!is_static && rc == may_rewrite) {
2688     patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2689   }
2690   __ jmp(Done);
2691 
2692   __ bind(notByte);
2693   __ cmpl(flags, atos);
2694   __ jcc(Assembler::notEqual, notObj);
2695   // atos
2696   __ load_heap_oop(rax, field);
2697   __ push(atos);
2698   if (!is_static && rc == may_rewrite) {
2699     patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
2700   }
2701   __ jmp(Done);
2702 
2703   __ bind(notObj);
2704   __ cmpl(flags, itos);
2705   __ jcc(Assembler::notEqual, notInt);
2706   // itos
2707   __ movl(rax, field);
2708   __ push(itos);
2709   // Rewrite bytecode to be faster
2710   if (!is_static && rc == may_rewrite) {
2711     patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
2712   }
2713   __ jmp(Done);
2714 
2715   __ bind(notInt);
2716   __ cmpl(flags, ctos);
2717   __ jcc(Assembler::notEqual, notChar);
2718   // ctos
2719   __ load_unsigned_short(rax, field);
2720   __ push(ctos);
2721   // Rewrite bytecode to be faster
2722   if (!is_static && rc == may_rewrite) {
2723     patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);
2724   }
2725   __ jmp(Done);
2726 
2727   __ bind(notChar);
2728   __ cmpl(flags, stos);
2729   __ jcc(Assembler::notEqual, notShort);
2730   // stos
2731   __ load_signed_short(rax, field);
2732   __ push(stos);
2733   // Rewrite bytecode to be faster
2734   if (!is_static && rc == may_rewrite) {
2735     patch_bytecode(Bytecodes::_fast_sgetfield, bc, rbx);
2736   }
2737   __ jmp(Done);
2738 
2739   __ bind(notShort);
2740   __ cmpl(flags, ltos);
2741   __ jcc(Assembler::notEqual, notLong);
2742   // ltos
2743 
2744 #ifndef _LP64
2745   // Generate code as if volatile.  There just aren't enough registers to
2746   // save that information and this code is faster than the test.
2747   __ fild_d(field);                // Must load atomically
2748   __ subptr(rsp,2*wordSize);    // Make space for store
2749   __ fistp_d(Address(rsp,0));
2750   __ pop(rax);
2751   __ pop(rdx);
2752 #else
2753   __ movq(rax, field);
2754 #endif
2755 
2756   __ push(ltos);
2757   // Rewrite bytecode to be faster
2758   LP64_ONLY(if (!is_static && rc == may_rewrite) patch_bytecode(Bytecodes::_fast_lgetfield, bc, rbx));
2759   __ jmp(Done);
2760 
2761   __ bind(notLong);
2762   __ cmpl(flags, ftos);
2763   __ jcc(Assembler::notEqual, notFloat);
2764   // ftos
2765 
2766   LP64_ONLY(__ movflt(xmm0, field));
2767   NOT_LP64(__ fld_s(field));
2768   __ push(ftos);
2769   // Rewrite bytecode to be faster
2770   if (!is_static && rc == may_rewrite) {
2771     patch_bytecode(Bytecodes::_fast_fgetfield, bc, rbx);
2772   }
2773   __ jmp(Done);
2774 
2775   __ bind(notFloat);
2776 #ifdef ASSERT
2777   __ cmpl(flags, dtos);
2778   __ jcc(Assembler::notEqual, notDouble);
2779 #endif
2780   // dtos
2781   LP64_ONLY(__ movdbl(xmm0, field));
2782   NOT_LP64(__ fld_d(field));
2783   __ push(dtos);
2784   // Rewrite bytecode to be faster
2785   if (!is_static && rc == may_rewrite) {
2786     patch_bytecode(Bytecodes::_fast_dgetfield, bc, rbx);
2787   }
2788 #ifdef ASSERT
2789   __ jmp(Done);
2790 
2791 
2792   __ bind(notDouble);
2793   __ stop("Bad state");
2794 #endif
2795 
2796   __ bind(Done);
2797   // [jk] not needed currently
2798   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
2799   //                                              Assembler::LoadStore));
2800 }
2801 
2802 void TemplateTable::getfield(int byte_no) {
2803   getfield_or_static(byte_no, false);
2804 }
2805 
2806 void TemplateTable::nofast_getfield(int byte_no) {
2807   getfield_or_static(byte_no, false, may_not_rewrite);
2808 }
2809 
2810 void TemplateTable::getstatic(int byte_no) {
2811   getfield_or_static(byte_no, true);
2812 }
2813 
2814 
2815 // The registers cache and index are expected to be set before the call.
2816 // The function may destroy various registers, just not the cache and index registers.
2817 void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is_static) {
2818 
2819   const Register robj = LP64_ONLY(c_rarg2)   NOT_LP64(rax);
2820   const Register RBX  = LP64_ONLY(c_rarg1)   NOT_LP64(rbx);
2821   const Register RCX  = LP64_ONLY(c_rarg3)   NOT_LP64(rcx);
2822   const Register RDX  = LP64_ONLY(rscratch1) NOT_LP64(rdx);
2823 
2824   ByteSize cp_base_offset = ConstantPoolCache::base_offset();
2825 
2826   if (JvmtiExport::can_post_field_modification()) {
2827     // Check to see if a field modification watch has been set before
2828     // we take the time to call into the VM.
2829     Label L1;


2881 #endif
2882     }
2883     // cache entry pointer
2884     __ addptr(robj, in_bytes(cp_base_offset));
2885     __ shll(RDX, LogBytesPerWord);
2886     __ addptr(robj, RDX);
2887     // object (tos)
2888     __ mov(RCX, rsp);
2889     // c_rarg1: object pointer set up above (NULL if static)
2890     // c_rarg2: cache entry pointer
2891     // c_rarg3: jvalue object on the stack
2892     __ call_VM(noreg,
2893                CAST_FROM_FN_PTR(address,
2894                                 InterpreterRuntime::post_field_modification),
2895                RBX, robj, RCX);
2896     __ get_cache_and_index_at_bcp(cache, index, 1);
2897     __ bind(L1);
2898   }
2899 }
2900 
2901 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2902   transition(vtos, vtos);
2903 
2904   const Register cache = rcx;
2905   const Register index = rdx;
2906   const Register obj   = rcx;
2907   const Register off   = rbx;
2908   const Register flags = rax;
2909   const Register bc    = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
2910 
2911   resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
2912   jvmti_post_field_mod(cache, index, is_static);
2913   load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
2914 
2915   // [jk] not needed currently
2916   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
2917   //                                              Assembler::StoreStore));
2918 
2919   Label notVolatile, Done;
2920   __ movl(rdx, flags);
2921   __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
2922   __ andl(rdx, 0x1);
2923 
2924   // field addresses
2925   const Address field(obj, off, Address::times_1, 0*wordSize);
2926   NOT_LP64( const Address hi(obj, off, Address::times_1, 1*wordSize);)
2927 
2928   Label notByte, notInt, notShort, notChar,
2929         notLong, notFloat, notObj, notDouble;
2930 
2931   __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
2932 
2933   assert(btos == 0, "change code, btos != 0");
2934   __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
2935   __ jcc(Assembler::notZero, notByte);
2936 
2937   // btos
2938   {
2939     __ pop(btos);
2940     if (!is_static) pop_and_check_object(obj);
2941     __ movb(field, rax);
2942     if (!is_static && rc == may_rewrite) {
2943       patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
2944     }
2945     __ jmp(Done);
2946   }
2947 
2948   __ bind(notByte);
2949   __ cmpl(flags, atos);
2950   __ jcc(Assembler::notEqual, notObj);
2951 
2952   // atos
2953   {
2954     __ pop(atos);
2955     if (!is_static) pop_and_check_object(obj);
2956     // Store into the field
2957     do_oop_store(_masm, field, rax, _bs->kind(), false);
2958     if (!is_static && rc == may_rewrite) {
2959       patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
2960     }
2961     __ jmp(Done);
2962   }
2963 
2964   __ bind(notObj);
2965   __ cmpl(flags, itos);
2966   __ jcc(Assembler::notEqual, notInt);
2967 
2968   // itos
2969   {
2970     __ pop(itos);
2971     if (!is_static) pop_and_check_object(obj);
2972     __ movl(field, rax);
2973     if (!is_static && rc == may_rewrite) {
2974       patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
2975     }
2976     __ jmp(Done);
2977   }
2978 
2979   __ bind(notInt);
2980   __ cmpl(flags, ctos);
2981   __ jcc(Assembler::notEqual, notChar);
2982 
2983   // ctos
2984   {
2985     __ pop(ctos);
2986     if (!is_static) pop_and_check_object(obj);
2987     __ movw(field, rax);
2988     if (!is_static && rc == may_rewrite) {
2989       patch_bytecode(Bytecodes::_fast_cputfield, bc, rbx, true, byte_no);
2990     }
2991     __ jmp(Done);
2992   }
2993 
2994   __ bind(notChar);
2995   __ cmpl(flags, stos);
2996   __ jcc(Assembler::notEqual, notShort);
2997 
2998   // stos
2999   {
3000     __ pop(stos);
3001     if (!is_static) pop_and_check_object(obj);
3002     __ movw(field, rax);
3003     if (!is_static && rc == may_rewrite) {
3004       patch_bytecode(Bytecodes::_fast_sputfield, bc, rbx, true, byte_no);
3005     }
3006     __ jmp(Done);
3007   }
3008 
3009   __ bind(notShort);
3010   __ cmpl(flags, ltos);
3011   __ jcc(Assembler::notEqual, notLong);
3012 
3013   // ltos
3014 #ifdef _LP64
3015   {
3016     __ pop(ltos);
3017     if (!is_static) pop_and_check_object(obj);
3018     __ movq(field, rax);
3019     if (!is_static && rc == may_rewrite) {
3020       patch_bytecode(Bytecodes::_fast_lputfield, bc, rbx, true, byte_no);
3021     }
3022     __ jmp(Done);
3023   }
3024 #else
3025   {
3026     Label notVolatileLong;
3027     __ testl(rdx, rdx);
3028     __ jcc(Assembler::zero, notVolatileLong);
3029 
3030     __ pop(ltos);  // overwrites rdx, do this after testing volatile.
3031     if (!is_static) pop_and_check_object(obj);
3032 
3033     // Replace with real volatile test
3034     __ push(rdx);
3035     __ push(rax);                 // Must update atomically with FIST
3036     __ fild_d(Address(rsp,0));    // So load into FPU register
3037     __ fistp_d(field);            // and put into memory atomically
3038     __ addptr(rsp, 2*wordSize);
3039     // volatile_barrier();


3046 
3047     __ pop(ltos);  // overwrites rdx
3048     if (!is_static) pop_and_check_object(obj);
3049     __ movptr(hi, rdx);
3050     __ movptr(field, rax);
3051     // Don't rewrite to _fast_lputfield for potential volatile case.
3052     __ jmp(notVolatile);
3053   }
3054 #endif // _LP64
3055 
3056   __ bind(notLong);
3057   __ cmpl(flags, ftos);
3058   __ jcc(Assembler::notEqual, notFloat);
3059 
3060   // ftos
3061   {
3062     __ pop(ftos);
3063     if (!is_static) pop_and_check_object(obj);
3064     NOT_LP64( __ fstp_s(field);)
3065     LP64_ONLY( __ movflt(field, xmm0);)
3066     if (!is_static && rc == may_rewrite) {
3067       patch_bytecode(Bytecodes::_fast_fputfield, bc, rbx, true, byte_no);
3068     }
3069     __ jmp(Done);
3070   }
3071 
3072   __ bind(notFloat);
3073 #ifdef ASSERT
3074   __ cmpl(flags, dtos);
3075   __ jcc(Assembler::notEqual, notDouble);
3076 #endif
3077 
3078   // dtos
3079   {
3080     __ pop(dtos);
3081     if (!is_static) pop_and_check_object(obj);
3082     NOT_LP64( __ fstp_d(field);)
3083     LP64_ONLY( __ movdbl(field, xmm0);)
3084     if (!is_static && rc == may_rewrite) {
3085       patch_bytecode(Bytecodes::_fast_dputfield, bc, rbx, true, byte_no);
3086     }
3087   }
3088 
3089 #ifdef ASSERT
3090   __ jmp(Done);
3091 
3092   __ bind(notDouble);
3093   __ stop("Bad state");
3094 #endif
3095 
3096   __ bind(Done);
3097 
3098   // Check for volatile store
3099   __ testl(rdx, rdx);
3100   __ jcc(Assembler::zero, notVolatile);
3101   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3102                                                Assembler::StoreStore));
3103   __ bind(notVolatile);
3104 }
3105 
3106 void TemplateTable::putfield(int byte_no) {
3107   putfield_or_static(byte_no, false);
3108 }
3109 
3110 void TemplateTable::nofast_putfield(int byte_no) {
3111   putfield_or_static(byte_no, false, may_not_rewrite);
3112 }
3113 
3114 void TemplateTable::putstatic(int byte_no) {
3115   putfield_or_static(byte_no, true);
3116 }
3117 
3118 void TemplateTable::jvmti_post_fast_field_mod() {
3119 
3120   const Register scratch = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3121 
3122   if (JvmtiExport::can_post_field_modification()) {
3123     // Check to see if a field modification watch has been set before
3124     // we take the time to call into the VM.
3125     Label L2;
3126     __ mov32(scratch, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3127     __ testl(scratch, scratch);
3128     __ jcc(Assembler::zero, L2);
3129     __ pop_ptr(rbx);                  // copy the object pointer from tos
3130     __ verify_oop(rbx);
3131     __ push_ptr(rbx);                 // put the object pointer back on tos
3132     // Save tos values before call_VM() clobbers them. Since we have
3133     // to do it for every data type, we use the saved values as the