2858 }
2859
// Generate the interpreter template for getfield/getstatic (and their
// nofast variants): resolve the field through the constant pool cache,
// dispatch on the cached tos_state and emit a type-specific heap load,
// then (when rc == may_rewrite and this is an instance field) patch the
// bytecode to its _fast_Xgetfield form for subsequent executions.
2860 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2861 transition(vtos, vtos);
2862
2863 const Register cache = rcx;
2864 const Register index = rdx;
2865 const Register obj = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
2866 const Register off = rbx;
2867 const Register flags = rax;
2868 const Register bc = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // uses same reg as obj, so don't mix them
2869
2870 resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
// Report the access to JVMTI first if a field-access watch is set.
2871 jvmti_post_field_access(cache, index, is_static, false);
2872 load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
2873
// Instance field: pop the receiver off the expression stack and null-check it.
2874 if (!is_static) pop_and_check_object(obj);
2875
2876 const Address field(obj, off, Address::times_1, 0*wordSize);
2877
2878 Label Done, notByte, notBool, notInt, notShort, notChar, notLong, notFloat, notObj, notDouble;
2879
// Isolate the tos_state (field-type) bits of the cached flags word.
2880 __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
2881 // Make sure we don't need to mask edx after the above shift
2882 assert(btos == 0, "change code, btos != 0");
2883
2884 __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
2885
// btos == 0 (asserted above), so a plain zero test selects the byte case.
2886 __ jcc(Assembler::notZero, notByte);
2887 // btos
2888 __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
2889 __ push(btos);
2890 // Rewrite bytecode to be faster
2891 if (!is_static && rc == may_rewrite) {
2892 patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2893 }
2894 __ jmp(Done);
2895
2896 __ bind(notByte);
2897 __ cmpl(flags, ztos);
2898 __ jcc(Assembler::notEqual, notBool);
// NOTE(review): the ztos/itos/atos/ctos/stos (and the start of the ltos)
// cases fall between source lines 2898 and 2964 and are not visible in
// this chunk — the elision is a view artifact, not a code change.
2964 __ push(ltos);
2965 // Rewrite bytecode to be faster
// Long getfield is only rewritten to the fast form on 64-bit.
2966 LP64_ONLY(if (!is_static && rc == may_rewrite) patch_bytecode(Bytecodes::_fast_lgetfield, bc, rbx));
2967 __ jmp(Done);
2968
2969 __ bind(notLong);
2970 __ cmpl(flags, ftos);
2971 __ jcc(Assembler::notEqual, notFloat);
2972 // ftos
2973
2974 __ access_load_at(T_FLOAT, IN_HEAP, noreg /* ftos */, field, noreg, noreg);
2975 __ push(ftos);
2976 // Rewrite bytecode to be faster
2977 if (!is_static && rc == may_rewrite) {
2978 patch_bytecode(Bytecodes::_fast_fgetfield, bc, rbx);
2979 }
2980 __ jmp(Done);
2981
2982 __ bind(notFloat);
// In product builds anything that reaches here is assumed to be dtos;
// debug builds verify that and stop on any other state.
2983 #ifdef ASSERT
2984 __ cmpl(flags, dtos);
2985 __ jcc(Assembler::notEqual, notDouble);
2986 #endif
2987 // dtos
2988 __ access_load_at(T_DOUBLE, IN_HEAP, noreg /* dtos */, field, noreg, noreg);
2989 __ push(dtos);
2990 // Rewrite bytecode to be faster
2991 if (!is_static && rc == may_rewrite) {
2992 patch_bytecode(Bytecodes::_fast_dgetfield, bc, rbx);
2993 }
2994 #ifdef ASSERT
2995 __ jmp(Done);
2996
2997
2998 __ bind(notDouble);
2999 __ stop("Bad state");
3000 #endif
3001
3002 __ bind(Done);
3003 // [jk] not needed currently
3004 // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
3005 // Assembler::LoadStore));
3006 }
3007
// getfield: instance-field load with bytecode rewriting enabled
// (rc defaults to the rewriting mode — confirmed by the nofast variant
// below passing may_not_rewrite explicitly).
3008 void TemplateTable::getfield(int byte_no) {
3009 getfield_or_static(byte_no, false);
3010 }
3011
// nofast_getfield: same as getfield but never patches the bytecode to a
// _fast_ form (may_not_rewrite).
3012 void TemplateTable::nofast_getfield(int byte_no) {
3013 getfield_or_static(byte_no, false, may_not_rewrite);
3014 }
3015
3016 void TemplateTable::getstatic(int byte_no) {
3017 getfield_or_static(byte_no, true);
3095 // c_rarg1: object pointer set up above (NULL if static)
3096 // c_rarg2: cache entry pointer
3097 // c_rarg3: jvalue object on the stack
3098 __ call_VM(noreg,
3099 CAST_FROM_FN_PTR(address,
3100 InterpreterRuntime::post_field_modification),
3101 RBX, robj, RCX);
3102 __ get_cache_and_index_at_bcp(cache, index, 1);
3103 __ bind(L1);
3104 }
3105 }
3106
// Generate the interpreter template for putfield/putstatic (and nofast
// variants): resolve the field through the constant pool cache, capture
// the volatile bit in rdx, dispatch on the cached tos_state to emit a
// type-specific heap store, optionally rewrite the bytecode to its
// _fast_Xputfield form, and finish with a StoreLoad|StoreStore barrier
// when the field is volatile.
3107 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
3108 transition(vtos, vtos);
3109
3110 const Register cache = rcx;
3111 const Register index = rdx;
3112 const Register obj = rcx;
3113 const Register off = rbx;
3114 const Register flags = rax;
3115 const Register bc = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3116
3117 resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
// Report the modification to JVMTI first if a field-modification watch is set.
3118 jvmti_post_field_mod(cache, index, is_static);
3119 load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
3120
3121 // [jk] not needed currently
3122 // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
3123 // Assembler::StoreStore));
3124
3125 Label notVolatile, Done;
// rdx = is_volatile bit of the flags word; tested again after the store.
3126 __ movl(rdx, flags);
3127 __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3128 __ andl(rdx, 0x1);
3129
3130 // field addresses
3131 const Address field(obj, off, Address::times_1, 0*wordSize);
// On 32-bit, two-word values also need the high-word address.
3132 NOT_LP64( const Address hi(obj, off, Address::times_1, 1*wordSize);)
3133
3134 Label notByte, notBool, notInt, notShort, notChar,
3135 notLong, notFloat, notObj, notDouble;
3136
// Isolate the tos_state (field-type) bits of the cached flags word.
3137 __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
3138
3139 assert(btos == 0, "change code, btos != 0");
3140 __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
// btos == 0, so a plain zero test selects the byte case.
3141 __ jcc(Assembler::notZero, notByte);
3142
3143 // btos
3144 {
// Each case pops the value first, then the receiver (for instance fields).
3145 __ pop(btos);
3146 if (!is_static) pop_and_check_object(obj);
3147 __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg);
3148 if (!is_static && rc == may_rewrite) {
3149 patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
3150 }
3151 __ jmp(Done);
3152 }
3153
3154 __ bind(notByte);
3155 __ cmpl(flags, ztos);
// NOTE(review): the ztos/atos/itos/ctos cases fall between source lines
// 3155 and 3215 and are not visible in this chunk — the elision is a
// view artifact, not a code change.
3215 __ bind(notChar);
3216 __ cmpl(flags, stos);
3217 __ jcc(Assembler::notEqual, notShort);
3218
3219 // stos
3220 {
3221 __ pop(stos);
3222 if (!is_static) pop_and_check_object(obj);
3223 __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg);
3224 if (!is_static && rc == may_rewrite) {
3225 patch_bytecode(Bytecodes::_fast_sputfield, bc, rbx, true, byte_no);
3226 }
3227 __ jmp(Done);
3228 }
3229
3230 __ bind(notShort);
3231 __ cmpl(flags, ltos);
3232 __ jcc(Assembler::notEqual, notLong);
3233
3234 // ltos
3235 #ifdef _LP64
3236 {
3237 __ pop(ltos);
3238 if (!is_static) pop_and_check_object(obj);
3239 __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos*/, noreg, noreg);
3240 if (!is_static && rc == may_rewrite) {
3241 patch_bytecode(Bytecodes::_fast_lputfield, bc, rbx, true, byte_no);
3242 }
3243 __ jmp(Done);
3244 }
3245 #else
// 32-bit: the volatile test must run before pop(ltos) because the popped
// long occupies rdx; both long paths exit via notVolatile (the barrier,
// if needed, was already emitted here) and are never rewritten.
3246 {
3247 Label notVolatileLong;
3248 __ testl(rdx, rdx);
3249 __ jcc(Assembler::zero, notVolatileLong);
3250
3251 __ pop(ltos); // overwrites rdx, do this after testing volatile.
3252 if (!is_static) pop_and_check_object(obj);
3253
3254 // Replace with real volatile test
3255 __ access_store_at(T_LONG, IN_HEAP | MO_RELAXED, field, noreg /* ltos */, noreg, noreg);
3256 // volatile_barrier();
3257 volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3258 Assembler::StoreStore));
3259 // Don't rewrite volatile version
3260 __ jmp(notVolatile);
3261
3262 __ bind(notVolatileLong);
3263
3264 __ pop(ltos); // overwrites rdx
3265 if (!is_static) pop_and_check_object(obj);
3266 __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg);
3267 // Don't rewrite to _fast_lputfield for potential volatile case.
3268 __ jmp(notVolatile);
3269 }
3270 #endif // _LP64
3271
3272 __ bind(notLong);
3273 __ cmpl(flags, ftos);
3274 __ jcc(Assembler::notEqual, notFloat);
3275
3276 // ftos
3277 {
3278 __ pop(ftos);
3279 if (!is_static) pop_and_check_object(obj);
3280 __ access_store_at(T_FLOAT, IN_HEAP, field, noreg /* ftos */, noreg, noreg);
3281 if (!is_static && rc == may_rewrite) {
3282 patch_bytecode(Bytecodes::_fast_fputfield, bc, rbx, true, byte_no);
3283 }
3284 __ jmp(Done);
3285 }
3286
3287 __ bind(notFloat);
// In product builds anything that reaches here is assumed to be dtos;
// debug builds verify that and stop on any other state.
3288 #ifdef ASSERT
3289 __ cmpl(flags, dtos);
3290 __ jcc(Assembler::notEqual, notDouble);
3291 #endif
3292
3293 // dtos
3294 {
3295 __ pop(dtos);
3296 if (!is_static) pop_and_check_object(obj);
3297 __ access_store_at(T_DOUBLE, IN_HEAP, field, noreg /* dtos */, noreg, noreg);
3298 if (!is_static && rc == may_rewrite) {
3299 patch_bytecode(Bytecodes::_fast_dputfield, bc, rbx, true, byte_no);
3300 }
3301 }
3302
3303 #ifdef ASSERT
3304 __ jmp(Done);
3305
3306 __ bind(notDouble);
3307 __ stop("Bad state");
3308 #endif
3309
3310 __ bind(Done);
3311
3312 // Check for volatile store
// rdx still holds the is_volatile bit extracted above (the 32-bit long
// paths that clobbered rdx bypass this check via notVolatile).
3313 __ testl(rdx, rdx);
3314 __ jcc(Assembler::zero, notVolatile);
3315 volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3316 Assembler::StoreStore));
3317 __ bind(notVolatile);
3318 }
3319
// putfield: instance-field store with bytecode rewriting enabled
// (rc defaults to the rewriting mode — confirmed by the nofast variant
// below passing may_not_rewrite explicitly).
3320 void TemplateTable::putfield(int byte_no) {
3321 putfield_or_static(byte_no, false);
3322 }
3323
// nofast_putfield: same as putfield but never patches the bytecode to a
// _fast_ form (may_not_rewrite).
3324 void TemplateTable::nofast_putfield(int byte_no) {
3325 putfield_or_static(byte_no, false, may_not_rewrite);
3326 }
3327
// putstatic: static-field store; no receiver is popped and fast-path
// rewriting is skipped inside putfield_or_static (is_static == true).
3328 void TemplateTable::putstatic(int byte_no) {
3329 putfield_or_static(byte_no, true);
3330 }
3331
3332 void TemplateTable::jvmti_post_fast_field_mod() {
3333
3334 const Register scratch = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3335
3336 if (JvmtiExport::can_post_field_modification()) {
3337 // Check to see if a field modification watch has been set before
3393 ByteSize base = ConstantPoolCache::base_offset();
3394
3395 jvmti_post_fast_field_mod();
3396
3397 // access constant pool cache
3398 __ get_cache_and_index_at_bcp(rcx, rbx, 1);
3399
3400 // test for volatile with rdx but rdx is tos register for lputfield.
3401 __ movl(rdx, Address(rcx, rbx, Address::times_ptr,
3402 in_bytes(base +
3403 ConstantPoolCacheEntry::flags_offset())));
3404
3405 // replace index with field offset from cache entry
3406 __ movptr(rbx, Address(rcx, rbx, Address::times_ptr,
3407 in_bytes(base + ConstantPoolCacheEntry::f2_offset())));
3408
3409 // [jk] not needed currently
3410 // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
3411 // Assembler::StoreStore));
3412
3413 Label notVolatile;
3414 __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3415 __ andl(rdx, 0x1);
3416
3417 // Get object from stack
3418 pop_and_check_object(rcx);
3419
3420 // field address
3421 const Address field(rcx, rbx, Address::times_1);
3422
3423 // access field
3424 switch (bytecode()) {
3425 case Bytecodes::_fast_aputfield:
3426 do_oop_store(_masm, field, rax);
3427 break;
3428 case Bytecodes::_fast_lputfield:
3429 #ifdef _LP64
3430 __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg);
3431 #else
3432 __ stop("should not be rewritten");
3433 #endif
3434 break;
3435 case Bytecodes::_fast_iputfield:
3436 __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg);
3437 break;
3438 case Bytecodes::_fast_zputfield:
3439 __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg);
3440 break;
3441 case Bytecodes::_fast_bputfield:
3442 __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg);
3443 break;
3444 case Bytecodes::_fast_sputfield:
3445 __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg);
3446 break;
3447 case Bytecodes::_fast_cputfield:
3448 __ access_store_at(T_CHAR, IN_HEAP, field, rax, noreg, noreg);
3449 break;
3450 case Bytecodes::_fast_fputfield:
3451 __ access_store_at(T_FLOAT, IN_HEAP, field, noreg /* ftos*/, noreg, noreg);
3452 break;
3453 case Bytecodes::_fast_dputfield:
3454 __ access_store_at(T_DOUBLE, IN_HEAP, field, noreg /* dtos*/, noreg, noreg);
3455 break;
3456 default:
3457 ShouldNotReachHere();
3458 }
3459
3460 // Check for volatile store
3461 __ testl(rdx, rdx);
3462 __ jcc(Assembler::zero, notVolatile);
3463 volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3464 Assembler::StoreStore));
3465 __ bind(notVolatile);
3466 }
3467
3468 void TemplateTable::fast_accessfield(TosState state) {
3469 transition(atos, state);
3470
3471 // Do the JVMTI work here to avoid disturbing the register state below
3472 if (JvmtiExport::can_post_field_access()) {
3473 // Check to see if a field access watch has been set before we
3474 // take the time to call into the VM.
3475 Label L1;
3476 __ mov32(rcx, ExternalAddress((address) JvmtiExport::get_field_access_count_addr()));
3477 __ testl(rcx, rcx);
3478 __ jcc(Assembler::zero, L1);
3479 // access constant pool cache entry
3480 LP64_ONLY(__ get_cache_entry_pointer_at_bcp(c_rarg2, rcx, 1));
3481 NOT_LP64(__ get_cache_entry_pointer_at_bcp(rcx, rdx, 1));
3482 __ verify_oop(rax);
3483 __ push_ptr(rax); // save object pointer before call_VM() clobbers it
3484 LP64_ONLY(__ mov(c_rarg1, rax));
3485 // c_rarg1: object pointer copied above
|
2858 }
2859
// Generate the interpreter template for getfield/getstatic (and their
// nofast variants): resolve the field through the constant pool cache,
// dispatch on the cached tos_state and emit a type-specific heap load,
// then (when rc == may_rewrite and this is an instance field) patch the
// bytecode to its _fast_Xgetfield form for subsequent executions.
2860 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2861 transition(vtos, vtos);
2862
2863 const Register cache = rcx;
2864 const Register index = rdx;
2865 const Register obj = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
2866 const Register off = rbx;
2867 const Register flags = rax;
2868 const Register bc = LP64_ONLY(c_rarg3) NOT_LP64(rcx); // uses same reg as obj, so don't mix them
2869
2870 resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
// Report the access to JVMTI first if a field-access watch is set.
2871 jvmti_post_field_access(cache, index, is_static, false);
2872 load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
2873
// Instance field: pop the receiver off the expression stack and null-check it.
2874 if (!is_static) pop_and_check_object(obj);
2875
2876 const Address field(obj, off, Address::times_1, 0*wordSize);
2877
// notDouble is declared inside the ASSERT block below, where it is used.
2878 Label Done, notByte, notBool, notInt, notShort, notChar, notLong, notFloat, notObj;
2879
// Isolate the tos_state (field-type) bits of the cached flags word.
2880 __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
2881 // Make sure we don't need to mask edx after the above shift
2882 assert(btos == 0, "change code, btos != 0");
2883
2884 __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
2885
// btos == 0 (asserted above), so a plain zero test selects the byte case.
2886 __ jcc(Assembler::notZero, notByte);
2887 // btos
2888 __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
2889 __ push(btos);
2890 // Rewrite bytecode to be faster
2891 if (!is_static && rc == may_rewrite) {
2892 patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2893 }
2894 __ jmp(Done);
2895
2896 __ bind(notByte);
2897 __ cmpl(flags, ztos);
2898 __ jcc(Assembler::notEqual, notBool);
// NOTE(review): the ztos/itos/atos/ctos/stos (and the start of the ltos)
// cases fall between source lines 2898 and 2964 and are not visible in
// this chunk — the elision is a view artifact, not a code change.
2964 __ push(ltos);
2965 // Rewrite bytecode to be faster
// Long getfield is only rewritten to the fast form on 64-bit.
2966 LP64_ONLY(if (!is_static && rc == may_rewrite) patch_bytecode(Bytecodes::_fast_lgetfield, bc, rbx));
2967 __ jmp(Done);
2968
2969 __ bind(notLong);
2970 __ cmpl(flags, ftos);
2971 __ jcc(Assembler::notEqual, notFloat);
2972 // ftos
2973
2974 __ access_load_at(T_FLOAT, IN_HEAP, noreg /* ftos */, field, noreg, noreg);
2975 __ push(ftos);
2976 // Rewrite bytecode to be faster
2977 if (!is_static && rc == may_rewrite) {
2978 patch_bytecode(Bytecodes::_fast_fgetfield, bc, rbx);
2979 }
2980 __ jmp(Done);
2981
2982 __ bind(notFloat);
// In product builds anything that reaches here is assumed to be dtos;
// debug builds verify that and stop on any other state.
2983 #ifdef ASSERT
2984 Label notDouble;
2985 __ cmpl(flags, dtos);
2986 __ jcc(Assembler::notEqual, notDouble);
2987 #endif
2988 // dtos
2989 __ access_load_at(T_DOUBLE, IN_HEAP, noreg /* dtos */, field, noreg, noreg);
2990 __ push(dtos);
2991 // Rewrite bytecode to be faster
2992 if (!is_static && rc == may_rewrite) {
2993 patch_bytecode(Bytecodes::_fast_dgetfield, bc, rbx);
2994 }
2995 #ifdef ASSERT
2996 __ jmp(Done);
2997
2998 __ bind(notDouble);
2999 __ stop("Bad state");
3000 #endif
3001
3002 __ bind(Done);
3003 // [jk] not needed currently
3004 // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
3005 // Assembler::LoadStore));
3006 }
3007
// getfield: instance-field load with bytecode rewriting enabled
// (rc defaults to the rewriting mode — confirmed by the nofast variant
// below passing may_not_rewrite explicitly).
3008 void TemplateTable::getfield(int byte_no) {
3009 getfield_or_static(byte_no, false);
3010 }
3011
// nofast_getfield: same as getfield but never patches the bytecode to a
// _fast_ form (may_not_rewrite).
3012 void TemplateTable::nofast_getfield(int byte_no) {
3013 getfield_or_static(byte_no, false, may_not_rewrite);
3014 }
3015
3016 void TemplateTable::getstatic(int byte_no) {
3017 getfield_or_static(byte_no, true);
3095 // c_rarg1: object pointer set up above (NULL if static)
3096 // c_rarg2: cache entry pointer
3097 // c_rarg3: jvalue object on the stack
3098 __ call_VM(noreg,
3099 CAST_FROM_FN_PTR(address,
3100 InterpreterRuntime::post_field_modification),
3101 RBX, robj, RCX);
3102 __ get_cache_and_index_at_bcp(cache, index, 1);
3103 __ bind(L1);
3104 }
3105 }
3106
// Generate the interpreter template for putfield/putstatic (and nofast
// variants).  Resolves the field via the constant pool cache, extracts
// the is_volatile bit into rdx, then emits the type-dispatched store
// TWICE via putfield_or_static_helper: once on the volatile path
// (followed by a StoreLoad|StoreStore barrier) and once on the
// non-volatile path.  Splitting the paths up front keeps the volatility
// check out of the per-type store code in the helper.
3107 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
3108 transition(vtos, vtos);
3109
3110 const Register cache = rcx;
3111 const Register index = rdx;
3112 const Register obj = rcx;
3113 const Register off = rbx;
3114 const Register flags = rax;
3115
3116 resolve_cache_and_index(byte_no, cache, index, sizeof(u2));
// Report the modification to JVMTI first if a field-modification watch is set.
3117 jvmti_post_field_mod(cache, index, is_static);
3118 load_field_cp_cache_entry(obj, cache, index, off, flags, is_static);
3119
3120 // [jk] not needed currently
3121 // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
3122 // Assembler::StoreStore));
3123
3124 Label notVolatile, Done;
// rdx = is_volatile bit of the flags word.
3125 __ movl(rdx, flags);
3126 __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3127 __ andl(rdx, 0x1);
3128
3129 // Check for volatile store
3130 __ testl(rdx, rdx);
3131 __ jcc(Assembler::zero, notVolatile);
3132
// Volatile path: store, then fence stores against later loads/stores.
3133 putfield_or_static_helper(byte_no, is_static, rc, obj, off, flags);
3134 volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3135 Assembler::StoreStore));
3136 __ jmp(Done);
3137 __ bind(notVolatile);
3138
// Non-volatile path: plain store, no barrier.
3139 putfield_or_static_helper(byte_no, is_static, rc, obj, off, flags);
3140
3141 __ bind(Done);
3142 }
3143
// Emit the type-dispatched field store for putfield/putstatic.  flags
// holds the cached cp-cache flags word; the tos_state bits select which
// store to emit.  Each case pops the value from the expression stack,
// pops/null-checks the receiver for instance fields, performs the heap
// store, and optionally rewrites the bytecode to its _fast_Xputfield
// form.  Called twice by putfield_or_static (volatile and non-volatile
// paths), so everything here must be path-independent.
3144 void TemplateTable::putfield_or_static_helper(int byte_no, bool is_static, RewriteControl rc,
3145                                               Register obj, Register off, Register flags) {
3146
3147   // field addresses
3148   const Address field(obj, off, Address::times_1, 0*wordSize);
// On 32-bit, two-word values also need the high-word address.
3149   NOT_LP64( const Address hi(obj, off, Address::times_1, 1*wordSize);)
3150
3151   Label notByte, notBool, notInt, notShort, notChar,
3152         notLong, notFloat, notObj;
3153   Label Done;
3154
3155   const Register bc = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3156
// Isolate the tos_state (field-type) bits of the flags word.
3157   __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);
3158
3159   assert(btos == 0, "change code, btos != 0");
3160   __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);
// btos == 0, so a plain zero test selects the byte case.
3161   __ jcc(Assembler::notZero, notByte);
3162
3163   // btos
3164   {
3165     __ pop(btos);
3166     if (!is_static) pop_and_check_object(obj);
3167     __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg);
3168     if (!is_static && rc == may_rewrite) {
3169       patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
3170     }
3171     __ jmp(Done);
3172   }
3173
3174   __ bind(notByte);
3175   __ cmpl(flags, ztos);
// NOTE(review): the ztos/atos/itos/ctos cases fall between source lines
// 3175 and 3235 and are not visible in this chunk — the elision is a
// view artifact, not a code change.
3235   __ bind(notChar);
3236   __ cmpl(flags, stos);
3237   __ jcc(Assembler::notEqual, notShort);
3238
3239   // stos
3240   {
3241     __ pop(stos);
3242     if (!is_static) pop_and_check_object(obj);
3243     __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg);
3244     if (!is_static && rc == may_rewrite) {
3245       patch_bytecode(Bytecodes::_fast_sputfield, bc, rbx, true, byte_no);
3246     }
3247     __ jmp(Done);
3248   }
3249
3250   __ bind(notShort);
3251   __ cmpl(flags, ltos);
3252   __ jcc(Assembler::notEqual, notLong);
3253
3254   // ltos
3255   {
3256     __ pop(ltos);
3257     if (!is_static) pop_and_check_object(obj);
3258     __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos*/, noreg, noreg);
3259 #ifdef _LP64
// Long putfield is only rewritten to the fast form on 64-bit.
3260     if (!is_static && rc == may_rewrite) {
3261       patch_bytecode(Bytecodes::_fast_lputfield, bc, rbx, true, byte_no);
3262     }
3263 #endif // _LP64
3264     __ jmp(Done);
3265   }
3266
3267   __ bind(notLong);
3268   __ cmpl(flags, ftos);
3269   __ jcc(Assembler::notEqual, notFloat);
3270
3271   // ftos
3272   {
3273     __ pop(ftos);
3274     if (!is_static) pop_and_check_object(obj);
3275     __ access_store_at(T_FLOAT, IN_HEAP, field, noreg /* ftos */, noreg, noreg);
3276     if (!is_static && rc == may_rewrite) {
3277       patch_bytecode(Bytecodes::_fast_fputfield, bc, rbx, true, byte_no);
3278     }
3279     __ jmp(Done);
3280   }
3281
3282   __ bind(notFloat);
// In product builds anything that reaches here is assumed to be dtos;
// debug builds verify that and stop on any other state.
3283 #ifdef ASSERT
3284   Label notDouble;
3285   __ cmpl(flags, dtos);
3286   __ jcc(Assembler::notEqual, notDouble);
3287 #endif
3288
3289   // dtos
3290   {
3291     __ pop(dtos);
3292     if (!is_static) pop_and_check_object(obj);
3293     __ access_store_at(T_DOUBLE, IN_HEAP, field, noreg /* dtos */, noreg, noreg);
3294     if (!is_static && rc == may_rewrite) {
3295       patch_bytecode(Bytecodes::_fast_dputfield, bc, rbx, true, byte_no);
3296     }
3297   }
3298
3299 #ifdef ASSERT
3300   __ jmp(Done);
3301
3302   __ bind(notDouble);
3303   __ stop("Bad state");
3304 #endif
3305
3306   __ bind(Done);
3307 }
3308
// putfield: instance-field store with bytecode rewriting enabled
// (rc defaults to the rewriting mode — confirmed by the nofast variant
// below passing may_not_rewrite explicitly).
3309 void TemplateTable::putfield(int byte_no) {
3310 putfield_or_static(byte_no, false);
3311 }
3312
// nofast_putfield: same as putfield but never patches the bytecode to a
// _fast_ form (may_not_rewrite).
3313 void TemplateTable::nofast_putfield(int byte_no) {
3314 putfield_or_static(byte_no, false, may_not_rewrite);
3315 }
3316
// putstatic: static-field store; no receiver is popped and fast-path
// rewriting is skipped inside putfield_or_static (is_static == true).
3317 void TemplateTable::putstatic(int byte_no) {
3318 putfield_or_static(byte_no, true);
3319 }
3320
3321 void TemplateTable::jvmti_post_fast_field_mod() {
3322
3323 const Register scratch = LP64_ONLY(c_rarg3) NOT_LP64(rcx);
3324
3325 if (JvmtiExport::can_post_field_modification()) {
3326 // Check to see if a field modification watch has been set before
3382 ByteSize base = ConstantPoolCache::base_offset();
3383
3384 jvmti_post_fast_field_mod();
3385
3386 // access constant pool cache
3387 __ get_cache_and_index_at_bcp(rcx, rbx, 1);
3388
3389 // test for volatile with rdx but rdx is tos register for lputfield.
3390 __ movl(rdx, Address(rcx, rbx, Address::times_ptr,
3391 in_bytes(base +
3392 ConstantPoolCacheEntry::flags_offset())));
3393
3394 // replace index with field offset from cache entry
3395 __ movptr(rbx, Address(rcx, rbx, Address::times_ptr,
3396 in_bytes(base + ConstantPoolCacheEntry::f2_offset())));
3397
3398 // [jk] not needed currently
3399 // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
3400 // Assembler::StoreStore));
3401
3402 Label notVolatile, Done;
3403 __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);
3404 __ andl(rdx, 0x1);
3405
3406 // Get object from stack
3407 pop_and_check_object(rcx);
3408
3409 // field address
3410 const Address field(rcx, rbx, Address::times_1);
3411
3412 // Check for volatile store
3413 __ testl(rdx, rdx);
3414 __ jcc(Assembler::zero, notVolatile);
3415
3416 fast_storefield_helper(field, rax);
3417 volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3418 Assembler::StoreStore));
3419 __ jmp(Done);
3420 __ bind(notVolatile);
3421
3422 fast_storefield_helper(field, rax);
3423
3424 __ bind(Done);
3425 }
3426
// Emit the single type-specific store for the current _fast_Xputfield
// bytecode (selected by bytecode(), which is fixed at template-generation
// time — the switch runs at codegen time, not at interpretation time).
// NOTE(review): the Register parameter deliberately shadows the global
// `rax` register name — callers pass rax, the tos register for the
// integral cases; the float/double/long cases ignore it and use the FP
// tos / implicit ltos.  _fast_lputfield is 64-bit only; on 32-bit it is
// never generated (guarded by __ stop below).
3427 void TemplateTable::fast_storefield_helper(Address field, Register rax) {
3428
3429   // access field
3430   switch (bytecode()) {
3431   case Bytecodes::_fast_aputfield:
// Oop store goes through the GC barrier-aware helper.
3432     do_oop_store(_masm, field, rax);
3433     break;
3434   case Bytecodes::_fast_lputfield:
3435 #ifdef _LP64
3436     __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg);
3437 #else
3438   __ stop("should not be rewritten");
3439 #endif
3440     break;
3441   case Bytecodes::_fast_iputfield:
3442     __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg);
3443     break;
3444   case Bytecodes::_fast_zputfield:
3445     __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg);
3446     break;
3447   case Bytecodes::_fast_bputfield:
3448     __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg);
3449     break;
3450   case Bytecodes::_fast_sputfield:
3451     __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg);
3452     break;
3453   case Bytecodes::_fast_cputfield:
3454     __ access_store_at(T_CHAR, IN_HEAP, field, rax, noreg, noreg);
3455     break;
3456   case Bytecodes::_fast_fputfield:
3457     __ access_store_at(T_FLOAT, IN_HEAP, field, noreg /* ftos*/, noreg, noreg);
3458     break;
3459   case Bytecodes::_fast_dputfield:
3460     __ access_store_at(T_DOUBLE, IN_HEAP, field, noreg /* dtos*/, noreg, noreg);
3461     break;
3462   default:
3463     ShouldNotReachHere();
3464   }
3465 }
3466
3467 void TemplateTable::fast_accessfield(TosState state) {
3468 transition(atos, state);
3469
3470 // Do the JVMTI work here to avoid disturbing the register state below
3471 if (JvmtiExport::can_post_field_access()) {
3472 // Check to see if a field access watch has been set before we
3473 // take the time to call into the VM.
3474 Label L1;
3475 __ mov32(rcx, ExternalAddress((address) JvmtiExport::get_field_access_count_addr()));
3476 __ testl(rcx, rcx);
3477 __ jcc(Assembler::zero, L1);
3478 // access constant pool cache entry
3479 LP64_ONLY(__ get_cache_entry_pointer_at_bcp(c_rarg2, rcx, 1));
3480 NOT_LP64(__ get_cache_entry_pointer_at_bcp(rcx, rdx, 1));
3481 __ verify_oop(rax);
3482 __ push_ptr(rax); // save object pointer before call_VM() clobbers it
3483 LP64_ONLY(__ mov(c_rarg1, rax));
3484 // c_rarg1: object pointer copied above
|