
src/cpu/x86/vm/templateTable_x86.cpp


--- old/src/cpu/x86/vm/templateTable_x86.cpp

2605 
2606 void TemplateTable::volatile_barrier(Assembler::Membar_mask_bits order_constraint ) {
2607   // Helper function to insert an is-volatile test and memory barrier
2608   if(!os::is_MP()) return;    // Not needed on single CPU
2609   __ membar(order_constraint);
2610 }
2611 
2612 void TemplateTable::resolve_cache_and_index(int byte_no,
2613                                             Register Rcache,
2614                                             Register index,
2615                                             size_t index_size) {
2616   const Register temp = rbx;
2617   assert_different_registers(Rcache, index, temp);
2618 
2619   Label resolved;
2620 
2621   Bytecodes::Code code = bytecode();
2622   switch (code) {
2623   case Bytecodes::_nofast_getfield: code = Bytecodes::_getfield; break;
2624   case Bytecodes::_nofast_putfield: code = Bytecodes::_putfield; break;

2625   }
2626 
2627   assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
2628   __ get_cache_and_index_and_bytecode_at_bcp(Rcache, index, temp, byte_no, 1, index_size);
2629   __ cmpl(temp, code);  // have we resolved this bytecode?
2630   __ jcc(Assembler::equal, resolved);
2631 
2632   // resolve first time through
2633   address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
2634   __ movl(temp, code);
2635   __ call_VM(noreg, entry, temp);
2636   // Update registers with resolved info
2637   __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size);
2638   __ bind(resolved);
2639 }
2640 
2641 // The cache and index registers must be set before the call
2642 void TemplateTable::load_field_cp_cache_entry(Register obj,
2643                                               Register cache,
2644                                               Register index,


3270     // access constant pool cache entry
3271     LP64_ONLY(__ get_cache_entry_pointer_at_bcp(c_rarg2, rax, 1));
3272     NOT_LP64(__ get_cache_entry_pointer_at_bcp(rax, rdx, 1));
3273     __ verify_oop(rbx);
3274     // rbx: object pointer copied above
3275     // c_rarg2: cache entry pointer
3276     // c_rarg3: jvalue object on the stack
3277     LP64_ONLY(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, c_rarg2, c_rarg3));
3278     NOT_LP64(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, rax, rcx));
3279 
3280     switch (bytecode()) {             // restore tos values
3281     case Bytecodes::_fast_aputfield: __ pop_ptr(rax); break;
3282     case Bytecodes::_fast_bputfield: // fall through
3283     case Bytecodes::_fast_zputfield: // fall through
3284     case Bytecodes::_fast_sputfield: // fall through
3285     case Bytecodes::_fast_cputfield: // fall through
3286     case Bytecodes::_fast_iputfield: __ pop_i(rax); break;
3287     case Bytecodes::_fast_dputfield: __ pop(dtos); break;
3288     case Bytecodes::_fast_fputfield: __ pop(ftos); break;
3289     case Bytecodes::_fast_lputfield: __ pop_l(rax); break;

3290     }
3291     __ bind(L2);
3292   }
3293 }
3294 
3295 void TemplateTable::fast_storefield(TosState state) {
3296   transition(state, vtos);
3297 
3298   ByteSize base = ConstantPoolCache::base_offset();
3299 
3300   jvmti_post_fast_field_mod();
3301 
3302   // access constant pool cache
3303   __ get_cache_and_index_at_bcp(rcx, rbx, 1);
3304 
3305   // test for volatile with rdx but rdx is tos register for lputfield.
3306   __ movl(rdx, Address(rcx, rbx, Address::times_ptr,
3307                        in_bytes(base +
3308                                 ConstantPoolCacheEntry::flags_offset())));
3309 




+++ new/src/cpu/x86/vm/templateTable_x86.cpp

2605 
2606 void TemplateTable::volatile_barrier(Assembler::Membar_mask_bits order_constraint ) {
2607   // Helper function to insert an is-volatile test and memory barrier
2608   if(!os::is_MP()) return;    // Not needed on single CPU
2609   __ membar(order_constraint);
2610 }
2611 
2612 void TemplateTable::resolve_cache_and_index(int byte_no,
2613                                             Register Rcache,
2614                                             Register index,
2615                                             size_t index_size) {
2616   const Register temp = rbx;
2617   assert_different_registers(Rcache, index, temp);
2618 
2619   Label resolved;
2620 
2621   Bytecodes::Code code = bytecode();
2622   switch (code) {
2623   case Bytecodes::_nofast_getfield: code = Bytecodes::_getfield; break;
2624   case Bytecodes::_nofast_putfield: code = Bytecodes::_putfield; break;
2625   default: break;
2626   }
2627 
2628   assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
2629   __ get_cache_and_index_and_bytecode_at_bcp(Rcache, index, temp, byte_no, 1, index_size);
2630   __ cmpl(temp, code);  // have we resolved this bytecode?
2631   __ jcc(Assembler::equal, resolved);
2632 
2633   // resolve first time through
2634   address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
2635   __ movl(temp, code);
2636   __ call_VM(noreg, entry, temp);
2637   // Update registers with resolved info
2638   __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size);
2639   __ bind(resolved);
2640 }
2641 
2642 // The cache and index registers must be set before the call
2643 void TemplateTable::load_field_cp_cache_entry(Register obj,
2644                                               Register cache,
2645                                               Register index,


3271     // access constant pool cache entry
3272     LP64_ONLY(__ get_cache_entry_pointer_at_bcp(c_rarg2, rax, 1));
3273     NOT_LP64(__ get_cache_entry_pointer_at_bcp(rax, rdx, 1));
3274     __ verify_oop(rbx);
3275     // rbx: object pointer copied above
3276     // c_rarg2: cache entry pointer
3277     // c_rarg3: jvalue object on the stack
3278     LP64_ONLY(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, c_rarg2, c_rarg3));
3279     NOT_LP64(__ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, rax, rcx));
3280 
3281     switch (bytecode()) {             // restore tos values
3282     case Bytecodes::_fast_aputfield: __ pop_ptr(rax); break;
3283     case Bytecodes::_fast_bputfield: // fall through
3284     case Bytecodes::_fast_zputfield: // fall through
3285     case Bytecodes::_fast_sputfield: // fall through
3286     case Bytecodes::_fast_cputfield: // fall through
3287     case Bytecodes::_fast_iputfield: __ pop_i(rax); break;
3288     case Bytecodes::_fast_dputfield: __ pop(dtos); break;
3289     case Bytecodes::_fast_fputfield: __ pop(ftos); break;
3290     case Bytecodes::_fast_lputfield: __ pop_l(rax); break;
3291     default: break;
3292     }
3293     __ bind(L2);
3294   }
3295 }
3296 
3297 void TemplateTable::fast_storefield(TosState state) {
3298   transition(state, vtos);
3299 
3300   ByteSize base = ConstantPoolCache::base_offset();
3301 
3302   jvmti_post_fast_field_mod();
3303 
3304   // access constant pool cache
3305   __ get_cache_and_index_at_bcp(rcx, rbx, 1);
3306 
3307   // test for volatile with rdx but rdx is tos register for lputfield.
3308   __ movl(rdx, Address(rcx, rbx, Address::times_ptr,
3309                        in_bytes(base +
3310                                 ConstantPoolCacheEntry::flags_offset())));
3311 
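The flags word loaded into rdx above packs the attribute bits of the resolved field; the code that follows this excerpt (not shown) extracts the is_volatile bit from it before deciding whether a barrier is needed. A standalone sketch of such a shift-and-mask test, with an illustrative bit position rather than the real ConstantPoolCacheEntry layout:

#include <cstdint>

// Illustrative constant only; the real position comes from
// ConstantPoolCacheEntry::is_volatile_shift.
const int is_volatile_shift = 21;

// Mirrors a generated shrl/andl pair: shift the volatile bit into
// the low position and mask off everything else.
bool is_volatile_field(uint32_t flags) {
  return ((flags >> is_volatile_shift) & 0x1) != 0;
}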

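The only code difference between the two versions on this page is the "default: break;" arm added to two switch statements (new lines 2625 and 3291). Bytecodes not listed in those switches already fell through with no action, so behavior is unchanged; the explicit arm presumably documents that intent and keeps compilers that warn about unhandled enumeration values quiet. A minimal sketch of the pattern, with a hypothetical enum standing in for Bytecodes::Code:

enum Code { code_getfield, code_putfield, code_nofast_getfield, code_nofast_putfield };

// Rewrite only the nofast variants; every other code passes through
// unchanged, and the default arm says so explicitly.
Code normalize(Code code) {
  switch (code) {
  case code_nofast_getfield: code = code_getfield; break;
  case code_nofast_putfield: code = code_putfield; break;
  default: break;
  }
  return code;
}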

< prev index next >
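volatile_barrier() at the top of this page is the helper the field-access templates use to emit a membar for volatile accesses; on a uniprocessor it is a no-op. As a rough standard-C++ analogy (not HotSpot code), the ordering a volatile field store needs at that point, assuming the usual StoreStore-before / StoreLoad-after requirement, looks like:

#include <atomic>

std::atomic<int> published{0};

// Analogy only: the release store keeps earlier writes ahead of this one
// (StoreStore), and the trailing seq_cst fence acts as the StoreLoad
// barrier that stops later reads from floating above the store.
void store_volatile_like(int value) {
  published.store(value, std::memory_order_release);
  std::atomic_thread_fence(std::memory_order_seq_cst);
}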