src/hotspot/cpu/x86/templateTable_x86.cpp

2702 // (3) Similarly, a volatile write cannot let unrelated NON-volatile
2703 //     memory refs that happen BEFORE the write float down to after the
2704 //     write.  It's OK for non-volatile memory refs that happen after the
2705 //     volatile write to float up before it.
2706 //
2707 // We only put in barriers around volatile refs (they are expensive),
2708 // not _between_ memory refs (that would require us to track the
2709 // flavor of the previous memory refs).  Requirements (2) and (3)
2710 // require some barriers before volatile stores and after volatile
2711 // loads.  These nearly cover requirement (1) but miss the
2712 // volatile-store-volatile-load case.  This final case is placed after
2713 // volatile-stores although it could just as well go before
2714 // volatile-loads.
2715 
2716 void TemplateTable::volatile_barrier(Assembler::Membar_mask_bits order_constraint ) {
2717   // Helper function to insert a memory barrier; the is-volatile test is done by the caller
2718   __ membar(order_constraint);
2719 }
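
For context, callers elsewhere in this file guard this barrier with the is-volatile bit decoded from the cache-entry flags. A hedged sketch of the pattern on the volatile-store side (the scratch register rdx and the exact call site are illustrative, not lifted from this diff):

    Label notVolatile;
    __ testl(rdx, rdx);        // rdx: the is-volatile bit, already shifted down
    __ jcc(Assembler::zero, notVolatile);
    volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
                                                 Assembler::StoreStore));
    __ bind(notVolatile);

This is the fence for the volatile-store-volatile-load case described in the comment above; on x86's TSO memory model only the StoreLoad component actually costs anything in hardware.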
2720 
2721 void TemplateTable::resolve_cache_and_index(int byte_no,
2722                                             Register Rcache,
2723                                             Register index,
2724                                             size_t index_size) {
2725   const Register temp = rbx;
2726   assert_different_registers(Rcache, index, temp);
2727 
2728   Label resolved;
2729 
2730   Bytecodes::Code code = bytecode();
2731   switch (code) {
2732   case Bytecodes::_nofast_getfield: code = Bytecodes::_getfield; break;
2733   case Bytecodes::_nofast_putfield: code = Bytecodes::_putfield; break;
2734   default: break;
2735   }
2736 
2737   assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
2738   __ get_cache_and_index_and_bytecode_at_bcp(Rcache, index, temp, byte_no, 1, index_size);
2739   __ cmpl(temp, code);  // have we resolved this bytecode?
2740   __ jcc(Assembler::equal, resolved);
2741 
2742   // resolve first time through
2743   address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
2744   __ movl(temp, code);
2745   __ call_VM(noreg, entry, temp);
2746   // Update registers with resolved info
2747   __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size);
2748   __ bind(resolved);
2749 }
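
The resolved-check above works because get_cache_and_index_and_bytecode_at_bcp also loads the bytecode recorded in the cache entry's indices word. A rough sketch of that helper, which lives in the interpreter macro assembler (the exact layout constants and shift are assumptions here, not part of this diff):

    // load the indices word of the ConstantPoolCacheEntry
    movl(bytecode, Address(cache, index, Address::times_ptr,
                           in_bytes(ConstantPoolCache::base_offset() +
                                    ConstantPoolCacheEntry::indices_offset())));
    // pick out the byte recorded for this byte_no; until the entry is
    // resolved this byte is zero, so the cmpl above fails and we take
    // the slow path exactly once
    shrl(bytecode, (1 + byte_no) * BitsPerByte);
    andl(bytecode, 0xFF);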
2750 
2751 // The cache and index registers must be set before call
2752 void TemplateTable::load_field_cp_cache_entry(Register obj,
2753                                               Register cache,
2754                                               Register index,
2755                                               Register off,
2756                                               Register flags,
2757                                               bool is_static = false) {
2758   assert_different_registers(cache, index, flags, off);
2759 
2760   ByteSize cp_base_offset = ConstantPoolCache::base_offset();
2761   // Field offset
2762   __ movptr(off, Address(cache, index, Address::times_ptr,
2763                          in_bytes(cp_base_offset +
2764                                   ConstantPoolCacheEntry::f2_offset())));
2765   // Flags
2766   __ movl(flags, Address(cache, index, Address::times_ptr,
2767                          in_bytes(cp_base_offset +
2768                                   ConstantPoolCacheEntry::flags_offset())));

2702 // (3) Similarly, a volatile write cannot let unrelated NON-volatile
2703 //     memory refs that happen BEFORE the write float down to after the
2704 //     write.  It's OK for non-volatile memory refs that happen after the
2705 //     volatile write to float up before it.
2706 //
2707 // We only put in barriers around volatile refs (they are expensive),
2708 // not _between_ memory refs (that would require us to track the
2709 // flavor of the previous memory refs).  Requirements (2) and (3)
2710 // require some barriers before volatile stores and after volatile
2711 // loads.  These nearly cover requirement (1) but miss the
2712 // volatile-store-volatile-load case.  This final case is placed after
2713 // volatile-stores although it could just as well go before
2714 // volatile-loads.
2715 
2716 void TemplateTable::volatile_barrier(Assembler::Membar_mask_bits order_constraint ) {
2717   // Helper function to insert a memory barrier; the is-volatile test is done by the caller
2718   __ membar(order_constraint);
2719 }
2720 
2721 void TemplateTable::resolve_cache_and_index(int byte_no,
2722                                             Register cache,
2723                                             Register index,
2724                                             size_t index_size) {
2725   const Register temp = rbx;
2726   assert_different_registers(cache, index, temp);
2727 
2728   Label L_clinit_barrier_slow;
2729   Label resolved;
2730 
2731   Bytecodes::Code code = bytecode();
2732   switch (code) {
2733   case Bytecodes::_nofast_getfield: code = Bytecodes::_getfield; break;
2734   case Bytecodes::_nofast_putfield: code = Bytecodes::_putfield; break;
2735   default: break;
2736   }
2737 
2738   assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
2739   __ get_cache_and_index_and_bytecode_at_bcp(cache, index, temp, byte_no, 1, index_size);
2740   __ cmpl(temp, code);  // have we resolved this bytecode?
2741   __ jcc(Assembler::equal, resolved);
2742 
2743   // resolve first time through
2744   // Class initialization barrier slow path lands here as well.
2745   __ bind(L_clinit_barrier_slow);
2746   address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
2747   __ movl(temp, code);
2748   __ call_VM(noreg, entry, temp);
2749   // Update registers with resolved info
2750   __ get_cache_and_index_at_bcp(cache, index, 1, index_size);
2751 
2752   __ bind(resolved);
2753 
2754   // Class initialization barrier for static methods
2755   if (UseFastClassInitChecks && bytecode() == Bytecodes::_invokestatic) {
2756     const Register method = temp;
2757     const Register klass  = temp;
2758     const Register thread = LP64_ONLY(r15_thread) NOT_LP64(noreg);
2759     assert(thread != noreg, "x86_32 not supported");
2760 
2761     __ load_resolved_method_at_index(byte_no, cache, index, method);
2762     __ load_method_holder(klass, method);
2763     __ clinit_barrier(klass, thread, NULL /*L_fast_path*/, &L_clinit_barrier_slow);
2764   }
2765 }
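
The clinit_barrier itself is defined on the MacroAssembler side of this change. A sketch of the two fast-path checks it emits (names such as init_thread_offset come from that companion change and are assumptions here): a class that is fully initialized passes, and so does the thread currently running the class's <clinit>, which keeps recursive initialization working:

    cmpb(Address(klass, InstanceKlass::init_state_offset()),
         InstanceKlass::fully_initialized);
    jcc(Assembler::equal, *L_fast_path);      // initialized: no barrier needed
    cmpptr(thread, Address(klass, InstanceKlass::init_thread_offset()));
    jcc(Assembler::notEqual, *L_slow_path);   // another thread is initializing:
                                              // re-enter the runtime and block

In this function the slow path is bound to the resolve call above, so a thread that loses the race simply re-runs resolve_from_cache, which blocks in the runtime until initialization completes or throws.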
2766 
2767 // The cache and index registers must be set before call
2768 void TemplateTable::load_field_cp_cache_entry(Register obj,
2769                                               Register cache,
2770                                               Register index,
2771                                               Register off,
2772                                               Register flags,
2773                                               bool is_static = false) {
2774   assert_different_registers(cache, index, flags, off);
2775 
2776   ByteSize cp_base_offset = ConstantPoolCache::base_offset();
2777   // Field offset
2778   __ movptr(off, Address(cache, index, Address::times_ptr,
2779                          in_bytes(cp_base_offset +
2780                                   ConstantPoolCacheEntry::f2_offset())));
2781   // Flags
2782   __ movl(flags, Address(cache, index, Address::times_ptr,
2783                          in_bytes(cp_base_offset +
2784                                   ConstantPoolCacheEntry::flags_offset())));
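
A caller then typically splits flags into the pieces it needs. A hedged sketch of that decoding (register choices are illustrative), using the shift/mask constants from ConstantPoolCacheEntry:

    __ movl(rdx, flags);
    __ shrl(rdx, ConstantPoolCacheEntry::is_volatile_shift);  // is-volatile bit,
    __ andl(rdx, 0x1);                                        // guards volatile_barrier above
    __ shrl(flags, ConstantPoolCacheEntry::tos_state_shift);  // field type (TosState),
    __ andl(flags, ConstantPoolCacheEntry::tos_state_mask);   // selects the load/store flavor
    const Address field(obj, off, Address::times_1);          // the field itself, once obj
                                                              // has been popped (non-static)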