rev 1022 : 6829192: JSR 292 needs to support 64-bit x86
Summary: changes for method handles and invokedynamic
Reviewed-by: ?, ?

          --- old/src/cpu/x86/vm/templateTable_x86_64.cpp
          +++ new/src/cpu/x86/vm/templateTable_x86_64.cpp
... 195 lines elided ...
 196  196    }
 197  197    Label patch_done;
 198  198    if (JvmtiExport::can_post_breakpoint()) {
 199  199      Label fast_patch;
 200  200      // if a breakpoint is present we can't rewrite the stream directly
 201  201      __ movzbl(scratch, at_bcp(0));
 202  202      __ cmpl(scratch, Bytecodes::_breakpoint);
 203  203      __ jcc(Assembler::notEqual, fast_patch);
 204  204      __ get_method(scratch);
 205  205      // Let breakpoint table handling rewrite to quicker bytecode
 206      -    __ call_VM(noreg,
 207      -               CAST_FROM_FN_PTR(address,
 208      -                                InterpreterRuntime::set_original_bytecode_at),
 209      -               scratch, r13, bc);
      206 +    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::set_original_bytecode_at), scratch, r13, bc);
 210  207  #ifndef ASSERT
 211  208      __ jmpb(patch_done);
 212      -    __ bind(fast_patch);
 213      -  }
 214  209  #else
 215  210      __ jmp(patch_done);
      211 +#endif
 216  212      __ bind(fast_patch);
 217  213    }
      214 +#ifdef ASSERT
 218  215    Label okay;
 219  216    __ load_unsigned_byte(scratch, at_bcp(0));
 220  217    __ cmpl(scratch, (int) Bytecodes::java_code(bytecode));
 221  218    __ jcc(Assembler::equal, okay);
 222  219    __ cmpl(scratch, bc);
 223  220    __ jcc(Assembler::equal, okay);
 224  221    __ stop("patching the wrong bytecode");
 225  222    __ bind(okay);
 226  223  #endif
 227  224    // patch bytecode
... 1819 lines elided ...
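Note on the patch_bytecode hunk above: the old code duplicated __ bind(fast_patch); and the closing brace in both arms of the #ifndef ASSERT / #else, and kept the wrong-bytecode sanity check inside the #else (debug) arm. The new layout closes the preprocessor split right after the jump and gives the sanity check its own #ifdef ASSERT region. Flattened (taken directly from the new side of the diff, with the call_VM line abbreviated), the result reads:

    if (JvmtiExport::can_post_breakpoint()) {
      ...
      __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::set_original_bytecode_at), scratch, r13, bc);
  #ifndef ASSERT
      __ jmpb(patch_done);
  #else
      __ jmp(patch_done);
  #endif
      __ bind(fast_patch);
    }
  #ifdef ASSERT
    Label okay;
    __ load_unsigned_byte(scratch, at_bcp(0));
    ...
    __ stop("patching the wrong bytecode");
    __ bind(okay);
  #endif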
2047 2044  // volatile-stores although it could just as well go before
2048 2045  // volatile-loads.
2049 2046  void TemplateTable::volatile_barrier(Assembler::Membar_mask_bits
2050 2047                                       order_constraint) {
2051 2048    // Helper function to insert a is-volatile test and memory barrier
2052 2049    if (os::is_MP()) { // Not needed on single CPU
2053 2050      __ membar(order_constraint);
2054 2051    }
2055 2052  }
2056 2053  
2057      -void TemplateTable::resolve_cache_and_index(int byte_no,
2058      -                                            Register Rcache,
2059      -                                            Register index) {
     2054 +void TemplateTable::resolve_cache_and_index(int byte_no, Register Rcache, Register index) {
2060 2055    assert(byte_no == 1 || byte_no == 2, "byte_no out of range");
     2056 +  bool is_invokedynamic = (bytecode() == Bytecodes::_invokedynamic);
2061 2057  
2062 2058    const Register temp = rbx;
2063 2059    assert_different_registers(Rcache, index, temp);
2064 2060  
2065 2061    const int shift_count = (1 + byte_no) * BitsPerByte;
2066 2062    Label resolved;
2067      -  __ get_cache_and_index_at_bcp(Rcache, index, 1);
2068      -  __ movl(temp, Address(Rcache,
2069      -                        index, Address::times_8,
2070      -                        constantPoolCacheOopDesc::base_offset() +
2071      -                        ConstantPoolCacheEntry::indices_offset()));
2072      -  __ shrl(temp, shift_count);
2073      -  // have we resolved this bytecode?
2074      -  __ andl(temp, 0xFF);
2075      -  __ cmpl(temp, (int) bytecode());
2076      -  __ jcc(Assembler::equal, resolved);
     2063 +  __ get_cache_and_index_at_bcp(Rcache, index, 1, is_invokedynamic);
     2064 +  if (is_invokedynamic) {
     2065 +    // we are resolved if the f1 field contains a non-null CallSite object
     2066 +    __ cmpptr(Address(Rcache, index, Address::times_ptr, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::f1_offset()), (int32_t) NULL_WORD);
     2067 +    __ jcc(Assembler::notEqual, resolved);
     2068 +  } else {
     2069 +    __ movl(temp, Address(Rcache, index, Address::times_ptr, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::indices_offset()));
     2070 +    __ shrl(temp, shift_count);
     2071 +    // have we resolved this bytecode?
     2072 +    __ andl(temp, 0xFF);
     2073 +    __ cmpl(temp, (int) bytecode());
     2074 +    __ jcc(Assembler::equal, resolved);
     2075 +  }
2077 2076  
2078 2077    // resolve first time through
2079 2078    address entry;
2080 2079    switch (bytecode()) {
2081 2080    case Bytecodes::_getstatic:
2082 2081    case Bytecodes::_putstatic:
2083 2082    case Bytecodes::_getfield:
2084 2083    case Bytecodes::_putfield:
2085 2084      entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_get_put);
2086 2085      break;
2087 2086    case Bytecodes::_invokevirtual:
2088 2087    case Bytecodes::_invokespecial:
2089 2088    case Bytecodes::_invokestatic:
2090 2089    case Bytecodes::_invokeinterface:
2091 2090      entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invoke);
2092 2091      break;
     2092 +  case Bytecodes::_invokedynamic:
     2093 +    entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokedynamic);
     2094 +    break;
2093 2095    default:
2094 2096      ShouldNotReachHere();
2095 2097      break;
2096 2098    }
2097 2099    __ movl(temp, (int) bytecode());
2098 2100    __ call_VM(noreg, entry, temp);
2099 2101  
2100 2102    // Update registers with resolved info
2101      -  __ get_cache_and_index_at_bcp(Rcache, index, 1);
     2103 +  __ get_cache_and_index_at_bcp(Rcache, index, 1, is_invokedynamic);
2102 2104    __ bind(resolved);
2103 2105  }
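Note on resolve_cache_and_index: for invokedynamic the resolved state cannot be read from the indices word, because an invokedynamic cache entry is considered resolved once a CallSite oop has been installed in its f1 field (by InterpreterRuntime::resolve_invokedynamic, added to the switch above). A rough C++ sketch of the condition the generated code tests; the entry_f1 and entry_indices parameters are illustrative stand-ins for the cache-entry fields, not real accessors:

    // Conceptual check only; the field parameters are placeholders for illustration.
    static bool is_resolved(Bytecodes::Code bc, intptr_t entry_f1, int entry_indices, int byte_no) {
      if (bc == Bytecodes::_invokedynamic) {
        return entry_f1 != 0;                                // resolved once a CallSite oop sits in f1
      }
      const int shift = (1 + byte_no) * BitsPerByte;         // selects the bytecode byte for this byte_no
      return ((entry_indices >> shift) & 0xFF) == (int) bc;  // resolved-bytecode byte matches
    }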
2104 2106  
2105 2107  // The Rcache and index registers must be set before call
2106 2108  void TemplateTable::load_field_cp_cache_entry(Register obj,
2107 2109                                                Register cache,
2108 2110                                                Register index,
2109 2111                                                Register off,
2110 2112                                                Register flags,
2111 2113                                                bool is_static = false) {
... 713 lines elided ...
2825 2827  
2826 2828  
2827 2829  //-----------------------------------------------------------------------------
2828 2830  // Calls
2829 2831  
2830 2832  void TemplateTable::count_calls(Register method, Register temp) {
2831 2833    // implemented elsewhere
2832 2834    ShouldNotReachHere();
2833 2835  }
2834 2836  
2835      -void TemplateTable::prepare_invoke(Register method,
2836      -                                   Register index,
2837      -                                   int byte_no,
2838      -                                   Bytecodes::Code code) {
     2837 +void TemplateTable::prepare_invoke(Register method, Register index, int byte_no) {
2839 2838    // determine flags
     2839 +  Bytecodes::Code code = bytecode();
2840 2840    const bool is_invokeinterface  = code == Bytecodes::_invokeinterface;
     2841 +  const bool is_invokedynamic    = code == Bytecodes::_invokedynamic;
2841 2842    const bool is_invokevirtual    = code == Bytecodes::_invokevirtual;
2842 2843    const bool is_invokespecial    = code == Bytecodes::_invokespecial;
2843      -  const bool load_receiver       = code != Bytecodes::_invokestatic;
     2844 +  const bool load_receiver      = (code != Bytecodes::_invokestatic && code != Bytecodes::_invokedynamic);
2844 2845    const bool receiver_null_check = is_invokespecial;
2845 2846    const bool save_flags = is_invokeinterface || is_invokevirtual;
2846 2847    // setup registers & access constant pool cache
2847 2848    const Register recv   = rcx;
2848 2849    const Register flags  = rdx;
2849 2850    assert_different_registers(method, index, recv, flags);
2850 2851  
2851 2852    // save 'interpreter return address'
2852 2853    __ save_bcp();
2853 2854  
2854 2855    load_invoke_cp_cache_entry(byte_no, method, index, flags, is_invokevirtual);
2855 2856  
2856 2857    // load receiver if needed (note: no return address pushed yet)
2857 2858    if (load_receiver) {
2858 2859      __ movl(recv, flags);
2859 2860      __ andl(recv, 0xFF);
2860 2861      if (TaggedStackInterpreter) __ shll(recv, 1);  // index*2
2861      -    __ movptr(recv, Address(rsp, recv, Address::times_8,
2862      -                                 -Interpreter::expr_offset_in_bytes(1)));
2863      -    __ verify_oop(recv);
     2862 +    Address recv_addr(rsp, recv, Address::times_8, -Interpreter::expr_offset_in_bytes(1));
     2863 +    if (is_invokedynamic) {
     2864 +      __ lea(recv, recv_addr);
     2865 +    } else {
     2866 +      __ movptr(recv, recv_addr);
     2867 +      __ verify_oop(recv);
     2868 +    }
2864 2869    }
2865 2870  
2866 2871    // do null check if needed
2867 2872    if (receiver_null_check) {
2868 2873      __ null_check(recv);
2869 2874    }
2870 2875  
2871 2876    if (save_flags) {
2872 2877      __ movl(r13, flags);
2873 2878    }
2874 2879  
2875 2880    // compute return type
2876 2881    __ shrl(flags, ConstantPoolCacheEntry::tosBits);
2877 2882    // Make sure we don't need to mask flags for tosBits after the above shift
2878 2883    ConstantPoolCacheEntry::verify_tosBits();
2879 2884    // load return address
2880 2885    {
2881      -    ExternalAddress return_5((address)Interpreter::return_5_addrs_by_index_table());
2882      -    ExternalAddress return_3((address)Interpreter::return_3_addrs_by_index_table());
2883      -    __ lea(rscratch1, (is_invokeinterface ? return_5 : return_3));
2884      -    __ movptr(flags, Address(rscratch1, flags, Address::times_8));
     2886 +    address table_addr;
     2887 +    if (is_invokeinterface || is_invokedynamic)
     2888 +      table_addr = (address)Interpreter::return_5_addrs_by_index_table();
     2889 +    else
     2890 +      table_addr = (address)Interpreter::return_3_addrs_by_index_table();
     2891 +    ExternalAddress table(table_addr);
     2892 +    __ lea(rscratch1, table);
     2893 +    __ movptr(flags, Address(rscratch1, flags, Address::times_ptr));
2885 2894    }
2886 2895  
2887 2896    // push return address
2888 2897    __ push(flags);
2889 2898  
2890 2899    // Restore flag field from the constant pool cache, and restore esi
2891 2900    // for later null checks.  r13 is the bytecode pointer
2892 2901    if (save_flags) {
2893 2902      __ movl(flags, r13);
2894 2903      __ restore_bcp();
... 45 lines elided ...
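Note on prepare_invoke above: the receiver-load block gains an is_invokedynamic arm that uses lea to form the address of the first argument slot instead of loading (and verify_oop-ing) a receiver oop with movptr. The return-address selection also now routes invokedynamic through the return_5 table: like invokeinterface, invokedynamic occupies 5 bytes in the bytecode stream, so the interpreter must resume 5 bytes past the current bcp rather than 3. The selection boils down to the following (a minimal sketch; both table functions already appear in this file):

    address return_table_for(Bytecodes::Code code) {
      bool five_byte_invoke = (code == Bytecodes::_invokeinterface ||
                               code == Bytecodes::_invokedynamic);
      return five_byte_invoke ? (address) Interpreter::return_5_addrs_by_index_table()
                              : (address) Interpreter::return_3_addrs_by_index_table();
    }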
2940 2949    __ movptr(method, Address(rax, index,
2941 2950                                   Address::times_8,
2942 2951                                   base + vtableEntry::method_offset_in_bytes()));
2943 2952    __ movptr(rdx, Address(method, methodOopDesc::interpreter_entry_offset()));
2944 2953    __ jump_from_interpreted(method, rdx);
2945 2954  }
2946 2955  
2947 2956  
2948 2957  void TemplateTable::invokevirtual(int byte_no) {
2949 2958    transition(vtos, vtos);
2950      -  prepare_invoke(rbx, noreg, byte_no, bytecode());
     2959 +  prepare_invoke(rbx, noreg, byte_no);
2951 2960  
2952 2961    // rbx: index
2953 2962    // rcx: receiver
2954 2963    // rdx: flags
2955 2964  
2956 2965    invokevirtual_helper(rbx, rcx, rdx);
2957 2966  }
2958 2967  
2959 2968  
2960 2969  void TemplateTable::invokespecial(int byte_no) {
2961 2970    transition(vtos, vtos);
2962      -  prepare_invoke(rbx, noreg, byte_no, bytecode());
     2971 +  prepare_invoke(rbx, noreg, byte_no);
2963 2972    // do the call
2964 2973    __ verify_oop(rbx);
2965 2974    __ profile_call(rax);
2966 2975    __ jump_from_interpreted(rbx, rax);
2967 2976  }
2968 2977  
2969 2978  
2970 2979  void TemplateTable::invokestatic(int byte_no) {
2971 2980    transition(vtos, vtos);
2972      -  prepare_invoke(rbx, noreg, byte_no, bytecode());
     2981 +  prepare_invoke(rbx, noreg, byte_no);
2973 2982    // do the call
2974 2983    __ verify_oop(rbx);
2975 2984    __ profile_call(rax);
2976 2985    __ jump_from_interpreted(rbx, rax);
2977 2986  }
2978 2987  
2979 2988  void TemplateTable::fast_invokevfinal(int byte_no) {
2980 2989    transition(vtos, vtos);
2981 2990    __ stop("fast_invokevfinal not used on amd64");
2982 2991  }
2983 2992  
2984 2993  void TemplateTable::invokeinterface(int byte_no) {
2985 2994    transition(vtos, vtos);
2986      -  prepare_invoke(rax, rbx, byte_no, bytecode());
     2995 +  prepare_invoke(rax, rbx, byte_no);
2987 2996  
2988 2997    // rax: Interface
2989 2998    // rbx: index
2990 2999    // rcx: receiver
2991 3000    // rdx: flags
2992 3001  
2993 3002    // Special case of invokeinterface called for virtual method of
2994 3003    // java.lang.Object.  See cpCacheOop.cpp for details.
2995 3004    // This code isn't produced by javac, but could be produced by
2996 3005    // another compliant java compiler.
... 68 lines elided ...
3065 3074      // We should not encounter this bytecode if !EnableInvokeDynamic.
3066 3075      // The verifier will stop it.  However, if we get past the verifier,
3067 3076      // this will stop the thread in a reasonable way, without crashing the JVM.
3068 3077      __ call_VM(noreg, CAST_FROM_FN_PTR(address,
3069 3078                       InterpreterRuntime::throw_IncompatibleClassChangeError));
3070 3079      // the call_VM checks for exception, so we should never return here.
3071 3080      __ should_not_reach_here();
3072 3081      return;
3073 3082    }
3074 3083  
3075      -  __ stop("invokedynamic NYI");//6815692//
     3084 +  prepare_invoke(rax, rbx, byte_no);
     3085 +
     3086 +  // rax: CallSite object (f1)
     3087 +  // rbx: unused (f2)
     3088 +  // rcx: receiver address
     3089 +  // rdx: flags (unused)
     3090 +
     3091 +  if (ProfileInterpreter) {
     3092 +    Label L;
     3093 +    // %%% should make a type profile for any invokedynamic that takes a ref argument
     3094 +    // profile this call
     3095 +    __ profile_call(r13);
     3096 +  }
     3097 +
     3098 +  __ movptr(rcx, Address(rax, __ delayed_value(java_dyn_CallSite::target_offset_in_bytes, rcx)));
     3099 +  __ null_check(rcx);
     3100 +  __ prepare_to_jump_from_interpreted();
     3101 +  __ jump_to_method_handle_entry(rcx, rdx);
3076 3102  }
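Note on the new invokedynamic template: the old "invokedynamic NYI" stop is replaced by a real dispatch path. prepare_invoke leaves the resolved CallSite oop (f1) in rax, the call is profiled when ProfileInterpreter is on, and the emitted code then loads the CallSite's target method handle, null-checks it, and jumps to its interpreted method-handle entry. A rough pseudo-C++ outline of the runtime behaviour; the helper names here are illustrative only, not VM functions:

    // Illustrative outline; load_oop_field, throw_NPE, and enter_method_handle are made-up names.
    void run_invokedynamic(oop call_site) {
      // java_dyn_CallSite::target_offset_in_bytes is the offset the emitted movptr uses (via delayed_value)
      oop method_handle = load_oop_field(call_site, java_dyn_CallSite::target_offset_in_bytes());
      if (method_handle == NULL) throw_NPE();   // corresponds to the emitted null_check on rcx
      enter_method_handle(method_handle);       // corresponds to jump_to_method_handle_entry(rcx, rdx)
    }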
3077 3103  
3078 3104  
3079 3105  //-----------------------------------------------------------------------------
3080 3106  // Allocation
3081 3107  
3082 3108  void TemplateTable::_new() {
3083 3109    transition(vtos, atos);
3084 3110    __ get_unsigned_2_byte_index_at_bcp(rdx, 1);
3085 3111    Label slow_case;
... 513 lines elided ...