3009 assert(recv == rcx, "");
3010
3011 // Test for an invoke of a final method
3012 Label notFinal;
3013 __ movl(rax, flags);
3014 __ andl(rax, (1 << ConstantPoolCacheEntry::is_vfinal_shift));
3015 __ jcc(Assembler::zero, notFinal);
3016
3017 const Register method = index; // method must be rbx
3018 assert(method == rbx,
3019 "Method* must be rbx for interpreter calling convention");
3020
3021 // do the call - the index is actually the method to call
3022 // that is, f2 is a vtable index if !is_vfinal, else f2 is a Method*
3023
3024 // It's final, need a null check here!
3025 __ null_check(recv);
3026
3027 // profile this call
3028 __ profile_final_call(rax);
3029
3030 __ jump_from_interpreted(method, rax);
3031
3032 __ bind(notFinal);
3033
3034 // get receiver klass
3035 __ null_check(recv, oopDesc::klass_offset_in_bytes());
3036 __ load_klass(rax, recv);
3037
3038 // profile this call
3039 __ profile_virtual_call(rax, r14, rdx);
3040
3041 // get target Method* & entry point
3042 __ lookup_virtual_method(rax, index, method);
3043 __ jump_from_interpreted(method, rdx);
3044 }
3045
3046
// Generate the template-interpreter entry for the invokevirtual bytecode.
// prepare_invoke resolves the constant-pool cache entry and loads its f2
// value into rbx (per the helper above: a Method* if is_vfinal, else a
// vtable index), the receiver into rcx, and the cache-entry flags into rdx.
// Actual dispatch (final fast path vs. vtable lookup) is done by
// invokevirtual_helper.
3047 void TemplateTable::invokevirtual(int byte_no) {
3048 transition(vtos, vtos);
// invokevirtual's resolved data lives in f2 of the cache entry.
3049 assert(byte_no == f2_byte, "use this argument");
3050 prepare_invoke(byte_no,
3051 rbx, // method or vtable index
3052 noreg, // unused itable index
3053 rcx, rdx); // recv, flags
3054
3055 // rbx: index
3056 // rcx: receiver
3057 // rdx: flags
3058
3059 invokevirtual_helper(rbx, rcx, rdx);
3060 }
3061
3062
// Generate the entry for the invokespecial bytecode: a direct call through
// the Method* resolved into f1 of the constant-pool cache entry, after
// null-checking the receiver (no dynamic dispatch is performed).
3063 void TemplateTable::invokespecial(int byte_no) {
3064 transition(vtos, vtos);
// invokespecial's resolved Method* lives in f1 of the cache entry.
3065 assert(byte_no == f1_byte, "use this argument");
3066 prepare_invoke(byte_no, rbx, noreg, // get f1 Method*
3067 rcx); // get receiver also for null check
3068 __ verify_oop(rcx);
// Receiver must be non-null even though it is not used for dispatch.
3069 __ null_check(rcx);
3070 // do the call
3071 __ profile_call(rax);
3072 __ jump_from_interpreted(rbx, rax);
3073 }
3074
3075
// Generate the entry for the invokestatic bytecode: no receiver to check,
// simply call the Method* resolved into f1 of the cache entry.
3076 void TemplateTable::invokestatic(int byte_no) {
3077 transition(vtos, vtos);
3078 assert(byte_no == f1_byte, "use this argument");
3079 prepare_invoke(byte_no, rbx); // get f1 Method*
3080 // do the call
3081 __ profile_call(rax);
3082 __ jump_from_interpreted(rbx, rax);
3083 }
3084
// The fast_invokevfinal rewritten bytecode is not generated on amd64
// (final virtual calls are handled inline in invokevirtual_helper), so
// reaching this entry indicates a bug: halt the VM with a diagnostic.
3085 void TemplateTable::fast_invokevfinal(int byte_no) {
3086 transition(vtos, vtos);
3087 assert(byte_no == f2_byte, "use this argument");
3088 __ stop("fast_invokevfinal not used on amd64");
3089 }
3090
3091 void TemplateTable::invokeinterface(int byte_no) {
3092 transition(vtos, vtos);
3093 assert(byte_no == f1_byte, "use this argument");
3094 prepare_invoke(byte_no, rax, rbx, // get f1 Klass*, f2 itable index
3095 rcx, rdx); // recv, flags
3096
3097 // rax: interface klass (from f1)
3098 // rbx: itable index (from f2)
3099 // rcx: receiver
3100 // rdx: flags
3101
3119 // profile this call
3120 __ profile_virtual_call(rdx, r13, r14);
3121
3122 Label no_such_interface, no_such_method;
3123
3124 __ lookup_interface_method(// inputs: rec. class, interface, itable index
3125 rdx, rax, rbx,
3126 // outputs: method, scan temp. reg
3127 rbx, r13,
3128 no_such_interface);
3129
3130 // rbx: Method* to call
3131 // rcx: receiver
3132 // Check for abstract method error
3133 // Note: This should be done more efficiently via a throw_abstract_method_error
3134 // interpreter entry point and a conditional jump to it in case of a null
3135 // method.
3136 __ testptr(rbx, rbx);
3137 __ jcc(Assembler::zero, no_such_method);
3138
3139 // do the call
3140 // rcx: receiver
3141 // rbx,: Method*
3142 __ jump_from_interpreted(rbx, rdx);
3143 __ should_not_reach_here();
3144
3145 // exception handling code follows...
3146 // note: must restore interpreter registers to canonical
3147 // state for exception handling to work correctly!
3148
3149 __ bind(no_such_method);
3150 // throw exception
3151 __ pop(rbx); // pop return address (pushed by prepare_invoke)
3152 __ restore_bcp(); // r13 must be correct for exception handler (was destroyed)
3153 __ restore_locals(); // make sure locals pointer is correct as well (was destroyed)
3154 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_AbstractMethodError));
3155 // the call_VM checks for exception, so we should never return here.
3156 __ should_not_reach_here();
3157
3158 __ bind(no_such_interface);
3176 const Register rdx_flags = rdx;
3177
3178 if (!EnableInvokeDynamic) {
3179 // rewriter does not generate this bytecode
3180 __ should_not_reach_here();
3181 return;
3182 }
3183
3184 prepare_invoke(byte_no, rbx_method, rax_mtype, rcx_recv);
3185 __ verify_method_ptr(rbx_method);
3186 __ verify_oop(rcx_recv);
3187 __ null_check(rcx_recv);
3188
3189 // rax: MethodType object (from cpool->resolved_references[f1], if necessary)
3190 // rbx: MH.invokeExact_MT method (from f2)
3191
3192 // Note: rax_mtype is already pushed (if necessary) by prepare_invoke
3193
3194 // FIXME: profile the LambdaForm also
3195 __ profile_final_call(rax);
3196
3197 __ jump_from_interpreted(rbx_method, rdx);
3198 }
3199
3200
// Generate the entry for the invokedynamic bytecode: load the resolved
// CallSite object (from f1 via resolved_references) into rax and the
// MH.linkToCallSite adapter Method* (from f2) into rbx, then jump to the
// adapter. If invokedynamic support is compiled out/disabled, raise
// IncompatibleClassChangeError instead of executing the bytecode.
3201 void TemplateTable::invokedynamic(int byte_no) {
3202 transition(vtos, vtos);
// invokedynamic's resolved CallSite reference index lives in f1.
3203 assert(byte_no == f1_byte, "use this argument");
3204
3205 if (!EnableInvokeDynamic) {
3206 // We should not encounter this bytecode if !EnableInvokeDynamic.
3207 // The verifier will stop it. However, if we get past the verifier,
3208 // this will stop the thread in a reasonable way, without crashing the JVM.
3209 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
3210 InterpreterRuntime::throw_IncompatibleClassChangeError));
3211 // the call_VM checks for exception, so we should never return here.
3212 __ should_not_reach_here();
3213 return;
3214 }
3215
3216 const Register rbx_method = rbx;
3217 const Register rax_callsite = rax;
3218
3219 prepare_invoke(byte_no, rbx_method, rax_callsite);
3220
3221 // rax: CallSite object (from cpool->resolved_references[f1])
3222 // rbx: MH.linkToCallSite method (from f2)
3223
3224 // Note: rax_callsite is already pushed by prepare_invoke
3225
3226 // %%% should make a type profile for any invokedynamic that takes a ref argument
3227 // profile this call
3228 __ profile_call(r13);
3229
3230 __ verify_oop(rax_callsite);
3231
3232 __ jump_from_interpreted(rbx_method, rdx);
3233 }
3234
3235
3236 //-----------------------------------------------------------------------------
3237 // Allocation
3238
3239 void TemplateTable::_new() {
3240 transition(vtos, atos);
3241 __ get_unsigned_2_byte_index_at_bcp(rdx, 1);
3242 Label slow_case;
3243 Label done;
3244 Label initialize_header;
3245 Label initialize_object; // including clearing the fields
3246 Label allocate_shared;
3247
3248 __ get_cpool_and_tags(rsi, rax);
|
3009 assert(recv == rcx, "");
3010
3011 // Test for an invoke of a final method
3012 Label notFinal;
3013 __ movl(rax, flags);
3014 __ andl(rax, (1 << ConstantPoolCacheEntry::is_vfinal_shift));
3015 __ jcc(Assembler::zero, notFinal);
3016
3017 const Register method = index; // method must be rbx
3018 assert(method == rbx,
3019 "Method* must be rbx for interpreter calling convention");
3020
3021 // do the call - the index is actually the method to call
3022 // that is, f2 is a vtable index if !is_vfinal, else f2 is a Method*
3023
3024 // It's final, need a null check here!
3025 __ null_check(recv);
3026
3027 // profile this call
3028 __ profile_final_call(rax);
3029 __ profile_arguments_type(rax, method, r13, true);
3030
3031 __ jump_from_interpreted(method, rax);
3032
3033 __ bind(notFinal);
3034
3035 // get receiver klass
3036 __ null_check(recv, oopDesc::klass_offset_in_bytes());
3037 __ load_klass(rax, recv);
3038
3039 // profile this call
3040 __ profile_virtual_call(rax, r14, rdx);
3041
3042 // get target Method* & entry point
3043 __ lookup_virtual_method(rax, index, method);
3044 __ profile_arguments_type(rdx, method, r13, true);
3045 __ jump_from_interpreted(method, rdx);
3046 }
3047
3048
// Generate the template-interpreter entry for the invokevirtual bytecode.
// prepare_invoke resolves the constant-pool cache entry and loads its f2
// value into rbx (per the helper above: a Method* if is_vfinal, else a
// vtable index), the receiver into rcx, and the cache-entry flags into rdx.
// Actual dispatch (final fast path vs. vtable lookup) is done by
// invokevirtual_helper.
3049 void TemplateTable::invokevirtual(int byte_no) {
3050 transition(vtos, vtos);
// invokevirtual's resolved data lives in f2 of the cache entry.
3051 assert(byte_no == f2_byte, "use this argument");
3052 prepare_invoke(byte_no,
3053 rbx, // method or vtable index
3054 noreg, // unused itable index
3055 rcx, rdx); // recv, flags
3056
3057 // rbx: index
3058 // rcx: receiver
3059 // rdx: flags
3060
3061 invokevirtual_helper(rbx, rcx, rdx);
3062 }
3063
3064
// Generate the entry for the invokespecial bytecode: a direct call through
// the Method* resolved into f1 of the constant-pool cache entry, after
// null-checking the receiver (no dynamic dispatch is performed).
3065 void TemplateTable::invokespecial(int byte_no) {
3066 transition(vtos, vtos);
// invokespecial's resolved Method* lives in f1 of the cache entry.
3067 assert(byte_no == f1_byte, "use this argument");
3068 prepare_invoke(byte_no, rbx, noreg, // get f1 Method*
3069 rcx); // get receiver also for null check
3070 __ verify_oop(rcx);
// Receiver must be non-null even though it is not used for dispatch.
3071 __ null_check(rcx);
3072 // do the call
3073 __ profile_call(rax);
// NOTE(review): appears to record callee argument-type profile data in the
// MDO (rbx = callee Method*, r13 = temp) — confirm against MacroAssembler.
3074 __ profile_arguments_type(rax, rbx, r13, false);
3075 __ jump_from_interpreted(rbx, rax);
3076 }
3077
3078
// Generate the entry for the invokestatic bytecode: no receiver to check,
// simply call the Method* resolved into f1 of the cache entry.
3079 void TemplateTable::invokestatic(int byte_no) {
3080 transition(vtos, vtos);
3081 assert(byte_no == f1_byte, "use this argument");
3082 prepare_invoke(byte_no, rbx); // get f1 Method*
3083 // do the call
3084 __ profile_call(rax);
// NOTE(review): appears to record callee argument-type profile data in the
// MDO (rbx = callee Method*, r13 = temp) — confirm against MacroAssembler.
3085 __ profile_arguments_type(rax, rbx, r13, false);
3086 __ jump_from_interpreted(rbx, rax);
3087 }
3088
// The fast_invokevfinal rewritten bytecode is not generated on amd64
// (final virtual calls are handled inline in invokevirtual_helper), so
// reaching this entry indicates a bug: halt the VM with a diagnostic.
3089 void TemplateTable::fast_invokevfinal(int byte_no) {
3090 transition(vtos, vtos);
3091 assert(byte_no == f2_byte, "use this argument");
3092 __ stop("fast_invokevfinal not used on amd64");
3093 }
3094
3095 void TemplateTable::invokeinterface(int byte_no) {
3096 transition(vtos, vtos);
3097 assert(byte_no == f1_byte, "use this argument");
3098 prepare_invoke(byte_no, rax, rbx, // get f1 Klass*, f2 itable index
3099 rcx, rdx); // recv, flags
3100
3101 // rax: interface klass (from f1)
3102 // rbx: itable index (from f2)
3103 // rcx: receiver
3104 // rdx: flags
3105
3123 // profile this call
3124 __ profile_virtual_call(rdx, r13, r14);
3125
3126 Label no_such_interface, no_such_method;
3127
3128 __ lookup_interface_method(// inputs: rec. class, interface, itable index
3129 rdx, rax, rbx,
3130 // outputs: method, scan temp. reg
3131 rbx, r13,
3132 no_such_interface);
3133
3134 // rbx: Method* to call
3135 // rcx: receiver
3136 // Check for abstract method error
3137 // Note: This should be done more efficiently via a throw_abstract_method_error
3138 // interpreter entry point and a conditional jump to it in case of a null
3139 // method.
3140 __ testptr(rbx, rbx);
3141 __ jcc(Assembler::zero, no_such_method);
3142
3143 __ profile_arguments_type(rdx, rbx, r13, true);
3144
3145 // do the call
3146 // rcx: receiver
3147 // rbx,: Method*
3148 __ jump_from_interpreted(rbx, rdx);
3149 __ should_not_reach_here();
3150
3151 // exception handling code follows...
3152 // note: must restore interpreter registers to canonical
3153 // state for exception handling to work correctly!
3154
3155 __ bind(no_such_method);
3156 // throw exception
3157 __ pop(rbx); // pop return address (pushed by prepare_invoke)
3158 __ restore_bcp(); // r13 must be correct for exception handler (was destroyed)
3159 __ restore_locals(); // make sure locals pointer is correct as well (was destroyed)
3160 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::throw_AbstractMethodError));
3161 // the call_VM checks for exception, so we should never return here.
3162 __ should_not_reach_here();
3163
3164 __ bind(no_such_interface);
3182 const Register rdx_flags = rdx;
3183
3184 if (!EnableInvokeDynamic) {
3185 // rewriter does not generate this bytecode
3186 __ should_not_reach_here();
3187 return;
3188 }
3189
3190 prepare_invoke(byte_no, rbx_method, rax_mtype, rcx_recv);
3191 __ verify_method_ptr(rbx_method);
3192 __ verify_oop(rcx_recv);
3193 __ null_check(rcx_recv);
3194
3195 // rax: MethodType object (from cpool->resolved_references[f1], if necessary)
3196 // rbx: MH.invokeExact_MT method (from f2)
3197
3198 // Note: rax_mtype is already pushed (if necessary) by prepare_invoke
3199
3200 // FIXME: profile the LambdaForm also
3201 __ profile_final_call(rax);
3202 __ profile_arguments_type(rdx, rbx_method, r13, true);
3203
3204 __ jump_from_interpreted(rbx_method, rdx);
3205 }
3206
3207
// Generate the entry for the invokedynamic bytecode: load the resolved
// CallSite object (from f1 via resolved_references) into rax and the
// MH.linkToCallSite adapter Method* (from f2) into rbx, then jump to the
// adapter. If invokedynamic support is compiled out/disabled, raise
// IncompatibleClassChangeError instead of executing the bytecode.
3208 void TemplateTable::invokedynamic(int byte_no) {
3209 transition(vtos, vtos);
// invokedynamic's resolved CallSite reference index lives in f1.
3210 assert(byte_no == f1_byte, "use this argument");
3211
3212 if (!EnableInvokeDynamic) {
3213 // We should not encounter this bytecode if !EnableInvokeDynamic.
3214 // The verifier will stop it. However, if we get past the verifier,
3215 // this will stop the thread in a reasonable way, without crashing the JVM.
3216 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
3217 InterpreterRuntime::throw_IncompatibleClassChangeError));
3218 // the call_VM checks for exception, so we should never return here.
3219 __ should_not_reach_here();
3220 return;
3221 }
3222
3223 const Register rbx_method = rbx;
3224 const Register rax_callsite = rax;
3225
3226 prepare_invoke(byte_no, rbx_method, rax_callsite);
3227
3228 // rax: CallSite object (from cpool->resolved_references[f1])
3229 // rbx: MH.linkToCallSite method (from f2)
3230
3231 // Note: rax_callsite is already pushed by prepare_invoke
3232
3233 // %%% should make a type profile for any invokedynamic that takes a ref argument
3234 // profile this call
3235 __ profile_call(r13);
// NOTE(review): appears to record callee argument-type profile data in the
// MDO (rbx_method = callee Method*, r13 = temp) — confirm against MacroAssembler.
3236 __ profile_arguments_type(rdx, rbx_method, r13, false);
3237
3238 __ verify_oop(rax_callsite);
3239
3240 __ jump_from_interpreted(rbx_method, rdx);
3241 }
3242
3243
3244 //-----------------------------------------------------------------------------
3245 // Allocation
3246
3247 void TemplateTable::_new() {
3248 transition(vtos, atos);
3249 __ get_unsigned_2_byte_index_at_bcp(rdx, 1);
3250 Label slow_case;
3251 Label done;
3252 Label initialize_header;
3253 Label initialize_object; // including clearing the fields
3254 Label allocate_shared;
3255
3256 __ get_cpool_and_tags(rsi, rax);
|