963
964
965 void TemplateTable::aastore() {
966 Label is_null, ok_is_subtype, done;
967 transition(vtos, vtos);
968 // stack: ..., array, index, value
969 __ movptr(rax, at_tos()); // Value
970 __ movl(rcx, at_tos_p1()); // Index
971 __ movptr(rdx, at_tos_p2()); // Array
972
973 Address element_address(rdx, rcx, Address::times_4, arrayOopDesc::base_offset_in_bytes(T_OBJECT));
974 index_check_without_pop(rdx, rcx); // kills rbx,
975 // do array store check - check for NULL value first
976 __ testptr(rax, rax);
977 __ jcc(Assembler::zero, is_null);
978
979 // Move subklass into EBX
980 __ load_klass(rbx, rax);
981 // Move superklass into EAX
982 __ load_klass(rax, rdx);
983 __ movptr(rax, Address(rax, sizeof(oopDesc) + objArrayKlass::element_klass_offset_in_bytes()));
984 // Compress array+index*wordSize+12 into a single register. Frees ECX.
985 __ lea(rdx, element_address);
986
987 // Generate subtype check. Blows ECX. Resets EDI to locals.
988 // Superklass in EAX. Subklass in EBX.
989 __ gen_subtype_check( rbx, ok_is_subtype );
990
991 // Come here on failure
992 // object is at TOS
993 __ jump(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry));
994
995 // Come here on success
996 __ bind(ok_is_subtype);
997
998 // Get the value to store
999 __ movptr(rax, at_rsp());
1000 // and store it with appropriate barrier
1001 do_oop_store(_masm, Address(rdx, 0), rax, _bs->kind(), true);
1002
1003 __ jmp(done);
2016 __ profile_switch_default(i);
2017 __ movl(j, Address(array, -2*wordSize));
2018 __ bswapl(j);
2019 LP64_ONLY(__ movslq(j, j));
2020 __ restore_bcp();
2021 __ restore_locals(); // restore rdi
2022 __ load_unsigned_byte(rbx, Address(rsi, j, Address::times_1));
2023 __ addptr(rsi, j);
2024 __ dispatch_only(vtos);
2025 }
2026
2027
// Emit the interpreter template for the *return bytecodes: pop the current
// activation and jump back to the caller. For _return_register_finalizer
// (the rewritten Object.<init> return), first register the receiver with the
// VM for finalization if its class has a finalizer.
2028 void TemplateTable::_return(TosState state) {
2029 transition(state, state);
2030 assert(_desc->calls_vm(), "inconsistent calls_vm information"); // call in remove_activation
2031
2032 if (_desc->bytecode() == Bytecodes::_return_register_finalizer) {
2033 assert(state == vtos, "only valid state");
// Load local 0 (the receiver) and fetch its klass.
2034 __ movptr(rax, aaddress(0));
2035 __ load_klass(rdi, rax);
// Read the klass' access flags and test the HAS_FINALIZER bit; if it is
// clear, skip the runtime call entirely.
2036 __ movl(rdi, Address(rdi, Klass::access_flags_offset_in_bytes() + sizeof(oopDesc)));
2037 __ testl(rdi, JVM_ACC_HAS_FINALIZER);
2038 Label skip_register_finalizer;
2039 __ jcc(Assembler::zero, skip_register_finalizer);
2040
// Receiver (rax) is still alive here; hand it to the VM so the object is
// enqueued for finalization.
2041 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::register_finalizer), rax);
2042
2043 __ bind(skip_register_finalizer);
2044 }
2045
// Tear down the interpreter frame; the return address ends up in rsi and we
// jump through it back to the caller.
2046 __ remove_activation(state, rsi);
2047 __ jmp(rsi);
2048 }
2049
2050
2051 // ----------------------------------------------------------------------------
2052 // Volatile variables demand their effects be made known to all CPUs in
2053 // order. Store buffers on most chips allow reads & writes to reorder; the
2054 // JMM's ReadAfterWrite.java test fails in -Xint mode without some kind of
2055 // memory barrier (i.e., it's not sufficient that the interpreter does not
2056 // reorder volatile references, the hardware also must not reorder them).
3171 Label done;
3172 Label initialize_header;
3173 Label initialize_object; // including clearing the fields
3174 Label allocate_shared;
3175
3176 __ get_cpool_and_tags(rcx, rax);
3177
3178 // Make sure the class we're about to instantiate has been resolved.
3179 // This is done before loading instanceKlass to be consistent with the order
3180 // how Constant Pool is updated (see constantPoolOopDesc::klass_at_put)
3181 const int tags_offset = typeArrayOopDesc::header_size(T_BYTE) * wordSize;
3182 __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), JVM_CONSTANT_Class);
3183 __ jcc(Assembler::notEqual, slow_case_no_pop);
3184
3185 // get instanceKlass
3186 __ movptr(rcx, Address(rcx, rdx, Address::times_ptr, sizeof(constantPoolOopDesc)));
3187 __ push(rcx); // save the contexts of klass for initializing the header
3188
3189 // make sure klass is initialized & doesn't have finalizer
3190 // make sure klass is fully initialized
3191 __ cmpl(Address(rcx, instanceKlass::init_state_offset_in_bytes() + sizeof(oopDesc)), instanceKlass::fully_initialized);
3192 __ jcc(Assembler::notEqual, slow_case);
3193
3194 // get instance_size in instanceKlass (scaled to a count of bytes)
3195 __ movl(rdx, Address(rcx, Klass::layout_helper_offset_in_bytes() + sizeof(oopDesc)));
3196 // test to see if it has a finalizer or is malformed in some way
3197 __ testl(rdx, Klass::_lh_instance_slow_path_bit);
3198 __ jcc(Assembler::notZero, slow_case);
3199
3200 //
3201 // Allocate the instance
3202 // 1) Try to allocate in the TLAB
3203 // 2) if fail and the object is large allocate in the shared Eden
3204 // 3) if the above fails (or is not applicable), go to a slow case
3205 // (creates a new TLAB, etc.)
3206
3207 const bool allow_shared_alloc =
3208 Universe::heap()->supports_inline_contig_alloc() && !CMSIncrementalMode;
3209
3210 const Register thread = rcx;
3211 if (UseTLAB || allow_shared_alloc) {
3212 __ get_thread(thread);
3213 }
3214
3215 if (UseTLAB) {
3276 // Ignore partial flag stall after shrl() since it is debug VM
3277 __ jccb(Assembler::carryClear, L);
3278 __ stop("object size is not multiple of 2 - adjust this code");
3279 __ bind(L);
3280 // rdx must be > 0, no extra check needed here
3281 #endif
3282
3283 // initialize remaining object fields: rdx was a multiple of 8
3284 { Label loop;
3285 __ bind(loop);
3286 __ movptr(Address(rax, rdx, Address::times_8, sizeof(oopDesc) - 1*oopSize), rcx);
3287 NOT_LP64(__ movptr(Address(rax, rdx, Address::times_8, sizeof(oopDesc) - 2*oopSize), rcx));
3288 __ decrement(rdx);
3289 __ jcc(Assembler::notZero, loop);
3290 }
3291
3292 // initialize object header only.
3293 __ bind(initialize_header);
3294 if (UseBiasedLocking) {
3295 __ pop(rcx); // get saved klass back in the register.
3296 __ movptr(rbx, Address(rcx, Klass::prototype_header_offset_in_bytes() + klassOopDesc::klass_part_offset_in_bytes()));
3297 __ movptr(Address(rax, oopDesc::mark_offset_in_bytes ()), rbx);
3298 } else {
3299 __ movptr(Address(rax, oopDesc::mark_offset_in_bytes ()),
3300 (int32_t)markOopDesc::prototype()); // header
3301 __ pop(rcx); // get saved klass back in the register.
3302 }
3303 __ store_klass(rax, rcx); // klass
3304
3305 {
3306 SkipIfEqual skip_if(_masm, &DTraceAllocProbes, 0);
3307 // Trigger dtrace event for fastpath
3308 __ push(atos);
3309 __ call_VM_leaf(
3310 CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_object_alloc), rax);
3311 __ pop(atos);
3312 }
3313
3314 __ jmp(done);
3315 }
3316
|
963
964
965 void TemplateTable::aastore() {
966 Label is_null, ok_is_subtype, done;
967 transition(vtos, vtos);
968 // stack: ..., array, index, value
969 __ movptr(rax, at_tos()); // Value
970 __ movl(rcx, at_tos_p1()); // Index
971 __ movptr(rdx, at_tos_p2()); // Array
972
973 Address element_address(rdx, rcx, Address::times_4, arrayOopDesc::base_offset_in_bytes(T_OBJECT));
974 index_check_without_pop(rdx, rcx); // kills rbx,
975 // do array store check - check for NULL value first
976 __ testptr(rax, rax);
977 __ jcc(Assembler::zero, is_null);
978
979 // Move subklass into EBX
980 __ load_klass(rbx, rax);
981 // Move superklass into EAX
982 __ load_klass(rax, rdx);
983 __ movptr(rax, Address(rax, objArrayKlass::element_klass_offset()));
984 // Compress array+index*wordSize+12 into a single register. Frees ECX.
985 __ lea(rdx, element_address);
986
987 // Generate subtype check. Blows ECX. Resets EDI to locals.
988 // Superklass in EAX. Subklass in EBX.
989 __ gen_subtype_check( rbx, ok_is_subtype );
990
991 // Come here on failure
992 // object is at TOS
993 __ jump(ExternalAddress(Interpreter::_throw_ArrayStoreException_entry));
994
995 // Come here on success
996 __ bind(ok_is_subtype);
997
998 // Get the value to store
999 __ movptr(rax, at_rsp());
1000 // and store it with appropriate barrier
1001 do_oop_store(_masm, Address(rdx, 0), rax, _bs->kind(), true);
1002
1003 __ jmp(done);
2016 __ profile_switch_default(i);
2017 __ movl(j, Address(array, -2*wordSize));
2018 __ bswapl(j);
2019 LP64_ONLY(__ movslq(j, j));
2020 __ restore_bcp();
2021 __ restore_locals(); // restore rdi
2022 __ load_unsigned_byte(rbx, Address(rsi, j, Address::times_1));
2023 __ addptr(rsi, j);
2024 __ dispatch_only(vtos);
2025 }
2026
2027
// Emit the interpreter template for the *return bytecodes: pop the current
// activation and jump back to the caller. For _return_register_finalizer
// (the rewritten Object.<init> return), first register the receiver with the
// VM for finalization if its class has a finalizer.
2028 void TemplateTable::_return(TosState state) {
2029 transition(state, state);
2030 assert(_desc->calls_vm(), "inconsistent calls_vm information"); // call in remove_activation
2031
2032 if (_desc->bytecode() == Bytecodes::_return_register_finalizer) {
2033 assert(state == vtos, "only valid state");
// Load local 0 (the receiver) and fetch its klass.
2034 __ movptr(rax, aaddress(0));
2035 __ load_klass(rdi, rax);
// Read the klass' access flags and test the HAS_FINALIZER bit; if it is
// clear, skip the runtime call entirely.
2036 __ movl(rdi, Address(rdi, Klass::access_flags_offset()));
2037 __ testl(rdi, JVM_ACC_HAS_FINALIZER);
2038 Label skip_register_finalizer;
2039 __ jcc(Assembler::zero, skip_register_finalizer);
2040
// Receiver (rax) is still alive here; hand it to the VM so the object is
// enqueued for finalization.
2041 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::register_finalizer), rax);
2042
2043 __ bind(skip_register_finalizer);
2044 }
2045
// Tear down the interpreter frame; the return address ends up in rsi and we
// jump through it back to the caller.
2046 __ remove_activation(state, rsi);
2047 __ jmp(rsi);
2048 }
2049
2050
2051 // ----------------------------------------------------------------------------
2052 // Volatile variables demand their effects be made known to all CPUs in
2053 // order. Store buffers on most chips allow reads & writes to reorder; the
2054 // JMM's ReadAfterWrite.java test fails in -Xint mode without some kind of
2055 // memory barrier (i.e., it's not sufficient that the interpreter does not
2056 // reorder volatile references, the hardware also must not reorder them).
3171 Label done;
3172 Label initialize_header;
3173 Label initialize_object; // including clearing the fields
3174 Label allocate_shared;
3175
3176 __ get_cpool_and_tags(rcx, rax);
3177
3178 // Make sure the class we're about to instantiate has been resolved.
3179 // This is done before loading instanceKlass to be consistent with the order
3180 // how Constant Pool is updated (see constantPoolOopDesc::klass_at_put)
3181 const int tags_offset = typeArrayOopDesc::header_size(T_BYTE) * wordSize;
3182 __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), JVM_CONSTANT_Class);
3183 __ jcc(Assembler::notEqual, slow_case_no_pop);
3184
3185 // get instanceKlass
3186 __ movptr(rcx, Address(rcx, rdx, Address::times_ptr, sizeof(constantPoolOopDesc)));
3187 __ push(rcx); // save the contexts of klass for initializing the header
3188
3189 // make sure klass is initialized & doesn't have finalizer
3190 // make sure klass is fully initialized
3191 __ cmpl(Address(rcx, instanceKlass::init_state_offset()), instanceKlass::fully_initialized);
3192 __ jcc(Assembler::notEqual, slow_case);
3193
3194 // get instance_size in instanceKlass (scaled to a count of bytes)
3195 __ movl(rdx, Address(rcx, Klass::layout_helper_offset()));
3196 // test to see if it has a finalizer or is malformed in some way
3197 __ testl(rdx, Klass::_lh_instance_slow_path_bit);
3198 __ jcc(Assembler::notZero, slow_case);
3199
3200 //
3201 // Allocate the instance
3202 // 1) Try to allocate in the TLAB
3203 // 2) if fail and the object is large allocate in the shared Eden
3204 // 3) if the above fails (or is not applicable), go to a slow case
3205 // (creates a new TLAB, etc.)
3206
3207 const bool allow_shared_alloc =
3208 Universe::heap()->supports_inline_contig_alloc() && !CMSIncrementalMode;
3209
3210 const Register thread = rcx;
3211 if (UseTLAB || allow_shared_alloc) {
3212 __ get_thread(thread);
3213 }
3214
3215 if (UseTLAB) {
3276 // Ignore partial flag stall after shrl() since it is debug VM
3277 __ jccb(Assembler::carryClear, L);
3278 __ stop("object size is not multiple of 2 - adjust this code");
3279 __ bind(L);
3280 // rdx must be > 0, no extra check needed here
3281 #endif
3282
3283 // initialize remaining object fields: rdx was a multiple of 8
3284 { Label loop;
3285 __ bind(loop);
3286 __ movptr(Address(rax, rdx, Address::times_8, sizeof(oopDesc) - 1*oopSize), rcx);
3287 NOT_LP64(__ movptr(Address(rax, rdx, Address::times_8, sizeof(oopDesc) - 2*oopSize), rcx));
3288 __ decrement(rdx);
3289 __ jcc(Assembler::notZero, loop);
3290 }
3291
3292 // initialize object header only.
3293 __ bind(initialize_header);
3294 if (UseBiasedLocking) {
3295 __ pop(rcx); // get saved klass back in the register.
3296 __ movptr(rbx, Address(rcx, Klass::prototype_header_offset()));
3297 __ movptr(Address(rax, oopDesc::mark_offset_in_bytes ()), rbx);
3298 } else {
3299 __ movptr(Address(rax, oopDesc::mark_offset_in_bytes ()),
3300 (int32_t)markOopDesc::prototype()); // header
3301 __ pop(rcx); // get saved klass back in the register.
3302 }
3303 __ store_klass(rax, rcx); // klass
3304
3305 {
3306 SkipIfEqual skip_if(_masm, &DTraceAllocProbes, 0);
3307 // Trigger dtrace event for fastpath
3308 __ push(atos);
3309 __ call_VM_leaf(
3310 CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_object_alloc), rax);
3311 __ pop(atos);
3312 }
3313
3314 __ jmp(done);
3315 }
3316
|