void TemplateTable::aastore() {
  Label store_ok, is_null, done;
  transition(vtos, vtos);
  __ ld_ptr(Lesp, Interpreter::expr_offset_in_bytes(0), Otos_i);
  __ ld(Lesp, Interpreter::expr_offset_in_bytes(1), O2);     // get index
  __ ld_ptr(Lesp, Interpreter::expr_offset_in_bytes(2), O3); // get array
  // Otos_i: val
  // O2: index
  // O3: array
  __ verify_oop(Otos_i);
  __ index_check_without_pop(O3, O2, UseCompressedOops ? 2 : LogBytesPerWord, G3_scratch, O1);

  // do array store check - check for NULL value first
  __ br_null_short( Otos_i, Assembler::pn, is_null );
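  // Why this check exists (illustrative, not generated here): covariant
  // array stores compile to aastore, so code such as
  //   Object[] a = new String[1];
  //   a[0] = new Object();   // must throw ArrayStoreException
  // reaches the subtype check below, while a null store always succeeds
  // and bypasses it via is_null.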

  __ load_klass(O3, O4);     // get array klass
  __ load_klass(Otos_i, O5); // get value klass

  // do fast instanceof cache test

  __ ld_ptr(O4, sizeof(oopDesc) + objArrayKlass::element_klass_offset_in_bytes(), O4);

  assert(Otos_i == O0, "just checking");

  // Otos_i: value
  // O1: addr - offset
  // O2: index
  // O3: array
  // O4: array element klass
  // O5: value klass

  // Address element(O1, 0, arrayOopDesc::base_offset_in_bytes(T_OBJECT));

  // Generate a fast subtype check. Branch to store_ok if no
  // failure. Throw if failure.
  __ gen_subtype_check( O5, O4, G3_scratch, G4_scratch, G1_scratch, store_ok );

  // Not a subtype; so must throw exception
  __ throw_if_not_x( Assembler::never, Interpreter::_throw_ArrayStoreException_entry, G3_scratch );
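  // Shape of the fast path (a sketch, for orientation only; the real
  // gen_subtype_check also consults the secondary-supers cache):
  //   if (value_klass == element_klass)               goto store_ok; // exact hit
  //   if (value_klass->is_subtype_of(element_klass))  goto store_ok;
  //   /* otherwise fall through to the unconditional throw above */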

  // Store is OK.

// ... (excerpt gap: the remainder of aastore and the intervening
// templates are elided) ...

  __ bind(default_case); // fall through (if not profiling)
  __ profile_switch_default(Ri);

  __ bind(continue_execution);
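  // Rj is assumed (set in the elided code above) to hold the selected
  // entry's branch offset; the add below retargets the bytecode pointer
  // before dispatching.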
  __ add( Lbcp, Rj, Lbcp );
  __ dispatch_next( vtos );
}


void TemplateTable::_return(TosState state) {
  transition(state, state);
  assert(_desc->calls_vm(), "inconsistent calls_vm information");

  if (_desc->bytecode() == Bytecodes::_return_register_finalizer) {
    assert(state == vtos, "only valid state");
    __ mov(G0, G3_scratch);
    __ access_local_ptr(G3_scratch, Otos_i);
    __ load_klass(Otos_i, O2);
    __ set(JVM_ACC_HAS_FINALIZER, G3);
    __ ld(O2, Klass::access_flags_offset_in_bytes() + sizeof(oopDesc), O2);
    __ andcc(G3, O2, G0);
    Label skip_register_finalizer;
    __ br(Assembler::zero, false, Assembler::pn, skip_register_finalizer);
    __ delayed()->nop();

    // Call out to do finalizer registration
    __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::register_finalizer), Otos_i);

    __ bind(skip_register_finalizer);
  }
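  // Background: returns in java.lang.Object.<init> are rewritten to
  // _return_register_finalizer, and the access-flags test above ensures
  // that only receivers whose class actually declares finalize() take
  // the register_finalizer call.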

  __ remove_activation(state, /* throw_monitor_exception */ true);

  // The caller's SP was adjusted upon method entry to accommodate
  // the callee's non-argument locals. Undo that adjustment.
  __ ret();                           // return to caller
  __ delayed()->restore(I5_savedSP, G0, SP);
}


// ... (excerpt gap: intervening templates elided; the fragment below is
// from the body of TemplateTable::_new(), as its slow-case call to
// InterpreterRuntime::_new indicates) ...

  Register RinstanceKlass = O1;
  Register Roffset = O3;
  Register Rscratch = O4;

  __ get_2_byte_integer_at_bcp(1, Rscratch, Roffset, InterpreterMacroAssembler::Unsigned);
  __ get_cpool_and_tags(Rscratch, G3_scratch);
  // Make sure the class we're about to instantiate has been resolved.
  // This is done before loading the instanceKlass to be consistent with
  // the order in which the constant pool is updated (see
  // constantPoolOopDesc::klass_at_put).
  __ add(G3_scratch, typeArrayOopDesc::header_size(T_BYTE) * wordSize, G3_scratch);
  __ ldub(G3_scratch, Roffset, G3_scratch);
  __ cmp(G3_scratch, JVM_CONSTANT_Class);
  __ br(Assembler::notEqual, false, Assembler::pn, slow_case);
  __ delayed()->sll(Roffset, LogBytesPerWord, Roffset);
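  // What the probe reads (for orientation): the tags array is a byte
  // typeArrayOop with one tag per constant-pool slot, i.e. roughly
  //   tag = ((u1*)tags + header_size)[cp_index];
  // only a resolved JVM_CONSTANT_Class entry may stay on the fast path.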
  // get instanceKlass
  //__ sll(Roffset, LogBytesPerWord, Roffset); // executed in delay slot
  __ add(Roffset, sizeof(constantPoolOopDesc), Roffset);
  __ ld_ptr(Rscratch, Roffset, RinstanceKlass);

  // make sure klass is fully initialized:
  __ ld(RinstanceKlass, instanceKlass::init_state_offset_in_bytes() + sizeof(oopDesc), G3_scratch);
  __ cmp(G3_scratch, instanceKlass::fully_initialized);
  __ br(Assembler::notEqual, false, Assembler::pn, slow_case);
  __ delayed()->ld(RinstanceKlass, Klass::layout_helper_offset_in_bytes() + sizeof(oopDesc), Roffset);

  // get instance_size in instanceKlass (already aligned)
  //__ ld(RinstanceKlass, Klass::layout_helper_offset_in_bytes() + sizeof(oopDesc), Roffset); // executed in delay slot

  // Make sure the klass has no finalizer and is not abstract, an
  // interface, or java/lang/Class (each of these forces the slow path).
  __ btst(Klass::_lh_instance_slow_path_bit, Roffset);
  __ br(Assembler::notZero, false, Assembler::pn, slow_case);
  __ delayed()->nop();
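  // The layout helper packs the (already aligned) instance size together
  // with this slow-path bit, so a single btst both vets the fast path and
  // leaves the allocation size in Roffset.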

  // allocate the instance
  // 1) Try to allocate in the TLAB.
  // 2) If that fails and the TLAB is not yet full enough to discard,
  //    allocate in the shared Eden.
  // 3) If the above fails (or is not applicable), go to a slow case
  //    (which creates a new TLAB, etc.).

  const bool allow_shared_alloc =
    Universe::heap()->supports_inline_contig_alloc() && !CMSIncrementalMode;
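  // That is, the shared-Eden fast path (step 2 above) is only attempted
  // when the collector exposes a contiguous allocation region and CMS
  // incremental mode is off; otherwise allocation falls through to the
  // slow case.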

  if (UseTLAB) {
    Register RoldTopValue = RallocatedObject;
    Register RtlabWasteLimitValue = G3_scratch;
    Register RnewTopValue = G1_scratch;
    Register RendValue = Rscratch;

// ... (excerpt gap: the TLAB and shared-Eden allocation paths and the
// head of the field-zeroing loop are elided) ...

      __ st_ptr(G0, G3_scratch, Roffset);
      __ br(Assembler::notEqual, false, Assembler::pt, loop);
      __ delayed()->subcc(Roffset, wordSize, Roffset);
    }
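    // (SPARC idiom in the loop above: the subcc in the branch delay slot
    // decrements Roffset and sets the condition codes tested on the next
    // trip, so the object body is zeroed one word per iteration, top down.)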
    __ ba_short(initialize_header);
  }

  // slow case
  __ bind(slow_case);
  __ get_2_byte_integer_at_bcp(1, G3_scratch, O2, InterpreterMacroAssembler::Unsigned);
  __ get_constant_pool(O1);

  call_VM(Otos_i, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), O1, O2);
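  // The cp index is re-fetched from the bytecode stream because the fast
  // path above scaled and overwrote the original value; the runtime call
  // then handles resolution, initialization checks, and allocation.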

  __ ba_short(done);

  // Initialize the header: mark, klass
  __ bind(initialize_header);

  if (UseBiasedLocking) {
    __ ld_ptr(RinstanceKlass, Klass::prototype_header_offset_in_bytes() + sizeof(oopDesc), G4_scratch);
  } else {
    __ set((intptr_t)markOopDesc::prototype(), G4_scratch);
  }
  __ st_ptr(G4_scratch, RallocatedObject, oopDesc::mark_offset_in_bytes()); // mark
  __ store_klass_gap(G0, RallocatedObject);         // klass gap if compressed
  __ store_klass(RinstanceKlass, RallocatedObject); // klass (last for cms)
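  // Ordering note: the mark word (the klass's biasable prototype header
  // under biased locking, otherwise the neutral markOopDesc::prototype())
  // is written first; the klass pointer is published last so a concurrent
  // CMS scan never parses a half-initialized object.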

  {
    SkipIfEqual skip_if(
      _masm, G4_scratch, &DTraceAllocProbes, Assembler::zero);
    // Trigger dtrace event
    __ push(atos);
    __ call_VM_leaf(noreg,
                    CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_object_alloc), O0);
    __ pop(atos);
  }
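  // The push/pop of atos preserves the new oop across the C call (the
  // leaf call clobbers the O registers); SkipIfEqual turns the whole
  // probe into a no-op unless DTraceAllocProbes is set.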

  // continue
  __ bind(done);
}