314 {
315 transition(vtos, vtos);
316 Label call_ldc, notFloat, notClass, notInt, Done;
317
318 if (wide) {
319 __ get_unsigned_2_byte_index_at_bcp(r1, 1);
320 } else {
321 __ load_unsigned_byte(r1, at_bcp(1));
322 }
323 __ get_cpool_and_tags(r2, r0);
324
325 const int base_offset = ConstantPool::header_size() * wordSize;
326 const int tags_offset = Array<u1>::base_offset_in_bytes();
327
328 // get type
329 __ add(r3, r1, tags_offset);
330 __ lea(r3, Address(r0, r3));
331 __ ldarb(r3, r3);
332
333 // unresolved class - get the resolved class
334 __ cmp(r3, (u1)JVM_CONSTANT_UnresolvedClass); // (u1): tag is a byte; cmp takes an 8-bit unsigned immediate
335 __ br(Assembler::EQ, call_ldc);
336
337 // unresolved class in error state - call into runtime to throw the error
338 // from the first resolution attempt
339 __ cmp(r3, (u1)JVM_CONSTANT_UnresolvedClassInError);
340 __ br(Assembler::EQ, call_ldc);
341
342 // resolved class - need to call vm to get java mirror of the class
343 __ cmp(r3, (u1)JVM_CONSTANT_Class);
344 __ br(Assembler::NE, notClass);
345
346 __ bind(call_ldc);
347 __ mov(c_rarg1, wide);
348 call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::ldc), c_rarg1);
349 __ push_ptr(r0);
350 __ verify_oop(r0);
351 __ b(Done);
352
353 __ bind(notClass);
354 __ cmp(r3, (u1)JVM_CONSTANT_Float);
355 __ br(Assembler::NE, notFloat);
356 // ftos
357 __ adds(r1, r2, r1, Assembler::LSL, 3);
358 __ ldrs(v0, Address(r1, base_offset));
359 __ push_f();
360 __ b(Done);
361
362 __ bind(notFloat);
363
364 __ cmp(r3, (u1)JVM_CONSTANT_Integer);
365 __ br(Assembler::NE, notInt);
366
367 // itos
368 __ adds(r1, r2, r1, Assembler::LSL, 3);
369 __ ldrw(r0, Address(r1, base_offset));
370 __ push_i(r0);
371 __ b(Done);
372
373 __ bind(notInt);
374 condy_helper(Done);
375
376 __ bind(Done);
377 }
378
379 // Fast path for caching oop constants.
380 void TemplateTable::fast_aldc(bool wide)
381 {
382 transition(vtos, atos);
383
384 Register result = r0;
2316 // volatile-stores although it could just as well go before
2317 // volatile-loads.
2318
2319 void TemplateTable::resolve_cache_and_index(int byte_no,
2320 Register Rcache,
2321 Register index,
2322 size_t index_size) {
2323 const Register temp = r19;
2324 assert_different_registers(Rcache, index, temp);
2325
2326 Label resolved;
2327
2328 Bytecodes::Code code = bytecode();
2329 switch (code) {
2330 case Bytecodes::_nofast_getfield: code = Bytecodes::_getfield; break;
2331 case Bytecodes::_nofast_putfield: code = Bytecodes::_putfield; break;
2332 }
2333
2334 assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
2335 __ get_cache_and_index_and_bytecode_at_bcp(Rcache, index, temp, byte_no, 1, index_size);
2336 __ subs(zr, temp, (int) code); // have we resolved this bytecode? (subs/zr: the bytecode value may not fit cmp's 8-bit unsigned immediate)
2337 __ br(Assembler::EQ, resolved);
2338
2339 // resolve first time through
2340 address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
2341 __ mov(temp, (int) code);
2342 __ call_VM(noreg, entry, temp);
2343
2344 // Update registers with resolved info
2345 __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size);
2346 // n.b. unlike x86 Rcache is now rcpool plus the indexed offset
2347 // so all clients ofthis method must be modified accordingly
2348 __ bind(resolved);
2349 }
2350
2351 // The Rcache and index registers must be set before call
2352 // n.b unlike x86 cache already includes the index offset
2353 void TemplateTable::load_field_cp_cache_entry(Register obj,
2354 Register cache,
2355 Register index,
2356 Register off,
2498 // the mask is not needed. aarch64 just uses bitfield extract
2499 __ ubfxw(flags, raw_flags, ConstantPoolCacheEntry::tos_state_shift,
2500 ConstantPoolCacheEntry::tos_state_bits);
2501
2502 assert(btos == 0, "change code, btos != 0");
2503 __ cbnz(flags, notByte);
2504
2505 // Don't rewrite getstatic, only getfield
2506 if (is_static) rc = may_not_rewrite;
2507
2508 // btos
2509 __ access_load_at(T_BYTE, IN_HEAP, r0, field, noreg, noreg);
2510 __ push(btos);
2511 // Rewrite bytecode to be faster
2512 if (rc == may_rewrite) {
2513 patch_bytecode(Bytecodes::_fast_bgetfield, bc, r1);
2514 }
2515 __ b(Done);
2516
2517 __ bind(notByte);
2518 __ cmp(flags, (u1)ztos); // (u1): tos-state constants are compared as 8-bit unsigned immediates
2519 __ br(Assembler::NE, notBool);
2520
2521 // ztos (same code as btos)
2522 __ access_load_at(T_BOOLEAN, IN_HEAP, r0, field, noreg, noreg);
2523 __ push(ztos);
2524 // Rewrite bytecode to be faster
2525 if (rc == may_rewrite) {
2526 // use btos rewriting, no truncating to t/f bit is needed for getfield.
2527 patch_bytecode(Bytecodes::_fast_bgetfield, bc, r1);
2528 }
2529 __ b(Done);
2530
2531 __ bind(notBool);
2532 __ cmp(flags, (u1)atos);
2533 __ br(Assembler::NE, notObj);
2534 // atos
2535 do_oop_load(_masm, field, r0, IN_HEAP);
2536 __ push(atos);
2537 if (rc == may_rewrite) {
2538 patch_bytecode(Bytecodes::_fast_agetfield, bc, r1);
2539 }
2540 __ b(Done);
2541
2542 __ bind(notObj);
2543 __ cmp(flags, (u1)itos);
2544 __ br(Assembler::NE, notInt);
2545 // itos
2546 __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg);
2547 __ push(itos);
2548 // Rewrite bytecode to be faster
2549 if (rc == may_rewrite) {
2550 patch_bytecode(Bytecodes::_fast_igetfield, bc, r1);
2551 }
2552 __ b(Done);
2553
2554 __ bind(notInt);
2555 __ cmp(flags, (u1)ctos);
2556 __ br(Assembler::NE, notChar);
2557 // ctos
2558 __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg);
2559 __ push(ctos);
2560 // Rewrite bytecode to be faster
2561 if (rc == may_rewrite) {
2562 patch_bytecode(Bytecodes::_fast_cgetfield, bc, r1);
2563 }
2564 __ b(Done);
2565
2566 __ bind(notChar);
2567 __ cmp(flags, (u1)stos);
2568 __ br(Assembler::NE, notShort);
2569 // stos
2570 __ access_load_at(T_SHORT, IN_HEAP, r0, field, noreg, noreg);
2571 __ push(stos);
2572 // Rewrite bytecode to be faster
2573 if (rc == may_rewrite) {
2574 patch_bytecode(Bytecodes::_fast_sgetfield, bc, r1);
2575 }
2576 __ b(Done);
2577
2578 __ bind(notShort);
2579 __ cmp(flags, (u1)ltos);
2580 __ br(Assembler::NE, notLong);
2581 // ltos
2582 __ access_load_at(T_LONG, IN_HEAP, r0, field, noreg, noreg);
2583 __ push(ltos);
2584 // Rewrite bytecode to be faster
2585 if (rc == may_rewrite) {
2586 patch_bytecode(Bytecodes::_fast_lgetfield, bc, r1);
2587 }
2588 __ b(Done);
2589
2590 __ bind(notLong);
2591 __ cmp(flags, (u1)ftos);
2592 __ br(Assembler::NE, notFloat);
2593 // ftos
2594 __ access_load_at(T_FLOAT, IN_HEAP, noreg /* ftos */, field, noreg, noreg);
2595 __ push(ftos);
2596 // Rewrite bytecode to be faster
2597 if (rc == may_rewrite) {
2598 patch_bytecode(Bytecodes::_fast_fgetfield, bc, r1);
2599 }
2600 __ b(Done);
2601
2602 __ bind(notFloat);
2603 #ifdef ASSERT
2604 __ cmp(flags, (u1)dtos);
2605 __ br(Assembler::NE, notDouble);
2606 #endif
2607 // dtos
2608 __ access_load_at(T_DOUBLE, IN_HEAP, noreg /* ftos */, field, noreg, noreg);
2609 __ push(dtos);
2610 // Rewrite bytecode to be faster
2611 if (rc == may_rewrite) {
2612 patch_bytecode(Bytecodes::_fast_dgetfield, bc, r1);
2613 }
2614 #ifdef ASSERT
2615 __ b(Done);
2616
2617 __ bind(notDouble);
2618 __ stop("Bad state");
2619 #endif
2620
2621 __ bind(Done);
2622
2623 Label notVolatile;
2624 __ tbz(raw_flags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
2734 __ ubfxw(flags, flags, ConstantPoolCacheEntry::tos_state_shift, ConstantPoolCacheEntry::tos_state_bits);
2735
2736 assert(btos == 0, "change code, btos != 0");
2737 __ cbnz(flags, notByte);
2738
2739 // Don't rewrite putstatic, only putfield
2740 if (is_static) rc = may_not_rewrite;
2741
2742 // btos
2743 {
2744 __ pop(btos);
2745 if (!is_static) pop_and_check_object(obj);
2746 __ access_store_at(T_BYTE, IN_HEAP, field, r0, noreg, noreg);
2747 if (rc == may_rewrite) {
2748 patch_bytecode(Bytecodes::_fast_bputfield, bc, r1, true, byte_no);
2749 }
2750 __ b(Done);
2751 }
2752
2753 __ bind(notByte);
2754 __ cmp(flags, (u1)ztos); // (u1): tos-state constants are compared as 8-bit unsigned immediates
2755 __ br(Assembler::NE, notBool);
2756
2757 // ztos
2758 {
2759 __ pop(ztos);
2760 if (!is_static) pop_and_check_object(obj);
2761 __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg);
2762 if (rc == may_rewrite) {
2763 patch_bytecode(Bytecodes::_fast_zputfield, bc, r1, true, byte_no);
2764 }
2765 __ b(Done);
2766 }
2767
2768 __ bind(notBool);
2769 __ cmp(flags, (u1)atos);
2770 __ br(Assembler::NE, notObj);
2771
2772 // atos
2773 {
2774 __ pop(atos);
2775 if (!is_static) pop_and_check_object(obj);
2776 // Store into the field
2777 do_oop_store(_masm, field, r0, IN_HEAP);
2778 if (rc == may_rewrite) {
2779 patch_bytecode(Bytecodes::_fast_aputfield, bc, r1, true, byte_no);
2780 }
2781 __ b(Done);
2782 }
2783
2784 __ bind(notObj);
2785 __ cmp(flags, (u1)itos);
2786 __ br(Assembler::NE, notInt);
2787
2788 // itos
2789 {
2790 __ pop(itos);
2791 if (!is_static) pop_and_check_object(obj);
2792 __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg);
2793 if (rc == may_rewrite) {
2794 patch_bytecode(Bytecodes::_fast_iputfield, bc, r1, true, byte_no);
2795 }
2796 __ b(Done);
2797 }
2798
2799 __ bind(notInt);
2800 __ cmp(flags, (u1)ctos);
2801 __ br(Assembler::NE, notChar);
2802
2803 // ctos
2804 {
2805 __ pop(ctos);
2806 if (!is_static) pop_and_check_object(obj);
2807 __ access_store_at(T_CHAR, IN_HEAP, field, r0, noreg, noreg);
2808 if (rc == may_rewrite) {
2809 patch_bytecode(Bytecodes::_fast_cputfield, bc, r1, true, byte_no);
2810 }
2811 __ b(Done);
2812 }
2813
2814 __ bind(notChar);
2815 __ cmp(flags, (u1)stos);
2816 __ br(Assembler::NE, notShort);
2817
2818 // stos
2819 {
2820 __ pop(stos);
2821 if (!is_static) pop_and_check_object(obj);
2822 __ access_store_at(T_SHORT, IN_HEAP, field, r0, noreg, noreg);
2823 if (rc == may_rewrite) {
2824 patch_bytecode(Bytecodes::_fast_sputfield, bc, r1, true, byte_no);
2825 }
2826 __ b(Done);
2827 }
2828
2829 __ bind(notShort);
2830 __ cmp(flags, (u1)ltos);
2831 __ br(Assembler::NE, notLong);
2832
2833 // ltos
2834 {
2835 __ pop(ltos);
2836 if (!is_static) pop_and_check_object(obj);
2837 __ access_store_at(T_LONG, IN_HEAP, field, r0, noreg, noreg);
2838 if (rc == may_rewrite) {
2839 patch_bytecode(Bytecodes::_fast_lputfield, bc, r1, true, byte_no);
2840 }
2841 __ b(Done);
2842 }
2843
2844 __ bind(notLong);
2845 __ cmp(flags, (u1)ftos);
2846 __ br(Assembler::NE, notFloat);
2847
2848 // ftos
2849 {
2850 __ pop(ftos);
2851 if (!is_static) pop_and_check_object(obj);
2852 __ access_store_at(T_FLOAT, IN_HEAP, field, noreg /* ftos */, noreg, noreg);
2853 if (rc == may_rewrite) {
2854 patch_bytecode(Bytecodes::_fast_fputfield, bc, r1, true, byte_no);
2855 }
2856 __ b(Done);
2857 }
2858
2859 __ bind(notFloat);
2860 #ifdef ASSERT
2861 __ cmp(flags, (u1)dtos);
2862 __ br(Assembler::NE, notDouble);
2863 #endif
2864
2865 // dtos
2866 {
2867 __ pop(dtos);
2868 if (!is_static) pop_and_check_object(obj);
2869 __ access_store_at(T_DOUBLE, IN_HEAP, field, noreg /* dtos */, noreg, noreg);
2870 if (rc == may_rewrite) {
2871 patch_bytecode(Bytecodes::_fast_dputfield, bc, r1, true, byte_no);
2872 }
2873 }
2874
2875 #ifdef ASSERT
2876 __ b(Done);
2877
2878 __ bind(notDouble);
2879 __ stop("Bad state");
2880 #endif
2881
3517 //-----------------------------------------------------------------------------
3518 // Allocation
3519
3520 void TemplateTable::_new() {
3521 transition(vtos, atos);
3522
3523 __ get_unsigned_2_byte_index_at_bcp(r3, 1);
3524 Label slow_case;
3525 Label done;
3526 Label initialize_header;
3527 Label initialize_object; // including clearing the fields
3528
3529 __ get_cpool_and_tags(r4, r0);
3530 // Make sure the class we're about to instantiate has been resolved.
3531 // This is done before loading InstanceKlass to be consistent with the order
3532 // how Constant Pool is updated (see ConstantPool::klass_at_put)
3533 const int tags_offset = Array<u1>::base_offset_in_bytes();
3534 __ lea(rscratch1, Address(r0, r3, Address::lsl(0)));
3535 __ lea(rscratch1, Address(rscratch1, tags_offset));
3536 __ ldarb(rscratch1, rscratch1);
3537 __ cmp(rscratch1, (u1)JVM_CONSTANT_Class); // (u1): tag is a byte; cmp takes an 8-bit unsigned immediate
3538 __ br(Assembler::NE, slow_case);
3539
3540 // get InstanceKlass
3541 __ load_resolved_klass_at_offset(r4, r3, r4, rscratch1);
3542
3543 // make sure klass is initialized & doesn't have finalizer
3544 // make sure klass is fully initialized
3545 __ ldrb(rscratch1, Address(r4, InstanceKlass::init_state_offset()));
3546 __ cmp(rscratch1, (u1)InstanceKlass::fully_initialized); // (u1): init_state is a byte-sized value
3547 __ br(Assembler::NE, slow_case);
3548
3549 // get instance_size in InstanceKlass (scaled to a count of bytes)
3550 __ ldrw(r3,
3551 Address(r4,
3552 Klass::layout_helper_offset()));
3553 // test to see if it has a finalizer or is malformed in some way
3554 __ tbnz(r3, exact_log2(Klass::_lh_instance_slow_path_bit), slow_case);
3555
3556 // Allocate the instance:
3557 // If TLAB is enabled:
3558 // Try to allocate in the TLAB.
3559 // If fails, go to the slow path.
3560 // Else If inline contiguous allocations are enabled:
3561 // Try to allocate in eden.
3562 // If fails due to heap end, go to slow path.
3563 //
3564 // If TLAB is enabled OR inline contiguous is enabled:
3565 // Initialize the allocation.
3566 // Exit.
3666
3667 void TemplateTable::arraylength() { // arraylength bytecode: consumes arrayref in r0, produces length in r0
3668 transition(atos, itos);
3669 __ null_check(r0, arrayOopDesc::length_offset_in_bytes()); // null check of the array ref; the offset identifies the access that would fault
3670 __ ldrw(r0, Address(r0, arrayOopDesc::length_offset_in_bytes())); // load the 32-bit length field
3671 }
3672
3673 void TemplateTable::checkcast()
3674 {
3675 transition(atos, atos);
3676 Label done, is_null, ok_is_subtype, quicked, resolved;
3677 __ cbz(r0, is_null);
3678
3679 // Get cpool & tags index
3680 __ get_cpool_and_tags(r2, r3); // r2=cpool, r3=tags array
3681 __ get_unsigned_2_byte_index_at_bcp(r19, 1); // r19=index
3682 // See if bytecode has already been quicked
3683 __ add(rscratch1, r3, Array<u1>::base_offset_in_bytes());
3684 __ lea(r1, Address(rscratch1, r19));
3685 __ ldarb(r1, r1);
3686 __ cmp(r1, (u1)JVM_CONSTANT_Class); // (u1): tag is a byte; cmp takes an 8-bit unsigned immediate
3687 __ br(Assembler::EQ, quicked);
3688
3689 __ push(atos); // save receiver for result, and for GC
3690 call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
3691 // vm_result_2 has metadata result
3692 __ get_vm_result_2(r0, rthread);
3693 __ pop(r3); // restore receiver
3694 __ b(resolved);
3695
3696 // Get superklass in r0 and subklass in r3
3697 __ bind(quicked);
3698 __ mov(r3, r0); // Save object in r3; r0 needed for subtype check
3699 __ load_resolved_klass_at_offset(r2, r19, r0, rscratch1); // r0 = klass
3700
3701 __ bind(resolved);
3702 __ load_klass(r19, r3);
3703
3704 // Generate subtype check. Blows r2, r5. Object in r3.
3705 // Superklass in r0. Subklass in r19.
3706 __ gen_subtype_check(r19, ok_is_subtype);
3720 __ bind(is_null);
3721 __ profile_null_seen(r2);
3722 } else {
3723 __ bind(is_null); // same as 'done'
3724 }
3725 __ bind(done);
3726 }
3727
3728 void TemplateTable::instanceof() {
3729 transition(atos, itos);
3730 Label done, is_null, ok_is_subtype, quicked, resolved;
3731 __ cbz(r0, is_null);
3732
3733 // Get cpool & tags index
3734 __ get_cpool_and_tags(r2, r3); // r2=cpool, r3=tags array
3735 __ get_unsigned_2_byte_index_at_bcp(r19, 1); // r19=index
3736 // See if bytecode has already been quicked
3737 __ add(rscratch1, r3, Array<u1>::base_offset_in_bytes());
3738 __ lea(r1, Address(rscratch1, r19));
3739 __ ldarb(r1, r1);
3740 __ cmp(r1, (u1)JVM_CONSTANT_Class); // (u1): tag is a byte; cmp takes an 8-bit unsigned immediate
3741 __ br(Assembler::EQ, quicked);
3742
3743 __ push(atos); // save receiver for result, and for GC
3744 call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
3745 // vm_result_2 has metadata result
3746 __ get_vm_result_2(r0, rthread);
3747 __ pop(r3); // restore receiver
3748 __ verify_oop(r3);
3749 __ load_klass(r3, r3);
3750 __ b(resolved);
3751
3752 // Get superklass in r0 and subklass in r3
3753 __ bind(quicked);
3754 __ load_klass(r3, r0);
3755 __ load_resolved_klass_at_offset(r2, r19, r0, rscratch1);
3756
3757 __ bind(resolved);
3758
3759 // Generate subtype check. Blows r2, r5
3760 // Superklass in r0. Subklass in r3.
|
314 {
315 transition(vtos, vtos);
316 Label call_ldc, notFloat, notClass, notInt, Done;
317
318 if (wide) {
319 __ get_unsigned_2_byte_index_at_bcp(r1, 1); // wide index: two bytecode bytes
320 } else {
321 __ load_unsigned_byte(r1, at_bcp(1)); // narrow index: one bytecode byte
322 }
323 __ get_cpool_and_tags(r2, r0); // r2 = constant pool, r0 = tags array
324
325 const int base_offset = ConstantPool::header_size() * wordSize;
326 const int tags_offset = Array<u1>::base_offset_in_bytes();
327
328 // get type
329 __ add(r3, r1, tags_offset);
330 __ lea(r3, Address(r0, r3));
331 __ ldarb(r3, r3); // load-acquire of the tag byte
332
333 // unresolved class - get the resolved class
334 __ cmp(r3, (u1)JVM_CONSTANT_UnresolvedClass); // (u1): tag byte compared as 8-bit unsigned immediate
335 __ br(Assembler::EQ, call_ldc);
336
337 // unresolved class in error state - call into runtime to throw the error
338 // from the first resolution attempt
339 __ cmp(r3, (u1)JVM_CONSTANT_UnresolvedClassInError);
340 __ br(Assembler::EQ, call_ldc);
341
342 // resolved class - need to call vm to get java mirror of the class
343 __ cmp(r3, (u1)JVM_CONSTANT_Class);
344 __ br(Assembler::NE, notClass);
345
346 __ bind(call_ldc);
347 __ mov(c_rarg1, wide); // pass the 'wide' flag to the runtime
348 call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::ldc), c_rarg1);
349 __ push_ptr(r0);
350 __ verify_oop(r0);
351 __ b(Done);
352
353 __ bind(notClass);
354 __ cmp(r3, (u1)JVM_CONSTANT_Float);
355 __ br(Assembler::NE, notFloat);
356 // ftos
357 __ adds(r1, r2, r1, Assembler::LSL, 3); // r1 = cpool + index*8
358 __ ldrs(v0, Address(r1, base_offset));
359 __ push_f();
360 __ b(Done);
361
362 __ bind(notFloat);
363
364 __ cmp(r3, (u1)JVM_CONSTANT_Integer);
365 __ br(Assembler::NE, notInt);
366
367 // itos
368 __ adds(r1, r2, r1, Assembler::LSL, 3); // r1 = cpool + index*8
369 __ ldrw(r0, Address(r1, base_offset));
370 __ push_i(r0);
371 __ b(Done);
372
373 __ bind(notInt);
374 condy_helper(Done); // remaining tags are handled by condy_helper
375
376 __ bind(Done);
377 }
378
379 // Fast path for caching oop constants.
380 void TemplateTable::fast_aldc(bool wide)
381 {
382 transition(vtos, atos);
383
384 Register result = r0;
2316 // volatile-stores although it could just as well go before
2317 // volatile-loads.
2318
2319 void TemplateTable::resolve_cache_and_index(int byte_no,
2320 Register Rcache,
2321 Register index,
2322 size_t index_size) {
2323 const Register temp = r19; // holds the indicator bytecode read from the cache entry
2324 assert_different_registers(Rcache, index, temp);
2325
2326 Label resolved;
2327
2328 Bytecodes::Code code = bytecode();
2329 switch (code) {
2330 case Bytecodes::_nofast_getfield: code = Bytecodes::_getfield; break; // _nofast variants resolve as their plain form
2331 case Bytecodes::_nofast_putfield: code = Bytecodes::_putfield; break;
2332 }
2333
2334 assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
2335 __ get_cache_and_index_and_bytecode_at_bcp(Rcache, index, temp, byte_no, 1, index_size);
2336 __ subs(zr, temp, (int) code); // have we resolved this bytecode? (subs vs zr: compare only, result discarded)
2337 __ br(Assembler::EQ, resolved);
2338
2339 // resolve first time through
2340 address entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_from_cache);
2341 __ mov(temp, (int) code); // pass the bytecode being resolved
2342 __ call_VM(noreg, entry, temp);
2343
2344 // Update registers with resolved info
2345 __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size);
2346 // n.b. unlike x86 Rcache is now rcpool plus the indexed offset
2347 // so all clients ofthis method must be modified accordingly
2348 __ bind(resolved);
2349 }
2350
2351 // The Rcache and index registers must be set before call
2352 // n.b unlike x86 cache already includes the index offset
2353 void TemplateTable::load_field_cp_cache_entry(Register obj,
2354 Register cache,
2355 Register index,
2356 Register off,
2498 // the mask is not needed. aarch64 just uses bitfield extract
2499 __ ubfxw(flags, raw_flags, ConstantPoolCacheEntry::tos_state_shift,
2500 ConstantPoolCacheEntry::tos_state_bits);
2501
2502 assert(btos == 0, "change code, btos != 0");
2503 __ cbnz(flags, notByte);
2504
2505 // Don't rewrite getstatic, only getfield
2506 if (is_static) rc = may_not_rewrite;
2507
2508 // btos
2509 __ access_load_at(T_BYTE, IN_HEAP, r0, field, noreg, noreg);
2510 __ push(btos);
2511 // Rewrite bytecode to be faster
2512 if (rc == may_rewrite) {
2513 patch_bytecode(Bytecodes::_fast_bgetfield, bc, r1);
2514 }
2515 __ b(Done);
2516
2517 __ bind(notByte);
2518 __ cmp(flags, (u1)ztos);
2519 __ br(Assembler::NE, notBool);
2520
2521 // ztos (same code as btos)
2522 __ access_load_at(T_BOOLEAN, IN_HEAP, r0, field, noreg, noreg);
2523 __ push(ztos);
2524 // Rewrite bytecode to be faster
2525 if (rc == may_rewrite) {
2526 // use btos rewriting, no truncating to t/f bit is needed for getfield.
2527 patch_bytecode(Bytecodes::_fast_bgetfield, bc, r1);
2528 }
2529 __ b(Done);
2530
2531 __ bind(notBool);
2532 __ cmp(flags, (u1)atos);
2533 __ br(Assembler::NE, notObj);
2534 // atos
2535 do_oop_load(_masm, field, r0, IN_HEAP);
2536 __ push(atos);
2537 if (rc == may_rewrite) {
2538 patch_bytecode(Bytecodes::_fast_agetfield, bc, r1);
2539 }
2540 __ b(Done);
2541
2542 __ bind(notObj);
2543 __ cmp(flags, (u1)itos);
2544 __ br(Assembler::NE, notInt);
2545 // itos
2546 __ access_load_at(T_INT, IN_HEAP, r0, field, noreg, noreg);
2547 __ push(itos);
2548 // Rewrite bytecode to be faster
2549 if (rc == may_rewrite) {
2550 patch_bytecode(Bytecodes::_fast_igetfield, bc, r1);
2551 }
2552 __ b(Done);
2553
2554 __ bind(notInt);
2555 __ cmp(flags, (u1)ctos);
2556 __ br(Assembler::NE, notChar);
2557 // ctos
2558 __ access_load_at(T_CHAR, IN_HEAP, r0, field, noreg, noreg);
2559 __ push(ctos);
2560 // Rewrite bytecode to be faster
2561 if (rc == may_rewrite) {
2562 patch_bytecode(Bytecodes::_fast_cgetfield, bc, r1);
2563 }
2564 __ b(Done);
2565
2566 __ bind(notChar);
2567 __ cmp(flags, (u1)stos);
2568 __ br(Assembler::NE, notShort);
2569 // stos
2570 __ access_load_at(T_SHORT, IN_HEAP, r0, field, noreg, noreg);
2571 __ push(stos);
2572 // Rewrite bytecode to be faster
2573 if (rc == may_rewrite) {
2574 patch_bytecode(Bytecodes::_fast_sgetfield, bc, r1);
2575 }
2576 __ b(Done);
2577
2578 __ bind(notShort);
2579 __ cmp(flags, (u1)ltos);
2580 __ br(Assembler::NE, notLong);
2581 // ltos
2582 __ access_load_at(T_LONG, IN_HEAP, r0, field, noreg, noreg);
2583 __ push(ltos);
2584 // Rewrite bytecode to be faster
2585 if (rc == may_rewrite) {
2586 patch_bytecode(Bytecodes::_fast_lgetfield, bc, r1);
2587 }
2588 __ b(Done);
2589
2590 __ bind(notLong);
2591 __ cmp(flags, (u1)ftos);
2592 __ br(Assembler::NE, notFloat);
2593 // ftos
2594 __ access_load_at(T_FLOAT, IN_HEAP, noreg /* ftos */, field, noreg, noreg);
2595 __ push(ftos);
2596 // Rewrite bytecode to be faster
2597 if (rc == may_rewrite) {
2598 patch_bytecode(Bytecodes::_fast_fgetfield, bc, r1);
2599 }
2600 __ b(Done);
2601
2602 __ bind(notFloat);
2603 #ifdef ASSERT
2604 __ cmp(flags, (u1)dtos);
2605 __ br(Assembler::NE, notDouble);
2606 #endif
2607 // dtos
2608 __ access_load_at(T_DOUBLE, IN_HEAP, noreg /* ftos */, field, noreg, noreg);
2609 __ push(dtos);
2610 // Rewrite bytecode to be faster
2611 if (rc == may_rewrite) {
2612 patch_bytecode(Bytecodes::_fast_dgetfield, bc, r1);
2613 }
2614 #ifdef ASSERT
2615 __ b(Done);
2616
2617 __ bind(notDouble);
2618 __ stop("Bad state");
2619 #endif
2620
2621 __ bind(Done);
2622
2623 Label notVolatile;
2624 __ tbz(raw_flags, ConstantPoolCacheEntry::is_volatile_shift, notVolatile);
2734 __ ubfxw(flags, flags, ConstantPoolCacheEntry::tos_state_shift, ConstantPoolCacheEntry::tos_state_bits);
2735
2736 assert(btos == 0, "change code, btos != 0");
2737 __ cbnz(flags, notByte);
2738
2739 // Don't rewrite putstatic, only putfield
2740 if (is_static) rc = may_not_rewrite;
2741
2742 // btos
2743 {
2744 __ pop(btos);
2745 if (!is_static) pop_and_check_object(obj);
2746 __ access_store_at(T_BYTE, IN_HEAP, field, r0, noreg, noreg);
2747 if (rc == may_rewrite) {
2748 patch_bytecode(Bytecodes::_fast_bputfield, bc, r1, true, byte_no);
2749 }
2750 __ b(Done);
2751 }
2752
2753 __ bind(notByte);
2754 __ cmp(flags, (u1)ztos);
2755 __ br(Assembler::NE, notBool);
2756
2757 // ztos
2758 {
2759 __ pop(ztos);
2760 if (!is_static) pop_and_check_object(obj);
2761 __ access_store_at(T_BOOLEAN, IN_HEAP, field, r0, noreg, noreg);
2762 if (rc == may_rewrite) {
2763 patch_bytecode(Bytecodes::_fast_zputfield, bc, r1, true, byte_no);
2764 }
2765 __ b(Done);
2766 }
2767
2768 __ bind(notBool);
2769 __ cmp(flags, (u1)atos);
2770 __ br(Assembler::NE, notObj);
2771
2772 // atos
2773 {
2774 __ pop(atos);
2775 if (!is_static) pop_and_check_object(obj);
2776 // Store into the field
2777 do_oop_store(_masm, field, r0, IN_HEAP);
2778 if (rc == may_rewrite) {
2779 patch_bytecode(Bytecodes::_fast_aputfield, bc, r1, true, byte_no);
2780 }
2781 __ b(Done);
2782 }
2783
2784 __ bind(notObj);
2785 __ cmp(flags, (u1)itos);
2786 __ br(Assembler::NE, notInt);
2787
2788 // itos
2789 {
2790 __ pop(itos);
2791 if (!is_static) pop_and_check_object(obj);
2792 __ access_store_at(T_INT, IN_HEAP, field, r0, noreg, noreg);
2793 if (rc == may_rewrite) {
2794 patch_bytecode(Bytecodes::_fast_iputfield, bc, r1, true, byte_no);
2795 }
2796 __ b(Done);
2797 }
2798
2799 __ bind(notInt);
2800 __ cmp(flags, (u1)ctos);
2801 __ br(Assembler::NE, notChar);
2802
2803 // ctos
2804 {
2805 __ pop(ctos);
2806 if (!is_static) pop_and_check_object(obj);
2807 __ access_store_at(T_CHAR, IN_HEAP, field, r0, noreg, noreg);
2808 if (rc == may_rewrite) {
2809 patch_bytecode(Bytecodes::_fast_cputfield, bc, r1, true, byte_no);
2810 }
2811 __ b(Done);
2812 }
2813
2814 __ bind(notChar);
2815 __ cmp(flags, (u1)stos);
2816 __ br(Assembler::NE, notShort);
2817
2818 // stos
2819 {
2820 __ pop(stos);
2821 if (!is_static) pop_and_check_object(obj);
2822 __ access_store_at(T_SHORT, IN_HEAP, field, r0, noreg, noreg);
2823 if (rc == may_rewrite) {
2824 patch_bytecode(Bytecodes::_fast_sputfield, bc, r1, true, byte_no);
2825 }
2826 __ b(Done);
2827 }
2828
2829 __ bind(notShort);
2830 __ cmp(flags, (u1)ltos);
2831 __ br(Assembler::NE, notLong);
2832
2833 // ltos
2834 {
2835 __ pop(ltos);
2836 if (!is_static) pop_and_check_object(obj);
2837 __ access_store_at(T_LONG, IN_HEAP, field, r0, noreg, noreg);
2838 if (rc == may_rewrite) {
2839 patch_bytecode(Bytecodes::_fast_lputfield, bc, r1, true, byte_no);
2840 }
2841 __ b(Done);
2842 }
2843
2844 __ bind(notLong);
2845 __ cmp(flags, (u1)ftos);
2846 __ br(Assembler::NE, notFloat);
2847
2848 // ftos
2849 {
2850 __ pop(ftos);
2851 if (!is_static) pop_and_check_object(obj);
2852 __ access_store_at(T_FLOAT, IN_HEAP, field, noreg /* ftos */, noreg, noreg);
2853 if (rc == may_rewrite) {
2854 patch_bytecode(Bytecodes::_fast_fputfield, bc, r1, true, byte_no);
2855 }
2856 __ b(Done);
2857 }
2858
2859 __ bind(notFloat);
2860 #ifdef ASSERT
2861 __ cmp(flags, (u1)dtos);
2862 __ br(Assembler::NE, notDouble);
2863 #endif
2864
2865 // dtos
2866 {
2867 __ pop(dtos);
2868 if (!is_static) pop_and_check_object(obj);
2869 __ access_store_at(T_DOUBLE, IN_HEAP, field, noreg /* dtos */, noreg, noreg);
2870 if (rc == may_rewrite) {
2871 patch_bytecode(Bytecodes::_fast_dputfield, bc, r1, true, byte_no);
2872 }
2873 }
2874
2875 #ifdef ASSERT
2876 __ b(Done);
2877
2878 __ bind(notDouble);
2879 __ stop("Bad state");
2880 #endif
2881
3517 //-----------------------------------------------------------------------------
3518 // Allocation
3519
3520 void TemplateTable::_new() {
3521 transition(vtos, atos);
3522
3523 __ get_unsigned_2_byte_index_at_bcp(r3, 1);
3524 Label slow_case;
3525 Label done;
3526 Label initialize_header;
3527 Label initialize_object; // including clearing the fields
3528
3529 __ get_cpool_and_tags(r4, r0);
3530 // Make sure the class we're about to instantiate has been resolved.
3531 // This is done before loading InstanceKlass to be consistent with the order
3532 // how Constant Pool is updated (see ConstantPool::klass_at_put)
3533 const int tags_offset = Array<u1>::base_offset_in_bytes();
3534 __ lea(rscratch1, Address(r0, r3, Address::lsl(0)));
3535 __ lea(rscratch1, Address(rscratch1, tags_offset));
3536 __ ldarb(rscratch1, rscratch1);
3537 __ cmp(rscratch1, (u1)JVM_CONSTANT_Class);
3538 __ br(Assembler::NE, slow_case);
3539
3540 // get InstanceKlass
3541 __ load_resolved_klass_at_offset(r4, r3, r4, rscratch1);
3542
3543 // make sure klass is initialized & doesn't have finalizer
3544 // make sure klass is fully initialized
3545 __ ldrb(rscratch1, Address(r4, InstanceKlass::init_state_offset()));
3546 __ cmp(rscratch1, (u1)InstanceKlass::fully_initialized);
3547 __ br(Assembler::NE, slow_case);
3548
3549 // get instance_size in InstanceKlass (scaled to a count of bytes)
3550 __ ldrw(r3,
3551 Address(r4,
3552 Klass::layout_helper_offset()));
3553 // test to see if it has a finalizer or is malformed in some way
3554 __ tbnz(r3, exact_log2(Klass::_lh_instance_slow_path_bit), slow_case);
3555
3556 // Allocate the instance:
3557 // If TLAB is enabled:
3558 // Try to allocate in the TLAB.
3559 // If fails, go to the slow path.
3560 // Else If inline contiguous allocations are enabled:
3561 // Try to allocate in eden.
3562 // If fails due to heap end, go to slow path.
3563 //
3564 // If TLAB is enabled OR inline contiguous is enabled:
3565 // Initialize the allocation.
3566 // Exit.
3666
3667 void TemplateTable::arraylength() { // arraylength bytecode: consumes arrayref in r0, produces length in r0
3668 transition(atos, itos);
3669 __ null_check(r0, arrayOopDesc::length_offset_in_bytes()); // null check of the array ref; the offset identifies the access that would fault
3670 __ ldrw(r0, Address(r0, arrayOopDesc::length_offset_in_bytes())); // load the 32-bit length field
3671 }
3672
3673 void TemplateTable::checkcast()
3674 {
3675 transition(atos, atos);
3676 Label done, is_null, ok_is_subtype, quicked, resolved;
3677 __ cbz(r0, is_null);
3678
3679 // Get cpool & tags index
3680 __ get_cpool_and_tags(r2, r3); // r2=cpool, r3=tags array
3681 __ get_unsigned_2_byte_index_at_bcp(r19, 1); // r19=index
3682 // See if bytecode has already been quicked
3683 __ add(rscratch1, r3, Array<u1>::base_offset_in_bytes());
3684 __ lea(r1, Address(rscratch1, r19));
3685 __ ldarb(r1, r1);
3686 __ cmp(r1, (u1)JVM_CONSTANT_Class);
3687 __ br(Assembler::EQ, quicked);
3688
3689 __ push(atos); // save receiver for result, and for GC
3690 call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
3691 // vm_result_2 has metadata result
3692 __ get_vm_result_2(r0, rthread);
3693 __ pop(r3); // restore receiver
3694 __ b(resolved);
3695
3696 // Get superklass in r0 and subklass in r3
3697 __ bind(quicked);
3698 __ mov(r3, r0); // Save object in r3; r0 needed for subtype check
3699 __ load_resolved_klass_at_offset(r2, r19, r0, rscratch1); // r0 = klass
3700
3701 __ bind(resolved);
3702 __ load_klass(r19, r3);
3703
3704 // Generate subtype check. Blows r2, r5. Object in r3.
3705 // Superklass in r0. Subklass in r19.
3706 __ gen_subtype_check(r19, ok_is_subtype);
3720 __ bind(is_null);
3721 __ profile_null_seen(r2);
3722 } else {
3723 __ bind(is_null); // same as 'done'
3724 }
3725 __ bind(done);
3726 }
3727
3728 void TemplateTable::instanceof() {
3729 transition(atos, itos);
3730 Label done, is_null, ok_is_subtype, quicked, resolved;
3731 __ cbz(r0, is_null);
3732
3733 // Get cpool & tags index
3734 __ get_cpool_and_tags(r2, r3); // r2=cpool, r3=tags array
3735 __ get_unsigned_2_byte_index_at_bcp(r19, 1); // r19=index
3736 // See if bytecode has already been quicked
3737 __ add(rscratch1, r3, Array<u1>::base_offset_in_bytes());
3738 __ lea(r1, Address(rscratch1, r19));
3739 __ ldarb(r1, r1);
3740 __ cmp(r1, (u1)JVM_CONSTANT_Class);
3741 __ br(Assembler::EQ, quicked);
3742
3743 __ push(atos); // save receiver for result, and for GC
3744 call_VM(r0, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
3745 // vm_result_2 has metadata result
3746 __ get_vm_result_2(r0, rthread);
3747 __ pop(r3); // restore receiver
3748 __ verify_oop(r3);
3749 __ load_klass(r3, r3);
3750 __ b(resolved);
3751
3752 // Get superklass in r0 and subklass in r3
3753 __ bind(quicked);
3754 __ load_klass(r3, r0);
3755 __ load_resolved_klass_at_offset(r2, r19, r0, rscratch1);
3756
3757 __ bind(resolved);
3758
3759 // Generate subtype check. Blows r2, r5
3760 // Superklass in r0. Subklass in r3.
|