
src/hotspot/cpu/arm/c1_Runtime1_arm.cpp

 552         if (!UseTLAB && Universe::heap()->supports_inline_contig_alloc() && id != new_instance_id) {
 553           Label slow_case, slow_case_no_pop;
 554 
 555           // Make sure the class is fully initialized
 556           if (id == fast_new_instance_init_check_id) {
 557             __ ldrb(result, Address(klass, InstanceKlass::init_state_offset()));
 558             __ cmp(result, InstanceKlass::fully_initialized);
 559             __ b(slow_case_no_pop, ne);
 560           }
 561 
 562           // Free some temporary registers
 563           const Register obj_size = R4;
 564           const Register tmp1     = R5;
 565           const Register tmp2     = LR;
 566           const Register obj_end  = Rtemp;
 567 
 568           __ raw_push(R4, R5, LR);
 569 
 570           __ ldr_u32(obj_size, Address(klass, Klass::layout_helper_offset()));
 571           __ eden_allocate(result, obj_end, tmp1, tmp2, obj_size, slow_case);        // initializes result and obj_end
 572           __ incr_allocated_bytes(obj_size, tmp2);
 573           __ initialize_object(result, obj_end, klass, noreg /* len */, tmp1, tmp2,
 574                                instanceOopDesc::header_size() * HeapWordSize, -1,
 575                                /* is_tlab_allocated */ false);
 576           __ raw_pop_and_ret(R4, R5);
 577 
 578           __ bind(slow_case);
 579           __ raw_pop(R4, R5, LR);
 580 
 581           __ bind(slow_case_no_pop);
 582         }
 583 
 584         OopMap* map = save_live_registers(sasm);
 585         int call_offset = __ call_RT(result, noreg, CAST_FROM_FN_PTR(address, new_instance), klass);
 586         oop_maps = new OopMapSet();
 587         oop_maps->add_gc_map(call_offset, map);
 588 
 589         // MacroAssembler::StoreStore useless (included in the runtime exit path)
 590 
 591         restore_live_registers_except_R0(sasm);
 592       }
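
As context for the fast path above: a minimal, self-contained C++ sketch of what eden_allocate boils down to, assuming a single eden space with a shared top pointer and an end pointer. It is illustrative only, not HotSpot code; the emitted stub keeps these values in registers and branches to slow_case rather than returning a null pointer.

    // Sketch only: models the inline contiguous ("eden") bump-pointer allocation
    // that eden_allocate() emits, under the assumption of one shared top/end pair.
    #include <atomic>
    #include <cstddef>
    #include <cstdint>

    struct EdenSketch {
      std::atomic<uint8_t*> top;   // next free byte, shared between threads (hence the CAS)
      uint8_t*              end;   // exclusive upper bound of the space
    };

    // Try to carve obj_size bytes out of eden; nullptr stands in for "branch to slow_case".
    static uint8_t* eden_allocate_sketch(EdenSketch& eden, size_t obj_size) {
      uint8_t* result = eden.top.load(std::memory_order_relaxed);
      for (;;) {
        uint8_t* obj_end = result + obj_size;
        if (obj_end > eden.end) {
          return nullptr;                      // no room left: take the slow case
        }
        // On failure compare_exchange_weak reloads 'result' with the top another
        // thread installed, so the loop retries with fresh values.
        if (eden.top.compare_exchange_weak(result, obj_end,
                                           std::memory_order_relaxed)) {
          return result;                       // 'result' and 'obj_end' as in the stub
        }
      }
    }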


 641           __ raw_push(R4, R5, LR);
 642 
 643           // Get the allocation size: round_up((length << (layout_helper & 0xff)) + header_size)
 644           __ ldr_u32(tmp1, Address(klass, Klass::layout_helper_offset()));
 645           __ mov(arr_size, MinObjAlignmentInBytesMask);
 646           __ and_32(tmp2, tmp1, (unsigned int)(Klass::_lh_header_size_mask << Klass::_lh_header_size_shift));
 647 
 648 #ifdef AARCH64
 649           __ lslv_w(tmp3, length, tmp1);
 650           __ add(arr_size, arr_size, tmp3);
 651 #else
 652           __ add(arr_size, arr_size, AsmOperand(length, lsl, tmp1));
 653 #endif // AARCH64
 654 
 655           __ add(arr_size, arr_size, AsmOperand(tmp2, lsr, Klass::_lh_header_size_shift));
 656           __ align_reg(arr_size, arr_size, MinObjAlignmentInBytes);
 657 
 658           // eden_allocate destroys tmp2, so reload header_size after allocation
 659           // eden_allocate initializes result and obj_end
 660           __ eden_allocate(result, obj_end, tmp1, tmp2, arr_size, slow_case);
 661           __ incr_allocated_bytes(arr_size, tmp2);
 662           __ ldrb(tmp2, Address(klass, in_bytes(Klass::layout_helper_offset()) +
 663                                        Klass::_lh_header_size_shift / BitsPerByte));
 664           __ initialize_object(result, obj_end, klass, length, tmp1, tmp2, tmp2, -1, /* is_tlab_allocated */ false);
 665           __ raw_pop_and_ret(R4, R5);
 666 
 667           __ bind(slow_case);
 668           __ raw_pop(R4, R5, LR);
 669           __ bind(slow_case_no_pop);
 670         }
 671 
 672         OopMap* map = save_live_registers(sasm);
 673         int call_offset;
 674         if (id == new_type_array_id) {
 675           call_offset = __ call_RT(result, noreg, CAST_FROM_FN_PTR(address, new_type_array), klass, length);
 676         } else {
 677           call_offset = __ call_RT(result, noreg, CAST_FROM_FN_PTR(address, new_object_array), klass, length);
 678         }
 679         oop_maps = new OopMapSet();
 680         oop_maps->add_gc_map(call_offset, map);
 681 
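The size computation in the comment above, round_up((length << (layout_helper & 0xff)) + header_size), can be read as the following sketch. It assumes the usual array layout_helper encoding, with log2 of the element size in the low byte and the header size in bytes in bits 16..23 (what _lh_header_size_shift and _lh_header_size_mask express); align_up_sketch and the parameter names are illustrative, not HotSpot identifiers.

    // Sketch only: the allocation-size arithmetic performed register-by-register above.
    #include <cstddef>
    #include <cstdint>

    static size_t align_up_sketch(size_t x, size_t alignment) {
      // alignment is assumed to be a power of two (MinObjAlignmentInBytes is).
      return (x + alignment - 1) & ~(alignment - 1);
    }

    static size_t array_allocation_size_sketch(uint32_t layout_helper,
                                               uint32_t length,
                                               size_t   min_obj_alignment_bytes) {
      uint32_t log2_element_size = layout_helper & 0xff;         // low byte
      uint32_t header_size       = (layout_helper >> 16) & 0xff; // bits 16..23, in bytes
      size_t   body_size         = (size_t)length << log2_element_size;
      return align_up_sketch(body_size + header_size, min_obj_alignment_bytes);
    }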

Below are the same two hunks as they appear in the patched version of the file; the only change is that the incr_allocated_bytes calls that followed eden_allocate are gone (the unnumbered blank lines mark where they were removed).

 552         if (!UseTLAB && Universe::heap()->supports_inline_contig_alloc() && id != new_instance_id) {
 553           Label slow_case, slow_case_no_pop;
 554 
 555           // Make sure the class is fully initialized
 556           if (id == fast_new_instance_init_check_id) {
 557             __ ldrb(result, Address(klass, InstanceKlass::init_state_offset()));
 558             __ cmp(result, InstanceKlass::fully_initialized);
 559             __ b(slow_case_no_pop, ne);
 560           }
 561 
 562           // Free some temporary registers
 563           const Register obj_size = R4;
 564           const Register tmp1     = R5;
 565           const Register tmp2     = LR;
 566           const Register obj_end  = Rtemp;
 567 
 568           __ raw_push(R4, R5, LR);
 569 
 570           __ ldr_u32(obj_size, Address(klass, Klass::layout_helper_offset()));
 571           __ eden_allocate(result, obj_end, tmp1, tmp2, obj_size, slow_case);        // initializes result and obj_end

 572           __ initialize_object(result, obj_end, klass, noreg /* len */, tmp1, tmp2,
 573                                instanceOopDesc::header_size() * HeapWordSize, -1,
 574                                /* is_tlab_allocated */ false);
 575           __ raw_pop_and_ret(R4, R5);
 576 
 577           __ bind(slow_case);
 578           __ raw_pop(R4, R5, LR);
 579 
 580           __ bind(slow_case_no_pop);
 581         }
 582 
 583         OopMap* map = save_live_registers(sasm);
 584         int call_offset = __ call_RT(result, noreg, CAST_FROM_FN_PTR(address, new_instance), klass);
 585         oop_maps = new OopMapSet();
 586         oop_maps->add_gc_map(call_offset, map);
 587 
 588         // MacroAssembler::StoreStore useless (included in the runtime exit path)
 589 
 590         restore_live_registers_except_R0(sasm);
 591       }


 640           __ raw_push(R4, R5, LR);
 641 
 642           // Get the allocation size: round_up((length << (layout_helper & 0xff)) + header_size)
 643           __ ldr_u32(tmp1, Address(klass, Klass::layout_helper_offset()));
 644           __ mov(arr_size, MinObjAlignmentInBytesMask);
 645           __ and_32(tmp2, tmp1, (unsigned int)(Klass::_lh_header_size_mask << Klass::_lh_header_size_shift));
 646 
 647 #ifdef AARCH64
 648           __ lslv_w(tmp3, length, tmp1);
 649           __ add(arr_size, arr_size, tmp3);
 650 #else
 651           __ add(arr_size, arr_size, AsmOperand(length, lsl, tmp1));
 652 #endif // AARCH64
 653 
 654           __ add(arr_size, arr_size, AsmOperand(tmp2, lsr, Klass::_lh_header_size_shift));
 655           __ align_reg(arr_size, arr_size, MinObjAlignmentInBytes);
 656 
 657           // eden_allocate destroys tmp2, so reload header_size after allocation
 658           // eden_allocate initializes result and obj_end
 659           __ eden_allocate(result, obj_end, tmp1, tmp2, arr_size, slow_case);

 660           __ ldrb(tmp2, Address(klass, in_bytes(Klass::layout_helper_offset()) +
 661                                        Klass::_lh_header_size_shift / BitsPerByte));
 662           __ initialize_object(result, obj_end, klass, length, tmp1, tmp2, tmp2, -1, /* is_tlab_allocated */ false);
 663           __ raw_pop_and_ret(R4, R5);
 664 
 665           __ bind(slow_case);
 666           __ raw_pop(R4, R5, LR);
 667           __ bind(slow_case_no_pop);
 668         }
 669 
 670         OopMap* map = save_live_registers(sasm);
 671         int call_offset;
 672         if (id == new_type_array_id) {
 673           call_offset = __ call_RT(result, noreg, CAST_FROM_FN_PTR(address, new_type_array), klass, length);
 674         } else {
 675           call_offset = __ call_RT(result, noreg, CAST_FROM_FN_PTR(address, new_object_array), klass, length);
 676         }
 677         oop_maps = new OopMapSet();
 678         oop_maps->add_gc_map(call_offset, map);
 679 
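For reference, a sketch of the bookkeeping done by the removed incr_allocated_bytes calls, assuming the helper simply adds the allocation size to a per-thread allocated-bytes counter (the kind of value surfaced through com.sun.management.ThreadMXBean); the type and field names here are stand-ins, not the real JavaThread fields.

    // Sketch only: per-thread allocation accounting of the kind incr_allocated_bytes
    // is assumed to perform in the fast path shown in the original hunks.
    #include <cstdint>

    struct ThreadSketch {
      uint64_t allocated_bytes = 0;   // cumulative bytes allocated by this thread
    };

    static inline void incr_allocated_bytes_sketch(ThreadSketch* self,
                                                   uint64_t size_in_bytes) {
      // Only the owning thread updates its own counter, so a plain add suffices.
      self->allocated_bytes += size_in_bytes;
    }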

