// counter_overflow_id: stub entered when a C1 invocation/backedge counter
// overflows. The compiled caller stores the bci and the Method* in its frame;
// this stub picks them up and calls the counter_overflow runtime entry.
755 case counter_overflow_id:
756 {
757 Register bci = r0, method = r1;
758 __ enter();
759 OopMap* map = save_live_registers(sasm);
// Retrieve bci from the caller frame (rfp + 2 words) — TODO confirm the
// caller-side layout against the C1 code that emits this call.
760 // Retrieve bci
761 __ ldrw(bci, Address(rfp, 2*BytesPerWord));
762 // And a pointer to the Method*
763 __ ldr(method, Address(rfp, 3*BytesPerWord));
764 int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, counter_overflow), bci, method);
// A GC can occur inside call_RT; associate the saved-register map with the call site.
765 oop_maps = new OopMapSet();
766 oop_maps->add_gc_map(call_offset, map);
767 restore_live_registers(sasm);
768 __ leave();
769 __ ret(lr);
770 }
771 break;
772
// new_type_array_id / new_object_array_id: allocate a primitive or object
// array. Incoming: length in r19, array klass in r3. Result: new array in r0.
773 case new_type_array_id:
774 case new_object_array_id:
775 {
776 Register length = r19; // Incoming
777 Register klass = r3; // Incoming
778 Register obj = r0; // Result
779 
780 if (id == new_type_array_id) {
781 __ set_info("new_type_array", dont_gc_arguments);
782 } else {
783 __ set_info("new_object_array", dont_gc_arguments);
784 }
785 
786 #ifdef ASSERT
787 // assert object type is really an array of the proper kind
788 {
789 Label ok;
790 Register t0 = obj;
// Extract the array tag from the klass layout helper and compare it against
// the tag expected for this stub id (type vs. object array).
791 __ ldrw(t0, Address(klass, Klass::layout_helper_offset()));
792 __ asrw(t0, t0, Klass::_lh_array_tag_shift);
793 int tag = ((id == new_type_array_id)
794 ? Klass::_lh_array_tag_type_value
795 : Klass::_lh_array_tag_obj_value);
796 __ mov(rscratch1, tag);
797 __ cmpw(t0, rscratch1);
798 __ br(Assembler::EQ, ok);
799 __ stop("assert(is an array klass)");
800 __ should_not_reach_here();
801 __ bind(ok);
802 }
803 #endif // ASSERT
804 
805 // If TLAB is disabled, see if there is support for inlining contiguous
806 // allocations.
807 // Otherwise, just go to the slow path.
808 if (!UseTLAB && Universe::heap()->supports_inline_contig_alloc()) {
809 Register arr_size = r4;
810 Register t1 = r2;
811 Register t2 = r5;
812 Label slow_path;
813 assert_different_registers(length, klass, obj, arr_size, t1, t2);
814 
815 // check that array length is small enough for fast path.
// NOTE(review): original lines 816-834 (length check, size computation,
// eden allocation and header initialization) are not visible in this view;
// the code below clears the body and finishes the fast path.
835 __ ldrb(t1, Address(klass, in_bytes(Klass::layout_helper_offset()) + (Klass::_lh_header_size_shift / BitsPerByte)));
836 assert(Klass::_lh_header_size_shift % BitsPerByte == 0, "bytewise");
837 assert(Klass::_lh_header_size_mask <= 0xFF, "bytewise");
838 __ andr(t1, t1, Klass::_lh_header_size_mask);
839 __ sub(arr_size, arr_size, t1); // body length
840 __ add(t1, t1, obj); // body start
841 __ initialize_body(t1, arr_size, 0, t2);
842 __ verify_oop(obj);
843 
844 __ ret(lr);
845 
846 __ bind(slow_path);
847 }
848 
// Slow path: call into the runtime to allocate, recording an oop map at the
// call site. r0 is not restored so the new array survives as the result.
849 __ enter();
850 OopMap* map = save_live_registers(sasm);
851 int call_offset;
852 if (id == new_type_array_id) {
853 call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_type_array), klass, length);
854 } else {
855 call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_object_array), klass, length);
856 }
857 
858 oop_maps = new OopMapSet();
859 oop_maps->add_gc_map(call_offset, map);
860 restore_live_registers_except_r0(sasm);
861 
862 __ verify_oop(obj);
863 __ leave();
864 __ ret(lr);
865 
866 // r0: new array
867 }
868 break;
869
// new_multi_array_id: multi-dimensional array allocation.
// NOTE(review): the original numbering jumps 874 -> 909 below, so the middle
// of this region is elided from this view; the tail that saves registers and
// calls SharedRuntime::register_finalizer most likely belongs to a different
// stub (register_finalizer_id) whose case label is in the elided lines —
// confirm against the full file before drawing conclusions from this span.
870 case new_multi_array_id:
871 { StubFrame f(sasm, "new_multi_array", dont_gc_arguments);
872 // r0,: klass
873 // r19,: rank
874 // r2: address of 1st dimension
909 OopMap* oop_map = save_live_registers(sasm);
910 int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, SharedRuntime::register_finalizer), r0);
911 oop_maps = new OopMapSet();
912 oop_maps->add_gc_map(call_offset, oop_map);
913 
914 // Now restore all the live registers
915 restore_live_registers(sasm);
916 
917 __ leave();
918 __ ret(lr);
919 }
920 break;
921
// throw_class_cast_exception_id: delegate to the shared exception-throw
// generator; 'true' passes the failing object to the runtime thrower.
922 case throw_class_cast_exception_id:
923 { StubFrame f(sasm, "throw_class_cast_exception", dont_gc_arguments);
924 oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_class_cast_exception), true);
925 }
926 break;
927
// throw_incompatible_class_change_error_id: delegate to the shared
// exception-throw generator; 'false' means no argument is passed to the
// runtime thrower.
// Fix: the stub's debug name previously read "...class_cast_exception"
// (copy/paste from the case above) even though this stub throws
// IncompatibleClassChangeError; the name now matches the stub id.
928 case throw_incompatible_class_change_error_id:
929 { StubFrame f(sasm, "throw_incompatible_class_change_exception", dont_gc_arguments);
930 oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_incompatible_class_change_error), false);
931 }
932 break;
933
// slow_subtype_check_id: out-of-line subtype check. Arguments are passed on
// the stack (subclass pushed first / deepest); the result overwrites the
// sup_k slot (result_off = sup_k_off).
934 case slow_subtype_check_id:
935 {
936 // Typical calling sequence:
937 // __ push(klass_RInfo); // object klass or other subclass
938 // __ push(sup_k_RInfo); // array element klass or other superclass
939 // __ bl(slow_subtype_check);
940 // Note that the subclass is pushed first, and is therefore deepest.
// Stack-frame slot layout for this stub (two 32-bit VMReg slots per
// 64-bit register, hence the _hi entries).
941 enum layout {
942 r0_off, r0_off_hi,
943 r2_off, r2_off_hi,
944 r4_off, r4_off_hi,
945 r5_off, r5_off_hi,
946 sup_k_off, sup_k_off_hi,
947 klass_off, klass_off_hi,
948 framesize,
949 result_off = sup_k_off
950 };
951 
952 __ set_info("slow_subtype_check", dont_gc_arguments);
953 __ push(RegSet::of(r0, r2, r4, r5), sp);
// NOTE(review): original lines 954-1104 (the actual check and the cases that
// follow) are elided from this view; this break closes a later case.
1105 break;
1106
// predicate_failed_trap_id: a loop/range-check predicate failed in compiled
// code. Call the runtime, then tail-jump into the deoptimization blob to
// re-execute the trapping bytecode in the interpreter.
1107 case predicate_failed_trap_id:
1108 {
1109 StubFrame f(sasm, "predicate_failed_trap", dont_gc_arguments);
1110 
1111 OopMap* map = save_live_registers(sasm);
1112 
1113 int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, predicate_failed_trap));
1114 oop_maps = new OopMapSet();
1115 oop_maps->add_gc_map(call_offset, map);
1116 restore_live_registers(sasm);
1117 __ leave();
1118 DeoptimizationBlob* deopt_blob = SharedRuntime::deopt_blob();
1119 assert(deopt_blob != NULL, "deoptimization blob must have been created");
1120 
// No return here: control transfers to the deopt blob's reexecution entry.
1121 __ far_jump(RuntimeAddress(deopt_blob->unpack_with_reexecution()));
1122 }
1123 break;
1124
1125
// default: any stub id without a dedicated implementation gets a stub that
// calls unimplemented_entry with the id and then traps.
1126 default:
1127 { StubFrame f(sasm, "unimplemented entry", dont_gc_arguments);
1128 __ mov(r0, (int)id);
1129 __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, unimplemented_entry), r0);
1130 __ should_not_reach_here();
1131 }
1132 break;
1133 }
1134 }
// Return the oop-map set built by whichever case was generated.
1135 return oop_maps;
1136 }
1137
1138 #undef __
1139
// No platform-specific names are provided for runtime addresses on this port.
1140 const char *Runtime1::pd_name_for_address(address entry) { Unimplemented(); return 0; }
|
// counter_overflow_id: stub entered when a C1 invocation/backedge counter
// overflows. The compiled caller stores the bci and the Method* in its frame;
// this stub picks them up and calls the counter_overflow runtime entry.
755 case counter_overflow_id:
756 {
757 Register bci = r0, method = r1;
758 __ enter();
759 OopMap* map = save_live_registers(sasm);
760 // Retrieve bci
761 __ ldrw(bci, Address(rfp, 2*BytesPerWord));
762 // And a pointer to the Method*
763 __ ldr(method, Address(rfp, 3*BytesPerWord));
764 int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, counter_overflow), bci, method);
// A GC can occur inside call_RT; associate the saved-register map with the call site.
765 oop_maps = new OopMapSet();
766 oop_maps->add_gc_map(call_offset, map);
767 restore_live_registers(sasm);
768 __ leave();
769 __ ret(lr);
770 }
771 break;
772
// new_type_array_id / new_object_array_id / new_value_array_id: allocate a
// primitive, object, or (Valhalla) flattened value array.
// Incoming: length in r19, array klass in r3. Result: new array in r0.
773 case new_type_array_id:
774 case new_object_array_id:
775 case new_value_array_id:
776 {
777 Register length = r19; // Incoming
778 Register klass = r3; // Incoming
779 Register obj = r0; // Result
780 
781 if (id == new_type_array_id) {
782 __ set_info("new_type_array", dont_gc_arguments);
783 }
784 else if (id == new_object_array_id) {
785 __ set_info("new_object_array", dont_gc_arguments);
786 }
787 else {
788 __ set_info("new_value_array", dont_gc_arguments);
789 }
790 
791 #ifdef ASSERT
792 // assert object type is really an array of the proper kind
793 {
794 Label ok;
795 Register t0 = obj;
// Extract the array tag from the klass layout helper and compare against the
// tag expected for this stub id (type / object / value array).
796 __ ldrw(t0, Address(klass, Klass::layout_helper_offset()));
797 __ asrw(t0, t0, Klass::_lh_array_tag_shift);
798 
799 int tag = 0;
800 switch (id) {
801 case new_type_array_id: tag = Klass::_lh_array_tag_type_value; break;
802 case new_object_array_id: tag = Klass::_lh_array_tag_obj_value; break;
803 case new_value_array_id: tag = Klass::_lh_array_tag_vt_value; break;
804 default: ShouldNotReachHere();
805 }
806 __ mov(rscratch1, tag);
807 __ cmpw(t0, rscratch1);
808 __ br(Assembler::EQ, ok);
809 __ stop("assert(is an array klass)");
810 __ should_not_reach_here();
811 __ bind(ok);
812 }
813 #endif // ASSERT
814 
815 // If TLAB is disabled, see if there is support for inlining contiguous
816 // allocations.
817 // Otherwise, just go to the slow path.
818 if (!UseTLAB && Universe::heap()->supports_inline_contig_alloc()) {
819 Register arr_size = r4;
820 Register t1 = r2;
821 Register t2 = r5;
822 Label slow_path;
823 assert_different_registers(length, klass, obj, arr_size, t1, t2);
824 
825 // check that array length is small enough for fast path.
// NOTE(review): original lines 826-844 (length check, size computation,
// eden allocation and header initialization) are not visible in this view;
// the code below clears the body and finishes the fast path.
845 __ ldrb(t1, Address(klass, in_bytes(Klass::layout_helper_offset()) + (Klass::_lh_header_size_shift / BitsPerByte)));
846 assert(Klass::_lh_header_size_shift % BitsPerByte == 0, "bytewise");
847 assert(Klass::_lh_header_size_mask <= 0xFF, "bytewise");
848 __ andr(t1, t1, Klass::_lh_header_size_mask);
849 __ sub(arr_size, arr_size, t1); // body length
850 __ add(t1, t1, obj); // body start
851 __ initialize_body(t1, arr_size, 0, t2);
852 __ verify_oop(obj);
853 
854 __ ret(lr);
855 
856 __ bind(slow_path);
857 }
858 
// Slow path: call into the runtime to allocate, recording an oop map at the
// call site. r0 is not restored so the new array survives as the result.
859 __ enter();
860 OopMap* map = save_live_registers(sasm);
861 int call_offset;
862 if (id == new_type_array_id) {
863 call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_type_array), klass, length);
864 } else {
865 // Runtime1::new_object_array handles both object and value arrays
866 call_offset = __ call_RT(obj, noreg, CAST_FROM_FN_PTR(address, new_object_array), klass, length);
867 }
868 
869 oop_maps = new OopMapSet();
870 oop_maps->add_gc_map(call_offset, map);
871 restore_live_registers_except_r0(sasm);
872 
873 __ verify_oop(obj);
874 __ leave();
875 __ ret(lr);
876 
877 // r0: new array
878 }
879 break;
880
// new_multi_array_id: multi-dimensional array allocation.
// NOTE(review): the original numbering jumps 885 -> 920 below, so the middle
// of this region is elided from this view; the tail that saves registers and
// calls SharedRuntime::register_finalizer most likely belongs to a different
// stub (register_finalizer_id) whose case label is in the elided lines —
// confirm against the full file before drawing conclusions from this span.
881 case new_multi_array_id:
882 { StubFrame f(sasm, "new_multi_array", dont_gc_arguments);
883 // r0,: klass
884 // r19,: rank
885 // r2: address of 1st dimension
920 OopMap* oop_map = save_live_registers(sasm);
921 int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, SharedRuntime::register_finalizer), r0);
922 oop_maps = new OopMapSet();
923 oop_maps->add_gc_map(call_offset, oop_map);
924 
925 // Now restore all the live registers
926 restore_live_registers(sasm);
927 
928 __ leave();
929 __ ret(lr);
930 }
931 break;
932
// throw_class_cast_exception_id: delegate to the shared exception-throw
// generator; 'true' passes the failing object to the runtime thrower.
933 case throw_class_cast_exception_id:
934 { StubFrame f(sasm, "throw_class_cast_exception", dont_gc_arguments);
935 oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_class_cast_exception), true);
936 }
937 break;
938
// throw_incompatible_class_change_error_id: delegate to the shared
// exception-throw generator; 'false' means no argument is passed to the
// runtime thrower.
939 case throw_incompatible_class_change_error_id:
940 { StubFrame f(sasm, "throw_incompatible_class_change_exception", dont_gc_arguments);
941 oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_incompatible_class_change_error), false);
942 }
943 break;
944
// throw_illegal_monitor_state_exception_id (Valhalla): delegate to the shared
// exception-throw generator with no argument.
945 case throw_illegal_monitor_state_exception_id:
946 { StubFrame f(sasm, "throw_illegal_monitor_state_exception", dont_gc_arguments);
947 oop_maps = generate_exception_throw(sasm, CAST_FROM_FN_PTR(address, throw_illegal_monitor_state_exception), false);
948 }
949 break;
950
// slow_subtype_check_id: out-of-line subtype check. Arguments are passed on
// the stack (subclass pushed first / deepest); the result overwrites the
// sup_k slot (result_off = sup_k_off).
951 case slow_subtype_check_id:
952 {
953 // Typical calling sequence:
954 // __ push(klass_RInfo); // object klass or other subclass
955 // __ push(sup_k_RInfo); // array element klass or other superclass
956 // __ bl(slow_subtype_check);
957 // Note that the subclass is pushed first, and is therefore deepest.
// Stack-frame slot layout for this stub (two 32-bit VMReg slots per
// 64-bit register, hence the _hi entries).
958 enum layout {
959 r0_off, r0_off_hi,
960 r2_off, r2_off_hi,
961 r4_off, r4_off_hi,
962 r5_off, r5_off_hi,
963 sup_k_off, sup_k_off_hi,
964 klass_off, klass_off_hi,
965 framesize,
966 result_off = sup_k_off
967 };
968 
969 __ set_info("slow_subtype_check", dont_gc_arguments);
970 __ push(RegSet::of(r0, r2, r4, r5), sp);
// NOTE(review): original lines 971-1121 (the actual check and the cases that
// follow) are elided from this view; this break closes a later case.
1122 break;
1123
// predicate_failed_trap_id: a loop/range-check predicate failed in compiled
// code. Call the runtime, then tail-jump into the deoptimization blob to
// re-execute the trapping bytecode in the interpreter.
1124 case predicate_failed_trap_id:
1125 {
1126 StubFrame f(sasm, "predicate_failed_trap", dont_gc_arguments);
1127 
1128 OopMap* map = save_live_registers(sasm);
1129 
1130 int call_offset = __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, predicate_failed_trap));
1131 oop_maps = new OopMapSet();
1132 oop_maps->add_gc_map(call_offset, map);
1133 restore_live_registers(sasm);
1134 __ leave();
1135 DeoptimizationBlob* deopt_blob = SharedRuntime::deopt_blob();
1136 assert(deopt_blob != NULL, "deoptimization blob must have been created");
1137 
// No return here: control transfers to the deopt blob's reexecution entry.
1138 __ far_jump(RuntimeAddress(deopt_blob->unpack_with_reexecution()));
1139 }
1140 break;
1141
// default: any stub id without a dedicated implementation gets a stub that
// calls unimplemented_entry with the id and then traps.
1142 default: // DMS CHECK: we come here with id:0 and id:32 during VM intialization, should it be fixed?
1143 { StubFrame f(sasm, "unimplemented entry", dont_gc_arguments);
1144 __ mov(r0, (int)id);
1145 __ call_RT(noreg, noreg, CAST_FROM_FN_PTR(address, unimplemented_entry), r0);
1146 __ should_not_reach_here();
1147 }
1148 break;
1149 }
1150 }
1151 
1152 
// Return the oop-map set built by whichever case was generated.
1153 return oop_maps;
1154 }
1155
1156 #undef __
1157
// No platform-specific names are provided for runtime addresses on this port.
1158 const char *Runtime1::pd_name_for_address(address entry) { Unimplemented(); return 0; }
|