559 // Is there a value type argument?
560 for (int i = 0; i < sig_extended->length() && !has_value_argument; i++) {
561 has_value_argument = (sig_extended->at(i)._bt == T_VALUETYPE);
562 }
563 if (has_value_argument) {
564 // There is at least a value type argument: we're coming from
565 // compiled code so we have no buffers to back the value
566 // types. Allocate the buffers here with a runtime call.
567 OopMap* map = RegisterSaver::save_live_registers(masm, 0, &frame_size_in_words);
568
569 frame_complete = __ offset();
570 address the_pc = __ pc();
571
572 __ set_last_Java_frame(noreg, noreg, the_pc, rscratch1);
573
574 __ mov(c_rarg0, rthread);
575 __ mov(c_rarg1, r1);
576 __ mov(c_rarg2, (int64_t)alloc_value_receiver);
577
578 __ lea(rscratch1, RuntimeAddress(CAST_FROM_FN_PTR(address, SharedRuntime::allocate_value_types)));
579 __ blrt(rscratch1, 3, 0, 1);
580
581 oop_maps->add_gc_map((int)(__ pc() - start), map);
582 __ reset_last_Java_frame(false);
583
584 RegisterSaver::restore_live_registers(masm);
585
586 Label no_exception;
587 __ ldr(r0, Address(rthread, Thread::pending_exception_offset()));
588 __ cbz(r0, no_exception);
589
590 __ str(zr, Address(rthread, JavaThread::vm_result_offset()));
591 __ ldr(r0, Address(rthread, Thread::pending_exception_offset()));
592 __ b(RuntimeAddress(StubRoutines::forward_exception_entry()));
593
594 __ bind(no_exception);
595
596 // We get an array of objects from the runtime call
597 __ get_vm_result(r10, rthread);
598 __ get_vm_result_2(r1, rthread); // TODO: required to keep the callee Method live?
599 }
868 __ ldrd(r_1->as_FloatRegister(), Address(esp, next_off));
869 }
870 }
871 }
872
873
874 // 6243940 We might end up in handle_wrong_method if
875 // the callee is deoptimized as we race thru here. If that
876 // happens we don't want to take a safepoint because the
877 // caller frame will look interpreted and arguments are now
878 // "compiled" so it is much better to make this transition
879 // invisible to the stack walking code. Unfortunately if
880 // we try and find the callee by normal means a safepoint
881 // is possible. So we stash the desired callee in the thread
882 // and the vm will find there should this case occur.
883
884 __ str(rmethod, Address(rthread, JavaThread::callee_target_offset()));
885 __ br(rscratch1);
886 }
887
888 // ---------------------------------------------------------------
// NOTE(review): this overload looks like a truncated/legacy variant of
// generate_i2c2i_adapters. As visible here it is declared to return an
// AdapterHandlerEntry* but the body falls off the end without a return
// statement, `i2c_entry` is computed but never consumed, and `skip_fixup`
// is branched to (cbz below) but never bound in this body. Presumably the
// tail (c2i adapter generation, __ flush(), and the
// AdapterHandlerLibrary::new_entry call) was removed or lost -- confirm
// against the full file; the GrowableArray<SigEntry> overload below is the
// maintained variant.
AdapterHandlerEntry* SharedRuntime::generate_i2c2i_adapters(MacroAssembler *masm,
                                                            int total_args_passed,
                                                            int comp_args_on_stack,
                                                            const BasicType *sig_bt,
                                                            const VMRegPair *regs,
                                                            AdapterFingerPrint* fingerprint) {
  // i2c entry: interpreted caller invoking a compiled callee; repack the
  // interpreter-laid-out arguments into the compiled calling convention.
  address i2c_entry = __ pc();

  gen_i2c_adapter(masm, total_args_passed, comp_args_on_stack, sig_bt, regs);

  // c2i unverified entry: compiled caller entering through the inline cache.
  address c2i_unverified_entry = __ pc();
  Label skip_fixup;

  Label ok;

  // Registers used by the inline-cache check below.
  Register holder = rscratch2;     // CompiledICHolder* installed at the call site
  Register receiver = j_rarg0;
  Register tmp = r10;  // A call-clobbered register not used for arg passing

  // -------------------------------------------------------------------------
  // Generate a C2I adapter. On entry we know rmethod holds the Method* during calls
  // to the interpreter. The args start out packed in the compiled layout. They
  // need to be unpacked into the interpreter layout. This will almost always
  // require some stack space. We grow the current (compiled) stack, then repack
  // the args. We finally end in a jump to the generic interpreter entry point.
  // On exit from the interpreter, the interpreter will restore our SP (lest the
  // compiled code, which relies solely on SP and not FP, get sick).

  {
    __ block_comment("c2i_unverified_entry {");
    // Compare the receiver's klass with the klass cached in the IC holder;
    // on mismatch go to the ic-miss stub so the call site is re-resolved.
    __ load_klass(rscratch1, receiver);
    __ ldr(tmp, Address(holder, CompiledICHolder::holder_klass_offset()));
    __ cmp(rscratch1, tmp);
    // Load the target Method* eagerly; only meaningful on the hit path.
    __ ldr(rmethod, Address(holder, CompiledICHolder::holder_metadata_offset()));
    __ br(Assembler::EQ, ok);
    __ far_jump(RuntimeAddress(SharedRuntime::get_ic_miss_stub()));

    __ bind(ok);
    // Method might have been compiled since the call site was patched to
    // interpreted; if that is the case treat it as a miss so we can get
    // the call site corrected.
    __ ldr(rscratch1, Address(rmethod, in_bytes(Method::code_offset())));
    __ cbz(rscratch1, skip_fixup);
    __ far_jump(RuntimeAddress(SharedRuntime::get_ic_miss_stub()));
    __ block_comment("} c2i_unverified_entry");
  }
}
936
937 // ---------------------------------------------------------------
938 AdapterHandlerEntry* SharedRuntime::generate_i2c2i_adapters(MacroAssembler *masm,
939 int comp_args_on_stack,
940 const GrowableArray<SigEntry>* sig,
941 const VMRegPair* regs,
942 const GrowableArray<SigEntry>* sig_cc,
943 const VMRegPair* regs_cc,
944 const GrowableArray<SigEntry>* sig_cc_ro,
945 const VMRegPair* regs_cc_ro,
946 AdapterFingerPrint* fingerprint,
947 AdapterBlob*& new_adapter) {
948
949 address i2c_entry = __ pc();
950 gen_i2c_adapter(masm, comp_args_on_stack, sig, regs);
951
952 address c2i_unverified_entry = __ pc();
953 Label skip_fixup;
954
955
956 gen_inline_cache_check(masm, skip_fixup);
957
958 OopMapSet* oop_maps = new OopMapSet();
959 int frame_complete = CodeOffsets::frame_never_safe;
960 int frame_size_in_words = 0;
961
962 // Scalarized c2i adapter with non-scalarized receiver (i.e., don't pack receiver)
963 address c2i_value_ro_entry = __ pc();
964 if (regs_cc != regs_cc_ro) {
965 Label unused;
966 gen_c2i_adapter(masm, sig_cc_ro, regs_cc_ro, skip_fixup, i2c_entry, oop_maps, frame_complete, frame_size_in_words, false);
967 skip_fixup = unused;
968 }
969
970 // Scalarized c2i adapter
971 address c2i_entry = __ pc();
972
973 // Not implemented
974 // BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
975 // bs->c2i_entry_barrier(masm);
976
977 gen_c2i_adapter(masm, sig_cc, regs_cc, skip_fixup, i2c_entry, oop_maps, frame_complete, frame_size_in_words, true);
978
979 address c2i_unverified_value_entry = c2i_unverified_entry;
980
981 // Non-scalarized c2i adapter
982 address c2i_value_entry = c2i_entry;
983 if (regs != regs_cc) {
984 Label value_entry_skip_fixup;
985 c2i_unverified_value_entry = __ pc();
986 gen_inline_cache_check(masm, value_entry_skip_fixup);
987
988 c2i_value_entry = __ pc();
989 Label unused;
990 gen_c2i_adapter(masm, sig, regs, value_entry_skip_fixup, i2c_entry, oop_maps, frame_complete, frame_size_in_words, false);
991 }
992
993 // TODO fix this
994 // Class initialization barrier for static methods
995 address c2i_no_clinit_check_entry = NULL;
996
997 __ flush();
998
999 // The c2i adapter might safepoint and trigger a GC. The caller must make sure that
1000 // the GC knows about the location of oop argument locations passed to the c2i adapter.
1001
1002 bool caller_must_gc_arguments = (regs != regs_cc);
1003 new_adapter = AdapterBlob::create(masm->code(), frame_complete, frame_size_in_words + 10, oop_maps, caller_must_gc_arguments);
1004
1005 return AdapterHandlerLibrary::new_entry(fingerprint, i2c_entry, c2i_entry, c2i_value_entry, c2i_value_ro_entry, c2i_unverified_entry, c2i_unverified_value_entry, c2i_no_clinit_check_entry);
1006 }
1007
1008 int SharedRuntime::c_calling_convention(const BasicType *sig_bt,
1009 VMRegPair *regs,
1010 VMRegPair *regs2,
1011 int total_args_passed) {
1012 assert(regs2 == NULL, "not needed on AArch64");
1013
1014 // We return the amount of VMRegImpl stack slots we need to reserve for all
1015 // the arguments NOT counting out_preserve_stack_slots.
|
559 // Is there a value type argument?
560 for (int i = 0; i < sig_extended->length() && !has_value_argument; i++) {
561 has_value_argument = (sig_extended->at(i)._bt == T_VALUETYPE);
562 }
563 if (has_value_argument) {
564 // There is at least a value type argument: we're coming from
565 // compiled code so we have no buffers to back the value
566 // types. Allocate the buffers here with a runtime call.
567 OopMap* map = RegisterSaver::save_live_registers(masm, 0, &frame_size_in_words);
568
569 frame_complete = __ offset();
570 address the_pc = __ pc();
571
572 __ set_last_Java_frame(noreg, noreg, the_pc, rscratch1);
573
574 __ mov(c_rarg0, rthread);
575 __ mov(c_rarg1, r1);
576 __ mov(c_rarg2, (int64_t)alloc_value_receiver);
577
578 __ lea(rscratch1, RuntimeAddress(CAST_FROM_FN_PTR(address, SharedRuntime::allocate_value_types)));
579 __ blr(rscratch1);
580
581 oop_maps->add_gc_map((int)(__ pc() - start), map);
582 __ reset_last_Java_frame(false);
583
584 RegisterSaver::restore_live_registers(masm);
585
586 Label no_exception;
587 __ ldr(r0, Address(rthread, Thread::pending_exception_offset()));
588 __ cbz(r0, no_exception);
589
590 __ str(zr, Address(rthread, JavaThread::vm_result_offset()));
591 __ ldr(r0, Address(rthread, Thread::pending_exception_offset()));
592 __ b(RuntimeAddress(StubRoutines::forward_exception_entry()));
593
594 __ bind(no_exception);
595
596 // We get an array of objects from the runtime call
597 __ get_vm_result(r10, rthread);
598 __ get_vm_result_2(r1, rthread); // TODO: required to keep the callee Method live?
599 }
868 __ ldrd(r_1->as_FloatRegister(), Address(esp, next_off));
869 }
870 }
871 }
872
873
874 // 6243940 We might end up in handle_wrong_method if
875 // the callee is deoptimized as we race thru here. If that
876 // happens we don't want to take a safepoint because the
877 // caller frame will look interpreted and arguments are now
878 // "compiled" so it is much better to make this transition
879 // invisible to the stack walking code. Unfortunately if
880 // we try and find the callee by normal means a safepoint
881 // is possible. So we stash the desired callee in the thread
882 // and the vm will find there should this case occur.
883
884 __ str(rmethod, Address(rthread, JavaThread::callee_target_offset()));
885 __ br(rscratch1);
886 }
887
// Emits the inline-cache receiver check used ahead of the unverified c2i
// entries. On entry rscratch2 holds the CompiledICHolder* installed at the
// call site and j_rarg0 holds the receiver. The receiver's klass is
// compared against the holder's cached klass; on mismatch control goes to
// the ic-miss stub so the call site can be re-resolved. On a hit, rmethod
// is loaded with the target Method*; if that method has since acquired
// compiled code (Method::_code non-null) it is also treated as a miss so
// the call site gets re-patched, otherwise control branches to
// `skip_fixup` (bound by the caller -- presumably at the start of the c2i
// argument-repacking code; confirm against gen_c2i_adapter).
// Clobbers rscratch1, r10, rmethod.
static void gen_inline_cache_check(MacroAssembler *masm, Label& skip_fixup) {

  Label ok;

  Register holder = rscratch2;     // CompiledICHolder* from the inline cache
  Register receiver = j_rarg0;
  Register tmp = r10;  // A call-clobbered register not used for arg passing

  {
    __ block_comment("c2i_unverified_entry {");
    __ load_klass(rscratch1, receiver);
    __ ldr(tmp, Address(holder, CompiledICHolder::holder_klass_offset()));
    __ cmp(rscratch1, tmp);
    // Load the target Method* eagerly; only meaningful on the hit path.
    __ ldr(rmethod, Address(holder, CompiledICHolder::holder_metadata_offset()));
    __ br(Assembler::EQ, ok);
    __ far_jump(RuntimeAddress(SharedRuntime::get_ic_miss_stub()));

    __ bind(ok);
    // Method might have been compiled since the call site was patched to
    // interpreted; if that is the case treat it as a miss so we can get
    // the call site corrected.
    __ ldr(rscratch1, Address(rmethod, in_bytes(Method::code_offset())));
    __ cbz(rscratch1, skip_fixup);
    __ far_jump(RuntimeAddress(SharedRuntime::get_ic_miss_stub()));
    __ block_comment("} c2i_unverified_entry");
  }
}
924
925
926 // ---------------------------------------------------------------
// Generates the set of i2c/c2i adapters for one method signature and
// registers them with the AdapterHandlerLibrary. With scalarized (value
// type) calling conventions a method can have several compiled-code
// argument layouts, hence the multiple c2i entry points returned:
//   c2i_entry          - scalarized args (sig_cc/regs_cc)
//   c2i_value_ro_entry - scalarized args, receiver left unscalarized
//                        (sig_cc_ro/regs_cc_ro)
//   c2i_value_entry    - non-scalarized args (sig/regs)
// plus the corresponding unverified entries that run the inline-cache
// check first. Code is emitted into *masm; the wrapping blob is returned
// through new_adapter.
AdapterHandlerEntry* SharedRuntime::generate_i2c2i_adapters(MacroAssembler *masm,
                                                            int comp_args_on_stack,
                                                            const GrowableArray<SigEntry>* sig,
                                                            const VMRegPair* regs,
                                                            const GrowableArray<SigEntry>* sig_cc,
                                                            const VMRegPair* regs_cc,
                                                            const GrowableArray<SigEntry>* sig_cc_ro,
                                                            const VMRegPair* regs_cc_ro,
                                                            AdapterFingerPrint* fingerprint,
                                                            AdapterBlob*& new_adapter) {

  address i2c_entry = __ pc();
  gen_i2c_adapter(masm, comp_args_on_stack, sig, regs);

  address c2i_unverified_entry = __ pc();
  Label skip_fixup;

  gen_inline_cache_check(masm, skip_fixup);

  OopMapSet* oop_maps = new OopMapSet();
  int frame_complete = CodeOffsets::frame_never_safe;
  int frame_size_in_words = 0;

  // Scalarized c2i adapter with non-scalarized receiver (i.e., don't pack receiver)
  address c2i_value_ro_entry = __ pc();
  if (regs_cc != regs_cc_ro) {
    Label unused;
    gen_c2i_adapter(masm, sig_cc_ro, regs_cc_ro, skip_fixup, i2c_entry, oop_maps, frame_complete, frame_size_in_words, false);
    // The adapter above bound skip_fixup; reset it so the next
    // gen_c2i_adapter call gets a fresh, unbound label.
    skip_fixup = unused;
  }

  // Scalarized c2i adapter
  address c2i_entry = __ pc();

  // Class initialization barrier for static methods: the slow path goes to
  // the handle-wrong-method stub; non-static methods bypass the barrier.
  address c2i_no_clinit_check_entry = NULL;

  if (VM_Version::supports_fast_class_init_checks()) {
    Label L_skip_barrier;
    { // Bypass the barrier for non-static methods
      Register flags = rscratch1;
      __ ldrw(flags, Address(rmethod, Method::access_flags_offset()));
      __ tst(flags, JVM_ACC_STATIC);
      __ br(Assembler::NE, L_skip_barrier); // non-static
    }

    Register klass = rscratch1;
    __ load_method_holder(klass, rmethod);
    // We pass rthread to this function on x86
    __ clinit_barrier(klass, rscratch2, &L_skip_barrier /*L_fast_path*/);

    __ far_jump(RuntimeAddress(SharedRuntime::get_handle_wrong_method_stub())); // slow path

    __ bind(L_skip_barrier);
    c2i_no_clinit_check_entry = __ pc();
  }

  // FIXME: Not Implemented
  // BarrierSetAssembler* bs = BarrierSet::barrier_set()->barrier_set_assembler();
  // bs->c2i_entry_barrier(masm);

  gen_c2i_adapter(masm, sig_cc, regs_cc, skip_fixup, i2c_entry, oop_maps, frame_complete, frame_size_in_words, true);

  address c2i_unverified_value_entry = c2i_unverified_entry;

  // Non-scalarized c2i adapter
  address c2i_value_entry = c2i_entry;
  if (regs != regs_cc) {
    // The non-scalarized convention differs from the scalarized one, so
    // emit a dedicated entry (with its own inline-cache check).
    Label value_entry_skip_fixup;
    c2i_unverified_value_entry = __ pc();
    gen_inline_cache_check(masm, value_entry_skip_fixup);

    c2i_value_entry = __ pc();
    Label unused;
    gen_c2i_adapter(masm, sig, regs, value_entry_skip_fixup, i2c_entry, oop_maps, frame_complete, frame_size_in_words, false);
  }

  __ flush();

  // The c2i adapter might safepoint and trigger a GC. The caller must make sure that
  // the GC knows about the location of oop argument locations passed to the c2i adapter.

  bool caller_must_gc_arguments = (regs != regs_cc);
  new_adapter = AdapterBlob::create(masm->code(), frame_complete, frame_size_in_words + 10, oop_maps, caller_must_gc_arguments);

  return AdapterHandlerLibrary::new_entry(fingerprint, i2c_entry, c2i_entry, c2i_value_entry, c2i_value_ro_entry, c2i_unverified_entry, c2i_unverified_value_entry, c2i_no_clinit_check_entry);
}
1014
1015 int SharedRuntime::c_calling_convention(const BasicType *sig_bt,
1016 VMRegPair *regs,
1017 VMRegPair *regs2,
1018 int total_args_passed) {
1019 assert(regs2 == NULL, "not needed on AArch64");
1020
1021 // We return the amount of VMRegImpl stack slots we need to reserve for all
1022 // the arguments NOT counting out_preserve_stack_slots.
|