1796 int s = obj_args->max_length();
1797 // if called through method handle invoke, some arguments may have been popped
1798 for (int i = start, j = 0; j < s && i < args->length(); i++) {
1799 if (args->at(i)->type()->is_object_kind()) {
1800 obj_args->push(args->at(i));
1801 j++;
1802 }
1803 }
1804 check_args_for_profiling(obj_args, s);
1805 return obj_args;
1806 }
1807
1808
// Compile one of the invoke* bytecodes: resolve the callee from the bytecode
// stream, push an appendix argument if the call site carries one, try to
// devirtualize (exact receiver type / CHA) and inline the target, and fall
// back to emitting a standard Invoke instruction when inlining fails.
// NOTE(review): parts of this function's body are elided in this excerpt.
1809 void GraphBuilder::invoke(Bytecodes::Code code) {
1810 bool will_link;
1811 ciSignature* declared_signature = NULL;
1812 ciMethod* target = stream()->get_method(will_link, &declared_signature);
1813 ciKlass* holder = stream()->get_declared_method_holder();
1814 const Bytecodes::Code bc_raw = stream()->cur_bc_raw();
1815 assert(declared_signature != NULL, "cannot be null");
1816
1817 ciInstanceKlass* klass = target->holder();
1818
1819 // Make sure there are no evident problems with linking the instruction.
1820 bool is_resolved = true;
1821 if (klass->is_loaded() && !target->is_loaded()) {
1822 is_resolved = false; // method not found
1823 }
1824
1825 // check if CHA possible: if so, change the code to invoke_special
1826 ciInstanceKlass* calling_klass = method()->holder();
1827 ciInstanceKlass* callee_holder = ciEnv::get_instance_klass_for_declared_method_holder(holder);
1828 ciInstanceKlass* actual_recv = callee_holder;
1829
1830 CompileLog* log = compilation()->log();
1831 if (log != NULL)
1832 log->elem("call method='%d' instr='%s'",
1833 log->identify(target),
1834 Bytecodes::name(code));
1835
1836 // Some methods are obviously bindable without any type checks so
1837 // convert them directly to an invokespecial or invokestatic.
1838 if (target->is_loaded() && !target->is_abstract() && target->can_be_statically_bound()) {
1839 switch (bc_raw) {
1840 case Bytecodes::_invokevirtual:
1841 code = Bytecodes::_invokespecial;
1842 break;
1843 case Bytecodes::_invokehandle:
1851 }
1852 }
1853
1854 // Push appendix argument (MethodType, CallSite, etc.), if one.
1855 bool patch_for_appendix = false;
1856 int patching_appendix_arg = 0;
1857 if (Bytecodes::has_optional_appendix(bc_raw) && (!will_link || PatchALot)) {
     // Call site not linkable yet (or PatchALot testing): push a placeholder
     // constant to be patched; patching_appendix_arg ensures it is popped
     // again with the regular arguments below.
1858 Value arg = append(new Constant(new ObjectConstant(compilation()->env()->unloaded_ciinstance()), copy_state_before()));
1859 apush(arg);
1860 patch_for_appendix = true;
1861 patching_appendix_arg = (will_link && stream()->has_appendix()) ? 0 : 1;
1862 } else if (stream()->has_appendix()) {
     // Linked call site with an appendix: push it as a constant trailing argument.
1863 ciObject* appendix = stream()->get_appendix();
1864 Value arg = append(new Constant(new ObjectConstant(appendix)));
1865 apush(arg);
1866 }
1867
     // Try to narrow the call to a unique concrete target, either from the
     // receiver's exact/declared type or via class hierarchy analysis.
1868 ciMethod* cha_monomorphic_target = NULL;
1869 ciMethod* exact_target = NULL;
1870 Value better_receiver = NULL;
1871 if (UseCHA && DeoptC1 && klass->is_loaded() && target->is_loaded() &&
1872 !(// %%% FIXME: Are both of these relevant?
1873 target->is_method_handle_intrinsic() ||
1874 target->is_compiled_lambda_form()) &&
1875 !patch_for_appendix) {
1876 Value receiver = NULL;
1877 ciInstanceKlass* receiver_klass = NULL;
1878 bool type_is_exact = false;
1879 // try to find a precise receiver type
1880 if (will_link && !target->is_static()) {
     // The receiver sits below the explicit arguments on the expression stack.
1881 int index = state()->stack_size() - (target->arg_size_no_receiver() + 1);
1882 receiver = state()->stack_at(index);
1883 ciType* type = receiver->exact_type();
1884 if (type != NULL && type->is_loaded() &&
1885 type->is_instance_klass() && !type->as_instance_klass()->is_interface()) {
1886 receiver_klass = (ciInstanceKlass*) type;
1887 type_is_exact = true;
1888 }
1889 if (type == NULL) {
1890 type = receiver->declared_type();
1891 if (type != NULL && type->is_loaded() &&
1971 if (cha_monomorphic_target != NULL) {
1972 if (cha_monomorphic_target->is_abstract()) {
1973 // Do not optimize for abstract methods
1974 cha_monomorphic_target = NULL;
1975 }
1976 }
1977
1978 if (cha_monomorphic_target != NULL) {
1979 if (!(target->is_final_method())) {
1980 // If we inlined because CHA revealed only a single target method,
1981 // then we are dependent on that target method not getting overridden
1982 // by dynamic class loading. Be sure to test the "static" receiver
1983 // dest_method here, as opposed to the actual receiver, which may
1984 // falsely lead us to believe that the receiver is final or private.
1985 dependency_recorder()->assert_unique_concrete_method(actual_recv, cha_monomorphic_target);
1986 }
     // A monomorphic target can be called directly.
1987 code = Bytecodes::_invokespecial;
1988 }
1989
1990 // check if we could do inlining
1991 if (!PatchALot && Inline && is_resolved &&
1992 klass->is_loaded() && target->is_loaded() &&
     // NB: '&&' binds tighter than '||', so this reads as
     // klass->is_initialized() || (klass->is_interface() && target->holder()->is_initialized()).
1993 (klass->is_initialized() || klass->is_interface() && target->holder()->is_initialized())
1994 && !patch_for_appendix) {
1995 // callee is known => check if we have static binding
1996 if (code == Bytecodes::_invokestatic ||
1997 code == Bytecodes::_invokespecial ||
     // i.e. (code == _invokevirtual && target->is_final_method()) — '&&' before '||'.
1998 code == Bytecodes::_invokevirtual && target->is_final_method() ||
1999 code == Bytecodes::_invokedynamic) {
2000 ciMethod* inline_target = (cha_monomorphic_target != NULL) ? cha_monomorphic_target : target;
2001 // static binding => check if callee is ok
2002 bool success = try_inline(inline_target, (cha_monomorphic_target != NULL) || (exact_target != NULL), false, code, better_receiver);
2003
2004 CHECK_BAILOUT();
2005 clear_inline_bailout();
2006
2007 if (success) {
2008 // Register dependence if JVMTI has either breakpoint
2009 // setting or hotswapping of methods capabilities since they may
2010 // cause deoptimization.
2011 if (compilation()->env()->jvmti_can_hotswap_or_post_breakpoint()) {
2012 dependency_recorder()->assert_evol_method(inline_target);
2015 }
2016 } else {
2017 print_inlining(target, "no static binding", /*success*/ false);
2018 }
2019 } else {
2020 print_inlining(target, "not inlineable", /*success*/ false);
2021 }
2022
2023 // If we attempted an inline which did not succeed because of a
2024 // bailout during construction of the callee graph, the entire
2025 // compilation has to be aborted. This is fairly rare and currently
2026 // seems to only occur for jasm-generated classes which contain
2027 // jsr/ret pairs which are not associated with finally clauses and
2028 // do not have exception handlers in the containing method, and are
2029 // therefore not caught early enough to abort the inlining without
2030 // corrupting the graph. (We currently bail out with a non-empty
2031 // stack at a ret in these situations.)
2032 CHECK_BAILOUT();
2033
2034 // inlining not successful => standard invoke
2035 bool is_loaded = target->is_loaded();
2036 ValueType* result_type = as_ValueType(declared_signature->return_type());
2037 ValueStack* state_before = copy_state_exhandling();
2038
2039 // The bytecode (code) might change in this method so we are checking this very late.
2040 const bool has_receiver =
2041 code == Bytecodes::_invokespecial ||
2042 code == Bytecodes::_invokevirtual ||
2043 code == Bytecodes::_invokeinterface;
     // Pop the explicit arguments (plus the placeholder appendix, if one was
     // pushed above); the receiver, when present, is popped separately.
2044 Values* args = state()->pop_arguments(target->arg_size_no_receiver() + patching_appendix_arg);
2045 Value recv = has_receiver ? apop() : NULL;
2046 int vtable_index = Method::invalid_vtable_index;
2047
2048 #ifdef SPARC
2049 // Currently only supported on Sparc.
2050 // The UseInlineCaches only controls dispatch to invokevirtuals for
2051 // loaded classes which we weren't able to statically bind.
2052 if (!UseInlineCaches && is_resolved && is_loaded && code == Bytecodes::_invokevirtual
2053 && !target->can_be_statically_bound()) {
2054 // Find a vtable index if one is available
2055 // For arrays, callee_holder is Object. Resolving the call with
2056 // Object would allow an illegal call to finalize() on an
2057 // array. We use holder instead: illegal calls to finalize() won't
2058 // be compiled as vtable calls (IC call resolution will catch the
2059 // illegal call) and the few legal calls on array types won't be
2060 // either.
2061 vtable_index = target->resolve_vtable_index(calling_klass, holder);
2062 }
2063 #endif
2064
2065 if (is_resolved) {
2066 // invokespecial always needs a NULL check. invokevirtual where the target is
2067 // final or where it's not known whether the target is final requires a NULL check.
2068 // Otherwise normal invokevirtual will perform the null check during the lookup
2069 // logic or the unverified entry point. Profiling of calls requires that
2070 // the null check is performed in all cases.
2071 bool do_null_check = (recv != NULL) &&
2072 (code == Bytecodes::_invokespecial || !is_loaded || target->is_final() || (is_profiling() && profile_calls()));
2073
2074 if (do_null_check) {
2075 null_check(recv);
2076 }
2077
2078 if (is_profiling()) {
2079 // Note that we'd collect profile data in this method if we wanted it.
2080 compilation()->set_would_profile(true);
2081
2082 if (profile_calls()) {
2083 assert(cha_monomorphic_target == NULL || exact_target == NULL, "both can not be set");
     // Record the devirtualized target's holder, when one was found, so the
     // profile reflects the narrowed callee rather than the declared target.
2084 ciKlass* target_klass = NULL;
2085 if (cha_monomorphic_target != NULL) {
2086 target_klass = cha_monomorphic_target->holder();
2087 } else if (exact_target != NULL) {
2088 target_klass = exact_target->holder();
2089 }
2090 profile_call(target, recv, target_klass, collect_args_for_profiling(args, NULL, false), false);
2091 }
2092 }
2093 } else {
2094 // No need in null check or profiling: linkage error will be thrown at runtime
2095 // during resolution.
2096 }
2097
2098 Invoke* result = new Invoke(code, result_type, recv, args, vtable_index, target, state_before);
2099 // push result
2100 append_split(result);
2101
     // Non-void results go back on the expression stack; strict (strictfp)
     // methods route the value through round_fp().
2102 if (result_type != voidType) {
2103 if (method()->is_strict()) {
2104 push(result_type, round_fp(result));
2105 } else {
2106 push(result_type, result);
2107 }
2108 }
2109 if (profile_return() && result_type->is_object_kind()) {
2110 profile_return_type(result, target);
2111 }
2112 }
2113
2114
2115 void GraphBuilder::new_instance(int klass_index) {
|
1796 int s = obj_args->max_length();
1797 // if called through method handle invoke, some arguments may have been popped
1798 for (int i = start, j = 0; j < s && i < args->length(); i++) {
1799 if (args->at(i)->type()->is_object_kind()) {
1800 obj_args->push(args->at(i));
1801 j++;
1802 }
1803 }
1804 check_args_for_profiling(obj_args, s);
1805 return obj_args;
1806 }
1807
1808
// Compile one of the invoke* bytecodes: resolve the callee from the bytecode
// stream, push an appendix argument if the call site carries one, try to
// devirtualize (exact receiver type / CHA) and inline the target, and fall
// back to emitting a standard Invoke instruction when inlining fails.
// NOTE(review): parts of this function's body are elided in this excerpt.
1809 void GraphBuilder::invoke(Bytecodes::Code code) {
1810 bool will_link;
1811 ciSignature* declared_signature = NULL;
1812 ciMethod* target = stream()->get_method(will_link, &declared_signature);
1813 ciKlass* holder = stream()->get_declared_method_holder();
1814 const Bytecodes::Code bc_raw = stream()->cur_bc_raw();
1815 assert(declared_signature != NULL, "cannot be null");
     // Linking success and target loadedness are equivalent here, so the rest
     // of the function can test target->is_loaded() alone.
1816 assert(will_link == target->is_loaded(), "Check");
1817
1818 ciInstanceKlass* klass = target->holder();
1819 assert(!target->is_loaded() || klass->is_loaded(), "loaded target must imply loaded klass");
1820
1821 // check if CHA possible: if so, change the code to invoke_special
1822 ciInstanceKlass* calling_klass = method()->holder();
1823 ciInstanceKlass* callee_holder = ciEnv::get_instance_klass_for_declared_method_holder(holder);
1824 ciInstanceKlass* actual_recv = callee_holder;
1825
1826 CompileLog* log = compilation()->log();
1827 if (log != NULL)
1828 log->elem("call method='%d' instr='%s'",
1829 log->identify(target),
1830 Bytecodes::name(code));
1831
1832 // Some methods are obviously bindable without any type checks so
1833 // convert them directly to an invokespecial or invokestatic.
1834 if (target->is_loaded() && !target->is_abstract() && target->can_be_statically_bound()) {
1835 switch (bc_raw) {
1836 case Bytecodes::_invokevirtual:
1837 code = Bytecodes::_invokespecial;
1838 break;
1839 case Bytecodes::_invokehandle:
1847 }
1848 }
1849
1850 // Push appendix argument (MethodType, CallSite, etc.), if one.
1851 bool patch_for_appendix = false;
1852 int patching_appendix_arg = 0;
1853 if (Bytecodes::has_optional_appendix(bc_raw) && (!will_link || PatchALot)) {
     // Call site not linkable yet (or PatchALot testing): push a placeholder
     // constant to be patched; patching_appendix_arg ensures it is popped
     // again with the regular arguments below.
1854 Value arg = append(new Constant(new ObjectConstant(compilation()->env()->unloaded_ciinstance()), copy_state_before()));
1855 apush(arg);
1856 patch_for_appendix = true;
1857 patching_appendix_arg = (will_link && stream()->has_appendix()) ? 0 : 1;
1858 } else if (stream()->has_appendix()) {
     // Linked call site with an appendix: push it as a constant trailing argument.
1859 ciObject* appendix = stream()->get_appendix();
1860 Value arg = append(new Constant(new ObjectConstant(appendix)));
1861 apush(arg);
1862 }
1863
     // Try to narrow the call to a unique concrete target, either from the
     // receiver's exact/declared type or via class hierarchy analysis.
1864 ciMethod* cha_monomorphic_target = NULL;
1865 ciMethod* exact_target = NULL;
1866 Value better_receiver = NULL;
1867 if (UseCHA && DeoptC1 && target->is_loaded() &&
1868 !(// %%% FIXME: Are both of these relevant?
1869 target->is_method_handle_intrinsic() ||
1870 target->is_compiled_lambda_form()) &&
1871 !patch_for_appendix) {
1872 Value receiver = NULL;
1873 ciInstanceKlass* receiver_klass = NULL;
1874 bool type_is_exact = false;
1875 // try to find a precise receiver type
1876 if (will_link && !target->is_static()) {
     // The receiver sits below the explicit arguments on the expression stack.
1877 int index = state()->stack_size() - (target->arg_size_no_receiver() + 1);
1878 receiver = state()->stack_at(index);
1879 ciType* type = receiver->exact_type();
1880 if (type != NULL && type->is_loaded() &&
1881 type->is_instance_klass() && !type->as_instance_klass()->is_interface()) {
1882 receiver_klass = (ciInstanceKlass*) type;
1883 type_is_exact = true;
1884 }
1885 if (type == NULL) {
1886 type = receiver->declared_type();
1887 if (type != NULL && type->is_loaded() &&
1967 if (cha_monomorphic_target != NULL) {
1968 if (cha_monomorphic_target->is_abstract()) {
1969 // Do not optimize for abstract methods
1970 cha_monomorphic_target = NULL;
1971 }
1972 }
1973
1974 if (cha_monomorphic_target != NULL) {
1975 if (!(target->is_final_method())) {
1976 // If we inlined because CHA revealed only a single target method,
1977 // then we are dependent on that target method not getting overridden
1978 // by dynamic class loading. Be sure to test the "static" receiver
1979 // dest_method here, as opposed to the actual receiver, which may
1980 // falsely lead us to believe that the receiver is final or private.
1981 dependency_recorder()->assert_unique_concrete_method(actual_recv, cha_monomorphic_target);
1982 }
     // A monomorphic target can be called directly.
1983 code = Bytecodes::_invokespecial;
1984 }
1985
1986 // check if we could do inlining
1987 if (!PatchALot && Inline && target->is_loaded() &&
     // NB: '&&' binds tighter than '||', so this reads as
     // klass->is_initialized() || (klass->is_interface() && target->holder()->is_initialized()).
1988 (klass->is_initialized() || klass->is_interface() && target->holder()->is_initialized())
1989 && !patch_for_appendix) {
1990 // callee is known => check if we have static binding
1991 if (code == Bytecodes::_invokestatic ||
1992 code == Bytecodes::_invokespecial ||
     // i.e. (code == _invokevirtual && target->is_final_method()) — '&&' before '||'.
1993 code == Bytecodes::_invokevirtual && target->is_final_method() ||
1994 code == Bytecodes::_invokedynamic) {
1995 ciMethod* inline_target = (cha_monomorphic_target != NULL) ? cha_monomorphic_target : target;
1996 // static binding => check if callee is ok
1997 bool success = try_inline(inline_target, (cha_monomorphic_target != NULL) || (exact_target != NULL), false, code, better_receiver);
1998
1999 CHECK_BAILOUT();
2000 clear_inline_bailout();
2001
2002 if (success) {
2003 // Register dependence if JVMTI has either breakpoint
2004 // setting or hotswapping of methods capabilities since they may
2005 // cause deoptimization.
2006 if (compilation()->env()->jvmti_can_hotswap_or_post_breakpoint()) {
2007 dependency_recorder()->assert_evol_method(inline_target);
2010 }
2011 } else {
2012 print_inlining(target, "no static binding", /*success*/ false);
2013 }
2014 } else {
2015 print_inlining(target, "not inlineable", /*success*/ false);
2016 }
2017
2018 // If we attempted an inline which did not succeed because of a
2019 // bailout during construction of the callee graph, the entire
2020 // compilation has to be aborted. This is fairly rare and currently
2021 // seems to only occur for jasm-generated classes which contain
2022 // jsr/ret pairs which are not associated with finally clauses and
2023 // do not have exception handlers in the containing method, and are
2024 // therefore not caught early enough to abort the inlining without
2025 // corrupting the graph. (We currently bail out with a non-empty
2026 // stack at a ret in these situations.)
2027 CHECK_BAILOUT();
2028
2029 // inlining not successful => standard invoke
2030 ValueType* result_type = as_ValueType(declared_signature->return_type());
2031 ValueStack* state_before = copy_state_exhandling();
2032
2033 // The bytecode (code) might change in this method so we are checking this very late.
2034 const bool has_receiver =
2035 code == Bytecodes::_invokespecial ||
2036 code == Bytecodes::_invokevirtual ||
2037 code == Bytecodes::_invokeinterface;
     // Pop the explicit arguments (plus the placeholder appendix, if one was
     // pushed above); the receiver, when present, is popped separately.
2038 Values* args = state()->pop_arguments(target->arg_size_no_receiver() + patching_appendix_arg);
2039 Value recv = has_receiver ? apop() : NULL;
2040 int vtable_index = Method::invalid_vtable_index;
2041
2042 #ifdef SPARC
2043 // Currently only supported on Sparc.
2044 // The UseInlineCaches only controls dispatch to invokevirtuals for
2045 // loaded classes which we weren't able to statically bind.
2046 if (!UseInlineCaches && target->is_loaded() && code == Bytecodes::_invokevirtual
2047 && !target->can_be_statically_bound()) {
2048 // Find a vtable index if one is available
2049 // For arrays, callee_holder is Object. Resolving the call with
2050 // Object would allow an illegal call to finalize() on an
2051 // array. We use holder instead: illegal calls to finalize() won't
2052 // be compiled as vtable calls (IC call resolution will catch the
2053 // illegal call) and the few legal calls on array types won't be
2054 // either.
2055 vtable_index = target->resolve_vtable_index(calling_klass, holder);
2056 }
2057 #endif
2058
2059 // invokespecial always needs a NULL check. invokevirtual where the target is
2060 // final or where it's not known whether the target is final requires a NULL check.
2061 // Otherwise normal invokevirtual will perform the null check during the lookup
2062 // logic or the unverified entry point. Profiling of calls requires that
2063 // the null check is performed in all cases.
2064
2065 bool do_null_check = (recv != NULL) &&
2066 (code == Bytecodes::_invokespecial || (target->is_loaded() && (target->is_final() || (is_profiling() && profile_calls()))));
2067
     // Call profiling is only recorded inside this branch, i.e. when a null
     // check is emitted — consistent with the comment above that profiling
     // requires the null check in all cases.
2068 if (do_null_check) {
2069 null_check(recv);
2070
2071 if (is_profiling()) {
2072 // Note that we'd collect profile data in this method if we wanted it.
2073 compilation()->set_would_profile(true);
2074
2075 if (profile_calls()) {
2076 assert(cha_monomorphic_target == NULL || exact_target == NULL, "both can not be set");
     // Record the devirtualized target's holder, when one was found, so the
     // profile reflects the narrowed callee rather than the declared target.
2077 ciKlass* target_klass = NULL;
2078 if (cha_monomorphic_target != NULL) {
2079 target_klass = cha_monomorphic_target->holder();
2080 } else if (exact_target != NULL) {
2081 target_klass = exact_target->holder();
2082 }
2083 profile_call(target, recv, target_klass, collect_args_for_profiling(args, NULL, false), false);
2084 }
2085 }
2086 }
2087
2088 Invoke* result = new Invoke(code, result_type, recv, args, vtable_index, target, state_before);
2089 // push result
2090 append_split(result);
2091
     // Non-void results go back on the expression stack; strict (strictfp)
     // methods route the value through round_fp().
2092 if (result_type != voidType) {
2093 if (method()->is_strict()) {
2094 push(result_type, round_fp(result));
2095 } else {
2096 push(result_type, result);
2097 }
2098 }
2099 if (profile_return() && result_type->is_object_kind()) {
2100 profile_return_type(result, target);
2101 }
2102 }
2103
2104
2105 void GraphBuilder::new_instance(int klass_index) {
|