1 /*
2 * Copyright 1999-2009 Sun Microsystems, Inc. All Rights Reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
20 * CA 95054 USA or visit www.sun.com if you need additional information or
21 * have any questions.
22 *
1507 ciMethod* target = stream()->get_method(will_link);
1508 // we have to make sure the argument size (incl. the receiver)
1509 // is correct for compilation (the call would fail later during
1510 // linkage anyway) - was bug (gri 7/28/99)
1511 if (target->is_loaded() && target->is_static() != (code == Bytecodes::_invokestatic)) BAILOUT("will cause link error");
1512 ciInstanceKlass* klass = target->holder();
1513
1514 // check if CHA possible: if so, change the code to invoke_special
1515 ciInstanceKlass* calling_klass = method()->holder();
1516 ciKlass* holder = stream()->get_declared_method_holder();
1517 ciInstanceKlass* callee_holder = ciEnv::get_instance_klass_for_declared_method_holder(holder);
1518 ciInstanceKlass* actual_recv = callee_holder;
1519
1520 // some methods are obviously bindable without any type checks so
1521 // convert them directly to an invokespecial.
1522 if (target->is_loaded() && !target->is_abstract() &&
1523 target->can_be_statically_bound() && code == Bytecodes::_invokevirtual) {
1524 code = Bytecodes::_invokespecial;
1525 }
1526
1527 if (code == Bytecodes::_invokedynamic) {
1528 BAILOUT("invokedynamic NYI"); // FIXME
1529 return;
1530 }
1531
1532 // NEEDS_CLEANUP
1533 // I've added the target->is_loaded() test below but I don't really understand
1534 // how klass->is_loaded() can be true and yet target->is_loaded() is false.
1535 // this happened while running the JCK invokevirtual tests under doit. TKR
1536 ciMethod* cha_monomorphic_target = NULL;
1537 ciMethod* exact_target = NULL;
1538 if (UseCHA && DeoptC1 && klass->is_loaded() && target->is_loaded()) {
1539 Value receiver = NULL;
1540 ciInstanceKlass* receiver_klass = NULL;
1541 bool type_is_exact = false;
1542 // try to find a precise receiver type
1543 if (will_link && !target->is_static()) {
1544 int index = state()->stack_size() - (target->arg_size_no_receiver() + 1);
1545 receiver = state()->stack_at(index);
1546 ciType* type = receiver->exact_type();
1547 if (type != NULL && type->is_loaded() &&
1548 type->is_instance_klass() && !type->as_instance_klass()->is_interface()) {
1549 receiver_klass = (ciInstanceKlass*) type;
1550 type_is_exact = true;
1551 }
1552 if (type == NULL) {
1553 type = receiver->declared_type();
1554 if (type != NULL && type->is_loaded() &&
1555 type->is_instance_klass() && !type->as_instance_klass()->is_interface()) {
1556 receiver_klass = (ciInstanceKlass*) type;
1557 if (receiver_klass->is_leaf_type() && !receiver_klass->is_final()) {
1558 // Insert a dependency on this type since
1664 // cause deoptimization.
1665 if (compilation()->env()->jvmti_can_hotswap_or_post_breakpoint()) {
1666 dependency_recorder()->assert_evol_method(inline_target);
1667 }
1668 return;
1669 }
1670 }
1671 }
1672 // If we attempted an inline which did not succeed because of a
1673 // bailout during construction of the callee graph, the entire
1674 // compilation has to be aborted. This is fairly rare and currently
1675 // seems to only occur for jasm-generated classes which contain
1676 // jsr/ret pairs which are not associated with finally clauses and
1677 // do not have exception handlers in the containing method, and are
1678 // therefore not caught early enough to abort the inlining without
1679 // corrupting the graph. (We currently bail out with a non-empty
1680 // stack at a ret in these situations.)
1681 CHECK_BAILOUT();
1682
1683 // inlining not successful => standard invoke
1684 bool is_static = code == Bytecodes::_invokestatic;
1685 ValueType* result_type = as_ValueType(target->return_type());
1686 Values* args = state()->pop_arguments(target->arg_size_no_receiver());
1687 Value recv = is_static ? NULL : apop();
1688 bool is_loaded = target->is_loaded();
1689 int vtable_index = methodOopDesc::invalid_vtable_index;
1690
1691 #ifdef SPARC
1692 // Currently only supported on Sparc.
1693 // The UseInlineCaches only controls dispatch to invokevirtuals for
1694 // loaded classes which we weren't able to statically bind.
1695 if (!UseInlineCaches && is_loaded && code == Bytecodes::_invokevirtual
1696 && !target->can_be_statically_bound()) {
1697 // Find a vtable index if one is available
1698 vtable_index = target->resolve_vtable_index(calling_klass, callee_holder);
1699 }
1700 #endif
1701
1702 if (recv != NULL &&
1703 (code == Bytecodes::_invokespecial ||
1704 !is_loaded || target->is_final() ||
1705 profile_calls())) {
1706 // invokespecial always needs a NULL check. invokevirtual where
1707 // the target is final or where it's not known whether the
1708 // target is final requires a NULL check. Otherwise normal
1709 // invokevirtual will perform the null check during the lookup
1710 // logic or the unverified entry point. Profiling of calls
1711 // requires that the null check is performed in all cases.
1712 null_check(recv);
1713 }
1714
1715 if (profile_calls()) {
1716 assert(cha_monomorphic_target == NULL || exact_target == NULL, "both can not be set");
1717 ciKlass* target_klass = NULL;
1718 if (cha_monomorphic_target != NULL) {
1719 target_klass = cha_monomorphic_target->holder();
1720 } else if (exact_target != NULL) {
1721 target_klass = exact_target->holder();
1722 }
1723 profile_call(recv, target_klass);
1724 }
1725
1726 Invoke* result = new Invoke(code, result_type, recv, args, vtable_index, target);
1727 // push result
1728 append_split(result);
1729
1730 if (result_type != voidType) {
1731 if (method()->is_strict()) {
1732 push(result_type, round_fp(result));
1733 } else {
1734 push(result_type, result);
1735 }
1736 }
1737 }
1738
1739
1740 void GraphBuilder::new_instance(int klass_index) {
1741 bool will_link;
1742 ciKlass* klass = stream()->get_klass(will_link);
1743 assert(klass->is_instance_klass(), "must be an instance klass");
1744 NewInstance* new_instance = new NewInstance(klass->as_instance_klass());
1745 _memory->new_instance(new_instance);
1746 apush(append_split(new_instance));
2845 , _memory(new MemoryBuffer())
2846 , _compilation(compilation)
2847 , _inline_bailout_msg(NULL)
2848 {
2849 int osr_bci = compilation->osr_bci();
2850
2851 // determine entry points and bci2block mapping
2852 BlockListBuilder blm(compilation, scope, osr_bci);
2853 CHECK_BAILOUT();
2854
2855 BlockList* bci2block = blm.bci2block();
2856 BlockBegin* start_block = bci2block->at(0);
2857
2858 assert(is_initialized(), "GraphBuilder must have been initialized");
2859 push_root_scope(scope, bci2block, start_block);
2860
2861 // setup state for std entry
2862 _initial_state = state_at_entry();
2863 start_block->merge(_initial_state);
2864
2865 BlockBegin* sync_handler = NULL;
2866 if (method()->is_synchronized() || _compilation->env()->dtrace_method_probes()) {
2867 // setup an exception handler to do the unlocking and/or notification
2868 sync_handler = new BlockBegin(-1);
2869 sync_handler->set(BlockBegin::exception_entry_flag);
2870 sync_handler->set(BlockBegin::is_on_work_list_flag);
2871 sync_handler->set(BlockBegin::default_exception_handler_flag);
2872
2873 ciExceptionHandler* desc = new ciExceptionHandler(method()->holder(), 0, method()->code_size(), -1, 0);
2874 XHandler* h = new XHandler(desc);
2875 h->set_entry_block(sync_handler);
2876 scope_data()->xhandlers()->append(h);
2877 scope_data()->set_has_handler();
2878 }
2879
2880 // complete graph
2881 _vmap = new ValueMap();
2882 scope->compute_lock_stack_size();
2883 switch (scope->method()->intrinsic_id()) {
2884 case vmIntrinsics::_dabs : // fall through
2885 case vmIntrinsics::_dsqrt : // fall through
2886 case vmIntrinsics::_dsin : // fall through
2887 case vmIntrinsics::_dcos : // fall through
2888 case vmIntrinsics::_dtan : // fall through
2889 case vmIntrinsics::_dlog : // fall through
2890 case vmIntrinsics::_dlog10 : // fall through
2891 {
2892 // Compiles where the root method is an intrinsic need a special
2893 // compilation environment because the bytecodes for the method
2894 // shouldn't be parsed during the compilation, only the special
2895 // Intrinsic node should be emitted. If this isn't done, the
2896 // code for the inlined version will be different than the root
2897 // compiled version which could lead to monotonicity problems on
2898 // intel.
|
1 /*
2 * Copyright 1999-2010 Sun Microsystems, Inc. All Rights Reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
20 * CA 95054 USA or visit www.sun.com if you need additional information or
21 * have any questions.
22 *
1507 ciMethod* target = stream()->get_method(will_link);
1508 // we have to make sure the argument size (incl. the receiver)
1509 // is correct for compilation (the call would fail later during
1510 // linkage anyway) - was bug (gri 7/28/99)
1511 if (target->is_loaded() && target->is_static() != (code == Bytecodes::_invokestatic)) BAILOUT("will cause link error");
1512 ciInstanceKlass* klass = target->holder();
1513
1514 // check if CHA possible: if so, change the code to invoke_special
1515 ciInstanceKlass* calling_klass = method()->holder();
1516 ciKlass* holder = stream()->get_declared_method_holder();
1517 ciInstanceKlass* callee_holder = ciEnv::get_instance_klass_for_declared_method_holder(holder);
1518 ciInstanceKlass* actual_recv = callee_holder;
1519
1520 // some methods are obviously bindable without any type checks so
1521 // convert them directly to an invokespecial.
1522 if (target->is_loaded() && !target->is_abstract() &&
1523 target->can_be_statically_bound() && code == Bytecodes::_invokevirtual) {
1524 code = Bytecodes::_invokespecial;
1525 }
1526
1527 // NEEDS_CLEANUP
1528 // I've added the target->is_loaded() test below but I don't really understand
1529 // how klass->is_loaded() can be true and yet target->is_loaded() is false.
1530 // this happened while running the JCK invokevirtual tests under doit. TKR
1531 ciMethod* cha_monomorphic_target = NULL;
1532 ciMethod* exact_target = NULL;
1533 if (UseCHA && DeoptC1 && klass->is_loaded() && target->is_loaded() &&
1534 !target->is_method_handle_invoke()) {
1535 Value receiver = NULL;
1536 ciInstanceKlass* receiver_klass = NULL;
1537 bool type_is_exact = false;
1538 // try to find a precise receiver type
1539 if (will_link && !target->is_static()) {
1540 int index = state()->stack_size() - (target->arg_size_no_receiver() + 1);
1541 receiver = state()->stack_at(index);
1542 ciType* type = receiver->exact_type();
1543 if (type != NULL && type->is_loaded() &&
1544 type->is_instance_klass() && !type->as_instance_klass()->is_interface()) {
1545 receiver_klass = (ciInstanceKlass*) type;
1546 type_is_exact = true;
1547 }
1548 if (type == NULL) {
1549 type = receiver->declared_type();
1550 if (type != NULL && type->is_loaded() &&
1551 type->is_instance_klass() && !type->as_instance_klass()->is_interface()) {
1552 receiver_klass = (ciInstanceKlass*) type;
1553 if (receiver_klass->is_leaf_type() && !receiver_klass->is_final()) {
1554 // Insert a dependency on this type since
1660 // cause deoptimization.
1661 if (compilation()->env()->jvmti_can_hotswap_or_post_breakpoint()) {
1662 dependency_recorder()->assert_evol_method(inline_target);
1663 }
1664 return;
1665 }
1666 }
1667 }
1668 // If we attempted an inline which did not succeed because of a
1669 // bailout during construction of the callee graph, the entire
1670 // compilation has to be aborted. This is fairly rare and currently
1671 // seems to only occur for jasm-generated classes which contain
1672 // jsr/ret pairs which are not associated with finally clauses and
1673 // do not have exception handlers in the containing method, and are
1674 // therefore not caught early enough to abort the inlining without
1675 // corrupting the graph. (We currently bail out with a non-empty
1676 // stack at a ret in these situations.)
1677 CHECK_BAILOUT();
1678
1679 // inlining not successful => standard invoke
1680 bool is_loaded = target->is_loaded();
1681 bool has_receiver =
1682 code == Bytecodes::_invokespecial ||
1683 code == Bytecodes::_invokevirtual ||
1684 code == Bytecodes::_invokeinterface;
1685 bool is_invokedynamic = code == Bytecodes::_invokedynamic;
1686 ValueType* result_type = as_ValueType(target->return_type());
1687
1688 // We require the debug info to be the "state before" because
1689 // invokedynamics may deoptimize.
1690 ValueStack* state_before = is_invokedynamic ? state()->copy() : NULL;
1691
1692 Values* args = state()->pop_arguments(target->arg_size_no_receiver());
1693 Value recv = has_receiver ? apop() : NULL;
1694 int vtable_index = methodOopDesc::invalid_vtable_index;
1695
1696 #ifdef SPARC
1697 // Currently only supported on Sparc.
1698 // The UseInlineCaches only controls dispatch to invokevirtuals for
1699 // loaded classes which we weren't able to statically bind.
1700 if (!UseInlineCaches && is_loaded && code == Bytecodes::_invokevirtual
1701 && !target->can_be_statically_bound()) {
1702 // Find a vtable index if one is available
1703 vtable_index = target->resolve_vtable_index(calling_klass, callee_holder);
1704 }
1705 #endif
1706
1707 if (recv != NULL &&
1708 (code == Bytecodes::_invokespecial ||
1709 !is_loaded || target->is_final() ||
1710 profile_calls())) {
1711 // invokespecial always needs a NULL check. invokevirtual where
1712 // the target is final or where it's not known whether the
1713 // target is final requires a NULL check. Otherwise normal
1714 // invokevirtual will perform the null check during the lookup
1715 // logic or the unverified entry point. Profiling of calls
1716 // requires that the null check is performed in all cases.
1717 null_check(recv);
1718 }
1719
1720 if (profile_calls()) {
1721 assert(cha_monomorphic_target == NULL || exact_target == NULL, "both can not be set");
1722 ciKlass* target_klass = NULL;
1723 if (cha_monomorphic_target != NULL) {
1724 target_klass = cha_monomorphic_target->holder();
1725 } else if (exact_target != NULL) {
1726 target_klass = exact_target->holder();
1727 }
1728 profile_call(recv, target_klass);
1729 }
1730
1731 Invoke* result = new Invoke(code, result_type, recv, args, vtable_index, target, state_before);
1732 // push result
1733 append_split(result);
1734
1735 if (result_type != voidType) {
1736 if (method()->is_strict()) {
1737 push(result_type, round_fp(result));
1738 } else {
1739 push(result_type, result);
1740 }
1741 }
1742 }
1743
1744
1745 void GraphBuilder::new_instance(int klass_index) {
1746 bool will_link;
1747 ciKlass* klass = stream()->get_klass(will_link);
1748 assert(klass->is_instance_klass(), "must be an instance klass");
1749 NewInstance* new_instance = new NewInstance(klass->as_instance_klass());
1750 _memory->new_instance(new_instance);
1751 apush(append_split(new_instance));
2850 , _memory(new MemoryBuffer())
2851 , _compilation(compilation)
2852 , _inline_bailout_msg(NULL)
2853 {
2854 int osr_bci = compilation->osr_bci();
2855
2856 // determine entry points and bci2block mapping
2857 BlockListBuilder blm(compilation, scope, osr_bci);
2858 CHECK_BAILOUT();
2859
2860 BlockList* bci2block = blm.bci2block();
2861 BlockBegin* start_block = bci2block->at(0);
2862
2863 assert(is_initialized(), "GraphBuilder must have been initialized");
2864 push_root_scope(scope, bci2block, start_block);
2865
2866 // setup state for std entry
2867 _initial_state = state_at_entry();
2868 start_block->merge(_initial_state);
2869
2870 // setup an exception handler to do the unlocking and/or
2871 // notification and unwind the frame.
2872 BlockBegin* sync_handler = new BlockBegin(-1);
2873 sync_handler->set(BlockBegin::exception_entry_flag);
2874 sync_handler->set(BlockBegin::is_on_work_list_flag);
2875 sync_handler->set(BlockBegin::default_exception_handler_flag);
2876
2877 ciExceptionHandler* desc = new ciExceptionHandler(method()->holder(), 0, method()->code_size(), -1, 0);
2878 XHandler* h = new XHandler(desc);
2879 h->set_entry_block(sync_handler);
2880 scope_data()->xhandlers()->append(h);
2881 scope_data()->set_has_handler();
2882
2883 // complete graph
2884 _vmap = new ValueMap();
2885 scope->compute_lock_stack_size();
2886 switch (scope->method()->intrinsic_id()) {
2887 case vmIntrinsics::_dabs : // fall through
2888 case vmIntrinsics::_dsqrt : // fall through
2889 case vmIntrinsics::_dsin : // fall through
2890 case vmIntrinsics::_dcos : // fall through
2891 case vmIntrinsics::_dtan : // fall through
2892 case vmIntrinsics::_dlog : // fall through
2893 case vmIntrinsics::_dlog10 : // fall through
2894 {
2895 // Compiles where the root method is an intrinsic need a special
2896 // compilation environment because the bytecodes for the method
2897 // shouldn't be parsed during the compilation, only the special
2898 // Intrinsic node should be emitted. If this isn't done, the
2899 // code for the inlined version will be different than the root
2900 // compiled version which could lead to monotonicity problems on
2901 // intel.
|