594 int offset = java_lang_Throwable::get_detailMessage_offset();
595 const TypePtr* adr_typ = ex_con->add_offset(offset);
596
597 Node *adr = basic_plus_adr(ex_node, ex_node, offset);
598 const TypeOopPtr* val_type = TypeOopPtr::make_from_klass(env()->String_klass());
599 Node *store = store_oop_to_object(control(), ex_node, adr, adr_typ, null(), val_type, T_OBJECT);
600
601 add_exception_state(make_exception_state(ex_node));
602 return;
603 }
604 }
605
606 // %%% Maybe add entry to OptoRuntime which directly throws the exc.?
607 // It won't be much cheaper than bailing to the interp., since we'll
608 // have to pass up all the debug-info, and the runtime will have to
609 // create the stack trace.
610
611 // Usual case: Bail to interpreter.
612 // Reserve the right to recompile if we haven't seen anything yet.
613
614 Deoptimization::DeoptAction action = Deoptimization::Action_maybe_recompile;
615 if (treat_throw_as_hot
616 && (method()->method_data()->trap_recompiled_at(bci())
617 || C->too_many_traps(reason))) {
618 // We cannot afford to take more traps here. Suffer in the interpreter.
619 if (C->log() != NULL)
620 C->log()->elem("hot_throw preallocated='0' reason='%s' mcount='%d'",
621 Deoptimization::trap_reason_name(reason),
622 C->trap_count(reason));
623 action = Deoptimization::Action_none;
624 }
625
626 // "must_throw" prunes the JVM state to include only the stack, if there
627 // are no local exception handlers. This should cut down on register
628 // allocation time and code size, by drastically reducing the number
629 // of in-edges on the call to the uncommon trap.
630
631 uncommon_trap(reason, action, (ciKlass*)NULL, (char*)NULL, must_throw);
632 }
633
634
635 //----------------------------PreserveJVMState---------------------------------
636 PreserveJVMState::PreserveJVMState(GraphKit* kit, bool clone_map) {
2721 // Edge case: no mature data. Be optimistic here.
2722 return true;
2723 // If the profile has not seen a null, assume it won't happen.
2724 assert(java_bc() == Bytecodes::_checkcast ||
2725 java_bc() == Bytecodes::_instanceof ||
2726 java_bc() == Bytecodes::_aastore, "MDO must collect null_seen bit here");
2727 return !data->as_BitData()->null_seen();
2728 }
2729 return false;
2730 }
2731
2732 //------------------------maybe_cast_profiled_receiver-------------------------
2733 // If the profile has seen exactly one type, narrow to exactly that type.
2734 // Subsequent type checks will always fold up.
2735 Node* GraphKit::maybe_cast_profiled_receiver(Node* not_null_obj,
2736 ciKlass* require_klass,
2737 ciKlass* spec_klass,
2738 bool safe_for_replace) {
2739 if (!UseTypeProfile || !TypeProfileCasts) return NULL;
2740
2741 // Make sure we haven't already deoptimized from this tactic.
2742 if (too_many_traps(Deoptimization::Reason_class_check))
2743 return NULL;
2744
2745 // (No, this isn't a call, but it's enough like a virtual call
2746 // to use the same ciMethod accessor to get the profile info...)
2747 // If we have a speculative type use it instead of profiling (which
2748 // may not help us)
2749 ciKlass* exact_kls = spec_klass == NULL ? profile_has_unique_klass() : spec_klass;
2750 if (exact_kls != NULL) {// no cast failures here
2751 if (require_klass == NULL ||
2752 static_subtype_check(require_klass, exact_kls) == SSC_always_true) {
2753 // If we narrow the type to match what the type profile sees or
2754 // the speculative type, we can then remove the rest of the
2755 // cast.
2756 // This is a win, even if the exact_kls is very specific,
2757 // because downstream operations, such as method calls,
2758 // will often benefit from the sharper type.
2759 Node* exact_obj = not_null_obj; // will get updated in place...
2760 Node* slow_ctl = type_check_receiver(exact_obj, exact_kls, 1.0,
2761 &exact_obj);
2762 { PreserveJVMState pjvms(this);
2763 set_control(slow_ctl);
2764 uncommon_trap(Deoptimization::Reason_class_check,
2765 Deoptimization::Action_maybe_recompile);
2766 }
2767 if (safe_for_replace) {
2768 replace_in_map(not_null_obj, exact_obj);
2769 }
2770 return exact_obj;
2771 }
2772 // assert(ssc == SSC_always_true)... except maybe the profile lied to us.
2773 }
2774
2775 return NULL;
2776 }
2777
2778 /**
2779 * Cast obj to type and emit guard unless we had too many traps here
2780 * already
2781 *
2782 * @param obj node being casted
2783 * @param type type to cast the node to
2784 * @param not_null true if we know node cannot be null
2785 */
2786 Node* GraphKit::maybe_cast_profiled_obj(Node* obj,
2787 ciKlass* type,
2788 bool not_null) {
2789 // type == NULL if profiling tells us this object is always null
2790 if (type != NULL) {
2791 if (!too_many_traps(Deoptimization::Reason_null_check) &&
2792 !too_many_traps(Deoptimization::Reason_class_check)) {
2793 Node* not_null_obj = NULL;
2794 // not_null is true if we know the object is not null and
2795 // there's no need for a null check
2796 if (!not_null) {
2797 Node* null_ctl = top();
2798 not_null_obj = null_check_oop(obj, &null_ctl, true, true);
2799 assert(null_ctl->is_top(), "no null control here");
2800 } else {
2801 not_null_obj = obj;
2802 }
2803
2804 Node* exact_obj = not_null_obj;
2805 ciKlass* exact_kls = type;
2806 Node* slow_ctl = type_check_receiver(exact_obj, exact_kls, 1.0,
2807 &exact_obj);
2808 {
2809 PreserveJVMState pjvms(this);
2810 set_control(slow_ctl);
2811 uncommon_trap(Deoptimization::Reason_class_check,
2812 Deoptimization::Action_maybe_recompile);
2813 }
2814 replace_in_map(not_null_obj, exact_obj);
2815 obj = exact_obj;
2816 }
2817 } else {
2818 if (!too_many_traps(Deoptimization::Reason_null_assert)) {
2819 Node* exact_obj = null_assert(obj);
2820 replace_in_map(obj, exact_obj);
2821 obj = exact_obj;
2822 }
2823 }
2824 return obj;
2825 }
2826
2827 //-------------------------------gen_instanceof--------------------------------
2828 // Generate an instance-of idiom. Used by both the instance-of bytecode
2829 // and the reflective instance-of call.
2830 Node* GraphKit::gen_instanceof(Node* obj, Node* superklass, bool safe_for_replace) {
2831 kill_dead_locals(); // Benefit all the uncommon traps
2860 if (null_ctl == top()) {
2861 // Do this eagerly, so that pattern matches like is_diamond_phi
2862 // will work even during parsing.
2863 assert(_null_path == PATH_LIMIT-1, "delete last");
2864 region->del_req(_null_path);
2865 phi ->del_req(_null_path);
2866 }
2867
2868   // Do we know the type check always succeeds?
2869 bool known_statically = false;
2870 if (_gvn.type(superklass)->singleton()) {
2871 ciKlass* superk = _gvn.type(superklass)->is_klassptr()->klass();
2872 ciKlass* subk = _gvn.type(obj)->is_oopptr()->klass();
2873 if (subk != NULL && subk->is_loaded()) {
2874 int static_res = static_subtype_check(superk, subk);
2875 known_statically = (static_res == SSC_always_true || static_res == SSC_always_false);
2876 }
2877 }
2878
2879 if (known_statically && UseTypeSpeculation) {
2880     // If we know the type check always succeeds then we don't use the
2881 // profiling data at this bytecode. Don't lose it, feed it to the
2882 // type system as a speculative type.
2883 not_null_obj = record_profiled_receiver_for_speculation(not_null_obj);
2884 } else {
2885 const TypeOopPtr* obj_type = _gvn.type(obj)->is_oopptr();
2886 // We may not have profiling here or it may not help us. If we
2887 // have a speculative type use it to perform an exact cast.
2888 ciKlass* spec_obj_type = obj_type->speculative_type();
2889 if (spec_obj_type != NULL || (ProfileDynamicTypes && data != NULL)) {
2890 Node* cast_obj = maybe_cast_profiled_receiver(not_null_obj, NULL, spec_obj_type, safe_for_replace);
2891 if (stopped()) { // Profile disagrees with this path.
2892 set_control(null_ctl); // Null is the only remaining possibility.
2893 return intcon(0);
2894 }
2895 if (cast_obj != NULL) {
2896 not_null_obj = cast_obj;
2897 }
2898 }
2899 }
2900
|
594 int offset = java_lang_Throwable::get_detailMessage_offset();
595 const TypePtr* adr_typ = ex_con->add_offset(offset);
596
597 Node *adr = basic_plus_adr(ex_node, ex_node, offset);
598 const TypeOopPtr* val_type = TypeOopPtr::make_from_klass(env()->String_klass());
599 Node *store = store_oop_to_object(control(), ex_node, adr, adr_typ, null(), val_type, T_OBJECT);
600
601 add_exception_state(make_exception_state(ex_node));
602 return;
603 }
604 }
605
606 // %%% Maybe add entry to OptoRuntime which directly throws the exc.?
607 // It won't be much cheaper than bailing to the interp., since we'll
608 // have to pass up all the debug-info, and the runtime will have to
609 // create the stack trace.
610
611 // Usual case: Bail to interpreter.
612 // Reserve the right to recompile if we haven't seen anything yet.
613
614 assert(!Deoptimization::reason_is_speculate(reason), "unsupported");
615 Deoptimization::DeoptAction action = Deoptimization::Action_maybe_recompile;
616 if (treat_throw_as_hot
617 && (method()->method_data()->trap_recompiled_at(bci(), NULL)
618 || C->too_many_traps(reason))) {
619 // We cannot afford to take more traps here. Suffer in the interpreter.
620 if (C->log() != NULL)
621 C->log()->elem("hot_throw preallocated='0' reason='%s' mcount='%d'",
622 Deoptimization::trap_reason_name(reason),
623 C->trap_count(reason));
624 action = Deoptimization::Action_none;
625 }
626
627 // "must_throw" prunes the JVM state to include only the stack, if there
628 // are no local exception handlers. This should cut down on register
629 // allocation time and code size, by drastically reducing the number
630 // of in-edges on the call to the uncommon trap.
631
632 uncommon_trap(reason, action, (ciKlass*)NULL, (char*)NULL, must_throw);
633 }
634
635
636 //----------------------------PreserveJVMState---------------------------------
637 PreserveJVMState::PreserveJVMState(GraphKit* kit, bool clone_map) {
2722 // Edge case: no mature data. Be optimistic here.
2723 return true;
2724 // If the profile has not seen a null, assume it won't happen.
2725 assert(java_bc() == Bytecodes::_checkcast ||
2726 java_bc() == Bytecodes::_instanceof ||
2727 java_bc() == Bytecodes::_aastore, "MDO must collect null_seen bit here");
2728 return !data->as_BitData()->null_seen();
2729 }
2730 return false;
2731 }
2732
2733 //------------------------maybe_cast_profiled_receiver-------------------------
2734 // If the profile has seen exactly one type, narrow to exactly that type.
2735 // Subsequent type checks will always fold up.
2736 Node* GraphKit::maybe_cast_profiled_receiver(Node* not_null_obj,
2737 ciKlass* require_klass,
2738 ciKlass* spec_klass,
2739 bool safe_for_replace) {
2740 if (!UseTypeProfile || !TypeProfileCasts) return NULL;
2741
2742 Deoptimization::DeoptReason reason = spec_klass == NULL ? Deoptimization::Reason_class_check : Deoptimization::Reason_speculate_class_check;
2743
2744 // Make sure we haven't already deoptimized from this tactic.
2745 if (too_many_traps(reason))
2746 return NULL;
2747
2748 // (No, this isn't a call, but it's enough like a virtual call
2749 // to use the same ciMethod accessor to get the profile info...)
2750 // If we have a speculative type use it instead of profiling (which
2751 // may not help us)
2752 ciKlass* exact_kls = spec_klass == NULL ? profile_has_unique_klass() : spec_klass;
2753 if (exact_kls != NULL) {// no cast failures here
2754 if (require_klass == NULL ||
2755 static_subtype_check(require_klass, exact_kls) == SSC_always_true) {
2756 // If we narrow the type to match what the type profile sees or
2757 // the speculative type, we can then remove the rest of the
2758 // cast.
2759 // This is a win, even if the exact_kls is very specific,
2760 // because downstream operations, such as method calls,
2761 // will often benefit from the sharper type.
2762 Node* exact_obj = not_null_obj; // will get updated in place...
2763 Node* slow_ctl = type_check_receiver(exact_obj, exact_kls, 1.0,
2764 &exact_obj);
2765 { PreserveJVMState pjvms(this);
2766 set_control(slow_ctl);
2767 uncommon_trap(reason,
2768 Deoptimization::Action_maybe_recompile);
2769 }
2770 if (safe_for_replace) {
2771 replace_in_map(not_null_obj, exact_obj);
2772 }
2773 return exact_obj;
2774 }
2775 // assert(ssc == SSC_always_true)... except maybe the profile lied to us.
2776 }
2777
2778 return NULL;
2779 }
2780
2781 /**
2782 * Cast obj to type and emit guard unless we had too many traps here
2783 * already
2784 *
2785 * @param obj node being casted
2786 * @param type type to cast the node to
2787 * @param not_null true if we know node cannot be null
2788 */
2789 Node* GraphKit::maybe_cast_profiled_obj(Node* obj,
2790 ciKlass* type,
2791 bool not_null) {
2792 // type == NULL if profiling tells us this object is always null
2793 if (type != NULL) {
2794 Deoptimization::DeoptReason class_reason = Deoptimization::Reason_speculate_class_check;
2795 Deoptimization::DeoptReason null_reason = Deoptimization::Reason_null_check;
2796 if (!too_many_traps(null_reason) &&
2797 !too_many_traps(class_reason)) {
2798 Node* not_null_obj = NULL;
2799 // not_null is true if we know the object is not null and
2800 // there's no need for a null check
2801 if (!not_null) {
2802 Node* null_ctl = top();
2803 not_null_obj = null_check_oop(obj, &null_ctl, true, true);
2804 assert(null_ctl->is_top(), "no null control here");
2805 } else {
2806 not_null_obj = obj;
2807 }
2808
2809 Node* exact_obj = not_null_obj;
2810 ciKlass* exact_kls = type;
2811 Node* slow_ctl = type_check_receiver(exact_obj, exact_kls, 1.0,
2812 &exact_obj);
2813 {
2814 PreserveJVMState pjvms(this);
2815 set_control(slow_ctl);
2816 uncommon_trap(class_reason,
2817 Deoptimization::Action_maybe_recompile);
2818 }
2819 replace_in_map(not_null_obj, exact_obj);
2820 obj = exact_obj;
2821 }
2822 } else {
2823 if (!too_many_traps(Deoptimization::Reason_null_assert)) {
2824 Node* exact_obj = null_assert(obj);
2825 replace_in_map(obj, exact_obj);
2826 obj = exact_obj;
2827 }
2828 }
2829 return obj;
2830 }
2831
2832 //-------------------------------gen_instanceof--------------------------------
2833 // Generate an instance-of idiom. Used by both the instance-of bytecode
2834 // and the reflective instance-of call.
2835 Node* GraphKit::gen_instanceof(Node* obj, Node* superklass, bool safe_for_replace) {
2836 kill_dead_locals(); // Benefit all the uncommon traps
2865 if (null_ctl == top()) {
2866 // Do this eagerly, so that pattern matches like is_diamond_phi
2867 // will work even during parsing.
2868 assert(_null_path == PATH_LIMIT-1, "delete last");
2869 region->del_req(_null_path);
2870 phi ->del_req(_null_path);
2871 }
2872
2873   // Do we know the type check always succeeds?
2874 bool known_statically = false;
2875 if (_gvn.type(superklass)->singleton()) {
2876 ciKlass* superk = _gvn.type(superklass)->is_klassptr()->klass();
2877 ciKlass* subk = _gvn.type(obj)->is_oopptr()->klass();
2878 if (subk != NULL && subk->is_loaded()) {
2879 int static_res = static_subtype_check(superk, subk);
2880 known_statically = (static_res == SSC_always_true || static_res == SSC_always_false);
2881 }
2882 }
2883
2884 if (known_statically && UseTypeSpeculation) {
2885 // If we know the type check always succeeds then we don't use the
2886 // profiling data at this bytecode. Don't lose it, feed it to the
2887 // type system as a speculative type.
2888 not_null_obj = record_profiled_receiver_for_speculation(not_null_obj);
2889 } else {
2890 const TypeOopPtr* obj_type = _gvn.type(obj)->is_oopptr();
2891 // We may not have profiling here or it may not help us. If we
2892 // have a speculative type use it to perform an exact cast.
2893 ciKlass* spec_obj_type = obj_type->speculative_type();
2894 if (spec_obj_type != NULL || (ProfileDynamicTypes && data != NULL)) {
2895 Node* cast_obj = maybe_cast_profiled_receiver(not_null_obj, NULL, spec_obj_type, safe_for_replace);
2896 if (stopped()) { // Profile disagrees with this path.
2897 set_control(null_ctl); // Null is the only remaining possibility.
2898 return intcon(0);
2899 }
2900 if (cast_obj != NULL) {
2901 not_null_obj = cast_obj;
2902 }
2903 }
2904 }
2905
|