// ...
      const TypePtr* adr_typ = ex_con->add_offset(offset);

      Node *adr = basic_plus_adr(ex_node, ex_node, offset);
      const TypeOopPtr* val_type = TypeOopPtr::make_from_klass(env()->String_klass());
      // Conservatively release stores of object references.
      Node *store = store_oop_to_object(control(), ex_node, adr, adr_typ, null(), val_type, T_OBJECT, MemNode::release);

      add_exception_state(make_exception_state(ex_node));
      return;
    }
  }

  // %%% Maybe add entry to OptoRuntime which directly throws the exc.?
  // It won't be much cheaper than bailing to the interp., since we'll
  // have to pass up all the debug-info, and the runtime will have to
  // create the stack trace.

  // Usual case: Bail to interpreter.
  // Reserve the right to recompile if we haven't seen anything yet.

  ciMethod* m = Deoptimization::reason_is_speculate(reason) ? C->method() : NULL;
  Deoptimization::DeoptAction action = Deoptimization::Action_maybe_recompile;
  if (treat_throw_as_hot
      && (method()->method_data()->trap_recompiled_at(bci(), m)
          || C->too_many_traps(reason))) {
    // We cannot afford to take more traps here. Suffer in the interpreter.
    if (C->log() != NULL)
      C->log()->elem("hot_throw preallocated='0' reason='%s' mcount='%d'",
                     Deoptimization::trap_reason_name(reason),
                     C->trap_count(reason));
    action = Deoptimization::Action_none;
  }

  // "must_throw" prunes the JVM state to include only the stack, if there
  // are no local exception handlers. This should cut down on register
  // allocation time and code size, by drastically reducing the number
  // of in-edges on the call to the uncommon trap.

  uncommon_trap(reason, action, (ciKlass*)NULL, (char*)NULL, must_throw);
}


//----------------------------PreserveJVMState---------------------------------
PreserveJVMState::PreserveJVMState(GraphKit* kit, bool clone_map) {
// ...
    alen = _gvn.transform( new (C) LoadRangeNode(0, immutable_memory(), r_adr, TypeInt::POS));
  } else {
    alen = alloc->Ideal_length();
    Node* ccast = alloc->make_ideal_length(_gvn.type(array)->is_oopptr(), &_gvn);
    if (ccast != alen) {
      alen = _gvn.transform(ccast);
    }
  }
  return alen;
}

//------------------------------do_null_check----------------------------------
// Helper function to do a NULL pointer check. Returned value is
// the incoming address with NULL casted away. You are allowed to use the
// not-null value only if you are control dependent on the test.
extern int explicit_null_checks_inserted,
           explicit_null_checks_elided;
Node* GraphKit::null_check_common(Node* value, BasicType type,
                                  // optional arguments for variations:
                                  bool assert_null,
                                  Node* *null_control,
                                  bool speculative) {
  assert(!assert_null || null_control == NULL, "not both at once");
  if (stopped()) return top();
  if (!GenerateCompilerNullChecks && !assert_null && null_control == NULL) {
    // For some performance testing, we may wish to suppress null checking.
    value = cast_not_null(value); // Make it appear to be non-null (4962416).
    return value;
  }
  explicit_null_checks_inserted++;

  // Construct NULL check
  Node *chk = NULL;
  switch(type) {
    case T_LONG   : chk = new (C) CmpLNode(value, _gvn.zerocon(T_LONG)); break;
    case T_INT    : chk = new (C) CmpINode(value, _gvn.intcon(0)); break;
    case T_ARRAY  : // fall through
      type = T_OBJECT;  // simplify further tests
    case T_OBJECT : {
      const Type *t = _gvn.type( value );

      const TypeOopPtr* tp = t->isa_oopptr();
// ...
          replace_in_map(value, null());
          return null();  // do not issue the redundant test
        }
        Node *oldcontrol = control();
        set_control(cfg);
        Node *res = cast_not_null(value);
        set_control(oldcontrol);
        explicit_null_checks_elided++;
        return res;
      }
      cfg = IfNode::up_one_dom(cfg, /*linear_only=*/ true);
      if (cfg == NULL)  break;  // Quit at region nodes
      depth++;
    }
  }

  //-----------
  // Branch to failure if null
  float ok_prob = PROB_MAX;  // a priori estimate: nulls never happen
  Deoptimization::DeoptReason reason;
  if (assert_null) {
    reason = Deoptimization::Reason_null_assert;
  } else if (type == T_OBJECT) {
    reason = speculative ? Deoptimization::Reason_speculate_null_check : Deoptimization::Reason_null_check;
  } else {
    reason = Deoptimization::Reason_div0_check;
  }

  // %%% Since Reason_unhandled is not recorded on a per-bytecode basis,
  // ciMethodData::has_trap_at will return a conservative -1 if any
  // must-be-null assertion has failed. This could cause performance
  // problems for a method after its first do_null_assert failure.
  // Consider using 'Reason_class_check' instead?

  // To cause an implicit null check, we set the not-null probability
  // to the maximum (PROB_MAX). For an explicit check the probability
  // is set to a smaller value.
  if (null_control != NULL || too_many_traps(reason)) {
    // probability is less likely
    ok_prob = PROB_LIKELY_MAG(3);
  } else if (!assert_null &&
             (ImplicitNullCheckThreshold > 0) &&
             method() != NULL &&
             (method()->method_data()->trap_count(reason)
              >= (uint)ImplicitNullCheckThreshold)) {
    ok_prob = PROB_LIKELY_MAG(3);
  }

// ...
  const TypeFunc* tf = TypeFunc::make(dest_method);
  int nargs = tf->_domain->_cnt - TypeFunc::Parms;
  for (int j = 0; j < nargs; j++) {
    const Type *targ = tf->_domain->field_at(j + TypeFunc::Parms);
    if( targ->basic_type() == T_DOUBLE ) {
      // If any parameters are doubles, they must be rounded before
      // the call, dstore_rounding does gvn.transform
      Node *arg = argument(j);
      arg = dstore_rounding(arg);
      set_argument(j, arg);
    }
  }
}
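
// Illustration (hypothetical callee, not taken from this file): for a
// callee declared as m(double, int, double), nargs above is 3 and the
// loop applies dstore_rounding() only to the two T_DOUBLE slots
// (j == 0 and j == 2); the T_INT slot at j == 1 is passed through
// unchanged.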

/**
 * Record profiling data exact_kls for Node n with the type system so
 * that it can propagate it (speculation)
 *
 * @param n           node that the type applies to
 * @param exact_kls   type from profiling
 * @param maybe_null  did profiling see null?
 *
 * @return            node with improved type
 */
Node* GraphKit::record_profile_for_speculation(Node* n, ciKlass* exact_kls, bool maybe_null) {
  const Type* current_type = _gvn.type(n);
  assert(UseTypeSpeculation, "type speculation must be on");

  const TypePtr* speculative = current_type->speculative();

  // Should the klass from the profile be recorded in the speculative type?
  if (current_type->would_improve_type(exact_kls, jvms()->depth())) {
    const TypeKlassPtr* tklass = TypeKlassPtr::make(exact_kls);
    const TypeOopPtr* xtype = tklass->as_instance_type();
    assert(xtype->klass_is_exact(), "Should be exact");
    // Any reason to believe n is not null (from this profiling or a previous one)?
    const TypePtr* ptr = (maybe_null && current_type->speculative_maybe_null()) ? TypePtr::BOTTOM : TypePtr::NOTNULL;
    // record the new speculative type's depth
    speculative = xtype->cast_to_ptr_type(ptr->ptr())->is_ptr();
    speculative = speculative->with_inline_depth(jvms()->depth());
  } else if (current_type->would_improve_ptr(maybe_null)) {
    // Profiling reports that null was never seen, so we can change the
    // speculative type to a non-null ptr.
    assert(!maybe_null, "nothing to improve");
    if (speculative == NULL) {
      speculative = TypePtr::NOTNULL;
    } else {
      const TypePtr* ptr = TypePtr::NOTNULL;
      speculative = speculative->cast_to_ptr_type(ptr->ptr())->is_ptr();
    }
  }

  if (speculative != current_type->speculative()) {
    // Build a type with a speculative type (what we think we know
    // about the type but will need a guard when we use it)
    const TypeOopPtr* spec_type = TypeOopPtr::make(TypePtr::BotPTR, Type::OffsetBot, TypeOopPtr::InstanceBot, speculative);
    // We're changing the type, we need a new CheckCast node to carry
    // the new type. The new type depends on the control: what
    // profiling tells us is only valid from here as far as we can
    // tell.
    Node* cast = new(C) CheckCastPPNode(control(), n, current_type->remove_speculative()->join_speculative(spec_type));
    cast = _gvn.transform(cast);
    replace_in_map(n, cast);
    n = cast;
  }

  return n;
}
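
// Sketch of a call site (names here are hypothetical, not from this
// file): a caller holding a node `recv` and a unique profiled class `k`
// for which profiling never saw null could narrow the type with
//
//   Node* improved = record_profile_for_speculation(recv, k, /*maybe_null=*/ false);
//
// The returned node carries the speculative type, and uses of `recv` in
// the current map have already been rerouted to it via replace_in_map().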

/**
 * Record profiling data from receiver profiling at an invoke with the
 * type system so that it can propagate it (speculation)
 *
 * @param n  receiver node
 *
 * @return   node with improved type
 */
Node* GraphKit::record_profiled_receiver_for_speculation(Node* n) {
  if (!UseTypeSpeculation) {
    return n;
  }
  ciKlass* exact_kls = profile_has_unique_klass();
  bool maybe_null = true;
  if (java_bc() == Bytecodes::_checkcast ||
      java_bc() == Bytecodes::_instanceof ||
      java_bc() == Bytecodes::_aastore) {
    ciProfileData* data = method()->method_data()->bci_to_data(bci());
    // Assign to the outer flag; declaring a fresh `bool maybe_null` here
    // would shadow it and the profile's null_seen bit would be ignored.
    maybe_null = data == NULL ? true : data->as_BitData()->null_seen();
  }
  return record_profile_for_speculation(n, exact_kls, maybe_null);
}
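
// Note (illustrative): only checkcast, instanceof and aastore carry a
// null_seen bit in the MDO (see the assert in seems_never_null below),
// which is why other bytecodes conservatively keep maybe_null == true.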

/**
 * Record profiling data from argument profiling at an invoke with the
 * type system so that it can propagate it (speculation)
 *
 * @param dest_method  target method for the call
 * @param bc           what invoke bytecode is this?
 */
void GraphKit::record_profiled_arguments_for_speculation(ciMethod* dest_method, Bytecodes::Code bc) {
  if (!UseTypeSpeculation) {
    return;
  }
  const TypeFunc* tf = TypeFunc::make(dest_method);
  int nargs = tf->_domain->_cnt - TypeFunc::Parms;
  int skip = Bytecodes::has_receiver(bc) ? 1 : 0;
  for (int j = skip, i = 0; j < nargs && i < TypeProfileArgsLimit; j++) {
    const Type *targ = tf->_domain->field_at(j + TypeFunc::Parms);
    if (targ->basic_type() == T_OBJECT || targ->basic_type() == T_ARRAY) {
      bool maybe_null = true;
      ciKlass* better_type = NULL;
      if (method()->argument_profiled_type(bci(), i, better_type, maybe_null)) {
        record_profile_for_speculation(argument(j), better_type, maybe_null);
      }
      i++;
    }
  }
}
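
// Illustration (hypothetical invoke, not from this file): for an
// invokevirtual of m(int, String, long, Object), skip is 1 for the
// receiver, j walks the remaining signature slots, and i counts only
// the object arguments: String is profile slot i == 0 and Object is
// i == 1, subject to TypeProfileArgsLimit.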

/**
 * Record profiling data from parameter profiling at an invoke with
 * the type system so that it can propagate it (speculation)
 */
void GraphKit::record_profiled_parameters_for_speculation() {
  if (!UseTypeSpeculation) {
    return;
  }
  for (int i = 0, j = 0; i < method()->arg_size() ; i++) {
    if (_gvn.type(local(i))->isa_oopptr()) {
      bool maybe_null = true;
      ciKlass* better_type = NULL;
      if (method()->parameter_profiled_type(j, better_type, maybe_null)) {
        record_profile_for_speculation(local(i), better_type, maybe_null);
      }
      j++;
    }
  }
}

/**
 * Record profiling data from return value profiling at an invoke with
 * the type system so that it can propagate it (speculation)
 */
void GraphKit::record_profiled_return_for_speculation() {
  if (!UseTypeSpeculation) {
    return;
  }
  bool maybe_null = true;
  ciKlass* better_type = NULL;
  if (method()->return_profiled_type(bci(), better_type, maybe_null)) {
    // If profiling reports a single type for the return value,
    // feed it to the type system so it can propagate it as a
    // speculative type
    record_profile_for_speculation(stack(sp()-1), better_type, maybe_null);
  }
}
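
// Sketch of the intended call site (hedged, using only names from this
// file): a parser that has just processed an invoke whose oop result is
// on top of the expression stack can call
//
//   record_profiled_return_for_speculation();
//
// so that stack(sp()-1) -- the return value -- is narrowed before any
// downstream use.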

void GraphKit::round_double_result(ciMethod* dest_method) {
  // A non-strict method may return a double value which has an extended
  // exponent, but this must not be visible in a caller which is 'strict'.
  // If a strict caller invokes a non-strict callee, round the double result.

  BasicType result_type = dest_method->return_type()->basic_type();
  assert( method() != NULL, "must have caller context");
  if( result_type == T_DOUBLE && method()->is_strict() && !dest_method->is_strict() ) {
    // Destination method's return value is on top of stack
    // dstore_rounding() does gvn.transform
    Node *result = pop_pair();
    result = dstore_rounding(result);
    push_pair(result);
  }
}
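
// Background (illustrative, well-established x87 behavior): a non-strict
// callee may leave its double result in an 80-bit x87 register, whose
// exponent range exceeds that of a 64-bit double. dstore_rounding()
// forces the value through a 64-bit store/reload, so a strict caller
// never observes the extended range.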

// rounding for strict float precision conformance
Node* GraphKit::precision_rounding(Node* n) {
  return UseStrictFP && _method->flags().is_strict()
         && UseSSE == 0 && Matcher::strict_fp_requires_explicit_rounding
// ...
// [slow_call] \[fast_result]
// Ctl Val \ \
// | \ \
// Catch <1> \ \
// / \ ^ \ \
// Ex No_Ex | \ \
// | \ \ | \ <2> \
// ... \ [slow_res] | | \ [null_result]
// \ \--+--+--- | |
// \ | / \ | /
// --------Region Phi
//
//=============================================================================
// Code is structured as a series of driver functions all called 'do_XXX' that
// call a set of helper functions. Helper functions first, then drivers.

//------------------------------null_check_oop---------------------------------
// Null check oop. Set null-path control into Region in slot 3.
// Make a cast-not-nullness use the other not-null control. Return cast.
Node* GraphKit::null_check_oop(Node* value, Node* *null_control,
                               bool never_see_null,
                               bool safe_for_replace,
                               bool speculative) {
  // Initial NULL check taken path
  (*null_control) = top();
  Node* cast = null_check_common(value, T_OBJECT, false, null_control, speculative);

  // Generate uncommon_trap:
  if (never_see_null && (*null_control) != top()) {
    // If we see an unexpected null at a check-cast we record it and force a
    // recompile; the offending check-cast will be compiled to handle NULLs.
    // If we see more than one offending BCI, then all checkcasts in the
    // method will be compiled to handle NULLs.
    PreserveJVMState pjvms(this);
    set_control(*null_control);
    replace_in_map(value, null());
    Deoptimization::DeoptReason reason = speculative ? Deoptimization::Reason_speculate_null_check : Deoptimization::Reason_null_check;
    uncommon_trap(reason,
                  Deoptimization::Action_make_not_entrant);
    (*null_control) = top();  // NULL path is dead
  }
  if ((*null_control) == top() && safe_for_replace) {
    replace_in_map(value, cast);
  }

  // Cast away null-ness on the result
  return cast;
}
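
// Sketch of the calling convention (mirrors gen_instanceof below): the
// caller passes the address of a null-path slot and wires it into its
// merge region afterwards, e.g.
//
//   Node* null_ctl = top();
//   Node* cast = null_check_oop(obj, &null_ctl, never_see_null,
//                               safe_for_replace, speculative_not_null);
//   region->init_req(_null_path, null_ctl);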

//------------------------------opt_iff----------------------------------------
// Optimize the fast-check IfNode. Set the fast-path region slot 2.
// Return slow-path control.
Node* GraphKit::opt_iff(Node* region, Node* iff) {
  IfNode *opt_iff = _gvn.transform(iff)->as_If();

  // Fast path taken; set region slot 2
  Node *fast_taken = _gvn.transform( new (C) IfFalseNode(opt_iff) );
  region->init_req(2,fast_taken); // Capture fast-control
// ...

  const TypeOopPtr* recv_xtype = tklass->as_instance_type();
  assert(recv_xtype->klass_is_exact(), "");

  // Subsume downstream occurrences of receiver with a cast to
  // recv_xtype, since now we know what the type will be.
  Node* cast = new(C) CheckCastPPNode(control(), receiver, recv_xtype);
  (*casted_receiver) = _gvn.transform(cast);
  // (User must make the replace_in_map call.)

  return fail;
}


//------------------------------seems_never_null-------------------------------
// Use null_seen information if it is available from the profile.
// If we see an unexpected null at a type check we record it and force a
// recompile; the offending check will be recompiled to handle NULLs.
// If we see several offending BCIs, then all checks in the
// method will be recompiled.
bool GraphKit::seems_never_null(Node* obj, ciProfileData* data, bool& speculating) {
  speculating = !_gvn.type(obj)->speculative_maybe_null();
  Deoptimization::DeoptReason reason = speculating ? Deoptimization::Reason_speculate_null_check : Deoptimization::Reason_null_check;
  if (UncommonNullCast            // Cutout for this technique
      && obj != null()            // And not the -Xcomp stupid case?
      && !too_many_traps(reason)
      ) {
    if (speculating) {
      return true;
    }
    if (data == NULL)
      // Edge case: no mature data. Be optimistic here.
      return true;
    // If the profile has not seen a null, assume it won't happen.
    assert(java_bc() == Bytecodes::_checkcast ||
           java_bc() == Bytecodes::_instanceof ||
           java_bc() == Bytecodes::_aastore, "MDO must collect null_seen bit here");
    return !data->as_BitData()->null_seen();
  }
  speculating = false;
  return false;
}
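
// Sketch (mirrors the drivers below): the speculating out-parameter is
// threaded into the null check so the matching trap reason is recorded:
//
//   bool speculative_not_null = false;
//   bool never_see_null = seems_never_null(obj, data, speculative_not_null);
//   Node* not_null_obj = null_check_oop(obj, &null_ctl, never_see_null,
//                                       safe_for_replace, speculative_not_null);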

//------------------------maybe_cast_profiled_receiver-------------------------
// If the profile has seen exactly one type, narrow to exactly that type.
// Subsequent type checks will always fold up.
Node* GraphKit::maybe_cast_profiled_receiver(Node* not_null_obj,
                                             ciKlass* require_klass,
                                             ciKlass* spec_klass,
                                             bool safe_for_replace) {
  if (!UseTypeProfile || !TypeProfileCasts) return NULL;

  Deoptimization::DeoptReason reason = spec_klass == NULL ? Deoptimization::Reason_class_check : Deoptimization::Reason_speculate_class_check;

  // Make sure we haven't already deoptimized from this tactic.
  if (too_many_traps(reason))
    return NULL;

  // (No, this isn't a call, but it's enough like a virtual call
  // to use the same ciMethod accessor to get the profile info...)
// ...
    // assert(ssc == SSC_always_true)... except maybe the profile lied to us.
  }

  return NULL;
}

/**
 * Cast obj to type and emit guard unless we had too many traps here
 * already
 *
 * @param obj       node being casted
 * @param type      type to cast the node to
 * @param not_null  true if we know node cannot be null
 */
Node* GraphKit::maybe_cast_profiled_obj(Node* obj,
                                        ciKlass* type,
                                        bool not_null) {
  // type == NULL if profiling tells us this object is always null
  if (type != NULL) {
    Deoptimization::DeoptReason class_reason = Deoptimization::Reason_speculate_class_check;
    Deoptimization::DeoptReason null_reason = Deoptimization::Reason_speculate_null_check;
    if (!too_many_traps(null_reason) &&
        !too_many_traps(class_reason)) {
      Node* not_null_obj = NULL;
      // not_null is true if we know the object is not null and
      // there's no need for a null check
      if (!not_null) {
        Node* null_ctl = top();
        not_null_obj = null_check_oop(obj, &null_ctl, true, true, true);
        assert(null_ctl->is_top(), "no null control here");
      } else {
        not_null_obj = obj;
      }

      Node* exact_obj = not_null_obj;
      ciKlass* exact_kls = type;
      Node* slow_ctl = type_check_receiver(exact_obj, exact_kls, 1.0,
                                           &exact_obj);
      {
        PreserveJVMState pjvms(this);
        set_control(slow_ctl);
        uncommon_trap(class_reason,
                      Deoptimization::Action_maybe_recompile);
      }
      replace_in_map(not_null_obj, exact_obj);
      obj = exact_obj;
    }
  } else {
    if (!too_many_traps(Deoptimization::Reason_null_assert)) {
// ...

//-------------------------------gen_instanceof--------------------------------
// Generate an instance-of idiom. Used by both the instance-of bytecode
// and the reflective instance-of call.
Node* GraphKit::gen_instanceof(Node* obj, Node* superklass, bool safe_for_replace) {
  kill_dead_locals();   // Benefit all the uncommon traps
  assert( !stopped(), "dead parse path should be checked in callers" );
  assert(!TypePtr::NULL_PTR->higher_equal(_gvn.type(superklass)->is_klassptr()),
         "must check for not-null not-dead klass in callers");

  // Make the merge point
  enum { _obj_path = 1, _fail_path, _null_path, PATH_LIMIT };
  RegionNode* region = new(C) RegionNode(PATH_LIMIT);
  Node*       phi    = new(C) PhiNode(region, TypeInt::BOOL);
  C->set_has_split_ifs(true); // Has chance for split-if optimization

  ciProfileData* data = NULL;
  if (java_bc() == Bytecodes::_instanceof) { // Only for the bytecode
    data = method()->method_data()->bci_to_data(bci());
  }
  bool speculative_not_null = false;
  bool never_see_null = (ProfileDynamicTypes // aggressive use of profile
                         && seems_never_null(obj, data, speculative_not_null));

  // Null check; get casted pointer; set region slot 3
  Node* null_ctl = top();
  Node* not_null_obj = null_check_oop(obj, &null_ctl, never_see_null, safe_for_replace, speculative_not_null);

  // If not_null_obj is dead, only null-path is taken
  if (stopped()) {  // Doing instance-of on a NULL?
    set_control(null_ctl);
    return intcon(0);
  }
  region->init_req(_null_path, null_ctl);
  phi   ->init_req(_null_path, intcon(0)); // Set null path value
  if (null_ctl == top()) {
    // Do this eagerly, so that pattern matches like is_diamond_phi
    // will work even during parsing.
    assert(_null_path == PATH_LIMIT-1, "delete last");
    region->del_req(_null_path);
    phi   ->del_req(_null_path);
  }

  // Do we know the type check always succeeds?
  bool known_statically = false;
  if (_gvn.type(superklass)->singleton()) {
    ciKlass* superk = _gvn.type(superklass)->is_klassptr()->klass();
// ...
    }
  }

  ciProfileData* data = NULL;
  bool safe_for_replace = false;
  if (failure_control == NULL) { // use MDO in regular case only
    assert(java_bc() == Bytecodes::_aastore ||
           java_bc() == Bytecodes::_checkcast,
           "interpreter profiles type checks only for these BCs");
    data = method()->method_data()->bci_to_data(bci());
    safe_for_replace = true;
  }

  // Make the merge point
  enum { _obj_path = 1, _null_path, PATH_LIMIT };
  RegionNode* region = new (C) RegionNode(PATH_LIMIT);
  Node*       phi    = new (C) PhiNode(region, toop);
  C->set_has_split_ifs(true); // Has chance for split-if optimization

  // Use null-cast information if it is available
  bool speculative_not_null = false;
  bool never_see_null = ((failure_control == NULL) // regular case only
                         && seems_never_null(obj, data, speculative_not_null));

  // Null check; get casted pointer; set region slot 3
  Node* null_ctl = top();
  Node* not_null_obj = null_check_oop(obj, &null_ctl, never_see_null, safe_for_replace, speculative_not_null);

  // If not_null_obj is dead, only null-path is taken
  if (stopped()) {  // Doing instance-of on a NULL?
    set_control(null_ctl);
    return null();
  }
  region->init_req(_null_path, null_ctl);
  phi   ->init_req(_null_path, null()); // Set null path value
  if (null_ctl == top()) {
    // Do this eagerly, so that pattern matches like is_diamond_phi
    // will work even during parsing.
    assert(_null_path == PATH_LIMIT-1, "delete last");
    region->del_req(_null_path);
    phi   ->del_req(_null_path);
  }

  Node* cast_obj = NULL;
  if (tk->klass_is_exact()) {
    // The following optimization tries to statically cast the speculative type of the object
    // (for example obtained during profiling) to the type of the superklass and then do a
// ...