1277 return null(); // do not issue the redundant test
1278 }
1279 Node *oldcontrol = control();
1280 set_control(cfg);
1281 Node *res = cast_not_null(value);
1282 set_control(oldcontrol);
1283 NOT_PRODUCT(explicit_null_checks_elided++);
1284 return res;
1285 }
1286 cfg = IfNode::up_one_dom(cfg, /*linear_only=*/ true);
1287 if (cfg == NULL) break; // Quit at region nodes
1288 depth++;
1289 }
1290 }
1291
1292 //-----------
1293 // Branch to failure if null
1294 float ok_prob = PROB_MAX; // a priori estimate: nulls never happen
1295 Deoptimization::DeoptReason reason;
1296 if (assert_null) {
1297 reason = Deoptimization::Reason_null_assert;
1298 } else if (type == T_OBJECT) {
1299 reason = Deoptimization::reason_null_check(speculative);
1300 } else {
1301 reason = Deoptimization::Reason_div0_check;
1302 }
1303 // %%% Since Reason_unhandled is not recorded on a per-bytecode basis,
1304 // ciMethodData::has_trap_at will return a conservative -1 if any
1305 // must-be-null assertion has failed. This could cause performance
1306 // problems for a method after its first do_null_assert failure.
1307 // Consider using 'Reason_class_check' instead?
1308
1309 // To cause an implicit null check, we set the not-null probability
1310 // to the maximum (PROB_MAX). For an explicit check the probability
1311 // is set to a smaller value.
1312 if (null_control != NULL || too_many_traps(reason)) {
1313 // probability is less likely
1314 ok_prob = PROB_LIKELY_MAG(3);
1315 } else if (!assert_null &&
1316 (ImplicitNullCheckThreshold > 0) &&
1317 method() != NULL &&
2116 if( targ->basic_type() == T_DOUBLE ) {
2117 // If any parameters are doubles, they must be rounded before
2118 // the call, dstore_rounding does gvn.transform
2119 Node *arg = argument(j);
2120 arg = dstore_rounding(arg);
2121 set_argument(j, arg);
2122 }
2123 }
2124 }
2125
/**
 * Record profiling data exact_kls for Node n with the type system so
 * that it can propagate it (speculation)
 *
 * @param n          node that the type applies to
 * @param exact_kls  type from profiling
 * @param maybe_null did profiling see null?
 *
 * @return node with improved type
 */
Node* GraphKit::record_profile_for_speculation(Node* n, ciKlass* exact_kls, bool maybe_null) {
  const Type* current_type = _gvn.type(n);
  assert(UseTypeSpeculation, "type speculation must be on");

  // Start from whatever speculative component n already carries (may be NULL).
  const TypePtr* speculative = current_type->speculative();

  // Should the klass from the profile be recorded in the speculative type?
  if (current_type->would_improve_type(exact_kls, jvms()->depth())) {
    const TypeKlassPtr* tklass = TypeKlassPtr::make(exact_kls);
    const TypeOopPtr* xtype = tklass->as_instance_type();
    assert(xtype->klass_is_exact(), "Should be exact");
    // Any reason to believe n is not null (from this profiling or a previous one)?
    // Keep BOTTOM (may be null) only if both this profile and the existing
    // speculative type allow null; otherwise strengthen to NOTNULL.
    const TypePtr* ptr = (maybe_null && current_type->speculative_maybe_null()) ? TypePtr::BOTTOM : TypePtr::NOTNULL;
    // record the new speculative type's depth
    speculative = xtype->cast_to_ptr_type(ptr->ptr())->is_ptr();
    speculative = speculative->with_inline_depth(jvms()->depth());
  } else if (current_type->would_improve_ptr(maybe_null)) {
    // Profiling report that null was never seen so we can change the
    // speculative type to non null ptr.
    assert(!maybe_null, "nothing to improve");
    if (speculative == NULL) {
      speculative = TypePtr::NOTNULL;
    } else {
      const TypePtr* ptr = TypePtr::NOTNULL;
      speculative = speculative->cast_to_ptr_type(ptr->ptr())->is_ptr();
    }
  }

  if (speculative != current_type->speculative()) {
    // Build a type with a speculative type (what we think we know
    // about the type but will need a guard when we use it)
    const TypeOopPtr* spec_type = TypeOopPtr::make(TypePtr::BotPTR, Type::OffsetBot, TypeOopPtr::InstanceBot, speculative);
    // We're changing the type, we need a new CheckCast node to carry
    // the new type. The new type depends on the control: what
    // profiling tells us is only valid from here as far as we can
    // tell.
    Node* cast = new CheckCastPPNode(control(), n, current_type->remove_speculative()->join_speculative(spec_type));
    cast = _gvn.transform(cast);
    // Propagate the improved type to all uses recorded in the map.
    replace_in_map(n, cast);
    n = cast;
  }

  return n;
}
2180
2181 /**
2182 * Record profiling data from receiver profiling at an invoke with the
2183 * type system so that it can propagate it (speculation)
2184 *
2185 * @param n receiver node
2186 *
2187 * @return node with improved type
2188 */
2189 Node* GraphKit::record_profiled_receiver_for_speculation(Node* n) {
2190 if (!UseTypeSpeculation) {
2191 return n;
2192 }
2193 ciKlass* exact_kls = profile_has_unique_klass();
2194 bool maybe_null = true;
2195 if (java_bc() == Bytecodes::_checkcast ||
2196 java_bc() == Bytecodes::_instanceof ||
2197 java_bc() == Bytecodes::_aastore) {
2198 ciProfileData* data = method()->method_data()->bci_to_data(bci());
2199 maybe_null = data == NULL ? true : data->as_BitData()->null_seen();
2200 }
2201 return record_profile_for_speculation(n, exact_kls, maybe_null);
2202 }
2203
2204 /**
2205 * Record profiling data from argument profiling at an invoke with the
2206 * type system so that it can propagate it (speculation)
2207 *
2208 * @param dest_method target method for the call
2209 * @param bc what invoke bytecode is this?
2210 */
2211 void GraphKit::record_profiled_arguments_for_speculation(ciMethod* dest_method, Bytecodes::Code bc) {
2212 if (!UseTypeSpeculation) {
2213 return;
2214 }
2215 const TypeFunc* tf = TypeFunc::make(dest_method);
2216 int nargs = tf->domain()->cnt() - TypeFunc::Parms;
2217 int skip = Bytecodes::has_receiver(bc) ? 1 : 0;
2218 for (int j = skip, i = 0; j < nargs && i < TypeProfileArgsLimit; j++) {
2219 const Type *targ = tf->domain()->field_at(j + TypeFunc::Parms);
2220 if (targ->basic_type() == T_OBJECT || targ->basic_type() == T_ARRAY) {
2221 bool maybe_null = true;
2222 ciKlass* better_type = NULL;
2223 if (method()->argument_profiled_type(bci(), i, better_type, maybe_null)) {
2224 record_profile_for_speculation(argument(j), better_type, maybe_null);
2225 }
2226 i++;
2227 }
2228 }
2229 }
2230
2231 /**
2232 * Record profiling data from parameter profiling at an invoke with
2233 * the type system so that it can propagate it (speculation)
2234 */
2235 void GraphKit::record_profiled_parameters_for_speculation() {
2236 if (!UseTypeSpeculation) {
2237 return;
2238 }
2239 for (int i = 0, j = 0; i < method()->arg_size() ; i++) {
2240 if (_gvn.type(local(i))->isa_oopptr()) {
2241 bool maybe_null = true;
2242 ciKlass* better_type = NULL;
2243 if (method()->parameter_profiled_type(j, better_type, maybe_null)) {
2244 record_profile_for_speculation(local(i), better_type, maybe_null);
2245 }
2246 j++;
2247 }
2248 }
2249 }
2250
2251 /**
2252 * Record profiling data from return value profiling at an invoke with
2253 * the type system so that it can propagate it (speculation)
2254 */
2255 void GraphKit::record_profiled_return_for_speculation() {
2256 if (!UseTypeSpeculation) {
2257 return;
2258 }
2259 bool maybe_null = true;
2260 ciKlass* better_type = NULL;
2261 if (method()->return_profiled_type(bci(), better_type, maybe_null)) {
2262 // If profiling reports a single type for the return value,
2263 // feed it to the type system so it can propagate it as a
2264 // speculative type
2265 record_profile_for_speculation(stack(sp()-1), better_type, maybe_null);
2266 }
2267 }
2268
2269 void GraphKit::round_double_result(ciMethod* dest_method) {
2270 // A non-strict method may return a double value which has an extended
2271 // exponent, but this must not be visible in a caller which is 'strict'
2272 // If a strict caller invokes a non-strict callee, round a double result
2273
2274 BasicType result_type = dest_method->return_type()->basic_type();
2275 assert( method() != NULL, "must have caller context");
2276 if( result_type == T_DOUBLE && method()->is_strict() && !dest_method->is_strict() ) {
2277 // Destination method's return value is on top of stack
2278 // dstore_rounding() does gvn.transform
2279 Node *result = pop_pair();
2280 result = dstore_rounding(result);
2281 push_pair(result);
2282 }
2283 }
2284
2285 // rounding for strict float precision conformance
2921 phi ->init_req(_null_path, intcon(0)); // Set null path value
2922 if (null_ctl == top()) {
2923 // Do this eagerly, so that pattern matches like is_diamond_phi
2924 // will work even during parsing.
2925 assert(_null_path == PATH_LIMIT-1, "delete last");
2926 region->del_req(_null_path);
2927 phi ->del_req(_null_path);
2928 }
2929
2930 // Do we know the type check always succeed?
2931 bool known_statically = false;
2932 if (_gvn.type(superklass)->singleton()) {
2933 ciKlass* superk = _gvn.type(superklass)->is_klassptr()->klass();
2934 ciKlass* subk = _gvn.type(obj)->is_oopptr()->klass();
2935 if (subk != NULL && subk->is_loaded()) {
2936 int static_res = C->static_subtype_check(superk, subk);
2937 known_statically = (static_res == Compile::SSC_always_true || static_res == Compile::SSC_always_false);
2938 }
2939 }
2940
2941 if (known_statically && UseTypeSpeculation) {
2942 // If we know the type check always succeeds then we don't use the
2943 // profiling data at this bytecode. Don't lose it, feed it to the
2944 // type system as a speculative type.
2945 not_null_obj = record_profiled_receiver_for_speculation(not_null_obj);
2946 } else {
2947 const TypeOopPtr* obj_type = _gvn.type(obj)->is_oopptr();
2948 // We may not have profiling here or it may not help us. If we
2949 // have a speculative type use it to perform an exact cast.
2950 ciKlass* spec_obj_type = obj_type->speculative_type();
2951 if (spec_obj_type != NULL || (ProfileDynamicTypes && data != NULL)) {
2952 Node* cast_obj = maybe_cast_profiled_receiver(not_null_obj, NULL, spec_obj_type, safe_for_replace);
2953 if (stopped()) { // Profile disagrees with this path.
2954 set_control(null_ctl); // Null is the only remaining possibility.
2955 return intcon(0);
2956 }
2957 if (cast_obj != NULL) {
2958 not_null_obj = cast_obj;
2959 }
2960 }
2961 }
2962
2963 // Load the object's klass
2964 Node* obj_klass = load_object_klass(not_null_obj);
2965
2966 // Generate the subtype check
2967 Node* not_subtype_ctrl = gen_subtype_check(obj_klass, superklass);
2968
2969 // Plug in the success path to the general merge in slot 1.
2970 region->init_req(_obj_path, control());
2971 phi ->init_req(_obj_path, intcon(1));
2972
2973 // Plug in the failing path to the general merge in slot 2.
2974 region->init_req(_fail_path, not_subtype_ctrl);
2975 phi ->init_req(_fail_path, intcon(0));
2976
2977 // Return final merged results
2978 set_control( _gvn.transform(region) );
2979 record_for_igvn(region);
2980 return _gvn.transform(phi);
2981 }
2982
2983 //-------------------------------gen_checkcast---------------------------------
2984 // Generate a checkcast idiom. Used by both the checkcast bytecode and the
2985 // array store bytecode. Stack must be as-if BEFORE doing the bytecode so the
2986 // uncommon-trap paths work. Adjust stack after this call.
2987 // If failure_control is supplied and not null, it is filled in with
2988 // the control edge for the cast failure. Otherwise, an appropriate
2989 // uncommon trap or exception is thrown.
2990 Node* GraphKit::gen_checkcast(Node *obj, Node* superklass,
2991 Node* *failure_control) {
2992 kill_dead_locals(); // Benefit all the uncommon traps
2993 const TypeKlassPtr *tk = _gvn.type(superklass)->is_klassptr();
2994 const Type *toop = TypeOopPtr::make_from_klass(tk->klass());
2995
2996 // Fast cutout: Check the case that the cast is vacuously true.
2997 // This detects the common cases where the test will short-circuit
2998 // away completely. We do this before we perform the null check,
2999 // because if the test is going to turn into zero code, we don't
3100
3101 region->init_req(_obj_path, control());
3102 phi ->init_req(_obj_path, cast_obj);
3103
3104 // A merge of NULL or Casted-NotNull obj
3105 Node* res = _gvn.transform(phi);
3106
3107 // Note I do NOT always 'replace_in_map(obj,result)' here.
3108 // if( tk->klass()->can_be_primary_super() )
3109 // This means that if I successfully store an Object into an array-of-String
3110 // I 'forget' that the Object is really now known to be a String. I have to
3111 // do this because we don't have true union types for interfaces - if I store
3112 // a Baz into an array-of-Interface and then tell the optimizer it's an
3113 // Interface, I forget that it's also a Baz and cannot do Baz-like field
3114 // references to it. FIX THIS WHEN UNION TYPES APPEAR!
3115 // replace_in_map( obj, res );
3116
3117 // Return final merged results
3118 set_control( _gvn.transform(region) );
3119 record_for_igvn(region);
3120 return res;
3121 }
3122
3123 //------------------------------next_monitor-----------------------------------
3124 // What number should be given to the next monitor?
3125 int GraphKit::next_monitor() {
3126 int current = jvms()->monitor_depth()* C->sync_stack_slots();
3127 int next = current + C->sync_stack_slots();
3128 // Keep the toplevel high water mark current:
3129 if (C->fixed_slots() < next) C->set_fixed_slots(next);
3130 return current;
3131 }
3132
3133 //------------------------------insert_mem_bar---------------------------------
3134 // Memory barrier to avoid floating things around
3135 // The membar serves as a pinch point between both control and all memory slices.
3136 Node* GraphKit::insert_mem_bar(int opcode, Node* precedent) {
3137 MemBarNode* mb = MemBarNode::make(C, opcode, Compile::AliasIdxBot, precedent);
3138 mb->init_req(TypeFunc::Control, control());
3139 mb->init_req(TypeFunc::Memory, reset_memory());
3140 Node* membar = _gvn.transform(mb);
|
1277 return null(); // do not issue the redundant test
1278 }
1279 Node *oldcontrol = control();
1280 set_control(cfg);
1281 Node *res = cast_not_null(value);
1282 set_control(oldcontrol);
1283 NOT_PRODUCT(explicit_null_checks_elided++);
1284 return res;
1285 }
1286 cfg = IfNode::up_one_dom(cfg, /*linear_only=*/ true);
1287 if (cfg == NULL) break; // Quit at region nodes
1288 depth++;
1289 }
1290 }
1291
1292 //-----------
1293 // Branch to failure if null
1294 float ok_prob = PROB_MAX; // a priori estimate: nulls never happen
1295 Deoptimization::DeoptReason reason;
1296 if (assert_null) {
1297 reason = Deoptimization::reason_null_assert(speculative);
1298 } else if (type == T_OBJECT) {
1299 reason = Deoptimization::reason_null_check(speculative);
1300 } else {
1301 reason = Deoptimization::Reason_div0_check;
1302 }
1303 // %%% Since Reason_unhandled is not recorded on a per-bytecode basis,
1304 // ciMethodData::has_trap_at will return a conservative -1 if any
1305 // must-be-null assertion has failed. This could cause performance
1306 // problems for a method after its first do_null_assert failure.
1307 // Consider using 'Reason_class_check' instead?
1308
1309 // To cause an implicit null check, we set the not-null probability
1310 // to the maximum (PROB_MAX). For an explicit check the probability
1311 // is set to a smaller value.
1312 if (null_control != NULL || too_many_traps(reason)) {
1313 // probability is less likely
1314 ok_prob = PROB_LIKELY_MAG(3);
1315 } else if (!assert_null &&
1316 (ImplicitNullCheckThreshold > 0) &&
1317 method() != NULL &&
2116 if( targ->basic_type() == T_DOUBLE ) {
2117 // If any parameters are doubles, they must be rounded before
2118 // the call, dstore_rounding does gvn.transform
2119 Node *arg = argument(j);
2120 arg = dstore_rounding(arg);
2121 set_argument(j, arg);
2122 }
2123 }
2124 }
2125
/**
 * Record profiling data exact_kls for Node n with the type system so
 * that it can propagate it (speculation)
 *
 * @param n         node that the type applies to
 * @param exact_kls type from profiling
 * @param ptr_kind  whether profiling saw the value as maybe-null,
 *                  never-null or always-null
 *
 * @return node with improved type
 */
Node* GraphKit::record_profile_for_speculation(Node* n, ciKlass* exact_kls, ProfilePtrKind ptr_kind) {
  const Type* current_type = _gvn.type(n);
  assert(UseTypeSpeculation, "type speculation must be on");

  // Start from whatever speculative component n already carries (may be NULL).
  const TypePtr* speculative = current_type->speculative();

  // Should the klass from the profile be recorded in the speculative type?
  if (current_type->would_improve_type(exact_kls, jvms()->depth())) {
    const TypeKlassPtr* tklass = TypeKlassPtr::make(exact_kls);
    const TypeOopPtr* xtype = tklass->as_instance_type();
    assert(xtype->klass_is_exact(), "Should be exact");
    // Any reason to believe n is not null (from this profiling or a previous one)?
    assert(ptr_kind != ProfileAlwaysNull, "impossible here");
    // Keep BOTTOM (may be null) only if both this profile and the existing
    // speculative type allow null; otherwise strengthen to NOTNULL.
    const TypePtr* ptr = (ptr_kind == ProfileMaybeNull && current_type->speculative_maybe_null()) ? TypePtr::BOTTOM : TypePtr::NOTNULL;
    // record the new speculative type's depth
    speculative = xtype->cast_to_ptr_type(ptr->ptr())->is_ptr();
    speculative = speculative->with_inline_depth(jvms()->depth());
  } else if (current_type->would_improve_ptr(ptr_kind)) {
    // Profiling report that null was never seen so we can change the
    // speculative type to non null ptr.
    if (ptr_kind == ProfileAlwaysNull) {
      speculative = TypePtr::NULL_PTR;
    } else {
      assert(ptr_kind == ProfileNeverNull, "nothing else is an improvement");
      const TypePtr* ptr = TypePtr::NOTNULL;
      if (speculative != NULL) {
        speculative = speculative->cast_to_ptr_type(ptr->ptr())->is_ptr();
      } else {
        speculative = ptr;
      }
    }
  }

  if (speculative != current_type->speculative()) {
    // Build a type with a speculative type (what we think we know
    // about the type but will need a guard when we use it)
    const TypeOopPtr* spec_type = TypeOopPtr::make(TypePtr::BotPTR, Type::OffsetBot, TypeOopPtr::InstanceBot, speculative);
    // We're changing the type, we need a new CheckCast node to carry
    // the new type. The new type depends on the control: what
    // profiling tells us is only valid from here as far as we can
    // tell.
    Node* cast = new CheckCastPPNode(control(), n, current_type->remove_speculative()->join_speculative(spec_type));
    cast = _gvn.transform(cast);
    // Propagate the improved type to all uses recorded in the map.
    replace_in_map(n, cast);
    n = cast;
  }

  return n;
}
2185
2186 /**
2187 * Record profiling data from receiver profiling at an invoke with the
2188 * type system so that it can propagate it (speculation)
2189 *
2190 * @param n receiver node
2191 *
2192 * @return node with improved type
2193 */
2194 Node* GraphKit::record_profiled_receiver_for_speculation(Node* n) {
2195 if (!UseTypeSpeculation) {
2196 return n;
2197 }
2198 ciKlass* exact_kls = profile_has_unique_klass();
2199 ProfilePtrKind ptr_kind = ProfileMaybeNull;
2200 if ((java_bc() == Bytecodes::_checkcast ||
2201 java_bc() == Bytecodes::_instanceof ||
2202 java_bc() == Bytecodes::_aastore) &&
2203 method()->method_data()->is_mature()) {
2204 ciProfileData* data = method()->method_data()->bci_to_data(bci());
2205 if (data != NULL) {
2206 if (!data->as_BitData()->null_seen()) {
2207 ptr_kind = ProfileNeverNull;
2208 } else {
2209 assert(data->is_ReceiverTypeData(), "bad profile data type");
2210 ciReceiverTypeData* call = (ciReceiverTypeData*)data->as_ReceiverTypeData();
2211 uint i = 0;
2212 for (; i < call->row_limit(); i++) {
2213 ciKlass* receiver = call->receiver(i);
2214 if (receiver != NULL) {
2215 break;
2216 }
2217 }
2218 ptr_kind = (i == call->row_limit()) ? ProfileAlwaysNull : ProfileMaybeNull;
2219 }
2220 }
2221 }
2222 return record_profile_for_speculation(n, exact_kls, ptr_kind);
2223 }
2224
2225 /**
2226 * Record profiling data from argument profiling at an invoke with the
2227 * type system so that it can propagate it (speculation)
2228 *
2229 * @param dest_method target method for the call
2230 * @param bc what invoke bytecode is this?
2231 */
2232 void GraphKit::record_profiled_arguments_for_speculation(ciMethod* dest_method, Bytecodes::Code bc) {
2233 if (!UseTypeSpeculation) {
2234 return;
2235 }
2236 const TypeFunc* tf = TypeFunc::make(dest_method);
2237 int nargs = tf->domain()->cnt() - TypeFunc::Parms;
2238 int skip = Bytecodes::has_receiver(bc) ? 1 : 0;
2239 for (int j = skip, i = 0; j < nargs && i < TypeProfileArgsLimit; j++) {
2240 const Type *targ = tf->domain()->field_at(j + TypeFunc::Parms);
2241 if (targ->basic_type() == T_OBJECT || targ->basic_type() == T_ARRAY) {
2242 ProfilePtrKind ptr_kind = ProfileMaybeNull;
2243 ciKlass* better_type = NULL;
2244 if (method()->argument_profiled_type(bci(), i, better_type, ptr_kind)) {
2245 record_profile_for_speculation(argument(j), better_type, ptr_kind);
2246 }
2247 i++;
2248 }
2249 }
2250 }
2251
2252 /**
2253 * Record profiling data from parameter profiling at an invoke with
2254 * the type system so that it can propagate it (speculation)
2255 */
2256 void GraphKit::record_profiled_parameters_for_speculation() {
2257 if (!UseTypeSpeculation) {
2258 return;
2259 }
2260 for (int i = 0, j = 0; i < method()->arg_size() ; i++) {
2261 if (_gvn.type(local(i))->isa_oopptr()) {
2262 ProfilePtrKind ptr_kind = ProfileMaybeNull;
2263 ciKlass* better_type = NULL;
2264 if (method()->parameter_profiled_type(j, better_type, ptr_kind)) {
2265 record_profile_for_speculation(local(i), better_type, ptr_kind);
2266 }
2267 j++;
2268 }
2269 }
2270 }
2271
2272 /**
2273 * Record profiling data from return value profiling at an invoke with
2274 * the type system so that it can propagate it (speculation)
2275 */
2276 void GraphKit::record_profiled_return_for_speculation() {
2277 if (!UseTypeSpeculation) {
2278 return;
2279 }
2280 ProfilePtrKind ptr_kind = ProfileMaybeNull;
2281 ciKlass* better_type = NULL;
2282 if (method()->return_profiled_type(bci(), better_type, ptr_kind)) {
2283 // If profiling reports a single type for the return value,
2284 // feed it to the type system so it can propagate it as a
2285 // speculative type
2286 record_profile_for_speculation(stack(sp()-1), better_type, ptr_kind);
2287 }
2288 }
2289
2290 void GraphKit::round_double_result(ciMethod* dest_method) {
2291 // A non-strict method may return a double value which has an extended
2292 // exponent, but this must not be visible in a caller which is 'strict'
2293 // If a strict caller invokes a non-strict callee, round a double result
2294
2295 BasicType result_type = dest_method->return_type()->basic_type();
2296 assert( method() != NULL, "must have caller context");
2297 if( result_type == T_DOUBLE && method()->is_strict() && !dest_method->is_strict() ) {
2298 // Destination method's return value is on top of stack
2299 // dstore_rounding() does gvn.transform
2300 Node *result = pop_pair();
2301 result = dstore_rounding(result);
2302 push_pair(result);
2303 }
2304 }
2305
2306 // rounding for strict float precision conformance
2942 phi ->init_req(_null_path, intcon(0)); // Set null path value
2943 if (null_ctl == top()) {
2944 // Do this eagerly, so that pattern matches like is_diamond_phi
2945 // will work even during parsing.
2946 assert(_null_path == PATH_LIMIT-1, "delete last");
2947 region->del_req(_null_path);
2948 phi ->del_req(_null_path);
2949 }
2950
2951 // Do we know the type check always succeed?
2952 bool known_statically = false;
2953 if (_gvn.type(superklass)->singleton()) {
2954 ciKlass* superk = _gvn.type(superklass)->is_klassptr()->klass();
2955 ciKlass* subk = _gvn.type(obj)->is_oopptr()->klass();
2956 if (subk != NULL && subk->is_loaded()) {
2957 int static_res = C->static_subtype_check(superk, subk);
2958 known_statically = (static_res == Compile::SSC_always_true || static_res == Compile::SSC_always_false);
2959 }
2960 }
2961
2962 if (!known_statically) {
2963 const TypeOopPtr* obj_type = _gvn.type(obj)->is_oopptr();
2964 // We may not have profiling here or it may not help us. If we
2965 // have a speculative type use it to perform an exact cast.
2966 ciKlass* spec_obj_type = obj_type->speculative_type();
2967 if (spec_obj_type != NULL || (ProfileDynamicTypes && data != NULL)) {
2968 Node* cast_obj = maybe_cast_profiled_receiver(not_null_obj, NULL, spec_obj_type, safe_for_replace);
2969 if (stopped()) { // Profile disagrees with this path.
2970 set_control(null_ctl); // Null is the only remaining possibility.
2971 return intcon(0);
2972 }
2973 if (cast_obj != NULL) {
2974 not_null_obj = cast_obj;
2975 }
2976 }
2977 }
2978
2979 // Load the object's klass
2980 Node* obj_klass = load_object_klass(not_null_obj);
2981
2982 // Generate the subtype check
2983 Node* not_subtype_ctrl = gen_subtype_check(obj_klass, superklass);
2984
2985 // Plug in the success path to the general merge in slot 1.
2986 region->init_req(_obj_path, control());
2987 phi ->init_req(_obj_path, intcon(1));
2988
2989 // Plug in the failing path to the general merge in slot 2.
2990 region->init_req(_fail_path, not_subtype_ctrl);
2991 phi ->init_req(_fail_path, intcon(0));
2992
2993 // Return final merged results
2994 set_control( _gvn.transform(region) );
2995 record_for_igvn(region);
2996
2997 // If we know the type check always succeeds then we don't use the
2998 // profiling data at this bytecode. Don't lose it, feed it to the
2999 // type system as a speculative type.
3000 if (safe_for_replace) {
3001 Node* casted_obj = record_profiled_receiver_for_speculation(obj);
3002 replace_in_map(obj, casted_obj);
3003 }
3004
3005 return _gvn.transform(phi);
3006 }
3007
3008 //-------------------------------gen_checkcast---------------------------------
3009 // Generate a checkcast idiom. Used by both the checkcast bytecode and the
3010 // array store bytecode. Stack must be as-if BEFORE doing the bytecode so the
3011 // uncommon-trap paths work. Adjust stack after this call.
3012 // If failure_control is supplied and not null, it is filled in with
3013 // the control edge for the cast failure. Otherwise, an appropriate
3014 // uncommon trap or exception is thrown.
3015 Node* GraphKit::gen_checkcast(Node *obj, Node* superklass,
3016 Node* *failure_control) {
3017 kill_dead_locals(); // Benefit all the uncommon traps
3018 const TypeKlassPtr *tk = _gvn.type(superklass)->is_klassptr();
3019 const Type *toop = TypeOopPtr::make_from_klass(tk->klass());
3020
3021 // Fast cutout: Check the case that the cast is vacuously true.
3022 // This detects the common cases where the test will short-circuit
3023 // away completely. We do this before we perform the null check,
3024 // because if the test is going to turn into zero code, we don't
3125
3126 region->init_req(_obj_path, control());
3127 phi ->init_req(_obj_path, cast_obj);
3128
3129 // A merge of NULL or Casted-NotNull obj
3130 Node* res = _gvn.transform(phi);
3131
3132 // Note I do NOT always 'replace_in_map(obj,result)' here.
3133 // if( tk->klass()->can_be_primary_super() )
3134 // This means that if I successfully store an Object into an array-of-String
3135 // I 'forget' that the Object is really now known to be a String. I have to
3136 // do this because we don't have true union types for interfaces - if I store
3137 // a Baz into an array-of-Interface and then tell the optimizer it's an
3138 // Interface, I forget that it's also a Baz and cannot do Baz-like field
3139 // references to it. FIX THIS WHEN UNION TYPES APPEAR!
3140 // replace_in_map( obj, res );
3141
3142 // Return final merged results
3143 set_control( _gvn.transform(region) );
3144 record_for_igvn(region);
3145
3146 return record_profiled_receiver_for_speculation(res);
3147 }
3148
3149 //------------------------------next_monitor-----------------------------------
3150 // What number should be given to the next monitor?
3151 int GraphKit::next_monitor() {
3152 int current = jvms()->monitor_depth()* C->sync_stack_slots();
3153 int next = current + C->sync_stack_slots();
3154 // Keep the toplevel high water mark current:
3155 if (C->fixed_slots() < next) C->set_fixed_slots(next);
3156 return current;
3157 }
3158
3159 //------------------------------insert_mem_bar---------------------------------
3160 // Memory barrier to avoid floating things around
3161 // The membar serves as a pinch point between both control and all memory slices.
3162 Node* GraphKit::insert_mem_bar(int opcode, Node* precedent) {
3163 MemBarNode* mb = MemBarNode::make(C, opcode, Compile::AliasIdxBot, precedent);
3164 mb->init_req(TypeFunc::Control, control());
3165 mb->init_req(TypeFunc::Memory, reset_memory());
3166 Node* membar = _gvn.transform(mb);
|