1162 // Special-case a fresh allocation to avoid building nodes:
1163 AllocateArrayNode* alloc = AllocateArrayNode::Ideal_array_allocation(array, &_gvn);
1164 Node *alen;
1165 if (alloc == NULL) { // not a fresh allocation; load the length out of the array header
1166 Node *r_adr = basic_plus_adr(array, arrayOopDesc::length_offset_in_bytes());
1167 alen = _gvn.transform( new LoadRangeNode(0, immutable_memory(), r_adr, TypeInt::POS)); // length is known non-negative (TypeInt::POS)
1168 } else {
1169 alen = alloc->Ideal_length(); // length node recorded by the allocation itself
1170 Node* ccast = alloc->make_ideal_length(_gvn.type(array)->is_oopptr(), &_gvn);
1171 if (ccast != alen) { // a sharper (casted) length node was produced — use it instead
1172 alen = _gvn.transform(ccast);
1173 }
1174 }
1175 return alen;
1176 }
1177
1178 //------------------------------do_null_check----------------------------------
1179 // Helper function to do a NULL pointer check. Returned value is
1180 // the incoming address with NULL casted away. You are allowed to use the
1181 // not-null value only if you are control dependent on the test.
1182 #ifndef PRODUCT
1183 extern int explicit_null_checks_inserted,
1184 explicit_null_checks_elided;
1185 #endif
1186 Node* GraphKit::null_check_common(Node* value, BasicType type,
1187 // optional arguments for variations:
1188 bool assert_null,
1189 Node* *null_control,
1190 bool speculative) {
1191 assert(!assert_null || null_control == NULL, "not both at once");
1192 if (stopped()) return top();
1193 if (!GenerateCompilerNullChecks && !assert_null && null_control == NULL) {
1194 // For some performance testing, we may wish to suppress null checking.
1195 value = cast_not_null(value); // Make it appear to be non-null (4962416).
1196 return value;
1197 }
1198 NOT_PRODUCT(explicit_null_checks_inserted++); // statistics counter; compiled out in product builds
1199
1200 // Construct NULL check
1201 Node *chk = NULL;
1202 switch(type) {
1203 case T_LONG : chk = new CmpLNode(value, _gvn.zerocon(T_LONG)); break;
1204 case T_INT : chk = new CmpINode(value, _gvn.intcon(0)); break;
1205 case T_ARRAY : // fall through
1206 type = T_OBJECT; // simplify further tests
1207 case T_OBJECT : {
1208 const Type *t = _gvn.type( value );
1209
1210 const TypeOopPtr* tp = t->isa_oopptr();
1211 if (tp != NULL && tp->klass() != NULL && !tp->klass()->is_loaded()
1212 // Only for do_null_check, not any of its siblings:
1213 && !assert_null && null_control == NULL) {
1214 // Usually, any field access or invocation on an unloaded oop type
1215 // will simply fail to link, since the statically linked class is
1216 // likely also to be unloaded. However, in -Xcomp mode, sometimes
1217 // the static class is loaded but the sharper oop type is not.
1218 // Rather than checking for this obscure case in lots of places,
1219 // we simply observe that a null check on an unloaded class
1220 // will always be followed by a nonsense operation, so we
1221 // can just issue the uncommon trap here.
1222 // Our access to the unloaded class will only be correct
1223 // after it has been loaded and initialized, which requires
1224 // a trip through the interpreter.
1225 #ifndef PRODUCT
1226 if (WizardMode) { tty->print("Null check of unloaded "); tp->klass()->print(); tty->cr(); }
1227 #endif
1228 uncommon_trap(Deoptimization::Reason_unloaded,
1229 Deoptimization::Action_reinterpret,
1230 tp->klass(), "!loaded");
1231 return top();
1232 }
1233
1234 if (assert_null) {
1235 // See if the type is contained in NULL_PTR.
1236 // If so, then the value is already null.
1237 if (t->higher_equal(TypePtr::NULL_PTR)) {
1238 NOT_PRODUCT(explicit_null_checks_elided++);
1239 return value; // Elided null assert quickly!
1240 }
1241 } else {
1242 // See if mixing in the NULL pointer changes type.
1243 // If so, then the NULL pointer was not allowed in the original
1244 // type. In other words, "value" was not-null.
1245 if (t->meet(TypePtr::NULL_PTR) != t->remove_speculative()) {
1246 // same as: if (!TypePtr::NULL_PTR->higher_equal(t)) ...
1247 NOT_PRODUCT(explicit_null_checks_elided++);
1248 return value; // Elided null check quickly!
1249 }
1250 }
1251 chk = new CmpPNode( value, null() ); // pointer compare against NULL
1252 break;
1253 }
1254
1255 default:
1256 fatal("unexpected type: %s", type2name(type));
1257 }
1258 assert(chk != NULL, "sanity check");
1259 chk = _gvn.transform(chk);
1260
1261 BoolTest::mask btest = assert_null ? BoolTest::eq : BoolTest::ne;
1262 BoolNode *btst = new BoolNode( chk, btest);
1263 Node *tst = _gvn.transform( btst );
1264
1265 //-----------
1266 // if peephole optimizations occurred, a prior test existed.
1267 // If a prior test existed, maybe it dominates as we can avoid this test.
1268 if (tst != btst && type == T_OBJECT) {
1269 // At this point we want to scan up the CFG to see if we can
1270 // find an identical test (and so avoid this test altogether).
1271 Node *cfg = control();
1272 int depth = 0;
1273 while( depth < 16 ) { // Limit search depth for speed
1274 if( cfg->Opcode() == Op_IfTrue &&
1275 cfg->in(0)->in(1) == tst ) {
1276 // Found prior test. Use "cast_not_null" to construct an identical
1277 // CastPP (which will therefore hash to the same node) as already
1278 // exists for the prior test. Return that casted value.
1279 if (assert_null) {
1280 replace_in_map(value, null());
1281 return null(); // do not issue the redundant test
1282 }
1283 Node *oldcontrol = control();
1284 set_control(cfg);
1285 Node *res = cast_not_null(value);
1286 set_control(oldcontrol);
1287 NOT_PRODUCT(explicit_null_checks_elided++);
1288 return res;
1289 }
1290 cfg = IfNode::up_one_dom(cfg, /*linear_only=*/ true);
1291 if (cfg == NULL) break; // Quit at region nodes
1292 depth++;
1293 }
1294 }
1295
1296 //-----------
1297 // Branch to failure if null
1298 float ok_prob = PROB_MAX; // a priori estimate: nulls never happen
1299 Deoptimization::DeoptReason reason;
1300 if (assert_null) {
1301 reason = Deoptimization::Reason_null_assert;
1302 } else if (type == T_OBJECT) {
1303 reason = Deoptimization::reason_null_check(speculative);
1304 } else {
1305 reason = Deoptimization::Reason_div0_check;
1306 }
1307 // %%% Since Reason_unhandled is not recorded on a per-bytecode basis,
1311 // Consider using 'Reason_class_check' instead?
1312
1313 // To cause an implicit null check, we set the not-null probability
1314 // to the maximum (PROB_MAX). For an explicit check the probability
1315 // is set to a smaller value.
1316 if (null_control != NULL || too_many_traps(reason)) {
1317 // probability is less likely
1318 ok_prob = PROB_LIKELY_MAG(3);
1319 } else if (!assert_null &&
1320 (ImplicitNullCheckThreshold > 0) &&
1321 method() != NULL &&
1322 (method()->method_data()->trap_count(reason)
1323 >= (uint)ImplicitNullCheckThreshold)) {
1324 ok_prob = PROB_LIKELY_MAG(3);
1325 }
1326
1327 if (null_control != NULL) {
1328 IfNode* iff = create_and_map_if(control(), tst, ok_prob, COUNT_UNKNOWN);
1329 Node* null_true = _gvn.transform( new IfFalseNode(iff));
1330 set_control( _gvn.transform( new IfTrueNode(iff)));
1331 #ifndef PRODUCT
1332 if (null_true == top()) {
1333 explicit_null_checks_elided++;
1334 }
1335 #endif
1336 (*null_control) = null_true; // hand the null path back to the caller
1337 } else {
1338 BuildCutout unless(this, tst, ok_prob);
1339 // Check for optimizer eliding test at parse time
1340 if (stopped()) {
1341 // Failure not possible; do not bother making uncommon trap.
1342 NOT_PRODUCT(explicit_null_checks_elided++);
1343 } else if (assert_null) {
1344 uncommon_trap(reason,
1345 Deoptimization::Action_make_not_entrant,
1346 NULL, "assert_null");
1347 } else {
1348 replace_in_map(value, zerocon(type));
1349 builtin_throw(reason);
1350 }
1351 }
1352
1353 // Must throw exception, fall-thru not possible?
1354 if (stopped()) {
1355 return top(); // No result
1356 }
1357
1358 if (assert_null) {
1359 // Cast obj to null on this path.
1360 replace_in_map(value, zerocon(type));
1361 return zerocon(type);
1362 }
|
1162 // Special-case a fresh allocation to avoid building nodes:
1163 AllocateArrayNode* alloc = AllocateArrayNode::Ideal_array_allocation(array, &_gvn);
1164 Node *alen;
1165 if (alloc == NULL) { // not a fresh allocation; load the length out of the array header
1166 Node *r_adr = basic_plus_adr(array, arrayOopDesc::length_offset_in_bytes());
1167 alen = _gvn.transform( new LoadRangeNode(0, immutable_memory(), r_adr, TypeInt::POS)); // length is known non-negative (TypeInt::POS)
1168 } else {
1169 alen = alloc->Ideal_length(); // length node recorded by the allocation itself
1170 Node* ccast = alloc->make_ideal_length(_gvn.type(array)->is_oopptr(), &_gvn);
1171 if (ccast != alen) { // a sharper (casted) length node was produced — use it instead
1172 alen = _gvn.transform(ccast);
1173 }
1174 }
1175 return alen;
1176 }
1177
1178 //------------------------------do_null_check----------------------------------
1179 // Helper function to do a NULL pointer check. Returned value is
1180 // the incoming address with NULL casted away. You are allowed to use the
1181 // not-null value only if you are control dependent on the test.
1182 #ifndef PRODUCT
1183 extern int explicit_null_checks_inserted,
1184 explicit_null_checks_elided;
1185 #endif
1186 Node* GraphKit::null_check_common(Node* value, BasicType type,
1187 // optional arguments for variations:
1188 bool assert_null,
1189 Node* *null_control,
1190 bool speculative) { // speculative: check derives from speculative type info (affects deopt reason below)
1191 assert(!assert_null || null_control == NULL, "not both at once");
1192 if (stopped()) return top();
1193 if (!GenerateCompilerNullChecks && !assert_null && null_control == NULL) {
1194 // For some performance testing, we may wish to suppress null checking.
1195 value = cast_not_null(value); // Make it appear to be non-null (4962416).
1196 return value;
1197 }
1198 NOT_PRODUCT(explicit_null_checks_inserted++); // statistics counter; compiled out in product builds
1199
1200 // Construct NULL check
1201 Node *chk = NULL;
1202 switch(type) {
1203 case T_LONG : chk = new CmpLNode(value, _gvn.zerocon(T_LONG)); break;
1204 case T_INT : chk = new CmpINode(value, _gvn.intcon(0)); break;
1205 case T_ARRAY : // fall through
1206 type = T_OBJECT; // simplify further tests
1207 case T_OBJECT : {
1208 const Type *t = _gvn.type( value );
1209
1210 const TypeOopPtr* tp = t->isa_oopptr();
1211 if (tp != NULL && tp->klass() != NULL && !tp->klass()->is_loaded()
1212 // Only for do_null_check, not any of its siblings:
1213 && !assert_null && null_control == NULL) {
1214 // Usually, any field access or invocation on an unloaded oop type
1215 // will simply fail to link, since the statically linked class is
1216 // likely also to be unloaded. However, in -Xcomp mode, sometimes
1217 // the static class is loaded but the sharper oop type is not.
1218 // Rather than checking for this obscure case in lots of places,
1219 // we simply observe that a null check on an unloaded class
1220 // will always be followed by a nonsense operation, so we
1221 // can just issue the uncommon trap here.
1222 // Our access to the unloaded class will only be correct
1223 // after it has been loaded and initialized, which requires
1224 // a trip through the interpreter.
1225 #ifndef PRODUCT
1226 if (WizardMode) { tty->print("Null check of unloaded "); tp->klass()->print(); tty->cr(); }
1227 #endif
1228 uncommon_trap(Deoptimization::Reason_unloaded,
1229 Deoptimization::Action_reinterpret,
1230 tp->klass(), "!loaded");
1231 return top();
1232 }
1233
1234 if (assert_null) {
1235 // See if the type is contained in NULL_PTR.
1236 // If so, then the value is already null.
1237 if (t->higher_equal(TypePtr::NULL_PTR)) {
1238 NOT_PRODUCT(explicit_null_checks_elided++);
1239 return value; // Elided null assert quickly!
1240 }
1241 } else {
1242 // See if mixing in the NULL pointer changes type.
1243 // If so, then the NULL pointer was not allowed in the original
1244 // type. In other words, "value" was not-null.
1245 if (t->meet(TypePtr::NULL_PTR) != t->remove_speculative()) {
1246 // same as: if (!TypePtr::NULL_PTR->higher_equal(t)) ...
1247 NOT_PRODUCT(explicit_null_checks_elided++);
1248 return value; // Elided null check quickly!
1249 }
1250 }
1251 chk = new CmpPNode( value, null() ); // pointer compare against NULL
1252 break;
1253 }
1254
1255 default:
1256 fatal("unexpected type: %s", type2name(type));
1257 }
1258 assert(chk != NULL, "sanity check");
1259 chk = _gvn.transform(chk);
1260
1261 BoolTest::mask btest = assert_null ? BoolTest::eq : BoolTest::ne;
1262 BoolNode *btst = new BoolNode( chk, btest);
1263 Node *tst = _gvn.transform( btst );
1264
1265 //-----------
1266 // if peephole optimizations occurred, a prior test existed.
1267 // If a prior test existed, maybe it dominates as we can avoid this test.
1268 if (tst != btst && type == T_OBJECT) {
1269 // At this point we want to scan up the CFG to see if we can
1270 // find an identical test (and so avoid this test altogether).
1271 Node *cfg = control();
1272 int depth = 0;
1273 while( depth < 16 ) { // Limit search depth for speed
1274 if( cfg->Opcode() == Op_IfTrue &&
1275 cfg->in(0)->in(1) == tst ) {
1276 // Found prior test. Use "cast_not_null" to construct an identical
1277 // CastPP (which will therefore hash to the same node) as already
1278 // exists for the prior test. Return that casted value.
1279 if (assert_null) {
1280 replace_in_map(value, null());
1281 return null(); // do not issue the redundant test
1282 }
1283 Node *oldcontrol = control();
1284 set_control(cfg);
1285 Node *res = cast_not_null(value);
1286 set_control(oldcontrol);
1287 NOT_PRODUCT(explicit_null_checks_elided++);
1288 return res;
1289 }
1290 cfg = IfNode::up_one_dom(cfg, /*linear_only=*/ true);
1291 if (cfg == NULL) break; // Quit at region nodes
1292 depth++;
1293 }
1294 }
1295
1296 //-----------
1297 // Branch to failure if null
1298 float ok_prob = PROB_MAX; // a priori estimate: nulls never happen
1299 Deoptimization::DeoptReason reason;
1300 if (assert_null) {
1301 reason = Deoptimization::Reason_null_assert;
1302 } else if (type == T_OBJECT) {
1303 reason = Deoptimization::reason_null_check(speculative);
1304 } else {
1305 reason = Deoptimization::Reason_div0_check;
1306 }
1307 // %%% Since Reason_unhandled is not recorded on a per-bytecode basis,
1311 // Consider using 'Reason_class_check' instead?
1312
1313 // To cause an implicit null check, we set the not-null probability
1314 // to the maximum (PROB_MAX). For an explicit check the probability
1315 // is set to a smaller value.
1316 if (null_control != NULL || too_many_traps(reason)) {
1317 // probability is less likely
1318 ok_prob = PROB_LIKELY_MAG(3);
1319 } else if (!assert_null &&
1320 (ImplicitNullCheckThreshold > 0) &&
1321 method() != NULL &&
1322 (method()->method_data()->trap_count(reason)
1323 >= (uint)ImplicitNullCheckThreshold)) {
1324 ok_prob = PROB_LIKELY_MAG(3);
1325 }
1326
1327 if (null_control != NULL) {
1328 IfNode* iff = create_and_map_if(control(), tst, ok_prob, COUNT_UNKNOWN);
1329 Node* null_true = _gvn.transform( new IfFalseNode(iff));
1330 set_control( _gvn.transform( new IfTrueNode(iff))); // continue parsing on the non-null path
1331 #ifndef PRODUCT
1332 if (null_true == top()) {
1333 explicit_null_checks_elided++;
1334 }
1335 #endif
1336 (*null_control) = null_true; // hand the null path back to the caller
1337 } else {
1338 BuildCutout unless(this, tst, ok_prob); // NOTE(review): appears to leave control on the failing (null) path for the trap/throw below — confirm against BuildCutout
1339 // Check for optimizer eliding test at parse time
1340 if (stopped()) {
1341 // Failure not possible; do not bother making uncommon trap.
1342 NOT_PRODUCT(explicit_null_checks_elided++);
1343 } else if (assert_null) {
1344 uncommon_trap(reason,
1345 Deoptimization::Action_make_not_entrant,
1346 NULL, "assert_null");
1347 } else {
1348 replace_in_map(value, zerocon(type));
1349 builtin_throw(reason);
1350 }
1351 }
1352
1353 // Must throw exception, fall-thru not possible?
1354 if (stopped()) {
1355 return top(); // No result
1356 }
1357
1358 if (assert_null) {
1359 // Cast obj to null on this path.
1360 replace_in_map(value, zerocon(type));
1361 return zerocon(type);
1362 }
|