1220 #ifndef PRODUCT
1221 if (PrintEliminateAllocations) {
1222 tty->print("++++ Eliminated: %d ", boxing->_idx);
1223 boxing->method()->print_short_name(tty);
1224 tty->cr();
1225 }
1226 #endif
1227
1228 return true;
1229 }
1230
1231 //---------------------------set_eden_pointers-------------------------
1232 void PhaseMacroExpand::set_eden_pointers(Node* &eden_top_adr, Node* &eden_end_adr) {
1233 if (UseTLAB) { // Private allocation: load from TLS
1234 Node* thread = transform_later(new ThreadLocalNode());
1235 int tlab_top_offset = in_bytes(JavaThread::tlab_top_offset());
1236 int tlab_end_offset = in_bytes(JavaThread::tlab_end_offset());
1237 eden_top_adr = basic_plus_adr(top()/*not oop*/, thread, tlab_top_offset);
1238 eden_end_adr = basic_plus_adr(top()/*not oop*/, thread, tlab_end_offset);
1239 } else { // Shared allocation: load from globals
1240 CollectedHeap* ch = Universe::heap();
1241 address top_adr = (address)ch->top_addr();
1242 address end_adr = (address)ch->end_addr();
1243 eden_top_adr = makecon(TypeRawPtr::make(top_adr));
1244 eden_end_adr = basic_plus_adr(eden_top_adr, end_adr - top_adr);
1245 }
1246 }
1247
1248
1249 Node* PhaseMacroExpand::make_load(Node* ctl, Node* mem, Node* base, int offset, const Type* value_type, BasicType bt) {
1250 Node* adr = basic_plus_adr(base, offset);
1251 const TypePtr* adr_type = adr->bottom_type()->is_ptr();
1252 Node* value = LoadNode::make(_igvn, ctl, mem, adr, adr_type, value_type, bt, MemNode::unordered);
1253 transform_later(value);
1254 return value;
1255 }
1256
1257
// Build an unordered store of 'value' (type 'bt') to (base + offset).
1258 Node* PhaseMacroExpand::make_store(Node* ctl, Node* mem, Node* base, int offset, Node* value, BasicType bt) {
1259 Node* adr = basic_plus_adr(base, offset);
1260 mem = StoreNode::make(_igvn, ctl, mem, adr, NULL, value, bt, MemNode::unordered);
// NOTE(review): the chunk jumps from original line 1260 to 1335 here -- the
// rest of make_store and the head of the enclosing routine (presumably the
// allocation-expansion method that defines 'initial_slow_test' and 'ctrl';
// confirm against the full file) are not visible in this view.
1335 // We need a Region and corresponding Phi's to merge the slow-path and fast-path results.
1336 // they will not be used if "always_slow" is set
1337 enum { slow_result_path = 1, fast_result_path = 2 };
// Merge points are created lazily; they stay NULL when only one path exists.
1338 Node *result_region = NULL;
1339 Node *result_phi_rawmem = NULL;
1340 Node *result_phi_rawoop = NULL;
1341 Node *result_phi_i_o = NULL;
1342
1343 // The initial slow comparison is a size check, the comparison
1344 // we want to do is a BoolTest::gt
1345 bool always_slow = false;
// find_int_con yields the test's constant value, or -1 if it is not constant.
1346 int tv = _igvn.find_int_con(initial_slow_test, -1);
1347 if (tv >= 0) {
// Constant test: statically always-slow (1) or always-fast; either way the
// runtime test node is no longer needed.
1348 always_slow = (tv == 1);
1349 initial_slow_test = NULL;
1350 } else {
// Non-constant: canonicalize the condition into a Bool predicate node.
1351 initial_slow_test = BoolNode::make_predicate(initial_slow_test, &_igvn);
1352 }
1353
// DTrace allocation probes, or a heap that cannot do inline contiguous
// allocation without TLABs, rule out the fast path entirely.
// NOTE(review): '||' over '!UseTLAB && (...)' relies on && binding tighter
// than || -- semantics look intended, but an outer paren around the &&
// clause would silence -Wparentheses and match upstream style.
1354 if (C->env()->dtrace_alloc_probes() ||
1355 !UseTLAB && (!Universe::heap()->supports_inline_contig_alloc())) {
1356 // Force slow-path allocation
1357 always_slow = true;
1358 initial_slow_test = NULL;
1359 }
1360
1361
1362 enum { too_big_or_final_path = 1, need_gc_path = 2 };
1363 Node *slow_region = NULL;
1364 Node *toobig_false = ctrl;
1365
// A runtime test and a statically-always-slow decision are mutually exclusive.
1366 assert (initial_slow_test == NULL || !always_slow, "arguments must be consistent")_
1366_fix: assert (initial_slow_test == NULL || !always_slow, "arguments must be consistent");
1367 // generate the initial test if necessary
1368 if (initial_slow_test != NULL ) {
// Region with two incoming edges: the too-big/final path and the GC path.
1369 slow_region = new RegionNode(3);
1370
1371 // Now make the initial failure test. Usually a too-big test but
1372 // might be a TRUE for finalizers or a fancy class check for
1373 // newInstance0.
// PROB_MIN: the failure branch is expected to be rarely taken.
1374 IfNode *toobig_iff = new IfNode(ctrl, initial_slow_test, PROB_MIN, COUNT_UNKNOWN);
1375 transform_later(toobig_iff);
|
1220 #ifndef PRODUCT
1221 if (PrintEliminateAllocations) {
1222 tty->print("++++ Eliminated: %d ", boxing->_idx);
1223 boxing->method()->print_short_name(tty);
1224 tty->cr();
1225 }
1226 #endif
1227
1228 return true;
1229 }
1230
1231 //---------------------------set_eden_pointers-------------------------
1232 void PhaseMacroExpand::set_eden_pointers(Node* &eden_top_adr, Node* &eden_end_adr) {
1233 if (UseTLAB) { // Private allocation: load from TLS
1234 Node* thread = transform_later(new ThreadLocalNode());
1235 int tlab_top_offset = in_bytes(JavaThread::tlab_top_offset());
1236 int tlab_end_offset = in_bytes(JavaThread::tlab_end_offset());
1237 eden_top_adr = basic_plus_adr(top()/*not oop*/, thread, tlab_top_offset);
1238 eden_end_adr = basic_plus_adr(top()/*not oop*/, thread, tlab_end_offset);
1239 } else { // Shared allocation: load from globals
1240 CollectedHeap* ch = GC::gc()->heap();
1241 address top_adr = (address)ch->top_addr();
1242 address end_adr = (address)ch->end_addr();
1243 eden_top_adr = makecon(TypeRawPtr::make(top_adr));
1244 eden_end_adr = basic_plus_adr(eden_top_adr, end_adr - top_adr);
1245 }
1246 }
1247
1248
1249 Node* PhaseMacroExpand::make_load(Node* ctl, Node* mem, Node* base, int offset, const Type* value_type, BasicType bt) {
1250 Node* adr = basic_plus_adr(base, offset);
1251 const TypePtr* adr_type = adr->bottom_type()->is_ptr();
1252 Node* value = LoadNode::make(_igvn, ctl, mem, adr, adr_type, value_type, bt, MemNode::unordered);
1253 transform_later(value);
1254 return value;
1255 }
1256
1257
// Build an unordered store of 'value' (type 'bt') to (base + offset).
1258 Node* PhaseMacroExpand::make_store(Node* ctl, Node* mem, Node* base, int offset, Node* value, BasicType bt) {
1259 Node* adr = basic_plus_adr(base, offset);
1260 mem = StoreNode::make(_igvn, ctl, mem, adr, NULL, value, bt, MemNode::unordered);
// NOTE(review): the chunk jumps from original line 1260 to 1335 here -- the
// rest of make_store and the head of the enclosing routine (presumably the
// allocation-expansion method that defines 'initial_slow_test' and 'ctrl';
// confirm against the full file) are not visible in this view.
1335 // We need a Region and corresponding Phi's to merge the slow-path and fast-path results.
1336 // they will not be used if "always_slow" is set
1337 enum { slow_result_path = 1, fast_result_path = 2 };
// Merge points are created lazily; they stay NULL when only one path exists.
1338 Node *result_region = NULL;
1339 Node *result_phi_rawmem = NULL;
1340 Node *result_phi_rawoop = NULL;
1341 Node *result_phi_i_o = NULL;
1342
1343 // The initial slow comparison is a size check, the comparison
1344 // we want to do is a BoolTest::gt
1345 bool always_slow = false;
// find_int_con yields the test's constant value, or -1 if it is not constant.
1346 int tv = _igvn.find_int_con(initial_slow_test, -1);
1347 if (tv >= 0) {
// Constant test: statically always-slow (1) or always-fast; either way the
// runtime test node is no longer needed.
1348 always_slow = (tv == 1);
1349 initial_slow_test = NULL;
1350 } else {
// Non-constant: canonicalize the condition into a Bool predicate node.
1351 initial_slow_test = BoolNode::make_predicate(initial_slow_test, &_igvn);
1352 }
1353
// DTrace allocation probes, or a heap that cannot do inline contiguous
// allocation without TLABs, rule out the fast path entirely.
// NOTE(review): '||' over '!UseTLAB && (...)' relies on && binding tighter
// than || -- semantics look intended, but an outer paren around the &&
// clause would silence -Wparentheses and match upstream style.
1354 if (C->env()->dtrace_alloc_probes() ||
1355 !UseTLAB && (!GC::gc()->heap()->supports_inline_contig_alloc())) {
1356 // Force slow-path allocation
1357 always_slow = true;
1358 initial_slow_test = NULL;
1359 }
1360
1361
1362 enum { too_big_or_final_path = 1, need_gc_path = 2 };
1363 Node *slow_region = NULL;
1364 Node *toobig_false = ctrl;
1365
// A runtime test and a statically-always-slow decision are mutually exclusive.
1366 assert (initial_slow_test == NULL || !always_slow, "arguments must be consistent");
1367 // generate the initial test if necessary
1368 if (initial_slow_test != NULL ) {
// Region with two incoming edges: the too-big/final path and the GC path.
1369 slow_region = new RegionNode(3);
1370
1371 // Now make the initial failure test. Usually a too-big test but
1372 // might be a TRUE for finalizers or a fancy class check for
1373 // newInstance0.
// PROB_MIN: the failure branch is expected to be rarely taken.
1374 IfNode *toobig_iff = new IfNode(ctrl, initial_slow_test, PROB_MIN, COUNT_UNKNOWN);
1375 transform_later(toobig_iff);
|