622 C->trap_count(reason));
623 action = Deoptimization::Action_none;
624 }
625
626 // "must_throw" prunes the JVM state to include only the stack, if there
627 // are no local exception handlers. This should cut down on register
628 // allocation time and code size, by drastically reducing the number
629 // of in-edges on the call to the uncommon trap.
630
631 uncommon_trap(reason, action, (ciKlass*)NULL, (char*)NULL, must_throw);
632 }
633
634
635 //----------------------------PreserveJVMState---------------------------------
636 PreserveJVMState::PreserveJVMState(GraphKit* kit, bool clone_map) {
637 debug_only(kit->verify_map());
638 _kit = kit;
639 _map = kit->map(); // preserve the map
640 _sp = kit->sp();
641 kit->set_map(clone_map ? kit->clone_map() : NULL);
642 #ifdef ASSERT
643 _bci = kit->bci();
644 Parse* parser = kit->is_Parse();
645 int block = (parser == NULL || parser->block() == NULL) ? -1 : parser->block()->rpo();
646 _block = block;
647 #endif
648 }
649 PreserveJVMState::~PreserveJVMState() {
650 GraphKit* kit = _kit;
651 #ifdef ASSERT
652 assert(kit->bci() == _bci, "bci must not shift");
653 Parse* parser = kit->is_Parse();
654 int block = (parser == NULL || parser->block() == NULL) ? -1 : parser->block()->rpo();
655 assert(block == _block, "block must not shift");
656 #endif
657 kit->set_map(_map);
658 kit->set_sp(_sp);
659 }
660
661
662 //-----------------------------BuildCutout-------------------------------------
663 BuildCutout::BuildCutout(GraphKit* kit, Node* p, float prob, float cnt)
664 : PreserveJVMState(kit)
665 {
666 assert(p->is_Con() || p->is_Bool(), "test must be a bool");
667 SafePointNode* outer_map = _map; // preserved map is caller's
668 SafePointNode* inner_map = kit->map();
669 IfNode* iff = kit->create_and_map_if(outer_map->control(), p, prob, cnt);
670 outer_map->set_control(kit->gvn().transform( new (kit->C) IfTrueNode(iff) ));
671 inner_map->set_control(kit->gvn().transform( new (kit->C) IfFalseNode(iff) ));
672 }
673 BuildCutout::~BuildCutout() {
674 GraphKit* kit = _kit;
675 assert(kit->stopped(), "cutout code must stop, throw, return, etc.");
676 }
677
678 //---------------------------PreserveReexecuteState----------------------------
1356 const Type *t = _gvn.type(obj);
1357 const Type *t_not_null = t->join(TypePtr::NOTNULL);
1358 // Object is already not-null?
1359 if( t == t_not_null ) return obj;
1360
1361 Node *cast = new (C) CastPPNode(obj,t_not_null);
1362 cast->init_req(0, control());
1363 cast = _gvn.transform( cast );
1364
1365 // Scan for instances of 'obj' in the current JVM mapping.
1366 // These instances are known to be not-null after the test.
1367 if (do_replace_in_map)
1368 replace_in_map(obj, cast);
1369
1370 return cast; // Return casted value
1371 }
1372
1373
1374 //--------------------------replace_in_map-------------------------------------
1375 void GraphKit::replace_in_map(Node* old, Node* neww) {
1376 this->map()->replace_edge(old, neww);
1377
1378 // Note: This operation potentially replaces any edge
1379 // on the map. This includes locals, stack, and monitors
1380 // of the current (innermost) JVM state.
1381
1382 // We can consider replacing in caller maps.
1383 // The idea would be that an inlined function's null checks
1384 // can be shared with the entire inlining tree.
1385 // The expense of doing this is that the PreserveJVMState class
1386 // would have to preserve caller states too, with a deep copy.
1387 }
1388
1389
1390 //=============================================================================
1391 //--------------------------------memory---------------------------------------
1392 Node* GraphKit::memory(uint alias_idx) {
1393 MergeMemNode* mem = merged_memory();
1394 Node* p = mem->memory_at(alias_idx);
1395 _gvn.set_type(p, Type::MEMORY); // must be mapped
1396 return p;
1397 }
1398
1399 //-----------------------------reset_memory------------------------------------
1400 Node* GraphKit::reset_memory() {
1401 Node* mem = map()->memory();
1402 // do not use this node for any more parsing!
1403 debug_only( map()->set_memory((Node*)NULL) );
1404 return _gvn.transform( mem );
1405 }
1406
|
622 C->trap_count(reason));
623 action = Deoptimization::Action_none;
624 }
625
626 // "must_throw" prunes the JVM state to include only the stack, if there
627 // are no local exception handlers. This should cut down on register
628 // allocation time and code size, by drastically reducing the number
629 // of in-edges on the call to the uncommon trap.
630
631 uncommon_trap(reason, action, (ciKlass*)NULL, (char*)NULL, must_throw);
632 }
633
634
//----------------------------PreserveJVMState---------------------------------
// Saves the kit's current JVM state (map and stack pointer) so the destructor
// can restore it.  If 'clone_map' is true the kit keeps working on a clone of
// the map; otherwise its map is cleared.
PreserveJVMState::PreserveJVMState(GraphKit* kit, bool clone_map) {
  debug_only(kit->verify_map());
  _kit = kit;
  _map = kit->map();   // preserve the map
  _sp = kit->sp();
  kit->set_map(clone_map ? kit->clone_map() : NULL);
  // Tell the compilation a shallow preserve is active; replace_in_map()
  // checks this flag and refrains from touching caller maps meanwhile.
  Compile::current()->inc_preserve_jvm_state();
#ifdef ASSERT
  // Record the parse position so the destructor can verify it did not shift.
  _bci = kit->bci();
  Parse* parser = kit->is_Parse();
  int block = (parser == NULL || parser->block() == NULL) ? -1 : parser->block()->rpo();
  _block = block;
#endif
}
// Restores the JVM state captured at construction and ends the
// shallow-preserve window opened by the constructor.
PreserveJVMState::~PreserveJVMState() {
  GraphKit* kit = _kit;
#ifdef ASSERT
  // The parse position must be exactly where it was at construction.
  assert(kit->bci() == _bci, "bci must not shift");
  Parse* parser = kit->is_Parse();
  int block = (parser == NULL || parser->block() == NULL) ? -1 : parser->block()->rpo();
  assert(block == _block, "block must not shift");
#endif
  kit->set_map(_map);
  kit->set_sp(_sp);
  Compile::current()->dec_preserve_jvm_state();
}
662
663
//-----------------------------BuildCutout-------------------------------------
// Splits the caller's control with an If: the preserved (caller's) map
// continues on the true projection, while the kit's current map is placed
// on the false projection, where the cutout code is then built.
BuildCutout::BuildCutout(GraphKit* kit, Node* p, float prob, float cnt)
  : PreserveJVMState(kit)
{
  assert(p->is_Con() || p->is_Bool(), "test must be a bool");
  SafePointNode* outer_map = _map; // preserved map is caller's
  SafePointNode* inner_map = kit->map();  // cutout code builds on this map
  IfNode* iff = kit->create_and_map_if(outer_map->control(), p, prob, cnt);
  outer_map->set_control(kit->gvn().transform( new (kit->C) IfTrueNode(iff) ));
  inner_map->set_control(kit->gvn().transform( new (kit->C) IfFalseNode(iff) ));
}
BuildCutout::~BuildCutout() {
  GraphKit* kit = _kit;
  // The cutout path must not fall through into subsequent code.
  assert(kit->stopped(), "cutout code must stop, throw, return, etc.");
}
679
680 //---------------------------PreserveReexecuteState----------------------------
1358 const Type *t = _gvn.type(obj);
1359 const Type *t_not_null = t->join(TypePtr::NOTNULL);
1360 // Object is already not-null?
1361 if( t == t_not_null ) return obj;
1362
1363 Node *cast = new (C) CastPPNode(obj,t_not_null);
1364 cast->init_req(0, control());
1365 cast = _gvn.transform( cast );
1366
1367 // Scan for instances of 'obj' in the current JVM mapping.
1368 // These instances are known to be not-null after the test.
1369 if (do_replace_in_map)
1370 replace_in_map(obj, cast);
1371
1372 return cast; // Return casted value
1373 }
1374
1375
//--------------------------replace_in_map-------------------------------------
// Replaces all occurrences of 'old' by 'neww' in the current JVM state
// (the innermost map) and -- when provably safe -- in the maps of calling
// parsers too, so that a node sharpened inside an inlinee (e.g. the
// not-null cast installed by a null check) also benefits callers.
void GraphKit::replace_in_map(Node* old, Node* neww) {
  if (old == neww) {
    return;  // identity replacement: nothing would change
  }

  map()->replace_edge(old, neww);

  // Note: This operation potentially replaces any edge
  // on the map. This includes locals, stack, and monitors
  // of the current (innermost) JVM state.

  // PreserveJVMState doesn't do a deep copy so we can't modify
  // parents
  if (Compile::current()->has_preserve_jvm_state()) {
    return;
  }

  // Locate the parser whose caller chain we will walk: this kit may itself
  // be a Parse, otherwise start from the parent parser (if any).
  Parse* parser = is_Parse();
  if (parser == NULL) {
    parser = parent_parser();
  }

  bool progress = true;
  Node* ctrl = map()->in(0);  // current control node
  // Follow the chain of parsers and see whether the update can be
  // done in the map of callers. We can do the replace for a caller if
  // the current control post dominates the control of a caller.
  while (parser != NULL && parser->jvms()->caller() != NULL && progress) {
    progress = false;
    Node* parent_map = parser->jvms()->caller()->map();
    assert(parser->exits().map()->jvms()->depth() == parser->jvms()->caller()->depth(), "map mismatch");

    Node* parent_ctrl = parent_map->in(0);

    // Skip over regions that merely forward a single control input
    // (is_copy() returns that input, or NULL for a genuine merge).
    while (parent_ctrl->is_Region()) {
      Node* n = parent_ctrl->as_Region()->is_copy();
      if (n == NULL) {
        break;
      }
      parent_ctrl = n;
    }

    // Walk the current control upward, through uncommon-trap if-patterns
    // and copy regions, looking for the caller's control node.  Note that
    // 'ctrl' keeps its advanced position across outer-loop iterations.
    for (;;) {
      if (ctrl == parent_ctrl) {
        // update the map of the exits which is the one that will be
        // used when compilation resume after inlining
        parser->exits().map()->replace_edge(old, neww);
        progress = true;
        break;
      }
      if (ctrl->is_Proj() && ctrl->as_Proj()->is_uncommon_trap_if_pattern(Deoptimization::Reason_none)) {
        // The other side of this If is an uncommon trap, so control
        // effectively flows straight from above the If: step over it.
        ctrl = ctrl->in(0)->in(0);
      } else if (ctrl->is_Region()) {
        Node* n = ctrl->as_Region()->is_copy();
        if (n == NULL) {
          break;  // a real merge point: post-dominance not provable here
        }
        ctrl = n;
      } else {
        break;  // cannot walk any further up the control chain
      }
    }

    parser = parser->parent_parser();
  }
}
1443
1444
1445 //=============================================================================
//--------------------------------memory---------------------------------------
// Returns the memory slice for the given alias category from the kit's
// merged memory, ensuring GVN has a type recorded for the slice node.
Node* GraphKit::memory(uint alias_idx) {
  MergeMemNode* mem = merged_memory();
  Node* p = mem->memory_at(alias_idx);
  _gvn.set_type(p, Type::MEMORY); // must be mapped
  return p;
}
1453
//-----------------------------reset_memory------------------------------------
// Detaches and returns the map's entire memory state (GVN-transformed).
// In debug builds the map's memory slot is nulled out afterwards so further
// parsing cannot use it by accident.
Node* GraphKit::reset_memory() {
  Node* mem = map()->memory();
  // do not use this node for any more parsing!
  debug_only( map()->set_memory((Node*)NULL) );
  return _gvn.transform( mem );
}
1461
|