415 } else {
416 assert(dst->is_Phi(), "nobody else uses a hidden region");
417 phi = dst->as_Phi();
418 }
419 if (add_multiple && src->in(0) == ex_control) {
420 // Both are phis.
421 add_n_reqs(dst, src);
422 } else {
423 while (dst->req() < region->req()) add_one_req(dst, src);
424 }
425 const Type* srctype = _gvn.type(src);
426 if (phi->type() != srctype) {
427 const Type* dsttype = phi->type()->meet_speculative(srctype);
428 if (phi->type() != dsttype) {
429 phi->set_type(dsttype);
430 _gvn.set_type(phi, dsttype);
431 }
432 }
433 }
434 }
435 }
436
437 //--------------------------use_exception_state--------------------------------
438 Node* GraphKit::use_exception_state(SafePointNode* phi_map) {
439 if (failing()) { stop(); return top(); }
440 Node* region = phi_map->control();
441 Node* hidden_merge_mark = root();
442 assert(phi_map->jvms()->map() == phi_map, "sanity: 1-1 relation");
443 Node* ex_oop = clear_saved_ex_oop(phi_map);
444 if (region->in(0) == hidden_merge_mark) {
445 // Special marking for internal ex-states. Process the phis now.
446 region->set_req(0, region); // now it's an ordinary region
447 set_jvms(phi_map->jvms()); // ...so now we can use it as a map
448 // Note: Setting the jvms also sets the bci and sp.
449 set_control(_gvn.transform(region));
450 uint tos = jvms()->stkoff() + sp();
451 for (uint i = 1; i < tos; i++) {
452 Node* x = phi_map->in(i);
453 if (x->in(0) == region) {
454 assert(x->is_Phi(), "expected a special phi");
628 C->trap_count(reason));
629 action = Deoptimization::Action_none;
630 }
631
632 // "must_throw" prunes the JVM state to include only the stack, if there
633 // are no local exception handlers. This should cut down on register
634 // allocation time and code size, by drastically reducing the number
635 // of in-edges on the call to the uncommon trap.
636
637 uncommon_trap(reason, action, (ciKlass*)NULL, (char*)NULL, must_throw);
638 }
639
640
//----------------------------PreserveJVMState---------------------------------
// RAII helper: saves the kit's JVM state (map and stack pointer) on entry
// and restores it in the destructor.  With clone_map the kit keeps working
// on a clone of the saved map; otherwise the kit's map is cleared to NULL.
PreserveJVMState::PreserveJVMState(GraphKit* kit, bool clone_map) {
  debug_only(kit->verify_map());
  _kit = kit;
  _map = kit->map();   // preserve the map
  _sp = kit->sp();
  kit->set_map(clone_map ? kit->clone_map() : NULL);
  // Tell the compile that a JVM state is preserved, so that
  // replace_in_map (which cannot see preserved copies) refrains
  // from updating parent maps while this object is live.
  Compile::current()->inc_preserve_jvm_state();
#ifdef ASSERT
  // Remember the current parse position so the destructor can verify
  // that the parser has not shifted to another bci/block meanwhile.
  _bci = kit->bci();
  Parse* parser = kit->is_Parse();
  int block = (parser == NULL || parser->block() == NULL) ? -1 : parser->block()->rpo();
  _block = block;
#endif
}
// Restore the JVM state saved by the constructor.
PreserveJVMState::~PreserveJVMState() {
  GraphKit* kit = _kit;
#ifdef ASSERT
  // The parse position must be exactly where the constructor left it.
  assert(kit->bci() == _bci, "bci must not shift");
  Parse* parser = kit->is_Parse();
  int block = (parser == NULL || parser->block() == NULL) ? -1 : parser->block()->rpo();
  assert(block == _block, "block must not shift");
#endif
  // Restore the saved map and stack pointer.
  kit->set_map(_map);
  kit->set_sp(_sp);
  // Balance the inc_preserve_jvm_state() done in the constructor.
  Compile::current()->dec_preserve_jvm_state();
}
668
669
//-----------------------------BuildCutout-------------------------------------
// RAII helper: splits control flow on test 'p'.  The preserved (caller's)
// map continues on the taken (IfTrue) branch; the kit is left parsing the
// untaken (IfFalse) "cutout" branch, which must end in a control sink
// (checked by the destructor).
BuildCutout::BuildCutout(GraphKit* kit, Node* p, float prob, float cnt)
  : PreserveJVMState(kit)
{
  assert(p->is_Con() || p->is_Bool(), "test must be a bool");
  SafePointNode* outer_map = _map;        // preserved map is caller's
  SafePointNode* inner_map = kit->map();  // kit's (possibly cloned) map
  IfNode* iff = kit->create_and_map_if(outer_map->control(), p, prob, cnt);
  outer_map->set_control(kit->gvn().transform( new IfTrueNode(iff) ));
  inner_map->set_control(kit->gvn().transform( new IfFalseNode(iff) ));
}
681 BuildCutout::~BuildCutout() {
682 GraphKit* kit = _kit;
683 assert(kit->stopped(), "cutout code must stop, throw, return, etc.");
684 }
685
686 //---------------------------PreserveReexecuteState----------------------------
1386 // These instances are known to be not-null after the test.
1387 if (do_replace_in_map)
1388 replace_in_map(obj, cast);
1389
1390 return cast; // Return casted value
1391 }
1392
1393
//--------------------------replace_in_map-------------------------------------
// Replace all occurrences of 'old' with 'neww' in the current map and,
// when it is provably safe, in the exit maps of calling parsers as well.
void GraphKit::replace_in_map(Node* old, Node* neww) {
  if (old == neww) {
    return;  // nothing to do
  }

  map()->replace_edge(old, neww);

  // Note: This operation potentially replaces any edge
  // on the map. This includes locals, stack, and monitors
  // of the current (innermost) JVM state.

  if (!ReplaceInParentMaps) {
    return;  // flag disables the parent-map propagation below
  }

  // PreserveJVMState doesn't do a deep copy so we can't modify
  // parents
  if (Compile::current()->has_preserve_jvm_state()) {
    return;
  }

  Parse* parser = is_Parse();
  bool progress = true;
  Node* ctrl = map()->in(0);
  // Follow the chain of parsers and see whether the update can be
  // done in the map of callers. We can do the replace for a caller if
  // the current control post dominates the control of a caller.
  while (parser != NULL && parser->caller() != NULL && progress) {
    progress = false;
    Node* parent_map = parser->caller()->map();
    assert(parser->exits().map()->jvms()->depth() == parser->caller()->depth(), "map mismatch");

    Node* parent_ctrl = parent_map->in(0);

    // Skip over regions that are mere copies (single real control input)
    // to find the caller's effective control node.
    while (parent_ctrl->is_Region()) {
      Node* n = parent_ctrl->as_Region()->is_copy();
      if (n == NULL) {
        break;
      }
      parent_ctrl = n;
    }

    // Walk the current control upward through uncommon-trap if-patterns
    // and copy regions, looking for the caller's control point.
    for (;;) {
      if (ctrl == parent_ctrl) {
        // update the map of the exits which is the one that will be
        // used when compilation resume after inlining
        parser->exits().map()->replace_edge(old, neww);
        progress = true;
        break;
      }
      if (ctrl->is_Proj() && ctrl->as_Proj()->is_uncommon_trap_if_pattern(Deoptimization::Reason_none)) {
        // Step over an If whose other branch leads to an uncommon trap.
        ctrl = ctrl->in(0)->in(0);
      } else if (ctrl->is_Region()) {
        Node* n = ctrl->as_Region()->is_copy();
        if (n == NULL) {
          break;  // a real merge point: cannot walk further up
        }
        ctrl = n;
      } else {
        break;  // no recognized pattern: stop for this caller
      }
    }

    parser = parser->parent_parser();
  }
}
1461
1462
1463 //=============================================================================
1464 //--------------------------------memory---------------------------------------
1465 Node* GraphKit::memory(uint alias_idx) {
1466 MergeMemNode* mem = merged_memory();
1467 Node* p = mem->memory_at(alias_idx);
1468 _gvn.set_type(p, Type::MEMORY); // must be mapped
1469 return p;
1470 }
1471
1472 //-----------------------------reset_memory------------------------------------
1473 Node* GraphKit::reset_memory() {
1474 Node* mem = map()->memory();
1475 // do not use this node for any more parsing!
1476 debug_only( map()->set_memory((Node*)NULL) );
1477 return _gvn.transform( mem );
1478 }
1479
1847 Node* mem = _gvn.transform( new ProjNode(call, TypeFunc::Memory) );
1848 // Set the RawPtr memory state only. This covers all the heap top/GC stuff
1849 // We also use hook_mem to extract specific effects from arraycopy stubs.
1850 set_memory(mem, hook_mem);
1851 }
1852 // ...else the call has NO memory effects.
1853
1854 // Make sure the call advertises its memory effects precisely.
1855 // This lets us build accurate anti-dependences in gcm.cpp.
1856 assert(C->alias_type(call->adr_type()) == C->alias_type(hook_mem),
1857 "call node must be constructed correctly");
1858 } else {
1859 assert(hook_mem == NULL, "");
1860 // This is not a "slow path" call; all memory comes from the call.
1861 set_all_memory_call(call);
1862 }
1863 }
1864
1865
1866 // Replace the call with the current state of the kit.
1867 void GraphKit::replace_call(CallNode* call, Node* result) {
1868 JVMState* ejvms = NULL;
1869 if (has_exceptions()) {
1870 ejvms = transfer_exceptions_into_jvms();
1871 }
1872
1873 SafePointNode* final_state = stop();
1874
1875 // Find all the needed outputs of this call
1876 CallProjections callprojs;
1877 call->extract_projections(&callprojs, true);
1878
1879 Node* init_mem = call->in(TypeFunc::Memory);
1880 Node* final_mem = final_state->in(TypeFunc::Memory);
1881 Node* final_ctl = final_state->in(TypeFunc::Control);
1882 Node* final_io = final_state->in(TypeFunc::I_O);
1883
1884 // Replace all the old call edges with the edges from the inlining result
1885 if (callprojs.fallthrough_catchproj != NULL) {
1886 C->gvn_replace_by(callprojs.fallthrough_catchproj, final_ctl);
1887 }
1888 if (callprojs.fallthrough_memproj != NULL) {
1889 C->gvn_replace_by(callprojs.fallthrough_memproj, final_mem);
1890 }
1891 if (callprojs.fallthrough_ioproj != NULL) {
1892 C->gvn_replace_by(callprojs.fallthrough_ioproj, final_io);
1893 }
1894
1895 // Replace the result with the new result if it exists and is used
1896 if (callprojs.resproj != NULL && result != NULL) {
1897 C->gvn_replace_by(callprojs.resproj, result);
1898 }
1899
1900 if (ejvms == NULL) {
1901 // No exception edges to simply kill off those paths
1902 if (callprojs.catchall_catchproj != NULL) {
1903 C->gvn_replace_by(callprojs.catchall_catchproj, C->top());
1904 }
1905 if (callprojs.catchall_memproj != NULL) {
1906 C->gvn_replace_by(callprojs.catchall_memproj, C->top());
1907 }
1908 if (callprojs.catchall_ioproj != NULL) {
1909 C->gvn_replace_by(callprojs.catchall_ioproj, C->top());
1910 }
1911 // Replace the old exception object with top
1912 if (callprojs.exobj != NULL) {
1913 C->gvn_replace_by(callprojs.exobj, C->top());
1914 }
1915 } else {
1916 GraphKit ekit(ejvms);
1917
1918 // Load my combined exception state into the kit, with all phis transformed:
1919 SafePointNode* ex_map = ekit.combine_and_pop_all_exception_states();
1920
1921 Node* ex_oop = ekit.use_exception_state(ex_map);
1922 if (callprojs.catchall_catchproj != NULL) {
1923 C->gvn_replace_by(callprojs.catchall_catchproj, ekit.control());
1924 }
1925 if (callprojs.catchall_memproj != NULL) {
1926 C->gvn_replace_by(callprojs.catchall_memproj, ekit.reset_memory());
1927 }
1928 if (callprojs.catchall_ioproj != NULL) {
1929 C->gvn_replace_by(callprojs.catchall_ioproj, ekit.i_o());
1930 }
1931
1932 // Replace the old exception object with the newly created one
1933 if (callprojs.exobj != NULL) {
1934 C->gvn_replace_by(callprojs.exobj, ex_oop);
1935 }
1936 }
1937
1938 // Disconnect the call from the graph
1939 call->disconnect_inputs(NULL, C);
1940 C->gvn_replace_by(call, C->top());
1941
1942 // Clean up any MergeMems that feed other MergeMems since the
1943 // optimizer doesn't like that.
1944 if (final_mem->is_MergeMem()) {
1945 Node_List wl;
1946 for (SimpleDUIterator i(final_mem); i.has_next(); i.next()) {
1947 Node* m = i.get();
1948 if (m->is_MergeMem() && !wl.contains(m)) {
1949 wl.push(m);
1950 }
1951 }
1952 while (wl.size() > 0) {
1953 _gvn.transform(wl.pop());
1954 }
1955 }
1956 }
1957
1958
1959 //------------------------------increment_counter------------------------------
1960 // for statistics: increment a VM counter by 1
1961
1962 void GraphKit::increment_counter(address counter_addr) {
1963 Node* adr1 = makecon(TypeRawPtr::make(counter_addr));
1964 increment_counter(adr1);
1965 }
1966
1967 void GraphKit::increment_counter(Node* counter_addr) {
1968 int adr_type = Compile::AliasIdxRaw;
1969 Node* ctrl = control();
1970 Node* cnt = make_load(ctrl, counter_addr, TypeInt::INT, T_INT, adr_type, MemNode::unordered);
1971 Node* incr = _gvn.transform(new AddINode(cnt, _gvn.intcon(1)));
1972 store_to_memory(ctrl, counter_addr, incr, T_INT, adr_type, MemNode::unordered);
1973 }
1974
|
415 } else {
416 assert(dst->is_Phi(), "nobody else uses a hidden region");
417 phi = dst->as_Phi();
418 }
419 if (add_multiple && src->in(0) == ex_control) {
420 // Both are phis.
421 add_n_reqs(dst, src);
422 } else {
423 while (dst->req() < region->req()) add_one_req(dst, src);
424 }
425 const Type* srctype = _gvn.type(src);
426 if (phi->type() != srctype) {
427 const Type* dsttype = phi->type()->meet_speculative(srctype);
428 if (phi->type() != dsttype) {
429 phi->set_type(dsttype);
430 _gvn.set_type(phi, dsttype);
431 }
432 }
433 }
434 }
435 phi_map->merge_replaced_nodes_with(ex_map);
436 }
437
438 //--------------------------use_exception_state--------------------------------
439 Node* GraphKit::use_exception_state(SafePointNode* phi_map) {
440 if (failing()) { stop(); return top(); }
441 Node* region = phi_map->control();
442 Node* hidden_merge_mark = root();
443 assert(phi_map->jvms()->map() == phi_map, "sanity: 1-1 relation");
444 Node* ex_oop = clear_saved_ex_oop(phi_map);
445 if (region->in(0) == hidden_merge_mark) {
446 // Special marking for internal ex-states. Process the phis now.
447 region->set_req(0, region); // now it's an ordinary region
448 set_jvms(phi_map->jvms()); // ...so now we can use it as a map
449 // Note: Setting the jvms also sets the bci and sp.
450 set_control(_gvn.transform(region));
451 uint tos = jvms()->stkoff() + sp();
452 for (uint i = 1; i < tos; i++) {
453 Node* x = phi_map->in(i);
454 if (x->in(0) == region) {
455 assert(x->is_Phi(), "expected a special phi");
629 C->trap_count(reason));
630 action = Deoptimization::Action_none;
631 }
632
633 // "must_throw" prunes the JVM state to include only the stack, if there
634 // are no local exception handlers. This should cut down on register
635 // allocation time and code size, by drastically reducing the number
636 // of in-edges on the call to the uncommon trap.
637
638 uncommon_trap(reason, action, (ciKlass*)NULL, (char*)NULL, must_throw);
639 }
640
641
//----------------------------PreserveJVMState---------------------------------
// RAII helper: saves the kit's JVM state (map and stack pointer) on entry
// and restores it in the destructor.  With clone_map the kit keeps working
// on a clone of the saved map; otherwise the kit's map is cleared to NULL.
PreserveJVMState::PreserveJVMState(GraphKit* kit, bool clone_map) {
  debug_only(kit->verify_map());
  _kit = kit;
  _map = kit->map();   // preserve the map
  _sp = kit->sp();
  kit->set_map(clone_map ? kit->clone_map() : NULL);
#ifdef ASSERT
  // Remember the current parse position so the destructor can verify
  // that the parser has not shifted to another bci/block meanwhile.
  _bci = kit->bci();
  Parse* parser = kit->is_Parse();
  int block = (parser == NULL || parser->block() == NULL) ? -1 : parser->block()->rpo();
  _block = block;
#endif
}
// Restore the JVM state saved by the constructor.
PreserveJVMState::~PreserveJVMState() {
  GraphKit* kit = _kit;
#ifdef ASSERT
  // The parse position must be exactly where the constructor left it.
  assert(kit->bci() == _bci, "bci must not shift");
  Parse* parser = kit->is_Parse();
  int block = (parser == NULL || parser->block() == NULL) ? -1 : parser->block()->rpo();
  assert(block == _block, "block must not shift");
#endif
  // Restore the saved map and stack pointer.
  kit->set_map(_map);
  kit->set_sp(_sp);
}
667
668
//-----------------------------BuildCutout-------------------------------------
// RAII helper: splits control flow on test 'p'.  The preserved (caller's)
// map continues on the taken (IfTrue) branch; the kit is left parsing the
// untaken (IfFalse) "cutout" branch, which must end in a control sink
// (checked by the destructor).
BuildCutout::BuildCutout(GraphKit* kit, Node* p, float prob, float cnt)
  : PreserveJVMState(kit)
{
  assert(p->is_Con() || p->is_Bool(), "test must be a bool");
  SafePointNode* outer_map = _map;        // preserved map is caller's
  SafePointNode* inner_map = kit->map();  // kit's (possibly cloned) map
  IfNode* iff = kit->create_and_map_if(outer_map->control(), p, prob, cnt);
  outer_map->set_control(kit->gvn().transform( new IfTrueNode(iff) ));
  inner_map->set_control(kit->gvn().transform( new IfFalseNode(iff) ));
}
680 BuildCutout::~BuildCutout() {
681 GraphKit* kit = _kit;
682 assert(kit->stopped(), "cutout code must stop, throw, return, etc.");
683 }
684
685 //---------------------------PreserveReexecuteState----------------------------
1385 // These instances are known to be not-null after the test.
1386 if (do_replace_in_map)
1387 replace_in_map(obj, cast);
1388
1389 return cast; // Return casted value
1390 }
1391
1392
1393 //--------------------------replace_in_map-------------------------------------
1394 void GraphKit::replace_in_map(Node* old, Node* neww) {
1395 if (old == neww) {
1396 return;
1397 }
1398
1399 map()->replace_edge(old, neww);
1400
1401 // Note: This operation potentially replaces any edge
1402 // on the map. This includes locals, stack, and monitors
1403 // of the current (innermost) JVM state.
1404
1405 // don't let inconsistent types from profiling escape this
1406 // method
1407
1408 const Type* told = _gvn.type(old);
1409 const Type* tnew = _gvn.type(neww);
1410
1411 if (!tnew->higher_equal(told)) {
1412 return;
1413 }
1414
1415 map()->record_replaced_node(old, neww);
1416 }
1417
1418
1419 //=============================================================================
1420 //--------------------------------memory---------------------------------------
1421 Node* GraphKit::memory(uint alias_idx) {
1422 MergeMemNode* mem = merged_memory();
1423 Node* p = mem->memory_at(alias_idx);
1424 _gvn.set_type(p, Type::MEMORY); // must be mapped
1425 return p;
1426 }
1427
1428 //-----------------------------reset_memory------------------------------------
1429 Node* GraphKit::reset_memory() {
1430 Node* mem = map()->memory();
1431 // do not use this node for any more parsing!
1432 debug_only( map()->set_memory((Node*)NULL) );
1433 return _gvn.transform( mem );
1434 }
1435
1803 Node* mem = _gvn.transform( new ProjNode(call, TypeFunc::Memory) );
1804 // Set the RawPtr memory state only. This covers all the heap top/GC stuff
1805 // We also use hook_mem to extract specific effects from arraycopy stubs.
1806 set_memory(mem, hook_mem);
1807 }
1808 // ...else the call has NO memory effects.
1809
1810 // Make sure the call advertises its memory effects precisely.
1811 // This lets us build accurate anti-dependences in gcm.cpp.
1812 assert(C->alias_type(call->adr_type()) == C->alias_type(hook_mem),
1813 "call node must be constructed correctly");
1814 } else {
1815 assert(hook_mem == NULL, "");
1816 // This is not a "slow path" call; all memory comes from the call.
1817 set_all_memory_call(call);
1818 }
1819 }
1820
1821
1822 // Replace the call with the current state of the kit.
1823 void GraphKit::replace_call(CallNode* call, Node* result, bool do_replaced_nodes) {
1824 JVMState* ejvms = NULL;
1825 if (has_exceptions()) {
1826 ejvms = transfer_exceptions_into_jvms();
1827 }
1828
1829 ReplacedNodes replaced_nodes = map()->replaced_nodes();
1830 ReplacedNodes replaced_nodes_exception;
1831 Node* ex_ctl = top();
1832
1833 SafePointNode* final_state = stop();
1834
1835 // Find all the needed outputs of this call
1836 CallProjections callprojs;
1837 call->extract_projections(&callprojs, true);
1838
1839 Node* init_mem = call->in(TypeFunc::Memory);
1840 Node* final_mem = final_state->in(TypeFunc::Memory);
1841 Node* final_ctl = final_state->in(TypeFunc::Control);
1842 Node* final_io = final_state->in(TypeFunc::I_O);
1843
1844 // Replace all the old call edges with the edges from the inlining result
1845 if (callprojs.fallthrough_catchproj != NULL) {
1846 C->gvn_replace_by(callprojs.fallthrough_catchproj, final_ctl);
1847 }
1848 if (callprojs.fallthrough_memproj != NULL) {
1849 if (final_mem->is_MergeMem()) {
1850 // Parser's exits MergeMem was not transformed but may be optimized
1851 final_mem = _gvn.transform(final_mem);
1852 }
1853 C->gvn_replace_by(callprojs.fallthrough_memproj, final_mem);
1854 }
1855 if (callprojs.fallthrough_ioproj != NULL) {
1856 C->gvn_replace_by(callprojs.fallthrough_ioproj, final_io);
1857 }
1858
1859 // Replace the result with the new result if it exists and is used
1860 if (callprojs.resproj != NULL && result != NULL) {
1861 C->gvn_replace_by(callprojs.resproj, result);
1862 }
1863
1864 if (ejvms == NULL) {
1865 // No exception edges to simply kill off those paths
1866 if (callprojs.catchall_catchproj != NULL) {
1867 C->gvn_replace_by(callprojs.catchall_catchproj, C->top());
1868 }
1869 if (callprojs.catchall_memproj != NULL) {
1870 C->gvn_replace_by(callprojs.catchall_memproj, C->top());
1871 }
1872 if (callprojs.catchall_ioproj != NULL) {
1873 C->gvn_replace_by(callprojs.catchall_ioproj, C->top());
1874 }
1875 // Replace the old exception object with top
1876 if (callprojs.exobj != NULL) {
1877 C->gvn_replace_by(callprojs.exobj, C->top());
1878 }
1879 } else {
1880 GraphKit ekit(ejvms);
1881
1882 // Load my combined exception state into the kit, with all phis transformed:
1883 SafePointNode* ex_map = ekit.combine_and_pop_all_exception_states();
1884 replaced_nodes_exception = ex_map->replaced_nodes();
1885
1886 Node* ex_oop = ekit.use_exception_state(ex_map);
1887
1888 if (callprojs.catchall_catchproj != NULL) {
1889 C->gvn_replace_by(callprojs.catchall_catchproj, ekit.control());
1890 ex_ctl = ekit.control();
1891 }
1892 if (callprojs.catchall_memproj != NULL) {
1893 C->gvn_replace_by(callprojs.catchall_memproj, ekit.reset_memory());
1894 }
1895 if (callprojs.catchall_ioproj != NULL) {
1896 C->gvn_replace_by(callprojs.catchall_ioproj, ekit.i_o());
1897 }
1898
1899 // Replace the old exception object with the newly created one
1900 if (callprojs.exobj != NULL) {
1901 C->gvn_replace_by(callprojs.exobj, ex_oop);
1902 }
1903 }
1904
1905 // Disconnect the call from the graph
1906 call->disconnect_inputs(NULL, C);
1907 C->gvn_replace_by(call, C->top());
1908
1909 // Clean up any MergeMems that feed other MergeMems since the
1910 // optimizer doesn't like that.
1911 if (final_mem->is_MergeMem()) {
1912 Node_List wl;
1913 for (SimpleDUIterator i(final_mem); i.has_next(); i.next()) {
1914 Node* m = i.get();
1915 if (m->is_MergeMem() && !wl.contains(m)) {
1916 wl.push(m);
1917 }
1918 }
1919 while (wl.size() > 0) {
1920 _gvn.transform(wl.pop());
1921 }
1922 }
1923
1924 if (callprojs.fallthrough_catchproj != NULL && !final_ctl->is_top() && do_replaced_nodes) {
1925 replaced_nodes.apply(C, final_ctl);
1926 }
1927 if (!ex_ctl->is_top() && do_replaced_nodes) {
1928 replaced_nodes_exception.apply(C, ex_ctl);
1929 }
1930 }
1931
1932
1933 //------------------------------increment_counter------------------------------
1934 // for statistics: increment a VM counter by 1
1935
1936 void GraphKit::increment_counter(address counter_addr) {
1937 Node* adr1 = makecon(TypeRawPtr::make(counter_addr));
1938 increment_counter(adr1);
1939 }
1940
1941 void GraphKit::increment_counter(Node* counter_addr) {
1942 int adr_type = Compile::AliasIdxRaw;
1943 Node* ctrl = control();
1944 Node* cnt = make_load(ctrl, counter_addr, TypeInt::INT, T_INT, adr_type, MemNode::unordered);
1945 Node* incr = _gvn.transform(new AddINode(cnt, _gvn.intcon(1)));
1946 store_to_memory(ctrl, counter_addr, incr, T_INT, adr_type, MemNode::unordered);
1947 }
1948
|