src/share/vm/code/nmethod.cpp
(sdiff for bug 8029443)

Old version:
1701   } else {
1702     Metadata* ic_oop = ic->cached_metadata();
1703     if (ic_oop != NULL) {
1704       if (ic_oop->is_klass()) {
1705         if (((Klass*)ic_oop)->is_loader_alive(is_alive)) {
1706           return;
1707         }
1708       } else if (ic_oop->is_method()) {
1709         if (((Method*)ic_oop)->method_holder()->is_loader_alive(is_alive)) {
1710           return;
1711         }
1712       } else {
1713         ShouldNotReachHere();
1714       }
1715     }
1716   }
1717 
1718   ic->set_to_clean();
1719 }
1720 
1721 // This is called at the end of the strong tracing/marking phase of a
1722 // GC to unload an nmethod if it contains otherwise unreachable
1723 // oops.
1724 
1725 void nmethod::do_unloading(BoolObjectClosure* is_alive, bool unloading_occurred) {
1726   // Make sure the oop's ready to receive visitors
1727   assert(!is_zombie() && !is_unloaded(),
1728          "should not call follow on zombie or unloaded nmethod");
1729 
1730   // If the method is not entrant then a JMP is plastered over the
1731   // first few bytes.  If an oop in the old code was there, that oop
1732   // should not get GC'd.  Skip the first few bytes of oops on
1733   // not-entrant methods.
1734   address low_boundary = verified_entry_point();
1735   if (is_not_entrant()) {
1736     low_boundary += NativeJump::instruction_size;
1737     // %%% Note:  On SPARC we patch only a 4-byte trap, not a full NativeJump.
1738     // (See comment above.)
1739   }
1740 
1741   // The RedefineClasses() API can cause the class unloading invariant
1742   // to no longer be true. See jvmtiExport.hpp for details.
1743   // Also, leave a debugging breadcrumb in local flag.
1744   bool a_class_was_redefined = JvmtiExport::has_redefined_a_class();
1745   if (a_class_was_redefined) {
1746     // This set of the unloading_occurred flag is done before the
1747     // call to post_compiled_method_unload() so that the unloading
1748     // of this nmethod is reported.
1749     unloading_occurred = true;
1750   }
1751 
1752   // Exception cache
1753   clean_exception_cache(is_alive);
1754 
1755   // If class unloading occurred we first iterate over all inline caches and
1756   // clear ICs where the cached oop is referring to an unloaded klass or method.
1757   // The remaining live cached oops will be traversed in the relocInfo::oop_type
1758   // iteration below.
1759   if (unloading_occurred) {
1760     RelocIterator iter(this, low_boundary);
1761     while(iter.next()) {
1762       if (iter.type() == relocInfo::virtual_call_type) {
1763         CompiledIC *ic = CompiledIC_at(&iter);
1764         clean_ic_if_metadata_is_dead(ic, is_alive);
1765       }
1766     }
1767   }
1768 
1769   // Compiled code
1770   {
1771   RelocIterator iter(this, low_boundary);
1772   while (iter.next()) {
1773     if (iter.type() == relocInfo::oop_type) {
1774       oop_Relocation* r = iter.oop_reloc();
1775       // In this loop, we must only traverse those oops directly embedded in
1776       // the code.  Other oops (oop_index>0) are seen as part of scopes_oops.
1777       assert(1 == (r->oop_is_immediate()) +
1778                   (r->oop_addr() >= oops_begin() && r->oop_addr() < oops_end()),
1779              "oop must be found in exactly one place");
1780       if (r->oop_is_immediate() && r->oop_value() != NULL) {
1781         if (can_unload(is_alive, r->oop_addr(), unloading_occurred)) {
1782           return;
1783         }
1784       }

[lines 1785-1862 not shown]

1863 
1864   bool is_unloaded = false;
1865   bool postponed = false;
1866 
1867   RelocIterator iter(this, low_boundary);
1868   while(iter.next()) {
1869 
1870     switch (iter.type()) {
1871 
1872     case relocInfo::virtual_call_type:
1873       if (unloading_occurred) {
1874         // If class unloading occurred we first iterate over all inline caches and
1875         // clear ICs where the cached oop is referring to an unloaded klass or method.
1876         clean_ic_if_metadata_is_dead(CompiledIC_at(&iter), is_alive);
1877       }
1878 
1879       postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
1880       break;
1881 
1882     case relocInfo::opt_virtual_call_type:
1883       postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
1884       break;
1885 
1886     case relocInfo::static_call_type:
1887       postponed |= clean_if_nmethod_is_unloaded(compiledStaticCall_at(iter.reloc()), is_alive, this);
1888       break;
1889 
1890     case relocInfo::oop_type:
1891       if (!is_unloaded) {
1892         // Unload check
1893         oop_Relocation* r = iter.oop_reloc();
1894         // Traverse those oops directly embedded in the code.
1895         // Other oops (oop_index>0) are seen as part of scopes_oops.
1896         assert(1 == (r->oop_is_immediate()) +
1897                   (r->oop_addr() >= oops_begin() && r->oop_addr() < oops_end()),
1898               "oop must be found in exactly one place");
1899         if (r->oop_is_immediate() && r->oop_value() != NULL) {
1900           if (can_unload(is_alive, r->oop_addr(), unloading_occurred)) {
1901             is_unloaded = true;
1902           }
1903         }
1904       }
1905       break;
1906 
[remaining lines not shown]

New version:

1701   } else {
1702     Metadata* ic_oop = ic->cached_metadata();
1703     if (ic_oop != NULL) {
1704       if (ic_oop->is_klass()) {
1705         if (((Klass*)ic_oop)->is_loader_alive(is_alive)) {
1706           return;
1707         }
1708       } else if (ic_oop->is_method()) {
1709         if (((Method*)ic_oop)->method_holder()->is_loader_alive(is_alive)) {
1710           return;
1711         }
1712       } else {
1713         ShouldNotReachHere();
1714       }
1715     }
1716   }
1717 
1718   ic->set_to_clean();
1719 }
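            // Note added for context (not part of this change): a "clean" IC is
            // redirected back to the runtime resolution stub, so the next call
            // through it simply re-resolves its target. set_to_clean() is thus
            // always safe; the only cost is a later re-resolution.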
1720 
1721 /**
1722  * Checks if the to-interpreter stub of an optimized compiled IC or a compiled
1723  * static call contains a reference to dead Method* metadata.
1724  */
1725 bool nmethod::stub_contains_dead_metadata(BoolObjectClosure* is_alive, address stub) {
1726   RelocIterator iter(this, stub, stub + CompiledStaticCall::to_interp_stub_size());
1727   while (iter.next()) {
1728     if (iter.type() == relocInfo::metadata_type) {
1729       Metadata* md = iter.metadata_reloc()->metadata_value();
1730       // Check if class loader of holder Klass is alive
1731       if (md != NULL && md->is_method() && !((Method*)md)->method_holder()->is_loader_alive(is_alive)) {
1732         return true;
1733       }
1734     }
1735   }
1736   return false;
1737 }
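            // Context (added, not part of this change): on x86, for example, the
            // to-interpreter stub scanned above is essentially
            //     mov  rbx, <Method*>      ; covered by a metadata relocation
            //     jmp  <interpreter entry>
            // so a Method* whose holder's class loader has died is reachable only
            // through the stub's metadata relocations, which is what this loop visits.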
1738 
1739 // This is called at the end of the strong tracing/marking phase of a
1740 // GC to unload an nmethod if it contains otherwise unreachable
1741 // oops.
1742 
1743 void nmethod::do_unloading(BoolObjectClosure* is_alive, bool unloading_occurred) {
1744   // Make sure the oop's ready to receive visitors
1745   assert(!is_zombie() && !is_unloaded(),
1746          "should not call follow on zombie or unloaded nmethod");
1747 
1748   // If the method is not entrant then a JMP is plastered over the
1749   // first few bytes.  If an oop in the old code was there, that oop
1750   // should not get GC'd.  Skip the first few bytes of oops on
1751   // not-entrant methods.
1752   address low_boundary = verified_entry_point();
1753   if (is_not_entrant()) {
1754     low_boundary += NativeJump::instruction_size;
1755     // %%% Note:  On SPARC we patch only a 4-byte trap, not a full NativeJump.
1756     // (See comment above.)
1757   }
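            // Added note: on x86_64, for instance, NativeJump::instruction_size is
            // 5 bytes (a jmp rel32), so the relocation walks below start 5 bytes
            // past the verified entry point of a not-entrant method.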
1758 
1759   // The RedefineClasses() API can cause the class unloading invariant
1760   // to no longer be true. See jvmtiExport.hpp for details.
1761   // Also, leave a debugging breadcrumb in local flag.
1762   bool a_class_was_redefined = JvmtiExport::has_redefined_a_class();
1763   if (a_class_was_redefined) {
1764     // This set of the unloading_occurred flag is done before the
1765     // call to post_compiled_method_unload() so that the unloading
1766     // of this nmethod is reported.
1767     unloading_occurred = true;
1768   }
1769 
1770   // Exception cache
1771   clean_exception_cache(is_alive);
1772 
1773   // If class unloading occurred we first iterate over all inline caches and
1774   // clear ICs where the cached oop or the to-interpreter stub (if in use) is
1775   // referring to an unloaded klass or method. The to-interpreter stubs of
1776   // compiled static calls are checked as well. The remaining live cached oops
1777   // will be traversed in the relocInfo::oop_type iteration below.
1778   if (unloading_occurred) {
1779     RelocIterator iter(this, low_boundary);
1780     while(iter.next()) {
1781       switch (iter.type()) {
1782         case relocInfo::virtual_call_type: {
1783           CompiledIC* ic = CompiledIC_at(&iter);
1784           clean_ic_if_metadata_is_dead(ic, is_alive);
1785           break;
1786         }
1787         case relocInfo::opt_virtual_call_type: {
1788           CompiledIC* ic = CompiledIC_at(&iter);
1789           if (ic->is_call_to_interpreted() && stub_contains_dead_metadata(is_alive, ic->ic_destination())) {
1790             ic->set_to_clean();
1791           }
1792           break;
1793         }
1794         case relocInfo::static_call_type: {
1795           CompiledStaticCall* csc = compiledStaticCall_at(iter.reloc());
1796           if (csc->is_call_to_interpreted() && stub_contains_dead_metadata(is_alive, csc->destination())) {
1797             csc->set_to_clean();
1798           }
1799           break;
1800         }
1801       }
1802     }
1803   }
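            // Added context: unlike regular virtual-call ICs, optimized virtual
            // calls and static calls cache no Klass*/Method* that
            // clean_ic_if_metadata_is_dead() could inspect; when they target
            // interpreted code the Method* lives in the to-interpreter stub, hence
            // the stub_contains_dead_metadata() checks above.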
1804 
1805   // Compiled code
1806   {
1807   RelocIterator iter(this, low_boundary);
1808   while (iter.next()) {
1809     if (iter.type() == relocInfo::oop_type) {
1810       oop_Relocation* r = iter.oop_reloc();
1811       // In this loop, we must only traverse those oops directly embedded in
1812       // the code.  Other oops (oop_index>0) are seen as part of scopes_oops.
1813       assert(1 == (r->oop_is_immediate()) +
1814                   (r->oop_addr() >= oops_begin() && r->oop_addr() < oops_end()),
1815              "oop must be found in exactly one place");
1816       if (r->oop_is_immediate() && r->oop_value() != NULL) {
1817         if (can_unload(is_alive, r->oop_addr(), unloading_occurred)) {
1818           return;
1819         }
1820       }

[lines 1821-1898 not shown]

1899 
1900   bool is_unloaded = false;
1901   bool postponed = false;
1902 
1903   RelocIterator iter(this, low_boundary);
1904   while(iter.next()) {
1905 
1906     switch (iter.type()) {
1907 
1908     case relocInfo::virtual_call_type:
1909       if (unloading_occurred) {
1910         // If class unloading occurred we first iterate over all inline caches and
1911         // clear ICs where the cached oop is referring to an unloaded klass or method.
1912         clean_ic_if_metadata_is_dead(CompiledIC_at(&iter), is_alive);
1913       }
1914 
1915       postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
1916       break;
1917 
1918     case relocInfo::opt_virtual_call_type:
1919       if (unloading_occurred) {
1920         // Clear IC if it calls a to-interpreter stub that refers to an unloaded method
1921         CompiledIC* ic = CompiledIC_at(&iter);
1922         if (ic->is_call_to_interpreted() && stub_contains_dead_metadata(is_alive, ic->ic_destination())) {
1923           ic->set_to_clean();
1924         }
1925       }
1926       postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
1927       break;
1928 
1929     case relocInfo::static_call_type:
1930       if (unloading_occurred) {
1931         // Clear call if it targets a to-interpreter stub that refers to an unloaded method
1932         CompiledStaticCall* csc = compiledStaticCall_at(iter.reloc());
1933         if (csc->is_call_to_interpreted() && stub_contains_dead_metadata(is_alive, csc->destination())) {
1934           csc->set_to_clean();
1935         }
1936       }
1937       postponed |= clean_if_nmethod_is_unloaded(compiledStaticCall_at(iter.reloc()), is_alive, this);
1938       break;
1939 
1940     case relocInfo::oop_type:
1941       if (!is_unloaded) {
1942         // Unload check
1943         oop_Relocation* r = iter.oop_reloc();
1944         // Traverse those oops directly embedded in the code.
1945         // Other oops (oop_index>0) are seen as part of scopes_oops.
1946         assert(1 == (r->oop_is_immediate()) +
1947                   (r->oop_addr() >= oops_begin() && r->oop_addr() < oops_end()),
1948               "oop must be found in exactly one place");
1949         if (r->oop_is_immediate() && r->oop_value() != NULL) {
1950           if (can_unload(is_alive, r->oop_addr(), unloading_occurred)) {
1951             is_unloaded = true;
1952           }
1953         }
1954       }
1955       break;
1956 
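[remaining lines not shown]

To make the cleaning rule concrete, here is a small self-contained sketch (toy types and names, not HotSpot API) of the invariant both versions of this code enforce: an inline cache may keep its cached metadata only while that metadata's class loader is still alive; otherwise it is reset to the clean state and re-resolves on the next call.

#include <cstdio>

// Toy stand-ins for Metadata and CompiledIC (hypothetical, illustration only).
struct Metadata {
  bool loader_alive;   // stands in for the GC's is_loader_alive() query
};

struct InlineCache {
  Metadata* cached;    // metadata the IC currently dispatches on (may be null)
  void set_to_clean() { cached = nullptr; }  // back to the re-resolving state
};

// Mirrors the shape of clean_ic_if_metadata_is_dead(): keep the IC only while
// its metadata's class loader is alive; otherwise reset it.
static void clean_if_metadata_is_dead(InlineCache* ic) {
  if (ic->cached != nullptr && ic->cached->loader_alive) {
    return;            // still reachable: keep the inline cache as-is
  }
  ic->set_to_clean();  // dead or empty: clear now, re-resolve lazily later
}

int main() {
  Metadata dead = { false };   // its class loader did not survive the GC
  InlineCache ic = { &dead };
  clean_if_metadata_is_dead(&ic);
  std::printf("cached after cleaning: %p\n", (void*)ic.cached);  // prints null
  return 0;
}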