src/share/vm/code/nmethod.cpp
Sdiff for 8029443

Old version:
1701   } else {
1702     Metadata* ic_oop = ic->cached_metadata();
1703     if (ic_oop != NULL) {
1704       if (ic_oop->is_klass()) {
1705         if (((Klass*)ic_oop)->is_loader_alive(is_alive)) {
1706           return;
1707         }
1708       } else if (ic_oop->is_method()) {
1709         if (((Method*)ic_oop)->method_holder()->is_loader_alive(is_alive)) {
1710           return;
1711         }
1712       } else {
1713         ShouldNotReachHere();
1714       }
1715     }
1716   }
1717 
1718   ic->set_to_clean();
1719 }
1721 // This is called at the end of the strong tracing/marking phase of a
1722 // GC to unload an nmethod if it contains otherwise unreachable
1723 // oops.
1724 
1725 void nmethod::do_unloading(BoolObjectClosure* is_alive, bool unloading_occurred) {
1726   // Make sure the oop's ready to receive visitors
1727   assert(!is_zombie() && !is_unloaded(),
1728          "should not call follow on zombie or unloaded nmethod");
1729 
1730   // If the method is not entrant then a JMP is plastered over the
1731   // first few bytes.  If an oop in the old code was there, that oop
1732   // should not get GC'd.  Skip the first few bytes of oops on
1733   // not-entrant methods.
1734   address low_boundary = verified_entry_point();
1735   if (is_not_entrant()) {
1736     low_boundary += NativeJump::instruction_size;
1737     // %%% Note:  On SPARC we patch only a 4-byte trap, not a full NativeJump.
1738     // (See comment above.)
1739   }
1740 
1741   // The RedefineClasses() API can cause the class unloading invariant
1742   // to no longer be true. See jvmtiExport.hpp for details.
1743   // Also, leave a debugging breadcrumb in local flag.
1744   bool a_class_was_redefined = JvmtiExport::has_redefined_a_class();
1745   if (a_class_was_redefined) {
1746     // This set of the unloading_occurred flag is done before the
1747     // call to post_compiled_method_unload() so that the unloading
1748     // of this nmethod is reported.
1749     unloading_occurred = true;
1750   }
1751 
1752   // Exception cache
1753   clean_exception_cache(is_alive);
1754 
1755   // If class unloading occurred we first iterate over all inline caches and
1756   // clear ICs where the cached oop is referring to an unloaded klass or method.
1757   // The remaining live cached oops will be traversed in the relocInfo::oop_type
1758   // iteration below.
1759   if (unloading_occurred) {
1760     RelocIterator iter(this, low_boundary);
1761     while(iter.next()) {
1762       if (iter.type() == relocInfo::virtual_call_type) {
1763         CompiledIC *ic = CompiledIC_at(&iter);
1764         clean_ic_if_metadata_is_dead(ic, is_alive);
1765       }
1766     }
1767   }
1768 
1769   // Compiled code
1770   {
1771   RelocIterator iter(this, low_boundary);
1772   while (iter.next()) {
1773     if (iter.type() == relocInfo::oop_type) {
1774       oop_Relocation* r = iter.oop_reloc();
1775       // In this loop, we must only traverse those oops directly embedded in
1776       // the code.  Other oops (oop_index>0) are seen as part of scopes_oops.
1777       assert(1 == (r->oop_is_immediate()) +
1778                   (r->oop_addr() >= oops_begin() && r->oop_addr() < oops_end()),
1779              "oop must be found in exactly one place");
1780       if (r->oop_is_immediate() && r->oop_value() != NULL) {
1781         if (can_unload(is_alive, r->oop_addr(), unloading_occurred)) {
1782           return;
1783         }
1784       }


1863 
1864   bool is_unloaded = false;
1865   bool postponed = false;
1866 
1867   RelocIterator iter(this, low_boundary);
1868   while(iter.next()) {
1869 
1870     switch (iter.type()) {
1871 
1872     case relocInfo::virtual_call_type:
1873       if (unloading_occurred) {
1874         // If class unloading occurred we first iterate over all inline caches and
1875         // clear ICs where the cached oop is referring to an unloaded klass or method.
1876         clean_ic_if_metadata_is_dead(CompiledIC_at(&iter), is_alive);
1877       }
1878 
1879       postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
1880       break;
1881 
1882     case relocInfo::opt_virtual_call_type:
1883       postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
1884       break;
1885 
1886     case relocInfo::static_call_type:
1887       postponed |= clean_if_nmethod_is_unloaded(compiledStaticCall_at(iter.reloc()), is_alive, this);
1888       break;
1889 
1890     case relocInfo::oop_type:
1891       if (!is_unloaded) {
1892         // Unload check
1893         oop_Relocation* r = iter.oop_reloc();
1894         // Traverse those oops directly embedded in the code.
1895         // Other oops (oop_index>0) are seen as part of scopes_oops.
1896         assert(1 == (r->oop_is_immediate()) +
1897                   (r->oop_addr() >= oops_begin() && r->oop_addr() < oops_end()),
1898               "oop must be found in exactly one place");
1899         if (r->oop_is_immediate() && r->oop_value() != NULL) {
1900           if (can_unload(is_alive, r->oop_addr(), unloading_occurred)) {
1901             is_unloaded = true;
1902           }
1903         }
1904       }
1905       break;
1906 

New version:
1701   } else {
1702     Metadata* ic_oop = ic->cached_metadata();
1703     if (ic_oop != NULL) {
1704       if (ic_oop->is_klass()) {
1705         if (((Klass*)ic_oop)->is_loader_alive(is_alive)) {
1706           return;
1707         }
1708       } else if (ic_oop->is_method()) {
1709         if (((Method*)ic_oop)->method_holder()->is_loader_alive(is_alive)) {
1710           return;
1711         }
1712       } else {
1713         ShouldNotReachHere();
1714       }
1715     }
1716   }
1717 
1718   ic->set_to_clean();
1719 }
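
The fragment above is the tail of clean_ic_if_metadata_is_dead(): an inline cache's cached metadata is either a Klass or a Method, the early returns keep the IC while the owning class loader is still alive, and every other case falls through to ic->set_to_clean(). A standalone model of that decision, with toy types rather than HotSpot's:

// Toy model (illustrative types, not HotSpot code) of the dispatch above:
// keep the IC only while the loader owning its cached metadata is alive.
#include <cassert>
#include <cstddef>

struct Loader { bool alive; };
struct Klass  { Loader* loader; };
struct Method { Klass* holder; };

struct Metadata {
  enum Kind { KLASS, METHOD } kind;
  Klass*  klass;
  Method* method;
};

// Returns true when the IC may be kept; false means "set_to_clean()".
static bool keep_ic(const Metadata* md) {
  if (md == NULL) return false;                  // no metadata: clean the IC
  switch (md->kind) {
  case Metadata::KLASS:  return md->klass->loader->alive;
  case Metadata::METHOD: return md->method->holder->loader->alive;
  }
  assert(false && "ShouldNotReachHere");
  return false;
}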
1720 
1721 /**
1722  * Cleans a static- or IC-call to the interpreter stub if the stub refers to an unloaded method.
1723  */
1724 template <class CompiledICorStaticCall>
1725 static void clean_call_to_interpreter_stub(CompiledICorStaticCall* ic, address stub, BoolObjectClosure* is_alive, nmethod* from) {
1726   if (ic->is_call_to_interpreted()) {
1727     // Check if the to-interpreter stub contains a reference to dead Method* metadata.
1728     RelocIterator iter(from, stub, stub + CompiledStaticCall::to_interp_stub_size());
1729     while (iter.next()) {
1730       if (iter.type() == relocInfo::metadata_type) {
1731         Metadata* md = iter.metadata_reloc()->metadata_value();
1732         // Check if class loader of holder Klass is alive
1733         if (md != NULL && md->is_method() && !((Method*)md)->method_holder()->is_loader_alive(is_alive)) {
1734           ic->set_to_clean();
1735           return;
1736         }
1737       }
1738     }
1739   }
1740 }
1741 
1742 static void clean_call_to_interpreter_stub(CompiledIC* ic, BoolObjectClosure* is_alive, nmethod* from) {
1743   clean_call_to_interpreter_stub(ic, ic->ic_destination(), is_alive, from);
1744 }
1745 
1746 static void clean_call_to_interpreter_stub(CompiledStaticCall* csc, BoolObjectClosure* is_alive, nmethod* from) {
1747   clean_call_to_interpreter_stub(csc, csc->destination(), is_alive, from);
1748 }
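
clean_call_to_interpreter_stub() is a template so that a single body can scan the stub's metadata relocations for both call kinds; the two thin overloads only normalize how the stub address is obtained (CompiledIC::ic_destination() versus CompiledStaticCall::destination()). A minimal sketch of the same template-plus-overloads pattern, again with toy types rather than HotSpot's:

// Toy sketch (not HotSpot code): one template worker on a normalized stub
// address, with thin overloads supplying each type's accessor, as above.
#include <cstdio>

struct VirtualCall { const char* ic_destination() const { return "ic stub"; } };
struct StaticCall  { const char* destination()    const { return "static stub"; } };

template <class Call>
static void scan_stub(Call* call, const char* stub) {
  (void)call;                          // the real worker also inspects the call
  std::printf("scanning %s\n", stub);  // shared body runs once per call site
}

static void scan(VirtualCall* c) { scan_stub(c, c->ic_destination()); }
static void scan(StaticCall*  c) { scan_stub(c, c->destination()); }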
1749 
1750 // This is called at the end of the strong tracing/marking phase of a
1751 // GC to unload an nmethod if it contains otherwise unreachable
1752 // oops.
1753 
1754 void nmethod::do_unloading(BoolObjectClosure* is_alive, bool unloading_occurred) {
1755   // Make sure the oop's ready to receive visitors
1756   assert(!is_zombie() && !is_unloaded(),
1757          "should not call follow on zombie or unloaded nmethod");
1758 
1759   // If the method is not entrant then a JMP is plastered over the
1760   // first few bytes.  If an oop in the old code was there, that oop
1761   // should not get GC'd.  Skip the first few bytes of oops on
1762   // not-entrant methods.
1763   address low_boundary = verified_entry_point();
1764   if (is_not_entrant()) {
1765     low_boundary += NativeJump::instruction_size;
1766     // %%% Note:  On SPARC we patch only a 4-byte trap, not a full NativeJump.
1767     // (See comment above.)
1768   }
1769 
1770   // The RedefineClasses() API can cause the class unloading invariant
1771   // to no longer be true. See jvmtiExport.hpp for details.
1772   // Also, leave a debugging breadcrumb in local flag.
1773   bool a_class_was_redefined = JvmtiExport::has_redefined_a_class();
1774   if (a_class_was_redefined) {
1775     // This set of the unloading_occurred flag is done before the
1776     // call to post_compiled_method_unload() so that the unloading
1777     // of this nmethod is reported.
1778     unloading_occurred = true;
1779   }
1780 
1781   // Exception cache
1782   clean_exception_cache(is_alive);
1783 
1784   // If class unloading occurred we first iterate over all inline caches and
1785   // clear ICs where the cached oop or the to-interpreter stub (if in use) is
1786   // referring to an unloaded klass or method. The to-interpreter stubs of
1787   // compiled static calls are checked as well. The remaining live cached oops
1788   // will be traversed in the relocInfo::oop_type iteration below.
1789   if (unloading_occurred) {
1790     RelocIterator iter(this, low_boundary);
1791     while(iter.next()) {
1792       switch (iter.type()) {
1793         case relocInfo::virtual_call_type: {
1794           clean_ic_if_metadata_is_dead(CompiledIC_at(&iter), is_alive);
1795           break;
1796         }
1797         case relocInfo::opt_virtual_call_type: {
1798           clean_call_to_interpreter_stub(CompiledIC_at(&iter), is_alive, this);
1799           break;
1800         }
1801         case relocInfo::static_call_type: {
1802           clean_call_to_interpreter_stub(compiledStaticCall_at(iter.reloc()), is_alive, this);
1803           break;
1804         }
1805       }
1806     }
1807   }
1808 
1809   // Compiled code
1810   {
1811   RelocIterator iter(this, low_boundary);
1812   while (iter.next()) {
1813     if (iter.type() == relocInfo::oop_type) {
1814       oop_Relocation* r = iter.oop_reloc();
1815       // In this loop, we must only traverse those oops directly embedded in
1816       // the code.  Other oops (oop_index>0) are seen as part of scopes_oops.
1817       assert(1 == (r->oop_is_immediate()) +
1818                   (r->oop_addr() >= oops_begin() && r->oop_addr() < oops_end()),
1819              "oop must be found in exactly one place");
1820       if (r->oop_is_immediate() && r->oop_value() != NULL) {
1821         if (can_unload(is_alive, r->oop_addr(), unloading_occurred)) {
1822           return;
1823         }
1824       }
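
A note on the assertion idiom here: in arithmetic each bool converts to 0 or 1, so requiring the sum to equal 1 states that exactly one placement holds, i.e. the oop is either embedded immediately in the instruction stream or stored in the nmethod's oops section, never both and never neither. As a standalone toy:

#include <cassert>

// bools sum as 0/1, so the sum is 1 iff exactly one condition holds.
static void assert_exactly_one(bool is_immediate, bool in_oops_section) {
  assert(1 == (int)is_immediate + (int)in_oops_section &&
         "oop must be found in exactly one place");
}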


1903 
1904   bool is_unloaded = false;
1905   bool postponed = false;
1906 
1907   RelocIterator iter(this, low_boundary);
1908   while(iter.next()) {
1909 
1910     switch (iter.type()) {
1911 
1912     case relocInfo::virtual_call_type:
1913       if (unloading_occurred) {
1914         // If class unloading occurred we first iterate over all inline caches and
1915         // clear ICs where the cached oop is referring to an unloaded klass or method.
1916         clean_ic_if_metadata_is_dead(CompiledIC_at(&iter), is_alive);
1917       }
1918 
1919       postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
1920       break;
1921 
1922     case relocInfo::opt_virtual_call_type:
1923       if (unloading_occurred) {
1924         clean_call_to_interpreter_stub(CompiledIC_at(&iter), is_alive, this);
1925       }
1926       postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
1927       break;
1928 
1929     case relocInfo::static_call_type:
1930       if (unloading_occurred) {
1931         clean_call_to_interpreter_stub(compiledStaticCall_at(iter.reloc()), is_alive, this);
1932       }
1933       postponed |= clean_if_nmethod_is_unloaded(compiledStaticCall_at(iter.reloc()), is_alive, this);
1934       break;
1935 
1936     case relocInfo::oop_type:
1937       if (!is_unloaded) {
1938         // Unload check
1939         oop_Relocation* r = iter.oop_reloc();
1940         // Traverse those oops directly embedded in the code.
1941         // Other oops (oop_index>0) are seen as part of scopes_oops.
1942         assert(1 == (r->oop_is_immediate()) +
1943                   (r->oop_addr() >= oops_begin() && r->oop_addr() < oops_end()),
1944               "oop must be found in exactly one place");
1945         if (r->oop_is_immediate() && r->oop_value() != NULL) {
1946           if (can_unload(is_alive, r->oop_addr(), unloading_occurred)) {
1947             is_unloaded = true;
1948           }
1949         }
1950       }
1951       break;
1952 
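
This parallel variant performs the same per-relocation checks but does not return early: is_unloaded latches once any immediately embedded oop proves dead, and the results of clean_if_nmethod_is_unloaded() are OR-ed into postponed, whose name suggests call sites that could not be cleaned in this pass and are deferred to a later one. A toy sketch of that flag pattern (illustrative types only, not HotSpot code):

#include <cstddef>
#include <vector>

struct Site { bool oop_is_dead; bool clean_postponed; };

static void scan_sites(const std::vector<Site>& sites,
                       bool& is_unloaded, bool& postponed) {
  for (std::size_t i = 0; i < sites.size(); i++) {
    if (!is_unloaded && sites[i].oop_is_dead) {
      is_unloaded = true;                    // latch, never reset in the scan
    }
    postponed |= sites[i].clean_postponed;   // accumulate deferred cleanups
  }
}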