      }
    }
  }
}

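// Verification helper: check that inline caches which point at nmethods that
// are no longer in use (or are no longer the current code for their method)
// have already been cleaned.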
void nmethod::verify_clean_inline_caches() {
  assert_locked_or_safepoint(CompiledIC_lock);

  // If the method is not entrant or zombie then a JMP is plastered over the
  // first few bytes. If an oop in the old code was there, that oop
  // should not get GC'd. Skip the first few bytes of oops on
  // not-entrant methods.
  address low_boundary = verified_entry_point();
  if (!is_in_use()) {
    low_boundary += NativeJump::instruction_size;
    // %%% Note: On SPARC we patch only a 4-byte trap, not a full NativeJump.
    // This means that the low_boundary is going to be a little too high.
    // This shouldn't matter, since oops of non-entrant methods are never used.
    // In fact, why are we bothering to look at oops in a non-entrant method??
  }

  ResourceMark rm;
  RelocIterator iter(this, low_boundary);
  while(iter.next()) {
    switch(iter.type()) {
      case relocInfo::virtual_call_type:
      case relocInfo::opt_virtual_call_type: {
        CompiledIC *ic = CompiledIC_at(&iter);
        // OK to look up references to zombies here
        CodeBlob *cb = CodeCache::find_blob_unsafe(ic->ic_destination());
        if( cb != NULL && cb->is_nmethod() ) {
          nmethod* nm = (nmethod*)cb;
          // Verify that inline caches pointing to both zombie and not_entrant methods are clean
          if (!nm->is_in_use() || (nm->method()->code() != nm)) {
            assert(ic->is_clean(), "IC should be clean");
          }
        }
        break;
      }
      case relocInfo::static_call_type: {
        CompiledStaticCall *csc = compiledStaticCall_at(iter.reloc());
        CodeBlob *cb = CodeCache::find_blob_unsafe(csc->destination());
        if( cb != NULL && cb->is_nmethod() ) {
          nmethod* nm = (nmethod*)cb;
          // Verify that inline caches pointing to both zombie and not_entrant methods are clean
          if (!nm->is_in_use() || (nm->method()->code() != nm)) {
            assert(csc->is_clean(), "IC should be clean");
          }
        }
        break;
      }
    }
  }
}

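// Count the virtual-call sites in this nmethod whose inline cache currently
// goes through a CompiledICHolder; callers can use the count to cross-check
// icholder accounting.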
int nmethod::verify_icholder_relocations() {
  int count = 0;

  RelocIterator iter(this);
  while(iter.next()) {
    if (iter.type() == relocInfo::virtual_call_type) {
      if (CompiledIC::is_icholder_call_site(iter.virtual_call_reloc())) {
        CompiledIC *ic = CompiledIC_at(&iter);
        if (TraceCompiledIC) {
          tty->print("noticed icholder " INTPTR_FORMAT " ", p2i(ic->cached_icholder()));
          ic->print();
        }
        assert(ic->cached_icholder() != NULL, "must be non-NULL");
        count++;
      }
    }
  }

  return count;
}

// This is a private interface with the sweeper.
void nmethod::mark_as_seen_on_stack() {
  assert(is_alive(), "Must be an alive method");
  // Set the traversal mark to ensure that the sweeper does 2
  // cleaning passes before moving to zombie.

// ... (intervening code omitted) ...

  if (mdo == NULL) return;
  // There is a benign race here. See comments in methodData.hpp.
  mdo->inc_decompile_count();
}

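// The three functions below maintain the "unloading clock" used by parallel
// class unloading: the global clock is advanced for each unloading cycle, and
// every nmethod records the clock value it was last processed with. A mismatch
// therefore means the nmethod has not yet been visited in the current cycle
// (see clean_if_nmethod_is_unloaded() further down).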
void nmethod::increase_unloading_clock() {
  _global_unloading_clock++;
  if (_global_unloading_clock == 0) {
    // _nmethods are allocated with _unloading_clock == 0,
    // so 0 is never used as a clock value.
    _global_unloading_clock = 1;
  }
}

void nmethod::set_unloading_clock(unsigned char unloading_clock) {
  OrderAccess::release_store((volatile jubyte*)&_unloading_clock, unloading_clock);
}

unsigned char nmethod::unloading_clock() {
  return (unsigned char)OrderAccess::load_acquire((volatile jubyte*)&_unloading_clock);
}

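// Transition this nmethod to the unloaded state once the GC has found that it
// references otherwise-unreachable oops.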
void nmethod::make_unloaded(BoolObjectClosure* is_alive, oop cause) {

  post_compiled_method_unload();

  // Since this nmethod is being unloaded, make sure that dependencies

// ... (intervening code omitted) ...

  // attempt to report the event in the unlikely scenario where the
  // event is enabled at the time the nmethod is made a zombie.
  set_unload_reported();
}

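// Clean an inline cache if the metadata it caches (a Method*, a Klass*, or the
// method and klass of a CompiledICHolder) belongs to a class loader that is no
// longer alive; inline caches whose cached metadata is still alive are left
// untouched.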
static void clean_ic_if_metadata_is_dead(CompiledIC *ic, BoolObjectClosure *is_alive) {
  if (ic->is_icholder_call()) {
    // The only exception is compiledICHolder oops which may
    // yet be marked below. (We check this further below).
    CompiledICHolder* cichk_oop = ic->cached_icholder();
    if (cichk_oop->holder_method()->method_holder()->is_loader_alive(is_alive) &&
        cichk_oop->holder_klass()->is_loader_alive(is_alive)) {
      return;
    }
  } else {
    Metadata* ic_oop = ic->cached_metadata();
    if (ic_oop != NULL) {
      if (ic_oop->is_klass()) {
        if (((Klass*)ic_oop)->is_loader_alive(is_alive)) {
          return;
        }
      } else if (ic_oop->is_method()) {
        if (((Method*)ic_oop)->method_holder()->is_loader_alive(is_alive)) {
          return;
        }
      } else {
        ShouldNotReachHere();
      }
    }
  }

  ic->set_to_clean();
}

// This is called at the end of the strong tracing/marking phase of a
// GC to unload an nmethod if it contains otherwise unreachable
// oops.

void nmethod::do_unloading(BoolObjectClosure* is_alive, bool unloading_occurred) {

// ... (intervening code omitted) ...

  if (unloading_occurred) {
    RelocIterator iter(this, low_boundary);
    while(iter.next()) {
      if (iter.type() == relocInfo::virtual_call_type) {
        CompiledIC *ic = CompiledIC_at(&iter);
        clean_ic_if_metadata_is_dead(ic, is_alive);
      }
    }
  }

  // Compiled code

// ... (intervening code omitted) ...

    }
  }

  // Ensure that all metadata is still alive
  verify_metadata_loaders(low_boundary, is_alive);
}

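// Clean an inline cache or static call whose destination nmethod is dead (no
// longer in use, or no longer the current code for its method). Returns true
// if the destination nmethod has not yet been processed in the current
// unloading cycle (stale unloading clock), meaning the cleaning must be
// postponed to do_unloading_parallel_postponed().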
template <class CompiledICorStaticCall>
static bool clean_if_nmethod_is_unloaded(CompiledICorStaticCall *ic, address addr, BoolObjectClosure *is_alive, nmethod* from) {
  // OK to look up references to zombies here
  CodeBlob *cb = CodeCache::find_blob_unsafe(addr);
  if (cb != NULL && cb->is_nmethod()) {
    nmethod* nm = (nmethod*)cb;

    if (nm->unloading_clock() != nmethod::global_unloading_clock()) {
      // The nmethod has not been processed yet.
      return true;
    }

    // Clean inline caches pointing to both zombie and not_entrant methods
    if (!nm->is_in_use() || (nm->method()->code() != nm)) {
      ic->set_to_clean();
      assert(ic->is_clean(), err_msg("nmethod " PTR_FORMAT " not clean %s", from, from->method()->name_and_sig_as_C_string()));
    }
  }

  return false;
}

static bool clean_if_nmethod_is_unloaded(CompiledIC *ic, BoolObjectClosure *is_alive, nmethod* from) {
  return clean_if_nmethod_is_unloaded(ic, ic->ic_destination(), is_alive, from);
}

static bool clean_if_nmethod_is_unloaded(CompiledStaticCall *csc, BoolObjectClosure *is_alive, nmethod* from) {
  return clean_if_nmethod_is_unloaded(csc, csc->destination(), is_alive, from);
}

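// First pass of parallel nmethod unloading. Cleans the exception cache, cleans
// inline caches whose cached metadata or target nmethod is dead, and checks the
// embedded oops for liveness. Returns true if some inline-cache cleaning had to
// be postponed because the target nmethod has not been visited yet in this
// cycle; callers are expected to revisit such nmethods with
// do_unloading_parallel_postponed() once every nmethod has been processed.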
bool nmethod::do_unloading_parallel(BoolObjectClosure* is_alive, bool unloading_occurred) {
  ResourceMark rm;

  // Make sure the oop's ready to receive visitors
  assert(!is_zombie() && !is_unloaded(),
         "should not call follow on zombie or unloaded nmethod");

  // If the method is not entrant then a JMP is plastered over the
  // first few bytes. If an oop in the old code was there, that oop
  // should not get GC'd. Skip the first few bytes of oops on
  // not-entrant methods.
  address low_boundary = verified_entry_point();
  if (is_not_entrant()) {
    low_boundary += NativeJump::instruction_size;
    // %%% Note: On SPARC we patch only a 4-byte trap, not a full NativeJump.
    // (See comment above.)
  }

  // The RedefineClasses() API can cause the class unloading invariant
  // to no longer be true. See jvmtiExport.hpp for details.
  // Also, leave a debugging breadcrumb in local flag.
  bool a_class_was_redefined = JvmtiExport::has_redefined_a_class();
  if (a_class_was_redefined) {
    // This set of the unloading_occurred flag is done before the
    // call to post_compiled_method_unload() so that the unloading
    // of this nmethod is reported.
    unloading_occurred = true;
  }

  // Exception cache
  clean_exception_cache(is_alive);

  bool is_unloaded = false;
  bool postponed = false;

  RelocIterator iter(this, low_boundary);
  while(iter.next()) {

    switch (iter.type()) {

    case relocInfo::virtual_call_type:
      if (unloading_occurred) {
        // If class unloading occurred we first iterate over all inline caches and
        // clear ICs where the cached oop is referring to an unloaded klass or method.
        clean_ic_if_metadata_is_dead(CompiledIC_at(&iter), is_alive);
      }

      postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
      break;

    case relocInfo::opt_virtual_call_type:
      postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
      break;

    case relocInfo::static_call_type:
      postponed |= clean_if_nmethod_is_unloaded(compiledStaticCall_at(iter.reloc()), is_alive, this);
      break;

    case relocInfo::oop_type:
      if (!is_unloaded) {
        // Unload check
        oop_Relocation* r = iter.oop_reloc();
        // Traverse those oops directly embedded in the code.
        // Other oops (oop_index>0) are seen as part of scopes_oops.
        assert(1 == (r->oop_is_immediate()) +
               (r->oop_addr() >= oops_begin() && r->oop_addr() < oops_end()),
               "oop must be found in exactly one place");
        if (r->oop_is_immediate() && r->oop_value() != NULL) {
          if (can_unload(is_alive, r->oop_addr(), unloading_occurred)) {
            is_unloaded = true;
          }
        }
      }
      break;

    }
  }

  if (is_unloaded) {
    return postponed;
  }

  // Scopes
  for (oop* p = oops_begin(); p < oops_end(); p++) {
    if (*p == Universe::non_oop_word()) continue; // skip non-oops
    if (can_unload(is_alive, p, unloading_occurred)) {
      is_unloaded = true;
      break;
    }
  }

  if (is_unloaded) {
    return postponed;
  }

  // Ensure that all metadata is still alive
  verify_metadata_loaders(low_boundary, is_alive);

  return postponed;
}

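// Second pass of parallel nmethod unloading: revisit the inline caches and
// static calls whose cleaning was postponed by do_unloading_parallel() because
// their target nmethod had not been processed at that point.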
void nmethod::do_unloading_parallel_postponed(BoolObjectClosure* is_alive, bool unloading_occurred) {
  ResourceMark rm;

  // Make sure the oop's ready to receive visitors
  assert(!is_zombie(),
         "should not call follow on zombie nmethod");

  // If the method is not entrant then a JMP is plastered over the
  // first few bytes. If an oop in the old code was there, that oop
  // should not get GC'd. Skip the first few bytes of oops on
  // not-entrant methods.
  address low_boundary = verified_entry_point();
  if (is_not_entrant()) {
    low_boundary += NativeJump::instruction_size;
    // %%% Note: On SPARC we patch only a 4-byte trap, not a full NativeJump.
    // (See comment above.)
  }

  RelocIterator iter(this, low_boundary);
  while(iter.next()) {

    switch (iter.type()) {

    case relocInfo::virtual_call_type:
      clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
      break;

    case relocInfo::opt_virtual_call_type:
      clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
      break;

    case relocInfo::static_call_type:
      clean_if_nmethod_is_unloaded(compiledStaticCall_at(iter.reloc()), is_alive, this);
      break;
    }
  }
}

#ifdef ASSERT

class CheckClass : AllStatic {