744 } |
744 } |
745 |
745 |
746 void CodeCache::gc_epilogue() { |
746 void CodeCache::gc_epilogue() { |
747 assert_locked_or_safepoint(CodeCache_lock); |
747 assert_locked_or_safepoint(CodeCache_lock); |
748 NMethodIterator iter; |
748 NMethodIterator iter; |
749 while(iter.next_alive()) { |
749 while(iter.next()) { |
750 nmethod* nm = iter.method(); |
750 nmethod* nm = iter.method(); |
751 assert(!nm->is_unloaded(), "Tautology"); |
751 if (!nm->is_zombie()) { |
752 if (needs_cache_clean()) { |
752 if (needs_cache_clean()) { |
753 nm->cleanup_inline_caches(); |
753 // Clean ICs of unloaded nmethods as well because they may reference other |
754 } |
754 // unloaded nmethods that may be flushed earlier in the sweeper cycle. |
755 DEBUG_ONLY(nm->verify()); |
755 nm->cleanup_inline_caches(); |
756 DEBUG_ONLY(nm->verify_oop_relocations()); |
756 } |
|
757 DEBUG_ONLY(nm->verify()); |
|
758 DEBUG_ONLY(nm->verify_oop_relocations()); |
|
759 } |
757 } |
760 } |
758 set_needs_cache_clean(false); |
761 set_needs_cache_clean(false); |
759 prune_scavenge_root_nmethods(); |
762 prune_scavenge_root_nmethods(); |
760 |
763 |
761 verify_icholder_relocations(); |
764 verify_icholder_relocations(); |
991 } |
994 } |
992 |
995 |
993 return number_of_marked_CodeBlobs; |
996 return number_of_marked_CodeBlobs; |
994 } |
997 } |
995 |
998 |
// Walk all alive nmethods and advance any that were marked for
// deoptimization along the nmethod state machine: either straight to
// zombie (when safe) or to not-entrant as an intermediate step.
void CodeCache::make_marked_nmethods_zombies() {
  // State transitions on nmethods race with compiled frames that may
  // still be executing them, so this must only run at a safepoint.
  assert(SafepointSynchronize::is_at_safepoint(), "must be at a safepoint");
  NMethodIterator iter;
  while(iter.next_alive()) {
    nmethod* nm = iter.method();
    if (nm->is_marked_for_deoptimization()) {

      // If the nmethod has already been made non-entrant and it can be converted
      // then zombie it now. Otherwise make it non-entrant and it will eventually
      // be zombied when it is no longer seen on the stack. Note that the nmethod
      // might be "entrant" and not on the stack and so could be zombied immediately
      // but we can't tell because we don't track it on stack until it becomes
      // non-entrant.

      if (nm->is_not_entrant() && nm->can_not_entrant_be_converted()) {
        nm->make_zombie();
      } else {
        // Not yet safe to zombie: demote to not-entrant; a later pass
        // (or the sweeper) will zombie it once no frame references it.
        nm->make_not_entrant();
      }
    }
  }
}
|
1018 |
|
1019 void CodeCache::make_marked_nmethods_not_entrant() { |
999 void CodeCache::make_marked_nmethods_not_entrant() { |
1020 assert_locked_or_safepoint(CodeCache_lock); |
1000 assert_locked_or_safepoint(CodeCache_lock); |
1021 NMethodIterator iter; |
1001 NMethodIterator iter; |
1022 while(iter.next_alive()) { |
1002 while(iter.next_alive()) { |
1023 nmethod* nm = iter.method(); |
1003 nmethod* nm = iter.method(); |
1070 DeoptimizationMarker dm; |
1050 DeoptimizationMarker dm; |
1071 |
1051 |
1072 // Deoptimize all activations depending on marked nmethods |
1052 // Deoptimize all activations depending on marked nmethods |
1073 Deoptimization::deoptimize_dependents(); |
1053 Deoptimization::deoptimize_dependents(); |
1074 |
1054 |
1075 // Make the dependent methods not entrant (in VM_Deoptimize they are made zombies) |
1055 // Make the dependent methods not entrant |
1076 make_marked_nmethods_not_entrant(); |
1056 make_marked_nmethods_not_entrant(); |
1077 } |
1057 } |
1078 } |
1058 } |
1079 #endif // HOTSWAP |
1059 #endif // HOTSWAP |
1080 |
1060 |
1100 DeoptimizationMarker dm; |
1080 DeoptimizationMarker dm; |
1101 |
1081 |
1102 // Deoptimize all activations depending on marked nmethods |
1082 // Deoptimize all activations depending on marked nmethods |
1103 Deoptimization::deoptimize_dependents(); |
1083 Deoptimization::deoptimize_dependents(); |
1104 |
1084 |
1105 // Make the dependent methods not entrant (in VM_Deoptimize they are made zombies) |
1085 // Make the dependent methods not entrant |
1106 make_marked_nmethods_not_entrant(); |
1086 make_marked_nmethods_not_entrant(); |
1107 } |
1087 } |
1108 } |
1088 } |
1109 |
1089 |
1110 void CodeCache::verify() { |
1090 void CodeCache::verify() { |