8234437: Remove CollectedHeap::safe_object_iterate()
author pliden
date Wed, 20 Nov 2019 10:37:46 +0100
changeset 59153 1152339c298a
parent 59152 59272e9e0635
child 59154 0c2e1808f800
8234437: Remove CollectedHeap::safe_object_iterate()
Reviewed-by: kbarrett, sjohanss
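Every safe_object_iterate() override removed below either forwarded to object_iterate() or was textually identical to it, so the callers in heapInspection.cpp and heapDumper.cpp simply switch to object_iterate(), and a stale comment recommending safe_object_iterate() is dropped from jvmtiTagMap.cpp. A minimal caller-side sketch of the surviving pattern, using a hypothetical CountingClosure in place of real closures such as RecordInstanceClosure or HeapObjectDumper:

#include "gc/shared/collectedHeap.hpp"
#include "memory/iterator.hpp"
#include "memory/universe.hpp"
#include "oops/oop.hpp"

// Hypothetical closure, for illustration only.
class CountingClosure : public ObjectClosure {
  size_t _count;
 public:
  CountingClosure() : _count(0) {}
  virtual void do_object(oop obj) { _count++; }   // called once per heap object
  size_t count() const { return _count; }
};

// After this change there is a single heap-walking entry point.
static size_t count_heap_objects() {
  CountingClosure cl;
  Universe::heap()->object_iterate(&cl);
  return cl.count();
}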
src/hotspot/share/gc/epsilon/epsilonHeap.cpp
src/hotspot/share/gc/epsilon/epsilonHeap.hpp
src/hotspot/share/gc/g1/g1CollectedHeap.hpp
src/hotspot/share/gc/parallel/parallelScavengeHeap.hpp
src/hotspot/share/gc/shared/collectedHeap.hpp
src/hotspot/share/gc/shared/genCollectedHeap.cpp
src/hotspot/share/gc/shared/genCollectedHeap.hpp
src/hotspot/share/gc/shared/generation.cpp
src/hotspot/share/gc/shared/generation.hpp
src/hotspot/share/gc/shared/space.cpp
src/hotspot/share/gc/shared/space.hpp
src/hotspot/share/gc/shenandoah/shenandoahHeap.cpp
src/hotspot/share/gc/shenandoah/shenandoahHeap.hpp
src/hotspot/share/gc/z/zCollectedHeap.cpp
src/hotspot/share/gc/z/zCollectedHeap.hpp
src/hotspot/share/memory/heapInspection.cpp
src/hotspot/share/prims/jvmtiTagMap.cpp
src/hotspot/share/services/heapDumper.cpp
--- a/src/hotspot/share/gc/epsilon/epsilonHeap.cpp	Wed Nov 20 10:37:46 2019 +0100
+++ b/src/hotspot/share/gc/epsilon/epsilonHeap.cpp	Wed Nov 20 10:37:46 2019 +0100
@@ -290,8 +290,8 @@
   collect(gc_cause());
 }
 
-void EpsilonHeap::safe_object_iterate(ObjectClosure *cl) {
-  _space->safe_object_iterate(cl);
+void EpsilonHeap::object_iterate(ObjectClosure *cl) {
+  _space->object_iterate(cl);
 }
 
 void EpsilonHeap::print_on(outputStream *st) const {
--- a/src/hotspot/share/gc/epsilon/epsilonHeap.hpp	Wed Nov 20 10:37:46 2019 +0100
+++ b/src/hotspot/share/gc/epsilon/epsilonHeap.hpp	Wed Nov 20 10:37:46 2019 +0100
@@ -103,10 +103,7 @@
   virtual void do_full_collection(bool clear_all_soft_refs);
 
   // Heap walking support
-  virtual void safe_object_iterate(ObjectClosure* cl);
-  virtual void object_iterate(ObjectClosure* cl) {
-    safe_object_iterate(cl);
-  }
+  virtual void object_iterate(ObjectClosure* cl);
 
   // Object pinning support: every object is implicitly pinned
   virtual bool supports_object_pinning() const           { return true; }
--- a/src/hotspot/share/gc/g1/g1CollectedHeap.hpp	Wed Nov 20 10:37:46 2019 +0100
+++ b/src/hotspot/share/gc/g1/g1CollectedHeap.hpp	Wed Nov 20 10:37:46 2019 +0100
@@ -1166,10 +1166,6 @@
   // Iterate over all objects, calling "cl.do_object" on each.
   virtual void object_iterate(ObjectClosure* cl);
 
-  virtual void safe_object_iterate(ObjectClosure* cl) {
-    object_iterate(cl);
-  }
-
   // Iterate over heap regions, in address order, terminating the
   // iteration early if the "do_heap_region" method returns "true".
   void heap_region_iterate(HeapRegionClosure* blk) const;
--- a/src/hotspot/share/gc/parallel/parallelScavengeHeap.hpp	Wed Nov 20 10:37:46 2019 +0100
+++ b/src/hotspot/share/gc/parallel/parallelScavengeHeap.hpp	Wed Nov 20 10:37:46 2019 +0100
@@ -213,7 +213,6 @@
   size_t unsafe_max_tlab_alloc(Thread* thr) const;
 
   void object_iterate(ObjectClosure* cl);
-  void safe_object_iterate(ObjectClosure* cl) { object_iterate(cl); }
 
   HeapWord* block_start(const void* addr) const;
   bool block_is_obj(const HeapWord* addr) const;
--- a/src/hotspot/share/gc/shared/collectedHeap.hpp	Wed Nov 20 10:37:46 2019 +0100
+++ b/src/hotspot/share/gc/shared/collectedHeap.hpp	Wed Nov 20 10:37:46 2019 +0100
@@ -387,10 +387,6 @@
   // Iterate over all objects, calling "cl.do_object" on each.
   virtual void object_iterate(ObjectClosure* cl) = 0;
 
-  // Similar to object_iterate() except iterates only
-  // over live objects.
-  virtual void safe_object_iterate(ObjectClosure* cl) = 0;
-
   // Returns the longest time (in ms) that has elapsed since the last
   // time that any part of the heap was examined by a garbage collection.
   virtual jlong millis_since_last_gc() = 0;
--- a/src/hotspot/share/gc/shared/genCollectedHeap.cpp	Wed Nov 20 10:37:46 2019 +0100
+++ b/src/hotspot/share/gc/shared/genCollectedHeap.cpp	Wed Nov 20 10:37:46 2019 +0100
@@ -1034,11 +1034,6 @@
   _old_gen->object_iterate(cl);
 }
 
-void GenCollectedHeap::safe_object_iterate(ObjectClosure* cl) {
-  _young_gen->safe_object_iterate(cl);
-  _old_gen->safe_object_iterate(cl);
-}
-
 Space* GenCollectedHeap::space_containing(const void* addr) const {
   Space* res = _young_gen->space_containing(addr);
   if (res != NULL) {
--- a/src/hotspot/share/gc/shared/genCollectedHeap.hpp	Wed Nov 20 10:37:46 2019 +0100
+++ b/src/hotspot/share/gc/shared/genCollectedHeap.hpp	Wed Nov 20 10:37:46 2019 +0100
@@ -248,7 +248,6 @@
   // Iteration functions.
   void oop_iterate(OopIterateClosure* cl);
   void object_iterate(ObjectClosure* cl);
-  void safe_object_iterate(ObjectClosure* cl);
   Space* space_containing(const void* addr) const;
 
   // A CollectedHeap is divided into a dense sequence of "blocks"; that is,
--- a/src/hotspot/share/gc/shared/generation.cpp	Wed Nov 20 10:37:46 2019 +0100
+++ b/src/hotspot/share/gc/shared/generation.cpp	Wed Nov 20 10:37:46 2019 +0100
@@ -289,21 +289,6 @@
   space_iterate(&blk);
 }
 
-class GenerationSafeObjIterateClosure : public SpaceClosure {
- private:
-  ObjectClosure* _cl;
- public:
-  virtual void do_space(Space* s) {
-    s->safe_object_iterate(_cl);
-  }
-  GenerationSafeObjIterateClosure(ObjectClosure* cl) : _cl(cl) {}
-};
-
-void Generation::safe_object_iterate(ObjectClosure* cl) {
-  GenerationSafeObjIterateClosure blk(cl);
-  space_iterate(&blk);
-}
-
 #if INCLUDE_SERIALGC
 
 void Generation::prepare_for_compaction(CompactPoint* cp) {
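The GenerationSafeObjIterateClosure removed above was a twin of the closure that Generation::object_iterate() keeps using (its tail is visible at the top of this hunk). A sketch of that retained pattern, reconstructed from the removed twin; the exact names in the retained code are assumed:

// Reconstruction for illustration: delegate the walk to every space in the
// generation, mirroring the removed safe_object_iterate() twin above.
class GenerationObjIterateClosure : public SpaceClosure {
 private:
  ObjectClosure* _cl;
 public:
  virtual void do_space(Space* s) {
    s->object_iterate(_cl);              // walk all objects in this space
  }
  GenerationObjIterateClosure(ObjectClosure* cl) : _cl(cl) {}
};

void Generation::object_iterate(ObjectClosure* cl) {
  GenerationObjIterateClosure blk(cl);
  space_iterate(&blk);                   // apply the closure to each space
}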
--- a/src/hotspot/share/gc/shared/generation.hpp	Wed Nov 20 10:37:46 2019 +0100
+++ b/src/hotspot/share/gc/shared/generation.hpp	Wed Nov 20 10:37:46 2019 +0100
@@ -470,11 +470,6 @@
   // each.
   virtual void object_iterate(ObjectClosure* cl);
 
-  // Iterate over all safe objects in the generation, calling "cl.do_object" on
-  // each.  An object is safe if its references point to other objects in
-  // the heap.  This defaults to object_iterate() unless overridden.
-  virtual void safe_object_iterate(ObjectClosure* cl);
-
   // Apply "cl->do_oop" to (the address of) all and only all the ref fields
   // in the current generation that contain pointers to objects in younger
   // generations. Objects allocated since the last "save_marks" call are
--- a/src/hotspot/share/gc/shared/space.cpp	Wed Nov 20 10:37:46 2019 +0100
+++ b/src/hotspot/share/gc/shared/space.cpp	Wed Nov 20 10:37:46 2019 +0100
@@ -498,12 +498,6 @@
   object_iterate_from(bottom(), blk);
 }
 
-// For a ContiguousSpace object_iterate() and safe_object_iterate()
-// are the same.
-void ContiguousSpace::safe_object_iterate(ObjectClosure* blk) {
-  object_iterate(blk);
-}
-
 void ContiguousSpace::object_iterate_from(HeapWord* mark, ObjectClosure* blk) {
   while (mark < top()) {
     blk->do_object(oop(mark));
--- a/src/hotspot/share/gc/shared/space.hpp	Wed Nov 20 10:37:46 2019 +0100
+++ b/src/hotspot/share/gc/shared/space.hpp	Wed Nov 20 10:37:46 2019 +0100
@@ -175,9 +175,6 @@
   // each.  Objects allocated by applications of the closure are not
   // included in the iteration.
   virtual void object_iterate(ObjectClosure* blk) = 0;
-  // Similar to object_iterate() except only iterates over
-  // objects whose internal references point to objects in the space.
-  virtual void safe_object_iterate(ObjectClosure* blk) = 0;
 
   // Create and return a new dirty card to oop closure. Can be
   // overridden to return the appropriate type of closure
@@ -584,9 +581,6 @@
   // Iteration
   void oop_iterate(OopIterateClosure* cl);
   void object_iterate(ObjectClosure* blk);
-  // For contiguous spaces this method will iterate safely over objects
-  // in the space (i.e., between bottom and top) when at a safepoint.
-  void safe_object_iterate(ObjectClosure* blk);
 
   // Iterate over as many initialized objects in the space as possible,
   // calling "cl.do_object_careful" on each. Return NULL if all objects
--- a/src/hotspot/share/gc/shenandoah/shenandoahHeap.cpp	Wed Nov 20 10:37:46 2019 +0100
+++ b/src/hotspot/share/gc/shenandoah/shenandoahHeap.cpp	Wed Nov 20 10:37:46 2019 +0100
@@ -1324,11 +1324,6 @@
   }
 }
 
-void ShenandoahHeap::safe_object_iterate(ObjectClosure* cl) {
-  assert(SafepointSynchronize::is_at_safepoint(), "safe iteration is only available during safepoints");
-  object_iterate(cl);
-}
-
 void ShenandoahHeap::heap_region_iterate(ShenandoahHeapRegionClosure* blk) const {
   for (size_t i = 0; i < num_regions(); i++) {
     ShenandoahHeapRegion* current = get_region(i);
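Shenandoah's wrapper was the only override removed in this change that asserted the safepoint precondition before delegating; with it gone, that expectation rests with the call sites. An illustrative guard a caller could keep (hypothetical helper, not part of this change):

#include "gc/shared/collectedHeap.hpp"
#include "memory/iterator.hpp"
#include "memory/universe.hpp"
#include "runtime/safepoint.hpp"
#include "utilities/debug.hpp"

// Hypothetical: preserve the removed assertion at the call site before walking.
static void walk_heap_at_safepoint(ObjectClosure* cl) {
  assert(SafepointSynchronize::is_at_safepoint(),
         "heap iteration is only reliable inside a safepoint");
  Universe::heap()->object_iterate(cl);
}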
--- a/src/hotspot/share/gc/shenandoah/shenandoahHeap.hpp	Wed Nov 20 10:37:46 2019 +0100
+++ b/src/hotspot/share/gc/shenandoah/shenandoahHeap.hpp	Wed Nov 20 10:37:46 2019 +0100
@@ -547,7 +547,6 @@
 
   // Used for native heap walkers: heap dumpers, mostly
   void object_iterate(ObjectClosure* cl);
-  void safe_object_iterate(ObjectClosure* cl);
 
   // Used by RMI
   jlong millis_since_last_gc();
--- a/src/hotspot/share/gc/z/zCollectedHeap.cpp	Wed Nov 20 10:37:46 2019 +0100
+++ b/src/hotspot/share/gc/z/zCollectedHeap.cpp	Wed Nov 20 10:37:46 2019 +0100
@@ -246,10 +246,6 @@
   _heap.object_iterate(cl, true /* visit_weaks */);
 }
 
-void ZCollectedHeap::safe_object_iterate(ObjectClosure* cl) {
-  _heap.object_iterate(cl, true /* visit_weaks */);
-}
-
 void ZCollectedHeap::register_nmethod(nmethod* nm) {
   ZNMethod::register_nmethod(nm);
 }
--- a/src/hotspot/share/gc/z/zCollectedHeap.hpp	Wed Nov 20 10:37:46 2019 +0100
+++ b/src/hotspot/share/gc/z/zCollectedHeap.hpp	Wed Nov 20 10:37:46 2019 +0100
@@ -98,7 +98,6 @@
   virtual GrowableArray<MemoryPool*> memory_pools();
 
   virtual void object_iterate(ObjectClosure* cl);
-  virtual void safe_object_iterate(ObjectClosure* cl);
 
   virtual void register_nmethod(nmethod* nm);
   virtual void unregister_nmethod(nmethod* nm);
--- a/src/hotspot/share/memory/heapInspection.cpp	Wed Nov 20 10:37:46 2019 +0100
+++ b/src/hotspot/share/memory/heapInspection.cpp	Wed Nov 20 10:37:46 2019 +0100
@@ -719,7 +719,7 @@
   ResourceMark rm;
 
   RecordInstanceClosure ric(cit, filter);
-  Universe::heap()->safe_object_iterate(&ric);
+  Universe::heap()->object_iterate(&ric);
   return ric.missed_count();
 }
 
@@ -792,5 +792,5 @@
 
   // Iterate over objects in the heap
   FindInstanceClosure fic(k, result);
-  Universe::heap()->safe_object_iterate(&fic);
+  Universe::heap()->object_iterate(&fic);
 }
--- a/src/hotspot/share/prims/jvmtiTagMap.cpp	Wed Nov 20 10:37:46 2019 +0100
+++ b/src/hotspot/share/prims/jvmtiTagMap.cpp	Wed Nov 20 10:37:46 2019 +0100
@@ -1271,9 +1271,6 @@
     }
 
     // do the iteration
-    // If this operation encounters a bad object when using CMS,
-    // consider using safe_object_iterate() which avoids perm gen
-    // objects that may contain bad references.
     Universe::heap()->object_iterate(_blk);
   }
 
--- a/src/hotspot/share/services/heapDumper.cpp	Wed Nov 20 10:37:46 2019 +0100
+++ b/src/hotspot/share/services/heapDumper.cpp	Wed Nov 20 10:37:46 2019 +0100
@@ -1882,7 +1882,7 @@
   // The HPROF_GC_CLASS_DUMP and HPROF_GC_INSTANCE_DUMP are the vast bulk
   // of the heap dump.
   HeapObjectDumper obj_dumper(this, writer());
-  Universe::heap()->safe_object_iterate(&obj_dumper);
+  Universe::heap()->object_iterate(&obj_dumper);
 
   // HPROF_GC_ROOT_THREAD_OBJ + frames + jni locals
   do_threads();