8151158: [TESTBUG] java/util/concurrent/forkjoin/FJExceptionTableLeak.java fails due to out of memory
author:    dl
date:      Mon, 15 Aug 2016 09:13:22 -0700
changeset: 40279:7a1e94e544d6
parent:    40278:8801563939d0
child:     40280:5542e4584972
8151158: [TESTBUG] java/util/concurrent/forkjoin/FJExceptionTableLeak.java fails due to out of memory
8144836: [TESTBUG] FJExceptionTableLeak and RemoveLeak fail with -XX:+UseParallelGC -XX:+AggressiveOpts
Reviewed-by: martin, psandoz, amlu, darcy
jdk/test/java/util/concurrent/BlockingQueue/PollMemoryLeak.java
jdk/test/java/util/concurrent/ConcurrentLinkedQueue/RemoveLeak.java
jdk/test/java/util/concurrent/forkjoin/FJExceptionTableLeak.java
--- a/jdk/test/java/util/concurrent/BlockingQueue/PollMemoryLeak.java	Mon Aug 15 09:09:00 2016 -0700
+++ b/jdk/test/java/util/concurrent/BlockingQueue/PollMemoryLeak.java	Mon Aug 15 09:13:22 2016 -0700
@@ -26,43 +26,133 @@
  * However, the following notice accompanied the original version of this
  * file:
  *
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
+ * Written by Doug Lea and Martin Buchholz with assistance from
+ * members of JCP JSR-166 Expert Group and released to the public
+ * domain, as explained at
  * http://creativecommons.org/publicdomain/zero/1.0/
  */
 
 /*
  * @test
  * @bug 6236036 6264015
- * @compile PollMemoryLeak.java
- * @run main/othervm -Xmx8m PollMemoryLeak
- * @summary  Checks for OutOfMemoryError when an unbounded
- * number of aborted timed waits occur without a signal.
+ * @summary Checks for a memory leak when a sequence of aborted timed
+ * waits occur without a signal.  Uses the strategy of detecting
+ * changes in the size of the object graph retained by a root object.
  */
 
+import java.lang.reflect.AccessibleObject;
+import java.lang.reflect.Array;
+import java.lang.reflect.Field;
+import java.lang.reflect.Modifier;
+import java.util.ArrayDeque;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.IdentityHashMap;
+import java.util.Set;
 import java.util.concurrent.ArrayBlockingQueue;
 import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.LinkedBlockingDeque;
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.LinkedTransferQueue;
+import java.util.concurrent.PriorityBlockingQueue;
 import java.util.concurrent.SynchronousQueue;
 import java.util.concurrent.TimeUnit;
 
 public class PollMemoryLeak {
-    public static void main(String[] args) throws InterruptedException {
-        final BlockingQueue[] qs = {
-            new LinkedBlockingDeque(10),
-            new LinkedBlockingQueue(10),
-            new LinkedTransferQueue(),
-            new ArrayBlockingQueue(10),
-            new ArrayBlockingQueue(10, true),
-            new SynchronousQueue(),
-            new SynchronousQueue(true),
-        };
-        final long start = System.currentTimeMillis();
-        final long end = start + 10 * 1000;
-        while (System.currentTimeMillis() < end)
-            for (BlockingQueue q : qs)
-                q.poll(1, TimeUnit.NANOSECONDS);
+    public static void main(String[] args) throws Throwable {
+        new PollMemoryLeak().main();
+    }
+
+    void main() throws Throwable {
+        test(new LinkedBlockingDeque(10));
+        test(new LinkedBlockingQueue(10));
+        test(new LinkedTransferQueue());
+        test(new ArrayBlockingQueue(10));
+        test(new PriorityBlockingQueue());
+        test(new SynchronousQueue());
+        test(new SynchronousQueue(true));
+    }
+
+    void test(BlockingQueue q) throws Throwable {
+        assertNoLeak(q, () -> timedPoll(q));
+
+        // A demo that the leak detection infrastructure works
+        // assertNoLeak(q, () -> q.add(1));
+        // printRetainedObjects(q);
+    }
+
+    static void timedPoll(BlockingQueue q) {
+        try { q.poll(1, TimeUnit.NANOSECONDS); }
+        catch (InterruptedException ex) { throw new AssertionError(ex); }
+    }
+
+    // -------- leak detection infrastructure ---------------
+    void assertNoLeak(Object root, Runnable r) {
+        int prev = retainedObjects(root).size();
+        for (int i = 0; i < 3; i++) {
+            for (int j = 0; j < 3; j++) r.run();
+            int next = retainedObjects(root).size();
+            if (next <= prev)
+                return;
+            prev = next;
+        }
+        throw new AssertionError(
+            String.format("probable memory leak in %s: %s",
+                          root.getClass().getSimpleName(), root));
+    }
+
+    ConcurrentHashMap<Class<?>, Collection<Field>> classFields
+        = new ConcurrentHashMap<Class<?>, Collection<Field>>();
+
+    Collection<Field> referenceFieldsOf(Class<?> k) {
+        Collection<Field> fields = classFields.get(k);
+        if (fields == null) {
+            fields = new ArrayDeque<Field>();
+            ArrayDeque<Field> allFields = new ArrayDeque<Field>();
+            for (Class<?> c = k; c != null; c = c.getSuperclass())
+                for (Field field : c.getDeclaredFields())
+                    if (!Modifier.isStatic(field.getModifiers())
+                        && !field.getType().isPrimitive())
+                        fields.add(field);
+            AccessibleObject.setAccessible(
+                fields.toArray(new AccessibleObject[0]), true);
+            classFields.put(k, fields);
+        }
+        return fields;
+    }
+
+    static Object get(Field field, Object x) {
+        try { return field.get(x); }
+        catch (IllegalAccessException ex) { throw new AssertionError(ex); }
+    }
+
+    Set<Object> retainedObjects(Object x) {
+        ArrayDeque<Object> todo = new ArrayDeque<Object>() {
+            public void push(Object x) { if (x != null) super.push(x); }};
+        Set<Object> uniqueObjects = Collections.newSetFromMap(
+            new IdentityHashMap<Object, Boolean>());
+        todo.push(x);
+        while (!todo.isEmpty()) {
+            Object y = todo.pop();
+            if (uniqueObjects.contains(y))
+                continue;
+            uniqueObjects.add(y);
+            Class<?> k = y.getClass();
+            if (k.isArray() && !k.getComponentType().isPrimitive()) {
+                for (int i = 0, len = Array.getLength(y); i < len; i++)
+                    todo.push(Array.get(y, i));
+            } else {
+                for (Field field : referenceFieldsOf(k))
+                    todo.push(get(field, y));
+            }
+        }
+        return uniqueObjects;
+    }
+
+    /** for debugging the retained object graph */
+    void printRetainedObjects(Object x) {
+        for (Object y : retainedObjects(x))
+            System.out.printf("%s : %s%n", y.getClass().getSimpleName(), y);
     }
 }
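
The leak-detection infrastructure added above (and duplicated in RemoveLeak.java below) replaces the old fixed -Xmx heap bound: instead of waiting for an OutOfMemoryError, the test reflectively walks the object graph reachable from the queue under test and asserts that repeating the suspect operation does not keep growing the retained-object count. The following is a stripped-down, illustrative sketch of that idea only, with simplified names and no per-class field caching; it is not the test code itself, and the --add-opens note applies to JDKs newer than this changeset targets.

import java.lang.reflect.Array;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.ArrayDeque;
import java.util.Collections;
import java.util.IdentityHashMap;
import java.util.Set;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

// Illustrative sketch of the retained-object-graph strategy; the real
// tests cache reference fields per class and exercise several queues.
public class LeakCheckSketch {

    // Count every object reachable from root through non-static
    // reference fields and reference-array elements.
    static Set<Object> retainedObjects(Object root) throws Exception {
        Set<Object> seen = Collections.newSetFromMap(new IdentityHashMap<>());
        ArrayDeque<Object> todo = new ArrayDeque<>();
        todo.push(root);
        while (!todo.isEmpty()) {
            Object x = todo.pop();
            if (!seen.add(x)) continue;          // already visited
            Class<?> k = x.getClass();
            if (k.isArray()) {
                if (!k.getComponentType().isPrimitive())
                    for (int i = 0, n = Array.getLength(x); i < n; i++) {
                        Object e = Array.get(x, i);
                        if (e != null) todo.push(e);
                    }
            } else {
                for (Class<?> c = k; c != null; c = c.getSuperclass())
                    for (Field f : c.getDeclaredFields()) {
                        if (Modifier.isStatic(f.getModifiers())
                            || f.getType().isPrimitive()) continue;
                        // may need --add-opens java.base/java.util.concurrent=ALL-UNNAMED on JDK 9+
                        f.setAccessible(true);
                        Object v = f.get(x);
                        if (v != null) todo.push(v);
                    }
            }
        }
        return seen;
    }

    public static void main(String[] args) throws Exception {
        LinkedBlockingQueue<Integer> q = new LinkedBlockingQueue<>(10);
        int prev = retainedObjects(q).size();
        for (int attempt = 0; attempt < 3; attempt++) {
            // the operation suspected of leaking: an aborted timed poll
            for (int i = 0; i < 3; i++) q.poll(1, TimeUnit.NANOSECONDS);
            int next = retainedObjects(q).size();
            if (next <= prev) return;            // graph stopped growing: no leak
            prev = next;
        }
        throw new AssertionError("probable memory leak: retained set keeps growing");
    }
}

Because the check compares successive sizes of the retained set rather than exhausting a tiny heap, it no longer needs the -Xmx and GC-sensitive @run options that made the original tests fail under -XX:+UseParallelGC -XX:+AggressiveOpts.
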
--- a/jdk/test/java/util/concurrent/ConcurrentLinkedQueue/RemoveLeak.java	Mon Aug 15 09:09:00 2016 -0700
+++ b/jdk/test/java/util/concurrent/ConcurrentLinkedQueue/RemoveLeak.java	Mon Aug 15 09:13:22 2016 -0700
@@ -35,26 +35,120 @@
  * @test
  * @bug 8054446 8137184 8137185
  * @summary Regression test for memory leak in remove(Object)
- * @run main/othervm -Xmx2200k RemoveLeak
  */
 
+import java.lang.reflect.AccessibleObject;
+import java.lang.reflect.Array;
+import java.lang.reflect.Field;
+import java.lang.reflect.Modifier;
+import java.util.ArrayDeque;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.IdentityHashMap;
+import java.util.Set;
+import java.util.concurrent.ArrayBlockingQueue;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentLinkedDeque;
 import java.util.concurrent.ConcurrentLinkedQueue;
+import java.util.concurrent.LinkedBlockingDeque;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.LinkedTransferQueue;
+import java.util.concurrent.PriorityBlockingQueue;
 
 public class RemoveLeak {
-    public static void main(String[] args) {
-        int i = 0;
-        // Without bug fix, OutOfMemoryError was observed at iteration 65120
-        int iterations = 10 * 65120;
-        try {
-            ConcurrentLinkedQueue<Long> queue = new ConcurrentLinkedQueue<>();
-            queue.add(0L);
-            while (i++ < iterations) {
-                queue.add(1L);
-                queue.remove(1L);
+    public static void main(String[] args) throws Throwable {
+        new RemoveLeak().main();
+    }
+
+    void main() throws Throwable {
+        test(new ConcurrentLinkedDeque());
+        test(new ConcurrentLinkedQueue());
+        test(new LinkedBlockingDeque(10));
+        test(new LinkedBlockingQueue(10));
+        test(new LinkedTransferQueue());
+        test(new ArrayBlockingQueue(10));
+        test(new PriorityBlockingQueue());
+    }
+
+    void test(Collection c) throws Throwable {
+        assertNoLeak(c, () -> addRemove(c));
+
+        // A demo that the leak detection infrastructure works
+        // assertNoLeak(c, () -> c.add(1));
+        // printRetainedObjects(c);
+    }
+
+    static void addRemove(Collection c) {
+        c.add(1);
+        c.remove(1);
+    }
+
+    // -------- leak detection infrastructure ---------------
+    void assertNoLeak(Object root, Runnable r) {
+        int prev = retainedObjects(root).size();
+        for (int i = 0; i < 3; i++) {
+            for (int j = 0; j < 3; j++) r.run();
+            int next = retainedObjects(root).size();
+            if (next <= prev)
+                return;
+            prev = next;
+        }
+        throw new AssertionError(
+            String.format("probable memory leak in %s: %s",
+                          root.getClass().getSimpleName(), root));
+    }
+
+    ConcurrentHashMap<Class<?>, Collection<Field>> classFields
+        = new ConcurrentHashMap<Class<?>, Collection<Field>>();
+
+    Collection<Field> referenceFieldsOf(Class<?> k) {
+        Collection<Field> fields = classFields.get(k);
+        if (fields == null) {
+            fields = new ArrayDeque<Field>();
+            ArrayDeque<Field> allFields = new ArrayDeque<Field>();
+            for (Class<?> c = k; c != null; c = c.getSuperclass())
+                for (Field field : c.getDeclaredFields())
+                    if (!Modifier.isStatic(field.getModifiers())
+                        && !field.getType().isPrimitive())
+                        fields.add(field);
+            AccessibleObject.setAccessible(
+                fields.toArray(new AccessibleObject[0]), true);
+            classFields.put(k, fields);
+        }
+        return fields;
+    }
+
+    static Object get(Field field, Object x) {
+        try { return field.get(x); }
+        catch (IllegalAccessException ex) { throw new AssertionError(ex); }
+    }
+
+    Set<Object> retainedObjects(Object x) {
+        ArrayDeque<Object> todo = new ArrayDeque<Object>() {
+            public void push(Object x) { if (x != null) super.push(x); }};
+        Set<Object> uniqueObjects = Collections.newSetFromMap(
+            new IdentityHashMap<Object, Boolean>());
+        todo.push(x);
+        while (!todo.isEmpty()) {
+            Object y = todo.pop();
+            if (uniqueObjects.contains(y))
+                continue;
+            uniqueObjects.add(y);
+            Class<?> k = y.getClass();
+            if (k.isArray() && !k.getComponentType().isPrimitive()) {
+                for (int i = 0, len = Array.getLength(y); i < len; i++)
+                    todo.push(Array.get(y, i));
+            } else {
+                for (Field field : referenceFieldsOf(k))
+                    todo.push(get(field, y));
             }
-        } catch (Error t) {
-            System.err.printf("failed at iteration %d/%d%n", i, iterations);
-            throw t;
         }
+        return uniqueObjects;
+    }
+
+    /** for debugging the retained object graph */
+    void printRetainedObjects(Object x) {
+        for (Object y : retainedObjects(x))
+            System.out.printf("%s : %s%n", y.getClass().getSimpleName(), y);
     }
 }
--- a/jdk/test/java/util/concurrent/forkjoin/FJExceptionTableLeak.java	Mon Aug 15 09:09:00 2016 -0700
+++ b/jdk/test/java/util/concurrent/forkjoin/FJExceptionTableLeak.java	Mon Aug 15 09:13:22 2016 -0700
@@ -26,51 +26,117 @@
  * However, the following notice accompanied the original version of this
  * file:
  *
- * Written by Doug Lea with assistance from members of JCP JSR-166
- * Expert Group and released to the public domain, as explained at
+ * Written by Doug Lea and Martin Buchholz with assistance from
+ * members of JCP JSR-166 Expert Group and released to the public
+ * domain, as explained at
  * http://creativecommons.org/publicdomain/zero/1.0/
  */
 
 /*
  * @test
- * @author Doug Lea
  * @bug 8004138
- * @key intermittent
- * @summary Check if ForkJoinPool table leaks thrown exceptions.
- * @run main/othervm -Xmx8m -Djava.util.concurrent.ForkJoinPool.common.parallelism=4 FJExceptionTableLeak
+ * @summary Checks that ForkJoinTask thrown exceptions are not leaked.
+ * This whitebox test is sensitive to forkjoin implementation details.
  */
 
+import static java.util.concurrent.TimeUnit.SECONDS;
+
+import java.lang.ref.WeakReference;
+import java.lang.reflect.Field;
+import java.util.ArrayList;
+import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ForkJoinPool;
+import java.util.concurrent.ForkJoinTask;
 import java.util.concurrent.RecursiveAction;
+import java.util.function.BooleanSupplier;
 
 public class FJExceptionTableLeak {
-    // This test was observed to fail with pre-bug-fix jdk7 -Xmx8m,
-    // using STEPS = 220 and TASKS_PER_STEP = 100
-    static final int PRE_BUG_FIX_FAILURE_STEPS = 220;
-    static final int STEPS = 10 * PRE_BUG_FIX_FAILURE_STEPS;
-    static final int TASKS_PER_STEP = 100;
-
     static class FailingTaskException extends RuntimeException {}
     static class FailingTask extends RecursiveAction {
-        public void compute() {
-            throw new FailingTaskException();
-        }
+        public void compute() { throw new FailingTaskException(); }
+    }
+
+    static int bucketsInuse(Object[] exceptionTable) {
+        int count = 0;
+        for (Object x : exceptionTable)
+            if (x != null) count++;
+        return count;
     }
 
-    public static void main(String[] args) throws InterruptedException {
-        ForkJoinPool pool = new ForkJoinPool(4);
-        FailingTask[] tasks = new FailingTask[TASKS_PER_STEP];
-        for (int k = 0; k < STEPS; ++k) {
-            for (int i = 0; i < tasks.length; ++i)
-                tasks[i] = new FailingTask();
-            for (int i = 0; i < tasks.length; ++i)
-                pool.execute(tasks[i]);
-            for (int i = 0; i < tasks.length; ++i) {
+    public static void main(String[] args) throws Exception {
+        final ForkJoinPool pool = new ForkJoinPool(4);
+        final Field exceptionTableField =
+            ForkJoinTask.class.getDeclaredField("exceptionTable");
+        exceptionTableField.setAccessible(true);
+        final Object[] exceptionTable = (Object[]) exceptionTableField.get(null);
+
+        if (bucketsInuse(exceptionTable) != 0) throw new AssertionError();
+
+        final ArrayList<FailingTask> tasks = new ArrayList<>();
+
+        // Keep submitting failing tasks until most of the exception
+        // table buckets are in use
+        do {
+            for (int i = 0; i < exceptionTable.length; i++) {
+                FailingTask task = new FailingTask();
+                pool.execute(task);
+                tasks.add(task); // retain strong refs to all tasks, for now
+            }
+            for (FailingTask task : tasks) {
                 try {
-                    tasks[i].join();
+                    task.join();
                     throw new AssertionError("should throw");
                 } catch (FailingTaskException success) {}
             }
+        } while (bucketsInuse(exceptionTable) < exceptionTable.length * 3 / 4);
+
+        // Retain a strong ref to one last failing task;
+        // task.join() will trigger exception table expunging.
+        FailingTask lastTask = tasks.get(0);
+
+        // Clear all other strong refs, making exception table cleanable
+        tasks.clear();
+
+        BooleanSupplier exceptionTableIsClean = () -> {
+            try {
+                lastTask.join();
+                throw new AssertionError("should throw");
+            } catch (FailingTaskException expected) {}
+            int count = bucketsInuse(exceptionTable);
+            if (count == 0)
+                throw new AssertionError("expected to find last task");
+            return count == 1;
+        };
+        gcAwait(exceptionTableIsClean);
+    }
+
+    // --------------- GC finalization infrastructure ---------------
+
+    /** No guarantees, but effective in practice. */
+    static void forceFullGc() {
+        CountDownLatch finalizeDone = new CountDownLatch(1);
+        WeakReference<?> ref = new WeakReference<Object>(new Object() {
+            protected void finalize() { finalizeDone.countDown(); }});
+        try {
+            for (int i = 0; i < 10; i++) {
+                System.gc();
+                if (finalizeDone.await(1L, SECONDS) && ref.get() == null) {
+                    System.runFinalization(); // try to pick up stragglers
+                    return;
+                }
+            }
+        } catch (InterruptedException unexpected) {
+            throw new AssertionError("unexpected InterruptedException");
         }
+        throw new AssertionError("failed to do a \"full\" gc");
+    }
+
+    static void gcAwait(BooleanSupplier s) {
+        for (int i = 0; i < 10; i++) {
+            if (s.getAsBoolean())
+                return;
+            forceFullGc();
+        }
+        throw new AssertionError("failed to satisfy condition");
     }
 }
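
For context on what the rewritten whitebox test probes: in this JDK's implementation, an exception thrown from a ForkJoinTask's compute() is recorded in ForkJoinTask's private static exceptionTable, keyed by weak references to the task, and replayed to every caller of join(); the bug guarded against is that table retaining entries after the tasks become unreachable. Below is a minimal demonstration of only the user-visible half of that mechanism (no reflection into the private table); the class and method names are just for the demo.

import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveAction;

public class FailingJoinDemo {
    static class FailingTaskException extends RuntimeException {}

    static class FailingTask extends RecursiveAction {
        protected void compute() { throw new FailingTaskException(); }
    }

    public static void main(String[] args) {
        ForkJoinPool pool = new ForkJoinPool(4);
        FailingTask task = new FailingTask();
        pool.execute(task);
        try {
            task.join();   // replays the exception recorded when compute() threw
            throw new AssertionError("join() should have rethrown");
        } catch (FailingTaskException expected) {
            // join() rethrows the task's exception unwrapped (unlike get(),
            // which wraps it in ExecutionException); the record backing this
            // replay is what FJExceptionTableLeak checks gets expunged once
            // the task itself becomes unreachable.
        }
        pool.shutdown();
    }
}
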