--- a/jdk/src/share/classes/java/util/ArrayList.java Fri Apr 12 10:02:33 2013 -0700
+++ b/jdk/src/share/classes/java/util/ArrayList.java Wed Apr 10 12:43:18 2013 -0700
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 1997, 2012, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -105,8 +105,20 @@
private static final long serialVersionUID = 8683452581122892189L;
/**
+ * Default initial capacity.
+ */
+ private static final int DEFAULT_CAPACITY = 10;
+
+ /**
+ * Shared empty array instance used for empty instances.
+ */
+ private static final Object[] EMPTY_ELEMENTDATA = {};
+
+ /**
* The array buffer into which the elements of the ArrayList are stored.
- * The capacity of the ArrayList is the length of this array buffer.
+ * The capacity of the ArrayList is the length of this array buffer. Any
+ * empty ArrayList with elementData == EMPTY_ELEMENTDATA will be expanded to
+ * DEFAULT_CAPACITY when the first element is added.
*/
private transient Object[] elementData;
@@ -136,7 +148,8 @@
* Constructs an empty list with an initial capacity of ten.
*/
public ArrayList() {
- this(10);
+ super();
+ this.elementData = EMPTY_ELEMENTDATA;
}
/**
@@ -162,8 +175,7 @@
*/
public void trimToSize() {
modCount++;
- int oldCapacity = elementData.length;
- if (size < oldCapacity) {
+ if (size < elementData.length) {
elementData = Arrays.copyOf(elementData, size);
}
}
@@ -176,12 +188,29 @@
* @param minCapacity the desired minimum capacity
*/
public void ensureCapacity(int minCapacity) {
- if (minCapacity > 0)
- ensureCapacityInternal(minCapacity);
+ int minExpand = (elementData != EMPTY_ELEMENTDATA)
+ // any size if real element table
+ ? 0
+ // larger than default for empty table. It's already supposed to be
+ // at default size.
+ : DEFAULT_CAPACITY;
+
+ if (minCapacity > minExpand) {
+ ensureExplicitCapacity(minCapacity);
+ }
}
private void ensureCapacityInternal(int minCapacity) {
+ if (elementData == EMPTY_ELEMENTDATA) {
+ minCapacity = Math.max(DEFAULT_CAPACITY, minCapacity);
+ }
+
+ ensureExplicitCapacity(minCapacity);
+ }
+
+ private void ensureExplicitCapacity(int minCapacity) {
modCount++;
+
// overflow-conscious code
if (minCapacity - elementData.length > 0)
grow(minCapacity);
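
Editorial note: with these hunks the no-arg constructor shares EMPTY_ELEMENTDATA and the backing array is only grown to DEFAULT_CAPACITY on the first add. Below is a minimal sketch of how the lazy allocation can be observed, assuming reflective access to the private elementData field is permitted (true on the JDK 7/8 builds this patch targets); the class name LazyAllocationDemo is illustrative only and not part of the change.

    import java.lang.reflect.Field;
    import java.util.ArrayList;

    public class LazyAllocationDemo {
        public static void main(String[] args) throws Exception {
            ArrayList<String> list = new ArrayList<>();

            // Peek at the private backing array; the field name comes from the patch above.
            Field elementData = ArrayList.class.getDeclaredField("elementData");
            elementData.setAccessible(true);

            // With the patch applied the no-arg constructor shares EMPTY_ELEMENTDATA,
            // so the reported capacity is 0 rather than 10.
            System.out.println("capacity before first add: "
                    + ((Object[]) elementData.get(list)).length);

            list.add("first");

            // The first add routes through ensureCapacityInternal, which rounds the
            // requested capacity up to DEFAULT_CAPACITY (10).
            System.out.println("capacity after first add:  "
                    + ((Object[]) elementData.get(list)).length);
        }
    }

Note also that the reworked public ensureCapacity(n) is now a no-op on an untouched default-constructed list for n <= DEFAULT_CAPACITY, which is what the minExpand check above encodes.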
@@ -450,7 +479,7 @@
if (numMoved > 0)
System.arraycopy(elementData, index+1, elementData, index,
numMoved);
- elementData[--size] = null; // Let gc do its work
+ elementData[--size] = null; // clear to let GC do its work
return oldValue;
}
@@ -495,7 +524,7 @@
if (numMoved > 0)
System.arraycopy(elementData, index+1, elementData, index,
numMoved);
- elementData[--size] = null; // Let gc do its work
+ elementData[--size] = null; // clear to let GC do its work
}
/**
@@ -505,7 +534,7 @@
public void clear() {
modCount++;
- // Let gc do its work
+ // clear to let GC do its work
for (int i = 0; i < size; i++)
elementData[i] = null;
@@ -586,10 +615,12 @@
System.arraycopy(elementData, toIndex, elementData, fromIndex,
numMoved);
- // Let gc do its work
+ // clear to let GC do its work
int newSize = size - (toIndex-fromIndex);
- while (size != newSize)
- elementData[--size] = null;
+ for (int i = newSize; i < size; i++) {
+ elementData[i] = null;
+ }
+ size = newSize;
}
/**
@@ -677,6 +708,7 @@
w += size - r;
}
if (w != size) {
+ // clear to let GC do its work
for (int i = w; i < size; i++)
elementData[i] = null;
modCount += size - w;
@@ -701,17 +733,17 @@
int expectedModCount = modCount;
s.defaultWriteObject();
- // Write out array length
- s.writeInt(elementData.length);
+ // Write out size as capacity for behavioural compatibility with clone()
+ s.writeInt(size);
// Write out all elements in the proper order.
- for (int i=0; i<size; i++)
+ for (int i=0; i<size; i++) {
s.writeObject(elementData[i]);
+ }
if (modCount != expectedModCount) {
throw new ConcurrentModificationException();
}
-
}
/**
@@ -720,16 +752,24 @@
*/
private void readObject(java.io.ObjectInputStream s)
throws java.io.IOException, ClassNotFoundException {
+ elementData = EMPTY_ELEMENTDATA;
+
// Read in size, and any hidden stuff
s.defaultReadObject();
- // Read in array length and allocate array
- int arrayLength = s.readInt();
- Object[] a = elementData = new Object[arrayLength];
+ // Read in capacity
+ s.readInt(); // ignored
+
+ if (size > 0) {
+ // be like clone(), allocate array based upon size not capacity
+ ensureCapacityInternal(size);
- // Read in all elements in the proper order.
- for (int i=0; i<size; i++)
- a[i] = s.readObject();
+ Object[] a = elementData;
+ // Read in all elements in the proper order.
+ for (int i=0; i<size; i++) {
+ a[i] = s.readObject();
+ }
+ }
}
/**
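
Editorial note: taken together, the ArrayList serialization changes write the list's size in place of the old capacity field and ignore that value on read, so the stream stays compatible in both directions while the rebuilt array is sized like clone() would size it. A small round-trip sketch, offered as a plain usage example rather than part of the patch:

    import java.io.*;
    import java.util.ArrayList;
    import java.util.Arrays;

    public class ArrayListRoundTrip {
        public static void main(String[] args) throws Exception {
            ArrayList<Integer> original = new ArrayList<>(Arrays.asList(1, 2, 3));

            // Serialize: writeObject now records size (3) where the capacity used to go.
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            try (ObjectOutputStream out = new ObjectOutputStream(bytes)) {
                out.writeObject(original);
            }

            // Deserialize: readObject ignores the recorded value and sizes the backing
            // array from the element count, mirroring clone().
            try (ObjectInputStream in = new ObjectInputStream(
                    new ByteArrayInputStream(bytes.toByteArray()))) {
                @SuppressWarnings("unchecked")
                ArrayList<Integer> copy = (ArrayList<Integer>) in.readObject();
                System.out.println(original.equals(copy));  // true
            }
        }
    }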
--- a/jdk/src/share/classes/java/util/HashMap.java Fri Apr 12 10:02:33 2013 -0700
+++ b/jdk/src/share/classes/java/util/HashMap.java Wed Apr 10 12:43:18 2013 -0700
@@ -129,7 +129,7 @@
/**
* The default initial capacity - MUST be a power of two.
*/
- static final int DEFAULT_INITIAL_CAPACITY = 16;
+ static final int DEFAULT_INITIAL_CAPACITY = 1 << 4; // aka 16
/**
* The maximum capacity, used if a higher value is implicitly specified
@@ -144,9 +144,14 @@
static final float DEFAULT_LOAD_FACTOR = 0.75f;
/**
+ * An empty table instance to share when the table is not inflated.
+ */
+ static final Entry<?,?>[] EMPTY_TABLE = {};
+
+ /**
* The table, resized as necessary. Length MUST always be a power of two.
*/
- transient Entry<?,?>[] table;
+ transient Entry<?,?>[] table = EMPTY_TABLE;
/**
* The number of key-value mappings contained in this map.
@@ -157,6 +162,8 @@
* The next size value at which to resize (capacity * load factor).
* @serial
*/
+ // If table == EMPTY_TABLE then this is the initial capacity at which the
+ // table will be created when inflated.
int threshold;
/**
@@ -223,14 +230,8 @@
throw new IllegalArgumentException("Illegal load factor: " +
loadFactor);
- // Find a power of 2 >= initialCapacity
- int capacity = 1;
- while (capacity < initialCapacity)
- capacity <<= 1;
-
this.loadFactor = loadFactor;
- threshold = (int)Math.min(capacity * loadFactor, MAXIMUM_CAPACITY + 1);
- table = new Entry<?,?>[capacity];
+ threshold = initialCapacity;
init();
}
@@ -265,9 +266,33 @@
public HashMap(Map<? extends K, ? extends V> m) {
this(Math.max((int) (m.size() / DEFAULT_LOAD_FACTOR) + 1,
DEFAULT_INITIAL_CAPACITY), DEFAULT_LOAD_FACTOR);
+ inflateTable(threshold);
+
putAllForCreate(m);
}
+ private static int roundUpToPowerOf2(int number) {
+ // assert number >= 0 : "number must be non-negative";
+ int rounded = number >= MAXIMUM_CAPACITY
+ ? MAXIMUM_CAPACITY
+ : (rounded = Integer.highestOneBit(number)) != 0
+ ? (Integer.bitCount(number) > 1) ? rounded << 1 : rounded
+ : 1;
+
+ return rounded;
+ }
+
+ /**
+ * Inflates the table.
+ */
+ private void inflateTable(int toSize) {
+ // Find a power of 2 >= toSize
+ int capacity = roundUpToPowerOf2(toSize);
+
+ threshold = (int) Math.min(capacity * loadFactor, MAXIMUM_CAPACITY + 1);
+ table = new Entry[capacity];
+ }
+
// internal utilities
/**
@@ -305,6 +330,7 @@
* Returns index for hash code h.
*/
static int indexFor(int h, int length) {
+ // assert Integer.bitCount(length) == 1 : "length must be a non-zero power of 2";
return h & (length-1);
}
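
Editorial note: the indexFor mask only works when the table length is a power of two, which roundUpToPowerOf2 guarantees before inflateTable allocates. Below is a standalone copy of the helper from this patch with a few sample inputs; the surrounding class and main method are illustrative and not part of the change.

    public class RoundUpDemo {
        static final int MAXIMUM_CAPACITY = 1 << 30;

        // Copied from the patch: rounds a non-negative request up to a power of two,
        // never below 1 and never above MAXIMUM_CAPACITY.
        static int roundUpToPowerOf2(int number) {
            int rounded = number >= MAXIMUM_CAPACITY
                    ? MAXIMUM_CAPACITY
                    : (rounded = Integer.highestOneBit(number)) != 0
                        ? (Integer.bitCount(number) > 1) ? rounded << 1 : rounded
                        : 1;
            return rounded;
        }

        public static void main(String[] args) {
            for (int n : new int[] {0, 1, 3, 16, 17, 1000}) {
                System.out.println(n + " -> " + roundUpToPowerOf2(n));
            }
            // 0 -> 1, 1 -> 1, 3 -> 4, 16 -> 16, 17 -> 32, 1000 -> 1024
        }
    }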
@@ -369,6 +395,10 @@
*/
@SuppressWarnings("unchecked")
final Entry<K,V> getEntry(Object key) {
+ if (isEmpty()) {
+ return null;
+ }
+
int hash = (key == null) ? 0 : hash(key);
for (Entry<?,?> e = table[indexFor(hash, table.length)];
e != null;
@@ -381,7 +411,6 @@
return null;
}
-
/**
* Associates the specified value with the specified key in this map.
* If the map previously contained a mapping for the key, the old
@@ -395,6 +424,9 @@
* previously associated <tt>null</tt> with <tt>key</tt>.)
*/
public V put(K key, V value) {
+ if (table == EMPTY_TABLE) {
+ inflateTable(threshold);
+ }
if (key == null)
return putForNullKey(value);
int hash = hash(key);
@@ -529,6 +561,10 @@
if (numKeysToBeAdded == 0)
return;
+ if (table == EMPTY_TABLE) {
+ inflateTable((int) Math.max(numKeysToBeAdded * loadFactor, threshold));
+ }
+
/*
* Expand the map if the number of mappings to be added
* is greater than or equal to threshold. This is conservative; the
@@ -573,6 +609,9 @@
* for this key.
*/
final Entry<K,V> removeEntryForKey(Object key) {
+ if (isEmpty()) {
+ return null;
+ }
int hash = (key == null) ? 0 : hash(key);
int i = indexFor(hash, table.length);
@SuppressWarnings("unchecked")
@@ -605,7 +644,7 @@
* for matching.
*/
final Entry<K,V> removeMapping(Object o) {
- if (!(o instanceof Map.Entry))
+ if (isEmpty() || !(o instanceof Map.Entry))
return null;
Map.Entry<?,?> entry = (Map.Entry<?,?>) o;
@@ -641,9 +680,7 @@
*/
public void clear() {
modCount++;
- Entry<?,?>[] tab = table;
- for (int i = 0; i < tab.length; i++)
- tab[i] = null;
+ Arrays.fill(table, null);
size = 0;
}
@@ -693,7 +730,14 @@
} catch (CloneNotSupportedException e) {
// assert false;
}
- result.table = new Entry<?,?>[table.length];
+ if (result.table != EMPTY_TABLE) {
+ result.inflateTable(Math.min(
+ (int) Math.min(
+ size * Math.min(1 / loadFactor, 4.0f),
+ // we have limits...
+ HashMap.MAXIMUM_CAPACITY),
+ table.length));
+ }
result.entrySet = null;
result.modCount = 0;
result.size = 0;
@@ -749,8 +793,7 @@
}
public final int hashCode() {
- return (key==null ? 0 : key.hashCode()) ^
- (value==null ? 0 : value.hashCode());
+ return Objects.hashCode(getKey()) ^ Objects.hashCode(getValue());
}
public final String toString() {
@@ -1017,14 +1060,15 @@
private void writeObject(java.io.ObjectOutputStream s)
throws IOException
{
- Iterator<Map.Entry<K,V>> i =
- (size > 0) ? entrySet0().iterator() : null;
-
// Write out the threshold, loadfactor, and any hidden stuff
s.defaultWriteObject();
// Write out number of buckets
- s.writeInt(table.length);
+ if (table==EMPTY_TABLE) {
+ s.writeInt(roundUpToPowerOf2(threshold));
+ } else {
+ s.writeInt(table.length);
+ }
// Write out size (number of Mappings)
s.writeInt(size);
@@ -1049,16 +1093,18 @@
{
// Read in the threshold (ignored), loadfactor, and any hidden stuff
s.defaultReadObject();
- if (loadFactor <= 0 || Float.isNaN(loadFactor))
+ if (loadFactor <= 0 || Float.isNaN(loadFactor)) {
throw new InvalidObjectException("Illegal load factor: " +
loadFactor);
+ }
- // set hashMask
+ // set other fields that need values
Holder.UNSAFE.putIntVolatile(this, Holder.HASHSEED_OFFSET,
sun.misc.Hashing.randomHashSeed(this));
+ table = EMPTY_TABLE;
- // Read in number of buckets and allocate the bucket array;
- s.readInt(); // ignored
+ // Read in number of buckets
+ s.readInt(); // ignored.
// Read number of mappings
int mappings = s.readInt();
@@ -1066,23 +1112,21 @@
throw new InvalidObjectException("Illegal mappings count: " +
mappings);
- int initialCapacity = (int) Math.min(
- // capacity chosen by number of mappings
- // and desired load (if >= 0.25)
- mappings * Math.min(1 / loadFactor, 4.0f),
- // we have limits...
- HashMap.MAXIMUM_CAPACITY);
- int capacity = 1;
- // find smallest power of two which holds all mappings
- while (capacity < initialCapacity) {
- capacity <<= 1;
+ // capacity chosen by number of mappings and desired load (if >= 0.25)
+ int capacity = (int) Math.min(
+ mappings * Math.min(1 / loadFactor, 4.0f),
+ // we have limits...
+ HashMap.MAXIMUM_CAPACITY);
+
+ // allocate the bucket array;
+ if (mappings > 0) {
+ inflateTable(capacity);
+ } else {
+ threshold = capacity;
}
- table = new Entry<?,?>[capacity];
- threshold = (int) Math.min(capacity * loadFactor, MAXIMUM_CAPACITY + 1);
init(); // Give subclass a chance to do its thing.
-
// Read the keys and values, and put the mappings in the HashMap
for (int i=0; i<mappings; i++) {
@SuppressWarnings("unchecked")
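
Editorial note: as with ArrayList, the HashMap bucket table is now allocated lazily. The constructor only parks the requested capacity in threshold, and inflateTable runs on the first put (or during deserialization when there are mappings to read). A sketch of how this can be observed, again assuming reflective access to the private table field named in the patch; class and variable names are illustrative.

    import java.lang.reflect.Field;
    import java.util.HashMap;

    public class LazyTableDemo {
        public static void main(String[] args) throws Exception {
            HashMap<String, String> map = new HashMap<>(100);

            Field table = HashMap.class.getDeclaredField("table");
            table.setAccessible(true);

            // Before the first put the map still shares EMPTY_TABLE (length 0);
            // the requested capacity of 100 sits in the threshold field.
            System.out.println("buckets before put: "
                    + ((Object[]) table.get(map)).length);

            map.put("k", "v");

            // The first put calls inflateTable(threshold), which rounds 100 up to 128.
            System.out.println("buckets after put:  "
                    + ((Object[]) table.get(map)).length);
        }
    }

Serializing the map before the first put would now write roundUpToPowerOf2(100) = 128 as the bucket count, which is the case the EMPTY_TABLE branch in writeObject above handles.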
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/jdk/test/java/util/Map/BasicSerialization.java Wed Apr 10 12:43:18 2013 -0700
@@ -0,0 +1,221 @@
+/*
+ * Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/*
+ * @test
+ * @bug 8011200
+ * @run testng BasicSerialization
+ * @summary Ensure Maps can be serialized and deserialized.
+ * @author Mike Duigou
+ */
+import java.io.ByteArrayOutputStream;
+import java.io.InputStream;
+import java.io.IOException;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
+import java.io.ByteArrayInputStream;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.Method;
+import java.util.*;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentSkipListMap;
+
+import org.testng.annotations.Test;
+import org.testng.annotations.DataProvider;
+import static org.testng.Assert.fail;
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertTrue;
+import static org.testng.Assert.assertFalse;
+import static org.testng.Assert.assertSame;
+
+public class BasicSerialization {
+
+ enum IntegerEnum {
+
+ e0, e1, e2, e3, e4, e5, e6, e7, e8, e9,
+ e10, e11, e12, e13, e14, e15, e16, e17, e18, e19,
+ e20, e21, e22, e23, e24, e25, e26, e27, e28, e29,
+ e30, e31, e32, e33, e34, e35, e36, e37, e38, e39,
+ e40, e41, e42, e43, e44, e45, e46, e47, e48, e49,
+ e50, e51, e52, e53, e54, e55, e56, e57, e58, e59,
+ e60, e61, e62, e63, e64, e65, e66, e67, e68, e69,
+ e70, e71, e72, e73, e74, e75, e76, e77, e78, e79,
+ e80, e81, e82, e83, e84, e85, e86, e87, e88, e89,
+ e90, e91, e92, e93, e94, e95, e96, e97, e98, e99,
+ EXTRA_KEY;
+ public static final int SIZE = values().length;
+ };
+ private static final int TEST_SIZE = IntegerEnum.SIZE - 1;
+ /**
+ * Realized keys ensure that there is always a hard ref to all test objects.
+ */
+ private static final IntegerEnum[] KEYS = new IntegerEnum[TEST_SIZE];
+ /**
+ * Realized values ensure that there is always a hard ref to all test
+ * objects.
+ */
+ private static final String[] VALUES = new String[TEST_SIZE];
+
+ static {
+ IntegerEnum[] keys = IntegerEnum.values();
+ for (int each = 0; each < TEST_SIZE; each++) {
+ KEYS[each] = keys[each];
+ VALUES[each] = keys[each].name();
+ }
+ }
+ private static final IntegerEnum EXTRA_KEY = IntegerEnum.EXTRA_KEY;
+ private static final String EXTRA_VALUE = IntegerEnum.EXTRA_KEY.name();
+
+ public static <K, V> Map<K, V> mapClone(Map<K, V> map) {
+ Method cloneMethod;
+
+ try {
+ cloneMethod = map.getClass().getMethod("clone", new Class[]{});
+ } catch (NoSuchMethodException | SecurityException all) {
+ cloneMethod = null;
+ }
+
+ if (null != cloneMethod) {
+ try {
+ Map<K, V> result = (Map<K, V>)cloneMethod.invoke(map, new Object[]{});
+ return result;
+ } catch (Exception all) {
+ fail("clone() failed " + map.getClass().getSimpleName(), all);
+ return null;
+ }
+ } else {
+ Constructor<? extends Map> copyConstructor;
+ try {
+ copyConstructor = (Constructor<? extends Map>)map.getClass().getConstructor(new Class[]{Map.class});
+
+ Map<K, V> result = (Map<K, V>)copyConstructor.newInstance(new Object[]{map});
+
+ return result;
+ } catch (Exception all) {
+ return serialClone(map);
+ }
+ }
+ }
+
+ @Test(dataProvider = "Map<IntegerEnum,String>")
+ public void testSerialization(String description, Map<IntegerEnum, String> map) {
+
+ Map<IntegerEnum, String> clone = mapClone(map);
+ Map<IntegerEnum, String> serialClone = serialClone(map);
+
+ assertEquals(map, map, description + ":should equal self");
+ assertEquals(clone, map, description + ":should equal clone");
+ assertEquals(map, clone, description + ": should equal original map");
+ assertEquals(serialClone, map, description + ": should equal deserialized clone");
+ assertEquals(map, serialClone, description + ": should equal original map");
+ assertEquals(serialClone, clone, description + ": deserialized clone should equal clone");
+ assertEquals(clone, serialClone, description + ": clone should equal deserialized clone");
+
+ assertFalse(map.containsKey(EXTRA_KEY), description + ":unexpected key");
+ assertFalse(clone.containsKey(EXTRA_KEY), description + ":unexpected key");
+ assertFalse(serialClone.containsKey(EXTRA_KEY), description + ":unexpected key");
+ map.put(EXTRA_KEY, EXTRA_VALUE);
+ clone.put(EXTRA_KEY, EXTRA_VALUE);
+ serialClone.put(EXTRA_KEY, EXTRA_VALUE);
+ assertTrue(map.containsKey(EXTRA_KEY), description + ":missing key");
+ assertTrue(clone.containsKey(EXTRA_KEY), description + ":missing key");
+ assertTrue(serialClone.containsKey(EXTRA_KEY), description + ":missing key");
+ assertSame(map.get(EXTRA_KEY), EXTRA_VALUE, description + ":wrong value");
+ assertSame(clone.get(EXTRA_KEY), EXTRA_VALUE, description + ":wrong value");
+ assertSame(serialClone.get(EXTRA_KEY), EXTRA_VALUE, description + ":wrong value");
+
+ assertEquals(map, map, description + ":should equal self");
+ assertEquals(clone, map, description + ":should equal clone");
+ assertEquals(map, clone, description + ": should equal original map");
+ assertEquals(serialClone, map, description + ": should equal deserialized clone");
+ assertEquals(map, serialClone, description + ": should equal original map");
+ assertEquals(serialClone, clone, description + ": deserialized clone should equal clone");
+ assertEquals(clone, serialClone, description + ": clone should equal deserialized clone");
+ }
+
+ static byte[] serializedForm(Object obj) {
+ try {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ new ObjectOutputStream(baos).writeObject(obj);
+ return baos.toByteArray();
+ } catch (IOException e) {
+ fail("Unexpected Exception", e);
+ return null;
+ }
+ }
+
+ static Object readObject(byte[] bytes) throws IOException, ClassNotFoundException {
+ InputStream is = new ByteArrayInputStream(bytes);
+ return new ObjectInputStream(is).readObject();
+ }
+
+ @SuppressWarnings("unchecked")
+ static <T> T serialClone(T obj) {
+ try {
+ return (T)readObject(serializedForm(obj));
+ } catch (IOException | ClassNotFoundException e) {
+ fail("Unexpected Exception", e);
+ return null;
+ }
+ }
+
+ @DataProvider(name = "Map<IntegerEnum,String>", parallel = true)
+ private static Iterator<Object[]> makeMaps() {
+ return Arrays.asList(
+ // empty
+ new Object[]{"HashMap", new HashMap()},
+ new Object[]{"LinkedHashMap", new LinkedHashMap()},
+ new Object[]{"Collections.checkedMap(HashMap)", Collections.checkedMap(new HashMap(), IntegerEnum.class, String.class)},
+ new Object[]{"Collections.synchronizedMap(HashMap)", Collections.synchronizedMap(new HashMap())},
+ // null hostile
+ new Object[]{"EnumMap", new EnumMap(IntegerEnum.class)},
+ new Object[]{"Hashtable", new Hashtable()},
+ new Object[]{"TreeMap", new TreeMap()},
+ new Object[]{"ConcurrentHashMap", new ConcurrentHashMap()},
+ new Object[]{"ConcurrentSkipListMap", new ConcurrentSkipListMap()},
+ new Object[]{"Collections.checkedMap(ConcurrentHashMap)", Collections.checkedMap(new ConcurrentHashMap(), IntegerEnum.class, String.class)},
+ new Object[]{"Collections.synchronizedMap(EnumMap)", Collections.synchronizedMap(new EnumMap(IntegerEnum.class))},
+ // filled
+ new Object[]{"HashMap", fillMap(new HashMap())},
+ new Object[]{"LinkedHashMap", fillMap(new LinkedHashMap())},
+ new Object[]{"Collections.checkedMap(HashMap)", Collections.checkedMap(fillMap(new HashMap()), IntegerEnum.class, String.class)},
+ new Object[]{"Collections.synchronizedMap(HashMap)", Collections.synchronizedMap(fillMap(new HashMap()))},
+ // null hostile
+ new Object[]{"EnumMap", fillMap(new EnumMap(IntegerEnum.class))},
+ new Object[]{"Hashtable", fillMap(new Hashtable())},
+ new Object[]{"TreeMap", fillMap(new TreeMap())},
+ new Object[]{"ConcurrentHashMap", fillMap(new ConcurrentHashMap())},
+ new Object[]{"ConcurrentSkipListMap", fillMap(new ConcurrentSkipListMap())},
+ new Object[]{"Collections.checkedMap(ConcurrentHashMap)", Collections.checkedMap(fillMap(new ConcurrentHashMap()), IntegerEnum.class, String.class)},
+ new Object[]{"Collections.synchronizedMap(EnumMap)", Collections.synchronizedMap(fillMap(new EnumMap(IntegerEnum.class)))}).iterator();
+ }
+
+ private static Map<IntegerEnum, String> fillMap(Map<IntegerEnum, String> result) {
+ for (int each = 0; each < TEST_SIZE; each++) {
+ result.put(KEYS[each], VALUES[each]);
+ }
+
+ return result;
+ }
+}