diff --git a/spring-core/src/main/java/org/springframework/util/ConcurrentReferenceHashMap.java b/spring-core/src/main/java/org/springframework/util/ConcurrentReferenceHashMap.java
new file mode 100644
index 00000000000..2dc03b5076f
--- /dev/null
+++ b/spring-core/src/main/java/org/springframework/util/ConcurrentReferenceHashMap.java
@@ -0,0 +1,1002 @@
+/*
+ * Copyright 2002-2012 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.util;
+
+import java.lang.ref.ReferenceQueue;
+import java.lang.ref.SoftReference;
+import java.lang.ref.WeakReference;
+import java.lang.reflect.Array;
+import java.util.AbstractMap;
+import java.util.AbstractSet;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.NoSuchElementException;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+import java.util.concurrent.locks.ReentrantLock;
+
+/**
+ * A {@link ConcurrentHashMap} that uses {@link ReferenceType#SOFT soft} or
+ * {@link ReferenceType#WEAK weak} references for both {@code keys} and {@code values}.
+ *
+ *
+ * <p>This class can be used as an alternative to
+ * {@code Collections.synchronizedMap(new WeakHashMap<K, Reference<V>>())} in order to
+ * support better performance when accessed concurrently. This implementation follows the
+ * same design constraints as {@link ConcurrentHashMap} with the exception that
+ * {@code null} values and {@code null} keys are supported.
+ *
+ * <p><b>NOTE:</b> The use of references means that there is no guarantee that items
+ * placed into the map will be subsequently available. The garbage collector may discard
+ * references at any time, so it may appear that an unknown thread is silently removing
+ * entries.
+ *
+ *
+ * <p>If not explicitly specified this implementation will use
+ * {@link SoftReference soft entry references}.
+ *
+ * @param <K> the key type
+ * @param <V> the value type
+ * @author Phillip Webb
+ * @since 3.2
+ */
+public class ConcurrentReferenceHashMap extends AbstractMap implements
+ ConcurrentMap {
+
+ private static final int DEFAULT_INITIAL_CAPACITY = 16;
+
+ private static final int DEFAULT_CONCURRENCY_LEVEL = 16;
+
+ private static final float DEFAULT_LOAD_FACTOR = 0.75f;
+
+ private static final ReferenceType DEFAULT_REFERENCE_TYPE = ReferenceType.SOFT;
+
+ private static final int MAXIMUM_CONCURRENCY_LEVEL = 1 << 16;
+
+ private static final int MAXIMUM_SEGMENT_SIZE = 1 << 30;
+
+
+ /**
+ * Array of segment indexed using the high order bits from the hash.
+ */
+ private final Segment[] segments;
+
+ /**
+ * When the average number of references per table exceeds this value resize will be attempted.
+ */
+ private final float loadFactor;
+
+ private final ReferenceType referenceType;
+
+ /**
+ * The shift value used to calculate the size of the segments array and an index from the hash.
+ */
+ private final int shift;
+
+ /**
+ * Late binding entry set.
+ */
+ private Set> entrySet;
+
+
+ /**
+ * Create a new {@link ConcurrentReferenceHashMap} instance.
+ */
+ public ConcurrentReferenceHashMap() {
+ this(DEFAULT_INITIAL_CAPACITY, DEFAULT_LOAD_FACTOR, DEFAULT_CONCURRENCY_LEVEL,
+ DEFAULT_REFERENCE_TYPE);
+ }
+
+ /**
+ * Create a new {@link ConcurrentReferenceHashMap} instance.
+ * @param initialCapacity the initial capacity of the map
+ */
+ public ConcurrentReferenceHashMap(int initialCapacity) {
+ this(initialCapacity, DEFAULT_LOAD_FACTOR, DEFAULT_CONCURRENCY_LEVEL,
+ DEFAULT_REFERENCE_TYPE);
+ }
+
+ /**
+ * Create a new {@link ConcurrentReferenceHashMap} instance.
+ * @param initialCapacity the initial capacity of the map
+ * @param loadFactor the load factor. When the average number of references per table
+ * exceeds this value resize will be attempted
+ */
+ public ConcurrentReferenceHashMap(int initialCapacity, float loadFactor) {
+ this(initialCapacity, loadFactor, DEFAULT_CONCURRENCY_LEVEL,
+ DEFAULT_REFERENCE_TYPE);
+ }
+
+ /**
+ * Create a new {@link ConcurrentReferenceHashMap} instance.
+ * @param initialCapacity the initial capacity of the map
+ * @param concurrencyLevel the expected number of threads that will concurrently write
+ * to the map
+ */
+ public ConcurrentReferenceHashMap(int initialCapacity, int concurrencyLevel) {
+ this(initialCapacity, DEFAULT_LOAD_FACTOR, concurrencyLevel,
+ DEFAULT_REFERENCE_TYPE);
+ }
+
+ /**
+ * Create a new {@link ConcurrentReferenceHashMap} instance.
+ * @param initialCapacity the initial capacity of the map
+ * @param loadFactor the load factor. When the average number of references per table
+ * exceeds this value resize will be attempted
+ * @param concurrencyLevel the expected number of threads that will concurrently write
+ * to the map
+ */
+ public ConcurrentReferenceHashMap(int initialCapacity, float loadFactor,
+ int concurrencyLevel) {
+ this(initialCapacity, loadFactor, concurrencyLevel, DEFAULT_REFERENCE_TYPE);
+ }
+
+ /**
+ * Create a new {@link ConcurrentReferenceHashMap} instance.
+ * @param initialCapacity the initial capacity of the map
+ * @param loadFactor the load factor. When the average number of references per table
+ * exceeds this value resize will be attempted
+ * @param concurrencyLevel the expected number of threads that will concurrently write
+ * to the map
+ * @param referenceType the reference type used for entries
+ */
+ public ConcurrentReferenceHashMap(int initialCapacity, float loadFactor,
+ int concurrencyLevel, ReferenceType referenceType) {
+ Assert.isTrue(concurrencyLevel > 0, "ConcurrencyLevel must be positive");
+ Assert.isTrue(initialCapacity >= 0, "InitialCapactity must not be negative");
+ Assert.isTrue(loadFactor > 0f, "LoadFactor must be positive");
+ Assert.notNull(referenceType, "Reference type must not be null");
+ this.loadFactor = loadFactor;
+ this.shift = calculateShift(concurrencyLevel, MAXIMUM_CONCURRENCY_LEVEL);
+ int size = 1 << this.shift;
+ this.referenceType = referenceType;
+ int roundedUpSegmentCapactity = (int) ((initialCapacity + size - 1L) / size);
+ this.segments = createSegmentsArray(size);
+ for (int i = 0; i < this.segments.length; i++) {
+ this.segments[i] = new Segment(roundedUpSegmentCapactity);
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ private Segment[] createSegmentsArray(int size) {
+ return (Segment[]) Array.newInstance(Segment.class, size);
+ }
+
+
+ protected final float getLoadFactor() {
+ return this.loadFactor;
+ }
+
+ protected final int getSegmentsSize() {
+ return this.segments.length;
+ }
+
+ protected final Segment getSegment(int index) {
+ return this.segments[index];
+ }
+
+ /**
+ * Factory method that returns the {@link ReferenceManager}. This method will be
+ * called once for each {@link Segment}.
+ * @return a new reference manager
+ */
+ protected ReferenceManager createReferenceManager() {
+ return new ReferenceManager();
+ }
+
+ /**
+ * Get the hash for a given object, apply an additional hash function to reduce
+ * collisions. This implementation uses the same Wang/Jenkins algorithm as
+ * {@link ConcurrentHashMap}. Subclasses can override to provide alternative hashing.
+ * @param o the object to hash (may be null)
+ * @return the resulting hash code
+ */
+ protected int getHash(Object o) {
+ int hash = o == null ? 0 : o.hashCode();
+ hash += (hash << 15) ^ 0xffffcd7d;
+ hash ^= (hash >>> 10);
+ hash += (hash << 3);
+ hash ^= (hash >>> 6);
+ hash += (hash << 2) + (hash << 14);
+ hash ^= (hash >>> 16);
+ return hash;
+ }
+
+ @Override
+ public V get(Object key) {
+ Reference reference = getReference(key, Restructure.WHEN_NECESSARY);
+ Entry entry = (reference == null ? null : reference.get());
+ return (entry == null ? null : entry.getValue());
+ }
+
+ @Override
+ public boolean containsKey(Object key) {
+ Reference reference = getReference(key, Restructure.WHEN_NECESSARY);
+ Entry entry = (reference == null ? null : reference.get());
+ return (entry != null && ObjectUtils.nullSafeEquals(entry.getKey(), key));
+ }
+
+ /**
+ * Returns a {@link Reference} to the {@link Entry} for the specified {@code key} or
+ * {@code null} if not found.
+ * @param key the key (can be {@code null})
+ * @param restructure types of restructure allowed during this call
+ * @return the reference or {@code null}
+ */
+ protected final Reference getReference(Object key, Restructure restructure) {
+ int hash = getHash(key);
+ return getSegmentForHash(hash).getReference(key, hash, restructure);
+ }
+
+ @Override
+ public V put(K key, V value) {
+ return put(key, value, true);
+ }
+
+ public V putIfAbsent(K key, V value) {
+ return put(key, value, false);
+ }
+
+ private V put(final K key, final V value, final boolean overwriteExisting) {
+ return doTask(key, new Task(TaskOption.RESTRUCTURE_BEFORE, TaskOption.RESIZE) {
+ @Override
+ protected V execute(Reference reference, Entry entry, Entries entries) {
+ if (entry != null) {
+ V previousValue = entry.getValue();
+ if (overwriteExisting) {
+ entry.setValue(value);
+ }
+ return previousValue;
+ }
+ entries.add(value);
+ return null;
+ }
+ });
+ }
+
+ @Override
+ public V remove(Object key) {
+ return doTask(key, new Task(TaskOption.RESTRUCTURE_AFTER, TaskOption.SKIP_IF_EMPTY) {
+ @Override
+ protected V execute(Reference reference, Entry entry) {
+ if (entry != null) {
+ reference.release();
+ return entry.value;
+ }
+ return null;
+ }
+ });
+ }
+
+ public boolean remove(Object key, final Object value) {
+ return doTask(key, new Task(TaskOption.RESTRUCTURE_AFTER, TaskOption.SKIP_IF_EMPTY) {
+ @Override
+ protected Boolean execute(Reference reference, Entry entry) {
+ if (entry != null && ObjectUtils.nullSafeEquals(entry.getValue(), value)) {
+ reference.release();
+ return true;
+ }
+ return false;
+ }
+ });
+ }
+
+ public boolean replace(K key, final V oldValue, final V newValue) {
+ return doTask(key, new Task(TaskOption.RESTRUCTURE_BEFORE, TaskOption.SKIP_IF_EMPTY) {
+ @Override
+ protected Boolean execute(Reference reference, Entry entry) {
+ if (entry != null && ObjectUtils.nullSafeEquals(entry.getValue(), oldValue)) {
+ entry.setValue(newValue);
+ return true;
+ }
+ return false;
+ }
+ });
+ }
+
+ public V replace(K key, final V value) {
+ return doTask(key, new Task(TaskOption.RESTRUCTURE_BEFORE, TaskOption.SKIP_IF_EMPTY) {
+ @Override
+ protected V execute(Reference reference, Entry entry) {
+ if (entry != null) {
+ V previousValue = entry.getValue();
+ entry.setValue(value);
+ return previousValue;
+ }
+ return null;
+ }
+ });
+ }
+
+ @Override
+ public void clear() {
+ for (Segment segment : this.segments) {
+ segment.clear();
+ }
+ }
+
+ @Override
+ public int size() {
+ int size = 0;
+ for (Segment segment : this.segments) {
+ size += segment.getCount();
+ }
+ return size;
+ }
+
+ @Override
+ public Set> entrySet() {
+ if (this.entrySet == null) {
+ this.entrySet = new EntrySet();
+ }
+ return this.entrySet;
+ }
+
+ private T doTask(Object key, Task task) {
+ int hash = getHash(key);
+ return getSegmentForHash(hash).doTask(hash, key, task);
+ }
+
+ private Segment getSegmentForHash(int hash) {
+ return this.segments[(hash >>> (32 - this.shift)) & (this.segments.length - 1)];
+ }
+
+	/**
+	 * Calculate a shift value that can be used to create a power-of-two value between
+	 * the specified maximum and minimum values.
+	 * @param minimumValue the minimum value
+	 * @param maximumValue the maximum value
+	 * @return the calculated shift (use {@code 1 << shift} to obtain a value)
+	 */
+	protected static int calculateShift(int minimumValue, int maximumValue) {
+		int shift = 0;
+		int value = 1;
+		// Double until we reach the requested minimum, stopping early at the
+		// supplied maximum. NOTE: the previous loop tested minimumValue twice
+		// ("value < minimuxmValue && value < minimuxmValue"), so the maximum
+		// bound (e.g. MAXIMUM_SEGMENT_SIZE) was silently never enforced.
+		while (value < minimumValue && value < maximumValue) {
+			value <<= 1;
+			shift++;
+		}
+		return shift;
+	}
+
+
+ /**
+ * Various reference types supported by this map.
+ */
+ public static enum ReferenceType {
+
+ /**
+ * Use {@link SoftReference}s.
+ */
+ SOFT,
+
+ /**
+ * Use {@link WeakReference}s.
+ */
+ WEAK;
+ }
+
+
+ /**
+ * A single segment used to divide the map to allow better concurrent performance.
+ */
+ protected final class Segment extends ReentrantLock {
+
+ private final ReferenceManager referenceManager;
+
+ private final int initialSize;
+
+ /**
+ * Array of references indexed using the low order bits from the hash. This
+ * property should only be set via {@link #setReferences(Reference[])} to ensure
+ * that the resizeThreshold is maintained.
+ */
+ private volatile Reference[] references;
+
+ /**
+ * The total number of references contained in this segment. This includes chained
+ * references and references that have been garbage collected but not purged.
+ */
+ private volatile int count = 0;
+
+ /**
+ * The threshold when resizing of the references should occur. When {@code count}
+ * exceeds this value references will be resized.
+ */
+ private int resizeThreshold;
+
+
+ public Segment(int initialCapacity) {
+ this.referenceManager = createReferenceManager();
+ this.initialSize = 1 << calculateShift(initialCapacity, MAXIMUM_SEGMENT_SIZE);
+ setReferences(createReferenceArray(this.initialSize));
+ }
+
+
+ public Reference getReference(Object key, int hash, Restructure restructure) {
+ if (restructure == Restructure.WHEN_NECESSARY) {
+ restructureIfNecessary(false);
+ }
+ if (this.count == 0) {
+ return null;
+ }
+ // Use a local copy to protect against other threads writing
+ Reference[] references = this.references;
+ int index = getIndex(hash, references);
+ Reference head = references[index];
+ return findInChain(head, key, hash);
+ }
+
+ /**
+ * Apply an update operation to this segment. The segment will be locked
+ * during update.
+ * @param hash the hash of the key
+ * @param key the key
+ * @param task the update operation
+ * @return the result of the operation
+ */
+ public T doTask(final int hash, final Object key, final Task task) {
+
+ boolean resize = task.hasOption(TaskOption.RESIZE);
+
+ if (task.hasOption(TaskOption.RESTRUCTURE_BEFORE)) {
+ restructureIfNecessary(resize);
+ }
+
+ if (task.hasOption(TaskOption.SKIP_IF_EMPTY) && (this.count == 0)) {
+ return task.execute(null, null, null);
+ }
+
+ lock();
+ try {
+ final int index = getIndex(hash, this.references);
+ final Reference head = this.references[index];
+ Reference reference = findInChain(head, key, hash);
+ Entry entry = (reference == null ? null : reference.get());
+ Entries entries = new Entries() {
+ @Override
+ public void add(V value) {
+ @SuppressWarnings("unchecked")
+ Entry newEntry = new Entry((K)key, value);
+ Reference newReference = Segment.this.referenceManager.createReference(newEntry, hash, head);
+ Segment.this.references[index] = newReference;
+ Segment.this.count++;
+ }
+ };
+ return task.execute(reference, entry, entries);
+ } finally {
+ unlock();
+ if (task.hasOption(TaskOption.RESTRUCTURE_AFTER)) {
+ restructureIfNecessary(resize);
+ }
+ }
+ }
+
+ /**
+ * Clear all items from this segment.
+ */
+ public void clear() {
+ if (this.count == 0) {
+ return;
+ }
+ lock();
+ try {
+ setReferences(createReferenceArray(this.initialSize));
+ this.count = 0;
+ } finally {
+ unlock();
+ }
+ }
+
+ /**
+ * Restructure the underlying data structure when it becomes necessary. This
+ * method can increase the size of the references table as well as purge any
+ * references that have been garbage collected.
+ * @param allowResize if resizing is permitted
+ */
+ private void restructureIfNecessary(boolean allowResize) {
+ boolean needsResize = ((this.count > 0) && (this.count >= this.resizeThreshold));
+ Reference reference = this.referenceManager.pollForPurge();
+ if ((reference != null) || (needsResize && allowResize)) {
+ lock();
+ try {
+ int countAfterRestructure = this.count;
+
+ Set> toPurge = Collections.emptySet();
+ if (reference != null) {
+ toPurge = new HashSet>();
+ while (reference != null) {
+ toPurge.add(reference);
+ reference = this.referenceManager.pollForPurge();
+ }
+ }
+ countAfterRestructure -= toPurge.size();
+
+ // Recalculate taking into account count inside lock and items that
+ // will be purged
+ needsResize = ((countAfterRestructure > 0) && (countAfterRestructure >= this.resizeThreshold));
+ boolean resizing = false;
+ int restructureSize = this.references.length;
+ if (allowResize && needsResize && (restructureSize < MAXIMUM_SEGMENT_SIZE)) {
+ restructureSize <<= 1;
+ resizing = true;
+ }
+
+ // Either create a new table or reuse the existing one
+ Reference[] restructured = (resizing ? createReferenceArray(restructureSize) : this.references);
+
+ // Restructure
+ for (int i = 0; i < this.references.length; i++) {
+ reference = this.references[i];
+ if (!resizing) {
+ restructured[i] = null;
+ }
+ while (reference != null) {
+ if (!toPurge.contains(reference)) {
+ int index = getIndex(reference.getHash(), restructured);
+ restructured[index] = this.referenceManager.createReference(
+ reference.get(), reference.getHash(),
+ restructured[index]);
+ }
+ reference = reference.getNext();
+ }
+ }
+
+ // Replace volatile members
+ if (resizing) {
+ setReferences(restructured);
+ }
+ this.count = countAfterRestructure;
+ } finally {
+ unlock();
+ }
+ }
+ }
+
+ private Reference findInChain(Reference reference, Object key,
+ int hash) {
+ while (reference != null) {
+ if (reference.getHash() == hash) {
+ Entry entry = reference.get();
+ if (entry != null) {
+ K entryKey = entry.getKey();
+ if (entryKey == key || entryKey.equals(key)) {
+ return reference;
+ }
+ }
+ }
+ reference = reference.getNext();
+ }
+ return null;
+ }
+
+ @SuppressWarnings("unchecked")
+ private Reference[] createReferenceArray(int size) {
+ return (Reference[]) Array.newInstance(Reference.class, size);
+ }
+
+ private int getIndex(int hash, Reference[] references) {
+ return hash & (references.length - 1);
+ }
+
+ /**
+ * Replace the references with a new value, recalculating the resizeThreshold.
+ * @param references the new references
+ */
+ private void setReferences(Reference[] references) {
+ this.references = references;
+ this.resizeThreshold = (int) (references.length * getLoadFactor());
+ }
+
+ /**
+ * @return the size of the current references array
+ */
+ public final int getSize() {
+ return this.references.length;
+ }
+
+ /**
+ * @return the total number of references in this segment
+ */
+ public final int getCount() {
+ return this.count;
+ }
+ }
+
+
+ /**
+ * A reference to an {@link Entry} contained in the map. Implementations are usually
+ * wrappers around specific java reference implementations (eg {@link SoftReference}).
+ */
+ protected static interface Reference {
+
+ /**
+ * Returns the referenced entry or {@code null} if the entry is no longer
+ * available.
+ * @return the entry or {@code null}
+ */
+ Entry get();
+
+ /**
+ * Returns the hash for the reference.
+ * @return the hash
+ */
+ int getHash();
+
+ /**
+ * Returns the next reference in the chain or {@code null}
+ * @return the next reference of {@code null}
+ */
+ Reference getNext();
+
+ /**
+ * Release this entry and ensure that it will be returned from
+ * {@link ReferenceManager#pollForPurge()}.
+ */
+ void release();
+ }
+
+
+ /**
+ * A single map entry.
+ */
+ protected static final class Entry implements Map.Entry {
+
+ private final K key;
+
+ private volatile V value;
+
+ public Entry(K key, V value) {
+ this.key = key;
+ this.value = value;
+ }
+
+ public K getKey() {
+ return this.key;
+ }
+
+ public V getValue() {
+ return this.value;
+ }
+
+ public V setValue(V value) {
+ V previous = this.value;
+ this.value = value;
+ return previous;
+ }
+
+ @Override
+ public String toString() {
+ return this.key + "=" + this.value;
+ }
+
+ @Override
+ @SuppressWarnings("rawtypes")
+ public final boolean equals(Object o) {
+ if (o == this) {
+ return true;
+ }
+ if (o != null && o instanceof Map.Entry) {
+ Map.Entry other = (Map.Entry) o;
+ return ObjectUtils.nullSafeEquals(getKey(), other.getKey())
+ && ObjectUtils.nullSafeEquals(getValue(), other.getValue());
+ }
+ return false;
+ }
+
+ @Override
+ public final int hashCode() {
+ return ObjectUtils.nullSafeHashCode(this.key)
+ ^ ObjectUtils.nullSafeHashCode(this.value);
+ }
+ }
+
+
+ /**
+ * A task that can be {@link Segment#doTask run} against a {@link Segment}.
+ */
+ private abstract class Task {
+
+ private final EnumSet options;
+
+ public Task(TaskOption... options) {
+ this.options = (options.length == 0 ? EnumSet.noneOf(TaskOption.class) : EnumSet.of(options[0], options));
+ }
+
+ public boolean hasOption(TaskOption option) {
+ return this.options.contains(option);
+ }
+
+ /**
+ * Execute the task.
+ * @param reference the found reference or {@code null}
+ * @param entry the found entry or {@code null}
+ * @param entries access to the underlying entries
+ * @return the result of the task
+ * @see #execute(Reference, Entry)
+ */
+ protected T execute(Reference reference, Entry entry, Entries entries) {
+ return execute(reference, entry);
+ }
+
+ /**
+ * Convenience method that can be used for tasks that do not need access to {@link Entries}.
+ * @param reference the found reference or {@code null}
+ * @param entry the found entry or {@code null}
+ * @return the result of the task
+ * @see #execute(Reference, Entry, Entries)
+ */
+ protected T execute(Reference reference, Entry entry) {
+ return null;
+ }
+}
+
+
+ /**
+ * Various options supported by a {@link Task}.
+ */
+ private static enum TaskOption {
+ RESTRUCTURE_BEFORE, RESTRUCTURE_AFTER, SKIP_IF_EMPTY, RESIZE
+ }
+
+
+ /**
+ * Allows a task access to {@link Segment} entries.
+ */
+ private abstract class Entries {
+
+ /**
+ * Add a new entry with the specified value.
+ * @param value the value to add
+ */
+ public abstract void add(V value);
+ }
+
+
+ /**
+ * Internal entry-set implementation.
+ */
+ private class EntrySet extends AbstractSet> {
+
+ @Override
+ public Iterator> iterator() {
+ return new EntryIterator();
+ }
+
+ @Override
+ public boolean contains(Object o) {
+ if (o != null && o instanceof Map.Entry, ?>) {
+ Map.Entry, ?> entry = (java.util.Map.Entry, ?>) o;
+ Reference reference = ConcurrentReferenceHashMap.this.getReference(
+ entry.getKey(), Restructure.NEVER);
+ Entry other = (reference == null ? null : reference.get());
+ if (other != null) {
+ return ObjectUtils.nullSafeEquals(entry.getValue(), other.getValue());
+ }
+ }
+ return false;
+ }
+
+ @Override
+ public boolean remove(Object o) {
+ if (o instanceof Map.Entry, ?>) {
+ Map.Entry, ?> entry = (Map.Entry, ?>) o;
+ return ConcurrentReferenceHashMap.this.remove(entry.getKey(),
+ entry.getValue());
+ }
+ return false;
+ }
+
+ @Override
+ public int size() {
+ return ConcurrentReferenceHashMap.this.size();
+ }
+
+ @Override
+ public void clear() {
+ ConcurrentReferenceHashMap.this.clear();
+ }
+ }
+
+
+ /**
+ * Internal entry iterator implementation.
+ */
+ private class EntryIterator implements Iterator> {
+
+ private int segmentIndex;
+
+ private int referenceIndex;
+
+ private Reference[] references;
+
+ private Reference reference;
+
+ private Entry next;
+
+ private Entry last;
+
+ public EntryIterator() {
+ moveToNextSegment();
+ }
+
+ public boolean hasNext() {
+ getNextIfNecessary();
+ return this.next != null;
+ }
+
+ public Entry next() {
+ getNextIfNecessary();
+ if (this.next == null) {
+ throw new NoSuchElementException();
+ }
+ this.last = this.next;
+ this.next = null;
+ return this.last;
+ }
+
+ private void getNextIfNecessary() {
+ while (this.next == null) {
+ moveToNextReference();
+ if (this.reference == null) {
+ return;
+ }
+ this.next = this.reference.get();
+ }
+ }
+
+ private void moveToNextReference() {
+ if (this.reference != null) {
+ this.reference = this.reference.getNext();
+ }
+ while (this.reference == null && this.references != null) {
+ if (this.referenceIndex >= this.references.length) {
+ moveToNextSegment();
+ this.referenceIndex = 0;
+ }
+ else {
+ this.reference = this.references[this.referenceIndex];
+ this.referenceIndex++;
+ }
+ }
+ }
+
+ private void moveToNextSegment() {
+ this.reference = null;
+ this.references = null;
+ if (this.segmentIndex < ConcurrentReferenceHashMap.this.segments.length) {
+ this.references = ConcurrentReferenceHashMap.this.segments[this.segmentIndex].references;
+ this.segmentIndex++;
+ }
+ }
+
+ public void remove() {
+ Assert.state(this.last != null);
+ ConcurrentReferenceHashMap.this.remove(this.last.getKey());
+ }
+ }
+
+
+ /**
+ * The types of restructure that can be performed.
+ */
+ protected static enum Restructure {
+ WHEN_NECESSARY, NEVER
+ }
+
+
+ /**
+ * Strategy class used to manage {@link Reference}s. This class can be overridden if
+ * alternative reference types need to be supported.
+ */
+ protected class ReferenceManager {
+
+ private final ReferenceQueue> queue = new ReferenceQueue>();
+
+ /**
+ * Factory method used to create a new {@link Reference}.
+ * @param entry the entry contained in the reference
+ * @param hash the hash
+ * @param next the next reference in the chain or {@code null}
+ * @return a new {@link Reference}
+ */
+ public Reference createReference(Entry entry, int hash,
+ Reference next) {
+ if (ConcurrentReferenceHashMap.this.referenceType == ReferenceType.WEAK) {
+ return new WeakEntryReference(entry, hash, next, this.queue);
+ }
+ return new SoftEntryReference(entry, hash, next, this.queue);
+ }
+
+ /**
+ * Return any reference that has been garbage collected and can be purged from the
+ * underlying structure or {@code null} if no references need purging. This
+ * method must be thread safe and ideally should not block when returning
+ * {@code null}. References should be returned once and only once.
+ * @return a reference to purge or {@code null}
+ */
+ @SuppressWarnings("unchecked")
+ public Reference pollForPurge() {
+ return (Reference) this.queue.poll();
+ }
+ }
+
+
+ /**
+ * Internal {@link Reference} implementation for {@link SoftReference}s.
+ */
+ private static final class SoftEntryReference extends
+ SoftReference> implements Reference {
+
+ private final int hash;
+
+ private final Reference nextReference;
+
+ public SoftEntryReference(Entry entry, int hash, Reference next,
+ ReferenceQueue> queue) {
+ super(entry, queue);
+ this.hash = hash;
+ this.nextReference = next;
+ }
+
+ public int getHash() {
+ return this.hash;
+ }
+
+ public Reference getNext() {
+ return this.nextReference;
+ }
+
+ public void release() {
+ enqueue();
+ clear();
+ }
+ }
+
+
+ /**
+ * Internal {@link Reference} implementation for {@link WeakReference}s.
+ */
+ private static final class WeakEntryReference extends
+ WeakReference> implements Reference {
+
+ private final int hash;
+
+ private final Reference nextReference;
+
+ public WeakEntryReference(Entry entry, int hash, Reference next,
+ ReferenceQueue> queue) {
+ super(entry, queue);
+ this.hash = hash;
+ this.nextReference = next;
+ }
+
+ public int getHash() {
+ return this.hash;
+ }
+
+ public Reference getNext() {
+ return this.nextReference;
+ }
+
+ public void release() {
+ enqueue();
+ clear();
+ }
+ }
+}
diff --git a/spring-core/src/test/java/org/springframework/util/ConcurrentReferenceHashMapTests.java b/spring-core/src/test/java/org/springframework/util/ConcurrentReferenceHashMapTests.java
new file mode 100644
index 00000000000..a3d5867d853
--- /dev/null
+++ b/spring-core/src/test/java/org/springframework/util/ConcurrentReferenceHashMapTests.java
@@ -0,0 +1,674 @@
+/*
+ * Copyright 2002-2012 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.springframework.util;
+
+import static org.hamcrest.Matchers.*;
+import static org.junit.Assert.*;
+
+import java.lang.ref.WeakReference;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.WeakHashMap;
+
+import org.junit.Ignore;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.springframework.util.ConcurrentReferenceHashMap.Entry;
+import org.springframework.util.ConcurrentReferenceHashMap.Reference;
+import org.springframework.util.ConcurrentReferenceHashMap.Restructure;
+import org.springframework.util.comparator.ComparableComparator;
+import org.springframework.util.comparator.NullSafeComparator;
+
+/**
+ * Tests for {@link ConcurrentReferenceHashMap}.
+ * @author Phillip Webb
+ */
+public class ConcurrentReferenceHashMapTests {
+
+ private static final Comparator super String> NULL_SAFE_STRING_SORT = new NullSafeComparator(
+ new ComparableComparator(), true);
+
+ @Rule
+ public ExpectedException thrown = ExpectedException.none();
+
+ private TestWeakConcurrentCache map = new TestWeakConcurrentCache();
+
+ @Test
+ public void shouldCreateWithDefaults() throws Exception {
+ ConcurrentReferenceHashMap map = new ConcurrentReferenceHashMap();
+ assertThat(map.getSegmentsSize(), is(16));
+ assertThat(map.getSegment(0).getSize(), is(1));
+ assertThat(map.getLoadFactor(), is(0.75f));
+ }
+
+ @Test
+ public void shouldCreateWithInitialCapacity() throws Exception {
+ ConcurrentReferenceHashMap map = new ConcurrentReferenceHashMap(
+ 32);
+ assertThat(map.getSegmentsSize(), is(16));
+ assertThat(map.getSegment(0).getSize(), is(2));
+ assertThat(map.getLoadFactor(), is(0.75f));
+ }
+
+ @Test
+ public void shouldCreateWithInitialCapacityAndLoadFactor() throws Exception {
+ ConcurrentReferenceHashMap map = new ConcurrentReferenceHashMap(
+ 32, 0.5f);
+ assertThat(map.getSegmentsSize(), is(16));
+ assertThat(map.getSegment(0).getSize(), is(2));
+ assertThat(map.getLoadFactor(), is(0.5f));
+ }
+
+ @Test
+ public void shouldCreateWithInitialCapacityAndConcurrenyLevel() throws Exception {
+ ConcurrentReferenceHashMap map = new ConcurrentReferenceHashMap(
+ 16, 2);
+ assertThat(map.getSegmentsSize(), is(2));
+ assertThat(map.getSegment(0).getSize(), is(8));
+ assertThat(map.getLoadFactor(), is(0.75f));
+ }
+
+ @Test
+ public void shouldCreateFullyCustom() throws Exception {
+ ConcurrentReferenceHashMap map = new ConcurrentReferenceHashMap(
+ 5, 0.5f, 3);
+ // concurrencyLevel of 3 ends up as 4 (nearest power of 2)
+ assertThat(map.getSegmentsSize(), is(4));
+ // initialCapacity is 5/4 (rounded up, to nearest power of 2)
+ assertThat(map.getSegment(0).getSize(), is(2));
+ assertThat(map.getLoadFactor(), is(0.5f));
+ }
+
+ @Test
+ public void shouldNeedPositiveConcurrenyLevel() throws Exception {
+ new ConcurrentReferenceHashMap(1, 1);
+ this.thrown.expect(IllegalArgumentException.class);
+ this.thrown.expectMessage("ConcurrencyLevel must be positive");
+ new TestWeakConcurrentCache(1, 0);
+ }
+
+ @Test
+ public void shouldNeedNonNegativeInitialCapacity() throws Exception {
+ new ConcurrentReferenceHashMap(0, 1);
+ this.thrown.expect(IllegalArgumentException.class);
+ this.thrown.expectMessage("InitialCapactity must not be negative");
+ new TestWeakConcurrentCache(-1, 1);
+ }
+
+ @Test
+ public void shouldNeedPositiveLoadFactor() throws Exception {
+ new ConcurrentReferenceHashMap(0, 0.1f, 1);
+ this.thrown.expect(IllegalArgumentException.class);
+ this.thrown.expectMessage("LoadFactor must be positive");
+ new TestWeakConcurrentCache(0, 0.0f, 1);
+ }
+
+ @Test
+ public void shouldPutAndGet() throws Exception {
+ // NOTE we are using mock references so we don't need to worry about GC
+ assertThat(this.map.size(), is(0));
+ this.map.put(123, "123");
+ assertThat(this.map.get(123), is("123"));
+ assertThat(this.map.size(), is(1));
+ this.map.put(123, "123b");
+ assertThat(this.map.size(), is(1));
+ this.map.put(123, null);
+ assertThat(this.map.size(), is(1));
+ }
+
+ @Test
+ public void shouldReplaceOnDoublePut() throws Exception {
+ this.map.put(123, "321");
+ this.map.put(123, "123");
+ assertThat(this.map.get(123), is("123"));
+ }
+
+ @Test
+ public void shouldPutNullKey() throws Exception {
+ this.map.put(null, "123");
+ assertThat(this.map.get(null), is("123"));
+ }
+
+ @Test
+ public void shouldPutNullValue() throws Exception {
+ this.map.put(123, "321");
+ this.map.put(123, null);
+ assertThat(this.map.get(123), is(nullValue()));
+ }
+
+ @Test
+ public void shouldGetWithNoItems() throws Exception {
+ assertThat(this.map.get(123), is(nullValue()));
+ }
+
+ @Test
+ public void shouldApplySupplimentalHash() throws Exception {
+ Integer key = 123;
+ this.map.put(key, "123");
+ assertThat(this.map.getSupplimentalHash(), is(not(key.hashCode())));
+ assertThat(this.map.getSupplimentalHash() >> 30 & 0xFF, is(not(0)));
+ }
+
+ @Test
+ public void shouldGetFollowingNexts() throws Exception {
+ // Use loadFactor to disable resize
+ this.map = new TestWeakConcurrentCache(1, 10.0f, 1);
+ this.map.put(1, "1");
+ this.map.put(2, "2");
+ this.map.put(3, "3");
+ assertThat(this.map.getSegment(0).getSize(), is(1));
+ assertThat(this.map.get(1), is("1"));
+ assertThat(this.map.get(2), is("2"));
+ assertThat(this.map.get(3), is("3"));
+ assertThat(this.map.get(4), is(nullValue()));
+ }
+
+ @Test
+ public void shouldResize() throws Exception {
+ this.map = new TestWeakConcurrentCache(1, 0.75f, 1);
+ this.map.put(1, "1");
+ assertThat(this.map.getSegment(0).getSize(), is(1));
+ assertThat(this.map.get(1), is("1"));
+
+ this.map.put(2, "2");
+ assertThat(this.map.getSegment(0).getSize(), is(2));
+ assertThat(this.map.get(1), is("1"));
+ assertThat(this.map.get(2), is("2"));
+
+ this.map.put(3, "3");
+ assertThat(this.map.getSegment(0).getSize(), is(4));
+ assertThat(this.map.get(1), is("1"));
+ assertThat(this.map.get(2), is("2"));
+ assertThat(this.map.get(3), is("3"));
+
+ this.map.put(4, "4");
+ assertThat(this.map.getSegment(0).getSize(), is(8));
+ assertThat(this.map.get(4), is("4"));
+
+ // Putting again should not increase the count
+ for (int i = 1; i <= 5; i++) {
+ this.map.put(i, String.valueOf(i));
+ }
+ assertThat(this.map.getSegment(0).getSize(), is(8));
+ assertThat(this.map.get(5), is("5"));
+ }
+
+ @Test
+ public void shouldPurgeOnGet() throws Exception {
+ this.map = new TestWeakConcurrentCache(1, 0.75f, 1);
+ for (int i = 1; i <= 5; i++) {
+ this.map.put(i, String.valueOf(i));
+ }
+ this.map.getMockReference(1, Restructure.NEVER).queueForPurge();
+ this.map.getMockReference(3, Restructure.NEVER).queueForPurge();
+ assertThat(this.map.getReference(1, Restructure.WHEN_NECESSARY), is(nullValue()));
+ assertThat(this.map.get(2), is("2"));
+ assertThat(this.map.getReference(3, Restructure.WHEN_NECESSARY), is(nullValue()));
+ assertThat(this.map.get(4), is("4"));
+ assertThat(this.map.get(5), is("5"));
+ }
+
+ @Test
+ public void shouldPergeOnPut() throws Exception {
+ this.map = new TestWeakConcurrentCache(1, 0.75f, 1);
+ for (int i = 1; i <= 5; i++) {
+ this.map.put(i, String.valueOf(i));
+ }
+ this.map.getMockReference(1, Restructure.NEVER).queueForPurge();
+ this.map.getMockReference(3, Restructure.NEVER).queueForPurge();
+ this.map.put(1, "1");
+ assertThat(this.map.get(1), is("1"));
+ assertThat(this.map.get(2), is("2"));
+ assertThat(this.map.getReference(3, Restructure.WHEN_NECESSARY), is(nullValue()));
+ assertThat(this.map.get(4), is("4"));
+ assertThat(this.map.get(5), is("5"));
+ }
+
+ @Test
+ public void shouldPutIfAbsent() throws Exception {
+ assertThat(this.map.putIfAbsent(123, "123"), is(nullValue()));
+ assertThat(this.map.putIfAbsent(123, "123b"), is("123"));
+ assertThat(this.map.get(123), is("123"));
+ }
+
+ @Test
+ public void shouldPutIfAbsentWithNullValue() throws Exception {
+ assertThat(this.map.putIfAbsent(123, null), is(nullValue()));
+ assertThat(this.map.putIfAbsent(123, "123"), is(nullValue()));
+ assertThat(this.map.get(123), is(nullValue()));
+ }
+
+ @Test
+ public void shouldPutIfAbsentWithNullKey() throws Exception {
+ assertThat(this.map.putIfAbsent(null, "123"), is(nullValue()));
+ assertThat(this.map.putIfAbsent(null, "123b"), is("123"));
+ assertThat(this.map.get(null), is("123"));
+ }
+
+ @Test
+ public void shouldRemoveKeyAndValue() throws Exception {
+ this.map.put(123, "123");
+ assertThat(this.map.remove(123, "456"), is(false));
+ assertThat(this.map.get(123), is("123"));
+ assertThat(this.map.remove(123, "123"), is(true));
+ assertFalse(this.map.containsKey(123));
+ assertThat(this.map.isEmpty(), is(true));
+ }
+
+ @Test
+ public void shouldRemoveKeyAndValueWithExistingNull() throws Exception {
+ this.map.put(123, null);
+ assertThat(this.map.remove(123, "456"), is(false));
+ assertThat(this.map.get(123), is(nullValue()));
+ assertThat(this.map.remove(123, null), is(true));
+ assertFalse(this.map.containsKey(123));
+ assertThat(this.map.isEmpty(), is(true));
+ }
+
+ @Test
+ public void shouldReplaceOldValueWithNewValue() throws Exception {
+ this.map.put(123, "123");
+ assertThat(this.map.replace(123, "456", "789"), is(false));
+ assertThat(this.map.get(123), is("123"));
+ assertThat(this.map.replace(123, "123", "789"), is(true));
+ assertThat(this.map.get(123), is("789"));
+ }
+
+ @Test
+ public void shouldReplaceOldNullValueWithNewValue() throws Exception {
+ this.map.put(123, null);
+ assertThat(this.map.replace(123, "456", "789"), is(false));
+ assertThat(this.map.get(123), is(nullValue()));
+ assertThat(this.map.replace(123, null, "789"), is(true));
+ assertThat(this.map.get(123), is("789"));
+ }
+
+ @Test
+ public void shouldReplaceValue() throws Exception {
+ this.map.put(123, "123");
+ assertThat(this.map.replace(123, "456"), is("123"));
+ assertThat(this.map.get(123), is("456"));
+ }
+
+ @Test
+ public void shouldReplaceNullValue() throws Exception {
+ this.map.put(123, null);
+ assertThat(this.map.replace(123, "456"), is(nullValue()));
+ assertThat(this.map.get(123), is("456"));
+ }
+
+ @Test
+ public void shouldGetSize() throws Exception {
+ assertThat(this.map.size(), is(0));
+ this.map.put(123, "123");
+ this.map.put(123, null);
+ this.map.put(456, "456");
+ assertThat(this.map.size(), is(2));
+ }
+
+ @Test
+ public void shouldSupportIsEmpty() throws Exception {
+ assertThat(this.map.isEmpty(), is(true));
+ this.map.put(123, "123");
+ this.map.put(123, null);
+ this.map.put(456, "456");
+ assertThat(this.map.isEmpty(), is(false));
+ }
+
+ @Test
+ public void shouldContainKey() throws Exception {
+ assertThat(this.map.containsKey(123), is(false));
+ assertThat(this.map.containsKey(456), is(false));
+ this.map.put(123, "123");
+ this.map.put(456, null);
+ assertThat(this.map.containsKey(123), is(true));
+ assertThat(this.map.containsKey(456), is(true));
+ }
+
+ @Test
+ public void shouldContainValue() throws Exception {
+ assertThat(this.map.containsValue("123"), is(false));
+ assertThat(this.map.containsValue(null), is(false));
+ this.map.put(123, "123");
+ this.map.put(456, null);
+ assertThat(this.map.containsValue("123"), is(true));
+ assertThat(this.map.containsValue(null), is(true));
+ }
+
+ @Test
+ public void shouldRemoveWhenKeyIsInMap() throws Exception {
+ this.map.put(123, null);
+ this.map.put(456, "456");
+ this.map.put(null, "789");
+ assertThat(this.map.remove(123), is(nullValue()));
+ assertThat(this.map.remove(456), is("456"));
+ assertThat(this.map.remove(null), is("789"));
+ assertThat(this.map.isEmpty(), is(true));
+ }
+
+ @Test
+ public void shouldRemoveWhenKeyIsNotInMap() throws Exception {
+ assertThat(this.map.remove(123), is(nullValue()));
+ assertThat(this.map.remove(null), is(nullValue()));
+ assertThat(this.map.isEmpty(), is(true));
+ }
+
+ @Test
+ public void shouldPutAll() throws Exception {
+ Map m = new HashMap();
+ m.put(123, "123");
+ m.put(456, null);
+ m.put(null, "789");
+ this.map.putAll(m);
+ assertThat(this.map.size(), is(3));
+ assertThat(this.map.get(123), is("123"));
+ assertThat(this.map.get(456), is(nullValue()));
+ assertThat(this.map.get(null), is("789"));
+ }
+
+ @Test
+ public void shouldClear() throws Exception {
+ this.map.put(123, "123");
+ this.map.put(456, null);
+ this.map.put(null, "789");
+ this.map.clear();
+ assertThat(this.map.size(), is(0));
+ assertThat(this.map.containsKey(123), is(false));
+ assertThat(this.map.containsKey(456), is(false));
+ assertThat(this.map.containsKey(null), is(false));
+ }
+
+ @Test
+ public void shouldGetKeySet() throws Exception {
+ this.map.put(123, "123");
+ this.map.put(456, null);
+ this.map.put(null, "789");
+ Set expected = new HashSet();
+ expected.add(123);
+ expected.add(456);
+ expected.add(null);
+ assertThat(this.map.keySet(), is(expected));
+ }
+
+ @Test
+ public void shouldGetValues() throws Exception {
+ this.map.put(123, "123");
+ this.map.put(456, null);
+ this.map.put(null, "789");
+ List actual = new ArrayList(this.map.values());
+ List expected = new ArrayList();
+ expected.add("123");
+ expected.add(null);
+ expected.add("789");
+ Collections.sort(actual, NULL_SAFE_STRING_SORT);
+ Collections.sort(expected, NULL_SAFE_STRING_SORT);
+ assertThat(actual, is(expected));
+ }
+
+ @Test
+ public void shouldGetEntrySet() throws Exception {
+ this.map.put(123, "123");
+ this.map.put(456, null);
+ this.map.put(null, "789");
+ HashMap expected = new HashMap();
+ expected.put(123, "123");
+ expected.put(456, null);
+ expected.put(null, "789");
+ assertThat(this.map.entrySet(), is(expected.entrySet()));
+ }
+
+ @Test
+ public void shouldGetEntrySetFollowingNext() throws Exception {
+ // Use loadFactor to disable resize
+ this.map = new TestWeakConcurrentCache(1, 10.0f, 1);
+ this.map.put(1, "1");
+ this.map.put(2, "2");
+ this.map.put(3, "3");
+ HashMap expected = new HashMap();
+ expected.put(1, "1");
+ expected.put(2, "2");
+ expected.put(3, "3");
+ assertThat(this.map.entrySet(), is(expected.entrySet()));
+ }
+
+ @Test
+ public void shouldRemoveViaEntrySet() throws Exception {
+ this.map.put(1, "1");
+ this.map.put(2, "2");
+ this.map.put(3, "3");
+ Iterator> iterator = this.map.entrySet().iterator();
+ iterator.next();
+ iterator.next();
+ iterator.remove();
+ iterator.next();
+ assertThat(iterator.hasNext(), is(false));
+ assertThat(this.map.size(), is(2));
+ assertThat(this.map.containsKey(2), is(false));
+ }
+
+ @Test
+ public void shouldSetViaEntrySet() throws Exception {
+ this.map.put(1, "1");
+ this.map.put(2, "2");
+ this.map.put(3, "3");
+ Iterator> iterator = this.map.entrySet().iterator();
+ iterator.next();
+ iterator.next().setValue("2b");
+ iterator.next();
+ assertThat(iterator.hasNext(), is(false));
+ assertThat(this.map.size(), is(3));
+ assertThat(this.map.get(2), is("2b"));
+ }
+
+ @Test
+ @Ignore("Intended for use during development only")
+ public void shouldBeFasterThanSynchronizedMap() throws Exception {
+ Map> synchronizedMap = Collections.synchronizedMap(new WeakHashMap>());
+ StopWatch mapTime = timeMultiThreaded("SynchronizedMap", synchronizedMap,
+ new ValueFactory>() {
+
+ public WeakReference newValue(int v) {
+ return new WeakReference(String.valueOf(v));
+ }
+ });
+ System.out.println(mapTime.prettyPrint());
+
+ this.map.setDisableTestHooks(true);
+ StopWatch cacheTime = timeMultiThreaded("WeakConcurrentCache", this.map,
+ new ValueFactory() {
+
+ public String newValue(int v) {
+ return String.valueOf(v);
+ }
+ });
+ System.out.println(cacheTime.prettyPrint());
+
+ // We should be at least 4 time faster
+ assertThat(cacheTime.getTotalTimeSeconds(),
+ is(lessThan(mapTime.getTotalTimeSeconds() / 4.0)));
+ }
+
+ @Test
+ public void shouldSupportNullReference() throws Exception {
+ // GC could happen during restructure so we must be able to create a reference for a null entry
+ map.createReferenceManager().createReference(null, 1234, null);
+ }
+
+ /**
+ * Time a multi-threaded access to a cache.
+ *
+ * @param cache the cache to test
+ * @return the timing stopwatch
+ * @throws InterruptedException
+ */
+ private StopWatch timeMultiThreaded(String id, final Map map,
+ ValueFactory factory) throws InterruptedException {
+ StopWatch stopWatch = new StopWatch(id);
+ for (int i = 0; i < 500; i++) {
+ map.put(i, factory.newValue(i));
+ }
+ Thread[] threads = new Thread[30];
+ stopWatch.start("Running threads");
+ for (int threadIndex = 0; threadIndex < threads.length; threadIndex++) {
+ threads[threadIndex] = new Thread("Cache access thread " + threadIndex) {
+
+ @Override
+ public void run() {
+ for (int j = 0; j < 1000; j++) {
+ for (int i = 0; i < 1000; i++) {
+ map.get(i);
+ }
+ }
+ };
+ };
+ }
+ for (int i = 0; i < threads.length; i++) {
+ threads[i].start();
+ }
+
+ for (int i = 0; i < threads.length; i++) {
+ if (threads[i].isAlive()) {
+ threads[i].join(2000);
+ }
+ }
+ stopWatch.stop();
+ return stopWatch;
+ }
+
+ private static interface ValueFactory {
+
+ V newValue(int k);
+ }
+
+ private static class TestWeakConcurrentCache extends
+ ConcurrentReferenceHashMap {
+
+ private int supplimentalHash;
+
+ private final LinkedList> queue = new LinkedList>();
+
+ private boolean disableTestHooks;
+
+ public TestWeakConcurrentCache() {
+ super();
+ }
+
+ public void setDisableTestHooks(boolean disableTestHooks) {
+ this.disableTestHooks = disableTestHooks;
+ }
+
+ public TestWeakConcurrentCache(int initialCapacity, float loadFactor,
+ int concurrencyLevel) {
+ super(initialCapacity, loadFactor, concurrencyLevel);
+ }
+
+ public TestWeakConcurrentCache(int initialCapacity, int concurrencyLevel) {
+ super(initialCapacity, concurrencyLevel);
+ }
+
+ @Override
+ protected int getHash(Object o) {
+ if (this.disableTestHooks) {
+ return super.getHash(o);
+ }
+ // For testing we want more control of the hash
+ this.supplimentalHash = super.getHash(o);
+ return o == null ? 0 : o.hashCode();
+ }
+
+ public int getSupplimentalHash() {
+ return this.supplimentalHash;
+ }
+
+ @Override
+ protected ReferenceManager createReferenceManager() {
+
+ return new ReferenceManager() {
+
+ @Override
+ public Reference createReference(Entry entry, int hash,
+ Reference next) {
+ if (TestWeakConcurrentCache.this.disableTestHooks) {
+ return super.createReference(entry, hash, next);
+ }
+ return new MockReference(entry, hash, next, TestWeakConcurrentCache.this.queue);
+ }
+
+ @Override
+ public Reference pollForPurge() {
+ if (TestWeakConcurrentCache.this.disableTestHooks) {
+ return super.pollForPurge();
+ }
+ return TestWeakConcurrentCache.this.queue.isEmpty() ? null : TestWeakConcurrentCache.this.queue.removeFirst();
+ }
+ };
+ }
+
+ public MockReference getMockReference(K key, Restructure restructure) {
+ return (MockReference) super.getReference(key, restructure);
+ }
+ }
+
+ private static class MockReference implements Reference {
+
+ private final int hash;
+
+ private Entry entry;
+
+ private final Reference next;
+
+ private final LinkedList> queue;
+
+ public MockReference(Entry entry, int hash, Reference next,
+ LinkedList> queue) {
+ this.hash = hash;
+ this.entry = entry;
+ this.next = next;
+ this.queue = queue;
+ }
+
+ public Entry get() {
+ return this.entry;
+ }
+
+ public int getHash() {
+ return this.hash;
+ }
+
+ public Reference getNext() {
+ return this.next;
+ }
+
+ public void release() {
+ this.queue.add(this);
+ this.entry = null;
+ }
+
+ public void queueForPurge() {
+ this.queue.add(this);
+ }
+ }
+}