/*
 * Copyright (C) 2011 The Guava Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.common.cache;

import static com.google.common.cache.CacheBuilder.NULL_TICKER;
import static com.google.common.cache.LocalCache.DISCARDING_QUEUE;
import static com.google.common.cache.LocalCache.DRAIN_THRESHOLD;
import static com.google.common.cache.LocalCache.nullEntry;
import static com.google.common.cache.LocalCache.unset;
import static com.google.common.cache.TestingCacheLoaders.identityLoader;
import static com.google.common.cache.TestingRemovalListeners.countingRemovalListener;
import static com.google.common.cache.TestingRemovalListeners.queuingRemovalListener;
import static com.google.common.cache.TestingWeighers.constantWeigher;
import static com.google.common.collect.Lists.newArrayList;
import static com.google.common.collect.Maps.immutableEntry;
import static java.util.concurrent.TimeUnit.MINUTES;
import static java.util.concurrent.TimeUnit.NANOSECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;

import com.google.common.base.Equivalence;
import com.google.common.base.Ticker;
import com.google.common.cache.LocalCache.EntryFactory;
import com.google.common.cache.LocalCache.LoadingValueReference;
import com.google.common.cache.LocalCache.LocalLoadingCache;
import com.google.common.cache.LocalCache.LocalManualCache;
import com.google.common.cache.LocalCache.ReferenceEntry;
import com.google.common.cache.LocalCache.Segment;
import com.google.common.cache.LocalCache.Strength;
import com.google.common.cache.LocalCache.ValueReference;
import com.google.common.cache.TestingCacheLoaders.CountingLoader;
import com.google.common.cache.TestingRemovalListeners.CountingRemovalListener;
import com.google.common.cache.TestingRemovalListeners.QueuingRemovalListener;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.testing.MapTestSuiteBuilder;
import com.google.common.collect.testing.TestStringMapGenerator;
import com.google.common.collect.testing.features.CollectionFeature;
import com.google.common.collect.testing.features.CollectionSize;
import com.google.common.collect.testing.features.MapFeature;
import com.google.common.testing.FakeTicker;
import com.google.common.testing.NullPointerTester;
import com.google.common.testing.SerializableTester;
import com.google.common.testing.TestLogHandler;

import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;

import java.io.Serializable;
import java.lang.ref.Reference;
import java.lang.ref.ReferenceQueue;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReferenceArray;
import java.util.logging.LogRecord;

/**
 * @author Charles Fry
 */
public class LocalCacheTest extends TestCase {

  public static Test suite() {
    TestSuite suite = new TestSuite();
    suite.addTestSuite(LocalCacheTest.class);
    suite.addTest(MapTestSuiteBuilder.using(new TestStringMapGenerator() {
          @Override public Map<String, String> create(
              Entry<String, String>[] entries) {
            LocalCache<String, String> map = makeLocalCache(createCacheBuilder());
            for (Entry<String, String> entry : entries) {
              map.put(entry.getKey(), entry.getValue());
            }
            return map;
          }
        }).named("LocalCache with defaults")
        .withFeatures(CollectionSize.ANY, MapFeature.GENERAL_PURPOSE,
            CollectionFeature.SUPPORTS_ITERATOR_REMOVE)
        .createTestSuite());
    return suite;
  }

  static final int SMALL_MAX_SIZE = DRAIN_THRESHOLD * 5;

  TestLogHandler logHandler;

  @Override
  public void setUp() throws Exception {
    super.setUp();
    logHandler = new TestLogHandler();
    LocalCache.logger.addHandler(logHandler);
  }

  @Override
  public void tearDown() throws Exception {
    super.tearDown();
    LocalCache.logger.removeHandler(logHandler);
  }

  private Throwable popLoggedThrowable() {
    List<LogRecord> logRecords = logHandler.getStoredLogRecords();
    assertEquals(1, logRecords.size());
    LogRecord logRecord = logRecords.get(0);
    logHandler.clear();
    return logRecord.getThrown();
  }

  private void checkNothingLogged() {
    assertTrue(logHandler.getStoredLogRecords().isEmpty());
  }

  private void checkLogged(Throwable t) {
    assertSame(t, popLoggedThrowable());
  }

  private static <K, V> LocalCache<K, V> makeLocalCache(
      CacheBuilder<? super K, ? super V> builder) {
    return new LocalCache<K, V>(builder, null);
  }

  private static CacheBuilder<Object, Object> createCacheBuilder() {
    return new CacheBuilder<Object, Object>();
  }

  // constructor tests

  public void testDefaults() {
    LocalCache<Object, Object> map = makeLocalCache(createCacheBuilder());

    assertSame(Strength.STRONG, map.keyStrength);
    assertSame(Strength.STRONG, map.valueStrength);
    assertSame(map.keyStrength.defaultEquivalence(), map.keyEquivalence);
    assertSame(map.valueStrength.defaultEquivalence(), map.valueEquivalence);

    assertEquals(0, map.expireAfterAccessNanos);
    assertEquals(0, map.expireAfterWriteNanos);
    assertEquals(0, map.refreshNanos);
    assertEquals(CacheBuilder.UNSET_INT, map.maxWeight);

    assertSame(EntryFactory.STRONG, map.entryFactory);
    assertSame(CacheBuilder.NullListener.INSTANCE, map.removalListener);
    assertSame(DISCARDING_QUEUE, map.removalNotificationQueue);
    assertSame(NULL_TICKER, map.ticker);

    assertEquals(4, map.concurrencyLevel);

    // concurrency level
    assertEquals(4, map.segments.length);
    // initial capacity / concurrency level
    assertEquals(16 / map.segments.length, map.segments[0].table.length());

    assertFalse(map.evictsBySize());
    assertFalse(map.expires());
    assertFalse(map.expiresAfterWrite());
    assertFalse(map.expiresAfterAccess());
    assertFalse(map.refreshes());
  }

  public void testSetKeyEquivalence() {
    Equivalence<Object> testEquivalence = new Equivalence<Object>() {
      @Override
      protected boolean doEquivalent(Object a, Object b) {
        return false;
      }

      @Override
      protected int doHash(Object t) {
        return 0;
      }
    };

    LocalCache<Object, Object> map =
        makeLocalCache(createCacheBuilder().keyEquivalence(testEquivalence));
    assertSame(testEquivalence, map.keyEquivalence);
    assertSame(map.valueStrength.defaultEquivalence(), map.valueEquivalence);
  }

  public void testSetValueEquivalence() {
    Equivalence<Object> testEquivalence = new Equivalence<Object>() {
      @Override
      protected boolean doEquivalent(Object a, Object b) {
        return false;
      }

      @Override
      protected int doHash(Object t) {
        return 0;
      }
    };

    LocalCache<Object, Object> map =
        makeLocalCache(createCacheBuilder().valueEquivalence(testEquivalence));
    assertSame(testEquivalence, map.valueEquivalence);
    assertSame(map.keyStrength.defaultEquivalence(), map.keyEquivalence);
  }

  public void testSetConcurrencyLevel() {
    // round up to nearest power of two
    checkConcurrencyLevel(1, 1);
    checkConcurrencyLevel(2, 2);
    checkConcurrencyLevel(3, 4);
    checkConcurrencyLevel(4, 4);
    checkConcurrencyLevel(5, 8);
    checkConcurrencyLevel(6, 8);
    checkConcurrencyLevel(7, 8);
    checkConcurrencyLevel(8, 8);
  }

  private static void checkConcurrencyLevel(int concurrencyLevel, int segmentCount) {
    LocalCache<Object, Object> map =
        makeLocalCache(createCacheBuilder().concurrencyLevel(concurrencyLevel));
    assertEquals(segmentCount, map.segments.length);
  }

  public void testSetInitialCapacity() {
    // share capacity over each segment, then round up to nearest power of two
    checkInitialCapacity(1, 0, 1);
    checkInitialCapacity(1, 1, 1);
    checkInitialCapacity(1, 2, 2);
    checkInitialCapacity(1, 3, 4);
    checkInitialCapacity(1, 4, 4);
    checkInitialCapacity(1, 5, 8);
    checkInitialCapacity(1, 6, 8);
    checkInitialCapacity(1, 7, 8);
    checkInitialCapacity(1, 8, 8);

    checkInitialCapacity(2, 0, 1);
    checkInitialCapacity(2, 1, 1);
    checkInitialCapacity(2, 2, 1);
    checkInitialCapacity(2, 3, 2);
    checkInitialCapacity(2, 4, 2);
    checkInitialCapacity(2, 5, 4);
    checkInitialCapacity(2, 6, 4);
    checkInitialCapacity(2, 7, 4);
    checkInitialCapacity(2, 8, 4);

    checkInitialCapacity(4, 0, 1);
    checkInitialCapacity(4, 1, 1);
    checkInitialCapacity(4, 2, 1);
    checkInitialCapacity(4, 3, 1);
    checkInitialCapacity(4, 4, 1);
    checkInitialCapacity(4, 5, 2);
    checkInitialCapacity(4, 6, 2);
    checkInitialCapacity(4, 7, 2);
    checkInitialCapacity(4, 8, 2);
  }

  private static void checkInitialCapacity(
      int concurrencyLevel, int initialCapacity, int segmentSize) {
    LocalCache<Object, Object> map = makeLocalCache(
        createCacheBuilder().concurrencyLevel(concurrencyLevel).initialCapacity(initialCapacity));
    for (int i = 0; i < map.segments.length; i++) {
      assertEquals(segmentSize, map.segments[i].table.length());
    }
  }

  public void testSetMaximumSize() {
    // vary maximumSize wrt concurrencyLevel
    for (int maxSize = 1; maxSize < 100; maxSize++) {
      checkMaximumSize(1, 8, maxSize);
      checkMaximumSize(2, 8, maxSize);
      checkMaximumSize(4, 8, maxSize);
      checkMaximumSize(8, 8, maxSize);
    }

    checkMaximumSize(1, 8, Long.MAX_VALUE);
    checkMaximumSize(2, 8, Long.MAX_VALUE);
    checkMaximumSize(4, 8, Long.MAX_VALUE);
    checkMaximumSize(8, 8, Long.MAX_VALUE);

    // vary initial capacity wrt maximumSize
    for (int capacity = 0; capacity < 8; capacity++) {
      checkMaximumSize(1, capacity, 4);
      checkMaximumSize(2, capacity, 4);
      checkMaximumSize(4, capacity, 4);
      checkMaximumSize(8, capacity, 4);
    }
  }

  private static void checkMaximumSize(int concurrencyLevel, int initialCapacity, long maxSize) {
    LocalCache<Object, Object> map = makeLocalCache(createCacheBuilder()
        .concurrencyLevel(concurrencyLevel)
        .initialCapacity(initialCapacity)
        .maximumSize(maxSize));
    long totalCapacity = 0;
    assertTrue("segments=" + map.segments.length + ", maxSize=" + maxSize,
        map.segments.length <= Math.max(1, maxSize / 10));
    for (int i = 0; i < map.segments.length; i++) {
      totalCapacity += map.segments[i].maxSegmentWeight;
    }
    assertTrue("totalCapacity=" + totalCapacity + ", maxSize=" + maxSize,
        totalCapacity == maxSize);

    map = makeLocalCache(createCacheBuilder()
        .concurrencyLevel(concurrencyLevel)
        .initialCapacity(initialCapacity)
        .maximumWeight(maxSize)
        .weigher(constantWeigher(1)));
    assertTrue("segments=" + map.segments.length + ", maxSize=" + maxSize,
        map.segments.length <= Math.max(1, maxSize / 10));
    totalCapacity = 0;
    for (int i = 0; i < map.segments.length; i++) {
      totalCapacity += map.segments[i].maxSegmentWeight;
    }
    assertTrue("totalCapacity=" + totalCapacity + ", maxSize=" + maxSize,
        totalCapacity == maxSize);
  }
  public void testSetWeigher() {
    Weigher<Object, Object> testWeigher = new Weigher<Object, Object>() {
      @Override
      public int weigh(Object key, Object value) {
        return 42;
      }
    };

    LocalCache<Object, Object> map =
        makeLocalCache(createCacheBuilder().maximumWeight(1).weigher(testWeigher));
    assertSame(testWeigher, map.weigher);
  }

  public void testSetWeakKeys() {
    LocalCache<Object, Object> map = makeLocalCache(createCacheBuilder().weakKeys());
    checkStrength(map, Strength.WEAK, Strength.STRONG);
    assertSame(EntryFactory.WEAK, map.entryFactory);
  }

  public void testSetWeakValues() {
    LocalCache<Object, Object> map = makeLocalCache(createCacheBuilder().weakValues());
    checkStrength(map, Strength.STRONG, Strength.WEAK);
    assertSame(EntryFactory.STRONG, map.entryFactory);
  }

  public void testSetSoftValues() {
    LocalCache<Object, Object> map = makeLocalCache(createCacheBuilder().softValues());
    checkStrength(map, Strength.STRONG, Strength.SOFT);
    assertSame(EntryFactory.STRONG, map.entryFactory);
  }

  private static void checkStrength(
      LocalCache<Object, Object> map, Strength keyStrength, Strength valueStrength) {
    assertSame(keyStrength, map.keyStrength);
    assertSame(valueStrength, map.valueStrength);
    assertSame(keyStrength.defaultEquivalence(), map.keyEquivalence);
    assertSame(valueStrength.defaultEquivalence(), map.valueEquivalence);
  }

  public void testSetExpireAfterWrite() {
    long duration = 42;
    TimeUnit unit = TimeUnit.SECONDS;
    LocalCache<Object, Object> map =
        makeLocalCache(createCacheBuilder().expireAfterWrite(duration, unit));
    assertEquals(unit.toNanos(duration), map.expireAfterWriteNanos);
  }

  public void testSetExpireAfterAccess() {
    long duration = 42;
    TimeUnit unit = TimeUnit.SECONDS;
    LocalCache<Object, Object> map =
        makeLocalCache(createCacheBuilder().expireAfterAccess(duration, unit));
    assertEquals(unit.toNanos(duration), map.expireAfterAccessNanos);
  }

  public void testSetRefresh() {
    long duration = 42;
    TimeUnit unit = TimeUnit.SECONDS;
    LocalCache<Object, Object> map =
        makeLocalCache(createCacheBuilder().refreshAfterWrite(duration, unit));
    assertEquals(unit.toNanos(duration), map.refreshNanos);
  }

  public void testSetRemovalListener() {
    RemovalListener<Object, Object> testListener = TestingRemovalListeners.nullRemovalListener();
    LocalCache<Object, Object> map =
        makeLocalCache(createCacheBuilder().removalListener(testListener));
    assertSame(testListener, map.removalListener);
  }

  public void testSetTicker() {
    Ticker testTicker = new Ticker() {
      @Override
      public long read() {
        return 0;
      }
    };
    LocalCache<Object, Object> map = makeLocalCache(createCacheBuilder().ticker(testTicker));
    assertSame(testTicker, map.ticker);
  }

  public void testEntryFactory() {
    assertSame(EntryFactory.STRONG,
        EntryFactory.getFactory(Strength.STRONG, false, false));
    assertSame(EntryFactory.STRONG_ACCESS,
        EntryFactory.getFactory(Strength.STRONG, true, false));
    assertSame(EntryFactory.STRONG_WRITE,
        EntryFactory.getFactory(Strength.STRONG, false, true));
    assertSame(EntryFactory.STRONG_ACCESS_WRITE,
        EntryFactory.getFactory(Strength.STRONG, true, true));
    assertSame(EntryFactory.WEAK,
        EntryFactory.getFactory(Strength.WEAK, false, false));
    assertSame(EntryFactory.WEAK_ACCESS,
        EntryFactory.getFactory(Strength.WEAK, true, false));
    assertSame(EntryFactory.WEAK_WRITE,
        EntryFactory.getFactory(Strength.WEAK, false, true));
    assertSame(EntryFactory.WEAK_ACCESS_WRITE,
        EntryFactory.getFactory(Strength.WEAK, true, true));
  }

  // computation tests

  public void testCompute() throws ExecutionException {
    CountingLoader loader = new CountingLoader();
    LocalCache<Object, Object> map = makeLocalCache(createCacheBuilder());
    assertEquals(0, loader.getCount());

    Object key =
new Object(); Object value = map.get(key, loader); assertEquals(1, loader.getCount()); assertEquals(value, map.get(key, loader)); assertEquals(1, loader.getCount()); } public void testRecordReadOnCompute() throws ExecutionException { CountingLoader loader = new CountingLoader(); for (CacheBuilder builder : allEvictingMakers()) { LocalCache map = makeLocalCache(builder.concurrencyLevel(1)); Segment segment = map.segments[0]; List> writeOrder = Lists.newLinkedList(); List> readOrder = Lists.newLinkedList(); for (int i = 0; i < SMALL_MAX_SIZE; i++) { Object key = new Object(); int hash = map.hash(key); map.get(key, loader); ReferenceEntry entry = segment.getEntry(key, hash); writeOrder.add(entry); readOrder.add(entry); } checkEvictionQueues(map, segment, readOrder, writeOrder); checkExpirationTimes(map); assertTrue(segment.recencyQueue.isEmpty()); // access some of the elements Random random = new Random(); List> reads = Lists.newArrayList(); Iterator> i = readOrder.iterator(); while (i.hasNext()) { ReferenceEntry entry = i.next(); if (random.nextBoolean()) { map.get(entry.getKey(), loader); reads.add(entry); i.remove(); assertTrue(segment.recencyQueue.size() <= DRAIN_THRESHOLD); } } int undrainedIndex = reads.size() - segment.recencyQueue.size(); checkAndDrainRecencyQueue(map, segment, reads.subList(undrainedIndex, reads.size())); readOrder.addAll(reads); checkEvictionQueues(map, segment, readOrder, writeOrder); checkExpirationTimes(map); } } public void testComputeExistingEntry() throws ExecutionException { CountingLoader loader = new CountingLoader(); LocalCache map = makeLocalCache(createCacheBuilder()); assertEquals(0, loader.getCount()); Object key = new Object(); Object value = new Object(); map.put(key, value); assertEquals(value, map.get(key, loader)); assertEquals(0, loader.getCount()); } public void testComputePartiallyCollectedKey() throws ExecutionException { CacheBuilder builder = createCacheBuilder().concurrencyLevel(1); CountingLoader loader = new CountingLoader(); LocalCache map = makeLocalCache(builder); Segment segment = map.segments[0]; AtomicReferenceArray> table = segment.table; assertEquals(0, loader.getCount()); Object key = new Object(); int hash = map.hash(key); Object value = new Object(); int index = hash & (table.length() - 1); DummyEntry entry = DummyEntry.create(key, hash, null); DummyValueReference valueRef = DummyValueReference.create(value); entry.setValueReference(valueRef); table.set(index, entry); segment.count++; assertSame(value, map.get(key, loader)); assertEquals(0, loader.getCount()); assertEquals(1, segment.count); entry.clearKey(); assertNotSame(value, map.get(key, loader)); assertEquals(1, loader.getCount()); assertEquals(2, segment.count); } public void testComputePartiallyCollectedValue() throws ExecutionException { CacheBuilder builder = createCacheBuilder().concurrencyLevel(1); CountingLoader loader = new CountingLoader(); LocalCache map = makeLocalCache(builder); Segment segment = map.segments[0]; AtomicReferenceArray> table = segment.table; assertEquals(0, loader.getCount()); Object key = new Object(); int hash = map.hash(key); Object value = new Object(); int index = hash & (table.length() - 1); DummyEntry entry = DummyEntry.create(key, hash, null); DummyValueReference valueRef = DummyValueReference.create(value); entry.setValueReference(valueRef); table.set(index, entry); segment.count++; assertSame(value, map.get(key, loader)); assertEquals(0, loader.getCount()); assertEquals(1, segment.count); valueRef.clear(); assertNotSame(value, 
map.get(key, loader)); assertEquals(1, loader.getCount()); assertEquals(1, segment.count); } public void testComputeExpiredEntry() throws ExecutionException { CacheBuilder builder = createCacheBuilder() .expireAfterWrite(1, TimeUnit.NANOSECONDS); CountingLoader loader = new CountingLoader(); LocalCache map = makeLocalCache(builder); assertEquals(0, loader.getCount()); Object key = new Object(); Object one = map.get(key, loader); assertEquals(1, loader.getCount()); Object two = map.get(key, loader); assertNotSame(one, two); assertEquals(2, loader.getCount()); } public void testValues() { LocalCache map = makeLocalCache(createCacheBuilder()); map.put("foo", "bar"); map.put("baz", "bar"); map.put("quux", "quux"); assertFalse(map.values() instanceof Set); assertTrue(map.values().removeAll(ImmutableSet.of("bar"))); assertEquals(1, map.size()); } public void testCopyEntry_computing() { final CountDownLatch startSignal = new CountDownLatch(1); final CountDownLatch computingSignal = new CountDownLatch(1); final CountDownLatch doneSignal = new CountDownLatch(2); final Object computedObject = new Object(); final CacheLoader loader = new CacheLoader() { @Override public Object load(Object key) throws Exception { computingSignal.countDown(); startSignal.await(); return computedObject; } }; QueuingRemovalListener listener = queuingRemovalListener(); CacheBuilder builder = createCacheBuilder() .concurrencyLevel(1) .removalListener(listener); final LocalCache map = makeLocalCache(builder); Segment segment = map.segments[0]; AtomicReferenceArray> table = segment.table; assertTrue(listener.isEmpty()); final Object one = new Object(); int hash = map.hash(one); int index = hash & (table.length() - 1); new Thread() { @Override public void run() { try { map.get(one, loader); } catch (ExecutionException e) { throw new RuntimeException(e); } doneSignal.countDown(); } }.start(); try { computingSignal.await(); } catch (InterruptedException e) { throw new RuntimeException(e); } new Thread() { @Override public void run() { try { map.get(one, loader); } catch (ExecutionException e) { throw new RuntimeException(e); } doneSignal.countDown(); } }.start(); ReferenceEntry entry = segment.getEntry(one, hash); ReferenceEntry newEntry = segment.copyEntry(entry, null); table.set(index, newEntry); @SuppressWarnings("unchecked") LoadingValueReference valueReference = (LoadingValueReference) newEntry.getValueReference(); assertFalse(valueReference.futureValue.isDone()); startSignal.countDown(); try { doneSignal.await(); } catch (InterruptedException e) { throw new RuntimeException(e); } map.cleanUp(); // force notifications assertTrue(listener.isEmpty()); assertTrue(map.containsKey(one)); assertEquals(1, map.size()); assertSame(computedObject, map.get(one)); } public void testRemovalListenerCheckedException() { final RuntimeException e = new RuntimeException(); RemovalListener listener = new RemovalListener() { @Override public void onRemoval(RemovalNotification notification) { throw e; } }; CacheBuilder builder = createCacheBuilder().removalListener(listener); final LocalCache cache = makeLocalCache(builder); Object key = new Object(); cache.put(key, new Object()); checkNothingLogged(); cache.remove(key); checkLogged(e); } public void testRemovalListener_replaced_computing() { final CountDownLatch startSignal = new CountDownLatch(1); final CountDownLatch computingSignal = new CountDownLatch(1); final CountDownLatch doneSignal = new CountDownLatch(1); final Object computedObject = new Object(); final CacheLoader loader = new 
CacheLoader() { @Override public Object load(Object key) throws Exception { computingSignal.countDown(); startSignal.await(); return computedObject; } }; QueuingRemovalListener listener = queuingRemovalListener(); CacheBuilder builder = createCacheBuilder().removalListener(listener); final LocalCache map = makeLocalCache(builder); assertTrue(listener.isEmpty()); final Object one = new Object(); final Object two = new Object(); new Thread() { @Override public void run() { try { map.get(one, loader); } catch (ExecutionException e) { throw new RuntimeException(e); } doneSignal.countDown(); } }.start(); try { computingSignal.await(); } catch (InterruptedException e) { throw new RuntimeException(e); } map.put(one, two); assertSame(two, map.get(one)); startSignal.countDown(); try { doneSignal.await(); } catch (InterruptedException e) { throw new RuntimeException(e); } map.cleanUp(); // force notifications assertNotified(listener, one, computedObject, RemovalCause.REPLACED); assertTrue(listener.isEmpty()); } public void testSegmentRefresh_duplicate() throws ExecutionException { LocalCache map = makeLocalCache(createCacheBuilder() .concurrencyLevel(1)); Segment segment = map.segments[0]; Object key = new Object(); int hash = map.hash(key); AtomicReferenceArray> table = segment.table; int index = hash & (table.length() - 1); // already loading DummyEntry entry = DummyEntry.create(key, hash, null); DummyValueReference valueRef = DummyValueReference.create(null); valueRef.setLoading(true); entry.setValueReference(valueRef); table.set(index, entry); assertNull(segment.refresh(key, hash, identityLoader(), false)); } // Removal listener tests public void testRemovalListener_explicit() { QueuingRemovalListener listener = queuingRemovalListener(); LocalCache map = makeLocalCache(createCacheBuilder() .removalListener(listener)); assertTrue(listener.isEmpty()); Object one = new Object(); Object two = new Object(); Object three = new Object(); Object four = new Object(); Object five = new Object(); Object six = new Object(); map.put(one, two); map.remove(one); assertNotified(listener, one, two, RemovalCause.EXPLICIT); map.put(two, three); map.remove(two, three); assertNotified(listener, two, three, RemovalCause.EXPLICIT); map.put(three, four); Iterator i = map.entrySet().iterator(); i.next(); i.remove(); assertNotified(listener, three, four, RemovalCause.EXPLICIT); map.put(four, five); i = map.keySet().iterator(); i.next(); i.remove(); assertNotified(listener, four, five, RemovalCause.EXPLICIT); map.put(five, six); i = map.values().iterator(); i.next(); i.remove(); assertNotified(listener, five, six, RemovalCause.EXPLICIT); assertTrue(listener.isEmpty()); } public void testRemovalListener_replaced() { QueuingRemovalListener listener = queuingRemovalListener(); LocalCache map = makeLocalCache(createCacheBuilder() .removalListener(listener)); assertTrue(listener.isEmpty()); Object one = new Object(); Object two = new Object(); Object three = new Object(); Object four = new Object(); Object five = new Object(); Object six = new Object(); map.put(one, two); map.put(one, three); assertNotified(listener, one, two, RemovalCause.REPLACED); Map newMap = ImmutableMap.of(one, four); map.putAll(newMap); assertNotified(listener, one, three, RemovalCause.REPLACED); map.replace(one, five); assertNotified(listener, one, four, RemovalCause.REPLACED); map.replace(one, five, six); assertNotified(listener, one, five, RemovalCause.REPLACED); } public void testRemovalListener_collected() { QueuingRemovalListener listener = 
queuingRemovalListener(); LocalCache map = makeLocalCache(createCacheBuilder() .concurrencyLevel(1) .softValues() .removalListener(listener)); Segment segment = map.segments[0]; assertTrue(listener.isEmpty()); Object one = new Object(); Object two = new Object(); Object three = new Object(); map.put(one, two); map.put(two, three); assertTrue(listener.isEmpty()); int hash = map.hash(one); ReferenceEntry entry = segment.getEntry(one, hash); map.reclaimValue(entry.getValueReference()); assertNotified(listener, one, two, RemovalCause.COLLECTED); assertTrue(listener.isEmpty()); } public void testRemovalListener_expired() { FakeTicker ticker = new FakeTicker(); QueuingRemovalListener listener = queuingRemovalListener(); LocalCache map = makeLocalCache(createCacheBuilder() .concurrencyLevel(1) .expireAfterWrite(3, TimeUnit.NANOSECONDS) .ticker(ticker) .removalListener(listener)); assertTrue(listener.isEmpty()); Object one = new Object(); Object two = new Object(); Object three = new Object(); Object four = new Object(); Object five = new Object(); map.put(one, two); ticker.advance(1); map.put(two, three); ticker.advance(1); map.put(three, four); assertTrue(listener.isEmpty()); ticker.advance(1); map.put(four, five); assertNotified(listener, one, two, RemovalCause.EXPIRED); assertTrue(listener.isEmpty()); } public void testRemovalListener_size() { QueuingRemovalListener listener = queuingRemovalListener(); LocalCache map = makeLocalCache(createCacheBuilder() .concurrencyLevel(1) .maximumSize(2) .removalListener(listener)); assertTrue(listener.isEmpty()); Object one = new Object(); Object two = new Object(); Object three = new Object(); Object four = new Object(); map.put(one, two); map.put(two, three); assertTrue(listener.isEmpty()); map.put(three, four); assertNotified(listener, one, two, RemovalCause.SIZE); assertTrue(listener.isEmpty()); } static void assertNotified( QueuingRemovalListener listener, K key, V value, RemovalCause cause) { RemovalNotification notification = listener.remove(); assertSame(key, notification.getKey()); assertSame(value, notification.getValue()); assertSame(cause, notification.getCause()); } // Segment core tests public void testNewEntry() { for (CacheBuilder builder : allEntryTypeMakers()) { LocalCache map = makeLocalCache(builder); Object keyOne = new Object(); Object valueOne = new Object(); int hashOne = map.hash(keyOne); ReferenceEntry entryOne = map.newEntry(keyOne, hashOne, null); ValueReference valueRefOne = map.newValueReference(entryOne, valueOne, 1); assertSame(valueOne, valueRefOne.get()); entryOne.setValueReference(valueRefOne); assertSame(keyOne, entryOne.getKey()); assertEquals(hashOne, entryOne.getHash()); assertNull(entryOne.getNext()); assertSame(valueRefOne, entryOne.getValueReference()); Object keyTwo = new Object(); Object valueTwo = new Object(); int hashTwo = map.hash(keyTwo); ReferenceEntry entryTwo = map.newEntry(keyTwo, hashTwo, entryOne); ValueReference valueRefTwo = map.newValueReference(entryTwo, valueTwo, 1); assertSame(valueTwo, valueRefTwo.get()); entryTwo.setValueReference(valueRefTwo); assertSame(keyTwo, entryTwo.getKey()); assertEquals(hashTwo, entryTwo.getHash()); assertSame(entryOne, entryTwo.getNext()); assertSame(valueRefTwo, entryTwo.getValueReference()); } } public void testCopyEntry() { for (CacheBuilder builder : allEntryTypeMakers()) { LocalCache map = makeLocalCache(builder); Object keyOne = new Object(); Object valueOne = new Object(); int hashOne = map.hash(keyOne); ReferenceEntry entryOne = map.newEntry(keyOne, hashOne, 
null); entryOne.setValueReference(map.newValueReference(entryOne, valueOne, 1)); Object keyTwo = new Object(); Object valueTwo = new Object(); int hashTwo = map.hash(keyTwo); ReferenceEntry entryTwo = map.newEntry(keyTwo, hashTwo, entryOne); entryTwo.setValueReference(map.newValueReference(entryTwo, valueTwo, 1)); if (map.usesAccessQueue()) { LocalCache.connectAccessOrder(entryOne, entryTwo); } if (map.usesWriteQueue()) { LocalCache.connectWriteOrder(entryOne, entryTwo); } assertConnected(map, entryOne, entryTwo); ReferenceEntry copyOne = map.copyEntry(entryOne, null); assertSame(keyOne, entryOne.getKey()); assertEquals(hashOne, entryOne.getHash()); assertNull(entryOne.getNext()); assertSame(valueOne, copyOne.getValueReference().get()); assertConnected(map, copyOne, entryTwo); ReferenceEntry copyTwo = map.copyEntry(entryTwo, copyOne); assertSame(keyTwo, copyTwo.getKey()); assertEquals(hashTwo, copyTwo.getHash()); assertSame(copyOne, copyTwo.getNext()); assertSame(valueTwo, copyTwo.getValueReference().get()); assertConnected(map, copyOne, copyTwo); } } private static void assertConnected( LocalCache map, ReferenceEntry one, ReferenceEntry two) { if (map.usesWriteQueue()) { assertSame(two, one.getNextInWriteQueue()); } if (map.usesAccessQueue()) { assertSame(two, one.getNextInAccessQueue()); } } public void testSegmentGetAndContains() { FakeTicker ticker = new FakeTicker(); LocalCache map = makeLocalCache(createCacheBuilder() .concurrencyLevel(1) .ticker(ticker) .expireAfterAccess(1, TimeUnit.NANOSECONDS)); Segment segment = map.segments[0]; // TODO(fry): check recency ordering Object key = new Object(); int hash = map.hash(key); Object value = new Object(); AtomicReferenceArray> table = segment.table; int index = hash & (table.length() - 1); ReferenceEntry entry = map.newEntry(key, hash, null); ValueReference valueRef = map.newValueReference(entry, value, 1); entry.setValueReference(valueRef); assertNull(segment.get(key, hash)); // count == 0 table.set(index, entry); assertNull(segment.get(key, hash)); assertFalse(segment.containsKey(key, hash)); assertFalse(segment.containsValue(value)); // count == 1 segment.count++; assertSame(value, segment.get(key, hash)); assertTrue(segment.containsKey(key, hash)); assertTrue(segment.containsValue(value)); // don't see absent values now that count > 0 assertNull(segment.get(new Object(), hash)); // null key DummyEntry nullEntry = DummyEntry.create(null, hash, entry); Object nullValue = new Object(); ValueReference nullValueRef = map.newValueReference(nullEntry, nullValue, 1); nullEntry.setValueReference(nullValueRef); table.set(index, nullEntry); // skip the null key assertSame(value, segment.get(key, hash)); assertTrue(segment.containsKey(key, hash)); assertTrue(segment.containsValue(value)); assertFalse(segment.containsValue(nullValue)); // hash collision DummyEntry dummy = DummyEntry.create(new Object(), hash, entry); Object dummyValue = new Object(); ValueReference dummyValueRef = map.newValueReference(dummy, dummyValue, 1); dummy.setValueReference(dummyValueRef); table.set(index, dummy); assertSame(value, segment.get(key, hash)); assertTrue(segment.containsKey(key, hash)); assertTrue(segment.containsValue(value)); assertTrue(segment.containsValue(dummyValue)); // key collision dummy = DummyEntry.create(key, hash, entry); dummyValue = new Object(); dummyValueRef = map.newValueReference(dummy, dummyValue, 1); dummy.setValueReference(dummyValueRef); table.set(index, dummy); // returns the most recent entry assertSame(dummyValue, segment.get(key, 
hash)); assertTrue(segment.containsKey(key, hash)); assertTrue(segment.containsValue(value)); assertTrue(segment.containsValue(dummyValue)); // expired dummy.setAccessTime(ticker.read() - 2); assertNull(segment.get(key, hash)); assertFalse(segment.containsKey(key, hash)); assertTrue(segment.containsValue(value)); assertFalse(segment.containsValue(dummyValue)); } public void testSegmentReplaceValue() { LocalCache map = makeLocalCache(createCacheBuilder().concurrencyLevel(1).expireAfterAccess(99999, SECONDS)); Segment segment = map.segments[0]; // TODO(fry): check recency ordering Object key = new Object(); int hash = map.hash(key); Object oldValue = new Object(); Object newValue = new Object(); AtomicReferenceArray> table = segment.table; int index = hash & (table.length() - 1); DummyEntry entry = DummyEntry.create(key, hash, null); DummyValueReference oldValueRef = DummyValueReference.create(oldValue); entry.setValueReference(oldValueRef); // no entry assertFalse(segment.replace(key, hash, oldValue, newValue)); assertEquals(0, segment.count); // same value table.set(index, entry); segment.count++; assertEquals(1, segment.count); assertSame(oldValue, segment.get(key, hash)); assertTrue(segment.replace(key, hash, oldValue, newValue)); assertEquals(1, segment.count); assertSame(newValue, segment.get(key, hash)); // different value assertFalse(segment.replace(key, hash, oldValue, newValue)); assertEquals(1, segment.count); assertSame(newValue, segment.get(key, hash)); // cleared entry.setValueReference(oldValueRef); assertSame(oldValue, segment.get(key, hash)); oldValueRef.clear(); assertFalse(segment.replace(key, hash, oldValue, newValue)); assertEquals(0, segment.count); assertNull(segment.get(key, hash)); } public void testSegmentReplace() { LocalCache map = makeLocalCache(createCacheBuilder().concurrencyLevel(1).expireAfterAccess(99999, SECONDS)); Segment segment = map.segments[0]; // TODO(fry): check recency ordering Object key = new Object(); int hash = map.hash(key); Object oldValue = new Object(); Object newValue = new Object(); AtomicReferenceArray> table = segment.table; int index = hash & (table.length() - 1); DummyEntry entry = DummyEntry.create(key, hash, null); DummyValueReference oldValueRef = DummyValueReference.create(oldValue); entry.setValueReference(oldValueRef); // no entry assertNull(segment.replace(key, hash, newValue)); assertEquals(0, segment.count); // same key table.set(index, entry); segment.count++; assertEquals(1, segment.count); assertSame(oldValue, segment.get(key, hash)); assertSame(oldValue, segment.replace(key, hash, newValue)); assertEquals(1, segment.count); assertSame(newValue, segment.get(key, hash)); // cleared entry.setValueReference(oldValueRef); assertSame(oldValue, segment.get(key, hash)); oldValueRef.clear(); assertNull(segment.replace(key, hash, newValue)); assertEquals(0, segment.count); assertNull(segment.get(key, hash)); } public void testSegmentPut() { LocalCache map = makeLocalCache(createCacheBuilder().concurrencyLevel(1).expireAfterAccess(99999, SECONDS)); Segment segment = map.segments[0]; // TODO(fry): check recency ordering Object key = new Object(); int hash = map.hash(key); Object oldValue = new Object(); Object newValue = new Object(); // no entry assertEquals(0, segment.count); assertNull(segment.put(key, hash, oldValue, false)); assertEquals(1, segment.count); // same key assertSame(oldValue, segment.put(key, hash, newValue, false)); assertEquals(1, segment.count); assertSame(newValue, segment.get(key, hash)); // cleared 
ReferenceEntry entry = segment.getEntry(key, hash); DummyValueReference oldValueRef = DummyValueReference.create(oldValue); entry.setValueReference(oldValueRef); assertSame(oldValue, segment.get(key, hash)); oldValueRef.clear(); assertNull(segment.put(key, hash, newValue, false)); assertEquals(1, segment.count); assertSame(newValue, segment.get(key, hash)); } public void testSegmentPutIfAbsent() { LocalCache map = makeLocalCache(createCacheBuilder().concurrencyLevel(1).expireAfterAccess(99999, SECONDS)); Segment segment = map.segments[0]; // TODO(fry): check recency ordering Object key = new Object(); int hash = map.hash(key); Object oldValue = new Object(); Object newValue = new Object(); // no entry assertEquals(0, segment.count); assertNull(segment.put(key, hash, oldValue, true)); assertEquals(1, segment.count); // same key assertSame(oldValue, segment.put(key, hash, newValue, true)); assertEquals(1, segment.count); assertSame(oldValue, segment.get(key, hash)); // cleared ReferenceEntry entry = segment.getEntry(key, hash); DummyValueReference oldValueRef = DummyValueReference.create(oldValue); entry.setValueReference(oldValueRef); assertSame(oldValue, segment.get(key, hash)); oldValueRef.clear(); assertNull(segment.put(key, hash, newValue, true)); assertEquals(1, segment.count); assertSame(newValue, segment.get(key, hash)); } public void testSegmentPut_expand() { LocalCache map = makeLocalCache(createCacheBuilder().concurrencyLevel(1).initialCapacity(1)); Segment segment = map.segments[0]; assertEquals(1, segment.table.length()); int count = 1024; for (int i = 0; i < count; i++) { Object key = new Object(); Object value = new Object(); int hash = map.hash(key); assertNull(segment.put(key, hash, value, false)); assertTrue(segment.table.length() > i); } } public void testSegmentPut_evict() { int maxSize = 10; LocalCache map = makeLocalCache(createCacheBuilder().concurrencyLevel(1).maximumSize(maxSize)); // manually add elements to avoid eviction int originalCount = 1024; LinkedHashMap originalMap = Maps.newLinkedHashMap(); for (int i = 0; i < originalCount; i++) { Object key = new Object(); Object value = new Object(); map.put(key, value); originalMap.put(key, value); if (i >= maxSize) { Iterator it = originalMap.keySet().iterator(); it.next(); it.remove(); } assertEquals(originalMap, map); } } public void testSegmentStoreComputedValue() { QueuingRemovalListener listener = queuingRemovalListener(); LocalCache map = makeLocalCache(createCacheBuilder() .concurrencyLevel(1) .removalListener(listener)); Segment segment = map.segments[0]; Object key = new Object(); int hash = map.hash(key); AtomicReferenceArray> table = segment.table; int index = hash & (table.length() - 1); DummyEntry entry = DummyEntry.create(key, hash, null); LoadingValueReference valueRef = new LoadingValueReference(); entry.setValueReference(valueRef); // absent Object value = new Object(); assertTrue(listener.isEmpty()); assertEquals(0, segment.count); assertNull(segment.get(key, hash)); assertTrue(segment.storeLoadedValue(key, hash, valueRef, value)); assertSame(value, segment.get(key, hash)); assertEquals(1, segment.count); assertTrue(listener.isEmpty()); // clobbered Object value2 = new Object(); assertFalse(segment.storeLoadedValue(key, hash, valueRef, value2)); assertEquals(1, segment.count); assertSame(value, segment.get(key, hash)); RemovalNotification notification = listener.remove(); assertEquals(immutableEntry(key, value2), notification); assertEquals(RemovalCause.REPLACED, notification.getCause()); 
assertTrue(listener.isEmpty()); // inactive Object value3 = new Object(); map.clear(); listener.clear(); assertEquals(0, segment.count); table.set(index, entry); assertTrue(segment.storeLoadedValue(key, hash, valueRef, value3)); assertSame(value3, segment.get(key, hash)); assertEquals(1, segment.count); assertTrue(listener.isEmpty()); // replaced Object value4 = new Object(); DummyValueReference value3Ref = DummyValueReference.create(value3); valueRef = new LoadingValueReference(value3Ref); entry.setValueReference(valueRef); table.set(index, entry); assertSame(value3, segment.get(key, hash)); assertEquals(1, segment.count); assertTrue(segment.storeLoadedValue(key, hash, valueRef, value4)); assertSame(value4, segment.get(key, hash)); assertEquals(1, segment.count); notification = listener.remove(); assertEquals(immutableEntry(key, value3), notification); assertEquals(RemovalCause.REPLACED, notification.getCause()); assertTrue(listener.isEmpty()); // collected entry.setValueReference(valueRef); table.set(index, entry); assertSame(value3, segment.get(key, hash)); assertEquals(1, segment.count); value3Ref.clear(); assertTrue(segment.storeLoadedValue(key, hash, valueRef, value4)); assertSame(value4, segment.get(key, hash)); assertEquals(1, segment.count); notification = listener.remove(); assertEquals(immutableEntry(key, null), notification); assertEquals(RemovalCause.COLLECTED, notification.getCause()); assertTrue(listener.isEmpty()); } public void testSegmentRemove() { LocalCache map = makeLocalCache(createCacheBuilder().concurrencyLevel(1)); Segment segment = map.segments[0]; Object key = new Object(); int hash = map.hash(key); Object oldValue = new Object(); AtomicReferenceArray> table = segment.table; int index = hash & (table.length() - 1); DummyEntry entry = DummyEntry.create(key, hash, null); DummyValueReference oldValueRef = DummyValueReference.create(oldValue); entry.setValueReference(oldValueRef); // no entry assertEquals(0, segment.count); assertNull(segment.remove(key, hash)); assertEquals(0, segment.count); // same key table.set(index, entry); segment.count++; assertEquals(1, segment.count); assertSame(oldValue, segment.get(key, hash)); assertSame(oldValue, segment.remove(key, hash)); assertEquals(0, segment.count); assertNull(segment.get(key, hash)); // cleared table.set(index, entry); segment.count++; assertEquals(1, segment.count); assertSame(oldValue, segment.get(key, hash)); oldValueRef.clear(); assertNull(segment.remove(key, hash)); assertEquals(0, segment.count); assertNull(segment.get(key, hash)); } public void testSegmentRemoveValue() { LocalCache map = makeLocalCache(createCacheBuilder().concurrencyLevel(1)); Segment segment = map.segments[0]; Object key = new Object(); int hash = map.hash(key); Object oldValue = new Object(); Object newValue = new Object(); AtomicReferenceArray> table = segment.table; int index = hash & (table.length() - 1); DummyEntry entry = DummyEntry.create(key, hash, null); DummyValueReference oldValueRef = DummyValueReference.create(oldValue); entry.setValueReference(oldValueRef); // no entry assertEquals(0, segment.count); assertNull(segment.remove(key, hash)); assertEquals(0, segment.count); // same value table.set(index, entry); segment.count++; assertEquals(1, segment.count); assertSame(oldValue, segment.get(key, hash)); assertTrue(segment.remove(key, hash, oldValue)); assertEquals(0, segment.count); assertNull(segment.get(key, hash)); // different value table.set(index, entry); segment.count++; assertEquals(1, segment.count); 
assertSame(oldValue, segment.get(key, hash)); assertFalse(segment.remove(key, hash, newValue)); assertEquals(1, segment.count); assertSame(oldValue, segment.get(key, hash)); // cleared assertSame(oldValue, segment.get(key, hash)); oldValueRef.clear(); assertFalse(segment.remove(key, hash, oldValue)); assertEquals(0, segment.count); assertNull(segment.get(key, hash)); } public void testExpand() { LocalCache map = makeLocalCache(createCacheBuilder().concurrencyLevel(1).initialCapacity(1)); Segment segment = map.segments[0]; assertEquals(1, segment.table.length()); // manually add elements to avoid expansion int originalCount = 1024; ReferenceEntry entry = null; for (int i = 0; i < originalCount; i++) { Object key = new Object(); Object value = new Object(); int hash = map.hash(key); // chain all entries together as we only have a single bucket entry = map.newEntry(key, hash, entry); ValueReference valueRef = map.newValueReference(entry, value, 1); entry.setValueReference(valueRef); } segment.table.set(0, entry); segment.count = originalCount; ImmutableMap originalMap = ImmutableMap.copyOf(map); assertEquals(originalCount, originalMap.size()); assertEquals(originalMap, map); for (int i = 1; i <= originalCount * 2; i *= 2) { if (i > 1) { segment.expand(); } assertEquals(i, segment.table.length()); assertEquals(originalCount, countLiveEntries(map, 0)); assertEquals(originalCount, segment.count); assertEquals(originalMap, map); } } public void testGetCausesExpansion() throws ExecutionException { for (int count = 1; count <= 100; count++) { LocalCache map = makeLocalCache(createCacheBuilder().concurrencyLevel(1).initialCapacity(1)); Segment segment = map.segments[0]; assertEquals(1, segment.table.length()); for (int i = 0; i < count; i++) { Object key = new Object(); final Object value = new Object(); segment.get(key, key.hashCode(), new CacheLoader() { @Override public Object load(Object key) { return value; } }); } assertEquals(count, segment.count); assertTrue(count <= segment.threshold); assertTrue(count <= (segment.table.length() * 3 / 4)); assertTrue(count > (segment.table.length() * 3 / 8)); } } public void testPutCausesExpansion() { for (int count = 1; count <= 100; count++) { LocalCache map = makeLocalCache(createCacheBuilder().concurrencyLevel(1).initialCapacity(1)); Segment segment = map.segments[0]; assertEquals(1, segment.table.length()); for (int i = 0; i < count; i++) { Object key = new Object(); Object value = new Object(); segment.put(key, key.hashCode(), value, true); } assertEquals(count, segment.count); assertTrue(count <= segment.threshold); assertTrue(count <= (segment.table.length() * 3 / 4)); assertTrue(count > (segment.table.length() * 3 / 8)); } } public void testReclaimKey() { CountingRemovalListener listener = countingRemovalListener(); LocalCache map = makeLocalCache(createCacheBuilder() .concurrencyLevel(1) .initialCapacity(1) .maximumSize(SMALL_MAX_SIZE) .expireAfterWrite(99999, SECONDS) .removalListener(listener)); Segment segment = map.segments[0]; AtomicReferenceArray> table = segment.table; assertEquals(1, table.length()); // create 3 objects and chain them together Object keyOne = new Object(); Object valueOne = new Object(); int hashOne = map.hash(keyOne); DummyEntry entryOne = createDummyEntry(keyOne, hashOne, valueOne, null); Object keyTwo = new Object(); Object valueTwo = new Object(); int hashTwo = map.hash(keyTwo); DummyEntry entryTwo = createDummyEntry(keyTwo, hashTwo, valueTwo, entryOne); Object keyThree = new Object(); Object valueThree = new Object(); 
int hashThree = map.hash(keyThree); DummyEntry entryThree = createDummyEntry(keyThree, hashThree, valueThree, entryTwo); // absent assertEquals(0, listener.getCount()); assertFalse(segment.reclaimKey(entryOne, hashOne)); assertEquals(0, listener.getCount()); table.set(0, entryOne); assertFalse(segment.reclaimKey(entryTwo, hashTwo)); assertEquals(0, listener.getCount()); table.set(0, entryTwo); assertFalse(segment.reclaimKey(entryThree, hashThree)); assertEquals(0, listener.getCount()); // present table.set(0, entryOne); segment.count = 1; assertTrue(segment.reclaimKey(entryOne, hashOne)); assertEquals(1, listener.getCount()); assertSame(keyOne, listener.getLastEvictedKey()); assertSame(valueOne, listener.getLastEvictedValue()); assertTrue(map.removalNotificationQueue.isEmpty()); assertFalse(segment.accessQueue.contains(entryOne)); assertFalse(segment.writeQueue.contains(entryOne)); assertEquals(0, segment.count); assertNull(table.get(0)); } public void testRemoveEntryFromChain() { LocalCache map = makeLocalCache(createCacheBuilder().concurrencyLevel(1)); Segment segment = map.segments[0]; // create 3 objects and chain them together Object keyOne = new Object(); Object valueOne = new Object(); int hashOne = map.hash(keyOne); DummyEntry entryOne = createDummyEntry(keyOne, hashOne, valueOne, null); Object keyTwo = new Object(); Object valueTwo = new Object(); int hashTwo = map.hash(keyTwo); DummyEntry entryTwo = createDummyEntry(keyTwo, hashTwo, valueTwo, entryOne); Object keyThree = new Object(); Object valueThree = new Object(); int hashThree = map.hash(keyThree); DummyEntry entryThree = createDummyEntry(keyThree, hashThree, valueThree, entryTwo); // alone assertNull(segment.removeEntryFromChain(entryOne, entryOne)); // head assertSame(entryOne, segment.removeEntryFromChain(entryTwo, entryTwo)); // middle ReferenceEntry newFirst = segment.removeEntryFromChain(entryThree, entryTwo); assertSame(keyThree, newFirst.getKey()); assertSame(valueThree, newFirst.getValueReference().get()); assertEquals(hashThree, newFirst.getHash()); assertSame(entryOne, newFirst.getNext()); // tail (remaining entries are copied in reverse order) newFirst = segment.removeEntryFromChain(entryThree, entryOne); assertSame(keyTwo, newFirst.getKey()); assertSame(valueTwo, newFirst.getValueReference().get()); assertEquals(hashTwo, newFirst.getHash()); newFirst = newFirst.getNext(); assertSame(keyThree, newFirst.getKey()); assertSame(valueThree, newFirst.getValueReference().get()); assertEquals(hashThree, newFirst.getHash()); assertNull(newFirst.getNext()); } public void testExpand_cleanup() { LocalCache map = makeLocalCache(createCacheBuilder().concurrencyLevel(1).initialCapacity(1)); Segment segment = map.segments[0]; assertEquals(1, segment.table.length()); // manually add elements to avoid expansion // 1/3 null keys, 1/3 null values int originalCount = 1024; ReferenceEntry entry = null; for (int i = 0; i < originalCount; i++) { Object key = new Object(); Object value = (i % 3 == 0) ? 
null : new Object(); int hash = map.hash(key); if (i % 3 == 1) { key = null; } // chain all entries together as we only have a single bucket entry = DummyEntry.create(key, hash, entry); ValueReference valueRef = DummyValueReference.create(value); entry.setValueReference(valueRef); } segment.table.set(0, entry); segment.count = originalCount; int liveCount = originalCount / 3; assertEquals(1, segment.table.length()); assertEquals(liveCount, countLiveEntries(map, 0)); ImmutableMap originalMap = ImmutableMap.copyOf(map); assertEquals(liveCount, originalMap.size()); // can't compare map contents until cleanup occurs for (int i = 1; i <= originalCount * 2; i *= 2) { if (i > 1) { segment.expand(); } assertEquals(i, segment.table.length()); assertEquals(liveCount, countLiveEntries(map, 0)); // expansion cleanup is sloppy, with a goal of avoiding unnecessary copies assertTrue(segment.count >= liveCount); assertTrue(segment.count <= originalCount); assertEquals(originalMap, ImmutableMap.copyOf(map)); } } private static int countLiveEntries(LocalCache map, long now) { int result = 0; for (Segment segment : map.segments) { AtomicReferenceArray> table = segment.table; for (int i = 0; i < table.length(); i++) { for (ReferenceEntry e = table.get(i); e != null; e = e.getNext()) { if (map.isLive(e, now)) { result++; } } } } return result; } public void testClear() { LocalCache map = makeLocalCache(createCacheBuilder() .concurrencyLevel(1) .initialCapacity(1) .maximumSize(SMALL_MAX_SIZE) .expireAfterWrite(99999, SECONDS)); Segment segment = map.segments[0]; AtomicReferenceArray> table = segment.table; assertEquals(1, table.length()); Object key = new Object(); Object value = new Object(); int hash = map.hash(key); DummyEntry entry = createDummyEntry(key, hash, value, null); segment.recordWrite(entry, 1, map.ticker.read()); segment.table.set(0, entry); segment.readCount.incrementAndGet(); segment.count = 1; segment.totalWeight = 1; assertSame(entry, table.get(0)); assertSame(entry, segment.accessQueue.peek()); assertSame(entry, segment.writeQueue.peek()); segment.clear(); assertNull(table.get(0)); assertTrue(segment.accessQueue.isEmpty()); assertTrue(segment.writeQueue.isEmpty()); assertEquals(0, segment.readCount.get()); assertEquals(0, segment.count); assertEquals(0, segment.totalWeight); } public void testClear_notification() { QueuingRemovalListener listener = queuingRemovalListener(); LocalCache map = makeLocalCache(createCacheBuilder() .concurrencyLevel(1) .initialCapacity(1) .maximumSize(SMALL_MAX_SIZE) .expireAfterWrite(99999, SECONDS) .removalListener(listener)); Segment segment = map.segments[0]; AtomicReferenceArray> table = segment.table; assertEquals(1, table.length()); Object key = new Object(); Object value = new Object(); int hash = map.hash(key); DummyEntry entry = createDummyEntry(key, hash, value, null); segment.recordWrite(entry, 1, map.ticker.read()); segment.table.set(0, entry); segment.readCount.incrementAndGet(); segment.count = 1; segment.totalWeight = 1; assertSame(entry, table.get(0)); assertSame(entry, segment.accessQueue.peek()); assertSame(entry, segment.writeQueue.peek()); segment.clear(); assertNull(table.get(0)); assertTrue(segment.accessQueue.isEmpty()); assertTrue(segment.writeQueue.isEmpty()); assertEquals(0, segment.readCount.get()); assertEquals(0, segment.count); assertEquals(0, segment.totalWeight); assertNotified(listener, key, value, RemovalCause.EXPLICIT); } public void testRemoveEntry() { LocalCache map = makeLocalCache(createCacheBuilder() .concurrencyLevel(1) 
.initialCapacity(1) .maximumSize(SMALL_MAX_SIZE) .expireAfterWrite(99999, SECONDS) .removalListener(countingRemovalListener())); Segment segment = map.segments[0]; AtomicReferenceArray> table = segment.table; assertEquals(1, table.length()); Object key = new Object(); Object value = new Object(); int hash = map.hash(key); DummyEntry entry = createDummyEntry(key, hash, value, null); // remove absent assertFalse(segment.removeEntry(entry, hash, RemovalCause.COLLECTED)); // remove live segment.recordWrite(entry, 1, map.ticker.read()); table.set(0, entry); segment.count = 1; assertTrue(segment.removeEntry(entry, hash, RemovalCause.COLLECTED)); assertNotificationEnqueued(map, key, value, hash); assertTrue(map.removalNotificationQueue.isEmpty()); assertFalse(segment.accessQueue.contains(entry)); assertFalse(segment.writeQueue.contains(entry)); assertEquals(0, segment.count); assertNull(table.get(0)); } public void testReclaimValue() { CountingRemovalListener listener = countingRemovalListener(); LocalCache map = makeLocalCache(createCacheBuilder() .concurrencyLevel(1) .initialCapacity(1) .maximumSize(SMALL_MAX_SIZE) .expireAfterWrite(99999, SECONDS) .removalListener(listener)); Segment segment = map.segments[0]; AtomicReferenceArray> table = segment.table; assertEquals(1, table.length()); Object key = new Object(); Object value = new Object(); int hash = map.hash(key); DummyEntry entry = DummyEntry.create(key, hash, null); DummyValueReference valueRef = DummyValueReference.create(value); entry.setValueReference(valueRef); // reclaim absent assertFalse(segment.reclaimValue(key, hash, valueRef)); // reclaim live segment.recordWrite(entry, 1, map.ticker.read()); table.set(0, entry); segment.count = 1; assertTrue(segment.reclaimValue(key, hash, valueRef)); assertEquals(1, listener.getCount()); assertSame(key, listener.getLastEvictedKey()); assertSame(value, listener.getLastEvictedValue()); assertTrue(map.removalNotificationQueue.isEmpty()); assertFalse(segment.accessQueue.contains(entry)); assertFalse(segment.writeQueue.contains(entry)); assertEquals(0, segment.count); assertNull(table.get(0)); // reclaim wrong value reference table.set(0, entry); DummyValueReference otherValueRef = DummyValueReference.create(value); entry.setValueReference(otherValueRef); assertFalse(segment.reclaimValue(key, hash, valueRef)); assertEquals(1, listener.getCount()); assertTrue(segment.reclaimValue(key, hash, otherValueRef)); assertEquals(2, listener.getCount()); assertSame(key, listener.getLastEvictedKey()); assertSame(value, listener.getLastEvictedValue()); } public void testRemoveComputingValue() { LocalCache map = makeLocalCache(createCacheBuilder() .concurrencyLevel(1) .initialCapacity(1) .maximumSize(SMALL_MAX_SIZE) .expireAfterWrite(99999, SECONDS) .removalListener(countingRemovalListener())); Segment segment = map.segments[0]; AtomicReferenceArray> table = segment.table; assertEquals(1, table.length()); Object key = new Object(); int hash = map.hash(key); DummyEntry entry = DummyEntry.create(key, hash, null); LoadingValueReference valueRef = new LoadingValueReference(); entry.setValueReference(valueRef); // absent assertFalse(segment.removeLoadingValue(key, hash, valueRef)); // live table.set(0, entry); // don't increment count; this is used during computation assertTrue(segment.removeLoadingValue(key, hash, valueRef)); // no notification sent with removeLoadingValue assertTrue(map.removalNotificationQueue.isEmpty()); assertEquals(0, segment.count); assertNull(table.get(0)); // active Object value = new 
Object(); DummyValueReference previousRef = DummyValueReference.create(value); valueRef = new LoadingValueReference(previousRef); entry.setValueReference(valueRef); table.set(0, entry); segment.count = 1; assertTrue(segment.removeLoadingValue(key, hash, valueRef)); assertSame(entry, table.get(0)); assertSame(value, segment.get(key, hash)); // wrong value reference table.set(0, entry); DummyValueReference otherValueRef = DummyValueReference.create(value); entry.setValueReference(otherValueRef); assertFalse(segment.removeLoadingValue(key, hash, valueRef)); entry.setValueReference(valueRef); assertTrue(segment.removeLoadingValue(key, hash, valueRef)); } private static void assertNotificationEnqueued( LocalCache map, K key, V value, int hash) { RemovalNotification notification = map.removalNotificationQueue.poll(); assertSame(key, notification.getKey()); assertSame(value, notification.getValue()); } // Segment eviction tests public void testDrainRecencyQueueOnWrite() { for (CacheBuilder builder : allEvictingMakers()) { LocalCache map = makeLocalCache(builder.concurrencyLevel(1)); Segment segment = map.segments[0]; if (segment.recencyQueue != DISCARDING_QUEUE) { Object keyOne = new Object(); Object valueOne = new Object(); Object keyTwo = new Object(); Object valueTwo = new Object(); map.put(keyOne, valueOne); assertTrue(segment.recencyQueue.isEmpty()); for (int i = 0; i < DRAIN_THRESHOLD / 2; i++) { map.get(keyOne); } assertFalse(segment.recencyQueue.isEmpty()); map.put(keyTwo, valueTwo); assertTrue(segment.recencyQueue.isEmpty()); } } } public void testDrainRecencyQueueOnRead() { for (CacheBuilder builder : allEvictingMakers()) { LocalCache map = makeLocalCache(builder.concurrencyLevel(1)); Segment segment = map.segments[0]; if (segment.recencyQueue != DISCARDING_QUEUE) { Object keyOne = new Object(); Object valueOne = new Object(); // repeated get of the same key map.put(keyOne, valueOne); assertTrue(segment.recencyQueue.isEmpty()); for (int i = 0; i < DRAIN_THRESHOLD / 2; i++) { map.get(keyOne); } assertFalse(segment.recencyQueue.isEmpty()); for (int i = 0; i < DRAIN_THRESHOLD * 2; i++) { map.get(keyOne); assertTrue(segment.recencyQueue.size() <= DRAIN_THRESHOLD); } // get over many different keys for (int i = 0; i < DRAIN_THRESHOLD * 2; i++) { map.put(new Object(), new Object()); } assertTrue(segment.recencyQueue.isEmpty()); for (int i = 0; i < DRAIN_THRESHOLD / 2; i++) { map.get(keyOne); } assertFalse(segment.recencyQueue.isEmpty()); for (Object key : map.keySet()) { map.get(key); assertTrue(segment.recencyQueue.size() <= DRAIN_THRESHOLD); } } } } public void testRecordRead() { for (CacheBuilder builder : allEvictingMakers()) { LocalCache map = makeLocalCache(builder.concurrencyLevel(1)); Segment segment = map.segments[0]; List> writeOrder = Lists.newLinkedList(); List> readOrder = Lists.newLinkedList(); for (int i = 0; i < DRAIN_THRESHOLD * 2; i++) { Object key = new Object(); int hash = map.hash(key); Object value = new Object(); ReferenceEntry entry = createDummyEntry(key, hash, value, null); // must recordRead for drainRecencyQueue to believe this entry is live segment.recordWrite(entry, 1, map.ticker.read()); writeOrder.add(entry); readOrder.add(entry); } checkEvictionQueues(map, segment, readOrder, writeOrder); checkExpirationTimes(map); // access some of the elements Random random = new Random(); List> reads = Lists.newArrayList(); Iterator> i = readOrder.iterator(); while (i.hasNext()) { ReferenceEntry entry = i.next(); if (random.nextBoolean()) { segment.recordRead(entry, 
  public void testRecordReadOnGet() {
    for (CacheBuilder<Object, Object> builder : allEvictingMakers()) {
      LocalCache<Object, Object> map = makeLocalCache(builder.concurrencyLevel(1));
      Segment<Object, Object> segment = map.segments[0];
      List<ReferenceEntry<Object, Object>> writeOrder = Lists.newLinkedList();
      List<ReferenceEntry<Object, Object>> readOrder = Lists.newLinkedList();
      for (int i = 0; i < DRAIN_THRESHOLD * 2; i++) {
        Object key = new Object();
        int hash = map.hash(key);
        Object value = new Object();

        map.put(key, value);
        ReferenceEntry<Object, Object> entry = segment.getEntry(key, hash);
        writeOrder.add(entry);
        readOrder.add(entry);
      }

      checkEvictionQueues(map, segment, readOrder, writeOrder);
      checkExpirationTimes(map);
      assertTrue(segment.recencyQueue.isEmpty());

      // access some of the elements
      Random random = new Random();
      List<ReferenceEntry<Object, Object>> reads = Lists.newArrayList();
      Iterator<ReferenceEntry<Object, Object>> i = readOrder.iterator();
      while (i.hasNext()) {
        ReferenceEntry<Object, Object> entry = i.next();
        if (random.nextBoolean()) {
          map.get(entry.getKey());
          reads.add(entry);
          i.remove();
          assertTrue(segment.recencyQueue.size() <= DRAIN_THRESHOLD);
        }
      }
      int undrainedIndex = reads.size() - segment.recencyQueue.size();
      checkAndDrainRecencyQueue(map, segment, reads.subList(undrainedIndex, reads.size()));
      readOrder.addAll(reads);
      checkEvictionQueues(map, segment, readOrder, writeOrder);
      checkExpirationTimes(map);
    }
  }

  public void testRecordWrite() {
    for (CacheBuilder<Object, Object> builder : allEvictingMakers()) {
      LocalCache<Object, Object> map = makeLocalCache(builder.concurrencyLevel(1));
      Segment<Object, Object> segment = map.segments[0];
      List<ReferenceEntry<Object, Object>> writeOrder = Lists.newLinkedList();
      for (int i = 0; i < DRAIN_THRESHOLD * 2; i++) {
        Object key = new Object();
        int hash = map.hash(key);
        Object value = new Object();

        ReferenceEntry<Object, Object> entry = createDummyEntry(key, hash, value, null);
        // must recordWrite for drainRecencyQueue to believe this entry is live
        segment.recordWrite(entry, 1, map.ticker.read());
        writeOrder.add(entry);
      }

      checkEvictionQueues(map, segment, writeOrder, writeOrder);
      checkExpirationTimes(map);

      // write to some of the elements
      Random random = new Random();
      List<ReferenceEntry<Object, Object>> writes = Lists.newArrayList();
      Iterator<ReferenceEntry<Object, Object>> i = writeOrder.iterator();
      while (i.hasNext()) {
        ReferenceEntry<Object, Object> entry = i.next();
        if (random.nextBoolean()) {
          segment.recordWrite(entry, 1, map.ticker.read());
          writes.add(entry);
          i.remove();
        }
      }

      writeOrder.addAll(writes);
      checkEvictionQueues(map, segment, writeOrder, writeOrder);
      checkExpirationTimes(map);
    }
  }

  static <K, V> void checkAndDrainRecencyQueue(
      LocalCache<K, V> map, Segment<K, V> segment, List<ReferenceEntry<K, V>> reads) {
    if (map.evictsBySize() || map.expiresAfterAccess()) {
      assertSameEntries(reads, ImmutableList.copyOf(segment.recencyQueue));
    }
    segment.drainRecencyQueue();
  }

  static <K, V> void checkEvictionQueues(
      LocalCache<K, V> map,
      Segment<K, V> segment,
      List<ReferenceEntry<K, V>> readOrder,
      List<ReferenceEntry<K, V>> writeOrder) {
    if (map.evictsBySize() || map.expiresAfterAccess()) {
      assertSameEntries(readOrder, ImmutableList.copyOf(segment.accessQueue));
    }
    if (map.expiresAfterWrite()) {
      assertSameEntries(writeOrder, ImmutableList.copyOf(segment.writeQueue));
    }
  }

  private static <K, V> void assertSameEntries(
      List<ReferenceEntry<K, V>> expectedEntries, List<ReferenceEntry<K, V>> actualEntries) {
    int size = expectedEntries.size();
    assertEquals(size, actualEntries.size());
    for (int i = 0; i < size; i++) {
      ReferenceEntry<K, V> expectedEntry = expectedEntries.get(i);
      ReferenceEntry<K, V> actualEntry = actualEntries.get(i);
      assertSame(expectedEntry.getKey(), actualEntry.getKey());
      assertSame(expectedEntry.getValueReference().get(), actualEntry.getValueReference().get());
    }
  }
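  /**
   * Verifies that the entries in each segment's recency, access, and write queues appear in
   * non-decreasing access/write timestamp order. No-op unless the cache is configured to expire.
   */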
  static <K, V> void checkExpirationTimes(LocalCache<K, V> map) {
    if (!map.expires()) {
      return;
    }

    for (Segment<K, V> segment : map.segments) {
      long lastAccessTime = 0;
      long lastWriteTime = 0;
      for (ReferenceEntry<K, V> e : segment.recencyQueue) {
        long accessTime = e.getAccessTime();
        assertTrue(accessTime >= lastAccessTime);
        lastAccessTime = accessTime;
        long writeTime = e.getWriteTime();
        assertTrue(writeTime >= lastWriteTime);
        lastWriteTime = writeTime;
      }

      lastAccessTime = 0;
      lastWriteTime = 0;
      for (ReferenceEntry<K, V> e : segment.accessQueue) {
        long accessTime = e.getAccessTime();
        assertTrue(accessTime >= lastAccessTime);
        lastAccessTime = accessTime;
      }
      for (ReferenceEntry<K, V> e : segment.writeQueue) {
        long writeTime = e.getWriteTime();
        assertTrue(writeTime >= lastWriteTime);
        lastWriteTime = writeTime;
      }
    }
  }

  public void testExpireAfterWrite() {
    FakeTicker ticker = new FakeTicker();
    LocalCache<Object, Object> map = makeLocalCache(createCacheBuilder()
        .concurrencyLevel(1)
        .ticker(ticker)
        .expireAfterWrite(2, TimeUnit.NANOSECONDS));
    Segment<Object, Object> segment = map.segments[0];

    Object key = new Object();
    Object value = new Object();
    map.put(key, value);
    ReferenceEntry<Object, Object> entry = map.getEntry(key);
    assertTrue(map.isLive(entry, ticker.read()));

    segment.writeQueue.add(entry);
    assertSame(value, map.get(key));
    assertSame(entry, segment.writeQueue.peek());
    assertEquals(1, segment.writeQueue.size());

    segment.recordRead(entry, ticker.read());
    segment.expireEntries(ticker.read());
    assertSame(value, map.get(key));
    assertSame(entry, segment.writeQueue.peek());
    assertEquals(1, segment.writeQueue.size());

    ticker.advance(1);
    segment.recordRead(entry, ticker.read());
    segment.expireEntries(ticker.read());
    assertSame(value, map.get(key));
    assertSame(entry, segment.writeQueue.peek());
    assertEquals(1, segment.writeQueue.size());

    ticker.advance(1);
    assertNull(map.get(key));
    segment.expireEntries(ticker.read());
    assertNull(map.get(key));
    assertTrue(segment.writeQueue.isEmpty());
  }

  public void testExpireAfterAccess() {
    FakeTicker ticker = new FakeTicker();
    LocalCache<Object, Object> map = makeLocalCache(createCacheBuilder()
        .concurrencyLevel(1)
        .ticker(ticker)
        .expireAfterAccess(2, TimeUnit.NANOSECONDS));
    Segment<Object, Object> segment = map.segments[0];

    Object key = new Object();
    Object value = new Object();
    map.put(key, value);
    ReferenceEntry<Object, Object> entry = map.getEntry(key);
    assertTrue(map.isLive(entry, ticker.read()));

    segment.accessQueue.add(entry);
    assertSame(value, map.get(key));
    assertSame(entry, segment.accessQueue.peek());
    assertEquals(1, segment.accessQueue.size());

    segment.recordRead(entry, ticker.read());
    segment.expireEntries(ticker.read());
    assertTrue(map.containsKey(key));
    assertSame(entry, segment.accessQueue.peek());
    assertEquals(1, segment.accessQueue.size());

    ticker.advance(1);
    segment.recordRead(entry, ticker.read());
    segment.expireEntries(ticker.read());
    assertTrue(map.containsKey(key));
    assertSame(entry, segment.accessQueue.peek());
    assertEquals(1, segment.accessQueue.size());

    ticker.advance(1);
    segment.recordRead(entry, ticker.read());
    segment.expireEntries(ticker.read());
    assertTrue(map.containsKey(key));
    assertSame(entry, segment.accessQueue.peek());
    assertEquals(1, segment.accessQueue.size());

    ticker.advance(1);
    segment.expireEntries(ticker.read());
    assertTrue(map.containsKey(key));
    assertSame(entry, segment.accessQueue.peek());
    assertEquals(1, segment.accessQueue.size());

    ticker.advance(1);
    assertFalse(map.containsKey(key));
    assertNull(map.get(key));
    segment.expireEntries(ticker.read());
    assertFalse(map.containsKey(key));
    assertNull(map.get(key));
    assertTrue(segment.accessQueue.isEmpty());
  }
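  // The two tests above drive expiration at the Segment level with a 2ns timeout and a FakeTicker
  // advanced 1ns at a time. For reference, a roughly equivalent user-level configuration
  // (illustrative sketch only; "loader" is a placeholder CacheLoader, not part of this test) is:
  //
  //   LoadingCache<Object, Object> cache = CacheBuilder.newBuilder()
  //       .ticker(ticker)
  //       .expireAfterAccess(2, TimeUnit.NANOSECONDS)
  //       .build(loader);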
  public void testEvictEntries() {
    int maxSize = 10;
    LocalCache<Object, Object> map =
        makeLocalCache(createCacheBuilder().concurrencyLevel(1).maximumSize(maxSize));
    Segment<Object, Object> segment = map.segments[0];

    // manually add elements to avoid eviction
    int originalCount = 1024;
    ReferenceEntry<Object, Object> entry = null;
    LinkedHashMap<Object, Object> originalMap = Maps.newLinkedHashMap();
    for (int i = 0; i < originalCount; i++) {
      Object key = new Object();
      Object value = new Object();
      AtomicReferenceArray<ReferenceEntry<Object, Object>> table = segment.table;
      int hash = map.hash(key);
      int index = hash & (table.length() - 1);
      ReferenceEntry<Object, Object> first = table.get(index);
      entry = map.newEntry(key, hash, first);
      ValueReference<Object, Object> valueRef = map.newValueReference(entry, value, 1);
      entry.setValueReference(valueRef);
      segment.recordWrite(entry, 1, map.ticker.read());
      table.set(index, entry);
      originalMap.put(key, value);
    }
    segment.count = originalCount;
    segment.totalWeight = originalCount;
    assertEquals(originalCount, map.size());
    assertEquals(originalMap, map);

    Iterator<Object> it = originalMap.keySet().iterator();
    for (int i = 0; i < originalCount - maxSize; i++) {
      it.next();
      it.remove();
    }
    segment.evictEntries();
    assertEquals(maxSize, map.size());
    assertEquals(originalMap, map);
  }

  // reference queues

  public void testDrainKeyReferenceQueueOnWrite() {
    for (CacheBuilder<Object, Object> builder : allKeyValueStrengthMakers()) {
      LocalCache<Object, Object> map = makeLocalCache(builder.concurrencyLevel(1));
      if (map.usesKeyReferences()) {
        Segment<Object, Object> segment = map.segments[0];

        Object keyOne = new Object();
        int hashOne = map.hash(keyOne);
        Object valueOne = new Object();
        Object keyTwo = new Object();
        Object valueTwo = new Object();

        map.put(keyOne, valueOne);
        ReferenceEntry<Object, Object> entry = segment.getEntry(keyOne, hashOne);

        @SuppressWarnings("unchecked")
        Reference<Object> reference = (Reference) entry;
        reference.enqueue();

        map.put(keyTwo, valueTwo);
        assertFalse(map.containsKey(keyOne));
        assertFalse(map.containsValue(valueOne));
        assertNull(map.get(keyOne));
        assertEquals(1, map.size());
        assertNull(segment.keyReferenceQueue.poll());
      }
    }
  }

  public void testDrainValueReferenceQueueOnWrite() {
    for (CacheBuilder<Object, Object> builder : allKeyValueStrengthMakers()) {
      LocalCache<Object, Object> map = makeLocalCache(builder.concurrencyLevel(1));
      if (map.usesValueReferences()) {
        Segment<Object, Object> segment = map.segments[0];

        Object keyOne = new Object();
        int hashOne = map.hash(keyOne);
        Object valueOne = new Object();
        Object keyTwo = new Object();
        Object valueTwo = new Object();

        map.put(keyOne, valueOne);
        ReferenceEntry<Object, Object> entry = segment.getEntry(keyOne, hashOne);
        ValueReference<Object, Object> valueReference = entry.getValueReference();

        @SuppressWarnings("unchecked")
        Reference<Object> reference = (Reference) valueReference;
        reference.enqueue();

        map.put(keyTwo, valueTwo);
        assertFalse(map.containsKey(keyOne));
        assertFalse(map.containsValue(valueOne));
        assertNull(map.get(keyOne));
        assertEquals(1, map.size());
        assertNull(segment.valueReferenceQueue.poll());
      }
    }
  }

  public void testDrainKeyReferenceQueueOnRead() {
    for (CacheBuilder<Object, Object> builder : allKeyValueStrengthMakers()) {
      LocalCache<Object, Object> map = makeLocalCache(builder.concurrencyLevel(1));
      if (map.usesKeyReferences()) {
        Segment<Object, Object> segment = map.segments[0];

        Object keyOne = new Object();
        int hashOne = map.hash(keyOne);
        Object valueOne = new Object();
        Object keyTwo = new Object();

        map.put(keyOne, valueOne);
        ReferenceEntry<Object, Object> entry = segment.getEntry(keyOne, hashOne);

        @SuppressWarnings("unchecked")
        Reference<Object> reference = (Reference) entry;
        reference.enqueue();

        for (int i = 0; i < SMALL_MAX_SIZE; i++) {
          map.get(keyTwo);
        }
        assertFalse(map.containsKey(keyOne));
        assertFalse(map.containsValue(valueOne));
        assertNull(map.get(keyOne));
        assertEquals(0, map.size());
        assertNull(segment.keyReferenceQueue.poll());
      }
    }
  }
  public void testDrainValueReferenceQueueOnRead() {
    for (CacheBuilder<Object, Object> builder : allKeyValueStrengthMakers()) {
      LocalCache<Object, Object> map = makeLocalCache(builder.concurrencyLevel(1));
      if (map.usesValueReferences()) {
        Segment<Object, Object> segment = map.segments[0];

        Object keyOne = new Object();
        int hashOne = map.hash(keyOne);
        Object valueOne = new Object();
        Object keyTwo = new Object();

        map.put(keyOne, valueOne);
        ReferenceEntry<Object, Object> entry = segment.getEntry(keyOne, hashOne);
        ValueReference<Object, Object> valueReference = entry.getValueReference();

        @SuppressWarnings("unchecked")
        Reference<Object> reference = (Reference) valueReference;
        reference.enqueue();

        for (int i = 0; i < SMALL_MAX_SIZE; i++) {
          map.get(keyTwo);
        }
        assertFalse(map.containsKey(keyOne));
        assertFalse(map.containsValue(valueOne));
        assertNull(map.get(keyOne));
        assertEquals(0, map.size());
        assertNull(segment.valueReferenceQueue.poll());
      }
    }
  }

  public void testNullParameters() throws Exception {
    NullPointerTester tester = new NullPointerTester();
    tester.testAllPublicInstanceMethods(makeLocalCache(createCacheBuilder()));
    CacheLoader<Object, Object> loader = identityLoader();
    tester.testAllPublicInstanceMethods(makeLocalCache(createCacheBuilder()));
  }

  public void testSerializationProxyLoading() {
    CacheLoader<Object, Object> loader = new SerializableCacheLoader();
    RemovalListener<Object, Object> listener = new SerializableRemovalListener<Object, Object>();
    SerializableWeigher<Object, Object> weigher = new SerializableWeigher<Object, Object>();
    Ticker ticker = new SerializableTicker();
    @SuppressWarnings("unchecked") // createMock
    LocalLoadingCache<Object, Object> one = (LocalLoadingCache) CacheBuilder.newBuilder()
        .weakKeys()
        .softValues()
        .expireAfterAccess(123, SECONDS)
        .expireAfterWrite(456, MINUTES)
        .maximumWeight(789)
        .weigher(weigher)
        .concurrencyLevel(12)
        .removalListener(listener)
        .ticker(ticker)
        .build(loader);
    // add a non-serializable entry
    one.getUnchecked(new Object());
    assertEquals(1, one.size());
    assertFalse(one.asMap().isEmpty());
    LocalLoadingCache<Object, Object> two = SerializableTester.reserialize(one);
    assertEquals(0, two.size());
    assertTrue(two.asMap().isEmpty());

    LocalCache<Object, Object> localCacheOne = one.localCache;
    LocalCache<Object, Object> localCacheTwo = two.localCache;

    assertEquals(localCacheOne.keyStrength, localCacheTwo.keyStrength);
    assertEquals(localCacheOne.valueStrength, localCacheTwo.valueStrength);
    assertEquals(localCacheOne.keyEquivalence, localCacheTwo.keyEquivalence);
    assertEquals(localCacheOne.valueEquivalence, localCacheTwo.valueEquivalence);
    assertEquals(localCacheOne.maxWeight, localCacheTwo.maxWeight);
    assertEquals(localCacheOne.weigher, localCacheTwo.weigher);
    assertEquals(localCacheOne.expireAfterAccessNanos, localCacheTwo.expireAfterAccessNanos);
    assertEquals(localCacheOne.expireAfterWriteNanos, localCacheTwo.expireAfterWriteNanos);
    assertEquals(localCacheOne.refreshNanos, localCacheTwo.refreshNanos);
    assertEquals(localCacheOne.removalListener, localCacheTwo.removalListener);
    assertEquals(localCacheOne.ticker, localCacheTwo.ticker);

    // serialize the reconstituted version to be sure we haven't lost the ability to reserialize
    LocalLoadingCache<Object, Object> three = SerializableTester.reserialize(two);
    LocalCache<Object, Object> localCacheThree = three.localCache;

    assertEquals(localCacheTwo.defaultLoader, localCacheThree.defaultLoader);
    assertEquals(localCacheTwo.keyStrength, localCacheThree.keyStrength);
    assertEquals(localCacheTwo.valueStrength, localCacheThree.valueStrength);
    assertEquals(localCacheTwo.keyEquivalence, localCacheThree.keyEquivalence);
    assertEquals(localCacheTwo.valueEquivalence, localCacheThree.valueEquivalence);
    assertEquals(localCacheTwo.maxWeight, localCacheThree.maxWeight);
    assertEquals(localCacheTwo.weigher, localCacheThree.weigher);
    assertEquals(localCacheTwo.expireAfterAccessNanos, localCacheThree.expireAfterAccessNanos);
    assertEquals(localCacheTwo.expireAfterWriteNanos, localCacheThree.expireAfterWriteNanos);
    assertEquals(localCacheTwo.removalListener, localCacheThree.removalListener);
    assertEquals(localCacheTwo.ticker, localCacheThree.ticker);
  }

  public void testSerializationProxyManual() {
    RemovalListener<Object, Object> listener = new SerializableRemovalListener<Object, Object>();
    SerializableWeigher<Object, Object> weigher = new SerializableWeigher<Object, Object>();
    Ticker ticker = new SerializableTicker();
    @SuppressWarnings("unchecked") // createMock
    LocalManualCache<Object, Object> one = (LocalManualCache) CacheBuilder.newBuilder()
        .weakKeys()
        .softValues()
        .expireAfterAccess(123, NANOSECONDS)
        .maximumWeight(789)
        .weigher(weigher)
        .concurrencyLevel(12)
        .removalListener(listener)
        .ticker(ticker)
        .build();
    // add a non-serializable entry
    one.put(new Object(), new Object());
    assertEquals(1, one.size());
    assertFalse(one.asMap().isEmpty());
    LocalManualCache<Object, Object> two = SerializableTester.reserialize(one);
    assertEquals(0, two.size());
    assertTrue(two.asMap().isEmpty());

    LocalCache<Object, Object> localCacheOne = one.localCache;
    LocalCache<Object, Object> localCacheTwo = two.localCache;

    assertEquals(localCacheOne.keyStrength, localCacheTwo.keyStrength);
    assertEquals(localCacheOne.valueStrength, localCacheTwo.valueStrength);
    assertEquals(localCacheOne.keyEquivalence, localCacheTwo.keyEquivalence);
    assertEquals(localCacheOne.valueEquivalence, localCacheTwo.valueEquivalence);
    assertEquals(localCacheOne.maxWeight, localCacheTwo.maxWeight);
    assertEquals(localCacheOne.weigher, localCacheTwo.weigher);
    assertEquals(localCacheOne.expireAfterAccessNanos, localCacheTwo.expireAfterAccessNanos);
    assertEquals(localCacheOne.expireAfterWriteNanos, localCacheTwo.expireAfterWriteNanos);
    assertEquals(localCacheOne.removalListener, localCacheTwo.removalListener);
    assertEquals(localCacheOne.ticker, localCacheTwo.ticker);

    // serialize the reconstituted version to be sure we haven't lost the ability to reserialize
    LocalManualCache<Object, Object> three = SerializableTester.reserialize(two);
    LocalCache<Object, Object> localCacheThree = three.localCache;

    assertEquals(localCacheTwo.keyStrength, localCacheThree.keyStrength);
    assertEquals(localCacheTwo.valueStrength, localCacheThree.valueStrength);
    assertEquals(localCacheTwo.keyEquivalence, localCacheThree.keyEquivalence);
    assertEquals(localCacheTwo.valueEquivalence, localCacheThree.valueEquivalence);
    assertEquals(localCacheTwo.maxWeight, localCacheThree.maxWeight);
    assertEquals(localCacheTwo.weigher, localCacheThree.weigher);
    assertEquals(localCacheTwo.expireAfterAccessNanos, localCacheThree.expireAfterAccessNanos);
    assertEquals(localCacheTwo.expireAfterWriteNanos, localCacheThree.expireAfterWriteNanos);
    assertEquals(localCacheTwo.removalListener, localCacheThree.removalListener);
    assertEquals(localCacheTwo.ticker, localCacheThree.ticker);
  }

  // utility methods

  /**
   * Returns an iterable containing all combinations of maximumSize, expireAfterAccess/Write,
   * weakKeys and weak/softValues.
   */
  @SuppressWarnings("unchecked") // varargs
  private static Iterable<CacheBuilder<Object, Object>> allEntryTypeMakers() {
    List<CacheBuilder<Object, Object>> result = newArrayList(allKeyValueStrengthMakers());
    for (CacheBuilder<Object, Object> builder : allKeyValueStrengthMakers()) {
      result.add(builder.maximumSize(SMALL_MAX_SIZE));
    }
    for (CacheBuilder<Object, Object> builder : allKeyValueStrengthMakers()) {
      result.add(builder.expireAfterAccess(99999, SECONDS));
    }
    for (CacheBuilder<Object, Object> builder : allKeyValueStrengthMakers()) {
      result.add(builder.expireAfterWrite(99999, SECONDS));
    }
    for (CacheBuilder<Object, Object> builder : allKeyValueStrengthMakers()) {
      result.add(builder.maximumSize(SMALL_MAX_SIZE).expireAfterAccess(99999, SECONDS));
    }
    for (CacheBuilder<Object, Object> builder : allKeyValueStrengthMakers()) {
      result.add(builder.maximumSize(SMALL_MAX_SIZE).expireAfterWrite(99999, SECONDS));
    }
    return result;
  }

  /**
   * Returns an iterable containing all combinations of maximumSize and expireAfterAccess/Write.
   */
  @SuppressWarnings("unchecked") // varargs
  static Iterable<CacheBuilder<Object, Object>> allEvictingMakers() {
    return ImmutableList.of(
        createCacheBuilder().maximumSize(SMALL_MAX_SIZE),
        createCacheBuilder().expireAfterAccess(99999, SECONDS),
        createCacheBuilder().expireAfterWrite(99999, SECONDS),
        createCacheBuilder()
            .maximumSize(SMALL_MAX_SIZE)
            .expireAfterAccess(SMALL_MAX_SIZE, TimeUnit.SECONDS),
        createCacheBuilder()
            .maximumSize(SMALL_MAX_SIZE)
            .expireAfterWrite(SMALL_MAX_SIZE, TimeUnit.SECONDS));
  }

  /**
   * Returns an iterable containing all combinations of weakKeys and weak/softValues.
   */
  @SuppressWarnings("unchecked") // varargs
  private static Iterable<CacheBuilder<Object, Object>> allKeyValueStrengthMakers() {
    return ImmutableList.of(
        createCacheBuilder(),
        createCacheBuilder().weakValues(),
        createCacheBuilder().softValues(),
        createCacheBuilder().weakKeys(),
        createCacheBuilder().weakKeys().weakValues(),
        createCacheBuilder().weakKeys().softValues());
  }

  // entries and values

  private static <K, V> DummyEntry<K, V> createDummyEntry(
      K key, int hash, V value, ReferenceEntry<K, V> next) {
    DummyEntry<K, V> entry = DummyEntry.create(key, hash, next);
    DummyValueReference<K, V> valueRef = DummyValueReference.create(value);
    entry.setValueReference(valueRef);
    return entry;
  }
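  /**
   * A hand-rolled {@link ReferenceEntry} stub whose value reference, access/write times, and
   * queue links can be set directly, letting tests exercise Segment internals without going
   * through the cache's own entry factories.
   */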
  static class DummyEntry<K, V> implements ReferenceEntry<K, V> {
    private K key;
    private final int hash;
    private final ReferenceEntry<K, V> next;

    public DummyEntry(K key, int hash, ReferenceEntry<K, V> next) {
      this.key = key;
      this.hash = hash;
      this.next = next;
    }

    public static <K, V> DummyEntry<K, V> create(K key, int hash, ReferenceEntry<K, V> next) {
      return new DummyEntry<K, V>(key, hash, next);
    }

    public void clearKey() {
      this.key = null;
    }

    private ValueReference<K, V> valueReference = unset();

    @Override public ValueReference<K, V> getValueReference() { return valueReference; }

    @Override
    public void setValueReference(ValueReference<K, V> valueReference) {
      this.valueReference = valueReference;
    }

    @Override public ReferenceEntry<K, V> getNext() { return next; }
    @Override public int getHash() { return hash; }
    @Override public K getKey() { return key; }

    private long accessTime = Long.MAX_VALUE;

    @Override public long getAccessTime() { return accessTime; }
    @Override public void setAccessTime(long time) { this.accessTime = time; }

    private ReferenceEntry<K, V> nextAccess = nullEntry();

    @Override public ReferenceEntry<K, V> getNextInAccessQueue() { return nextAccess; }

    @Override
    public void setNextInAccessQueue(ReferenceEntry<K, V> next) {
      this.nextAccess = next;
    }

    private ReferenceEntry<K, V> previousAccess = nullEntry();

    @Override public ReferenceEntry<K, V> getPreviousInAccessQueue() { return previousAccess; }

    @Override
    public void setPreviousInAccessQueue(ReferenceEntry<K, V> previous) {
      this.previousAccess = previous;
    }

    private long writeTime = Long.MAX_VALUE;

    @Override public long getWriteTime() { return writeTime; }
    @Override public void setWriteTime(long time) { this.writeTime = time; }

    private ReferenceEntry<K, V> nextWrite = nullEntry();

    @Override public ReferenceEntry<K, V> getNextInWriteQueue() { return nextWrite; }

    @Override
    public void setNextInWriteQueue(ReferenceEntry<K, V> next) {
      this.nextWrite = next;
    }

    private ReferenceEntry<K, V> previousWrite = nullEntry();

    @Override public ReferenceEntry<K, V> getPreviousInWriteQueue() { return previousWrite; }

    @Override
    public void setPreviousInWriteQueue(ReferenceEntry<K, V> previous) {
      this.previousWrite = previous;
    }
  }

  static class DummyValueReference<K, V> implements ValueReference<K, V> {
    private V value;
    boolean loading = false;

    public DummyValueReference() {
      this.loading = true;
    }

    public DummyValueReference(V value) {
      this.value = value;
    }

    public static <K, V> DummyValueReference<K, V> create(V value) {
      return new DummyValueReference<K, V>(value);
    }

    public static <K, V> DummyValueReference<K, V> createLoading() {
      return new DummyValueReference<K, V>();
    }

    @Override public V get() { return value; }
    @Override public int getWeight() { return 1; }
    @Override public ReferenceEntry<K, V> getEntry() { return null; }

    @Override
    public ValueReference<K, V> copyFor(
        ReferenceQueue<V> queue, V value, ReferenceEntry<K, V> entry) {
      return this;
    }

    public void setLoading(boolean loading) {
      this.loading = loading;
    }

    @Override public boolean isLoading() { return loading; }
    @Override public boolean isActive() { return !loading; }
    @Override public V waitForValue() { return get(); }
    @Override public void notifyNewValue(V newValue) {}

    public void clear() {
      value = null;
    }
  }

  private static class SerializableCacheLoader extends CacheLoader<Object, Object>
      implements Serializable {
    @Override public Object load(Object key) { return new Object(); }
    @Override public int hashCode() { return 42; }
    @Override public boolean equals(Object o) { return (o instanceof SerializableCacheLoader); }
  }

  private static class SerializableRemovalListener<K, V>
      implements RemovalListener<K, V>, Serializable {
    @Override public void onRemoval(RemovalNotification<K, V> notification) {}
    @Override public int hashCode() { return 42; }
    @Override public boolean equals(Object o) { return (o instanceof SerializableRemovalListener); }
  }

  private static class SerializableTicker extends Ticker implements Serializable {
    @Override public long read() { return 42; }
    @Override public int hashCode() { return 42; }
    @Override public boolean equals(Object o) { return (o instanceof SerializableTicker); }
  }

  private static class SerializableWeigher<K, V> implements Weigher<K, V>, Serializable {
    @Override public int weigh(K key, V value) { return 42; }
    @Override public int hashCode() { return 42; }
    @Override public boolean equals(Object o) { return (o instanceof SerializableWeigher); }
  }
}