@@ -15,8 +15,14 @@
  */
 package org.redisson.cache;
 
-import java.util.Queue;
-import java.util.concurrent.ConcurrentLinkedQueue;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicLong;
 
 /**
  * LRU (least recently used) cache.
@@ -28,24 +34,38 @@ import java.util.concurrent.ConcurrentLinkedQueue;
  */
 public class LRUCacheMap<K, V> extends AbstractCacheMap<K, V> {
 
-    private final Queue<CachedValue> queue = new ConcurrentLinkedQueue<CachedValue>();
+    private final AtomicLong index = new AtomicLong();
+    private final List<Collection<CachedValue<K, V>>> queues =
+                    new ArrayList<Collection<CachedValue<K, V>>>(Runtime.getRuntime().availableProcessors()*2);
 
     public LRUCacheMap(int size, long timeToLiveInMillis, long maxIdleInMillis) {
         super(size, timeToLiveInMillis, maxIdleInMillis);
+
+        for (int i = 0; i < Runtime.getRuntime().availableProcessors()*2; i++) {
+            Set<CachedValue<K, V>> instance = Collections.synchronizedSet(new LinkedHashSet<CachedValue<K, V>>());
+            queues.add(instance);
+        }
     }
 
     @Override
-    protected void onValueCreate(CachedValue value) {
+    protected void onValueCreate(CachedValue<K, V> value) {
+        Collection<CachedValue<K, V>> queue = getQueue(value);
         queue.add(value);
     }
 
+    private Collection<CachedValue<K, V>> getQueue(CachedValue<K, V> value) {
+        return queues.get(value.hashCode() % queues.size());
+    }
+
     @Override
-    protected void onValueRemove(CachedValue value) {
+    protected void onValueRemove(CachedValue<K, V> value) {
+        Collection<CachedValue<K, V>> queue = getQueue(value);
         queue.remove(value);
     }
 
     @Override
-    protected void onValueRead(CachedValue value) {
+    protected void onValueRead(CachedValue<K, V> value) {
+        Collection<CachedValue<K, V>> queue = getQueue(value);
         // move value to tail of queue
         if (queue.remove(value)) {
             queue.add(value);
@@ -54,15 +74,22 @@ public class LRUCacheMap<K, V> extends AbstractCacheMap<K, V> {
 
     @Override
     protected void onMapFull() {
-        CachedValue value = queue.poll();
-        if (value != null) {
-            map.remove(value.getKey(), value);
+        Collection<CachedValue<K, V>> queue = queues.get((int)Math.abs(index.incrementAndGet() % queues.size()));
+        synchronized (queue) {
+            Iterator<CachedValue<K, V>> iter = queue.iterator();
+            if (iter.hasNext()) {
+                CachedValue<K, V> value = iter.next();
+                iter.remove();
+                map.remove(value.getKey(), value);
+            }
         }
     }
 
     @Override
     public void clear() {
-        queue.clear();
+        for (Collection<CachedValue<K, V>> collection : queues) {
+            collection.clear();
+        }
         super.clear();
     }
 
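The change above replaces the single ConcurrentLinkedQueue with a list of stripes, each a synchronized LinkedHashSet chosen by the entry's hash code; reads re-append an entry to the tail of its stripe, and on overflow onMapFull() picks a stripe round-robin through the AtomicLong counter and removes that stripe's eldest entry. Below is a minimal, self-contained sketch of that striping pattern outside the Redisson class hierarchy. The names StripedLruSketch, stripeFor and evictOne are illustrative only, and unlike the patched getQueue() the sketch takes Math.abs of the hash code to keep the stripe index non-negative.

import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;

// Illustrative sketch of a striped LRU key set; not a Redisson class.
public class StripedLruSketch<K> {

    private final AtomicLong evictionIndex = new AtomicLong();
    private final List<Set<K>> stripes = new ArrayList<Set<K>>();

    public StripedLruSketch(int stripeCount) {
        for (int i = 0; i < stripeCount; i++) {
            // LinkedHashSet keeps insertion order, so the first element is the eldest.
            stripes.add(Collections.synchronizedSet(new LinkedHashSet<K>()));
        }
    }

    private Set<K> stripeFor(K key) {
        // Math.abs guards against negative hash codes in this sketch.
        return stripes.get(Math.abs(key.hashCode() % stripes.size()));
    }

    public void onCreate(K key) {
        stripeFor(key).add(key);
    }

    public void onRead(K key) {
        Set<K> stripe = stripeFor(key);
        // Remove and re-add to move the key to the tail (most recently used position).
        if (stripe.remove(key)) {
            stripe.add(key);
        }
    }

    // Evict one eldest entry from the next stripe, chosen round-robin.
    public K evictOne() {
        Set<K> stripe = stripes.get((int) Math.abs(evictionIndex.incrementAndGet() % stripes.size()));
        synchronized (stripe) {
            Iterator<K> iter = stripe.iterator();
            if (iter.hasNext()) {
                K eldest = iter.next();
                iter.remove();
                return eldest;
            }
        }
        return null;
    }

    public static void main(String[] args) {
        StripedLruSketch<String> lru = new StripedLruSketch<String>(4);
        lru.onCreate("a");
        lru.onCreate("b");
        lru.onRead("a"); // "a" moves to the tail of its stripe
        System.out.println("evicted: " + lru.evictOne());
    }
}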