Design and implement a data structure for a Least Recently Used (LRU) cache. It should support the following operations: get and set.
get(key) - Get the value (will always be positive) of the key if the key exists in the cache, otherwise return -1.
set(key, value) - Set or insert the value if the key is not already present. When the cache reaches its capacity, it should invalidate the least recently used item before inserting a new item.
According to Wikipedia, an LRU cache discards the least recently used items first. This requires keeping track of what was used when, which is expensive if one wants to make sure the algorithm always discards the least recently used item. To answer queries in O(1) we use a hash map from key to entry, and to find and remove the least recently used entry in O(1) we keep the entries in a doubly linked list, ordered from most to least recently used.
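For intuition, the same hash-map-plus-recency-ordering idea can be sketched with Python's collections.OrderedDict, which already keeps keys in insertion order (a minimal sketch for illustration only; the hand-rolled doubly-linked-list implementations that follow are the point of this post):

from collections import OrderedDict

class LRUCacheSketch:
    # OrderedDict keeps keys ordered from least to most recently used.
    def __init__(self, capacity):
        self.capacity = capacity
        self.entries = OrderedDict()

    def get(self, key):
        if key not in self.entries:
            return -1
        self.entries.move_to_end(key)         # mark the key as most recently used
        return self.entries[key]

    def set(self, key, value):
        if key in self.entries:
            self.entries.move_to_end(key)     # updating an existing key counts as a use
        elif len(self.entries) >= self.capacity:
            self.entries.popitem(last=False)  # evict the least recently used key
        self.entries[key] = value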
C++ Code:
#include <unordered_map>

using std::unordered_map;

class LRUCache{
public:
    /*
     * struct: CacheEntry
     * desc: represents a single cache entry (a node of the doubly linked list)
     * key: the key used for querying
     * value: the actual value stored
     * prev: pointer to the previous node
     * next: pointer to the next node
     */
    struct CacheEntry{
        int key;
        int value;
        CacheEntry *prev;
        CacheEntry *next;
        CacheEntry(int k, int v) : key(k), value(v), prev(nullptr), next(nullptr) {}
    };
    /*
     * func: constructor
     * @param capacity: the maximum number of entries the cache can hold
     */
    LRUCache(int capacity){
        this->capacity = capacity;
        curr_size = 0;
        head = nullptr;
        tail = nullptr;
    }
    /*
     * func: get
     * goal: get the value based on the input key
     * @param key: input key
     * return: the value found; if not found, return -1
     */
    int get(int key){
        if(cache_map.find(key) != cache_map.end()){
            CacheEntry *entry = cache_map[key];
            // Accessing an entry makes it the most recently used.
            move_to_head(entry);
            return entry->value;
        }else{
            return -1;
        }
    }
    /*
     * func: set
     * goal: set the key and value in the cache; if the key is already in the
     *       cache, update the entry, otherwise insert a new one.
     * @param key: input key
     * @param value: input value
     * return:
     */
    void set(int key, int value){
        if(get(key) != -1){
            // Key already present: update its value and keep it as the most recently used.
            CacheEntry *entry = cache_map[key];
            entry->value = value;
            move_to_head(entry);
        }else{
            if(curr_size >= capacity){
                // Cache is full: evict the least recently used entry (the tail of the list).
                cache_map.erase(tail->key);
                remove_last();
            }else{
                ++curr_size;
            }
            CacheEntry *entry = new CacheEntry(key, value);
            if(curr_size == 1){
                head = entry;
                tail = entry;
            }
            move_to_head(entry);
            cache_map.emplace(key, entry);
        }
    }
private:
    int capacity;
    int curr_size;
    unordered_map<int, CacheEntry*> cache_map;
    CacheEntry *head;
    CacheEntry *tail;
    /*
     * func: move_to_head
     * goal: move the input entry to the head of the list
     * @param entry: input entry
     * return:
     */
    void move_to_head(CacheEntry *entry){
        if(entry == head){
            return;
        }
        // Unlink the entry from its current position (a no-op for a brand-new node).
        if(entry->prev){
            entry->prev->next = entry->next;
        }
        if(entry->next){
            entry->next->prev = entry->prev;
        }
        if(entry == tail){
            tail = entry->prev;
        }
        // Splice the entry in front of the current head.
        if(head){
            entry->next = head;
            head->prev = entry;
        }
        head = entry;
        entry->prev = nullptr;
    }
    /*
     * func: remove_last
     * goal: remove the last node in the list (the least recently used entry)
     * return:
     */
    void remove_last(){
        if(tail){
            if(tail->prev){
                tail->prev->next = nullptr;
            }else{
                // The list had a single node, so it is now empty.
                head = nullptr;
            }
            CacheEntry *garbage = tail;
            tail = tail->prev;
            delete garbage;
        }
    }
};
Python Code:
class LRUCache:
    class CacheEntry:
        def __init__(self, k, v):
            self.key = k
            self.value = v
            self.next = None
            self.prev = None

    # func: constructor
    # @param capacity: the maximum capacity
    def __init__(self, capacity):
        self.capacity = capacity
        self.curr_size = 0
        self.cache_map = {}
        self.head = None
        self.tail = None
    # func: get the value based on the key
    # @param key: input key
    # return: the value found. If not found, return -1
    def get(self, key):
        if key in self.cache_map:
            entry = self.cache_map[key]
            # Accessing an entry makes it the most recently used.
            self.move_to_head(entry)
            return entry.value
        else:
            return -1
    # func: set the key and value in the cache; if the key is in the cache,
    #       update the value. If not, insert it into the cache.
    # @param key: input key
    # @param value: input value
    # @return:
    def set(self, key, value):
        if key in self.cache_map:
            # Key already present: update its value and keep it as the most recently used.
            entry = self.cache_map[key]
            entry.value = value
            self.move_to_head(entry)
        else:
            if self.curr_size >= self.capacity:
                # Cache is full: evict the least recently used entry (the tail of the list).
                self.cache_map.pop(self.tail.key)
                self.remove_tail()
            else:
                self.curr_size += 1
            new_entry = self.CacheEntry(key, value)
            if self.curr_size == 1:
                self.head = new_entry
                self.tail = new_entry
            self.cache_map[key] = new_entry
            self.move_to_head(new_entry)
    # func: move the input entry to the head of the list
    # @param entry: input entry
    # @return:
    def move_to_head(self, entry):
        if self.head == entry:
            return
        # Unlink the entry from its current position (a no-op for a brand-new node).
        if entry.prev:
            entry.prev.next = entry.next
        if entry.next:
            entry.next.prev = entry.prev
        if entry == self.tail:
            self.tail = entry.prev
        # Splice the entry in front of the current head.
        if self.head:
            entry.next = self.head
            self.head.prev = entry
        self.head = entry
        entry.prev = None
    # func: remove the tail node from the list (the least recently used entry)
    # @return:
    def remove_tail(self):
        if self.tail:
            if self.tail.prev:
                self.tail.prev.next = None
            else:
                # The list had a single node, so it is now empty.
                self.head = None
            self.tail = self.tail.prev
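A short usage example of the Python class above (the C++ version behaves the same way), showing the eviction order for a cache with capacity 2:

cache = LRUCache(2)
cache.set(1, 1)
cache.set(2, 2)
print(cache.get(1))   # prints 1; key 1 becomes the most recently used
cache.set(3, 3)       # capacity reached: evicts key 2, the least recently used
print(cache.get(2))   # prints -1; key 2 was evicted
print(cache.get(3))   # prints 3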