Medium

Design a data structure that follows the constraints of a Least Recently Used (LRU) cache.

Implement the LRUCache class:

LRUCache(int capacity) Initialize the LRU cache with positive size capacity.
int get(int key) Return the value of the key if the key exists, otherwise return -1.
void put(int key, int value) Update the value of the key if the key exists. Otherwise, add the key-value pair to the cache. If the number of keys exceeds the capacity from this operation, evict the least recently used key.

The functions get and put must each run in O(1) average time complexity.
Example 1:

Input
["LRUCache", "put", "put", "get", "put", "get", "put", "get", "get", "get"]
[[2], [1, 1], [2, 2], [1], [3, 3], [2], [4, 4], [1], [3], [4]]
Output
[null, null, null, 1, null, -1, null, -1, 3, 4]

Explanation
LRUCache lRUCache = new LRUCache(2);
lRUCache.put(1, 1); // cache is {1=1}
lRUCache.put(2, 2); // cache is {1=1, 2=2}
lRUCache.get(1);    // return 1
lRUCache.put(3, 3); // LRU key was 2, evicts key 2, cache is {1=1, 3=3}
lRUCache.get(2);    // returns -1 (not found)
lRUCache.put(4, 4); // LRU key was 1, evicts key 1, cache is {4=4, 3=3}
lRUCache.get(1);    // return -1 (not found)
lRUCache.get(3);    // return 3
lRUCache.get(4);    // return 4
Constraints:

1 <= capacity <= 3000
0 <= key <= 10^4
0 <= value <= 10^5
At most 2 * 10^5 calls will be made to get and put.
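
For comparison, here is a minimal sketch of the same behavior built on Python's collections.OrderedDict, which tracks insertion order and supports O(1) move_to_end and popitem. This is not the approach used in the solution below (that one builds the doubly linked list by hand); the class name LRUCacheOD is just an illustrative label.

from collections import OrderedDict

class LRUCacheOD:
    def __init__(self, capacity):
        self.capacity = capacity
        self.data = OrderedDict()  # ordered from least to most recently used

    def get(self, key):
        if key not in self.data:
            return -1
        self.data.move_to_end(key)       # mark as most recently used
        return self.data[key]

    def put(self, key, value):
        if key in self.data:
            self.data.move_to_end(key)   # refresh recency before overwriting
        self.data[key] = value
        if len(self.data) > self.capacity:
            self.data.popitem(last=False)  # evict the least recently used key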
class ListNode:
    def __init__(self, val, key=None, next=None, prev=None):
        self.val = val
        self.key = key
        self.next = next
        self.prev = prev


class LRUCache:
    def __init__(self, capacity):
        self.capacity = capacity
        self.cache = dict()   # key -> ListNode
        self.size = 0
        # Doubly linked list with sentinels: the LRU node sits right after
        # the head sentinel, the MRU node right before the tail sentinel.
        self.sentinel_head = ListNode(0)
        self.sentinel_tail = ListNode(0)
        self.sentinel_head.next = self.sentinel_tail
        self.sentinel_tail.prev = self.sentinel_head

    def _append_to_tail(self, node):
        # Attach the node just before the tail sentinel (most recently used).
        prev_node = self.sentinel_tail.prev
        prev_node.next = node
        node.prev = prev_node
        node.next = self.sentinel_tail
        self.sentinel_tail.prev = node

    def update_node(self, node):
        # Unlink the node from its current position...
        node.prev.next = node.next
        node.next.prev = node.prev
        # ...and re-attach it at the MRU end.
        self._append_to_tail(node)

    def get(self, key):
        if key in self.cache:
            node = self.cache[key]
            self.update_node(node)   # a read counts as a use
            return node.val
        return -1

    def put(self, key, value):
        if key in self.cache:
            # Key already present: overwrite the value and mark it as MRU.
            node = self.cache[key]
            node.val = value
            self.update_node(node)
            return
        if self.size == self.capacity:
            # Cache is full: evict the least recently used node, which is
            # the one immediately after the head sentinel.
            lru_node = self.sentinel_head.next
            del self.cache[lru_node.key]
            self.sentinel_head.next = lru_node.next
            lru_node.next.prev = self.sentinel_head
        else:
            self.size += 1
        # Insert the new key as the most recently used entry.
        node = ListNode(value, key)
        self.cache[key] = node
        self._append_to_tail(node)

# Your LRUCache object will be instantiated and called as such:
# obj = LRUCache(capacity)
# param_1 = obj.get(key)
# obj.put(key,value)
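
As a quick sanity check, the call sequence from Example 1 can be replayed against the class above; the printed values should be 1, -1, -1, 3, 4.

if __name__ == "__main__":
    cache = LRUCache(2)
    cache.put(1, 1)        # cache is {1=1}
    cache.put(2, 2)        # cache is {1=1, 2=2}
    print(cache.get(1))    # 1
    cache.put(3, 3)        # evicts key 2
    print(cache.get(2))    # -1
    cache.put(4, 4)        # evicts key 1
    print(cache.get(1))    # -1
    print(cache.get(3))    # 3
    print(cache.get(4))    # 4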