Here, we see the LRU Cache LeetCode Solution. This LeetCode problem is solved below in several programming languages: C++, Java, JavaScript, and Python.
Topics: Design
Companies: Amazon, Bloomberg, Facebook, Google, Microsoft, Palantir, Snapchat, Twitter, Uber, Yahoo, Zenefits
Level of Question: Medium
LRU Cache LeetCode Solution
1. Problem Statement
Design a data structure that follows the constraints of a Least Recently Used (LRU) cache.
Implement the LRUCache class:

LRUCache(int capacity) initializes the LRU cache with positive size capacity.
int get(int key) returns the value of the key if the key exists, otherwise returns -1.
void put(int key, int value) updates the value of the key if the key exists. Otherwise, it adds the key-value pair to the cache. If the number of keys exceeds the capacity from this operation, it evicts the least recently used key.

The functions get and put must each run in O(1) average time complexity.
Example 1:
Input: ["LRUCache", "put", "put", "get", "put", "get", "put", "get", "get", "get"], [[2], [1, 1], [2, 2], [1], [3, 3], [2], [4, 4], [1], [3], [4]]
Output: [null, null, null, 1, null, -1, null, -1, 3, 4]
Explanation:
LRUCache lRUCache = new LRUCache(2);
lRUCache.put(1, 1); // cache is {1=1}
lRUCache.put(2, 2); // cache is {1=1, 2=2}
lRUCache.get(1);    // returns 1
lRUCache.put(3, 3); // LRU key was 2, evicts key 2, cache is {1=1, 3=3}
lRUCache.get(2);    // returns -1 (not found)
lRUCache.put(4, 4); // LRU key was 1, evicts key 1, cache is {4=4, 3=3}
lRUCache.get(1);    // returns -1 (not found)
lRUCache.get(3);    // returns 3
lRUCache.get(4);    // returns 4
2. Coding Pattern Used in Solution
The coding pattern used here is the classic LRU cache design: a hash map (HashMap or unordered_map) gives O(1) lookup of a key's node, while a doubly linked list keeps entries ordered from most to least recently used, so moving an entry to the front and evicting from the back are both O(1). A minimal sketch of this idea follows; the full hand-rolled implementations are in the next section.
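As a conceptual sketch only (this is not the approach used in the implementations below, which build the doubly linked list by hand), Python's collections.OrderedDict provides the same O(1) move-to-end and pop-from-front behavior. The class name LRUCacheSketch is just for illustration:

from collections import OrderedDict

class LRUCacheSketch:
    # OrderedDict keeps keys in insertion order, so moving a key
    # to the end marks it as most recently used.
    def __init__(self, capacity: int):
        self.capacity = capacity
        self.cache = OrderedDict()

    def get(self, key: int) -> int:
        if key not in self.cache:
            return -1
        self.cache.move_to_end(key)   # mark as most recently used
        return self.cache[key]

    def put(self, key: int, value: int) -> None:
        if key in self.cache:
            self.cache.move_to_end(key)
        self.cache[key] = value
        if len(self.cache) > self.capacity:
            self.cache.popitem(last=False)  # evict least recently used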
3. Code Implementation in Different Languages
3.1 LRU Cache C++
class LRUCache {
public:
    // Doubly linked list of (key, value) pairs, most recently used at the front.
    list<pair<int,int>> l;
    // Maps a key to its node's iterator in the list.
    unordered_map<int, list<pair<int,int>>::iterator> m;
    int size;

    LRUCache(int capacity) {
        size = capacity;
    }

    int get(int key) {
        if (m.find(key) == m.end()) return -1;
        // splice moves the node to the front in O(1) without invalidating iterators.
        l.splice(l.begin(), l, m[key]);
        return m[key]->second;
    }

    void put(int key, int value) {
        if (m.find(key) != m.end()) {
            l.splice(l.begin(), l, m[key]);
            m[key]->second = value;
            return;
        }
        if (l.size() == size) {
            // Evict the least recently used entry from the back.
            auto d_key = l.back().first;
            l.pop_back();
            m.erase(d_key);
        }
        l.push_front({key, value});
        m[key] = l.begin();
    }
};
3.2 LRU Cache Java
class LRUCache {
    class Node {
        int key;
        int val;
        Node prev;
        Node next;
        Node(int key, int val) {
            this.key = key;
            this.val = val;
        }
    }

    // Dummy head and tail nodes simplify insertions and deletions at the ends.
    Node head = new Node(-1, -1);
    Node tail = new Node(-1, -1);
    int cap;
    HashMap<Integer, Node> m = new HashMap<>();

    public LRUCache(int capacity) {
        cap = capacity;
        head.next = tail;
        tail.prev = head;
    }

    // Insert a node right after head (most recently used position).
    private void addNode(Node newnode) {
        Node temp = head.next;
        newnode.next = temp;
        newnode.prev = head;
        head.next = newnode;
        temp.prev = newnode;
    }

    // Unlink a node from the list in O(1).
    private void deleteNode(Node delnode) {
        Node prevv = delnode.prev;
        Node nextt = delnode.next;
        prevv.next = nextt;
        nextt.prev = prevv;
    }

    public int get(int key) {
        if (m.containsKey(key)) {
            Node resNode = m.get(key);
            int ans = resNode.val;
            // Move the accessed node to the front.
            m.remove(key);
            deleteNode(resNode);
            addNode(resNode);
            m.put(key, head.next);
            return ans;
        }
        return -1;
    }

    public void put(int key, int value) {
        if (m.containsKey(key)) {
            Node curr = m.get(key);
            m.remove(key);
            deleteNode(curr);
        }
        if (m.size() == cap) {
            // Evict the least recently used node (just before tail).
            m.remove(tail.prev.key);
            deleteNode(tail.prev);
        }
        addNode(new Node(key, value));
        m.put(key, head.next);
    }
}
3.3 LRU Cache JavaScript
var LRUCache = function(capacity) {
    this._capacity = capacity;
    this._count = 0;
    this._head = null;   // most recently used node
    this._tail = null;   // least recently used node
    this._hashTable = {};
};

LRUCache.prototype.get = function(key) {
    if (this._hashTable[key]) {
        const { value } = this._hashTable[key];
        const { prev, next } = this._hashTable[key];
        // Unlink the node from its current position.
        if (prev) { prev.next = next; }
        if (next) { next.prev = prev || next.prev; }
        if (this._tail === this._hashTable[key]) {
            this._tail = prev || this._hashTable[key];
        }
        // Move the node to the head (most recently used).
        this._hashTable[key].prev = null;
        if (this._head !== this._hashTable[key]) {
            this._hashTable[key].next = this._head;
            this._head.prev = this._hashTable[key];
        }
        this._head = this._hashTable[key];
        return value;
    }
    return -1;
};

LRUCache.prototype.put = function(key, value) {
    if (this._hashTable[key]) {
        // Existing key: update the value and promote it via get().
        this._hashTable[key].value = value;
        this.get(key);
    } else {
        this._hashTable[key] = { key, value, prev: null, next: null };
        if (this._head) {
            this._head.prev = this._hashTable[key];
            this._hashTable[key].next = this._head;
        }
        this._head = this._hashTable[key];
        if (!this._tail) {
            this._tail = this._hashTable[key];
        }
        this._count += 1;
    }
    if (this._count > this._capacity) {
        // Evict the least recently used node from the tail.
        let removedKey = this._tail.key;
        if (this._tail.prev) {
            this._tail.prev.next = null;
            this._tail = this._tail.prev;
            this._hashTable[removedKey].prev = null;
        }
        delete this._hashTable[removedKey];
        this._count -= 1;
    }
};
3.4 LRU Cache Python
class LRUCache:
    class Node:
        def __init__(self, key, val):
            self.key = key
            self.val = val
            self.prev = None
            self.next = None

    def __init__(self, capacity):
        self.cap = capacity
        # Dummy head and tail nodes simplify insertions and deletions at the ends.
        self.head = self.Node(-1, -1)
        self.tail = self.Node(-1, -1)
        self.head.next = self.tail
        self.tail.prev = self.head
        self.m = {}

    def addNode(self, newnode):
        # Insert a node right after head (most recently used position).
        temp = self.head.next
        newnode.next = temp
        newnode.prev = self.head
        self.head.next = newnode
        temp.prev = newnode

    def deleteNode(self, delnode):
        # Unlink a node from the list in O(1).
        prevv = delnode.prev
        nextt = delnode.next
        prevv.next = nextt
        nextt.prev = prevv

    def get(self, key):
        if key in self.m:
            resNode = self.m[key]
            ans = resNode.val
            # Move the accessed node to the front.
            del self.m[key]
            self.deleteNode(resNode)
            self.addNode(resNode)
            self.m[key] = self.head.next
            return ans
        return -1

    def put(self, key, value):
        if key in self.m:
            curr = self.m[key]
            del self.m[key]
            self.deleteNode(curr)
        if len(self.m) == self.cap:
            # Evict the least recently used node (just before tail).
            del self.m[self.tail.prev.key]
            self.deleteNode(self.tail.prev)
        self.addNode(self.Node(key, value))
        self.m[key] = self.head.next
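As a quick sanity check (hypothetical driver code, not part of the original solution), the Python class above can be exercised with the operations from Example 1:

cache = LRUCache(2)
cache.put(1, 1)        # cache is {1=1}
cache.put(2, 2)        # cache is {1=1, 2=2}
print(cache.get(1))    # 1
cache.put(3, 3)        # evicts key 2
print(cache.get(2))    # -1 (not found)
cache.put(4, 4)        # evicts key 1
print(cache.get(1))    # -1 (not found)
print(cache.get(3))    # 3
print(cache.get(4))    # 4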
4. Time and Space Complexity
| Language   | Time Complexity | Space Complexity |
| C++        | O(1)            | O(n)             |
| Java       | O(1)            | O(n)             |
| JavaScript | O(1)            | O(n)             |
| Python     | O(1)            | O(n)             |
The provided code is an efficient implementation of an LRU Cache using a combination of a hash map and a doubly linked list. It ensures O(1) time complexity for both get and put operations, with a space complexity of O(n). This design is commonly used in scenarios where fast access and eviction of the least recently used items are required, such as in caching systems.