LFU Cache LeetCode Solution

Last updated on January 17th, 2025 at 01:03 am

Here, we see an LFU Cache LeetCode Solution. This LeetCode problem is solved using different approaches in several programming languages, including C++, Java, JavaScript, and Python.

List of all LeetCode Solutions

Topics

Design

Companies

Amazon, Google

Level of Question

Hard

1. Problem Statement

Design and implement a data structure for a Least Frequently Used (LFU) cache.

Implement the LFUCache class:

  • LFUCache(int capacity) Initializes the object with the capacity of the data structure.
  • int get(int key) Gets the value of the key if the key exists in the cache. Otherwise, returns -1.
  • void put(int key, int value) Updates the value of the key if present, or inserts the key if not already present. When the cache reaches its capacity, it should invalidate and remove the least frequently used key before inserting a new item. For this problem, when there is a tie (i.e., two or more keys with the same frequency), the least recently used key would be invalidated.

To determine the least frequently used key, a use counter is maintained for each key in the cache. The key with the smallest use counter is the least frequently used key.

When a key is first inserted into the cache, its use counter is set to 1 (due to the put operation). The use counter for a key in the cache is incremented whenever a get or put operation is called on it.

The functions get and put must each run in O(1) average time complexity.

Example 1:
Input ["LFUCache", "put", "put", "get", "put", "get", "get", "put", "get", "get", "get"] [[2], [1, 1], [2, 2], [1], [3, 3], [2], [3], [4, 4], [1], [3], [4]]
Output [null, null, null, 1, null, -1, 3, null, -1, 3, 4]

Explanation
// cnt(x) = the use counter for key x
// cache=[] will show the last used order for tiebreakers (leftmost element is most recent)
LFUCache lfu = new LFUCache(2);
lfu.put(1, 1); // cache=[1,_], cnt(1)=1
lfu.put(2, 2); // cache=[2,1], cnt(2)=1, cnt(1)=1
lfu.get(1); // return 1
// cache=[1,2], cnt(2)=1, cnt(1)=2
lfu.put(3, 3); // 2 is the LFU key because cnt(2)=1 is the smallest, invalidate 2.  
// cache=[3,1], cnt(3)=1, cnt(1)=2
lfu.get(2); // return -1 (not found)
lfu.get(3); // return 3
// cache=[3,1], cnt(3)=2, cnt(1)=2
lfu.put(4, 4); // Both 1 and 3 have the same cnt, but 1 is LRU, invalidate 1.
// cache=[4,3], cnt(4)=1, cnt(3)=2
lfu.get(1); // return -1 (not found)
lfu.get(3); // return 3
// cache=[3,4], cnt(4)=1, cnt(3)=3
lfu.get(4); // return 4
// cache=[4,3], cnt(4)=2, cnt(3)=3

2. Coding Pattern Used in Solution

The provided code implements the LFU Cache (Least Frequently Used Cache). It does not map to a single classic algorithmic pattern; instead, it falls under a “Cache Design” pattern. This pattern involves designing a data structure that efficiently manages a cache under specific constraints, such as an eviction policy (e.g., LFU or LRU).
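
The following is a minimal Python sketch of that idea, assuming a key-to-value map, a key-to-counter map, and per-frequency ordered buckets whose insertion order doubles as the LRU tie-breaker. The class and member names here are illustrative only and do not appear in the solutions below.

from collections import defaultdict, OrderedDict

class LFUSketch:
    """Illustrative skeleton of the cache-design idea (not the full solution)."""
    def __init__(self, capacity):
        self.capacity = capacity
        self.values = {}                         # key -> value
        self.freqs = {}                          # key -> use counter
        self.buckets = defaultdict(OrderedDict)  # use counter -> keys in LRU order
        self.min_freq = 0

    def _touch(self, key):
        # Move `key` from its current frequency bucket to the next one.
        f = self.freqs[key]
        del self.buckets[f][key]
        if not self.buckets[f] and self.min_freq == f:
            self.min_freq = f + 1                # old minimum bucket just emptied
        self.freqs[key] = f + 1
        self.buckets[f + 1][key] = None          # insertion order tracks recency

    def get(self, key):
        if key not in self.values:
            return -1
        self._touch(key)
        return self.values[key]

    def put(self, key, value):
        if self.capacity == 0:
            return
        if key in self.values:
            self.values[key] = value
            self._touch(key)
            return
        if len(self.values) >= self.capacity:
            # Evict the least recently used key in the minimum-frequency bucket.
            evict, _ = self.buckets[self.min_freq].popitem(last=False)
            del self.values[evict]
            del self.freqs[evict]
        self.values[key] = value
        self.freqs[key] = 1
        self.buckets[1][key] = None
        self.min_freq = 1

Every operation only touches hash maps and the ends of ordered dictionaries, which is what keeps get and put at O(1) average time.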

3. Code Implementation in Different Languages

3.1 LFU Cache C++

class LFUCache {
    int capacity, minFreq;
    struct Element {
        int value, fr;            // stored value and its use counter
        list<int>::iterator it;   // position of the key inside freq[fr]
    };
    unordered_map<int, Element> cache;   // key -> element
    unordered_map<int, list<int>> freq;  // use counter -> keys in LRU order
public:
    LFUCache(int capacity) {
        this->capacity = capacity;
        minFreq = 0;
    }
    int get(int key) {
        if (!cache.count(key)) return -1;
        // Move the key from its current frequency list to the next one.
        freq[cache[key].fr].erase(cache[key].it);
        cache[key].fr++;
        freq[cache[key].fr].push_back(key);
        cache[key].it = prev(freq[cache[key].fr].end());
        // If the old minimum-frequency list just became empty, bump minFreq.
        minFreq += freq[minFreq].empty();
        return cache[key].value;
    }

    void put(int key, int value) {
        if (!capacity) return;
        if (get(key) != -1) {   // key already present: get() bumped its counter
            cache[key].value = value;
            return;
        }
        if (cache.size() == capacity) {
            // Evict the least recently used key among the least frequently used.
            cache.erase(freq[minFreq].front());
            freq[minFreq].pop_front();
        }
        freq[1].push_back(key);
        cache[key] = {value, 1, prev(freq[1].end())};
        minFreq = 1;
    }
};

3.2 LFU Cache Java

class LFUCache {
    private final Map<Integer, CachedElement<Integer, Integer>> elements;              // key -> cached element
    private final TreeMap<Integer, LinkedList<Integer>> frequencies = new TreeMap<>(); // use counter -> keys (most recent first)
    private final int capacity;
    public LFUCache(int capacity) {
        this.capacity = capacity;
        this.elements = new HashMap<>(capacity);
    }

    public int get(int key) {
        CachedElement<Integer, Integer> element = this.elements.get(key);
        if (element == null) {
            return -1;
        }
        incrementFrequency(element);
        return element.value;
    }

    public void put(int key, int value) {
        if (!this.elements.containsKey(key) && this.capacity == this.elements.size()) {
            this.removeLFU();
        }
        CachedElement<Integer, Integer> element = this.elements.get(key);
        if (element == null) {
            element = new CachedElement<>(key, value);
            this.addNew(element);
        } else {
            element.value = value;
            incrementFrequency(element);
        }
        this.elements.put(key, element);
    }
    private void addNew(CachedElement<Integer, Integer> element) {
        LinkedList<Integer> elementsOfOneFrequency = this.frequencies.getOrDefault(
                element.accessCount,
                new LinkedList<>()
        );
        elementsOfOneFrequency.addFirst(element.key);
        this.frequencies.put(element.accessCount, elementsOfOneFrequency);
    }
    // Evict the least recently used key among those with the smallest use counter.
    private void removeLFU() {
        int elementsOfMinFrequencyKey = frequencies.firstKey();
        LinkedList<Integer> elementsOfMinFrequency = frequencies.remove(elementsOfMinFrequencyKey);
        int lfuKey = elementsOfMinFrequency.removeLast();
        if (!elementsOfMinFrequency.isEmpty()) {
            this.frequencies.put(elementsOfMinFrequencyKey, elementsOfMinFrequency);
        }
        this.elements.remove(lfuKey);
    }

    // Move the element's key from its current frequency bucket to the next one.
    private void incrementFrequency(CachedElement<Integer, Integer> element) {
        LinkedList<Integer> elementsOfCurrentFrequency = this.frequencies.remove(element.accessCount);
        elementsOfCurrentFrequency.remove(element.key);
        if (!elementsOfCurrentFrequency.isEmpty()) {
            this.frequencies.put(element.accessCount, elementsOfCurrentFrequency);
        }
        element.accessCount++;
        LinkedList<Integer> elementsOfHigherFrequency = this.frequencies.getOrDefault(
                element.accessCount,
                new LinkedList<>()
        );
        elementsOfHigherFrequency.addFirst(element.key);
        this.frequencies.put(element.accessCount, elementsOfHigherFrequency);
    }

    private static class CachedElement<K, V> {
        private final K key;
        private V value;
        private int accessCount;
        CachedElement(K key, V value) {
            this.key = key;
            this.value = value;
            this.accessCount = 1;
        }
    }
}

3.3 LFU Cache JavaScript

var LFUCache = function (capacity) {
    this.MIN_FREQUENCY = 1;
    this.currentMinFrequency = Number.MAX_VALUE;
    this.capacity = capacity;
    this.savedCapacity = capacity; // to check case with capacity === 0
    this.itemsByFrequency = new Map();
    this.frequencyByKey = new Map();
};

LFUCache.prototype.getValue = function (key) {
    if (!this.frequencyByKey.has(key)) return -1;
    const frequencyKey = this.frequencyByKey.get(key);
    return this.itemsByFrequency.get(frequencyKey).get(key);
}

LFUCache.prototype.addItem = function (key, value) {
    this.currentMinFrequency = this.MIN_FREQUENCY;
    this.frequencyByKey.set(key, this.MIN_FREQUENCY);
    const items = this.itemsByFrequency.get(this.MIN_FREQUENCY) || new Map();
    items.set(key, value);
    this.itemsByFrequency.set(this.MIN_FREQUENCY, items);
}

LFUCache.prototype.updateItem = function (key, value) {
    if (value === -1) return -1;
    // Move the key from its current frequency bucket to the next one.
    let currentItemFrequency = this.frequencyByKey.get(key);
    const currentItems = this.itemsByFrequency.get(currentItemFrequency);
    currentItems.delete(key);
    if (!currentItems.size && this.currentMinFrequency === currentItemFrequency) this.currentMinFrequency++;
    if (!currentItems.size) this.itemsByFrequency.delete(currentItemFrequency);
    if (currentItems.size) this.itemsByFrequency.set(currentItemFrequency, currentItems);
    this.frequencyByKey.delete(key);
    const updatedItemFrequency = currentItemFrequency + 1;
    this.frequencyByKey.set(key, updatedItemFrequency);
    const updatedItems = this.itemsByFrequency.get(updatedItemFrequency) || new Map();
    updatedItems.set(key, value);
    this.itemsByFrequency.set(updatedItemFrequency, updatedItems);
}

LFUCache.prototype.removeLFUItem = function () {
    if (!this.itemsByFrequency.size || !this.frequencyByKey.size) return null;
    const LFUItems = this.itemsByFrequency.get(this.currentMinFrequency);
    const LFUKey = LFUItems.keys().next().value;
    this.frequencyByKey.delete(Number(LFUKey));
    if (LFUItems.size === 1) {
        this.itemsByFrequency.delete(this.currentMinFrequency);
        this.currentMinFrequency = this.itemsByFrequency.keys().next().value;
    } else {
        LFUItems.delete(LFUKey);
        this.itemsByFrequency.set(this.currentMinFrequency, LFUItems);
    }
}
LFUCache.prototype.put = function (key, value) {
    if (this.frequencyByKey.has(key)) {
        this.updateItem(key, value);
        return null;
    }
    if (this.capacity) {
        this.addItem(key, value);
        this.capacity--;
        return null;
    }
    if (!this.capacity && this.savedCapacity) {
        this.removeLFUItem();
        this.addItem(key, value);
        return null;
    }
};
LFUCache.prototype.get = function (key) {
    this.updateItem(key, this.getValue(key));
    return this.getValue(key);
};

3.4 LFU Cache Python

import collections

class ListNode(object):
    def __init__(self, key, value):
        self.key = key
        self.val = value
        self.freq = 1

class LFUCache(object):
    def __init__(self, capacity):
        self.capacity = capacity
        self.cache = dict()                                             # key -> ListNode
        self.usage = collections.defaultdict(collections.OrderedDict)  # freq -> {key: value} in LRU order
        self.LF = 0                                                     # current least frequency

    def get(self, key):
        if key not in self.cache:return -1
        node = self.cache[key]
        self.update(node, node.val)
        return node.val
        
    def put(self, key, value):
        if self.capacity == 0: return
        if key not in self.cache: 
            if len(self.cache) >= self.capacity:
                k, v = self.usage[self.LF].popitem(last=False)
                self.cache.pop(k)
            node = ListNode(key, value)
            self.cache[key] = node
            self.usage[1][key] = value
            self.LF = 1
        else: 
            node = self.cache[key]
            node.val = value
            self.update(node, value)  
            
    def update(self, node, newVal):
        k, f = node.key, node.freq
        self.usage[f].pop(k)
        if not self.usage[f] and self.LF == f:
            self.LF += 1
        self.usage[f+1][k] = newVal
        node.freq += 1
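
For reference, here is a short driver (Python 3) that runs the operation sequence from Example 1 against the Python implementation above; the expected return values are shown in the comments.

lfu = LFUCache(2)
lfu.put(1, 1)
lfu.put(2, 2)
print(lfu.get(1))   # 1
lfu.put(3, 3)       # evicts key 2 (smallest use counter)
print(lfu.get(2))   # -1
print(lfu.get(3))   # 3
lfu.put(4, 4)       # counters tie; key 1 is least recently used, so it is evicted
print(lfu.get(1))   # -1
print(lfu.get(3))   # 3
print(lfu.get(4))   # 4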

4. Time and Space Complexity

Language      Time Complexity    Space Complexity
C++           O(1)               O(n)
Java          O(log n)           O(n)
JavaScript    O(1)               O(n)
Python        O(1)               O(n)
  • The LFU Cache is a highly efficient data structure for managing a cache with an LFU eviction policy.
  • The implementation in C++, JavaScript, and Python achieves O(1) time complexity for both get and put operations, while Java’s implementation has O(log n) complexity due to the use of TreeMap.
  • All implementations have a space complexity of O(n).