146. LRU Cache (Medium)

Description:

Design and implement a data structure for Least Recently Used (LRU) cache. It should support the following operations: get and put.

get(key) - Get the value (will always be positive) of the key if the key exists in the cache, otherwise return -1.
put(key, value) - Set or insert the value if the key is not already present. When the cache reached its capacity, it should invalidate the least recently used item before inserting a new item.

The cache is initialized with a positive capacity.

Follow up:
Could you do both operations in O(1) time complexity?

Example:

LRUCache cache = new LRUCache( 2 /* capacity */ );

cache.put(1, 1);
cache.put(2, 2);
cache.get(1); // returns 1
cache.put(3, 3); // evicts key 2
cache.get(2); // returns -1 (not found)
cache.put(4, 4); // evicts key 1
cache.get(1); // returns -1 (not found)
cache.get(3); // returns 3
cache.get(4); // returns 4


Solutions:

Brute force:

from collections import deque
class LRUCache:
    """Brute-force LRU cache: a dict for storage plus a deque that
    tracks recency order (leftmost = least recently used).

    get/put are O(n) because ``deque.remove`` must scan the queue;
    see the linked-list solution below for the O(1) version.
    """

    def __init__(self, capacity: int):
        self.q = deque()   # keys ordered from LRU (left) to MRU (right)
        self.dic = {}      # key -> value
        self.cap = capacity

    def get(self, key: int) -> int:
        """Return the value for `key` and mark it most recently used, or -1."""
        if key in self.dic:
            # Move the key to the MRU end; deque.remove is O(n).
            self.q.remove(key)
            self.q.append(key)
            return self.dic[key]
        return -1

    def put(self, key: int, value: int) -> None:
        """Insert or update `key`; evict the LRU key when over capacity."""
        if key not in self.dic:
            self.q.append(key)
        else:
            self.q.remove(key)
            self.q.append(key)
        self.dic[key] = value
        if len(self.q) > self.cap:
            # Evict the least recently used key (left end of the deque).
            self.dic.pop(self.q.popleft())
[Figure 1: LeetCode submission result screenshot for the brute-force solution]

Hashtable + doubly linked list: a hand-rolled (somewhat unorthodox) implementation

class Node:
    """Doubly linked list node holding one cached key/value pair.

    The key is stored on the node so eviction can also remove the
    matching dictionary entry.
    """

    def __init__(self, prev, key, value, next_):
        self.prev, self.key, self.val, self.next = prev, key, value, next_
        
class LRUCache:
    """LRU cache over a hand-rolled doubly linked list plus a dict.

    Layout: ``zero`` is a permanent sentinel at the far LRU end;
    ``left`` is the least recently used real node (or the sentinel while
    empty) and ``right`` is the most recently used node.  ``dic`` maps
    key -> Node and also stores the sentinel under the key ``None``,
    which is why ``cap`` and ``count`` are offset by one.
    """

    def __init__(self, capacity: int):
        # +1 accounts for the sentinel entry that lives in `dic`.
        self.cap = capacity+1
        # Sentinel node: never evicted, never holds data.
        self.zero = Node(None,None,None,None)
        self.left = self.zero
        self.right = self.zero
        # key -> Node; the sentinel is filed under key None.
        self.dic = {None:self.zero}
        # Number of entries in `dic`, sentinel included.
        self.count = 1

    def get(self, key: int) -> int:
        """Return the value for `key` (marking it most recently used), or -1."""
        if key in self.dic:
            if self.dic[key] != self.right:
                # Unlink the node from its current position...
                prev = self.dic[key].prev
                next_ = self.dic[key].next
                prev.next = next_
                next_.prev = prev
                if self.dic[key] == self.left:
                    # It was the LRU node; its successor becomes the new LRU.
                    # (prev is the sentinel here, so zero.next was just updated.)
                    self.left = self.zero.next
                # ...and re-append it at the MRU end.
                self.dic[key].prev = self.right
                self.right.next = self.dic[key]
                self.right = self.dic[key]
            return self.dic[key].val
        return -1

    def put(self, key: int, value: int) -> None:
        """Insert or update `key`, evicting the LRU entry when over capacity."""
        if key not in self.dic:
            self.count += 1
            self.dic[key] = Node(self.right,key,value,None)
            if self.count == 2:
                # First real node: it is both LRU and MRU.
                self.left = self.dic[key]
            self.right.next = self.dic[key]
            self.right = self.dic[key]
            if self.count > self.cap:
                # Evict the least recently used node and relink the sentinel.
                self.dic.pop(self.left.key)
                self.left = self.left.next
                self.zero.next = self.left
                self.left.prev = self.zero
                self.count -= 1
        elif self.dic[key] == self.right:
            # Already the MRU node: just overwrite the value in place.
            self.dic[key].val = value
        else:
            # Unlink, update, re-append at the MRU end (same dance as get()).
            prev = self.dic[key].prev
            next_ = self.dic[key].next
            prev.next = next_
            next_.prev = prev
            if self.dic[key] == self.left:
                self.left = self.zero.next
            self.dic[key].val = value
            self.dic[key].prev = self.right
            self.right.next = self.dic[key]
            self.right = self.dic[key]

Runtime: 244 ms, faster than 43.29% of Python3 online submissions for LRU Cache.
Memory Usage: 23.2 MB, less than 5.07% of Python3 online submissions for LRU Cache.

Sample 192 ms submission: a "cheating" solution that relies on plain-dict ordering

NOTE: The key trick is `del self.d[next(iter(self.d))]`, which deletes the first (oldest) entry. This works because Python dictionaries preserve insertion order — an implementation detail in CPython 3.6 and a language guarantee since Python 3.7.

class LRUCache:
    """LRU cache that leans on the insertion-order guarantee of plain
    dicts (CPython 3.6+, language-guaranteed since 3.7): the first key
    in iteration order is the least recently used.
    """

    def __init__(self, capacity: int):
        self.cap = capacity
        self.d = {}

    def get(self, key: int) -> int:
        """Return the value for `key` (marking it most recently used), or -1."""
        if key not in self.d:
            return -1
        # Remove and re-insert so the key moves to the MRU end.
        value = self.d.pop(key)
        self.d[key] = value
        return value

    def put(self, key: int, value: int) -> None:
        """Insert or update `key`; evict the LRU key when over capacity."""
        # Drop any stale position, then insert at the MRU end.
        self.d.pop(key, None)
        self.d[key] = value
        if len(self.d) > self.cap:
            # The first key in iteration order is the LRU entry.
            self.d.pop(next(iter(self.d)))

Using OrderedDict

from collections import OrderedDict
class LRUCache:
    """LRU cache built on ``collections.OrderedDict``: recency order is
    the dict order, maintained with ``move_to_end`` and evicted with
    ``popitem(last=False)``.
    """

    def __init__(self, capacity: int):
        self.cap = capacity
        self.dic = OrderedDict()

    def get(self, key: int) -> int:
        """Return the value for `key` (marking it most recently used), or -1."""
        if key not in self.dic:
            return -1
        self.dic.move_to_end(key)
        return self.dic[key]

    def put(self, key: int, value: int) -> None:
        """Insert or update `key`; evict the LRU entry when over capacity."""
        if key in self.dic:
            self.dic[key] = value
            self.dic.move_to_end(key)
            return
        self.dic[key] = value
        if len(self.dic) > self.cap:
            # popitem(last=False) removes the oldest (LRU) entry.
            self.dic.popitem(last=False)

Runtime: 192 ms, faster than 99.80% of Python3 online submissions for LRU Cache.
Memory Usage: 23 MB, less than 5.07% of Python3 online submissions for LRU Cache.

https://docs.python.org/3.7/library/collections.html#collections.OrderedDict

CPP Solution: combine linked_list and hashmap

Inspired by https://zxi.mytechroad.com/blog/hashtable/leetcode-146-lru-cache/

class LRUCache {
public:
    // Cache holding at most `capacity_` key/value pairs; get/put are O(1).
    LRUCache(int capacity_) {
        capacity = capacity_;
    }

    // Return the value for `key`, or -1 if absent.  A hit splices the
    // entry to the front of the list (most recently used).
    int get(int key) {
        const auto it = dic.find(key);
        if (it != dic.cend()) {
            // splice relinks the node in O(1); list iterators stay valid.
            ls.splice(ls.begin(), ls, it->second);
            return it->second->second;
        }
        return -1;
    }

    // Insert or update `key`; evict the least recently used entry
    // (the back of the list) when over capacity.
    void put(int key, int value) {
        const auto it = dic.find(key);
        if (it != dic.cend()) {
            ls.splice(ls.begin(), ls, it->second);
            it->second->second = value;
        } else {
            ls.emplace_front(key, value);
            dic[key] = ls.begin();
            // Cast avoids the signed/unsigned comparison warning.
            if (static_cast<int>(ls.size()) > capacity) {
                dic.erase(ls.back().first);
                ls.pop_back();
            }
        }
    }

private:
    int capacity;
    // Front = most recently used, back = least recently used.
    std::list<std::pair<int, int>> ls;
    // key -> iterator into `ls`, giving O(1) access to any node.
    std::unordered_map<int, std::list<std::pair<int, int>>::iterator> dic;
};

Runtime: 88 ms, faster than 99.62% of C++ online submissions for LRU Cache.
Memory Usage: 38 MB, less than 78.53% of C++ online submissions for LRU Cache.

You may also be interested in: 146. LRU Cache (Medium)