Design an LRU (Least Recently Used) Cache

Constraints and assumptions

  • What are we caching?
    We are caching the results of web queries
  • Can we assume the inputs are valid, or do we need to validate them?
    Assume the inputs are valid
  • Can we assume it fits in memory?
    Yes

Code implementation

class Node(object):

    def __init__(self, query, results):
        self.query = query
        self.results = results
        self.prev = None
        self.next = None


class LinkedList(object):

    def __init__(self):
        self.head = None
        self.tail = None

    def move_to_front(self, node):  # ...
        pass

    def append_to_front(self, node):  # ...
        pass

    def remove_from_tail(self):  # ...
        pass


class Cache(object):

    def __init__(self, MAX_SIZE):
        self.MAX_SIZE = MAX_SIZE
        self.size = 0
        self.lookup = {}  # key: query, value: node
        self.linked_list = LinkedList()

    def get(self, query):
        '''Get the stored query result from the cache.

        Accessing a node updates its position to the front of the LRU list.
        '''
        node = self.lookup.get(query)
        if node is None:
            return None
        self.linked_list.move_to_front(node)
        return node.results

    def set(self, results, query):
        '''Set the result for the given query key in the cache.

        When updating an entry, update its position to the front of the LRU list.
        If the entry is new and the cache is at capacity, remove the oldest entry
        before the new entry is added.
        '''
        node = self.lookup.get(query)
        if node is not None:
            # Key exists in cache: update the value and mark it most recently used
            node.results = results
            self.linked_list.move_to_front(node)
        else:
            # Key does not exist in cache
            if self.size == self.MAX_SIZE:
                # At capacity: evict the least recently used entry (the tail)
                self.lookup.pop(self.linked_list.tail.query, None)
                self.linked_list.remove_from_tail()
            else:
                self.size += 1
            # Add the new entry and mark it most recently used
            new_node = Node(query, results)
            self.linked_list.append_to_front(new_node)
            self.lookup[query] = new_node
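
The linked-list helper methods are left as stubs above. As a minimal sketch of one way to fill them in, assume a doubly linked list whose head holds the most recently used node and whose tail holds the least recently used node; every operation stays O(1), which is what makes the hash-map-plus-linked-list combination attractive for an LRU cache:

class LinkedList(object):

    def __init__(self):
        self.head = None  # most recently used
        self.tail = None  # least recently used

    def append_to_front(self, node):
        # Insert the node before the current head
        node.prev = None
        node.next = self.head
        if self.head is not None:
            self.head.prev = node
        self.head = node
        if self.tail is None:
            self.tail = node

    def move_to_front(self, node):
        # Detach the node from its current position, then re-insert it at the head
        if node is self.head:
            return
        if node.prev is not None:
            node.prev.next = node.next
        if node.next is not None:
            node.next.prev = node.prev
        if node is self.tail:
            self.tail = node.prev
        self.append_to_front(node)

    def remove_from_tail(self):
        # Drop the least recently used node
        if self.tail is None:
            return
        old_tail = self.tail
        self.tail = old_tail.prev
        if self.tail is not None:
            self.tail.next = None
        else:
            self.head = None
        old_tail.prev = None
        old_tail.next = None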

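Assuming the linked-list sketch above, the cache could be exercised as follows; the query strings, results, and MAX_SIZE of 2 are illustrative only:

cache = Cache(MAX_SIZE=2)
cache.set('result A', 'query A')  # note: results come first per the signature above
cache.set('result B', 'query B')

print(cache.get('query A'))       # 'result A'; 'query A' becomes most recently used
cache.set('result C', 'query C')  # evicts 'query B', the least recently used entry
print(cache.get('query B'))       # None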