数据结构-LRU缓存go实现

LRU缓存

LRU（Least Recently Used，最近最少使用）缓存：容量固定，当缓存满时优先淘汰最久未被访问的条目。下面直接看 Go 实现代码。

package lru

import (
	"container/list"
	"sync"
)

// LRUCache is a fixed-capacity, concurrency-safe least-recently-used
// cache. A doubly linked list keeps entries in recency order (front =
// most recently used) while the map provides O(1) lookup of a key's
// list element.
type LRUCache struct {
	// capacity is the maximum number of entries held before the
	// least recently used one is evicted.
	capacity int
	// list orders entries from most (front) to least (back)
	// recently used.
	list *list.List
	// cache maps each key to its element in list for O(1) access.
	cache map[interface{}]*list.Element
	// lock guards list and cache. Note that Get also mutates the
	// list (MoveToFront), so Get must take the write lock too.
	lock sync.RWMutex
}

// ElementValue is the payload stored in each list element. Key is kept
// alongside Value so that eviction from the back of the list can delete
// the matching map entry without a reverse lookup.
type ElementValue struct {
	Key   interface{}
	Value interface{}
}

// NewLRUCache returns an empty cache that holds at most capacity
// entries; inserting beyond that evicts the least recently used one.
func NewLRUCache(capacity int) *LRUCache {
	c := &LRUCache{capacity: capacity}
	c.list = list.New()
	c.cache = make(map[interface{}]*list.Element)
	return c
}

// Get returns the value stored under key and marks the entry as most
// recently used. The second result reports whether the key was present.
// It takes the write lock because the recency list is mutated.
func (c *LRUCache) Get(key interface{}) (interface{}, bool) {
	c.lock.Lock()
	defer c.lock.Unlock()

	element, ok := c.cache[key]
	if !ok {
		return nil, false
	}
	c.list.MoveToFront(element)
	return element.Value.(*ElementValue).Value, true
}

// Push inserts value under key, or updates an existing entry, marking
// it as most recently used in either case. When the insertion pushes
// the cache past its capacity, the least recently used entry (the back
// of the list) is evicted.
func (c *LRUCache) Push(key, value interface{}) {
	c.lock.Lock()
	defer c.lock.Unlock()

	if element, ok := c.cache[key]; ok {
		c.list.MoveToFront(element)
		// Reuse the element already in hand instead of performing a
		// second map lookup via c.cache[key].
		element.Value.(*ElementValue).Value = value
		return
	}
	ev := &ElementValue{Key: key, Value: value}
	c.cache[key] = c.list.PushFront(ev)
	if c.list.Len() > c.capacity {
		// Evict the least recently used entry; its stored Key lets us
		// remove the matching map entry.
		b := c.list.Back()
		c.list.Remove(b)
		delete(c.cache, b.Value.(*ElementValue).Key)
	}
}

// Remove deletes key from the cache if present; otherwise it is a no-op.
func (c *LRUCache) Remove(key interface{}) {
	c.lock.Lock()
	defer c.lock.Unlock()

	element, ok := c.cache[key]
	if !ok {
		return
	}
	delete(c.cache, key)
	c.list.Remove(element)
}

// Size reports the number of entries currently held in the cache.
// Only a read lock is needed since list.Len is a pure read.
func (c *LRUCache) Size() int {
	c.lock.RLock()
	defer c.lock.RUnlock()
	return c.list.Len()
}

测试

  • 使用命令
go test -v -coverprofile=c.out ./lru
  • 测试代码
package lru

import (
	"testing"
)

// TestNewLRUCache verifies that a freshly created cache is empty.
func TestNewLRUCache(t *testing.T) {
	cache := NewLRUCache(10)
	if got := cache.Size(); got != 0 {
		t.Errorf("lru.Size() != 0")
	}
}

// TestLRUCache exercises insertion, lookup, LRU eviction and explicit
// removal. The call order below is significant: the three Gets refresh
// a/b/c before d/e/f are pushed, so "a" is the LRU entry when capacity
// (5) is exceeded.
func TestLRUCache(t *testing.T) {
	cache := NewLRUCache(5)
	cache.Push("a", 1)
	cache.Push("b", 2)
	cache.Push("c", 3)
	if got := cache.Size(); got != 3 {
		t.Errorf("lru.Size() != 3")
	}
	if value, ok := cache.Get("a"); !ok || value.(int) != 1 {
		t.Errorf("lru.Get(\"a\") != 1")
	}
	if value, ok := cache.Get("b"); !ok || value.(int) != 2 {
		t.Errorf("lru.Get(\"b\") != 2")
	}
	if value, ok := cache.Get("c"); !ok || value.(int) != 3 {
		t.Errorf("lru.Get(\"c\") != 3")
	}
	if _, ok := cache.Get("d"); ok {
		t.Errorf("lru.Get(\"d\") != nil")
	}
	cache.Push("d", 4)
	cache.Push("e", 5)
	cache.Push("f", 6)
	if got := cache.Size(); got != 5 {
		t.Errorf("lru.Size() != 5")
	}
	if _, ok := cache.Get("a"); ok {
		t.Errorf("lru.Get(\"a\") != nil")
	}
	cache.Remove("b")
	if got := cache.Size(); got != 4 {
		t.Errorf("lru.Size() != 4")
	}
	if _, ok := cache.Get("b"); ok {
		t.Errorf("lru.Get(\"b\") != nil")
	}
}
  • 测试结果
=== RUN   TestNewLRUCache
--- PASS: TestNewLRUCache (0.00s)
=== RUN   TestLRUCache
--- PASS: TestLRUCache (0.00s)
PASS
coverage: 89.3% of statements
ok      coco/lru        0.002s  coverage: 89.3% of statements

你可能感兴趣的:(数据结构,缓存,go)