Learn how to implement a generic LRU cache in Go. This tutorial covers the core concepts and walks through a step-by-step example of building a robust caching mechanism with Go generics.
generic LRU cache, Go programming, caching mechanism, go generics, data structure, performance optimization
package main
import (
"container/list"
"fmt"
)
// CacheItem holds a key-value pair stored in the eviction list.
// The key is kept alongside the value so that an evicted list element
// can be removed from the lookup map without a reverse search.
type CacheItem[K comparable, V any] struct {
	Key   K
	Value V
}

// LRUCache is a generic fixed-capacity cache with least-recently-used
// eviction. It is not safe for concurrent use; callers must provide
// their own synchronization if needed.
type LRUCache[K comparable, V any] struct {
	capacity int                 // maximum number of entries retained
	items    map[K]*list.Element // key -> element in order
	order    *list.List          // front = most recently used, back = least
}

// NewLRUCache creates a new LRUCache with the specified capacity.
// A capacity below 1 effectively behaves as a capacity of 1: Put
// always retains at least the most recently inserted entry.
func NewLRUCache[K comparable, V any](capacity int) *LRUCache[K, V] {
	return &LRUCache[K, V]{
		capacity: capacity,
		items:    make(map[K]*list.Element),
		order:    list.New(),
	}
}

// Get retrieves the value for key and marks it as most recently used.
// It returns the zero value of V and false when key is absent.
func (c *LRUCache[K, V]) Get(key K) (V, bool) {
	if elem, found := c.items[key]; found {
		c.order.MoveToFront(elem) // accessed: promote to most recent
		return elem.Value.(CacheItem[K, V]).Value, true
	}
	var zero V
	return zero, false
}

// Put inserts or updates the value for key, marking it as most recently
// used. When inserting a new key would exceed capacity, the least
// recently used entry is evicted first.
func (c *LRUCache[K, V]) Put(key K, value V) {
	if elem, found := c.items[key]; found {
		// Existing key: refresh recency and overwrite the stored value.
		c.order.MoveToFront(elem)
		elem.Value = CacheItem[K, V]{Key: key, Value: value}
		return
	}
	// Use >= rather than == so a cache whose length has already reached
	// or passed capacity (possible when capacity <= 0, where the strict
	// equality check never fires again) evicts instead of growing
	// without bound.
	if c.order.Len() >= c.capacity {
		if back := c.order.Back(); back != nil {
			c.order.Remove(back)
			delete(c.items, back.Value.(CacheItem[K, V]).Key)
		}
	}
	c.items[key] = c.order.PushFront(CacheItem[K, V]{Key: key, Value: value})
}
// main demonstrates LRU behavior with a two-entry cache.
func main() {
	cache := NewLRUCache[int, string](2)

	cache.Put(1, "one")
	cache.Put(2, "two")

	// Reading key 1 promotes it to most recently used.
	v, ok := cache.Get(1)
	fmt.Println(v, ok) // Outputs: one true

	// Key 2 is now least recently used, so inserting key 3 evicts it.
	cache.Put(3, "three")
	v, ok = cache.Get(2)
	fmt.Println(v, ok) // Outputs: "" false (not found)

	// Key 1 has become least recently used; inserting key 4 evicts it.
	cache.Put(4, "four")
	v, ok = cache.Get(1)
	fmt.Println(v, ok) // Outputs: "" false (not found)

	v, ok = cache.Get(3)
	fmt.Println(v, ok) // Outputs: three true

	v, ok = cache.Get(4)
	fmt.Println(v, ok) // Outputs: four true
}
How do I avoid rehashing overhead with std::set in multithreaded code?
How do I find elements with custom comparators with std::set for embedded targets?
How do I erase elements while iterating with std::set for embedded targets?
How do I provide stable iteration order with std::unordered_map for large datasets?
How do I reserve capacity ahead of time with std::unordered_map for large datasets?
How do I erase elements while iterating with std::unordered_map in multithreaded code?
How do I provide stable iteration order with std::map for embedded targets?
How do I provide stable iteration order with std::map in multithreaded code?
How do I avoid rehashing overhead with std::map in performance-sensitive code?
How do I merge two containers efficiently with std::map for embedded targets?