An LRU (Least Recently Used) cache is a popular caching mechanism that discards the least recently used items first. In Go, we can implement an LRU cache by combining a map (giving O(1) lookup from a key to its list node) with a doubly linked list from the standard container/list package (which tracks recency of use, most recent at the front).
Here’s how you can create a simple LRU cache in Go:
package main
import (
"container/list"
"fmt"
)
// LRUCache is a fixed-capacity cache that evicts the least recently
// used item once its capacity is exceeded. Recency is tracked with a
// doubly linked list (front = most recently used) while the map gives
// O(1) lookup from a key to its list node. Not safe for concurrent use.
type LRUCache struct {
	capacity int
	cache    map[int]*list.Element
	list     *list.List
}

// entry is the payload held in each list node: the key is stored
// alongside the value so eviction can delete the matching map entry
// without a reverse lookup.
type entry struct {
	key   int
	value int
}

// NewLRUCache returns an empty cache holding at most capacity items.
func NewLRUCache(capacity int) *LRUCache {
	c := &LRUCache{
		capacity: capacity,
		cache:    make(map[int]*list.Element),
		list:     list.New(),
	}
	return c
}

// Get returns the value stored under key and marks it most recently
// used. It returns -1 on a miss, so a stored value of -1 is
// indistinguishable from an absent key — a limitation of this interface.
func (c *LRUCache) Get(key int) int {
	elem, ok := c.cache[key]
	if !ok {
		return -1
	}
	c.list.MoveToFront(elem)
	return elem.Value.(entry).value
}

// Put inserts or updates the value stored under key and marks it most
// recently used. If the insertion pushes the cache past capacity, the
// least recently used item (the list's back element) is evicted.
func (c *LRUCache) Put(key int, value int) {
	if elem, ok := c.cache[key]; ok {
		// Key already present: overwrite in place and refresh recency.
		elem.Value = entry{key, value}
		c.list.MoveToFront(elem)
		return
	}
	c.cache[key] = c.list.PushFront(entry{key, value})
	if c.list.Len() > c.capacity {
		// Evict the coldest item. Remove leaves Element.Value intact,
		// so the key is still readable for the map delete.
		oldest := c.list.Back()
		if oldest != nil {
			delete(c.cache, oldest.Value.(entry).key)
			c.list.Remove(oldest)
		}
	}
}
// main demonstrates the cache: with capacity 2, inserting a third key
// evicts whichever existing key was touched least recently.
func main() {
	cache := NewLRUCache(2)

	cache.Put(1, 1)
	cache.Put(2, 2)
	fmt.Println(cache.Get(1)) // 1 — hit; key 1 becomes most recently used

	cache.Put(3, 3)           // over capacity: key 2 is evicted
	fmt.Println(cache.Get(2)) // -1 — miss

	cache.Put(4, 4)           // over capacity: key 1 is evicted
	fmt.Println(cache.Get(1)) // -1 — miss
	fmt.Println(cache.Get(3)) // 3
	fmt.Println(cache.Get(4)) // 4
}
How do I avoid rehashing overhead with std::unordered_set in multithreaded code? (Note: std::set is tree-based and never rehashes; rehashing applies only to the unordered containers.)
How do I find elements with custom comparators with std::set for embedded targets?
How do I erase elements while iterating with std::set for embedded targets?
How do I provide stable iteration order with std::unordered_map for large datasets?
How do I reserve capacity ahead of time with std::unordered_map for large datasets?
How do I erase elements while iterating with std::unordered_map in multithreaded code?
How do I provide stable iteration order with std::map for embedded targets?
How do I provide stable iteration order with std::map in multithreaded code?
How do I avoid rehashing overhead with std::unordered_map in performance-sensitive code? (Note: std::map is tree-based and never rehashes; rehashing applies only to the unordered containers.)
How do I merge two containers efficiently with std::map for embedded targets?