An LRU (Least Recently Used) cache is a data structure that stores a limited number of items and evicts the least recently used item when it reaches its capacity. In Swift, an LRU cache is typically implemented by combining a dictionary (for O(1) key lookup) with a doubly linked list (for O(1) tracking of usage order). Below is a simple implementation of an LRU cache in Swift.
    /// A doubly-linked-list node pairing a cache key with its cached value.
    ///
    /// `prev` is declared `weak` to break the strong reference cycles that
    /// otherwise form between every pair of adjacent nodes (and between the
    /// head/tail sentinels, which point at each other) — with strong links in
    /// both directions, deallocating the cache would leak all of its nodes.
    /// The list stays alive through the strong `next` chain plus the cache's
    /// storage dictionary, so a weak back-pointer is sufficient.
    final class Node {
        var key: Int
        var value: Int
        weak var prev: Node?
        var next: Node?

        /// Creates an unlinked node.
        /// - Parameters:
        ///   - key: The cache key this node represents.
        ///   - value: The value stored for that key.
        init(key: Int, value: Int) {
            self.key = key
            self.value = value
        }
    }
    /// A fixed-capacity Least-Recently-Used (LRU) cache with O(1) `get` and `put`.
    ///
    /// Combines a dictionary (constant-time key lookup) with a doubly linked
    /// list bounded by `head`/`tail` sentinel nodes (constant-time reordering
    /// and eviction). The node immediately after `head` is the most recently
    /// used entry; the node immediately before `tail` is the next eviction
    /// candidate.
    final class LRUCache {
        /// Maximum number of entries; an insertion beyond this evicts the LRU entry.
        private let capacity: Int
        /// Key → node lookup table; its count is the cache's current size.
        private var storage: [Int: Node]
        /// Sentinel preceding the most-recently-used node (never evicted).
        private let head: Node
        /// Sentinel following the least-recently-used node (never evicted).
        private let tail: Node

        /// Creates an empty cache.
        /// - Parameter capacity: Maximum number of key/value pairs retained.
        init(_ capacity: Int) {
            self.capacity = capacity
            self.storage = [:]
            // Sentinel nodes carry a dummy key/value and are never looked up.
            self.head = Node(key: -1, value: -1)
            self.tail = Node(key: -1, value: -1)
            head.next = tail
            tail.prev = head
        }

        /// Returns the value for `key` and marks it most recently used,
        /// or -1 if the key is not present.
        func get(_ key: Int) -> Int {
            guard let node = storage[key] else { return -1 }
            moveToHead(node)
            return node.value
        }

        /// Inserts or updates `key` with `value`, marking it most recently
        /// used. If the insertion pushes the cache past `capacity`, the least
        /// recently used entry is evicted.
        func put(_ key: Int, _ value: Int) {
            if let node = storage[key] {
                // Existing key: update in place and refresh its recency.
                node.value = value
                moveToHead(node)
            } else {
                let newNode = Node(key: key, value: value)
                storage[key] = newNode
                addToHead(newNode)
                if storage.count > capacity {
                    let lruNode = removeTail()
                    storage.removeValue(forKey: lruNode.key)
                }
            }
        }

        // MARK: - Linked-list maintenance

        /// Re-links `node` into the most-recently-used position.
        private func moveToHead(_ node: Node) {
            removeNode(node)
            addToHead(node)
        }

        /// Splices `node` in directly after the `head` sentinel.
        private func addToHead(_ node: Node) {
            node.prev = head
            node.next = head.next
            head.next?.prev = node
            head.next = node
        }

        /// Unlinks `node` from the list without touching `storage`.
        private func removeNode(_ node: Node) {
            node.prev?.next = node.next
            node.next?.prev = node.prev
        }

        /// Unlinks and returns the least recently used node.
        /// Only called right after an insertion, so the list is guaranteed to
        /// contain at least one real node and `tail.prev` is non-nil.
        private func removeTail() -> Node {
            let lruNode = tail.prev!
            removeNode(lruNode)
            return lruNode
        }
    }
    
				
	
													How do I avoid rehashing overhead with std::set in multithreaded code?
														
													How do I find elements with custom comparators with std::set for embedded targets?
														
													How do I erase elements while iterating with std::set for embedded targets?
														
													How do I provide stable iteration order with std::unordered_map for large datasets?
														
													How do I reserve capacity ahead of time with std::unordered_map for large datasets?
														
													How do I erase elements while iterating with std::unordered_map in multithreaded code?
														
													How do I provide stable iteration order with std::map for embedded targets?
														
													How do I provide stable iteration order with std::map in multithreaded code?
														
													How do I avoid rehashing overhead with std::map in performance-sensitive code?
														
													How do I merge two containers efficiently with std::map for embedded targets?