Implementing a generic hash table in Swift can enhance code reusability and performance. Here's how you can create a basic hash table that works with any `Hashable` key type and any value type.
/// A basic generic hash table using separate chaining for collision handling.
///
/// Keys must be `Hashable` so they can be bucketed by hash value and
/// compared for equality within a bucket.
final class HashTable<Key: Hashable, Value> {
    /// Fixed array of buckets; colliding entries chain inside the same bucket.
    private var buckets: [[(key: Key, value: Value)]]
    private let capacity: Int

    /// Creates a table with a fixed number of buckets.
    /// - Parameter capacity: Number of buckets; must be greater than zero.
    init(capacity: Int) {
        // Guard against division by zero in index(for:).
        precondition(capacity > 0, "capacity must be positive")
        self.capacity = capacity
        self.buckets = Array(repeating: [], count: capacity)
    }

    /// Maps a key to a bucket index in `0..<capacity`.
    private func index(for key: Key) -> Int {
        // Note: abs(hashValue) traps when hashValue == Int.min;
        // masking off the sign bit yields a non-negative value safely.
        return (key.hashValue & Int.max) % capacity
    }

    /// Inserts a key–value pair, replacing the value if the key already exists.
    func insert(key: Key, value: Value) {
        let index = self.index(for: key)
        for (i, entry) in buckets[index].enumerated() where entry.key == key {
            buckets[index][i].value = value
            return
        }
        buckets[index].append((key, value))
    }

    /// Returns the value stored for `key`, or `nil` if the key is absent.
    func get(key: Key) -> Value? {
        let index = self.index(for: key)
        return buckets[index].first { $0.key == key }?.value
    }

    /// Removes the entry for `key`, if present; does nothing otherwise.
    func remove(key: Key) {
        let index = self.index(for: key)
        buckets[index].removeAll { $0.key == key }
    }
}
// Example usage:
// Type parameters are spelled explicitly because init(capacity:) alone
// gives the compiler nothing to infer Key and Value from.
let hashTable = HashTable<String, Int>(capacity: 10)
hashTable.insert(key: "one", value: 1)
hashTable.insert(key: "two", value: 2)
print(hashTable.get(key: "one") ?? "Not found") // Outputs: 1
hashTable.remove(key: "one")
print(hashTable.get(key: "one") ?? "Not found") // Outputs: Not found
How do I avoid rehashing overhead with std::set in multithreaded code?
How do I find elements with custom comparators with std::set for embedded targets?
How do I erase elements while iterating with std::set for embedded targets?
How do I provide stable iteration order with std::unordered_map for large datasets?
How do I reserve capacity ahead of time with std::unordered_map for large datasets?
How do I erase elements while iterating with std::unordered_map in multithreaded code?
How do I provide stable iteration order with std::map for embedded targets?
How do I provide stable iteration order with std::map in multithreaded code?
How do I avoid rehashing overhead with std::map in performance-sensitive code?
How do I merge two containers efficiently with std::map for embedded targets?