To cache database query results with Redis in Go, you can follow the steps below. By using Redis as a caching layer, you can reduce the number of database queries and improve the performance of your application. Here's a simple example of how to implement this:
// Import required packages
package main
import (
	"database/sql"
	"encoding/json"
	"fmt"
	"log"
	"time"

	"github.com/go-redis/redis/v8"
	_ "github.com/lib/pq" // Postgres driver
	"golang.org/x/net/context"
)
// Initialize Redis client
// ctx is the background context used for every Redis call in this
// example; real code would thread a per-request context instead of a
// package-level one.
var ctx = context.Background()
// rdb is the shared Redis client. go-redis clients are safe for
// concurrent use, so one package-level instance is intentional.
var rdb = redis.NewClient(&redis.Options{
Addr: "localhost:6379", // default local Redis address; no auth, DB 0
})
// Function to get data from the database or cache
// getData returns the result rows for query, consulting Redis first.
// On a cache hit the JSON-encoded value is decoded and returned; on a
// miss (or an undecodable entry) the database is queried, the rows are
// cached for 10 minutes, and the fresh results are returned.
func getData(query string) ([]string, error) {
	// Fast path: cache hit. Any Get error — including redis.Nil for a
	// missing key — falls through to the database query below.
	if cached, err := rdb.Get(ctx, query).Result(); err == nil {
		var results []string
		if err := json.Unmarshal([]byte(cached), &results); err == nil {
			fmt.Println("Data retrieved from cache")
			return results, nil
		}
		// Corrupt/legacy cache entry: ignore it and re-query.
	}

	// NOTE(review): opening a connection per call is expensive; *sql.DB
	// is a connection pool and should normally live at package level.
	db, err := sql.Open("postgres", "user=username dbname=mydb sslmode=disable")
	if err != nil {
		return nil, err
	}
	defer db.Close()

	rows, err := db.Query(query)
	if err != nil {
		return nil, err
	}
	defer rows.Close()

	var results []string
	for rows.Next() {
		var result string
		if err := rows.Scan(&result); err != nil {
			return nil, err
		}
		results = append(results, result)
	}
	// Surface any error hit during iteration (the original ignored it).
	if err := rows.Err(); err != nil {
		return nil, err
	}

	// Cache the JSON-encoded rows with a 10-minute TTL. The original
	// passed []string directly to Set, which go-redis cannot marshal —
	// Set always failed and the cache was never populated.
	encoded, err := json.Marshal(results)
	if err != nil {
		return nil, err
	}
	if err := rdb.Set(ctx, query, encoded, 10*time.Minute).Err(); err != nil {
		return nil, err
	}
	fmt.Println("Data retrieved from database")
	return results, nil
}
// Function to parse cached data
func parseData(data string) []string {
// This function should convert the string from Redis back to the original type
// For simplicity, we'll return a dummy slice
return []string{data}
}
// main runs one sample query through the cache-aware lookup and prints
// the resulting rows, aborting the process on any error.
func main() {
	const query = "SELECT name FROM users"

	data, err := getData(query)
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println(data)
}
How do I avoid rehashing overhead with std::set in multithreaded code?
How do I find elements with custom comparators with std::set for embedded targets?
How do I erase elements while iterating with std::set for embedded targets?
How do I provide stable iteration order with std::unordered_map for large datasets?
How do I reserve capacity ahead of time with std::unordered_map for large datasets?
How do I erase elements while iterating with std::unordered_map in multithreaded code?
How do I provide stable iteration order with std::map for embedded targets?
How do I provide stable iteration order with std::map in multithreaded code?
How do I avoid rehashing overhead with std::map in performance-sensitive code?
How do I merge two containers efficiently with std::map for embedded targets?