package main
import (
"context"
"github.com/jackc/pgx/v5"
"log"
)
// main connects to PostgreSQL and bulk-inserts three rows into your_table
// using a pgx batch (one network round trip for all queued statements).
func main() {
	// Connect to the database.
	conn, err := pgx.Connect(context.Background(), "postgres://username:password@localhost:5432/dbname")
	if err != nil {
		log.Fatalf("Unable to connect to database: %v", err)
	}
	defer conn.Close(context.Background())

	// Rows to bulk-insert; each inner slice holds (column1, column2).
	items := [][]interface{}{
		{"value1", 100},
		{"value2", 200},
		{"value3", 300},
	}

	// Queue one parameterized INSERT per row.
	batch := &pgx.Batch{}
	for _, item := range items {
		batch.Queue("INSERT INTO your_table (column1, column2) VALUES ($1, $2)", item[0], item[1])
	}

	br := conn.SendBatch(context.Background(), batch)

	// Read the result of EVERY queued statement, not just the first:
	// each Exec consumes one result, so a single call would silently
	// discard errors from the second and third INSERT.
	var execErr error
	for i := 0; i < batch.Len(); i++ {
		if _, err := br.Exec(); err != nil {
			execErr = err
			break
		}
	}
	// Close explicitly (not via defer): log.Fatalf calls os.Exit, which
	// skips deferred calls, and Close can itself surface a batch error.
	if err := br.Close(); err != nil && execErr == nil {
		execErr = err
	}
	if execErr != nil {
		log.Fatalf("Batch insert failed: %v", execErr)
	}
	log.Println("Bulk insert completed successfully.")
}
How do I avoid rehashing overhead with std::unordered_set in multithreaded code?
How do I find elements with custom comparators with std::set for embedded targets?
How do I erase elements while iterating with std::set for embedded targets?
How do I provide stable iteration order with std::unordered_map for large datasets?
How do I reserve capacity ahead of time with std::unordered_map for large datasets?
How do I erase elements while iterating with std::unordered_map in multithreaded code?
How do I provide stable iteration order with std::map for embedded targets?
How do I provide stable iteration order with std::map in multithreaded code?
How do I avoid rehashing overhead with std::unordered_map in performance-sensitive code?
How do I merge two containers efficiently with std::map for embedded targets?