How do I perform bulk inserts using pgx?

Go, pgx, bulk inserts, PostgreSQL, database, performance
Learn how to efficiently perform bulk inserts in PostgreSQL using the pgx library in Go. By queuing multiple INSERT statements in a pgx.Batch and sending them in a single network round trip, this approach significantly improves performance when dealing with large datasets.

package main

import (
    "context"
    "github.com/jackc/pgx/v5"
    "log"
)

// main demonstrates a bulk insert into PostgreSQL using pgx.Batch: all
// INSERT statements are queued client-side and sent in a single network
// round trip, then each statement's result is checked individually.
func main() {
    ctx := context.Background()

    // Connect to the database.
    conn, err := pgx.Connect(ctx, "postgres://username:password@localhost:5432/dbname")
    if err != nil {
        log.Fatalf("Unable to connect to database: %v", err)
    }
    defer conn.Close(ctx)

    // Prepare the data for bulk insert.
    items := [][]any{
        {"value1", 100},
        {"value2", 200},
        {"value3", 300},
    }

    // Queue one INSERT per row; nothing is sent until SendBatch.
    batch := &pgx.Batch{}
    for _, item := range items {
        batch.Queue("INSERT INTO your_table (column1, column2) VALUES ($1, $2)", item[0], item[1])
    }

    br := conn.SendBatch(ctx, batch)

    // Each queued statement has its own result: Exec must be called once
    // per queued query, otherwise failures after the first statement are
    // silently ignored. br.Close is called explicitly (not deferred)
    // because log.Fatalf exits via os.Exit and would skip deferred calls.
    for i := 0; i < batch.Len(); i++ {
        if _, err := br.Exec(); err != nil {
            br.Close()
            log.Fatalf("Batch insert %d failed: %v", i, err)
        }
    }
    // Close releases the connection for further use and surfaces any
    // remaining batch error.
    if err := br.Close(); err != nil {
        log.Fatalf("Closing batch failed: %v", err)
    }

    log.Println("Bulk insert completed successfully.")
}
    

Go pgx bulk inserts PostgreSQL database performance