package main
import (
"database/sql"
"encoding/json"
"fmt"
"log"
_ "github.com/lib/pq" // PostgreSQL driver
)
// MyData is the document shape stored in (and read back from) the
// JSONB column of my_table. The json tags control the keys used by
// json.Marshal/Unmarshal below.
type MyData struct {
Name string `json:"name"` // serialized as "name"
Age int `json:"age"` // serialized as "age"
}
// main demonstrates round-tripping a Go struct through a PostgreSQL JSONB
// column. All real work lives in run so that deferred cleanup executes:
// calling log.Fatal mid-function (as the original did) invokes os.Exit,
// which skips every pending defer, leaking the DB handle and result set.
func main() {
	if err := run(); err != nil {
		log.Fatal(err)
	}
}

// run opens the database, inserts one example row, and prints every row
// back. Returning an error (instead of log.Fatal-ing inline) lets the
// deferred db.Close run on failure.
func run() error {
	// NOTE(review): connection settings are hard-coded for the example;
	// real code should read them from the environment or a config file.
	connStr := "user=username dbname=mydb sslmode=disable"

	db, err := sql.Open("postgres", connStr)
	if err != nil {
		return fmt.Errorf("opening database: %w", err)
	}
	defer db.Close()

	// sql.Open only validates its arguments — it does not connect.
	// Ping forces a real connection so bad credentials/hosts surface
	// here rather than on the first query.
	if err := db.Ping(); err != nil {
		return fmt.Errorf("connecting to database: %w", err)
	}

	if err := insertExample(db); err != nil {
		return err
	}
	return printAll(db)
}

// insertExample marshals one MyData value and stores it in the JSONB
// column of my_table via a parameterized statement (no SQL injection).
func insertExample(db *sql.DB) error {
	data := MyData{Name: "Alice", Age: 30}
	jsonData, err := json.Marshal(data)
	if err != nil {
		return fmt.Errorf("marshaling row: %w", err)
	}
	if _, err := db.Exec("INSERT INTO my_table(data) VALUES ($1)", jsonData); err != nil {
		return fmt.Errorf("inserting row: %w", err)
	}
	return nil
}

// printAll reads every JSONB document from my_table, decodes each into
// a MyData, and prints it.
func printAll(db *sql.DB) error {
	rows, err := db.Query("SELECT data FROM my_table")
	if err != nil {
		return fmt.Errorf("querying rows: %w", err)
	}
	defer rows.Close()

	for rows.Next() {
		// json.RawMessage defers JSON decoding until after Scan succeeds.
		var raw json.RawMessage
		if err := rows.Scan(&raw); err != nil {
			return fmt.Errorf("scanning row: %w", err)
		}
		var item MyData
		if err := json.Unmarshal(raw, &item); err != nil {
			return fmt.Errorf("decoding row: %w", err)
		}
		fmt.Printf("Retrieved: %+v\n", item)
	}
	// rows.Err reports any error encountered during iteration.
	return rows.Err()
}
How do I avoid rehashing overhead with std::set in multithreaded code?
How do I find elements with custom comparators with std::set for embedded targets?
How do I erase elements while iterating with std::set for embedded targets?
How do I provide stable iteration order with std::unordered_map for large datasets?
How do I reserve capacity ahead of time with std::unordered_map for large datasets?
How do I erase elements while iterating with std::unordered_map in multithreaded code?
How do I provide stable iteration order with std::map for embedded targets?
How do I provide stable iteration order with std::map in multithreaded code?
How do I avoid rehashing overhead with std::map in performance-sensitive code?
How do I merge two containers efficiently with std::map for embedded targets?