How do I stream large result sets using GORM?

Streaming large result sets with GORM lets an application process records one row at a time instead of loading the entire result set into memory at once — essential for keeping memory usage flat when a query can return millions of rows.
streaming, GORM, Golang, large result sets, performance optimization

package main

import (
    "fmt"
    "gorm.io/driver/sqlite"
    "gorm.io/gorm"
)

// User is the model streamed from the database in this example.
// AutoMigrate in main creates/updates the matching table for it.
type User struct {
    ID   uint   // by GORM convention, a field named ID is the primary key
    Name string // printed for each streamed row in main
}

// main demonstrates streaming a query result row by row with GORM's
// Rows()/ScanRows() instead of loading everything into a slice at once.
func main() {
    // Open (or create) the SQLite database used for the demo.
    db, err := gorm.Open(sqlite.Open("test.db"), &gorm.Config{})
    if err != nil {
        panic("failed to connect database")
    }

    // Auto migrate the User model. Surface migration failures instead of
    // silently continuing against a possibly missing table.
    if err := db.AutoMigrate(&User{}); err != nil {
        panic(err)
    }

    // Rows() returns a *sql.Rows cursor, so records are fetched from the
    // database incrementally rather than materialized all at once.
    rows, err := db.Model(&User{}).Rows()
    if err != nil {
        panic(err)
    }
    defer rows.Close()

    for rows.Next() {
        var user User
        // ScanRows maps the current cursor row into the User struct.
        if err := db.ScanRows(rows, &user); err != nil {
            panic(err)
        }
        fmt.Println(user.Name) // Process each user record
    }

    // rows.Next() returns false on both end-of-data and error; rows.Err()
    // distinguishes the two. Without this check a mid-stream failure is
    // silently treated as a normal end of the result set.
    if err := rows.Err(); err != nil {
        panic(err)
    }
}
    

streaming GORM Golang large result sets performance optimization