How do I insert binary data into a PostgreSQL BYTEA column using the Golang lib/pq API?

AKh*_*AKh 7 postgresql go libpq sqlx pq

I want to insert some binary data into a BYTEA column.

How would I insert the contents of somefile.tar.gz into a table with a BYTEA column?

  1. An example would help.
  2. Is it possible to use the HEX format for BYTEA (Postgres 9.2)?
  3. Is it possible to stream to/from Postgres from Go?

Tim*_*Tim 1

You can stream if you are willing to switch to Large Objects (see the PostgreSQL documentation and github.com/jackc/pgx). The type pgx.LargeObject implements:

io.Writer
io.Reader
io.Seeker
io.Closer

Large objects are stored in a system table; there is no large object type that you can use as a column in your own tables. A large object is referenced by its object identifier (oid), so you need to maintain a separate table that maps file metadata to the oid.

Example program:

package main

import (
    "context"
    "io"
    "log"
    "os"
    "time"

    "github.com/jackc/pgx/v4"
)

const (
    // files table maps Large Object oid to file names
    createFileTable = `CREATE TABLE files (
        id oid primary key,
        name varchar,
        unique(name)
    );`
)

func main() {
    ctx, cancel := context.WithTimeout(context.TODO(), time.Minute)
    defer cancel()

    conn, err := pgx.Connect(ctx, "user=postgres host=/run/postgresql dbname=postgres")
    if err != nil {
        panic(err)
    }
    defer conn.Close(ctx)

    if _, err = conn.Exec(ctx, createFileTable); err != nil {
        panic(err)
    }

    written, err := storeFile(ctx, conn, "somefile.bin")
    log.Printf("storeFile written: %d", written)
    if err != nil {
        panic(err)
    }

    read, err := loadFile(ctx, conn, "somefile.bin")
    log.Printf("loadFile read: %d", read)
    if err != nil {
        panic(err)
    }
}

// storeFile stores the named file as a Large Object in the database.
// The resulting object identifier is stored along with the file name in the files table.
// The number of bytes written and an error, if one occurred, are returned.
func storeFile(ctx context.Context, conn *pgx.Conn, name string) (written int64, err error) {
    file, err := os.Open(name)
    if err != nil {
        return 0, err
    }
    defer file.Close()

    // LargeObjects can only operate on an active TX
    tx, err := conn.Begin(ctx)
    if err != nil {
        return 0, err
    }
    defer tx.Rollback(ctx)

    lobs := tx.LargeObjects()

    // Create a new Large Object.
    // We pass 0, so the DB can pick an available oid for us.
    oid, err := lobs.Create(ctx, 0)
    if err != nil {
        return 0, err
    }

    // record the oid and filename in the files table
    _, err = tx.Exec(ctx, "INSERT INTO files (id, name) VALUES ($1, $2)", oid, name)
    if err != nil {
        return 0, err
    }

    // Open the new Object for writing.
    obj, err := lobs.Open(ctx, oid, pgx.LargeObjectModeWrite)
    if err != nil {
        return 0, err
    }

    // Copy the file stream to the Large Object stream
    written, err = io.Copy(obj, file)
    if err != nil {
        return written, err
    }

    err = tx.Commit(ctx)
    return written, err
}

// loadFile loads the file identified by name as Large Object
// and writes the contents to a local file by the same name.
// The number of bytes read or an error is returned.
func loadFile(ctx context.Context, conn *pgx.Conn, name string) (read int64, err error) {
    tx, err := conn.Begin(ctx)
    if err != nil {
        return 0, err
    }
    defer tx.Rollback(ctx)

    var oid uint32
    err = tx.QueryRow(ctx, "SELECT id FROM files WHERE name = $1", name).Scan(&oid)
    if err != nil {
        return 0, err
    }

    file, err := os.Create(name)
    if err != nil {
        return 0, err
    }
    defer file.Close()

    lobs := tx.LargeObjects()
    obj, err := lobs.Open(ctx, oid, pgx.LargeObjectModeRead)
    if err != nil {
        return 0, err
    }

    // Copy the Large Object stream to the local file.
    // No Commit is needed: the deferred Rollback is fine for a read-only transaction.
    return io.Copy(file, obj)
}
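For completeness: if the file fits comfortably in memory, a plain BYTEA column also works with lib/pq. Pass the content as a []byte parameter and the driver encodes it as bytea for you (hex format, \x..., on PostgreSQL 9.0 and later), which also answers the HEX-format question: no manual escaping is needed. There is no streaming this way; that is what the Large Object route above provides. A minimal sketch, assuming a hypothetical table files_bytea(name text, data bytea):

package main

import (
    "database/sql"
    "log"
    "os"

    _ "github.com/lib/pq"
)

func main() {
    db, err := sql.Open("postgres", "user=postgres dbname=postgres sslmode=disable")
    if err != nil {
        log.Fatal(err)
    }
    defer db.Close()

    // Hypothetical table used only for this sketch.
    if _, err := db.Exec(`CREATE TABLE IF NOT EXISTS files_bytea (name text primary key, data bytea)`); err != nil {
        log.Fatal(err)
    }

    // Read the whole file into memory; fine for small and medium files,
    // but there is no streaming with a BYTEA parameter.
    data, err := os.ReadFile("somefile.tar.gz")
    if err != nil {
        log.Fatal(err)
    }

    // lib/pq encodes a []byte parameter as bytea, so the content can be
    // bound directly to the $2 placeholder.
    if _, err := db.Exec(`INSERT INTO files_bytea (name, data) VALUES ($1, $2)`,
        "somefile.tar.gz", data); err != nil {
        log.Fatal(err)
    }
    log.Printf("stored %d bytes", len(data))
}

Reading the data back is symmetric: Scan the bytea column into a []byte.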