pgxscan


README

PGX Scan

A simple scanning library to extend PGX's awesome capabilities.

pgxscan supports scanning into structs (including things like join tables and JSON columns) and slices of structs, as well as scanning from interface slices and variadic arguments.

How to use

For the Row interface

Scanning from a row (i.e. the result of calling QueryRow()), which returns the Row interface, only exposes the Scan method. Currently neither pgxscan nor, for that matter, pgx has a way to expose the columns returned by a row query. Because of this, pgxscan can only scan into predefined types. To scan into a struct by passing in the struct itself, use the Rows interface (i.e. Query()).

Scan to standard types
var (
    ID     uint32
    Int    int
    Float  float32
    String string
    Time   time.Time
    Bool   bool
    Bytes  []byte
    Slice  []string
)

conn, _ := pgx.Connect(context.Background(), "postgres://postgres:@localhost:5432/pgxscan?sslmode=disable")
stmt := `SELECT "id", "int", "float_32", "string", "time", "bool", "bytes", "string_slice" FROM "test" WHERE id = $1`
row := conn.QueryRow(context.Background(), stmt, 1)

err := pgxscan.NewScanner(row).Scan(
    &ID,
    &Int,
    &Float,
    &String,
    &Time,
    &Bool,
    &Bytes,
    &Slice,
)
if err != nil {
    return err
}
Scan to standard types from []interface{}
var (
    ID     uint32
    Int    int
    Float  float32
    String string
    Time   time.Time
    Bool   bool
    Bytes  []byte
    Slice  []string
)
dst := []interface{}{
    &ID,
    &Int,
    &Float,
    &String,
    &Time,
    &Bool,
    &Bytes,
    &Slice,
}
conn, _ := pgx.Connect(context.Background(), "postgres://postgres:@localhost:5432/pgxscan?sslmode=disable")
stmt := `SELECT "id", "int", "float_32", "string", "time", "bool", "bytes", "string_slice" FROM "test" WHERE id = $1`
row := conn.QueryRow(context.Background(), stmt, 1)

if err := pgxscan.NewScanner(row).Scan(dst); err != nil {
    return err
}
Scan to struct fields
type TestStruct struct {
    ID uint32 `db:"id"`

    // builtin types
    Int         int       `db:"int"`
    Float32     float32   `db:"float_32"`
    String      string    `db:"string"`
    Bool        bool      `db:"bool"`
    Time        time.Time `db:"time"`
    Bytes       []byte    `db:"bytes"`
    StringSlice []string  `db:"string_slice"`
    JSONB       JSON      `json:"json_b" db:"json_b"`
}
type JSON struct {
    Str      string         `json:"str"`
    Int      int            `json:"int"`
    Embedded EmbeddedStruct `json:"embedded"`
    Ignore   string         `json:"-"`
}
type EmbeddedStruct struct {
    Bool bool `json:"data"`
}

// scanning to predefined struct fields
stmt := `SELECT "id", "int", "float_32", "string", "time", "bool", "bytes", "string_slice", "json_b" FROM "test" WHERE id = $1`
var dst TestStruct
row := conn.QueryRow(context.Background(), stmt, 1)
err := pgxscan.NewScanner(row).Scan(
    &dst.ID,
    &dst.Int,
    &dst.Float32,
    &dst.String,
    &dst.Time,
    &dst.Bool,
    &dst.Bytes,
    &dst.StringSlice,
    &dst.JSONB,
)
if err != nil {
    return err
}
For the Rows interface

The Rows interface exposes more data, such as the returned column names, which allows us to scan into a struct without predefining the destination values first. All of the previous examples will work with rows too.

Scan to a struct
stmt := `SELECT "id", "int", "float_32", "string", "time", "bool", "bytes", "string_slice", "json_b" FROM "test" WHERE id = $1`
rows, _ := conn.Query(context.Background(), stmt, 1)

var dst TestStruct
// pgxscan will take care of closing the rows and calling Next()
if err := pgxscan.NewScanner(rows).Scan(&dst); err != nil {
    return err
}
Scan to slice of structs
stmt := `SELECT "id", "int", "float_32", "string", "time", "bool", "bytes", "string_slice", "json_b" FROM "test" ORDER BY "id" ASC LIMIT 2`
rows, _ := conn.Query(context.Background(), stmt)

var dst []TestStruct
if err := pgxscan.NewScanner(rows).Scan(&dst); err != nil {
    return err
}
Scan to struct with join table

There are two ways to handle join tables: use either the struct tag scan:"notate" or scan:"follow". scan:"notate" will dot notate the struct to something like "table_one.column", which is particularly useful when joining tables that have column name conflicts. However, you will have to alias the SQL columns to match (either individually or with the special SQL notation explained below). scan:"follow" won't dot notate; instead it goes into the struct and adds the field names to the map. If you know you won't have column name conflicts, this works fine and no aliasing is required.

Example with aliasing (one column at a time)

stmt := `
WITH usr AS (
	SELECT
		"id", "name", "email"
	FROM
		"users"
	WHERE
		"id" = $1
),
addresses AS (
	SELECT
		"address"."id" AS "address.id",
		"line_1" AS "address.line_1",
		"city" AS "address.city"
	FROM
		"address", usr
	WHERE
		"user_id" = usr."id"
)
SELECT
	usr.*, addresses.*
FROM
	usr,
	addresses
`
// Note the dot-notated aliases for the address columns: "address.id", "address.line_1", and "address.city".
rows, _ := conn.Query(context.Background(), stmt, 1)

type (
    Address struct {
        ID    uint32
        Line1 string `db:"line_1"`
        City  string
    }
    User struct {
        ID      uint32
        Name    string
        Email   string
        Address Address `scan:"notate"` // table dot notates the struct
    }
)
var user User
if err := pgxscan.NewScanner(rows).Scan(&user); err != nil {
    return err
}
Example with aliasing (SQL column notation syntax)

The following example is exactly the same as the one above, but with much less verbosity. It uses "notate:address" as a column name.

stmt := `
SELECT 
    users.*,
    0 as "notate:address", -- prefix with "address." following columns
    address.*              -- fields prefixed with "address.<field>"
 FROM
   users, address
WHERE
  users.id = $1 
  AND address.user_id = users.id
`
// Note the "notate:address" column, which prefixes the following address columns with "address.".
rows, _ := conn.Query(context.Background(), stmt, 1)

type (
    Address struct {
        ID    uint32
        Line1 string `db:"line_1"`
        City  string
    }
    User struct {
        ID      uint32
        Name    string
        Email   string
        Address Address `scan:"notate"` // table dot notates the struct
    }
)
var user User
scanner := pgxscan.NewScanner(rows, pgxscan.MatchAllColumns(false)) // to ignore address.user_id
if err := scanner.Scan(&user); err != nil {
    return err
}

We pass MatchAllColumns(false) so the scanner ignores unmatched columns, in this case address.user_id, which is returned by the query but has no matching field in the struct; otherwise the Scan method would fail. If all returned columns are present in the struct, you don't need to initialize the scanner like this.

This SQL syntax allows any level of notation. You can disable it by naming a column just "notate:", or use many levels like "notate:level1.level2.level3"; the notation will be prepended to the following column names.

To use this notation, the column value should be 0 (zero) and the column name should start with "notate:".

Example of complex SQL notation

      SELECT  123 as A,

              0 as "notate:c1",  -- notate as "c1."
              c1.*,
              
              0 as "notate:c1.c2",  -- notate as "c1.c2."
              c2.*,
              
              0 as "notate:",    -- disable notations
              456 as B,
              
              0 as "notate:c3",  -- notate as "c3.
              c3.*

Imagine c1, c2 and c3 all have columns named A and B; the mapping would then be done as if the query were:

      SELECT  123 as A,
              c1.A as "c1.A",
              c1.B as "c1.B",
              c2.A as "c1.c2.A",
              c2.B as "c1.c2.B",
              456 as B,
              c3.A as "c3.A",
              c3.B as "c3.B",

Although in this case the expanded query is clearer, imagine c1, c2 and c3 each having many different column names, some of them the same.

Example without aliasing

stmt := `
WITH usr AS (
	SELECT
		"id", "name", "email"
	FROM
		"users"
	WHERE
		"id" = $1
),
addresses AS (
	SELECT
		"line_1",
		"city"
	FROM
		"address", usr
	WHERE
		"user_id" = usr."id"
)
SELECT
	usr.*, addresses.*
FROM
	usr,
	addresses
`
// Note that the "id" column for address is not being selected, which removes the naming conflict, so no aliasing is necessary.
rows, _ := conn.Query(context.Background(), stmt, 1)

type (
    Address struct {
        Line1 string `db:"line_1"`
        City  string
    }
    User struct {
        ID      uint32
        Name    string
        Email   string
        Address Address `scan:"follow"` // follow inspects the struct and adds its fields without dot notation
    }
)
var user User
if err := pgxscan.NewScanner(rows).Scan(&user); err != nil {
    return err
}

Check out the many other tests for examples of scanning to different data types.

Documentation


Constants

This section is empty.

Variables

var ErrColumnNotateSyntax = errors.New("column notate syntax is invalid: expecting \"notate:name\"")

var ErrNoCols = errors.New("columns can not be nil")

var QueryColumnNotatePrefix = "notate:"

Functions

func GetColumnNames added in v0.3.0

func GetColumnNames(rows *pgx.Rows) ([]string, error)

This function returns a list of column names from the SQL columns.

It renames SQL-notated columns when needed: when a column name starts with the 'notate:' prefix (you can override the prefix with the QueryColumnNotatePrefix variable), the columns that follow it are renamed with that prefix.

From columns:

["a", "b", "notate:whatever", "a", "b"]

Will return:

["a", "b", "notate:whatever", "whatever.a", "whatever.b"]

Background info:

In order to allow complex queries and avoid having to spell out every column name for complex mappings, delimiter columns are used to notate the results returned from Postgres.

Imagine we have `SELECT a.*, b.* FROM ...`. If both the _a_ and _b_ tables have a column named _id_, there would be no way for us to map the result to a struct with a couple of nested structs. However, if we use column notation we can rewrite the query as:

SELECT 0 as "notate:a",
       a.*,
       0 as "notate:b",
       b.*
FROM ...

This way, everything that comes after the "notate:a" column will be treated as if we had defined an alias "a.<col>" for each column, and so on.

These notations allow zero levels (using "notate:" with nothing after the colon), one level (like the example above), or many levels of notation (just use "notate:a.sub1.sub2").

This helps map values to structs with simple queries, without having to list all columns in the SQL.
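
As a rough usage sketch (not taken from the package docs): assuming an open pgx connection named conn and a hypothetical query, GetColumnNames can be called on the returned rows; the &rows pointer is only there to match the documented *pgx.Rows parameter.

// Minimal sketch; conn is assumed to be an open pgx connection and the query is hypothetical.
rows, err := conn.Query(context.Background(), `SELECT 0 AS "notate:address", address.* FROM address`)
if err != nil {
    return err
}
defer rows.Close()

// GetColumnNames rewrites the columns that follow a "notate:" column into dot-notated names.
cols, err := pgxscan.GetColumnNames(&rows)
if err != nil {
    return err
}
fmt.Println(cols) // e.g. ["notate:address", "address.id", "address.line_1", ...]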

func ScanStruct

func ScanStruct(scan scannerFunc, i interface{}, cols []string, matchAllColumnsToStruct bool) error

ScanStruct will scan the current row into i. When matchAllColumnsToStruct is false, it will not complain about extra columns in the result set that are not mapped to fields in the struct; said another way, it allows unmapped columns, which can sometimes be convenient.
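
A hedged sketch of calling ScanStruct directly (normally NewScanner does this for you): it assumes the unexported scannerFunc has the underlying type func(...interface{}) error, so that rows.Scan can be passed for it, and reuses the TestStruct type from the README.

// Hypothetical direct use of ScanStruct; assumes rows.Scan is assignable to scannerFunc.
cols, err := pgxscan.GetColumnNames(&rows)
if err != nil {
    return err
}
var dst TestStruct
for rows.Next() {
    // false: do not require every returned column to have a matching struct field
    if err := pgxscan.ScanStruct(rows.Scan, &dst, cols, false); err != nil {
        return err
    }
}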

Types

type Config

type Config struct {
	ReturnErrNoRowsForRows  bool
	MatchAllColumnsToStruct bool
}

type Option

type Option interface {
	// contains filtered or unexported methods
}

func ErrNoRowsQuery

func ErrNoRowsQuery(b bool) Option

ErrNoRowsQuery sets whether or not a pgx.ErrNoRows error should be returned on a query that has no rows
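
A small sketch of the option in use, reusing the User struct from the README examples; the query and the id value 42 are placeholders.

// Opt in to a pgx.ErrNoRows result when the query matches nothing.
rows, err := conn.Query(context.Background(), `SELECT "id", "name", "email" FROM "users" WHERE "id" = $1`, 42)
if err != nil {
    return err
}
var user User
err = pgxscan.NewScanner(rows, pgxscan.ErrNoRowsQuery(true)).Scan(&user)
if errors.Is(err, pgx.ErrNoRows) {
    // no matching user
}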

func MatchAllColumns added in v0.3.0

func MatchAllColumns(b bool) Option

MatchAllColumns sets whether or not an unableToFindFieldError should be returned on a query that has more columns than fields in the struct.

type Scanner

type Scanner interface {
	Scan(v ...interface{}) error
}

Scanner knows how to scan sql.Rows into structs.

func NewScanner

func NewScanner(src Scanner, opts ...Option) Scanner

NewScanner takes in a scanner and returns a Scanner. Since the pgx Row and Rows interfaces both have a `Scan(v ...interface{}) error` method, either one can be passed as the argument and the scanner will take care of the rest.
