Skip to content

Commit

Permalink
Sanitize docs.
Browse files Browse the repository at this point in the history
  • Loading branch information
georgysavva committed Jun 28, 2020
1 parent 004ad83 commit fbd6b8c
Show file tree
Hide file tree
Showing 10 changed files with 66 additions and 87 deletions.
27 changes: 15 additions & 12 deletions dbscan.go
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,7 @@ type Rows interface {
// ScanAll iterates all rows to the end. After iterating it closes the rows,
// and propagates any errors that could pop up.
// It is expected that the destination is a slice. For each row it scans data and appends it to the destination slice.
// It resets the destination slice, so if it's not empty it will overwrite all previous elements.
// ScanAll supports both types of slices: slice of structs by pointer and slice of structs by value,
// ScanAll supports both types of slices: slice of structs by a pointer and slice of structs by value,
// for example:
//
// type User struct {
Expand All @@ -34,15 +33,19 @@ type Rows interface {
// var usersByValue []User
//
// Both usersByPtr and usersByValue are valid destinations for ScanAll function.
//
// Note that before starting, ScanAll resets the destination slice,
// so if it's not empty it will overwrite all existing elements.
func ScanAll(dst interface{}, rows Rows) error {
err := processRows(dst, rows, true /* multipleRows */)
return errors.WithStack(err)
}

// ScanOne iterates all rows to the end and makes sure that there was exactly one row,
// otherwise it returns an error. After iterating it closes the rows,
// otherwise, it returns an error. Use NotFound function to check if there were no rows.
// After iterating ScanOne closes the rows,
// and propagates any errors that could pop up.
// It scans data from that single row into destination.
// It scans data from that single row into the destination.
func ScanOne(dst interface{}, rows Rows) error {
err := processRows(dst, rows, false /* multipleRows */)
return errors.WithStack(err)
Expand Down Expand Up @@ -71,7 +74,7 @@ func processRows(dst interface{}, rows Rows, multipleRows bool) error {
if err != nil {
return errors.WithStack(err)
}
// Make sure that slice is empty.
// Make sure slice is empty.
sliceMeta.val.Set(sliceMeta.val.Slice(0, 0))
}
rs := NewRowScanner(rows)
Expand Down Expand Up @@ -127,7 +130,7 @@ func parseSliceDestination(dst interface{}) (*sliceDestinationMeta, error) {
// If it's a slice of pointers to structs,
// we handle it the same way as if it were a slice of structs by value
// and dereference pointers to values,
// because eventually we works with fields.
// because eventually we work with fields.
// But if it's a slice of a primitive type, e.g. []string or []*string,
// we must leave and pass elements as is to Rows.Scan().
if elementBaseType.Kind() == reflect.Ptr {
Expand Down Expand Up @@ -165,12 +168,12 @@ func scanSliceElement(rs *RowScanner, sliceMeta *sliceDestinationMeta) error {
type startScannerFunc func(rs *RowScanner, dstValue reflect.Value) error

// RowScanner embraces the Rows and exposes the Scan method
// that allows to scan data from the current row into destination.
// that allows to scan data from the current row into the destination.
// The first time the Scan method is called
// it parses the destination type by reflection and caches all required information for further scans.
// it parses the destination type via reflection and caches all required information for further scans.
// Due to this caching mechanism it's not allowed to call Scan for destinations of different types,
// the behaviour is unknown in that case.
// RowScanner doesn't processed to the next row nor close them, it should be done by the client code.
// RowScanner doesn't proceed to the next row nor close them, it should be done by the client code.
//
// The main benefit of using this type directly
// is that you can instantiate a RowScanner and manually iterate over the rows
Expand All @@ -195,7 +198,7 @@ func NewRowScanner(rows Rows) *RowScanner {
}

// Scan scans data from the current row into the destination.
// On the first call it caches expensive reflection work and use it the future calls.
// On the first call it caches expensive reflection work and uses it in future calls.
// See RowScanner for details.
func (rs *RowScanner) Scan(dst interface{}) error {
dstVal, err := parseDestination(dst)
Expand Down Expand Up @@ -292,7 +295,7 @@ func (rs *RowScanner) scanStruct(structValue reflect.Value) error {
fieldIndex, ok := rs.columnToFieldIndex[column]
if !ok {
return errors.Errorf(
"dbscan: column: '%s': no corresponding field found or it's unexported in %v",
"dbscan: column: '%s': no corresponding field found, or it's unexported in %v",
column, structValue.Type(),
)
}
Expand Down Expand Up @@ -343,7 +346,7 @@ func (rs *RowScanner) ensureDistinctColumns() error {
seen := make(map[string]struct{}, len(rs.columns))
for _, column := range rs.columns {
if _, ok := seen[column]; ok {
return errors.Errorf("dbscan: rows contain duplicated column '%s'", column)
return errors.Errorf("dbscan: rows contain a duplicated column '%s'", column)
}
seen[column] = struct{}{}
}
Expand Down
27 changes: 14 additions & 13 deletions doc.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
dbscan works with abstract Rows and doesn't depend on any specific database or library.
If a type implements the Rows interface, it can leverage the full functionality of this package.
Subpackages github.com/georgysavva/dbscan/sqlscan
and github.com/georgysavva/dbscan/pgxscan are wrappers around this package
and github.com/georgysavva/dbscan/pgxscan are wrappers around this package,
they contain functions and adapters tailored to database/sql
and github.com/jackc/pgx/v4 libraries correspondingly. sqlscan and pgxscan proxy all calls to dbscan internally.
dbscan does all the logic, but generally, it shouldn't be imported by the application code directly.
Expand Down Expand Up @@ -34,10 +34,10 @@ In the example above User struct is mapped to the following columns: "user_id",
A struct can contain embedded structs as well. This allows reusing models in different queries.
Note that non-embedded structs aren't allowed, this decision was made due to simplicity.
By default, dbscan maps fields from embedded structs to columns as is and doesn't add prefix,
By default, dbscan maps fields from embedded structs to columns as is and doesn't add any prefix,
this simulates behaviour of major SQL databases in case of a JOIN.
In order to add a prefix to all fields of the embedded struct specify it in the `db` field tag,
"." used as the separator for example:
dbscan uses "." as a separator, for example:
type User struct {
UserID string
Expand All @@ -54,11 +54,12 @@ In order to add a prefix to all fields of the embedded struct specify it in the
Post `db:post`
}
will get mapped to the following columns: "user_id", "email", "post.id", "post.text".
Row struct is mapped to the following columns: "user_id", "email", "post.id", "post.text".
In order to scan into a field it must be exported, unexported fields will be ignored.
If dbscan can't find corresponding field for a column it returns an error,
this forces the application to only select data from the database that it needs.
Also if struct contains multiple fields that are mapped to the same column,
Also, if struct contains multiple fields that are mapped to the same column,
dbscan won't be able to choose which field to assign to, and it returns an error, for example:
type User struct {
Expand All @@ -76,15 +77,15 @@ dbscan won't be able to make the chose to which field to assign and return an er
Post
}
Row struct is invalid since both User.ID and Post.ID are mapped to the "id" column.
Row struct is invalid since both Row.User.ID and Row.Post.ID are mapped to the "id" column.
Scanning into map
Apart from scanning into structs, dbscan can handle maps,
in that case it uses column name as the map key and column data as the map value. For example:
in that case it uses column name as the map key and column data as the map value, for example:
var results []map[string]interface{}
if err := dbscan.ScanAll(&result, rows); err != nil {
if err := dbscan.ScanAll(&results, rows); err != nil {
// Handle rows processing error
}
// results variable now contains data from the row.
Expand All @@ -96,18 +97,18 @@ if all column values have the same specific type.
Scanning into other types
If the destination isn't a struct nor a map, dbscan handles it as single column scan,
it ensures that rows contain exactly one column and scans destination from the column, for example:
dbscan ensures that rows contain exactly one column and scans destination from that column, for example:
var result []string
if err := dbscan.ScanAll(&result, rows); err != nil {
var results []string
if err := dbscan.ScanAll(&results, rows); err != nil {
// Handle rows processing error
}
// result variable not contains data from the row single column.
// results variable now contains data from the rows' single column.
Rows processing
ScanAll and ScanOne functions take care of rows processing,
they iterate rows to the end and close them after that.
Client code doesn't need bother with all of that, it just needs to pass rows to dbscan.
Client code doesn't need to bother with that, it just passes rows to dbscan.
*/
package dbscan
2 changes: 1 addition & 1 deletion example_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ func ExampleRowScanner() {
}
}

func ExampleRowScan() {
func ExampleScanRow() {
type User struct {
ID string
Name string
Expand Down
24 changes: 6 additions & 18 deletions pgxscan/doc.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
/*
pgxscan is a wrapper around github.com/georgysavva/dbscan package.
It contains adapters and proxy functions that are meant to connect github.com/jackc/pgx/v4
with github.com/georgysavva/dbscan functionality. pgxscan mirrors all capabilities provided by dbscan.
with dbscan functionality. pgxscan mirrors all capabilities provided by dbscan.
See dbscan docs to get familiar with all details and features.
How to use
Expand All @@ -21,25 +21,13 @@ it's as simple as this:
// Use QueryAll to query multiple records.
var users []*User
if err := pgxscan.QueryAll(
ctx, &users, db, `SELECT user_id, name, email, age from users`,
); err != nil {
// Handle query or rows processing error.
}
pgxscan.QueryAll(ctx, &users, db, `SELECT user_id, name, email, age FROM users`)
// users variable now contains data from all rows.
// Use QueryOne to query exactly one record.
var user User
if err := pgxscan.QueryOne(
ctx, &user, db, `SELECT user_id, name, email, age from users where id='bob'`,
); err != nil {
// Handle query or rows processing error.
}
// users variable now contains data from all rows.
Pgx custom types
pgx custom types
pgx has a concept of custom types: https://pkg.go.dev/github.com/jackc/pgx/v4?tab=doc#hdr-Custom_Type_Support.
pgx has a concept of custom types: https://pkg.go.dev/github.com/jackc/pgx/v4@v4.6.0?tab=doc#hdr-Custom_Type_Support.
You can use them with pgxscan too, here is an example of a struct with pgtype.Text field:
type User struct {
Expand All @@ -48,12 +36,12 @@ You can use them with pgxscan too, here is an example of a struct with pgtype.Te
Bio pgtype.Text
}
Note that you must use pgtype.Text by value, not by pointer. This will not work:
Note that you must specify pgtype.Text by value, not by a pointer. This will not work:
type User struct {
UserID string
Name string
Bio *pgtype.Text // pgxscan won't be able to scan data into field defined that way.
Bio *pgtype.Text // pgxscan won't be able to scan data into a field defined that way.
}
This happens because struct fields are always passed to pgx.Rows.Scan() as pointers,
Expand Down
16 changes: 8 additions & 8 deletions pgxscan/example_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ func ExampleQueryAll() {

var users []*User
if err := pgxscan.QueryAll(
ctx, &users, db, `SELECT user_id, name, email, age from users`,
ctx, &users, db, `SELECT user_id, name, email, age FROM users`,
); err != nil {
// Handle query or rows processing error.
}
Expand All @@ -37,11 +37,11 @@ func ExampleQueryOne() {

var user User
if err := pgxscan.QueryOne(
ctx, &user, db, `SELECT user_id, name, email, age from users where id='bob'`,
ctx, &user, db, `SELECT user_id, name, email, age FROM users WHERE id='bob'`,
); err != nil {
// Handle query or rows processing error.
}
// users variable now contains data from all rows.
// user variable now contains data from all rows.
}

func ExampleScanAll() {
Expand All @@ -54,7 +54,7 @@ func ExampleScanAll() {

// Query pgx.Rows from the database.
db, _ := pgxpool.Connect(ctx, "example-connection-url")
rows, _ := db.Query(ctx, `SELECT user_id, name, email, age from users`)
rows, _ := db.Query(ctx, `SELECT user_id, name, email, age FROM users`)

var users []*User
if err := pgxscan.ScanAll(&users, rows); err != nil {
Expand All @@ -73,7 +73,7 @@ func ExampleScanOne() {

// Query pgx.Rows from the database.
db, _ := pgxpool.Connect(ctx, "example-connection-url")
rows, _ := db.Query(ctx, `SELECT user_id, name, email, age from users where id='bob'`)
rows, _ := db.Query(ctx, `SELECT user_id, name, email, age FROM users WHERE id='bob'`)

var user User
if err := pgxscan.ScanOne(&user, rows); err != nil {
Expand All @@ -92,7 +92,7 @@ func ExampleRowScanner() {

// Query pgx.Rows from the database.
db, _ := pgxpool.Connect(ctx, "example-connection-url")
rows, _ := db.Query(ctx, `SELECT user_id, name, email, age from users`)
rows, _ := db.Query(ctx, `SELECT user_id, name, email, age FROM users`)

// Make sure rows are always closed.
defer rows.Close()
Expand All @@ -109,7 +109,7 @@ func ExampleRowScanner() {
}
}

func ExampleRowScan() {
func ExampleScanRow() {
type User struct {
ID string `db:"user_id"`
Name string
Expand All @@ -119,7 +119,7 @@ func ExampleRowScan() {

// Query pgx.Rows from the database.
db, _ := pgxpool.Connect(ctx, "example-connection-url")
rows, _ := db.Query(ctx, `SELECT user_id, name, email, age from users`)
rows, _ := db.Query(ctx, `SELECT user_id, name, email, age FROM users`)

// Make sure rows are always closed.
defer rows.Close()
Expand Down
4 changes: 2 additions & 2 deletions pgxscan/pgxscan.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,8 +10,8 @@ import (
"github.com/georgysavva/dbscan"
)

// QueryI is something that pgxscan can query and get the pgx.Rows.
// For example: *pgxpool.Pool, *pgx.Conn or pgx.Tx.
// QueryI is something that pgxscan can query and get the pgx.Rows from.
// For example, it can be: *pgxpool.Pool, *pgx.Conn or pgx.Tx.
type QueryI interface {
Query(ctx context.Context, query string, args ...interface{}) (pgx.Rows, error)
}
Expand Down
10 changes: 5 additions & 5 deletions rowscanner_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -258,7 +258,7 @@ func TestRowScanner_Scan_invalidStructDestination_returnsErr(t *testing.T) {
dst: &struct {
Bar string
}{},
expectedErr: "dbscan: column: 'foo': no corresponding field found or it's unexported in " +
expectedErr: "dbscan: column: 'foo': no corresponding field found, or it's unexported in " +
"struct { Bar string }",
},
{
Expand All @@ -270,7 +270,7 @@ func TestRowScanner_Scan_invalidStructDestination_returnsErr(t *testing.T) {
foo string
Bar string
}{},
expectedErr: "dbscan: column: 'foo': no corresponding field found or it's unexported in " +
expectedErr: "dbscan: column: 'foo': no corresponding field found, or it's unexported in " +
"struct { foo string; Bar string }",
},
{
Expand All @@ -284,7 +284,7 @@ func TestRowScanner_Scan_invalidStructDestination_returnsErr(t *testing.T) {
Foo string
Bar string
}{},
expectedErr: "dbscan: column: 'foo_nested': no corresponding field found or it's unexported in " +
expectedErr: "dbscan: column: 'foo_nested': no corresponding field found, or it's unexported in " +
"struct { dbscan_test.nestedUnexported; Foo string; Bar string }",
},
{
Expand All @@ -298,7 +298,7 @@ func TestRowScanner_Scan_invalidStructDestination_returnsErr(t *testing.T) {
Foo string
Bar string
}{},
expectedErr: "dbscan: column: 'foo_nested': no corresponding field found or it's unexported in " +
expectedErr: "dbscan: column: 'foo_nested': no corresponding field found, or it's unexported in " +
"struct { Nested dbscan_test.FooNested; Foo string; Bar string }",
},
{
Expand Down Expand Up @@ -551,7 +551,7 @@ func TestRowScanner_Scan_rowsContainDuplicatedColumn_returnsErr(t *testing.T) {
SELECT 'foo val' AS foo, 'foo val' AS foo
`
rows := queryRows(t, query)
expectedErr := "dbscan: rows contain duplicated column 'foo'"
expectedErr := "dbscan: rows contain a duplicated column 'foo'"
err := scan(t, tc.dst, rows)
assert.EqualError(t, err, expectedErr)
})
Expand Down
Loading

0 comments on commit fbd6b8c

Please sign in to comment.