diff --git a/Makefile b/Makefile index 7781380..c4890b6 100644 --- a/Makefile +++ b/Makefile @@ -43,12 +43,16 @@ test: ## run test go test -race -v ./test testdata: ## generate test models - $(MAKE) -j4 testdata/default testdata/customtypes testdata/single + $(MAKE) -j4 testdata/default testdata/underscore testdata/customtypes testdata/single testdata/default: rm -rf test/testmodels/default && mkdir -p test/testmodels/default $(YOBIN) $(SPANNER_PROJECT_NAME) $(SPANNER_INSTANCE_NAME) $(SPANNER_DATABASE_NAME) --package models --out test/testmodels/default/ +testdata/underscore: + rm -rf test/testmodels/underscore && mkdir -p test/testmodels/underscore + $(YOBIN) $(SPANNER_PROJECT_NAME) $(SPANNER_INSTANCE_NAME) $(SPANNER_DATABASE_NAME) --package models --underscore --out test/testmodels/underscore/ + testdata/single: rm -rf test/testmodels/single && mkdir -p test/testmodels/single $(YOBIN) $(SPANNER_PROJECT_NAME) $(SPANNER_INSTANCE_NAME) $(SPANNER_DATABASE_NAME) --out test/testmodels/single/single_file.go --single-file @@ -58,12 +62,16 @@ testdata/customtypes: $(YOBIN) $(SPANNER_PROJECT_NAME) $(SPANNER_INSTANCE_NAME) $(SPANNER_DATABASE_NAME) --custom-types-file test/testdata/custom_column_types.yml --out test/testmodels/customtypes/ testdata-from-ddl: - $(MAKE) -j4 testdata-from-ddl/default testdata-from-ddl/customtypes testdata-from-ddl/single + $(MAKE) -j4 testdata-from-ddl/default testdata-from-ddl/underscore testdata-from-ddl/customtypes testdata-from-ddl/single testdata-from-ddl/default: rm -rf test/testmodels/default && mkdir -p test/testmodels/default $(YOBIN) generate ./test/testdata/schema.sql --from-ddl --package models --out test/testmodels/default/ +testdata-from-ddl/underscore: + rm -rf test/testmodels/underscore && mkdir -p test/testmodels/underscore + $(YOBIN) generate ./test/testdata/schema.sql --from-ddl --package models --underscore --out test/testmodels/underscore/ + testdata-from-ddl/single: rm -rf test/testmodels/single && mkdir -p 
test/testmodels/single $(YOBIN) generate ./test/testdata/schema.sql --from-ddl --out test/testmodels/single/single_file.go --single-file diff --git a/README.md b/README.md index 3e64e55..beff357 100644 --- a/README.md +++ b/README.md @@ -60,6 +60,7 @@ Flags: --suffix string output file suffix (default ".yo.go") --tags string build tags to add to package header --template-path string user supplied template path + --underscore toggle underscores in file names ``` ## Generated code diff --git a/cmd/generate.go b/cmd/generate.go index 9192c43..e3634b9 100644 --- a/cmd/generate.go +++ b/cmd/generate.go @@ -90,14 +90,15 @@ var ( } g := generator.NewGenerator(loader, inflector, generator.GeneratorOption{ - PackageName: generateOpts.Package, - Tags: generateOpts.Tags, - TemplatePath: generateOpts.TemplatePath, - CustomTypePackage: generateOpts.CustomTypePackage, - FilenameSuffix: generateOpts.Suffix, - SingleFile: generateOpts.SingleFile, - Filename: generateOpts.Filename, - Path: generateOpts.Path, + PackageName: generateOpts.Package, + Tags: generateOpts.Tags, + TemplatePath: generateOpts.TemplatePath, + CustomTypePackage: generateOpts.CustomTypePackage, + FilenameSuffix: generateOpts.Suffix, + SingleFile: generateOpts.SingleFile, + Filename: generateOpts.Filename, + FilenameUnderscore: generateOpts.FilenameUnderscore, + Path: generateOpts.Path, }) if err := g.Generate(tableMap, ixMap); err != nil { return fmt.Errorf("error: %v", err) diff --git a/cmd/root.go b/cmd/root.go index cf1a8d2..b4394f0 100644 --- a/cmd/root.go +++ b/cmd/root.go @@ -89,14 +89,15 @@ var ( } g := generator.NewGenerator(loader, inflector, generator.GeneratorOption{ - PackageName: rootOpts.Package, - Tags: rootOpts.Tags, - TemplatePath: rootOpts.TemplatePath, - CustomTypePackage: rootOpts.CustomTypePackage, - FilenameSuffix: rootOpts.Suffix, - SingleFile: rootOpts.SingleFile, - Filename: rootOpts.Filename, - Path: rootOpts.Path, + PackageName: rootOpts.Package, + Tags: rootOpts.Tags, + TemplatePath: 
rootOpts.TemplatePath, + CustomTypePackage: rootOpts.CustomTypePackage, + FilenameSuffix: rootOpts.Suffix, + SingleFile: rootOpts.SingleFile, + Filename: rootOpts.Filename, + FilenameUnderscore: rootOpts.FilenameUnderscore, + Path: rootOpts.Path, }) if err := g.Generate(tableMap, ixMap); err != nil { return fmt.Errorf("error: %v", err) @@ -123,6 +124,7 @@ func setRootOpts(cmd *cobra.Command, opts *internal.ArgType) { cmd.Flags().StringVarP(&opts.Out, "out", "o", "", "output path or file name") cmd.Flags().StringVar(&opts.Suffix, "suffix", defaultSuffix, "output file suffix") cmd.Flags().BoolVar(&opts.SingleFile, "single-file", false, "toggle single file output") + cmd.Flags().BoolVar(&opts.FilenameUnderscore, "underscore", false, "toggle underscores in file names") cmd.Flags().StringVarP(&opts.Package, "package", "p", "", "package name used in generated Go code") cmd.Flags().StringVar(&opts.CustomTypePackage, "custom-type-package", "", "Go package name to use for custom or unknown types") cmd.Flags().StringArrayVar(&opts.TargetTables, "target-tables", nil, "tables to include from the generated Go code") diff --git a/generator/generator.go b/generator/generator.go index 11b0496..0d941d1 100644 --- a/generator/generator.go +++ b/generator/generator.go @@ -31,6 +31,7 @@ import ( "strings" "text/template" + "github.com/kenshaw/snaker" "golang.org/x/tools/imports" "go.mercari.io/yo/internal" @@ -45,14 +46,15 @@ type Loader interface { } type GeneratorOption struct { - PackageName string - Tags string - TemplatePath string - CustomTypePackage string - FilenameSuffix string - SingleFile bool - Filename string - Path string + PackageName string + Tags string + TemplatePath string + CustomTypePackage string + FilenameSuffix string + SingleFile bool + Filename string + FilenameUnderscore bool + Path string } func NewGenerator(loader Loader, inflector internal.Inflector, opt GeneratorOption) *Generator { @@ -67,6 +69,7 @@ func NewGenerator(loader Loader, inflector 
internal.Inflector, opt GeneratorOpti filenameSuffix: opt.FilenameSuffix, singleFile: opt.SingleFile, filename: opt.Filename, + filenameUnderscore: opt.FilenameUnderscore, path: opt.Path, files: make(map[string]*os.File), } @@ -83,13 +86,14 @@ type Generator struct { // generated is the generated templates after a run. generated []TBuf - packageName string - tags string - customTypePackage string - filenameSuffix string - singleFile bool - filename string - path string + packageName string + tags string + customTypePackage string + filenameSuffix string + singleFile bool + filename string + filenameUnderscore bool + path string nameConflictSuffix string } @@ -152,9 +156,14 @@ func (g *Generator) Generate(tableMap map[string]*internal.Type, ixMap map[strin // the os.OpenFile with the correct parameters depending on the state of args. func (g *Generator) getFile(ds *basicDataSet, t *TBuf) (*os.File, error) { // determine filename - var filename = strings.ToLower(t.Name) + g.filenameSuffix - if g.singleFile { + var filename string + switch { + case g.singleFile: filename = g.filename + case g.filenameUnderscore: + filename = snaker.CamelToSnake(t.Name) + g.filenameSuffix + default: + filename = strings.ToLower(t.Name) + g.filenameSuffix } filename = path.Join(g.path, filename) @@ -174,7 +183,7 @@ func (g *Generator) getFile(ds *basicDataSet, t *TBuf) (*os.File, error) { } // open file - f, err = os.OpenFile(filename, mode, 0666) + f, err = os.OpenFile(filename, mode, 0o666) if err != nil { return nil, err } diff --git a/internal/argtype.go b/internal/argtype.go index 28025e5..4442fcb 100644 --- a/internal/argtype.go +++ b/internal/argtype.go @@ -70,13 +70,14 @@ type ArgType struct { // Tags is the list of build tags to add to generated Go files. Tags string - Path string - Filename string + Path string + Filename string + FilenameUnderscore bool // DDLFilepath is the filepath of the ddl file. DDLFilepath string - // FromDDL indicates generating from ddl flie or not. 
+ // FromDDL indicates generating from ddl file or not. FromDDL bool // InflectionRuleFile is custom inflection rule file. diff --git a/test/testmodels/underscore/composite_primary_key.yo.go b/test/testmodels/underscore/composite_primary_key.yo.go new file mode 100644 index 0000000..c6ed955 --- /dev/null +++ b/test/testmodels/underscore/composite_primary_key.yo.go @@ -0,0 +1,516 @@ +// Code generated by yo. DO NOT EDIT. +// Package models contains the types. +package models + +import ( + "context" + "fmt" + + "cloud.google.com/go/spanner" + "google.golang.org/api/iterator" + "google.golang.org/grpc/codes" +) + +// CompositePrimaryKey represents a row from 'CompositePrimaryKeys'. +type CompositePrimaryKey struct { + ID int64 `spanner:"Id" json:"Id"` // Id + PKey1 string `spanner:"PKey1" json:"PKey1"` // PKey1 + PKey2 int64 `spanner:"PKey2" json:"PKey2"` // PKey2 + Error int64 `spanner:"Error" json:"Error"` // Error + X string `spanner:"X" json:"X"` // X + Y string `spanner:"Y" json:"Y"` // Y + Z string `spanner:"Z" json:"Z"` // Z +} + +func CompositePrimaryKeyPrimaryKeys() []string { + return []string{ + "PKey1", + "PKey2", + } +} + +func CompositePrimaryKeyColumns() []string { + return []string{ + "Id", + "PKey1", + "PKey2", + "Error", + "X", + "Y", + "Z", + } +} + +func CompositePrimaryKeyWritableColumns() []string { + return []string{ + "Id", + "PKey1", + "PKey2", + "Error", + "X", + "Y", + "Z", + } +} + +func (cpk *CompositePrimaryKey) columnsToPtrs(cols []string, customPtrs map[string]interface{}) ([]interface{}, error) { + ret := make([]interface{}, 0, len(cols)) + for _, col := range cols { + if val, ok := customPtrs[col]; ok { + ret = append(ret, val) + continue + } + + switch col { + case "Id": + ret = append(ret, &cpk.ID) + case "PKey1": + ret = append(ret, &cpk.PKey1) + case "PKey2": + ret = append(ret, &cpk.PKey2) + case "Error": + ret = append(ret, &cpk.Error) + case "X": + ret = append(ret, &cpk.X) + case "Y": + ret = append(ret, &cpk.Y) + case "Z": + 
ret = append(ret, &cpk.Z) + default: + return nil, fmt.Errorf("unknown column: %s", col) + } + } + return ret, nil +} + +func (cpk *CompositePrimaryKey) columnsToValues(cols []string) ([]interface{}, error) { + ret := make([]interface{}, 0, len(cols)) + for _, col := range cols { + switch col { + case "Id": + ret = append(ret, cpk.ID) + case "PKey1": + ret = append(ret, cpk.PKey1) + case "PKey2": + ret = append(ret, cpk.PKey2) + case "Error": + ret = append(ret, cpk.Error) + case "X": + ret = append(ret, cpk.X) + case "Y": + ret = append(ret, cpk.Y) + case "Z": + ret = append(ret, cpk.Z) + default: + return nil, fmt.Errorf("unknown column: %s", col) + } + } + + return ret, nil +} + +// newCompositePrimaryKey_Decoder returns a decoder which reads a row from *spanner.Row +// into CompositePrimaryKey. The decoder is not goroutine-safe. Don't use it concurrently. +func newCompositePrimaryKey_Decoder(cols []string) func(*spanner.Row) (*CompositePrimaryKey, error) { + customPtrs := map[string]interface{}{} + + return func(row *spanner.Row) (*CompositePrimaryKey, error) { + var cpk CompositePrimaryKey + ptrs, err := cpk.columnsToPtrs(cols, customPtrs) + if err != nil { + return nil, err + } + + if err := row.Columns(ptrs...); err != nil { + return nil, err + } + + return &cpk, nil + } +} + +// Insert returns a Mutation to insert a row into a table. If the row already +// exists, the write or transaction fails. +func (cpk *CompositePrimaryKey) Insert(ctx context.Context) *spanner.Mutation { + values, _ := cpk.columnsToValues(CompositePrimaryKeyWritableColumns()) + return spanner.Insert("CompositePrimaryKeys", CompositePrimaryKeyWritableColumns(), values) +} + +// Update returns a Mutation to update a row in a table. If the row does not +// already exist, the write or transaction fails. 
+func (cpk *CompositePrimaryKey) Update(ctx context.Context) *spanner.Mutation { + values, _ := cpk.columnsToValues(CompositePrimaryKeyWritableColumns()) + return spanner.Update("CompositePrimaryKeys", CompositePrimaryKeyWritableColumns(), values) +} + +// InsertOrUpdate returns a Mutation to insert a row into a table. If the row +// already exists, it updates it instead. Any column values not explicitly +// written are preserved. +func (cpk *CompositePrimaryKey) InsertOrUpdate(ctx context.Context) *spanner.Mutation { + values, _ := cpk.columnsToValues(CompositePrimaryKeyWritableColumns()) + return spanner.InsertOrUpdate("CompositePrimaryKeys", CompositePrimaryKeyWritableColumns(), values) +} + +// UpdateColumns returns a Mutation to update specified columns of a row in a table. +func (cpk *CompositePrimaryKey) UpdateColumns(ctx context.Context, cols ...string) (*spanner.Mutation, error) { + // add primary keys to columns to update by primary keys + colsWithPKeys := append(cols, CompositePrimaryKeyPrimaryKeys()...) 
+ + values, err := cpk.columnsToValues(colsWithPKeys) + if err != nil { + return nil, newErrorWithCode(codes.InvalidArgument, "CompositePrimaryKey.UpdateColumns", "CompositePrimaryKeys", err) + } + + return spanner.Update("CompositePrimaryKeys", colsWithPKeys, values), nil +} + +// FindCompositePrimaryKey gets a CompositePrimaryKey by primary key +func FindCompositePrimaryKey(ctx context.Context, db YORODB, pKey1 string, pKey2 int64) (*CompositePrimaryKey, error) { + key := spanner.Key{pKey1, pKey2} + row, err := db.ReadRow(ctx, "CompositePrimaryKeys", key, CompositePrimaryKeyColumns()) + if err != nil { + return nil, newError("FindCompositePrimaryKey", "CompositePrimaryKeys", err) + } + + decoder := newCompositePrimaryKey_Decoder(CompositePrimaryKeyColumns()) + cpk, err := decoder(row) + if err != nil { + return nil, newErrorWithCode(codes.Internal, "FindCompositePrimaryKey", "CompositePrimaryKeys", err) + } + + return cpk, nil +} + +// ReadCompositePrimaryKey retrieves multiples rows from CompositePrimaryKey by KeySet as a slice. +func ReadCompositePrimaryKey(ctx context.Context, db YORODB, keys spanner.KeySet) ([]*CompositePrimaryKey, error) { + var res []*CompositePrimaryKey + + decoder := newCompositePrimaryKey_Decoder(CompositePrimaryKeyColumns()) + + rows := db.Read(ctx, "CompositePrimaryKeys", keys, CompositePrimaryKeyColumns()) + err := rows.Do(func(row *spanner.Row) error { + cpk, err := decoder(row) + if err != nil { + return err + } + res = append(res, cpk) + + return nil + }) + if err != nil { + return nil, newErrorWithCode(codes.Internal, "ReadCompositePrimaryKey", "CompositePrimaryKeys", err) + } + + return res, nil +} + +// Delete deletes the CompositePrimaryKey from the database. 
+func (cpk *CompositePrimaryKey) Delete(ctx context.Context) *spanner.Mutation { + values, _ := cpk.columnsToValues(CompositePrimaryKeyPrimaryKeys()) + return spanner.Delete("CompositePrimaryKeys", spanner.Key(values)) +} + +// FindCompositePrimaryKeysByError retrieves multiple rows from 'CompositePrimaryKeys' as a slice of CompositePrimaryKey. +// +// Generated from index 'CompositePrimaryKeysByError'. +func FindCompositePrimaryKeysByError(ctx context.Context, db YORODB, e int64) ([]*CompositePrimaryKey, error) { + const sqlstr = "SELECT " + + "Id, PKey1, PKey2, Error, X, Y, Z " + + "FROM CompositePrimaryKeys@{FORCE_INDEX=CompositePrimaryKeysByError} " + + "WHERE Error = @param0" + + stmt := spanner.NewStatement(sqlstr) + stmt.Params["param0"] = e + + decoder := newCompositePrimaryKey_Decoder(CompositePrimaryKeyColumns()) + + // run query + YOLog(ctx, sqlstr, e) + iter := db.Query(ctx, stmt) + defer iter.Stop() + + // load results + res := []*CompositePrimaryKey{} + for { + row, err := iter.Next() + if err != nil { + if err == iterator.Done { + break + } + return nil, newError("FindCompositePrimaryKeysByError", "CompositePrimaryKeys", err) + } + + cpk, err := decoder(row) + if err != nil { + return nil, newErrorWithCode(codes.Internal, "FindCompositePrimaryKeysByError", "CompositePrimaryKeys", err) + } + + res = append(res, cpk) + } + + return res, nil +} + +// ReadCompositePrimaryKeysByError retrieves multiples rows from 'CompositePrimaryKeys' by KeySet as a slice. +// +// This does not retrieve all columns of 'CompositePrimaryKeys' because an index has only columns +// used for primary key, index key and storing columns. If you need more columns, add storing +// columns or Read by primary key or Query with join. +// +// Generated from unique index 'CompositePrimaryKeysByError'. 
+func ReadCompositePrimaryKeysByError(ctx context.Context, db YORODB, keys spanner.KeySet) ([]*CompositePrimaryKey, error) { + var res []*CompositePrimaryKey + columns := []string{ + "PKey1", + "PKey2", + "Error", + } + + decoder := newCompositePrimaryKey_Decoder(columns) + + rows := db.ReadUsingIndex(ctx, "CompositePrimaryKeys", "CompositePrimaryKeysByError", keys, columns) + err := rows.Do(func(row *spanner.Row) error { + cpk, err := decoder(row) + if err != nil { + return err + } + res = append(res, cpk) + + return nil + }) + if err != nil { + return nil, newErrorWithCode(codes.Internal, "ReadCompositePrimaryKeysByError", "CompositePrimaryKeys", err) + } + + return res, nil +} + +// FindCompositePrimaryKeysByZError retrieves multiple rows from 'CompositePrimaryKeys' as a slice of CompositePrimaryKey. +// +// Generated from index 'CompositePrimaryKeysByError2'. +func FindCompositePrimaryKeysByZError(ctx context.Context, db YORODB, e int64) ([]*CompositePrimaryKey, error) { + const sqlstr = "SELECT " + + "Id, PKey1, PKey2, Error, X, Y, Z " + + "FROM CompositePrimaryKeys@{FORCE_INDEX=CompositePrimaryKeysByError2} " + + "WHERE Error = @param0" + + stmt := spanner.NewStatement(sqlstr) + stmt.Params["param0"] = e + + decoder := newCompositePrimaryKey_Decoder(CompositePrimaryKeyColumns()) + + // run query + YOLog(ctx, sqlstr, e) + iter := db.Query(ctx, stmt) + defer iter.Stop() + + // load results + res := []*CompositePrimaryKey{} + for { + row, err := iter.Next() + if err != nil { + if err == iterator.Done { + break + } + return nil, newError("FindCompositePrimaryKeysByZError", "CompositePrimaryKeys", err) + } + + cpk, err := decoder(row) + if err != nil { + return nil, newErrorWithCode(codes.Internal, "FindCompositePrimaryKeysByZError", "CompositePrimaryKeys", err) + } + + res = append(res, cpk) + } + + return res, nil +} + +// ReadCompositePrimaryKeysByZError retrieves multiples rows from 'CompositePrimaryKeys' by KeySet as a slice. 
+// +// This does not retrieve all columns of 'CompositePrimaryKeys' because an index has only columns +// used for primary key, index key and storing columns. If you need more columns, add storing +// columns or Read by primary key or Query with join. +// +// Generated from unique index 'CompositePrimaryKeysByError2'. +func ReadCompositePrimaryKeysByZError(ctx context.Context, db YORODB, keys spanner.KeySet) ([]*CompositePrimaryKey, error) { + var res []*CompositePrimaryKey + columns := []string{ + "PKey1", + "PKey2", + "Error", + "Z", + } + + decoder := newCompositePrimaryKey_Decoder(columns) + + rows := db.ReadUsingIndex(ctx, "CompositePrimaryKeys", "CompositePrimaryKeysByError2", keys, columns) + err := rows.Do(func(row *spanner.Row) error { + cpk, err := decoder(row) + if err != nil { + return err + } + res = append(res, cpk) + + return nil + }) + if err != nil { + return nil, newErrorWithCode(codes.Internal, "ReadCompositePrimaryKeysByZError", "CompositePrimaryKeys", err) + } + + return res, nil +} + +// FindCompositePrimaryKeysByZYError retrieves multiple rows from 'CompositePrimaryKeys' as a slice of CompositePrimaryKey. +// +// Generated from index 'CompositePrimaryKeysByError3'. 
+func FindCompositePrimaryKeysByZYError(ctx context.Context, db YORODB, e int64) ([]*CompositePrimaryKey, error) { + const sqlstr = "SELECT " + + "Id, PKey1, PKey2, Error, X, Y, Z " + + "FROM CompositePrimaryKeys@{FORCE_INDEX=CompositePrimaryKeysByError3} " + + "WHERE Error = @param0" + + stmt := spanner.NewStatement(sqlstr) + stmt.Params["param0"] = e + + decoder := newCompositePrimaryKey_Decoder(CompositePrimaryKeyColumns()) + + // run query + YOLog(ctx, sqlstr, e) + iter := db.Query(ctx, stmt) + defer iter.Stop() + + // load results + res := []*CompositePrimaryKey{} + for { + row, err := iter.Next() + if err != nil { + if err == iterator.Done { + break + } + return nil, newError("FindCompositePrimaryKeysByZYError", "CompositePrimaryKeys", err) + } + + cpk, err := decoder(row) + if err != nil { + return nil, newErrorWithCode(codes.Internal, "FindCompositePrimaryKeysByZYError", "CompositePrimaryKeys", err) + } + + res = append(res, cpk) + } + + return res, nil +} + +// ReadCompositePrimaryKeysByZYError retrieves multiples rows from 'CompositePrimaryKeys' by KeySet as a slice. +// +// This does not retrieve all columns of 'CompositePrimaryKeys' because an index has only columns +// used for primary key, index key and storing columns. If you need more columns, add storing +// columns or Read by primary key or Query with join. +// +// Generated from unique index 'CompositePrimaryKeysByError3'. 
+func ReadCompositePrimaryKeysByZYError(ctx context.Context, db YORODB, keys spanner.KeySet) ([]*CompositePrimaryKey, error) { + var res []*CompositePrimaryKey + columns := []string{ + "PKey1", + "PKey2", + "Error", + "Z", + "Y", + } + + decoder := newCompositePrimaryKey_Decoder(columns) + + rows := db.ReadUsingIndex(ctx, "CompositePrimaryKeys", "CompositePrimaryKeysByError3", keys, columns) + err := rows.Do(func(row *spanner.Row) error { + cpk, err := decoder(row) + if err != nil { + return err + } + res = append(res, cpk) + + return nil + }) + if err != nil { + return nil, newErrorWithCode(codes.Internal, "ReadCompositePrimaryKeysByZYError", "CompositePrimaryKeys", err) + } + + return res, nil +} + +// FindCompositePrimaryKeysByXY retrieves multiple rows from 'CompositePrimaryKeys' as a slice of CompositePrimaryKey. +// +// Generated from index 'CompositePrimaryKeysByXY'. +func FindCompositePrimaryKeysByXY(ctx context.Context, db YORODB, x string, y string) ([]*CompositePrimaryKey, error) { + const sqlstr = "SELECT " + + "Id, PKey1, PKey2, Error, X, Y, Z " + + "FROM CompositePrimaryKeys@{FORCE_INDEX=CompositePrimaryKeysByXY} " + + "WHERE X = @param0 AND Y = @param1" + + stmt := spanner.NewStatement(sqlstr) + stmt.Params["param0"] = x + stmt.Params["param1"] = y + + decoder := newCompositePrimaryKey_Decoder(CompositePrimaryKeyColumns()) + + // run query + YOLog(ctx, sqlstr, x, y) + iter := db.Query(ctx, stmt) + defer iter.Stop() + + // load results + res := []*CompositePrimaryKey{} + for { + row, err := iter.Next() + if err != nil { + if err == iterator.Done { + break + } + return nil, newError("FindCompositePrimaryKeysByXY", "CompositePrimaryKeys", err) + } + + cpk, err := decoder(row) + if err != nil { + return nil, newErrorWithCode(codes.Internal, "FindCompositePrimaryKeysByXY", "CompositePrimaryKeys", err) + } + + res = append(res, cpk) + } + + return res, nil +} + +// ReadCompositePrimaryKeysByXY retrieves multiples rows from 'CompositePrimaryKeys' by KeySet 
as a slice. +// +// This does not retrieve all columns of 'CompositePrimaryKeys' because an index has only columns +// used for primary key, index key and storing columns. If you need more columns, add storing +// columns or Read by primary key or Query with join. +// +// Generated from unique index 'CompositePrimaryKeysByXY'. +func ReadCompositePrimaryKeysByXY(ctx context.Context, db YORODB, keys spanner.KeySet) ([]*CompositePrimaryKey, error) { + var res []*CompositePrimaryKey + columns := []string{ + "PKey1", + "PKey2", + "X", + "Y", + } + + decoder := newCompositePrimaryKey_Decoder(columns) + + rows := db.ReadUsingIndex(ctx, "CompositePrimaryKeys", "CompositePrimaryKeysByXY", keys, columns) + err := rows.Do(func(row *spanner.Row) error { + cpk, err := decoder(row) + if err != nil { + return err + } + res = append(res, cpk) + + return nil + }) + if err != nil { + return nil, newErrorWithCode(codes.Internal, "ReadCompositePrimaryKeysByXY", "CompositePrimaryKeys", err) + } + + return res, nil +} diff --git a/test/testmodels/underscore/fereign_item.yo.go b/test/testmodels/underscore/fereign_item.yo.go new file mode 100644 index 0000000..0d8914a --- /dev/null +++ b/test/testmodels/underscore/fereign_item.yo.go @@ -0,0 +1,181 @@ +// Code generated by yo. DO NOT EDIT. +// Package models contains the types. +package models + +import ( + "context" + "fmt" + + "cloud.google.com/go/spanner" + "google.golang.org/grpc/codes" +) + +// FereignItem represents a row from 'FereignItems'. 
+type FereignItem struct { + ID int64 `spanner:"ID" json:"ID"` // ID + ItemID int64 `spanner:"ItemID" json:"ItemID"` // ItemID + Category int64 `spanner:"Category" json:"Category"` // Category +} + +func FereignItemPrimaryKeys() []string { + return []string{ + "ID", + } +} + +func FereignItemColumns() []string { + return []string{ + "ID", + "ItemID", + "Category", + } +} + +func FereignItemWritableColumns() []string { + return []string{ + "ID", + "ItemID", + "Category", + } +} + +func (fi *FereignItem) columnsToPtrs(cols []string, customPtrs map[string]interface{}) ([]interface{}, error) { + ret := make([]interface{}, 0, len(cols)) + for _, col := range cols { + if val, ok := customPtrs[col]; ok { + ret = append(ret, val) + continue + } + + switch col { + case "ID": + ret = append(ret, &fi.ID) + case "ItemID": + ret = append(ret, &fi.ItemID) + case "Category": + ret = append(ret, &fi.Category) + default: + return nil, fmt.Errorf("unknown column: %s", col) + } + } + return ret, nil +} + +func (fi *FereignItem) columnsToValues(cols []string) ([]interface{}, error) { + ret := make([]interface{}, 0, len(cols)) + for _, col := range cols { + switch col { + case "ID": + ret = append(ret, fi.ID) + case "ItemID": + ret = append(ret, fi.ItemID) + case "Category": + ret = append(ret, fi.Category) + default: + return nil, fmt.Errorf("unknown column: %s", col) + } + } + + return ret, nil +} + +// newFereignItem_Decoder returns a decoder which reads a row from *spanner.Row +// into FereignItem. The decoder is not goroutine-safe. Don't use it concurrently. 
+func newFereignItem_Decoder(cols []string) func(*spanner.Row) (*FereignItem, error) { + customPtrs := map[string]interface{}{} + + return func(row *spanner.Row) (*FereignItem, error) { + var fi FereignItem + ptrs, err := fi.columnsToPtrs(cols, customPtrs) + if err != nil { + return nil, err + } + + if err := row.Columns(ptrs...); err != nil { + return nil, err + } + + return &fi, nil + } +} + +// Insert returns a Mutation to insert a row into a table. If the row already +// exists, the write or transaction fails. +func (fi *FereignItem) Insert(ctx context.Context) *spanner.Mutation { + values, _ := fi.columnsToValues(FereignItemWritableColumns()) + return spanner.Insert("FereignItems", FereignItemWritableColumns(), values) +} + +// Update returns a Mutation to update a row in a table. If the row does not +// already exist, the write or transaction fails. +func (fi *FereignItem) Update(ctx context.Context) *spanner.Mutation { + values, _ := fi.columnsToValues(FereignItemWritableColumns()) + return spanner.Update("FereignItems", FereignItemWritableColumns(), values) +} + +// InsertOrUpdate returns a Mutation to insert a row into a table. If the row +// already exists, it updates it instead. Any column values not explicitly +// written are preserved. +func (fi *FereignItem) InsertOrUpdate(ctx context.Context) *spanner.Mutation { + values, _ := fi.columnsToValues(FereignItemWritableColumns()) + return spanner.InsertOrUpdate("FereignItems", FereignItemWritableColumns(), values) +} + +// UpdateColumns returns a Mutation to update specified columns of a row in a table. +func (fi *FereignItem) UpdateColumns(ctx context.Context, cols ...string) (*spanner.Mutation, error) { + // add primary keys to columns to update by primary keys + colsWithPKeys := append(cols, FereignItemPrimaryKeys()...) 
+ + values, err := fi.columnsToValues(colsWithPKeys) + if err != nil { + return nil, newErrorWithCode(codes.InvalidArgument, "FereignItem.UpdateColumns", "FereignItems", err) + } + + return spanner.Update("FereignItems", colsWithPKeys, values), nil +} + +// FindFereignItem gets a FereignItem by primary key +func FindFereignItem(ctx context.Context, db YORODB, id int64) (*FereignItem, error) { + key := spanner.Key{id} + row, err := db.ReadRow(ctx, "FereignItems", key, FereignItemColumns()) + if err != nil { + return nil, newError("FindFereignItem", "FereignItems", err) + } + + decoder := newFereignItem_Decoder(FereignItemColumns()) + fi, err := decoder(row) + if err != nil { + return nil, newErrorWithCode(codes.Internal, "FindFereignItem", "FereignItems", err) + } + + return fi, nil +} + +// ReadFereignItem retrieves multiples rows from FereignItem by KeySet as a slice. +func ReadFereignItem(ctx context.Context, db YORODB, keys spanner.KeySet) ([]*FereignItem, error) { + var res []*FereignItem + + decoder := newFereignItem_Decoder(FereignItemColumns()) + + rows := db.Read(ctx, "FereignItems", keys, FereignItemColumns()) + err := rows.Do(func(row *spanner.Row) error { + fi, err := decoder(row) + if err != nil { + return err + } + res = append(res, fi) + + return nil + }) + if err != nil { + return nil, newErrorWithCode(codes.Internal, "ReadFereignItem", "FereignItems", err) + } + + return res, nil +} + +// Delete deletes the FereignItem from the database. +func (fi *FereignItem) Delete(ctx context.Context) *spanner.Mutation { + values, _ := fi.columnsToValues(FereignItemPrimaryKeys()) + return spanner.Delete("FereignItems", spanner.Key(values)) +} diff --git a/test/testmodels/underscore/full_type.yo.go b/test/testmodels/underscore/full_type.yo.go new file mode 100644 index 0000000..1e3246b --- /dev/null +++ b/test/testmodels/underscore/full_type.yo.go @@ -0,0 +1,776 @@ +// Code generated by yo. DO NOT EDIT. +// Package models contains the types. 
package models

import (
	"context"
	"fmt"
	"strings"
	"time"

	"cloud.google.com/go/civil"
	"cloud.google.com/go/spanner"
	"google.golang.org/api/iterator"
	"google.golang.org/grpc/codes"
)

// FullType represents a row from 'FullTypes'.
type FullType struct {
	PKey                 string              `spanner:"PKey" json:"PKey"`                                 // PKey
	FTString             string              `spanner:"FTString" json:"FTString"`                         // FTString
	FTStringNull         spanner.NullString  `spanner:"FTStringNull" json:"FTStringNull"`                 // FTStringNull
	FTBool               bool                `spanner:"FTBool" json:"FTBool"`                             // FTBool
	FTBoolNull           spanner.NullBool    `spanner:"FTBoolNull" json:"FTBoolNull"`                     // FTBoolNull
	FTBytes              []byte              `spanner:"FTBytes" json:"FTBytes"`                           // FTBytes
	FTBytesNull          []byte              `spanner:"FTBytesNull" json:"FTBytesNull"`                   // FTBytesNull
	FTTimestamp          time.Time           `spanner:"FTTimestamp" json:"FTTimestamp"`                   // FTTimestamp
	FTTimestampNull      spanner.NullTime    `spanner:"FTTimestampNull" json:"FTTimestampNull"`           // FTTimestampNull
	FTInt                int64               `spanner:"FTInt" json:"FTInt"`                               // FTInt
	FTIntNull            spanner.NullInt64   `spanner:"FTIntNull" json:"FTIntNull"`                       // FTIntNull
	FTFloat              float64             `spanner:"FTFloat" json:"FTFloat"`                           // FTFloat
	FTFloatNull          spanner.NullFloat64 `spanner:"FTFloatNull" json:"FTFloatNull"`                   // FTFloatNull
	FTDate               civil.Date          `spanner:"FTDate" json:"FTDate"`                             // FTDate
	FTDateNull           spanner.NullDate    `spanner:"FTDateNull" json:"FTDateNull"`                     // FTDateNull
	FTJSON               spanner.NullJSON    `spanner:"FTJson" json:"FTJson"`                             // FTJson
	FTJSONNull           spanner.NullJSON    `spanner:"FTJsonNull" json:"FTJsonNull"`                     // FTJsonNull
	FTArrayStringNull    []string            `spanner:"FTArrayStringNull" json:"FTArrayStringNull"`       // FTArrayStringNull
	FTArrayString        []string            `spanner:"FTArrayString" json:"FTArrayString"`               // FTArrayString
	FTArrayBoolNull      []bool              `spanner:"FTArrayBoolNull" json:"FTArrayBoolNull"`           // FTArrayBoolNull
	FTArrayBool          []bool              `spanner:"FTArrayBool" json:"FTArrayBool"`                   // FTArrayBool
	FTArrayBytesNull     [][]byte            `spanner:"FTArrayBytesNull" json:"FTArrayBytesNull"`         // FTArrayBytesNull
	FTArrayBytes         [][]byte            `spanner:"FTArrayBytes" json:"FTArrayBytes"`                 // FTArrayBytes
	FTArrayTimestampNull []time.Time         `spanner:"FTArrayTimestampNull" json:"FTArrayTimestampNull"` // FTArrayTimestampNull
	FTArrayTimestamp     []time.Time         `spanner:"FTArrayTimestamp" json:"FTArrayTimestamp"`         // FTArrayTimestamp
	FTArrayIntNull       []int64             `spanner:"FTArrayIntNull" json:"FTArrayIntNull"`             // FTArrayIntNull
	FTArrayInt           []int64             `spanner:"FTArrayInt" json:"FTArrayInt"`                     // FTArrayInt
	FTArrayFloatNull     []float64           `spanner:"FTArrayFloatNull" json:"FTArrayFloatNull"`         // FTArrayFloatNull
	FTArrayFloat         []float64           `spanner:"FTArrayFloat" json:"FTArrayFloat"`                 // FTArrayFloat
	FTArrayDateNull      []civil.Date        `spanner:"FTArrayDateNull" json:"FTArrayDateNull"`           // FTArrayDateNull
	FTArrayDate          []civil.Date        `spanner:"FTArrayDate" json:"FTArrayDate"`                   // FTArrayDate
	FTArrayJSONNull      []spanner.NullJSON  `spanner:"FTArrayJsonNull" json:"FTArrayJsonNull"`           // FTArrayJsonNull
	FTArrayJSON          []spanner.NullJSON  `spanner:"FTArrayJson" json:"FTArrayJson"`                   // FTArrayJson
}

// FullTypePrimaryKeys returns the primary key columns of 'FullTypes'.
func FullTypePrimaryKeys() []string {
	return []string{
		"PKey",
	}
}

// FullTypeColumns returns all columns of 'FullTypes'.
func FullTypeColumns() []string {
	return []string{
		"PKey",
		"FTString",
		"FTStringNull",
		"FTBool",
		"FTBoolNull",
		"FTBytes",
		"FTBytesNull",
		"FTTimestamp",
		"FTTimestampNull",
		"FTInt",
		"FTIntNull",
		"FTFloat",
		"FTFloatNull",
		"FTDate",
		"FTDateNull",
		"FTJson",
		"FTJsonNull",
		"FTArrayStringNull",
		"FTArrayString",
		"FTArrayBoolNull",
		"FTArrayBool",
		"FTArrayBytesNull",
		"FTArrayBytes",
		"FTArrayTimestampNull",
		"FTArrayTimestamp",
		"FTArrayIntNull",
		"FTArrayInt",
		"FTArrayFloatNull",
		"FTArrayFloat",
		"FTArrayDateNull",
		"FTArrayDate",
		"FTArrayJsonNull",
		"FTArrayJson",
	}
}

// FullTypeWritableColumns returns the columns of 'FullTypes' that may be
// written by Insert/Update mutations.
func FullTypeWritableColumns() []string {
	return []string{
		"PKey",
		"FTString",
		"FTStringNull",
		"FTBool",
		"FTBoolNull",
		"FTBytes",
		"FTBytesNull",
		"FTTimestamp",
		"FTTimestampNull",
		"FTInt",
		"FTIntNull",
		"FTFloat",
		"FTFloatNull",
		"FTDate",
		"FTDateNull",
		"FTJson",
		"FTJsonNull",
		"FTArrayStringNull",
		"FTArrayString",
		"FTArrayBoolNull",
		"FTArrayBool",
		"FTArrayBytesNull",
		"FTArrayBytes",
		"FTArrayTimestampNull",
		"FTArrayTimestamp",
		"FTArrayIntNull",
		"FTArrayInt",
		"FTArrayFloatNull",
		"FTArrayFloat",
		"FTArrayDateNull",
		"FTArrayDate",
		"FTArrayJsonNull",
		"FTArrayJson",
	}
}

// columnsToPtrs maps column names to pointers into ft's fields so a row can be
// scanned directly into the struct. Entries in customPtrs override the
// destination for the corresponding column. Unknown column names are an error.
func (ft *FullType) columnsToPtrs(cols []string, customPtrs map[string]interface{}) ([]interface{}, error) {
	ret := make([]interface{}, 0, len(cols))
	for _, col := range cols {
		if val, ok := customPtrs[col]; ok {
			ret = append(ret, val)
			continue
		}

		switch col {
		case "PKey":
			ret = append(ret, &ft.PKey)
		case "FTString":
			ret = append(ret, &ft.FTString)
		case "FTStringNull":
			ret = append(ret, &ft.FTStringNull)
		case "FTBool":
			ret = append(ret, &ft.FTBool)
		case "FTBoolNull":
			ret = append(ret, &ft.FTBoolNull)
		case "FTBytes":
			ret = append(ret, &ft.FTBytes)
		case "FTBytesNull":
			ret = append(ret, &ft.FTBytesNull)
		case "FTTimestamp":
			ret = append(ret, &ft.FTTimestamp)
		case "FTTimestampNull":
			ret = append(ret, &ft.FTTimestampNull)
		case "FTInt":
			ret = append(ret, &ft.FTInt)
		case "FTIntNull":
			ret = append(ret, &ft.FTIntNull)
		case "FTFloat":
			ret = append(ret, &ft.FTFloat)
		case "FTFloatNull":
			ret = append(ret, &ft.FTFloatNull)
		case "FTDate":
			ret = append(ret, &ft.FTDate)
		case "FTDateNull":
			ret = append(ret, &ft.FTDateNull)
		case "FTJson":
			ret = append(ret, &ft.FTJSON)
		case "FTJsonNull":
			ret = append(ret, &ft.FTJSONNull)
		case "FTArrayStringNull":
			ret = append(ret, &ft.FTArrayStringNull)
		case "FTArrayString":
			ret = append(ret, &ft.FTArrayString)
		case "FTArrayBoolNull":
			ret = append(ret, &ft.FTArrayBoolNull)
		case "FTArrayBool":
			ret = append(ret, &ft.FTArrayBool)
		case "FTArrayBytesNull":
			ret = append(ret, &ft.FTArrayBytesNull)
		case "FTArrayBytes":
			ret = append(ret, &ft.FTArrayBytes)
		case "FTArrayTimestampNull":
			ret = append(ret, &ft.FTArrayTimestampNull)
		case "FTArrayTimestamp":
			ret = append(ret, &ft.FTArrayTimestamp)
		case "FTArrayIntNull":
			ret = append(ret, &ft.FTArrayIntNull)
		case "FTArrayInt":
			ret = append(ret, &ft.FTArrayInt)
		case "FTArrayFloatNull":
			ret = append(ret, &ft.FTArrayFloatNull)
		case "FTArrayFloat":
			ret = append(ret, &ft.FTArrayFloat)
		case "FTArrayDateNull":
			ret = append(ret, &ft.FTArrayDateNull)
		case "FTArrayDate":
			ret = append(ret, &ft.FTArrayDate)
		case "FTArrayJsonNull":
			ret = append(ret, &ft.FTArrayJSONNull)
		case "FTArrayJson":
			ret = append(ret, &ft.FTArrayJSON)
		default:
			return nil, fmt.Errorf("unknown column: %s", col)
		}
	}
	return ret, nil
}

// columnsToValues maps column names to the current values of ft's fields, in
// the order given, for use as mutation values. Unknown column names are an error.
func (ft *FullType) columnsToValues(cols []string) ([]interface{}, error) {
	ret := make([]interface{}, 0, len(cols))
	for _, col := range cols {
		switch col {
		case "PKey":
			ret = append(ret, ft.PKey)
		case "FTString":
			ret = append(ret, ft.FTString)
		case "FTStringNull":
			ret = append(ret, ft.FTStringNull)
		case "FTBool":
			ret = append(ret, ft.FTBool)
		case "FTBoolNull":
			ret = append(ret, ft.FTBoolNull)
		case "FTBytes":
			ret = append(ret, ft.FTBytes)
		case "FTBytesNull":
			ret = append(ret, ft.FTBytesNull)
		case "FTTimestamp":
			ret = append(ret, ft.FTTimestamp)
		case "FTTimestampNull":
			ret = append(ret, ft.FTTimestampNull)
		case "FTInt":
			ret = append(ret, ft.FTInt)
		case "FTIntNull":
			ret = append(ret, ft.FTIntNull)
		case "FTFloat":
			ret = append(ret, ft.FTFloat)
		case "FTFloatNull":
			ret = append(ret, ft.FTFloatNull)
		case "FTDate":
			ret = append(ret, ft.FTDate)
		case "FTDateNull":
			ret = append(ret, ft.FTDateNull)
		case "FTJson":
			ret = append(ret, ft.FTJSON)
		case "FTJsonNull":
			ret = append(ret, ft.FTJSONNull)
		case "FTArrayStringNull":
			ret = append(ret, ft.FTArrayStringNull)
		case "FTArrayString":
			ret = append(ret, ft.FTArrayString)
		case "FTArrayBoolNull":
			ret = append(ret, ft.FTArrayBoolNull)
		case "FTArrayBool":
			ret = append(ret, ft.FTArrayBool)
		case "FTArrayBytesNull":
			ret = append(ret, ft.FTArrayBytesNull)
		case "FTArrayBytes":
			ret = append(ret, ft.FTArrayBytes)
		case "FTArrayTimestampNull":
			ret = append(ret, ft.FTArrayTimestampNull)
		case "FTArrayTimestamp":
			ret = append(ret, ft.FTArrayTimestamp)
		case "FTArrayIntNull":
			ret = append(ret, ft.FTArrayIntNull)
		case "FTArrayInt":
			ret = append(ret, ft.FTArrayInt)
		case "FTArrayFloatNull":
			ret = append(ret, ft.FTArrayFloatNull)
		case "FTArrayFloat":
			ret = append(ret, ft.FTArrayFloat)
		case "FTArrayDateNull":
			ret = append(ret, ft.FTArrayDateNull)
		case "FTArrayDate":
			ret = append(ret, ft.FTArrayDate)
		case "FTArrayJsonNull":
			ret = append(ret, ft.FTArrayJSONNull)
		case "FTArrayJson":
			ret = append(ret, ft.FTArrayJSON)
		default:
			return nil, fmt.Errorf("unknown column: %s", col)
		}
	}

	return ret, nil
}

// newFullType_Decoder returns a decoder which reads a row from *spanner.Row
// into FullType. The decoder is not goroutine-safe. Don't use it concurrently.
func newFullType_Decoder(cols []string) func(*spanner.Row) (*FullType, error) {
	customPtrs := map[string]interface{}{}

	return func(row *spanner.Row) (*FullType, error) {
		var ft FullType
		ptrs, err := ft.columnsToPtrs(cols, customPtrs)
		if err != nil {
			return nil, err
		}

		if err := row.Columns(ptrs...); err != nil {
			return nil, err
		}

		return &ft, nil
	}
}

// Insert returns a Mutation to insert a row into a table. If the row already
// exists, the write or transaction fails.
func (ft *FullType) Insert(ctx context.Context) *spanner.Mutation {
	values, _ := ft.columnsToValues(FullTypeWritableColumns())
	return spanner.Insert("FullTypes", FullTypeWritableColumns(), values)
}

// Update returns a Mutation to update a row in a table. If the row does not
// already exist, the write or transaction fails.
func (ft *FullType) Update(ctx context.Context) *spanner.Mutation {
	values, _ := ft.columnsToValues(FullTypeWritableColumns())
	return spanner.Update("FullTypes", FullTypeWritableColumns(), values)
}

// InsertOrUpdate returns a Mutation to insert a row into a table. If the row
// already exists, it updates it instead. Any column values not explicitly
// written are preserved.
func (ft *FullType) InsertOrUpdate(ctx context.Context) *spanner.Mutation {
	values, _ := ft.columnsToValues(FullTypeWritableColumns())
	return spanner.InsertOrUpdate("FullTypes", FullTypeWritableColumns(), values)
}

// UpdateColumns returns a Mutation to update specified columns of a row in a table.
func (ft *FullType) UpdateColumns(ctx context.Context, cols ...string) (*spanner.Mutation, error) {
	// add primary keys to columns to update by primary keys
	colsWithPKeys := append(cols, FullTypePrimaryKeys()...)

	values, err := ft.columnsToValues(colsWithPKeys)
	if err != nil {
		return nil, newErrorWithCode(codes.InvalidArgument, "FullType.UpdateColumns", "FullTypes", err)
	}

	return spanner.Update("FullTypes", colsWithPKeys, values), nil
}

// FindFullType gets a FullType by primary key.
func FindFullType(ctx context.Context, db YORODB, pKey string) (*FullType, error) {
	key := spanner.Key{pKey}
	row, err := db.ReadRow(ctx, "FullTypes", key, FullTypeColumns())
	if err != nil {
		return nil, newError("FindFullType", "FullTypes", err)
	}

	decoder := newFullType_Decoder(FullTypeColumns())
	ft, err := decoder(row)
	if err != nil {
		return nil, newErrorWithCode(codes.Internal, "FindFullType", "FullTypes", err)
	}

	return ft, nil
}

// ReadFullType retrieves multiple rows from FullType by KeySet as a slice.
// ReadFullType retrieves multiple rows from FullType by KeySet as a slice.
func ReadFullType(ctx context.Context, db YORODB, keys spanner.KeySet) ([]*FullType, error) {
	var res []*FullType

	decoder := newFullType_Decoder(FullTypeColumns())

	rows := db.Read(ctx, "FullTypes", keys, FullTypeColumns())
	err := rows.Do(func(row *spanner.Row) error {
		ft, err := decoder(row)
		if err != nil {
			return err
		}
		res = append(res, ft)

		return nil
	})
	if err != nil {
		return nil, newErrorWithCode(codes.Internal, "ReadFullType", "FullTypes", err)
	}

	return res, nil
}

// Delete deletes the FullType from the database.
func (ft *FullType) Delete(ctx context.Context) *spanner.Mutation {
	values, _ := ft.columnsToValues(FullTypePrimaryKeys())
	return spanner.Delete("FullTypes", spanner.Key(values))
}

// FindFullTypeByFTString retrieves a row from 'FullTypes' as a FullType.
//
// If no row is present with the given key, then ReadRow returns an error where
// spanner.ErrCode(err) is codes.NotFound.
//
// Generated from unique index 'FullTypesByFTString'.
// FindFullTypeByFTString retrieves a row from 'FullTypes' as a FullType.
//
// If no row is present with the given key, then ReadRow returns an error where
// spanner.ErrCode(err) is codes.NotFound.
//
// Generated from unique index 'FullTypesByFTString'.
func FindFullTypeByFTString(ctx context.Context, db YORODB, fTString string) (*FullType, error) {
	const sqlstr = "SELECT " +
		"PKey, FTString, FTStringNull, FTBool, FTBoolNull, FTBytes, FTBytesNull, FTTimestamp, FTTimestampNull, FTInt, FTIntNull, FTFloat, FTFloatNull, FTDate, FTDateNull, FTJson, FTJsonNull, FTArrayStringNull, FTArrayString, FTArrayBoolNull, FTArrayBool, FTArrayBytesNull, FTArrayBytes, FTArrayTimestampNull, FTArrayTimestamp, FTArrayIntNull, FTArrayInt, FTArrayFloatNull, FTArrayFloat, FTArrayDateNull, FTArrayDate, FTArrayJsonNull, FTArrayJson " +
		"FROM FullTypes@{FORCE_INDEX=FullTypesByFTString} " +
		"WHERE FTString = @param0"

	stmt := spanner.NewStatement(sqlstr)
	stmt.Params["param0"] = fTString

	decoder := newFullType_Decoder(FullTypeColumns())

	// run query
	YOLog(ctx, sqlstr, fTString)
	iter := db.Query(ctx, stmt)
	defer iter.Stop()

	// the index is unique, so at most one row is expected
	row, err := iter.Next()
	if err != nil {
		if err == iterator.Done {
			return nil, newErrorWithCode(codes.NotFound, "FindFullTypeByFTString", "FullTypes", err)
		}
		return nil, newError("FindFullTypeByFTString", "FullTypes", err)
	}

	ft, err := decoder(row)
	if err != nil {
		return nil, newErrorWithCode(codes.Internal, "FindFullTypeByFTString", "FullTypes", err)
	}

	return ft, nil
}

// ReadFullTypeByFTString retrieves multiple rows from 'FullTypes' by KeySet as a slice.
//
// This does not retrieve all columns of 'FullTypes' because an index has only columns
// used for primary key, index key and storing columns. If you need more columns, add storing
// columns or Read by primary key or Query with join.
//
// Generated from unique index 'FullTypesByFTString'.
// ReadFullTypeByFTString retrieves multiple rows from 'FullTypes' by KeySet as a slice.
//
// This does not retrieve all columns of 'FullTypes' because an index has only columns
// used for primary key, index key and storing columns. If you need more columns, add storing
// columns or Read by primary key or Query with join.
//
// Generated from unique index 'FullTypesByFTString'.
func ReadFullTypeByFTString(ctx context.Context, db YORODB, keys spanner.KeySet) ([]*FullType, error) {
	var res []*FullType
	// only the columns available on the index itself
	columns := []string{
		"PKey",
		"FTString",
	}

	decoder := newFullType_Decoder(columns)

	rows := db.ReadUsingIndex(ctx, "FullTypes", "FullTypesByFTString", keys, columns)
	err := rows.Do(func(row *spanner.Row) error {
		ft, err := decoder(row)
		if err != nil {
			return err
		}
		res = append(res, ft)

		return nil
	})
	if err != nil {
		return nil, newErrorWithCode(codes.Internal, "ReadFullTypeByFTString", "FullTypes", err)
	}

	return res, nil
}

// FindFullTypesByFTIntFTTimestampNull retrieves multiple rows from 'FullTypes' as a slice of FullType.
//
// Generated from index 'FullTypesByInTimestampNull'.
func FindFullTypesByFTIntFTTimestampNull(ctx context.Context, db YORODB, fTInt int64, fTTimestampNull spanner.NullTime) ([]*FullType, error) {
	var sqlstr = "SELECT " +
		"PKey, FTString, FTStringNull, FTBool, FTBoolNull, FTBytes, FTBytesNull, FTTimestamp, FTTimestampNull, FTInt, FTIntNull, FTFloat, FTFloatNull, FTDate, FTDateNull, FTJson, FTJsonNull, FTArrayStringNull, FTArrayString, FTArrayBoolNull, FTArrayBool, FTArrayBytesNull, FTArrayBytes, FTArrayTimestampNull, FTArrayTimestamp, FTArrayIntNull, FTArrayInt, FTArrayFloatNull, FTArrayFloat, FTArrayDateNull, FTArrayDate, FTArrayJsonNull, FTArrayJson " +
		"FROM FullTypes@{FORCE_INDEX=FullTypesByInTimestampNull} "

	// a NULL key column cannot be matched with '=', so the condition is built dynamically
	conds := make([]string, 2)
	conds[0] = "FTInt = @param0"
	if fTTimestampNull.IsNull() {
		conds[1] = "FTTimestampNull IS NULL"
	} else {
		conds[1] = "FTTimestampNull = @param1"
	}
	sqlstr += "WHERE " + strings.Join(conds, " AND ")

	stmt := spanner.NewStatement(sqlstr)
	stmt.Params["param0"] = fTInt
	stmt.Params["param1"] = fTTimestampNull // unused by the query when the IS NULL branch was taken

	decoder := newFullType_Decoder(FullTypeColumns())

	// run query
	YOLog(ctx, sqlstr, fTInt, fTTimestampNull)
	iter := db.Query(ctx, stmt)
	defer iter.Stop()

	// load results
	res := []*FullType{}
	for {
		row, err := iter.Next()
		if err != nil {
			if err == iterator.Done {
				break
			}
			return nil, newError("FindFullTypesByFTIntFTTimestampNull", "FullTypes", err)
		}

		ft, err := decoder(row)
		if err != nil {
			return nil, newErrorWithCode(codes.Internal, "FindFullTypesByFTIntFTTimestampNull", "FullTypes", err)
		}

		res = append(res, ft)
	}

	return res, nil
}

// ReadFullTypesByFTIntFTTimestampNull retrieves multiple rows from 'FullTypes' by KeySet as a slice.
//
// This does not retrieve all columns of 'FullTypes' because an index has only columns
// used for primary key, index key and storing columns. If you need more columns, add storing
// columns or Read by primary key or Query with join.
//
// Generated from index 'FullTypesByInTimestampNull'.
func ReadFullTypesByFTIntFTTimestampNull(ctx context.Context, db YORODB, keys spanner.KeySet) ([]*FullType, error) {
	var res []*FullType
	// only the columns available on the index itself
	columns := []string{
		"PKey",
		"FTInt",
		"FTTimestampNull",
	}

	decoder := newFullType_Decoder(columns)

	rows := db.ReadUsingIndex(ctx, "FullTypes", "FullTypesByInTimestampNull", keys, columns)
	err := rows.Do(func(row *spanner.Row) error {
		ft, err := decoder(row)
		if err != nil {
			return err
		}
		res = append(res, ft)

		return nil
	})
	if err != nil {
		return nil, newErrorWithCode(codes.Internal, "ReadFullTypesByFTIntFTTimestampNull", "FullTypes", err)
	}

	return res, nil
}

// FindFullTypesByFTIntFTDate retrieves multiple rows from 'FullTypes' as a slice of FullType.
//
// Generated from index 'FullTypesByIntDate'.
// FindFullTypesByFTIntFTDate retrieves multiple rows from 'FullTypes' as a slice of FullType.
//
// Generated from index 'FullTypesByIntDate'.
func FindFullTypesByFTIntFTDate(ctx context.Context, db YORODB, fTInt int64, fTDate civil.Date) ([]*FullType, error) {
	const sqlstr = "SELECT " +
		"PKey, FTString, FTStringNull, FTBool, FTBoolNull, FTBytes, FTBytesNull, FTTimestamp, FTTimestampNull, FTInt, FTIntNull, FTFloat, FTFloatNull, FTDate, FTDateNull, FTJson, FTJsonNull, FTArrayStringNull, FTArrayString, FTArrayBoolNull, FTArrayBool, FTArrayBytesNull, FTArrayBytes, FTArrayTimestampNull, FTArrayTimestamp, FTArrayIntNull, FTArrayInt, FTArrayFloatNull, FTArrayFloat, FTArrayDateNull, FTArrayDate, FTArrayJsonNull, FTArrayJson " +
		"FROM FullTypes@{FORCE_INDEX=FullTypesByIntDate} " +
		"WHERE FTInt = @param0 AND FTDate = @param1"

	stmt := spanner.NewStatement(sqlstr)
	stmt.Params["param0"] = fTInt
	stmt.Params["param1"] = fTDate

	decoder := newFullType_Decoder(FullTypeColumns())

	// run query
	YOLog(ctx, sqlstr, fTInt, fTDate)
	iter := db.Query(ctx, stmt)
	defer iter.Stop()

	// load results
	res := []*FullType{}
	for {
		row, err := iter.Next()
		if err != nil {
			if err == iterator.Done {
				break
			}
			return nil, newError("FindFullTypesByFTIntFTDate", "FullTypes", err)
		}

		ft, err := decoder(row)
		if err != nil {
			return nil, newErrorWithCode(codes.Internal, "FindFullTypesByFTIntFTDate", "FullTypes", err)
		}

		res = append(res, ft)
	}

	return res, nil
}

// ReadFullTypesByFTIntFTDate retrieves multiple rows from 'FullTypes' by KeySet as a slice.
//
// This does not retrieve all columns of 'FullTypes' because an index has only columns
// used for primary key, index key and storing columns. If you need more columns, add storing
// columns or Read by primary key or Query with join.
//
// Generated from index 'FullTypesByIntDate'.
// ReadFullTypesByFTIntFTDate retrieves multiple rows from 'FullTypes' by KeySet as a slice.
//
// This does not retrieve all columns of 'FullTypes' because an index has only columns
// used for primary key, index key and storing columns. If you need more columns, add storing
// columns or Read by primary key or Query with join.
//
// Generated from index 'FullTypesByIntDate'.
func ReadFullTypesByFTIntFTDate(ctx context.Context, db YORODB, keys spanner.KeySet) ([]*FullType, error) {
	var res []*FullType
	// only the columns available on the index itself
	columns := []string{
		"PKey",
		"FTInt",
		"FTDate",
	}

	decoder := newFullType_Decoder(columns)

	rows := db.ReadUsingIndex(ctx, "FullTypes", "FullTypesByIntDate", keys, columns)
	err := rows.Do(func(row *spanner.Row) error {
		ft, err := decoder(row)
		if err != nil {
			return err
		}
		res = append(res, ft)

		return nil
	})
	if err != nil {
		return nil, newErrorWithCode(codes.Internal, "ReadFullTypesByFTIntFTDate", "FullTypes", err)
	}

	return res, nil
}

// FindFullTypesByFTIntFTTimestamp retrieves multiple rows from 'FullTypes' as a slice of FullType.
//
// Generated from index 'FullTypesByIntTimestamp'.
func FindFullTypesByFTIntFTTimestamp(ctx context.Context, db YORODB, fTInt int64, fTTimestamp time.Time) ([]*FullType, error) {
	const sqlstr = "SELECT " +
		"PKey, FTString, FTStringNull, FTBool, FTBoolNull, FTBytes, FTBytesNull, FTTimestamp, FTTimestampNull, FTInt, FTIntNull, FTFloat, FTFloatNull, FTDate, FTDateNull, FTJson, FTJsonNull, FTArrayStringNull, FTArrayString, FTArrayBoolNull, FTArrayBool, FTArrayBytesNull, FTArrayBytes, FTArrayTimestampNull, FTArrayTimestamp, FTArrayIntNull, FTArrayInt, FTArrayFloatNull, FTArrayFloat, FTArrayDateNull, FTArrayDate, FTArrayJsonNull, FTArrayJson " +
		"FROM FullTypes@{FORCE_INDEX=FullTypesByIntTimestamp} " +
		"WHERE FTInt = @param0 AND FTTimestamp = @param1"

	stmt := spanner.NewStatement(sqlstr)
	stmt.Params["param0"] = fTInt
	stmt.Params["param1"] = fTTimestamp

	decoder := newFullType_Decoder(FullTypeColumns())

	// run query
	YOLog(ctx, sqlstr, fTInt, fTTimestamp)
	iter := db.Query(ctx, stmt)
	defer iter.Stop()

	// load results
	res := []*FullType{}
	for {
		row, err := iter.Next()
		if err != nil {
			if err == iterator.Done {
				break
			}
			return nil, newError("FindFullTypesByFTIntFTTimestamp", "FullTypes", err)
		}

		ft, err := decoder(row)
		if err != nil {
			return nil, newErrorWithCode(codes.Internal, "FindFullTypesByFTIntFTTimestamp", "FullTypes", err)
		}

		res = append(res, ft)
	}

	return res, nil
}

// ReadFullTypesByFTIntFTTimestamp retrieves multiple rows from 'FullTypes' by KeySet as a slice.
//
// This does not retrieve all columns of 'FullTypes' because an index has only columns
// used for primary key, index key and storing columns. If you need more columns, add storing
// columns or Read by primary key or Query with join.
//
// Generated from index 'FullTypesByIntTimestamp'.
func ReadFullTypesByFTIntFTTimestamp(ctx context.Context, db YORODB, keys spanner.KeySet) ([]*FullType, error) {
	var res []*FullType
	// only the columns available on the index itself
	columns := []string{
		"PKey",
		"FTInt",
		"FTTimestamp",
	}

	decoder := newFullType_Decoder(columns)

	rows := db.ReadUsingIndex(ctx, "FullTypes", "FullTypesByIntTimestamp", keys, columns)
	err := rows.Do(func(row *spanner.Row) error {
		ft, err := decoder(row)
		if err != nil {
			return err
		}
		res = append(res, ft)

		return nil
	})
	if err != nil {
		return nil, newErrorWithCode(codes.Internal, "ReadFullTypesByFTIntFTTimestamp", "FullTypes", err)
	}

	return res, nil
}

// FindFullTypesByFTTimestamp retrieves multiple rows from 'FullTypes' as a slice of FullType.
//
// Generated from index 'FullTypesByTimestamp'.
// FindFullTypesByFTTimestamp retrieves multiple rows from 'FullTypes' as a slice of FullType.
//
// Generated from index 'FullTypesByTimestamp'.
func FindFullTypesByFTTimestamp(ctx context.Context, db YORODB, fTTimestamp time.Time) ([]*FullType, error) {
	const sqlstr = "SELECT " +
		"PKey, FTString, FTStringNull, FTBool, FTBoolNull, FTBytes, FTBytesNull, FTTimestamp, FTTimestampNull, FTInt, FTIntNull, FTFloat, FTFloatNull, FTDate, FTDateNull, FTJson, FTJsonNull, FTArrayStringNull, FTArrayString, FTArrayBoolNull, FTArrayBool, FTArrayBytesNull, FTArrayBytes, FTArrayTimestampNull, FTArrayTimestamp, FTArrayIntNull, FTArrayInt, FTArrayFloatNull, FTArrayFloat, FTArrayDateNull, FTArrayDate, FTArrayJsonNull, FTArrayJson " +
		"FROM FullTypes@{FORCE_INDEX=FullTypesByTimestamp} " +
		"WHERE FTTimestamp = @param0"

	stmt := spanner.NewStatement(sqlstr)
	stmt.Params["param0"] = fTTimestamp

	decoder := newFullType_Decoder(FullTypeColumns())

	// run query
	YOLog(ctx, sqlstr, fTTimestamp)
	iter := db.Query(ctx, stmt)
	defer iter.Stop()

	// load results
	res := []*FullType{}
	for {
		row, err := iter.Next()
		if err != nil {
			if err == iterator.Done {
				break
			}
			return nil, newError("FindFullTypesByFTTimestamp", "FullTypes", err)
		}

		ft, err := decoder(row)
		if err != nil {
			return nil, newErrorWithCode(codes.Internal, "FindFullTypesByFTTimestamp", "FullTypes", err)
		}

		res = append(res, ft)
	}

	return res, nil
}

// ReadFullTypesByFTTimestamp retrieves multiple rows from 'FullTypes' by KeySet as a slice.
//
// This does not retrieve all columns of 'FullTypes' because an index has only columns
// used for primary key, index key and storing columns. If you need more columns, add storing
// columns or Read by primary key or Query with join.
//
// Generated from index 'FullTypesByTimestamp'.
// ReadFullTypesByFTTimestamp retrieves multiple rows from 'FullTypes' by KeySet as a slice.
//
// This does not retrieve all columns of 'FullTypes' because an index has only columns
// used for primary key, index key and storing columns. If you need more columns, add storing
// columns or Read by primary key or Query with join.
//
// Generated from index 'FullTypesByTimestamp'.
func ReadFullTypesByFTTimestamp(ctx context.Context, db YORODB, keys spanner.KeySet) ([]*FullType, error) {
	var res []*FullType
	// only the columns available on the index itself
	columns := []string{
		"PKey",
		"FTTimestamp",
	}

	decoder := newFullType_Decoder(columns)

	rows := db.ReadUsingIndex(ctx, "FullTypes", "FullTypesByTimestamp", keys, columns)
	err := rows.Do(func(row *spanner.Row) error {
		ft, err := decoder(row)
		if err != nil {
			return err
		}
		res = append(res, ft)

		return nil
	})
	if err != nil {
		return nil, newErrorWithCode(codes.Internal, "ReadFullTypesByFTTimestamp", "FullTypes", err)
	}

	return res, nil
}
diff --git a/test/testmodels/underscore/generated_column.yo.go b/test/testmodels/underscore/generated_column.yo.go
new file mode 100644
index 0000000..ecf503a
--- /dev/null
+++ b/test/testmodels/underscore/generated_column.yo.go
@@ -0,0 +1,187 @@
// Code generated by yo. DO NOT EDIT.
// Package models contains the types.
package models

import (
	"context"
	"fmt"

	"cloud.google.com/go/spanner"
	"google.golang.org/grpc/codes"
)

// GeneratedColumn represents a row from 'GeneratedColumns'.
// GeneratedColumn represents a row from 'GeneratedColumns'.
type GeneratedColumn struct {
	ID        int64  `spanner:"ID" json:"ID"`               // ID
	FirstName string `spanner:"FirstName" json:"FirstName"` // FirstName
	LastName  string `spanner:"LastName" json:"LastName"`   // LastName
	FullName  string `spanner:"FullName" json:"FullName"`   // FullName
}

// GeneratedColumnPrimaryKeys returns the primary key columns of 'GeneratedColumns'.
func GeneratedColumnPrimaryKeys() []string {
	return []string{
		"ID",
	}
}

// GeneratedColumnColumns returns all columns of 'GeneratedColumns'.
func GeneratedColumnColumns() []string {
	return []string{
		"ID",
		"FirstName",
		"LastName",
		"FullName",
	}
}

// GeneratedColumnWritableColumns returns the columns that may be written.
// FullName is excluded — presumably it is a generated (computed) column and
// cannot be written directly; confirm against the schema.
func GeneratedColumnWritableColumns() []string {
	return []string{
		"ID",
		"FirstName",
		"LastName",
	}
}

// columnsToPtrs maps column names to pointers into gc's fields so a row can be
// scanned directly into the struct. Entries in customPtrs override the
// destination for the corresponding column. Unknown column names are an error.
func (gc *GeneratedColumn) columnsToPtrs(cols []string, customPtrs map[string]interface{}) ([]interface{}, error) {
	ret := make([]interface{}, 0, len(cols))
	for _, col := range cols {
		if val, ok := customPtrs[col]; ok {
			ret = append(ret, val)
			continue
		}

		switch col {
		case "ID":
			ret = append(ret, &gc.ID)
		case "FirstName":
			ret = append(ret, &gc.FirstName)
		case "LastName":
			ret = append(ret, &gc.LastName)
		case "FullName":
			ret = append(ret, &gc.FullName)
		default:
			return nil, fmt.Errorf("unknown column: %s", col)
		}
	}
	return ret, nil
}

// columnsToValues maps column names to the current values of gc's fields, in
// the order given, for use as mutation values. Unknown column names are an error.
func (gc *GeneratedColumn) columnsToValues(cols []string) ([]interface{}, error) {
	ret := make([]interface{}, 0, len(cols))
	for _, col := range cols {
		switch col {
		case "ID":
			ret = append(ret, gc.ID)
		case "FirstName":
			ret = append(ret, gc.FirstName)
		case "LastName":
			ret = append(ret, gc.LastName)
		case "FullName":
			ret = append(ret, gc.FullName)
		default:
			return nil, fmt.Errorf("unknown column: %s", col)
		}
	}

	return ret, nil
}

// newGeneratedColumn_Decoder returns a decoder which reads a row from *spanner.Row
// into GeneratedColumn. The decoder is not goroutine-safe. Don't use it concurrently.
// newGeneratedColumn_Decoder returns a decoder which reads a row from *spanner.Row
// into GeneratedColumn. The decoder is not goroutine-safe. Don't use it concurrently.
func newGeneratedColumn_Decoder(cols []string) func(*spanner.Row) (*GeneratedColumn, error) {
	customPtrs := map[string]interface{}{}

	return func(row *spanner.Row) (*GeneratedColumn, error) {
		var gc GeneratedColumn
		ptrs, err := gc.columnsToPtrs(cols, customPtrs)
		if err != nil {
			return nil, err
		}

		if err := row.Columns(ptrs...); err != nil {
			return nil, err
		}

		return &gc, nil
	}
}

// Insert returns a Mutation to insert a row into a table. If the row already
// exists, the write or transaction fails.
func (gc *GeneratedColumn) Insert(ctx context.Context) *spanner.Mutation {
	values, _ := gc.columnsToValues(GeneratedColumnWritableColumns())
	return spanner.Insert("GeneratedColumns", GeneratedColumnWritableColumns(), values)
}

// Update returns a Mutation to update a row in a table. If the row does not
// already exist, the write or transaction fails.
func (gc *GeneratedColumn) Update(ctx context.Context) *spanner.Mutation {
	values, _ := gc.columnsToValues(GeneratedColumnWritableColumns())
	return spanner.Update("GeneratedColumns", GeneratedColumnWritableColumns(), values)
}

// InsertOrUpdate returns a Mutation to insert a row into a table. If the row
// already exists, it updates it instead. Any column values not explicitly
// written are preserved.
func (gc *GeneratedColumn) InsertOrUpdate(ctx context.Context) *spanner.Mutation {
	values, _ := gc.columnsToValues(GeneratedColumnWritableColumns())
	return spanner.InsertOrUpdate("GeneratedColumns", GeneratedColumnWritableColumns(), values)
}

// UpdateColumns returns a Mutation to update specified columns of a row in a table.
func (gc *GeneratedColumn) UpdateColumns(ctx context.Context, cols ...string) (*spanner.Mutation, error) {
	// add primary keys to columns to update by primary keys
	colsWithPKeys := append(cols, GeneratedColumnPrimaryKeys()...)

	values, err := gc.columnsToValues(colsWithPKeys)
	if err != nil {
		return nil, newErrorWithCode(codes.InvalidArgument, "GeneratedColumn.UpdateColumns", "GeneratedColumns", err)
	}

	return spanner.Update("GeneratedColumns", colsWithPKeys, values), nil
}

// FindGeneratedColumn gets a GeneratedColumn by primary key.
func FindGeneratedColumn(ctx context.Context, db YORODB, id int64) (*GeneratedColumn, error) {
	key := spanner.Key{id}
	row, err := db.ReadRow(ctx, "GeneratedColumns", key, GeneratedColumnColumns())
	if err != nil {
		return nil, newError("FindGeneratedColumn", "GeneratedColumns", err)
	}

	decoder := newGeneratedColumn_Decoder(GeneratedColumnColumns())
	gc, err := decoder(row)
	if err != nil {
		return nil, newErrorWithCode(codes.Internal, "FindGeneratedColumn", "GeneratedColumns", err)
	}

	return gc, nil
}

// ReadGeneratedColumn retrieves multiple rows from GeneratedColumn by KeySet as a slice.
func ReadGeneratedColumn(ctx context.Context, db YORODB, keys spanner.KeySet) ([]*GeneratedColumn, error) {
	var res []*GeneratedColumn

	decoder := newGeneratedColumn_Decoder(GeneratedColumnColumns())

	rows := db.Read(ctx, "GeneratedColumns", keys, GeneratedColumnColumns())
	err := rows.Do(func(row *spanner.Row) error {
		gc, err := decoder(row)
		if err != nil {
			return err
		}
		res = append(res, gc)

		return nil
	})
	if err != nil {
		return nil, newErrorWithCode(codes.Internal, "ReadGeneratedColumn", "GeneratedColumns", err)
	}

	return res, nil
}

// Delete deletes the GeneratedColumn from the database.
// Delete deletes the GeneratedColumn from the database.
func (gc *GeneratedColumn) Delete(ctx context.Context) *spanner.Mutation {
	values, _ := gc.columnsToValues(GeneratedColumnPrimaryKeys())
	return spanner.Delete("GeneratedColumns", spanner.Key(values))
}
diff --git a/test/testmodels/underscore/item.yo.go b/test/testmodels/underscore/item.yo.go
new file mode 100644
index 0000000..0287108
--- /dev/null
+++ b/test/testmodels/underscore/item.yo.go
@@ -0,0 +1,174 @@
// Code generated by yo. DO NOT EDIT.
// Package models contains the types.
package models

import (
	"context"
	"fmt"

	"cloud.google.com/go/spanner"
	"google.golang.org/grpc/codes"
)

// Item represents a row from 'Items'.
type Item struct {
	ID    int64 `spanner:"ID" json:"ID"`       // ID
	Price int64 `spanner:"Price" json:"Price"` // Price
}

// ItemPrimaryKeys returns the primary key columns of 'Items'.
func ItemPrimaryKeys() []string {
	return []string{
		"ID",
	}
}

// ItemColumns returns all columns of 'Items'.
func ItemColumns() []string {
	return []string{
		"ID",
		"Price",
	}
}

// ItemWritableColumns returns the columns of 'Items' that may be written.
func ItemWritableColumns() []string {
	return []string{
		"ID",
		"Price",
	}
}

// columnsToPtrs maps column names to pointers into i's fields so a row can be
// scanned directly into the struct. Entries in customPtrs override the
// destination for the corresponding column. Unknown column names are an error.
func (i *Item) columnsToPtrs(cols []string, customPtrs map[string]interface{}) ([]interface{}, error) {
	ret := make([]interface{}, 0, len(cols))
	for _, col := range cols {
		if val, ok := customPtrs[col]; ok {
			ret = append(ret, val)
			continue
		}

		switch col {
		case "ID":
			ret = append(ret, &i.ID)
		case "Price":
			ret = append(ret, &i.Price)
		default:
			return nil, fmt.Errorf("unknown column: %s", col)
		}
	}
	return ret, nil
}

// columnsToValues maps column names to the current values of i's fields, in
// the order given, for use as mutation values. Unknown column names are an error.
func (i *Item) columnsToValues(cols []string) ([]interface{}, error) {
	ret := make([]interface{}, 0, len(cols))
	for _, col := range cols {
		switch col {
		case "ID":
			ret = append(ret, i.ID)
		case "Price":
			ret = append(ret, i.Price)
		default:
			return nil, fmt.Errorf("unknown column: %s", col)
		}
	}

	return ret, nil
}

// newItem_Decoder returns a decoder which reads a row from *spanner.Row
// into Item. The decoder is not goroutine-safe. Don't use it concurrently.
// newItem_Decoder returns a decoder which reads a row from *spanner.Row
// into Item. The decoder is not goroutine-safe. Don't use it concurrently.
func newItem_Decoder(cols []string) func(*spanner.Row) (*Item, error) {
	customPtrs := map[string]interface{}{}

	return func(row *spanner.Row) (*Item, error) {
		var i Item
		ptrs, err := i.columnsToPtrs(cols, customPtrs)
		if err != nil {
			return nil, err
		}

		if err := row.Columns(ptrs...); err != nil {
			return nil, err
		}

		return &i, nil
	}
}

// Insert returns a Mutation to insert a row into a table. If the row already
// exists, the write or transaction fails.
func (i *Item) Insert(ctx context.Context) *spanner.Mutation {
	values, _ := i.columnsToValues(ItemWritableColumns())
	return spanner.Insert("Items", ItemWritableColumns(), values)
}

// Update returns a Mutation to update a row in a table. If the row does not
// already exist, the write or transaction fails.
func (i *Item) Update(ctx context.Context) *spanner.Mutation {
	values, _ := i.columnsToValues(ItemWritableColumns())
	return spanner.Update("Items", ItemWritableColumns(), values)
}

// InsertOrUpdate returns a Mutation to insert a row into a table. If the row
// already exists, it updates it instead. Any column values not explicitly
// written are preserved.
func (i *Item) InsertOrUpdate(ctx context.Context) *spanner.Mutation {
	values, _ := i.columnsToValues(ItemWritableColumns())
	return spanner.InsertOrUpdate("Items", ItemWritableColumns(), values)
}

// UpdateColumns returns a Mutation to update specified columns of a row in a table.
func (i *Item) UpdateColumns(ctx context.Context, cols ...string) (*spanner.Mutation, error) {
	// add primary keys to columns to update by primary keys
	colsWithPKeys := append(cols, ItemPrimaryKeys()...)

	values, err := i.columnsToValues(colsWithPKeys)
	if err != nil {
		return nil, newErrorWithCode(codes.InvalidArgument, "Item.UpdateColumns", "Items", err)
	}

	return spanner.Update("Items", colsWithPKeys, values), nil
}

// FindItem gets an Item by primary key.
func FindItem(ctx context.Context, db YORODB, id int64) (*Item, error) {
	key := spanner.Key{id}
	row, err := db.ReadRow(ctx, "Items", key, ItemColumns())
	if err != nil {
		return nil, newError("FindItem", "Items", err)
	}

	decoder := newItem_Decoder(ItemColumns())
	i, err := decoder(row)
	if err != nil {
		return nil, newErrorWithCode(codes.Internal, "FindItem", "Items", err)
	}

	return i, nil
}

// ReadItem retrieves multiple rows from Item by KeySet as a slice.
func ReadItem(ctx context.Context, db YORODB, keys spanner.KeySet) ([]*Item, error) {
	var res []*Item

	decoder := newItem_Decoder(ItemColumns())

	rows := db.Read(ctx, "Items", keys, ItemColumns())
	err := rows.Do(func(row *spanner.Row) error {
		i, err := decoder(row)
		if err != nil {
			return err
		}
		res = append(res, i)

		return nil
	})
	if err != nil {
		return nil, newErrorWithCode(codes.Internal, "ReadItem", "Items", err)
	}

	return res, nil
}

// Delete deletes the Item from the database.
func (i *Item) Delete(ctx context.Context) *spanner.Mutation {
	values, _ := i.columnsToValues(ItemPrimaryKeys())
	return spanner.Delete("Items", spanner.Key(values))
}
diff --git a/test/testmodels/underscore/max_length.yo.go b/test/testmodels/underscore/max_length.yo.go
new file mode 100644
index 0000000..b505edd
--- /dev/null
+++ b/test/testmodels/underscore/max_length.yo.go
@@ -0,0 +1,174 @@
// Code generated by yo. DO NOT EDIT.
// Package models contains the types.
package models

import (
	"context"
	"fmt"

	"cloud.google.com/go/spanner"
	"google.golang.org/grpc/codes"
)

// MaxLength represents a row from 'MaxLengths'.
+type MaxLength struct { + MaxString string `spanner:"MaxString" json:"MaxString"` // MaxString + MaxBytes []byte `spanner:"MaxBytes" json:"MaxBytes"` // MaxBytes +} + +func MaxLengthPrimaryKeys() []string { + return []string{ + "MaxString", + } +} + +func MaxLengthColumns() []string { + return []string{ + "MaxString", + "MaxBytes", + } +} + +func MaxLengthWritableColumns() []string { + return []string{ + "MaxString", + "MaxBytes", + } +} + +func (ml *MaxLength) columnsToPtrs(cols []string, customPtrs map[string]interface{}) ([]interface{}, error) { + ret := make([]interface{}, 0, len(cols)) + for _, col := range cols { + if val, ok := customPtrs[col]; ok { + ret = append(ret, val) + continue + } + + switch col { + case "MaxString": + ret = append(ret, &ml.MaxString) + case "MaxBytes": + ret = append(ret, &ml.MaxBytes) + default: + return nil, fmt.Errorf("unknown column: %s", col) + } + } + return ret, nil +} + +func (ml *MaxLength) columnsToValues(cols []string) ([]interface{}, error) { + ret := make([]interface{}, 0, len(cols)) + for _, col := range cols { + switch col { + case "MaxString": + ret = append(ret, ml.MaxString) + case "MaxBytes": + ret = append(ret, ml.MaxBytes) + default: + return nil, fmt.Errorf("unknown column: %s", col) + } + } + + return ret, nil +} + +// newMaxLength_Decoder returns a decoder which reads a row from *spanner.Row +// into MaxLength. The decoder is not goroutine-safe. Don't use it concurrently. +func newMaxLength_Decoder(cols []string) func(*spanner.Row) (*MaxLength, error) { + customPtrs := map[string]interface{}{} + + return func(row *spanner.Row) (*MaxLength, error) { + var ml MaxLength + ptrs, err := ml.columnsToPtrs(cols, customPtrs) + if err != nil { + return nil, err + } + + if err := row.Columns(ptrs...); err != nil { + return nil, err + } + + return &ml, nil + } +} + +// Insert returns a Mutation to insert a row into a table. If the row already +// exists, the write or transaction fails. 
+func (ml *MaxLength) Insert(ctx context.Context) *spanner.Mutation { + values, _ := ml.columnsToValues(MaxLengthWritableColumns()) + return spanner.Insert("MaxLengths", MaxLengthWritableColumns(), values) +} + +// Update returns a Mutation to update a row in a table. If the row does not +// already exist, the write or transaction fails. +func (ml *MaxLength) Update(ctx context.Context) *spanner.Mutation { + values, _ := ml.columnsToValues(MaxLengthWritableColumns()) + return spanner.Update("MaxLengths", MaxLengthWritableColumns(), values) +} + +// InsertOrUpdate returns a Mutation to insert a row into a table. If the row +// already exists, it updates it instead. Any column values not explicitly +// written are preserved. +func (ml *MaxLength) InsertOrUpdate(ctx context.Context) *spanner.Mutation { + values, _ := ml.columnsToValues(MaxLengthWritableColumns()) + return spanner.InsertOrUpdate("MaxLengths", MaxLengthWritableColumns(), values) +} + +// UpdateColumns returns a Mutation to update specified columns of a row in a table. +func (ml *MaxLength) UpdateColumns(ctx context.Context, cols ...string) (*spanner.Mutation, error) { + // add primary keys to columns to update by primary keys + colsWithPKeys := append(cols, MaxLengthPrimaryKeys()...) 
+ + values, err := ml.columnsToValues(colsWithPKeys) + if err != nil { + return nil, newErrorWithCode(codes.InvalidArgument, "MaxLength.UpdateColumns", "MaxLengths", err) + } + + return spanner.Update("MaxLengths", colsWithPKeys, values), nil +} + +// FindMaxLength gets a MaxLength by primary key +func FindMaxLength(ctx context.Context, db YORODB, maxString string) (*MaxLength, error) { + key := spanner.Key{maxString} + row, err := db.ReadRow(ctx, "MaxLengths", key, MaxLengthColumns()) + if err != nil { + return nil, newError("FindMaxLength", "MaxLengths", err) + } + + decoder := newMaxLength_Decoder(MaxLengthColumns()) + ml, err := decoder(row) + if err != nil { + return nil, newErrorWithCode(codes.Internal, "FindMaxLength", "MaxLengths", err) + } + + return ml, nil +} + +// ReadMaxLength retrieves multiples rows from MaxLength by KeySet as a slice. +func ReadMaxLength(ctx context.Context, db YORODB, keys spanner.KeySet) ([]*MaxLength, error) { + var res []*MaxLength + + decoder := newMaxLength_Decoder(MaxLengthColumns()) + + rows := db.Read(ctx, "MaxLengths", keys, MaxLengthColumns()) + err := rows.Do(func(row *spanner.Row) error { + ml, err := decoder(row) + if err != nil { + return err + } + res = append(res, ml) + + return nil + }) + if err != nil { + return nil, newErrorWithCode(codes.Internal, "ReadMaxLength", "MaxLengths", err) + } + + return res, nil +} + +// Delete deletes the MaxLength from the database. +func (ml *MaxLength) Delete(ctx context.Context) *spanner.Mutation { + values, _ := ml.columnsToValues(MaxLengthPrimaryKeys()) + return spanner.Delete("MaxLengths", spanner.Key(values)) +} diff --git a/test/testmodels/underscore/out_of_order_primary_key.yo.go b/test/testmodels/underscore/out_of_order_primary_key.yo.go new file mode 100644 index 0000000..96a86d9 --- /dev/null +++ b/test/testmodels/underscore/out_of_order_primary_key.yo.go @@ -0,0 +1,114 @@ +// Code generated by yo. DO NOT EDIT. +// Package models contains the types. 
+package models + +import ( + "context" + "fmt" + + "cloud.google.com/go/spanner" +) + +// OutOfOrderPrimaryKey represents a row from 'OutOfOrderPrimaryKeys'. +type OutOfOrderPrimaryKey struct { + PKey1 string `spanner:"PKey1" json:"PKey1"` // PKey1 + PKey2 string `spanner:"PKey2" json:"PKey2"` // PKey2 + PKey3 string `spanner:"PKey3" json:"PKey3"` // PKey3 +} + +func OutOfOrderPrimaryKeyPrimaryKeys() []string { + return []string{ + "PKey2", + "PKey1", + "PKey3", + } +} + +func OutOfOrderPrimaryKeyColumns() []string { + return []string{ + "PKey1", + "PKey2", + "PKey3", + } +} + +func OutOfOrderPrimaryKeyWritableColumns() []string { + return []string{ + "PKey1", + "PKey2", + "PKey3", + } +} + +func (ooopk *OutOfOrderPrimaryKey) columnsToPtrs(cols []string, customPtrs map[string]interface{}) ([]interface{}, error) { + ret := make([]interface{}, 0, len(cols)) + for _, col := range cols { + if val, ok := customPtrs[col]; ok { + ret = append(ret, val) + continue + } + + switch col { + case "PKey1": + ret = append(ret, &ooopk.PKey1) + case "PKey2": + ret = append(ret, &ooopk.PKey2) + case "PKey3": + ret = append(ret, &ooopk.PKey3) + default: + return nil, fmt.Errorf("unknown column: %s", col) + } + } + return ret, nil +} + +func (ooopk *OutOfOrderPrimaryKey) columnsToValues(cols []string) ([]interface{}, error) { + ret := make([]interface{}, 0, len(cols)) + for _, col := range cols { + switch col { + case "PKey1": + ret = append(ret, ooopk.PKey1) + case "PKey2": + ret = append(ret, ooopk.PKey2) + case "PKey3": + ret = append(ret, ooopk.PKey3) + default: + return nil, fmt.Errorf("unknown column: %s", col) + } + } + + return ret, nil +} + +// newOutOfOrderPrimaryKey_Decoder returns a decoder which reads a row from *spanner.Row +// into OutOfOrderPrimaryKey. The decoder is not goroutine-safe. Don't use it concurrently. 
+func newOutOfOrderPrimaryKey_Decoder(cols []string) func(*spanner.Row) (*OutOfOrderPrimaryKey, error) { + customPtrs := map[string]interface{}{} + + return func(row *spanner.Row) (*OutOfOrderPrimaryKey, error) { + var ooopk OutOfOrderPrimaryKey + ptrs, err := ooopk.columnsToPtrs(cols, customPtrs) + if err != nil { + return nil, err + } + + if err := row.Columns(ptrs...); err != nil { + return nil, err + } + + return &ooopk, nil + } +} + +// Insert returns a Mutation to insert a row into a table. If the row already +// exists, the write or transaction fails. +func (ooopk *OutOfOrderPrimaryKey) Insert(ctx context.Context) *spanner.Mutation { + values, _ := ooopk.columnsToValues(OutOfOrderPrimaryKeyWritableColumns()) + return spanner.Insert("OutOfOrderPrimaryKeys", OutOfOrderPrimaryKeyWritableColumns(), values) +} + +// Delete deletes the OutOfOrderPrimaryKey from the database. +func (ooopk *OutOfOrderPrimaryKey) Delete(ctx context.Context) *spanner.Mutation { + values, _ := ooopk.columnsToValues(OutOfOrderPrimaryKeyPrimaryKeys()) + return spanner.Delete("OutOfOrderPrimaryKeys", spanner.Key(values)) +} diff --git a/test/testmodels/underscore/snake_case.yo.go b/test/testmodels/underscore/snake_case.yo.go new file mode 100644 index 0000000..d38836b --- /dev/null +++ b/test/testmodels/underscore/snake_case.yo.go @@ -0,0 +1,258 @@ +// Code generated by yo. DO NOT EDIT. +// Package models contains the types. +package models + +import ( + "context" + "fmt" + + "cloud.google.com/go/spanner" + "google.golang.org/api/iterator" + "google.golang.org/grpc/codes" +) + +// SnakeCase represents a row from 'snake_cases'. 
+type SnakeCase struct { + ID int64 `spanner:"id" json:"id"` // id + StringID string `spanner:"string_id" json:"string_id"` // string_id + FooBarBaz int64 `spanner:"foo_bar_baz" json:"foo_bar_baz"` // foo_bar_baz +} + +func SnakeCasePrimaryKeys() []string { + return []string{ + "id", + } +} + +func SnakeCaseColumns() []string { + return []string{ + "id", + "string_id", + "foo_bar_baz", + } +} + +func SnakeCaseWritableColumns() []string { + return []string{ + "id", + "string_id", + "foo_bar_baz", + } +} + +func (sc *SnakeCase) columnsToPtrs(cols []string, customPtrs map[string]interface{}) ([]interface{}, error) { + ret := make([]interface{}, 0, len(cols)) + for _, col := range cols { + if val, ok := customPtrs[col]; ok { + ret = append(ret, val) + continue + } + + switch col { + case "id": + ret = append(ret, &sc.ID) + case "string_id": + ret = append(ret, &sc.StringID) + case "foo_bar_baz": + ret = append(ret, &sc.FooBarBaz) + default: + return nil, fmt.Errorf("unknown column: %s", col) + } + } + return ret, nil +} + +func (sc *SnakeCase) columnsToValues(cols []string) ([]interface{}, error) { + ret := make([]interface{}, 0, len(cols)) + for _, col := range cols { + switch col { + case "id": + ret = append(ret, sc.ID) + case "string_id": + ret = append(ret, sc.StringID) + case "foo_bar_baz": + ret = append(ret, sc.FooBarBaz) + default: + return nil, fmt.Errorf("unknown column: %s", col) + } + } + + return ret, nil +} + +// newSnakeCase_Decoder returns a decoder which reads a row from *spanner.Row +// into SnakeCase. The decoder is not goroutine-safe. Don't use it concurrently. 
+func newSnakeCase_Decoder(cols []string) func(*spanner.Row) (*SnakeCase, error) { + customPtrs := map[string]interface{}{} + + return func(row *spanner.Row) (*SnakeCase, error) { + var sc SnakeCase + ptrs, err := sc.columnsToPtrs(cols, customPtrs) + if err != nil { + return nil, err + } + + if err := row.Columns(ptrs...); err != nil { + return nil, err + } + + return &sc, nil + } +} + +// Insert returns a Mutation to insert a row into a table. If the row already +// exists, the write or transaction fails. +func (sc *SnakeCase) Insert(ctx context.Context) *spanner.Mutation { + values, _ := sc.columnsToValues(SnakeCaseWritableColumns()) + return spanner.Insert("snake_cases", SnakeCaseWritableColumns(), values) +} + +// Update returns a Mutation to update a row in a table. If the row does not +// already exist, the write or transaction fails. +func (sc *SnakeCase) Update(ctx context.Context) *spanner.Mutation { + values, _ := sc.columnsToValues(SnakeCaseWritableColumns()) + return spanner.Update("snake_cases", SnakeCaseWritableColumns(), values) +} + +// InsertOrUpdate returns a Mutation to insert a row into a table. If the row +// already exists, it updates it instead. Any column values not explicitly +// written are preserved. +func (sc *SnakeCase) InsertOrUpdate(ctx context.Context) *spanner.Mutation { + values, _ := sc.columnsToValues(SnakeCaseWritableColumns()) + return spanner.InsertOrUpdate("snake_cases", SnakeCaseWritableColumns(), values) +} + +// UpdateColumns returns a Mutation to update specified columns of a row in a table. +func (sc *SnakeCase) UpdateColumns(ctx context.Context, cols ...string) (*spanner.Mutation, error) { + // add primary keys to columns to update by primary keys + colsWithPKeys := append(cols, SnakeCasePrimaryKeys()...) 
+ + values, err := sc.columnsToValues(colsWithPKeys) + if err != nil { + return nil, newErrorWithCode(codes.InvalidArgument, "SnakeCase.UpdateColumns", "snake_cases", err) + } + + return spanner.Update("snake_cases", colsWithPKeys, values), nil +} + +// FindSnakeCase gets a SnakeCase by primary key +func FindSnakeCase(ctx context.Context, db YORODB, id int64) (*SnakeCase, error) { + key := spanner.Key{id} + row, err := db.ReadRow(ctx, "snake_cases", key, SnakeCaseColumns()) + if err != nil { + return nil, newError("FindSnakeCase", "snake_cases", err) + } + + decoder := newSnakeCase_Decoder(SnakeCaseColumns()) + sc, err := decoder(row) + if err != nil { + return nil, newErrorWithCode(codes.Internal, "FindSnakeCase", "snake_cases", err) + } + + return sc, nil +} + +// ReadSnakeCase retrieves multiples rows from SnakeCase by KeySet as a slice. +func ReadSnakeCase(ctx context.Context, db YORODB, keys spanner.KeySet) ([]*SnakeCase, error) { + var res []*SnakeCase + + decoder := newSnakeCase_Decoder(SnakeCaseColumns()) + + rows := db.Read(ctx, "snake_cases", keys, SnakeCaseColumns()) + err := rows.Do(func(row *spanner.Row) error { + sc, err := decoder(row) + if err != nil { + return err + } + res = append(res, sc) + + return nil + }) + if err != nil { + return nil, newErrorWithCode(codes.Internal, "ReadSnakeCase", "snake_cases", err) + } + + return res, nil +} + +// Delete deletes the SnakeCase from the database. +func (sc *SnakeCase) Delete(ctx context.Context) *spanner.Mutation { + values, _ := sc.columnsToValues(SnakeCasePrimaryKeys()) + return spanner.Delete("snake_cases", spanner.Key(values)) +} + +// FindSnakeCasesByStringIDFooBarBaz retrieves multiple rows from 'snake_cases' as a slice of SnakeCase. +// +// Generated from index 'snake_cases_by_string_id'. 
+func FindSnakeCasesByStringIDFooBarBaz(ctx context.Context, db YORODB, stringID string, fooBarBaz int64) ([]*SnakeCase, error) { + const sqlstr = "SELECT " + + "id, string_id, foo_bar_baz " + + "FROM snake_cases@{FORCE_INDEX=snake_cases_by_string_id} " + + "WHERE string_id = @param0 AND foo_bar_baz = @param1" + + stmt := spanner.NewStatement(sqlstr) + stmt.Params["param0"] = stringID + stmt.Params["param1"] = fooBarBaz + + decoder := newSnakeCase_Decoder(SnakeCaseColumns()) + + // run query + YOLog(ctx, sqlstr, stringID, fooBarBaz) + iter := db.Query(ctx, stmt) + defer iter.Stop() + + // load results + res := []*SnakeCase{} + for { + row, err := iter.Next() + if err != nil { + if err == iterator.Done { + break + } + return nil, newError("FindSnakeCasesByStringIDFooBarBaz", "snake_cases", err) + } + + sc, err := decoder(row) + if err != nil { + return nil, newErrorWithCode(codes.Internal, "FindSnakeCasesByStringIDFooBarBaz", "snake_cases", err) + } + + res = append(res, sc) + } + + return res, nil +} + +// ReadSnakeCasesByStringIDFooBarBaz retrieves multiples rows from 'snake_cases' by KeySet as a slice. +// +// This does not retrieve all columns of 'snake_cases' because an index has only columns +// used for primary key, index key and storing columns. If you need more columns, add storing +// columns or Read by primary key or Query with join. +// +// Generated from unique index 'snake_cases_by_string_id'. 
+func ReadSnakeCasesByStringIDFooBarBaz(ctx context.Context, db YORODB, keys spanner.KeySet) ([]*SnakeCase, error) { + var res []*SnakeCase + columns := []string{ + "id", + "string_id", + "foo_bar_baz", + } + + decoder := newSnakeCase_Decoder(columns) + + rows := db.ReadUsingIndex(ctx, "snake_cases", "snake_cases_by_string_id", keys, columns) + err := rows.Do(func(row *spanner.Row) error { + sc, err := decoder(row) + if err != nil { + return err + } + res = append(res, sc) + + return nil + }) + if err != nil { + return nil, newErrorWithCode(codes.Internal, "ReadSnakeCasesByStringIDFooBarBaz", "snake_cases", err) + } + + return res, nil +} diff --git a/test/testmodels/underscore/yo_db.yo.go b/test/testmodels/underscore/yo_db.yo.go new file mode 100644 index 0000000..e2ee270 --- /dev/null +++ b/test/testmodels/underscore/yo_db.yo.go @@ -0,0 +1,79 @@ +// Code generated by yo. DO NOT EDIT. +// Package models contains the types. +package models + +import ( + "context" + "errors" + "fmt" + + "cloud.google.com/go/spanner" + "github.com/googleapis/gax-go/v2/apierror" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +// YODB is the common interface for database operations. +type YODB interface { + YORODB +} + +// YORODB is the common interface for database operations. +type YORODB interface { + ReadRow(ctx context.Context, table string, key spanner.Key, columns []string) (*spanner.Row, error) + Read(ctx context.Context, table string, keys spanner.KeySet, columns []string) *spanner.RowIterator + ReadUsingIndex(ctx context.Context, table, index string, keys spanner.KeySet, columns []string) (ri *spanner.RowIterator) + Query(ctx context.Context, statement spanner.Statement) *spanner.RowIterator +} + +// YOLog provides the log func used by generated queries. 
+var YOLog = func(context.Context, string, ...interface{}) {} + +func newError(method, table string, err error) error { + code := spanner.ErrCode(err) + return newErrorWithCode(code, method, table, err) +} + +func newErrorWithCode(code codes.Code, method, table string, err error) error { + return &yoError{ + method: method, + table: table, + err: err, + code: code, + } +} + +type yoError struct { + err error + method string + table string + code codes.Code +} + +func (e yoError) Error() string { + return fmt.Sprintf("yo error in %s(%s): %v", e.method, e.table, e.err) +} + +func (e yoError) Unwrap() error { + return e.err +} + +func (e yoError) DBTableName() string { + return e.table +} + +// GRPCStatus implements a conversion to a gRPC status using `status.Convert(error)`. +// If the error is originated from the Spanner library, this returns a gRPC status of +// the original error. It may contain details of the status such as RetryInfo. +func (e yoError) GRPCStatus() *status.Status { + var ae *apierror.APIError + if errors.As(e.err, &ae) { + return status.Convert(ae) + } + + return status.New(e.code, e.Error()) +} + +func (e yoError) Timeout() bool { return e.code == codes.DeadlineExceeded } +func (e yoError) Temporary() bool { return e.code == codes.DeadlineExceeded } +func (e yoError) NotFound() bool { return e.code == codes.NotFound }