// nidus-sync/db/models/fileupload.csv.bob.go
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.
package models
import (
"context"
"fmt"
"io"
"github.com/Gleipnir-Technology/bob"
"github.com/Gleipnir-Technology/bob/dialect/psql"
"github.com/Gleipnir-Technology/bob/dialect/psql/dialect"
"github.com/Gleipnir-Technology/bob/dialect/psql/dm"
"github.com/Gleipnir-Technology/bob/dialect/psql/sm"
"github.com/Gleipnir-Technology/bob/dialect/psql/um"
"github.com/Gleipnir-Technology/bob/expr"
"github.com/Gleipnir-Technology/bob/mods"
"github.com/Gleipnir-Technology/bob/orm"
"github.com/Gleipnir-Technology/bob/types/pgtypes"
enums "github.com/Gleipnir-Technology/nidus-sync/db/enums"
"github.com/aarondl/opt/omit"
)
// FileuploadCSV is an object representing the database table.
// It maps one row of "fileupload"."csv" (see FileuploadCSVS).
type FileuploadCSV struct {
// FileID is the primary key ("file_id"); it also references fileupload.file (see fileuploadCSVR.File).
FileID int32 `db:"file_id,pk" `
// Type is the CSV kind stored in the "type_" column.
Type enums.FileuploadCsvtype `db:"type_" `
// R holds loaded relationships; excluded from DB mapping (db:"-").
R fileuploadCSVR `db:"-" `
}
// FileuploadCSVSlice is an alias for a slice of pointers to FileuploadCSV.
// This should almost always be used instead of []*FileuploadCSV.
// It carries the bulk operations (UpdateAll, DeleteAll, ReloadAll, LoadFile, ...).
type FileuploadCSVSlice []*FileuploadCSV
// FileuploadCSVS contains methods to work with the csv table.
// The table lives in schema "fileupload" with name "csv".
var FileuploadCSVS = psql.NewTablex[*FileuploadCSV, FileuploadCSVSlice, *FileuploadCSVSetter]("fileupload", "csv", buildFileuploadCSVColumns("fileupload.csv"))
// FileuploadCSVSQuery is a query on the csv table, returned by FileuploadCSVS.Query.
type FileuploadCSVSQuery = *psql.ViewQuery[*FileuploadCSV, FileuploadCSVSlice]
// fileuploadCSVR is where relationships are stored.
type fileuploadCSVR struct {
// File is the parent fileupload.file row, joined via the csv_file_id_fkey constraint.
File *FileuploadFile // fileupload.csv.csv_file_id_fkey
}
// buildFileuploadCSVColumns constructs the column expression set for the
// csv table under the given table alias.
func buildFileuploadCSVColumns(alias string) fileuploadCSVColumns {
	cols := fileuploadCSVColumns{
		tableAlias: alias,
		FileID:     psql.Quote(alias, "file_id"),
		Type:       psql.Quote(alias, "type_"),
	}
	// The columns expression always resolves against the real table name,
	// regardless of the alias used for the individual column expressions.
	cols.ColumnsExpr = expr.NewColumnsExpr(
		"file_id", "type_",
	).WithParent("fileupload.csv")
	return cols
}
// fileuploadCSVColumns exposes each column of the csv table as a quoted
// expression, plus the combined ColumnsExpr for SELECT lists.
type fileuploadCSVColumns struct {
expr.ColumnsExpr
// tableAlias is the alias the column expressions are quoted under.
tableAlias string
FileID psql.Expression
Type psql.Expression
}
// Alias returns the table alias these column expressions are quoted under.
func (c fileuploadCSVColumns) Alias() string {
return c.tableAlias
}
// AliasedAs returns a fresh column set quoted under the given alias.
func (fileuploadCSVColumns) AliasedAs(alias string) fileuploadCSVColumns {
return buildFileuploadCSVColumns(alias)
}
// FileuploadCSVSetter is used for insert/upsert/update operations
// All values are optional, and do not have to be set
// Generated columns are not included
type FileuploadCSVSetter struct {
// FileID, when set, writes the "file_id" primary-key column.
FileID omit.Val[int32] `db:"file_id,pk" `
// Type, when set, writes the "type_" column.
Type omit.Val[enums.FileuploadCsvtype] `db:"type_" `
}
// SetColumns reports which column names carry a value in this setter,
// in table-column order ("file_id", then "type_").
func (s FileuploadCSVSetter) SetColumns() []string {
	columns := []struct {
		name string
		set  bool
	}{
		{"file_id", s.FileID.IsValue()},
		{"type_", s.Type.IsValue()},
	}
	out := make([]string, 0, len(columns))
	for _, c := range columns {
		if c.set {
			out = append(out, c.name)
		}
	}
	return out
}
// Overwrite copies every set value of the setter onto dst, leaving
// unset fields of dst untouched. The two fields are independent, so
// assignment order does not matter.
func (s FileuploadCSVSetter) Overwrite(dst *FileuploadCSV) {
	if s.Type.IsValue() {
		dst.Type = s.Type.MustGet()
	}
	if s.FileID.IsValue() {
		dst.FileID = s.FileID.MustGet()
	}
}
// Apply wires this setter into an INSERT query: it registers the
// BeforeInsert hooks and appends one VALUES tuple. Unset fields emit
// the SQL DEFAULT keyword so the database fills them in.
func (s *FileuploadCSVSetter) Apply(q *dialect.InsertQuery) {
q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
return FileuploadCSVS.BeforeInsertHooks.RunHooks(ctx, exec, s)
})
q.AppendValues(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
// vals is positional: index 0 = file_id, index 1 = type_. The order
// must match the table's column list exactly.
vals := make([]bob.Expression, 2)
if s.FileID.IsValue() {
vals[0] = psql.Arg(s.FileID.MustGet())
} else {
vals[0] = psql.Raw("DEFAULT")
}
if s.Type.IsValue() {
vals[1] = psql.Arg(s.Type.MustGet())
} else {
vals[1] = psql.Raw("DEFAULT")
}
return bob.ExpressSlice(ctx, w, d, start, vals, "", ", ", "")
}))
}
// UpdateMod turns the set fields into a SET clause for an UPDATE query.
func (s FileuploadCSVSetter) UpdateMod() bob.Mod[*dialect.UpdateQuery] {
return um.Set(s.Expressions()...)
}
// Expressions builds one `col = $n` expression per set field. An optional
// prefix (e.g. a table alias) is prepended to the quoted column name.
func (s FileuploadCSVSetter) Expressions(prefix ...string) []bob.Expression {
exprs := make([]bob.Expression, 0, 2)
if s.FileID.IsValue() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "file_id")...),
psql.Arg(s.FileID),
}})
}
if s.Type.IsValue() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "type_")...),
psql.Arg(s.Type),
}})
}
return exprs
}
// FindFileuploadCSV retrieves a single record by primary key.
// If cols is empty, all columns are returned; otherwise the SELECT
// list is restricted to exactly those columns.
func FindFileuploadCSV(ctx context.Context, exec bob.Executor, FileIDPK int32, cols ...string) (*FileuploadCSV, error) {
	queryMods := []bob.Mod[*dialect.SelectQuery]{
		sm.Where(FileuploadCSVS.Columns.FileID.EQ(psql.Arg(FileIDPK))),
	}
	if len(cols) > 0 {
		queryMods = append(queryMods, sm.Columns(FileuploadCSVS.Columns.Only(cols...)))
	}
	return FileuploadCSVS.Query(queryMods...).One(ctx, exec)
}
// FileuploadCSVExists checks the presence of a single record by primary key.
func FileuploadCSVExists(ctx context.Context, exec bob.Executor, FileIDPK int32) (bool, error) {
	q := FileuploadCSVS.Query(
		sm.Where(FileuploadCSVS.Columns.FileID.EQ(psql.Arg(FileIDPK))),
	)
	return q.Exists(ctx, exec)
}
// AfterQueryHook is called after FileuploadCSV is retrieved from the database.
// It simply runs the slice-level hooks on a one-element slice, which
// dispatches to the matching After*Hooks for the query type.
func (o *FileuploadCSV) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error {
	return FileuploadCSVSlice{o}.AfterQueryHook(ctx, exec, queryType)
}
// primaryKeyVals returns the primary key values of the FileuploadCSV
// as a bound SQL argument (single-column key: file_id).
func (o *FileuploadCSV) primaryKeyVals() bob.Expression {
return psql.Arg(o.FileID)
}
// pkEQ builds the `"fileupload.csv"."file_id" = $1` predicate that
// identifies this row by primary key.
func (o *FileuploadCSV) pkEQ() dialect.Expression {
return psql.Quote("fileupload.csv", "file_id").EQ(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
return o.primaryKeyVals().WriteSQL(ctx, w, d, start)
}))
}
// Update uses an executor to update the FileuploadCSV and refreshes o
// from the returned row.
//
// Bug fix: the freshly fetched value v has an empty R (relationships
// are never loaded by the UPDATE round trip), so we must copy o.R into
// v before overwriting o — the same pattern Reload uses. The previous
// code assigned in the wrong direction (o.R = v.R), which wiped any
// relationships already loaded on o.
func (o *FileuploadCSV) Update(ctx context.Context, exec bob.Executor, s *FileuploadCSVSetter) error {
	v, err := FileuploadCSVS.Update(s.UpdateMod(), um.Where(o.pkEQ())).One(ctx, exec)
	if err != nil {
		return err
	}
	// Preserve loaded relationships across the overwrite below.
	v.R = o.R
	*o = *v
	return nil
}
// Delete deletes a single FileuploadCSV record with an executor,
// matching the row by primary key.
func (o *FileuploadCSV) Delete(ctx context.Context, exec bob.Executor) error {
	if _, err := FileuploadCSVS.Delete(dm.Where(o.pkEQ())).Exec(ctx, exec); err != nil {
		return err
	}
	return nil
}
// Reload refreshes the FileuploadCSV using the executor, re-querying
// the row by primary key and keeping any already-loaded relationships.
func (o *FileuploadCSV) Reload(ctx context.Context, exec bob.Executor) error {
	fresh, err := FileuploadCSVS.Query(
		sm.Where(FileuploadCSVS.Columns.FileID.EQ(psql.Arg(o.FileID))),
	).One(ctx, exec)
	if err != nil {
		return err
	}
	// Carry the existing relationships over before replacing o wholesale.
	fresh.R = o.R
	*o = *fresh
	return nil
}
// AfterQueryHook is called after FileuploadCSVSlice is retrieved from the database.
// It dispatches to the After*Hooks registered on FileuploadCSVS that match
// the query type; unknown query types run no hooks and return nil.
func (o FileuploadCSVSlice) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error {
var err error
switch queryType {
case bob.QueryTypeSelect:
ctx, err = FileuploadCSVS.AfterSelectHooks.RunHooks(ctx, exec, o)
case bob.QueryTypeInsert:
ctx, err = FileuploadCSVS.AfterInsertHooks.RunHooks(ctx, exec, o)
case bob.QueryTypeUpdate:
ctx, err = FileuploadCSVS.AfterUpdateHooks.RunHooks(ctx, exec, o)
case bob.QueryTypeDelete:
ctx, err = FileuploadCSVS.AfterDeleteHooks.RunHooks(ctx, exec, o)
}
return err
}
// pkIN builds a `"fileupload.csv"."file_id" IN (...)` predicate over the
// primary keys of every row in the slice. An empty slice yields the
// always-false predicate NULL so the query matches nothing.
func (o FileuploadCSVSlice) pkIN() dialect.Expression {
if len(o) == 0 {
return psql.Raw("NULL")
}
return psql.Quote("fileupload.csv", "file_id").In(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
pkPairs := make([]bob.Expression, len(o))
for i, row := range o {
pkPairs[i] = row.primaryKeyVals()
}
return bob.ExpressSlice(ctx, w, d, start, pkPairs, "", ", ", "")
}))
}
// copyMatchingRows finds models in the given slice that have the same primary key,
// copies the existing relationships from the old model onto the replacement,
// and swaps the replacement into the slice in place of the old model.
func (o FileuploadCSVSlice) copyMatchingRows(from ...*FileuploadCSV) {
	for i, existing := range o {
		for _, replacement := range from {
			if replacement.FileID == existing.FileID {
				replacement.R = existing.R
				o[i] = replacement
				break
			}
		}
	}
}
// UpdateMod modifies an update query with "WHERE primary_key IN (o...)".
// It also registers BeforeUpdate hooks and a loader that, on RETURNING,
// splices the refreshed rows back into this slice (preserving loaded
// relationships via copyMatchingRows).
func (o FileuploadCSVSlice) UpdateMod() bob.Mod[*dialect.UpdateQuery] {
return bob.ModFunc[*dialect.UpdateQuery](func(q *dialect.UpdateQuery) {
q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
return FileuploadCSVS.BeforeUpdateHooks.RunHooks(ctx, exec, o)
})
q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error {
var err error
switch retrieved := retrieved.(type) {
case *FileuploadCSV:
o.copyMatchingRows(retrieved)
case []*FileuploadCSV:
o.copyMatchingRows(retrieved...)
case FileuploadCSVSlice:
o.copyMatchingRows(retrieved...)
default:
// If the retrieved value is not a FileuploadCSV or a slice of FileuploadCSV
// then run the AfterUpdateHooks on the slice
_, err = FileuploadCSVS.AfterUpdateHooks.RunHooks(ctx, exec, o)
}
return err
}))
q.AppendWhere(o.pkIN())
})
}
// DeleteMod modifies a delete query with "WHERE primary_key IN (o...)".
// It also registers BeforeDelete hooks and a loader that, on RETURNING,
// splices the deleted rows back into this slice via copyMatchingRows.
func (o FileuploadCSVSlice) DeleteMod() bob.Mod[*dialect.DeleteQuery] {
return bob.ModFunc[*dialect.DeleteQuery](func(q *dialect.DeleteQuery) {
q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
return FileuploadCSVS.BeforeDeleteHooks.RunHooks(ctx, exec, o)
})
q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error {
var err error
switch retrieved := retrieved.(type) {
case *FileuploadCSV:
o.copyMatchingRows(retrieved)
case []*FileuploadCSV:
o.copyMatchingRows(retrieved...)
case FileuploadCSVSlice:
o.copyMatchingRows(retrieved...)
default:
// If the retrieved value is not a FileuploadCSV or a slice of FileuploadCSV
// then run the AfterDeleteHooks on the slice
_, err = FileuploadCSVS.AfterDeleteHooks.RunHooks(ctx, exec, o)
}
return err
}))
q.AppendWhere(o.pkIN())
})
}
// UpdateAll applies the given setter to every row in the slice with a
// single UPDATE. An empty slice is a no-op.
func (o FileuploadCSVSlice) UpdateAll(ctx context.Context, exec bob.Executor, vals FileuploadCSVSetter) error {
	if len(o) == 0 {
		return nil
	}
	if _, err := FileuploadCSVS.Update(vals.UpdateMod(), o.UpdateMod()).All(ctx, exec); err != nil {
		return err
	}
	return nil
}
// DeleteAll removes every row in the slice with a single DELETE.
// An empty slice is a no-op.
func (o FileuploadCSVSlice) DeleteAll(ctx context.Context, exec bob.Executor) error {
	if len(o) == 0 {
		return nil
	}
	if _, err := FileuploadCSVS.Delete(o.DeleteMod()).Exec(ctx, exec); err != nil {
		return err
	}
	return nil
}
// ReloadAll re-queries every row in the slice by primary key and swaps
// the fresh rows in, preserving loaded relationships. Empty slice: no-op.
func (o FileuploadCSVSlice) ReloadAll(ctx context.Context, exec bob.Executor) error {
	if len(o) == 0 {
		return nil
	}
	refreshed, err := FileuploadCSVS.Query(sm.Where(o.pkIN())).All(ctx, exec)
	if err != nil {
		return err
	}
	o.copyMatchingRows(refreshed...)
	return nil
}
// File starts a query for the related fileupload.file row, matched on
// FileuploadFiles.ID = o.FileID. Additional mods are applied first.
func (o *FileuploadCSV) File(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadFilesQuery {
	allMods := append(mods, sm.Where(FileuploadFiles.Columns.ID.EQ(psql.Arg(o.FileID))))
	return FileuploadFiles.Query(allMods...)
}
// File starts a query for the fileupload.file rows related to every
// non-nil element of the slice. The keys are shipped to the server as a
// single integer[] argument and expanded with unnest, so the statement
// has one bind parameter regardless of slice length.
func (os FileuploadCSVSlice) File(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadFilesQuery {
pkFileID := make(pgtypes.Array[int32], 0, len(os))
for _, o := range os {
if o == nil {
continue
}
pkFileID = append(pkFileID, o.FileID)
}
PKArgExpr := psql.Select(sm.Columns(
psql.F("unnest", psql.Cast(psql.Arg(pkFileID), "integer[]")),
))
return FileuploadFiles.Query(append(mods,
sm.Where(psql.Group(FileuploadFiles.Columns.ID).OP("IN", PKArgExpr)),
)...)
}
// attachFileuploadCSVFile0 points fileuploadCSV0 at fileuploadFile1 by
// updating its file_id column. count is part of the generated attach
// signature and is unused here (single-row attach).
func attachFileuploadCSVFile0(ctx context.Context, exec bob.Executor, count int, fileuploadCSV0 *FileuploadCSV, fileuploadFile1 *FileuploadFile) (*FileuploadCSV, error) {
	setter := &FileuploadCSVSetter{FileID: omit.From(fileuploadFile1.ID)}
	if err := fileuploadCSV0.Update(ctx, exec, setter); err != nil {
		return nil, fmt.Errorf("attachFileuploadCSVFile0: %w", err)
	}
	return fileuploadCSV0, nil
}
// InsertFile inserts the related fileupload.file row described by the
// setter, attaches it to this csv row, and links both R structs.
func (fileuploadCSV0 *FileuploadCSV) InsertFile(ctx context.Context, exec bob.Executor, related *FileuploadFileSetter) error {
	fileuploadFile1, err := FileuploadFiles.Insert(related).One(ctx, exec)
	if err != nil {
		return fmt.Errorf("inserting related objects: %w", err)
	}
	if _, err = attachFileuploadCSVFile0(ctx, exec, 1, fileuploadCSV0, fileuploadFile1); err != nil {
		return err
	}
	// Wire up both sides of the relationship in memory.
	fileuploadCSV0.R.File = fileuploadFile1
	fileuploadFile1.R.CSV = fileuploadCSV0
	return nil
}
// AttachFile links an existing fileupload.file row to this csv row by
// updating file_id, then wires up both R structs in memory.
func (fileuploadCSV0 *FileuploadCSV) AttachFile(ctx context.Context, exec bob.Executor, fileuploadFile1 *FileuploadFile) error {
	if _, err := attachFileuploadCSVFile0(ctx, exec, 1, fileuploadCSV0, fileuploadFile1); err != nil {
		return err
	}
	fileuploadCSV0.R.File = fileuploadFile1
	fileuploadFile1.R.CSV = fileuploadCSV0
	return nil
}
// fileuploadCSVWhere provides typed WHERE-clause builders for each column
// of the csv table, parameterized over the filterable query kind Q.
type fileuploadCSVWhere[Q psql.Filterable] struct {
FileID psql.WhereMod[Q, int32]
Type psql.WhereMod[Q, enums.FileuploadCsvtype]
}
// AliasedAs returns a where-builder whose columns are quoted under alias.
func (fileuploadCSVWhere[Q]) AliasedAs(alias string) fileuploadCSVWhere[Q] {
return buildFileuploadCSVWhere[Q](buildFileuploadCSVColumns(alias))
}
// buildFileuploadCSVWhere constructs the typed where-builders from a
// concrete column set.
func buildFileuploadCSVWhere[Q psql.Filterable](cols fileuploadCSVColumns) fileuploadCSVWhere[Q] {
return fileuploadCSVWhere[Q]{
FileID: psql.Where[Q, int32](cols.FileID),
Type: psql.Where[Q, enums.FileuploadCsvtype](cols.Type),
}
}
// Preload attaches an eagerly-fetched related object to o.R by
// relationship name. Only "File" is supported; any other name, or a
// value of the wrong type, is an error. A nil receiver is a no-op.
func (o *FileuploadCSV) Preload(name string, retrieved any) error {
	if o == nil {
		return nil
	}
	if name != "File" {
		return fmt.Errorf("fileuploadCSV has no relationship %q", name)
	}
	rel, ok := retrieved.(*FileuploadFile)
	if !ok {
		return fmt.Errorf("fileuploadCSV cannot load %T as %q", retrieved, name)
	}
	o.R.File = rel
	if rel != nil {
		// Back-link so the file also sees this csv row.
		rel.R.CSV = o
	}
	return nil
}
// fileuploadCSVPreloader exposes a preloader factory per relationship
// of the csv table.
type fileuploadCSVPreloader struct {
File func(...psql.PreloadOption) psql.Preloader
}
// buildFileuploadCSVPreloader wires the "File" preloader: a single join
// side from fileupload.csv(file_id) to fileupload.file(id), selecting
// all file columns.
func buildFileuploadCSVPreloader() fileuploadCSVPreloader {
return fileuploadCSVPreloader{
File: func(opts ...psql.PreloadOption) psql.Preloader {
return psql.Preload[*FileuploadFile, FileuploadFileSlice](psql.PreloadRel{
Name: "File",
Sides: []psql.PreloadSide{
{
From: FileuploadCSVS,
To: FileuploadFiles,
FromColumns: []string{"file_id"},
ToColumns: []string{"id"},
},
},
}, FileuploadFiles.Columns.Names(), opts...)
},
}
}
// fileuploadCSVThenLoader exposes post-query ("then") loaders per
// relationship of the csv table.
type fileuploadCSVThenLoader[Q orm.Loadable] struct {
File func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
}
// buildFileuploadCSVThenLoader builds the "File" then-loader, which
// invokes LoadFile on whatever the query retrieved (single row or slice,
// via the local FileLoadInterface).
func buildFileuploadCSVThenLoader[Q orm.Loadable]() fileuploadCSVThenLoader[Q] {
type FileLoadInterface interface {
LoadFile(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
}
return fileuploadCSVThenLoader[Q]{
File: thenLoadBuilder[Q](
"File",
func(ctx context.Context, exec bob.Executor, retrieved FileLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
return retrieved.LoadFile(ctx, exec, mods...)
},
),
}
}
// LoadFile loads the fileuploadCSV's File into the .R struct.
// A nil receiver is a no-op; on success both sides of the relationship
// are linked in memory.
func (o *FileuploadCSV) LoadFile(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
	if o == nil {
		return nil
	}
	// Clear any stale relationship before querying.
	o.R.File = nil
	file, err := o.File(mods...).One(ctx, exec)
	if err != nil {
		return err
	}
	file.R.CSV = o
	o.R.File = file
	return nil
}
// LoadFile loads each fileuploadCSV's File into the .R struct.
// All related files are fetched in one query, then matched back to the
// slice elements by primary key. On each match both sides of the
// relationship are linked in memory. An empty slice is a no-op.
//
// Improvement: the original matched rows with a nested loop (O(n·m));
// this indexes the fetched files by ID once for O(n+m) matching.
// Behavior is unchanged: file IDs are the table's primary key, so the
// first (only) file per ID is used, exactly as the break did before.
func (os FileuploadCSVSlice) LoadFile(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
	if len(os) == 0 {
		return nil
	}
	fileuploadFiles, err := os.File(mods...).All(ctx, exec)
	if err != nil {
		return err
	}
	byID := make(map[int32]*FileuploadFile, len(fileuploadFiles))
	for _, rel := range fileuploadFiles {
		// Keep the first occurrence, matching the original first-match break.
		if _, dup := byID[rel.ID]; !dup {
			byID[rel.ID] = rel
		}
	}
	for _, o := range os {
		if o == nil {
			continue
		}
		if rel, ok := byID[o.FileID]; ok {
			rel.R.CSV = o
			o.R.File = rel
		}
	}
	return nil
}
// fileuploadCSVJoins exposes join-clause builders per relationship of
// the csv table. typ is the join keyword (e.g. INNER/LEFT) used by File.
type fileuploadCSVJoins[Q dialect.Joinable] struct {
typ string
File modAs[Q, fileuploadFileColumns]
}
// aliasedAs rebuilds the join builders with columns quoted under alias,
// keeping the same join type.
func (j fileuploadCSVJoins[Q]) aliasedAs(alias string) fileuploadCSVJoins[Q] {
return buildFileuploadCSVJoins[Q](buildFileuploadCSVColumns(alias), j.typ)
}
// buildFileuploadCSVJoins wires the "File" join: joins fileupload.file
// (aliased per the target columns) ON file.id = csv.file_id, using the
// given join type.
func buildFileuploadCSVJoins[Q dialect.Joinable](cols fileuploadCSVColumns, typ string) fileuploadCSVJoins[Q] {
return fileuploadCSVJoins[Q]{
typ: typ,
File: modAs[Q, fileuploadFileColumns]{
c: FileuploadFiles.Columns,
f: func(to fileuploadFileColumns) bob.Mod[Q] {
mods := make(mods.QueryMods[Q], 0, 1)
{
mods = append(mods, dialect.Join[Q](typ, FileuploadFiles.Name().As(to.Alias())).On(
to.ID.EQ(cols.FileID),
))
}
return mods
},
},
}
}