1914 lines
52 KiB
Go
1914 lines
52 KiB
Go
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
|
// This file is meant to be re-generated in place and/or deleted at any time.
|
|
|
|
package models
|
|
|
|
import (
|
|
"context"
|
|
"fmt"
|
|
"io"
|
|
"time"
|
|
|
|
"github.com/Gleipnir-Technology/bob"
|
|
"github.com/Gleipnir-Technology/bob/dialect/psql"
|
|
"github.com/Gleipnir-Technology/bob/dialect/psql/dialect"
|
|
"github.com/Gleipnir-Technology/bob/dialect/psql/dm"
|
|
"github.com/Gleipnir-Technology/bob/dialect/psql/sm"
|
|
"github.com/Gleipnir-Technology/bob/dialect/psql/um"
|
|
"github.com/Gleipnir-Technology/bob/expr"
|
|
"github.com/Gleipnir-Technology/bob/mods"
|
|
"github.com/Gleipnir-Technology/bob/orm"
|
|
"github.com/Gleipnir-Technology/bob/types/pgtypes"
|
|
enums "github.com/Gleipnir-Technology/nidus-sync/db/enums"
|
|
"github.com/aarondl/opt/null"
|
|
"github.com/aarondl/opt/omit"
|
|
"github.com/aarondl/opt/omitnull"
|
|
"github.com/google/uuid"
|
|
)
|
|
|
|
// FileuploadFile is an object representing the database table
// "fileupload"."file". One value of this struct corresponds to one row.
type FileuploadFile struct {
	ID             int32                          `db:"id,pk" `
	ContentType    string                         `db:"content_type" `
	Created        time.Time                      `db:"created" `
	CreatorID      int32                          `db:"creator_id" `
	Deleted        null.Val[time.Time]            `db:"deleted" `
	Name           string                         `db:"name" `
	OrganizationID int32                          `db:"organization_id" `
	Status         enums.FileuploadFilestatustype `db:"status" `
	SizeBytes      int32                          `db:"size_bytes" `
	FileUUID       uuid.UUID                      `db:"file_uuid" `
	Committer      null.Val[int32]                `db:"committer" `

	// R holds eagerly-loaded relationship structs; never persisted (db:"-").
	R fileuploadFileR `db:"-" `

	// C holds per-row column expressions (type declared elsewhere in the
	// package — presumably mirrors fileuploadFileColumns; confirm there).
	C fileuploadFileC `db:"-" `
}
|
|
|
|
// FileuploadFileSlice is an alias for a slice of pointers to FileuploadFile.
// This should almost always be used instead of []*FileuploadFile, since the
// slice type carries the query/update/delete helper methods defined below.
type FileuploadFileSlice []*FileuploadFile
|
|
|
|
// FileuploadFiles contains methods to work with the "file" table in the
// "fileupload" schema. It is the package-level entry point for queries,
// inserts, updates, deletes, and hook registration for this model.
var FileuploadFiles = psql.NewTablex[*FileuploadFile, FileuploadFileSlice, *FileuploadFileSetter]("fileupload", "file", buildFileuploadFileColumns("fileupload.file"))
|
|
|
|
// FileuploadFilesQuery is a query on the file table, as returned by
// FileuploadFiles.Query and the relationship starter methods below.
type FileuploadFilesQuery = *psql.ViewQuery[*FileuploadFile, FileuploadFileSlice]
|
|
|
|
// fileuploadFileR is where relationships are stored after eager loading or
// the Insert*/Attach* helpers run. Each field comment names the foreign key
// that backs the relationship.
type fileuploadFileR struct {
	CSV           *FileuploadCSV           // fileupload.csv.csv_file_id_fkey
	ErrorFiles    FileuploadErrorFileSlice // fileupload.error_file.error_file_file_id_fkey
	CommitterUser *User                    // fileupload.file.file_committer_fkey
	CreatorUser   *User                    // fileupload.file.file_creator_id_fkey
	Organization  *Organization            // fileupload.file.file_organization_id_fkey
	Sites         SiteSlice                // site.site_file_id_fkey
}
|
|
|
|
// buildFileuploadFileColumns constructs the column expression set for the
// file table, with every column quoted under the given table alias.
func buildFileuploadFileColumns(alias string) fileuploadFileColumns {
	return fileuploadFileColumns{
		ColumnsExpr: expr.NewColumnsExpr(
			"id", "content_type", "created", "creator_id", "deleted", "name", "organization_id", "status", "size_bytes", "file_uuid", "committer",
		).WithParent("fileupload.file"),
		tableAlias:     alias,
		ID:             psql.Quote(alias, "id"),
		ContentType:    psql.Quote(alias, "content_type"),
		Created:        psql.Quote(alias, "created"),
		CreatorID:      psql.Quote(alias, "creator_id"),
		Deleted:        psql.Quote(alias, "deleted"),
		Name:           psql.Quote(alias, "name"),
		OrganizationID: psql.Quote(alias, "organization_id"),
		Status:         psql.Quote(alias, "status"),
		SizeBytes:      psql.Quote(alias, "size_bytes"),
		FileUUID:       psql.Quote(alias, "file_uuid"),
		Committer:      psql.Quote(alias, "committer"),
	}
}
|
|
|
|
// fileuploadFileColumns bundles the full column list expression with one
// aliased expression per column, for use in query mods.
type fileuploadFileColumns struct {
	expr.ColumnsExpr
	tableAlias     string // alias every column expression below is quoted under
	ID             psql.Expression
	ContentType    psql.Expression
	Created        psql.Expression
	CreatorID      psql.Expression
	Deleted        psql.Expression
	Name           psql.Expression
	OrganizationID psql.Expression
	Status         psql.Expression
	SizeBytes      psql.Expression
	FileUUID       psql.Expression
	Committer      psql.Expression
}
|
|
|
|
// Alias returns the table alias these column expressions were built with.
func (c fileuploadFileColumns) Alias() string {
	return c.tableAlias
}
|
|
|
|
// AliasedAs returns a fresh column set quoted under the given alias.
func (fileuploadFileColumns) AliasedAs(alias string) fileuploadFileColumns {
	return buildFileuploadFileColumns(alias)
}
|
|
|
|
// FileuploadFileSetter is used for insert/upsert/update operations.
// All values are optional, and do not have to be set.
// Generated columns are not included.
// Nullable columns use omitnull.Val so "unset", "set to NULL", and
// "set to a value" are all distinguishable.
type FileuploadFileSetter struct {
	ID             omit.Val[int32]                          `db:"id,pk" `
	ContentType    omit.Val[string]                         `db:"content_type" `
	Created        omit.Val[time.Time]                      `db:"created" `
	CreatorID      omit.Val[int32]                          `db:"creator_id" `
	Deleted        omitnull.Val[time.Time]                  `db:"deleted" `
	Name           omit.Val[string]                         `db:"name" `
	OrganizationID omit.Val[int32]                          `db:"organization_id" `
	Status         omit.Val[enums.FileuploadFilestatustype] `db:"status" `
	SizeBytes      omit.Val[int32]                          `db:"size_bytes" `
	FileUUID       omit.Val[uuid.UUID]                      `db:"file_uuid" `
	Committer      omitnull.Val[int32]                      `db:"committer" `
}
|
|
|
|
func (s FileuploadFileSetter) SetColumns() []string {
|
|
vals := make([]string, 0, 11)
|
|
if s.ID.IsValue() {
|
|
vals = append(vals, "id")
|
|
}
|
|
if s.ContentType.IsValue() {
|
|
vals = append(vals, "content_type")
|
|
}
|
|
if s.Created.IsValue() {
|
|
vals = append(vals, "created")
|
|
}
|
|
if s.CreatorID.IsValue() {
|
|
vals = append(vals, "creator_id")
|
|
}
|
|
if !s.Deleted.IsUnset() {
|
|
vals = append(vals, "deleted")
|
|
}
|
|
if s.Name.IsValue() {
|
|
vals = append(vals, "name")
|
|
}
|
|
if s.OrganizationID.IsValue() {
|
|
vals = append(vals, "organization_id")
|
|
}
|
|
if s.Status.IsValue() {
|
|
vals = append(vals, "status")
|
|
}
|
|
if s.SizeBytes.IsValue() {
|
|
vals = append(vals, "size_bytes")
|
|
}
|
|
if s.FileUUID.IsValue() {
|
|
vals = append(vals, "file_uuid")
|
|
}
|
|
if !s.Committer.IsUnset() {
|
|
vals = append(vals, "committer")
|
|
}
|
|
return vals
|
|
}
|
|
|
|
// Overwrite copies every set value from the setter onto the model in place.
// Unset fields leave the corresponding model field untouched.
func (s FileuploadFileSetter) Overwrite(t *FileuploadFile) {
	if s.ID.IsValue() {
		t.ID = s.ID.MustGet()
	}
	if s.ContentType.IsValue() {
		t.ContentType = s.ContentType.MustGet()
	}
	if s.Created.IsValue() {
		t.Created = s.Created.MustGet()
	}
	if s.CreatorID.IsValue() {
		t.CreatorID = s.CreatorID.MustGet()
	}
	// Nullable column: MustGetNull preserves an explicit NULL as well as a value.
	if !s.Deleted.IsUnset() {
		t.Deleted = s.Deleted.MustGetNull()
	}
	if s.Name.IsValue() {
		t.Name = s.Name.MustGet()
	}
	if s.OrganizationID.IsValue() {
		t.OrganizationID = s.OrganizationID.MustGet()
	}
	if s.Status.IsValue() {
		t.Status = s.Status.MustGet()
	}
	if s.SizeBytes.IsValue() {
		t.SizeBytes = s.SizeBytes.MustGet()
	}
	if s.FileUUID.IsValue() {
		t.FileUUID = s.FileUUID.MustGet()
	}
	// Nullable column: same treatment as Deleted.
	if !s.Committer.IsUnset() {
		t.Committer = s.Committer.MustGetNull()
	}
}
|
|
|
|
// Apply wires this setter into an INSERT query: it registers the
// before-insert hooks and appends one positional VALUES row. Unset fields
// are emitted as the SQL keyword DEFAULT so the database fills them in.
// The slots below are positional and must stay in table-column order.
func (s *FileuploadFileSetter) Apply(q *dialect.InsertQuery) {
	q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
		return FileuploadFiles.BeforeInsertHooks.RunHooks(ctx, exec, s)
	})

	q.AppendValues(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
		vals := make([]bob.Expression, 11)
		if s.ID.IsValue() {
			vals[0] = psql.Arg(s.ID.MustGet())
		} else {
			vals[0] = psql.Raw("DEFAULT")
		}

		if s.ContentType.IsValue() {
			vals[1] = psql.Arg(s.ContentType.MustGet())
		} else {
			vals[1] = psql.Raw("DEFAULT")
		}

		if s.Created.IsValue() {
			vals[2] = psql.Arg(s.Created.MustGet())
		} else {
			vals[2] = psql.Raw("DEFAULT")
		}

		if s.CreatorID.IsValue() {
			vals[3] = psql.Arg(s.CreatorID.MustGet())
		} else {
			vals[3] = psql.Raw("DEFAULT")
		}

		// Nullable column: an explicit NULL is sent as an argument,
		// only a fully-unset value falls back to DEFAULT.
		if !s.Deleted.IsUnset() {
			vals[4] = psql.Arg(s.Deleted.MustGetNull())
		} else {
			vals[4] = psql.Raw("DEFAULT")
		}

		if s.Name.IsValue() {
			vals[5] = psql.Arg(s.Name.MustGet())
		} else {
			vals[5] = psql.Raw("DEFAULT")
		}

		if s.OrganizationID.IsValue() {
			vals[6] = psql.Arg(s.OrganizationID.MustGet())
		} else {
			vals[6] = psql.Raw("DEFAULT")
		}

		if s.Status.IsValue() {
			vals[7] = psql.Arg(s.Status.MustGet())
		} else {
			vals[7] = psql.Raw("DEFAULT")
		}

		if s.SizeBytes.IsValue() {
			vals[8] = psql.Arg(s.SizeBytes.MustGet())
		} else {
			vals[8] = psql.Raw("DEFAULT")
		}

		if s.FileUUID.IsValue() {
			vals[9] = psql.Arg(s.FileUUID.MustGet())
		} else {
			vals[9] = psql.Raw("DEFAULT")
		}

		// Nullable column: same treatment as Deleted above.
		if !s.Committer.IsUnset() {
			vals[10] = psql.Arg(s.Committer.MustGetNull())
		} else {
			vals[10] = psql.Raw("DEFAULT")
		}

		return bob.ExpressSlice(ctx, w, d, start, vals, "", ", ", "")
	}))
}
|
|
|
|
// UpdateMod turns the setter into a SET clause for an UPDATE query,
// covering only the fields that are set.
func (s FileuploadFileSetter) UpdateMod() bob.Mod[*dialect.UpdateQuery] {
	return um.Set(s.Expressions()...)
}
|
|
|
|
// Expressions returns one `"col" = $n` assignment expression per set field,
// in table-column order. An optional prefix (e.g. a table alias) is
// prepended to each quoted column name.
//
// NOTE(review): each branch calls append(prefix, "col") on the same prefix
// slice; this is safe only while cap(prefix) == len(prefix) (true for a
// plain variadic call). Verify no caller passes a pre-allocated slice with
// spare capacity.
func (s FileuploadFileSetter) Expressions(prefix ...string) []bob.Expression {
	exprs := make([]bob.Expression, 0, 11)

	if s.ID.IsValue() {
		exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
			psql.Quote(append(prefix, "id")...),
			psql.Arg(s.ID),
		}})
	}

	if s.ContentType.IsValue() {
		exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
			psql.Quote(append(prefix, "content_type")...),
			psql.Arg(s.ContentType),
		}})
	}

	if s.Created.IsValue() {
		exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
			psql.Quote(append(prefix, "created")...),
			psql.Arg(s.Created),
		}})
	}

	if s.CreatorID.IsValue() {
		exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
			psql.Quote(append(prefix, "creator_id")...),
			psql.Arg(s.CreatorID),
		}})
	}

	// Nullable column: included when set to a value OR explicitly to NULL.
	if !s.Deleted.IsUnset() {
		exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
			psql.Quote(append(prefix, "deleted")...),
			psql.Arg(s.Deleted),
		}})
	}

	if s.Name.IsValue() {
		exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
			psql.Quote(append(prefix, "name")...),
			psql.Arg(s.Name),
		}})
	}

	if s.OrganizationID.IsValue() {
		exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
			psql.Quote(append(prefix, "organization_id")...),
			psql.Arg(s.OrganizationID),
		}})
	}

	if s.Status.IsValue() {
		exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
			psql.Quote(append(prefix, "status")...),
			psql.Arg(s.Status),
		}})
	}

	if s.SizeBytes.IsValue() {
		exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
			psql.Quote(append(prefix, "size_bytes")...),
			psql.Arg(s.SizeBytes),
		}})
	}

	if s.FileUUID.IsValue() {
		exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
			psql.Quote(append(prefix, "file_uuid")...),
			psql.Arg(s.FileUUID),
		}})
	}

	// Nullable column: same treatment as Deleted.
	if !s.Committer.IsUnset() {
		exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
			psql.Quote(append(prefix, "committer")...),
			psql.Arg(s.Committer),
		}})
	}

	return exprs
}
|
|
|
|
// FindFileuploadFile retrieves a single record by primary key
|
|
// If cols is empty Find will return all columns.
|
|
func FindFileuploadFile(ctx context.Context, exec bob.Executor, IDPK int32, cols ...string) (*FileuploadFile, error) {
|
|
if len(cols) == 0 {
|
|
return FileuploadFiles.Query(
|
|
sm.Where(FileuploadFiles.Columns.ID.EQ(psql.Arg(IDPK))),
|
|
).One(ctx, exec)
|
|
}
|
|
|
|
return FileuploadFiles.Query(
|
|
sm.Where(FileuploadFiles.Columns.ID.EQ(psql.Arg(IDPK))),
|
|
sm.Columns(FileuploadFiles.Columns.Only(cols...)),
|
|
).One(ctx, exec)
|
|
}
|
|
|
|
// FileuploadFileExists checks the presence of a single record by primary key.
func FileuploadFileExists(ctx context.Context, exec bob.Executor, IDPK int32) (bool, error) {
	return FileuploadFiles.Query(
		sm.Where(FileuploadFiles.Columns.ID.EQ(psql.Arg(IDPK))),
	).Exists(ctx, exec)
}
|
|
|
|
// AfterQueryHook is called after FileuploadFile is retrieved from the
// database; it dispatches to the after-hooks registered for the query type.
// The context returned by the hooks is intentionally discarded — only the
// error propagates to the caller.
func (o *FileuploadFile) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error {
	var err error

	switch queryType {
	case bob.QueryTypeSelect:
		ctx, err = FileuploadFiles.AfterSelectHooks.RunHooks(ctx, exec, FileuploadFileSlice{o})
	case bob.QueryTypeInsert:
		ctx, err = FileuploadFiles.AfterInsertHooks.RunHooks(ctx, exec, FileuploadFileSlice{o})
	case bob.QueryTypeUpdate:
		ctx, err = FileuploadFiles.AfterUpdateHooks.RunHooks(ctx, exec, FileuploadFileSlice{o})
	case bob.QueryTypeDelete:
		ctx, err = FileuploadFiles.AfterDeleteHooks.RunHooks(ctx, exec, FileuploadFileSlice{o})
	}

	return err
}
|
|
|
|
// primaryKeyVals returns the primary key of the FileuploadFile as a single
// bound SQL argument.
func (o *FileuploadFile) primaryKeyVals() bob.Expression {
	return psql.Arg(o.ID)
}
|
|
|
|
// pkEQ builds the `"fileupload.file"."id" = $1` predicate used by Update
// and Delete to target exactly this row.
func (o *FileuploadFile) pkEQ() dialect.Expression {
	return psql.Quote("fileupload.file", "id").EQ(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
		return o.primaryKeyVals().WriteSQL(ctx, w, d, start)
	}))
}
|
|
|
|
// Update uses an executor to update the FileuploadFile
|
|
func (o *FileuploadFile) Update(ctx context.Context, exec bob.Executor, s *FileuploadFileSetter) error {
|
|
v, err := FileuploadFiles.Update(s.UpdateMod(), um.Where(o.pkEQ())).One(ctx, exec)
|
|
if err != nil {
|
|
return err
|
|
}
|
|
|
|
o.R = v.R
|
|
*o = *v
|
|
|
|
return nil
|
|
}
|
|
|
|
// Delete deletes a single FileuploadFile record with an executor
|
|
func (o *FileuploadFile) Delete(ctx context.Context, exec bob.Executor) error {
|
|
_, err := FileuploadFiles.Delete(dm.Where(o.pkEQ())).Exec(ctx, exec)
|
|
return err
|
|
}
|
|
|
|
// Reload refreshes the FileuploadFile from the database, preserving any
// relationships already loaded on the struct.
func (o *FileuploadFile) Reload(ctx context.Context, exec bob.Executor) error {
	o2, err := FileuploadFiles.Query(
		sm.Where(FileuploadFiles.Columns.ID.EQ(psql.Arg(o.ID))),
	).One(ctx, exec)
	if err != nil {
		return err
	}
	// Keep existing relationships: copy them onto the fresh row before
	// overwriting o wholesale.
	o2.R = o.R
	*o = *o2

	return nil
}
|
|
|
|
// AfterQueryHook is called after FileuploadFileSlice is retrieved from the
// database; it dispatches to the after-hooks registered for the query type.
// The hooks' returned context is discarded — only the error propagates.
func (o FileuploadFileSlice) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error {
	var err error

	switch queryType {
	case bob.QueryTypeSelect:
		ctx, err = FileuploadFiles.AfterSelectHooks.RunHooks(ctx, exec, o)
	case bob.QueryTypeInsert:
		ctx, err = FileuploadFiles.AfterInsertHooks.RunHooks(ctx, exec, o)
	case bob.QueryTypeUpdate:
		ctx, err = FileuploadFiles.AfterUpdateHooks.RunHooks(ctx, exec, o)
	case bob.QueryTypeDelete:
		ctx, err = FileuploadFiles.AfterDeleteHooks.RunHooks(ctx, exec, o)
	}

	return err
}
|
|
|
|
// pkIN builds an `"id" IN ($1, $2, ...)` predicate covering every row in
// the slice. For an empty slice it returns the literal NULL, which matches
// no rows, so bulk operations on an empty slice are a safe no-op.
func (o FileuploadFileSlice) pkIN() dialect.Expression {
	if len(o) == 0 {
		return psql.Raw("NULL")
	}

	return psql.Quote("fileupload.file", "id").In(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
		pkPairs := make([]bob.Expression, len(o))
		for i, row := range o {
			pkPairs[i] = row.primaryKeyVals()
		}
		return bob.ExpressSlice(ctx, w, d, start, pkPairs, "", ", ", "")
	}))
}
|
|
|
|
// copyMatchingRows finds models in the given slice that have the same primary key
|
|
// then it first copies the existing relationships from the old model to the new model
|
|
// and then replaces the old model in the slice with the new model
|
|
func (o FileuploadFileSlice) copyMatchingRows(from ...*FileuploadFile) {
|
|
for i, old := range o {
|
|
for _, new := range from {
|
|
if new.ID != old.ID {
|
|
continue
|
|
}
|
|
new.R = old.R
|
|
o[i] = new
|
|
break
|
|
}
|
|
}
|
|
}
|
|
|
|
// UpdateMod modifies an update query with "WHERE primary_key IN (o...)".
// It also registers the before-update hooks and a loader that, after the
// update runs, folds any returned rows back into this slice (or, if the
// result is some other shape, runs the after-update hooks instead).
func (o FileuploadFileSlice) UpdateMod() bob.Mod[*dialect.UpdateQuery] {
	return bob.ModFunc[*dialect.UpdateQuery](func(q *dialect.UpdateQuery) {
		q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
			return FileuploadFiles.BeforeUpdateHooks.RunHooks(ctx, exec, o)
		})

		q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error {
			var err error
			switch retrieved := retrieved.(type) {
			case *FileuploadFile:
				o.copyMatchingRows(retrieved)
			case []*FileuploadFile:
				o.copyMatchingRows(retrieved...)
			case FileuploadFileSlice:
				o.copyMatchingRows(retrieved...)
			default:
				// If the retrieved value is not a FileuploadFile or a slice
				// of FileuploadFile then run the AfterUpdateHooks on the slice.
				_, err = FileuploadFiles.AfterUpdateHooks.RunHooks(ctx, exec, o)
			}

			return err
		}))

		q.AppendWhere(o.pkIN())
	})
}
|
|
|
|
// DeleteMod modifies a delete query with "WHERE primary_key IN (o...)".
// It also registers the before-delete hooks and a loader that folds any
// returned rows back into this slice (or runs the after-delete hooks if
// the result is some other shape).
func (o FileuploadFileSlice) DeleteMod() bob.Mod[*dialect.DeleteQuery] {
	return bob.ModFunc[*dialect.DeleteQuery](func(q *dialect.DeleteQuery) {
		q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
			return FileuploadFiles.BeforeDeleteHooks.RunHooks(ctx, exec, o)
		})

		q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error {
			var err error
			switch retrieved := retrieved.(type) {
			case *FileuploadFile:
				o.copyMatchingRows(retrieved)
			case []*FileuploadFile:
				o.copyMatchingRows(retrieved...)
			case FileuploadFileSlice:
				o.copyMatchingRows(retrieved...)
			default:
				// If the retrieved value is not a FileuploadFile or a slice
				// of FileuploadFile then run the AfterDeleteHooks on the slice.
				_, err = FileuploadFiles.AfterDeleteHooks.RunHooks(ctx, exec, o)
			}

			return err
		}))

		q.AppendWhere(o.pkIN())
	})
}
|
|
|
|
func (o FileuploadFileSlice) UpdateAll(ctx context.Context, exec bob.Executor, vals FileuploadFileSetter) error {
|
|
if len(o) == 0 {
|
|
return nil
|
|
}
|
|
|
|
_, err := FileuploadFiles.Update(vals.UpdateMod(), o.UpdateMod()).All(ctx, exec)
|
|
return err
|
|
}
|
|
|
|
func (o FileuploadFileSlice) DeleteAll(ctx context.Context, exec bob.Executor) error {
|
|
if len(o) == 0 {
|
|
return nil
|
|
}
|
|
|
|
_, err := FileuploadFiles.Delete(o.DeleteMod()).Exec(ctx, exec)
|
|
return err
|
|
}
|
|
|
|
// ReloadAll refreshes every row in the slice from the database in one
// query, preserving previously-loaded relationships. An empty slice is a
// no-op. Rows that no longer exist in the database are left unchanged in
// the slice (no error is raised for them).
func (o FileuploadFileSlice) ReloadAll(ctx context.Context, exec bob.Executor) error {
	if len(o) == 0 {
		return nil
	}

	o2, err := FileuploadFiles.Query(sm.Where(o.pkIN())).All(ctx, exec)
	if err != nil {
		return err
	}

	o.copyMatchingRows(o2...)

	return nil
}
|
|
|
|
// CSV starts a query for related objects on fileupload.csv
|
|
func (o *FileuploadFile) CSV(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadCSVSQuery {
|
|
return FileuploadCSVS.Query(append(mods,
|
|
sm.Where(FileuploadCSVS.Columns.FileID.EQ(psql.Arg(o.ID))),
|
|
)...)
|
|
}
|
|
|
|
// CSV starts a query for fileupload.csv rows related to any file in the
// slice, using "file_id IN (SELECT unnest($1::integer[]))" so the whole
// slice is bound as a single array argument. Nil entries are skipped.
func (os FileuploadFileSlice) CSV(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadCSVSQuery {
	pkID := make(pgtypes.Array[int32], 0, len(os))
	for _, o := range os {
		if o == nil {
			continue
		}
		pkID = append(pkID, o.ID)
	}
	PKArgExpr := psql.Select(sm.Columns(
		psql.F("unnest", psql.Cast(psql.Arg(pkID), "integer[]")),
	))

	return FileuploadCSVS.Query(append(mods,
		sm.Where(psql.Group(FileuploadCSVS.Columns.FileID).OP("IN", PKArgExpr)),
	)...)
}
|
|
|
|
// ErrorFiles starts a query for related objects on fileupload.error_file
|
|
func (o *FileuploadFile) ErrorFiles(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadErrorFilesQuery {
|
|
return FileuploadErrorFiles.Query(append(mods,
|
|
sm.Where(FileuploadErrorFiles.Columns.FileID.EQ(psql.Arg(o.ID))),
|
|
)...)
|
|
}
|
|
|
|
// ErrorFiles starts a query for fileupload.error_file rows related to any
// file in the slice, binding all primary keys as one integer[] argument.
// Nil entries are skipped.
func (os FileuploadFileSlice) ErrorFiles(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadErrorFilesQuery {
	pkID := make(pgtypes.Array[int32], 0, len(os))
	for _, o := range os {
		if o == nil {
			continue
		}
		pkID = append(pkID, o.ID)
	}
	PKArgExpr := psql.Select(sm.Columns(
		psql.F("unnest", psql.Cast(psql.Arg(pkID), "integer[]")),
	))

	return FileuploadErrorFiles.Query(append(mods,
		sm.Where(psql.Group(FileuploadErrorFiles.Columns.FileID).OP("IN", PKArgExpr)),
	)...)
}
|
|
|
|
// CommitterUser starts a query for the related user_ row referenced by
// this file's nullable committer column. If Committer is NULL the
// comparison matches no rows.
func (o *FileuploadFile) CommitterUser(mods ...bob.Mod[*dialect.SelectQuery]) UsersQuery {
	return Users.Query(append(mods,
		sm.Where(Users.Columns.ID.EQ(psql.Arg(o.Committer))),
	)...)
}
|
|
|
|
// CommitterUser starts a query for user_ rows referenced by any file in
// the slice via the nullable committer column; the values (including
// NULLs) are bound as one integer[] argument. Nil entries are skipped.
func (os FileuploadFileSlice) CommitterUser(mods ...bob.Mod[*dialect.SelectQuery]) UsersQuery {
	pkCommitter := make(pgtypes.Array[null.Val[int32]], 0, len(os))
	for _, o := range os {
		if o == nil {
			continue
		}
		pkCommitter = append(pkCommitter, o.Committer)
	}
	PKArgExpr := psql.Select(sm.Columns(
		psql.F("unnest", psql.Cast(psql.Arg(pkCommitter), "integer[]")),
	))

	return Users.Query(append(mods,
		sm.Where(psql.Group(Users.Columns.ID).OP("IN", PKArgExpr)),
	)...)
}
|
|
|
|
// CreatorUser starts a query for the related user_ row referenced by this
// file's creator_id column.
func (o *FileuploadFile) CreatorUser(mods ...bob.Mod[*dialect.SelectQuery]) UsersQuery {
	return Users.Query(append(mods,
		sm.Where(Users.Columns.ID.EQ(psql.Arg(o.CreatorID))),
	)...)
}
|
|
|
|
// CreatorUser starts a query for user_ rows referenced by any file in the
// slice via creator_id, bound as one integer[] argument. Nil entries are
// skipped.
func (os FileuploadFileSlice) CreatorUser(mods ...bob.Mod[*dialect.SelectQuery]) UsersQuery {
	pkCreatorID := make(pgtypes.Array[int32], 0, len(os))
	for _, o := range os {
		if o == nil {
			continue
		}
		pkCreatorID = append(pkCreatorID, o.CreatorID)
	}
	PKArgExpr := psql.Select(sm.Columns(
		psql.F("unnest", psql.Cast(psql.Arg(pkCreatorID), "integer[]")),
	))

	return Users.Query(append(mods,
		sm.Where(psql.Group(Users.Columns.ID).OP("IN", PKArgExpr)),
	)...)
}
|
|
|
|
// Organization starts a query for the related organization row referenced
// by this file's organization_id column.
func (o *FileuploadFile) Organization(mods ...bob.Mod[*dialect.SelectQuery]) OrganizationsQuery {
	return Organizations.Query(append(mods,
		sm.Where(Organizations.Columns.ID.EQ(psql.Arg(o.OrganizationID))),
	)...)
}
|
|
|
|
// Organization starts a query for organization rows referenced by any file
// in the slice via organization_id, bound as one integer[] argument. Nil
// entries are skipped.
func (os FileuploadFileSlice) Organization(mods ...bob.Mod[*dialect.SelectQuery]) OrganizationsQuery {
	pkOrganizationID := make(pgtypes.Array[int32], 0, len(os))
	for _, o := range os {
		if o == nil {
			continue
		}
		pkOrganizationID = append(pkOrganizationID, o.OrganizationID)
	}
	PKArgExpr := psql.Select(sm.Columns(
		psql.F("unnest", psql.Cast(psql.Arg(pkOrganizationID), "integer[]")),
	))

	return Organizations.Query(append(mods,
		sm.Where(psql.Group(Organizations.Columns.ID).OP("IN", PKArgExpr)),
	)...)
}
|
|
|
|
// Sites starts a query for related objects on site
|
|
func (o *FileuploadFile) Sites(mods ...bob.Mod[*dialect.SelectQuery]) SitesQuery {
|
|
return Sites.Query(append(mods,
|
|
sm.Where(Sites.Columns.FileID.EQ(psql.Arg(o.ID))),
|
|
)...)
|
|
}
|
|
|
|
// Sites starts a query for site rows related to any file in the slice,
// binding all primary keys as one integer[] argument. Nil entries are
// skipped.
func (os FileuploadFileSlice) Sites(mods ...bob.Mod[*dialect.SelectQuery]) SitesQuery {
	pkID := make(pgtypes.Array[int32], 0, len(os))
	for _, o := range os {
		if o == nil {
			continue
		}
		pkID = append(pkID, o.ID)
	}
	PKArgExpr := psql.Select(sm.Columns(
		psql.F("unnest", psql.Cast(psql.Arg(pkID), "integer[]")),
	))

	return Sites.Query(append(mods,
		sm.Where(psql.Group(Sites.Columns.FileID).OP("IN", PKArgExpr)),
	)...)
}
|
|
|
|
// insertFileuploadFileCSV0 inserts a fileupload.csv row pointing at the
// given file, overwriting any FileID already present on the setter.
func insertFileuploadFileCSV0(ctx context.Context, exec bob.Executor, fileuploadCSV1 *FileuploadCSVSetter, fileuploadFile0 *FileuploadFile) (*FileuploadCSV, error) {
	fileuploadCSV1.FileID = omit.From(fileuploadFile0.ID)

	ret, err := FileuploadCSVS.Insert(fileuploadCSV1).One(ctx, exec)
	if err != nil {
		return ret, fmt.Errorf("insertFileuploadFileCSV0: %w", err)
	}

	return ret, nil
}
|
|
|
|
// attachFileuploadFileCSV0 repoints an existing fileupload.csv row at the
// given file by updating its file_id column.
// NOTE(review): count is unused; presumably kept for signature parity with
// the other generated attach helpers.
func attachFileuploadFileCSV0(ctx context.Context, exec bob.Executor, count int, fileuploadCSV1 *FileuploadCSV, fileuploadFile0 *FileuploadFile) (*FileuploadCSV, error) {
	setter := &FileuploadCSVSetter{
		FileID: omit.From(fileuploadFile0.ID),
	}

	err := fileuploadCSV1.Update(ctx, exec, setter)
	if err != nil {
		return nil, fmt.Errorf("attachFileuploadFileCSV0: %w", err)
	}

	return fileuploadCSV1, nil
}
|
|
|
|
// InsertCSV inserts a related fileupload.csv row for this file and links
// the relationship structs on both sides.
func (fileuploadFile0 *FileuploadFile) InsertCSV(ctx context.Context, exec bob.Executor, related *FileuploadCSVSetter) error {
	var err error

	fileuploadCSV1, err := insertFileuploadFileCSV0(ctx, exec, related, fileuploadFile0)
	if err != nil {
		return err
	}

	fileuploadFile0.R.CSV = fileuploadCSV1

	fileuploadCSV1.R.File = fileuploadFile0

	return nil
}
|
|
|
|
// AttachCSV links an existing fileupload.csv row to this file (updating
// its file_id) and wires the relationship structs on both sides.
func (fileuploadFile0 *FileuploadFile) AttachCSV(ctx context.Context, exec bob.Executor, fileuploadCSV1 *FileuploadCSV) error {
	var err error

	_, err = attachFileuploadFileCSV0(ctx, exec, 1, fileuploadCSV1, fileuploadFile0)
	if err != nil {
		return err
	}

	fileuploadFile0.R.CSV = fileuploadCSV1

	fileuploadCSV1.R.File = fileuploadFile0

	return nil
}
|
|
|
|
// insertFileuploadFileErrorFiles0 inserts fileupload.error_file rows
// pointing at the given file, overwriting any FileID on each setter.
func insertFileuploadFileErrorFiles0(ctx context.Context, exec bob.Executor, fileuploadErrorFiles1 []*FileuploadErrorFileSetter, fileuploadFile0 *FileuploadFile) (FileuploadErrorFileSlice, error) {
	for i := range fileuploadErrorFiles1 {
		fileuploadErrorFiles1[i].FileID = omit.From(fileuploadFile0.ID)
	}

	ret, err := FileuploadErrorFiles.Insert(bob.ToMods(fileuploadErrorFiles1...)).All(ctx, exec)
	if err != nil {
		return ret, fmt.Errorf("insertFileuploadFileErrorFiles0: %w", err)
	}

	return ret, nil
}
|
|
|
|
// attachFileuploadFileErrorFiles0 repoints existing fileupload.error_file
// rows at the given file with one bulk UPDATE of their file_id column.
// NOTE(review): count is unused; presumably kept for signature parity with
// the other generated attach helpers.
func attachFileuploadFileErrorFiles0(ctx context.Context, exec bob.Executor, count int, fileuploadErrorFiles1 FileuploadErrorFileSlice, fileuploadFile0 *FileuploadFile) (FileuploadErrorFileSlice, error) {
	setter := &FileuploadErrorFileSetter{
		FileID: omit.From(fileuploadFile0.ID),
	}

	err := fileuploadErrorFiles1.UpdateAll(ctx, exec, *setter)
	if err != nil {
		return nil, fmt.Errorf("attachFileuploadFileErrorFiles0: %w", err)
	}

	return fileuploadErrorFiles1, nil
}
|
|
|
|
// InsertErrorFiles inserts related fileupload.error_file rows for this
// file and appends them to the relationship structs on both sides.
// A call with no setters is a no-op.
func (fileuploadFile0 *FileuploadFile) InsertErrorFiles(ctx context.Context, exec bob.Executor, related ...*FileuploadErrorFileSetter) error {
	if len(related) == 0 {
		return nil
	}

	var err error

	fileuploadErrorFiles1, err := insertFileuploadFileErrorFiles0(ctx, exec, related, fileuploadFile0)
	if err != nil {
		return err
	}

	fileuploadFile0.R.ErrorFiles = append(fileuploadFile0.R.ErrorFiles, fileuploadErrorFiles1...)

	for _, rel := range fileuploadErrorFiles1 {
		rel.R.File = fileuploadFile0
	}
	return nil
}
|
|
|
|
// AttachErrorFiles links existing fileupload.error_file rows to this file
// (bulk-updating their file_id) and appends them to the relationship
// structs on both sides. A call with no rows is a no-op.
func (fileuploadFile0 *FileuploadFile) AttachErrorFiles(ctx context.Context, exec bob.Executor, related ...*FileuploadErrorFile) error {
	if len(related) == 0 {
		return nil
	}

	var err error
	fileuploadErrorFiles1 := FileuploadErrorFileSlice(related)

	_, err = attachFileuploadFileErrorFiles0(ctx, exec, len(related), fileuploadErrorFiles1, fileuploadFile0)
	if err != nil {
		return err
	}

	fileuploadFile0.R.ErrorFiles = append(fileuploadFile0.R.ErrorFiles, fileuploadErrorFiles1...)

	for _, rel := range related {
		rel.R.File = fileuploadFile0
	}

	return nil
}
|
|
|
|
// attachFileuploadFileCommitterUser0 sets this file's nullable committer
// column to the given user's ID via an UPDATE.
// NOTE(review): count is unused; presumably kept for signature parity with
// the other generated attach helpers.
func attachFileuploadFileCommitterUser0(ctx context.Context, exec bob.Executor, count int, fileuploadFile0 *FileuploadFile, user1 *User) (*FileuploadFile, error) {
	setter := &FileuploadFileSetter{
		Committer: omitnull.From(user1.ID),
	}

	err := fileuploadFile0.Update(ctx, exec, setter)
	if err != nil {
		return nil, fmt.Errorf("attachFileuploadFileCommitterUser0: %w", err)
	}

	return fileuploadFile0, nil
}
|
|
|
|
// InsertCommitterUser inserts a new user_ row, points this file's
// committer column at it, and wires the relationship structs on both sides.
func (fileuploadFile0 *FileuploadFile) InsertCommitterUser(ctx context.Context, exec bob.Executor, related *UserSetter) error {
	var err error

	user1, err := Users.Insert(related).One(ctx, exec)
	if err != nil {
		return fmt.Errorf("inserting related objects: %w", err)
	}

	_, err = attachFileuploadFileCommitterUser0(ctx, exec, 1, fileuploadFile0, user1)
	if err != nil {
		return err
	}

	fileuploadFile0.R.CommitterUser = user1

	user1.R.CommitterFiles = append(user1.R.CommitterFiles, fileuploadFile0)

	return nil
}
|
|
|
|
// AttachCommitterUser points this file's committer column at an existing
// user and wires the relationship structs on both sides.
func (fileuploadFile0 *FileuploadFile) AttachCommitterUser(ctx context.Context, exec bob.Executor, user1 *User) error {
	var err error

	_, err = attachFileuploadFileCommitterUser0(ctx, exec, 1, fileuploadFile0, user1)
	if err != nil {
		return err
	}

	fileuploadFile0.R.CommitterUser = user1

	user1.R.CommitterFiles = append(user1.R.CommitterFiles, fileuploadFile0)

	return nil
}
|
|
|
|
// attachFileuploadFileCreatorUser0 sets this file's creator_id column to
// the given user's ID via an UPDATE.
// NOTE(review): count is unused; presumably kept for signature parity with
// the other generated attach helpers.
func attachFileuploadFileCreatorUser0(ctx context.Context, exec bob.Executor, count int, fileuploadFile0 *FileuploadFile, user1 *User) (*FileuploadFile, error) {
	setter := &FileuploadFileSetter{
		CreatorID: omit.From(user1.ID),
	}

	err := fileuploadFile0.Update(ctx, exec, setter)
	if err != nil {
		return nil, fmt.Errorf("attachFileuploadFileCreatorUser0: %w", err)
	}

	return fileuploadFile0, nil
}
|
|
|
|
// InsertCreatorUser inserts a new user_ row, points this file's
// creator_id column at it, and wires the relationship structs on both sides.
func (fileuploadFile0 *FileuploadFile) InsertCreatorUser(ctx context.Context, exec bob.Executor, related *UserSetter) error {
	var err error

	user1, err := Users.Insert(related).One(ctx, exec)
	if err != nil {
		return fmt.Errorf("inserting related objects: %w", err)
	}

	_, err = attachFileuploadFileCreatorUser0(ctx, exec, 1, fileuploadFile0, user1)
	if err != nil {
		return err
	}

	fileuploadFile0.R.CreatorUser = user1

	user1.R.CreatorFiles = append(user1.R.CreatorFiles, fileuploadFile0)

	return nil
}
|
|
|
|
// AttachCreatorUser points this file's creator_id column at an existing
// user and wires the relationship structs on both sides.
func (fileuploadFile0 *FileuploadFile) AttachCreatorUser(ctx context.Context, exec bob.Executor, user1 *User) error {
	var err error

	_, err = attachFileuploadFileCreatorUser0(ctx, exec, 1, fileuploadFile0, user1)
	if err != nil {
		return err
	}

	fileuploadFile0.R.CreatorUser = user1

	user1.R.CreatorFiles = append(user1.R.CreatorFiles, fileuploadFile0)

	return nil
}
|
|
|
|
// attachFileuploadFileOrganization0 sets this file's organization_id
// column to the given organization's ID via an UPDATE.
// NOTE(review): count is unused; presumably kept for signature parity with
// the other generated attach helpers.
func attachFileuploadFileOrganization0(ctx context.Context, exec bob.Executor, count int, fileuploadFile0 *FileuploadFile, organization1 *Organization) (*FileuploadFile, error) {
	setter := &FileuploadFileSetter{
		OrganizationID: omit.From(organization1.ID),
	}

	err := fileuploadFile0.Update(ctx, exec, setter)
	if err != nil {
		return nil, fmt.Errorf("attachFileuploadFileOrganization0: %w", err)
	}

	return fileuploadFile0, nil
}
|
|
|
|
// InsertOrganization inserts a new organization row, points this file's
// organization_id column at it, and wires the relationship structs on
// both sides.
func (fileuploadFile0 *FileuploadFile) InsertOrganization(ctx context.Context, exec bob.Executor, related *OrganizationSetter) error {
	var err error

	organization1, err := Organizations.Insert(related).One(ctx, exec)
	if err != nil {
		return fmt.Errorf("inserting related objects: %w", err)
	}

	_, err = attachFileuploadFileOrganization0(ctx, exec, 1, fileuploadFile0, organization1)
	if err != nil {
		return err
	}

	fileuploadFile0.R.Organization = organization1

	organization1.R.Files = append(organization1.R.Files, fileuploadFile0)

	return nil
}
|
|
|
|
// AttachOrganization points this file's organization_id column at an
// existing organization and wires the relationship structs on both sides.
func (fileuploadFile0 *FileuploadFile) AttachOrganization(ctx context.Context, exec bob.Executor, organization1 *Organization) error {
	var err error

	_, err = attachFileuploadFileOrganization0(ctx, exec, 1, fileuploadFile0, organization1)
	if err != nil {
		return err
	}

	fileuploadFile0.R.Organization = organization1

	organization1.R.Files = append(organization1.R.Files, fileuploadFile0)

	return nil
}
|
|
|
|
func insertFileuploadFileSites0(ctx context.Context, exec bob.Executor, sites1 []*SiteSetter, fileuploadFile0 *FileuploadFile) (SiteSlice, error) {
|
|
for i := range sites1 {
|
|
sites1[i].FileID = omitnull.From(fileuploadFile0.ID)
|
|
}
|
|
|
|
ret, err := Sites.Insert(bob.ToMods(sites1...)).All(ctx, exec)
|
|
if err != nil {
|
|
return ret, fmt.Errorf("insertFileuploadFileSites0: %w", err)
|
|
}
|
|
|
|
return ret, nil
|
|
}
|
|
|
|
func attachFileuploadFileSites0(ctx context.Context, exec bob.Executor, count int, sites1 SiteSlice, fileuploadFile0 *FileuploadFile) (SiteSlice, error) {
|
|
setter := &SiteSetter{
|
|
FileID: omitnull.From(fileuploadFile0.ID),
|
|
}
|
|
|
|
err := sites1.UpdateAll(ctx, exec, *setter)
|
|
if err != nil {
|
|
return nil, fmt.Errorf("attachFileuploadFileSites0: %w", err)
|
|
}
|
|
|
|
return sites1, nil
|
|
}
|
|
|
|
func (fileuploadFile0 *FileuploadFile) InsertSites(ctx context.Context, exec bob.Executor, related ...*SiteSetter) error {
|
|
if len(related) == 0 {
|
|
return nil
|
|
}
|
|
|
|
var err error
|
|
|
|
sites1, err := insertFileuploadFileSites0(ctx, exec, related, fileuploadFile0)
|
|
if err != nil {
|
|
return err
|
|
}
|
|
|
|
fileuploadFile0.R.Sites = append(fileuploadFile0.R.Sites, sites1...)
|
|
|
|
for _, rel := range sites1 {
|
|
rel.R.File = fileuploadFile0
|
|
}
|
|
return nil
|
|
}
|
|
|
|
func (fileuploadFile0 *FileuploadFile) AttachSites(ctx context.Context, exec bob.Executor, related ...*Site) error {
|
|
if len(related) == 0 {
|
|
return nil
|
|
}
|
|
|
|
var err error
|
|
sites1 := SiteSlice(related)
|
|
|
|
_, err = attachFileuploadFileSites0(ctx, exec, len(related), sites1, fileuploadFile0)
|
|
if err != nil {
|
|
return err
|
|
}
|
|
|
|
fileuploadFile0.R.Sites = append(fileuploadFile0.R.Sites, sites1...)
|
|
|
|
for _, rel := range related {
|
|
rel.R.File = fileuploadFile0
|
|
}
|
|
|
|
return nil
|
|
}
|
|
|
|
// fileuploadFileWhere exposes a typed WHERE-clause builder for every column
// of fileupload.file. Nullable columns (Deleted, Committer) use the
// WhereNullMod variant, which additionally supports IS NULL / IS NOT NULL.
type fileuploadFileWhere[Q psql.Filterable] struct {
	ID psql.WhereMod[Q, int32]

	ContentType psql.WhereMod[Q, string]

	Created psql.WhereMod[Q, time.Time]

	CreatorID psql.WhereMod[Q, int32]

	Deleted psql.WhereNullMod[Q, time.Time]

	Name psql.WhereMod[Q, string]

	OrganizationID psql.WhereMod[Q, int32]

	Status psql.WhereMod[Q, enums.FileuploadFilestatustype]

	SizeBytes psql.WhereMod[Q, int32]

	FileUUID psql.WhereMod[Q, uuid.UUID]

	Committer psql.WhereNullMod[Q, int32]
}
|
|
|
|
func (fileuploadFileWhere[Q]) AliasedAs(alias string) fileuploadFileWhere[Q] {
|
|
return buildFileuploadFileWhere[Q](buildFileuploadFileColumns(alias))
|
|
}
|
|
|
|
// buildFileuploadFileWhere constructs the WHERE-clause helpers from a set of
// (possibly aliased) column expressions. One entry per table column; nullable
// columns get psql.WhereNull builders.
func buildFileuploadFileWhere[Q psql.Filterable](cols fileuploadFileColumns) fileuploadFileWhere[Q] {
	return fileuploadFileWhere[Q]{
		ID: psql.Where[Q, int32](cols.ID),

		ContentType: psql.Where[Q, string](cols.ContentType),

		Created: psql.Where[Q, time.Time](cols.Created),

		CreatorID: psql.Where[Q, int32](cols.CreatorID),

		Deleted: psql.WhereNull[Q, time.Time](cols.Deleted),

		Name: psql.Where[Q, string](cols.Name),

		OrganizationID: psql.Where[Q, int32](cols.OrganizationID),

		Status: psql.Where[Q, enums.FileuploadFilestatustype](cols.Status),

		SizeBytes: psql.Where[Q, int32](cols.SizeBytes),

		FileUUID: psql.Where[Q, uuid.UUID](cols.FileUUID),

		Committer: psql.WhereNull[Q, int32](cols.Committer),
	}
}
|
|
|
|
// Preload stores an eagerly-retrieved relation on the file's R struct.
// name selects the relationship ("CSV", "ErrorFiles", "CommitterUser",
// "CreatorUser", "Organization", "Sites"); retrieved must be the matching
// model pointer or slice type, otherwise an error is returned. The inverse
// side of each relation is also pointed back at this file. A nil receiver
// is a no-op.
func (o *FileuploadFile) Preload(name string, retrieved any) error {
	if o == nil {
		return nil
	}

	switch name {
	case "CSV":
		rel, ok := retrieved.(*FileuploadCSV)
		if !ok {
			return fmt.Errorf("fileuploadFile cannot load %T as %q", retrieved, name)
		}

		o.R.CSV = rel

		if rel != nil {
			rel.R.File = o
		}
		return nil
	case "ErrorFiles":
		rels, ok := retrieved.(FileuploadErrorFileSlice)
		if !ok {
			return fmt.Errorf("fileuploadFile cannot load %T as %q", retrieved, name)
		}

		o.R.ErrorFiles = rels

		// To-many inverse: each error file points back at this file.
		for _, rel := range rels {
			if rel != nil {
				rel.R.File = o
			}
		}
		return nil
	case "CommitterUser":
		rel, ok := retrieved.(*User)
		if !ok {
			return fmt.Errorf("fileuploadFile cannot load %T as %q", retrieved, name)
		}

		o.R.CommitterUser = rel

		if rel != nil {
			// Inverse to-many side is replaced (not appended to) on preload.
			rel.R.CommitterFiles = FileuploadFileSlice{o}
		}
		return nil
	case "CreatorUser":
		rel, ok := retrieved.(*User)
		if !ok {
			return fmt.Errorf("fileuploadFile cannot load %T as %q", retrieved, name)
		}

		o.R.CreatorUser = rel

		if rel != nil {
			// Inverse to-many side is replaced (not appended to) on preload.
			rel.R.CreatorFiles = FileuploadFileSlice{o}
		}
		return nil
	case "Organization":
		rel, ok := retrieved.(*Organization)
		if !ok {
			return fmt.Errorf("fileuploadFile cannot load %T as %q", retrieved, name)
		}

		o.R.Organization = rel

		if rel != nil {
			// Inverse to-many side is replaced (not appended to) on preload.
			rel.R.Files = FileuploadFileSlice{o}
		}
		return nil
	case "Sites":
		rels, ok := retrieved.(SiteSlice)
		if !ok {
			return fmt.Errorf("fileuploadFile cannot load %T as %q", retrieved, name)
		}

		o.R.Sites = rels

		// To-many inverse: each site points back at this file.
		for _, rel := range rels {
			if rel != nil {
				rel.R.File = o
			}
		}
		return nil
	default:
		return fmt.Errorf("fileuploadFile has no relationship %q", name)
	}
}
|
|
|
|
// fileuploadFilePreloader holds factories that build psql.Preloader mods for
// the file's to-one relationships (loaded via a join in the same query).
// To-many relations (ErrorFiles, Sites) use the then-loaders instead.
type fileuploadFilePreloader struct {
	CSV func(...psql.PreloadOption) psql.Preloader

	CommitterUser func(...psql.PreloadOption) psql.Preloader

	CreatorUser func(...psql.PreloadOption) psql.Preloader

	Organization func(...psql.PreloadOption) psql.Preloader
}
|
|
|
|
// buildFileuploadFilePreloader constructs the preloader factories for the
// file's to-one relations. Each PreloadRel describes one join side:
// which parent/child tables are involved and which columns pair up.
func buildFileuploadFilePreloader() fileuploadFilePreloader {
	return fileuploadFilePreloader{
		// CSV: fileupload.file.id = csv.file_id.
		CSV: func(opts ...psql.PreloadOption) psql.Preloader {
			return psql.Preload[*FileuploadCSV, FileuploadCSVSlice](psql.PreloadRel{
				Name: "CSV",
				Sides: []psql.PreloadSide{
					{
						From:        FileuploadFiles,
						To:          FileuploadCSVS,
						FromColumns: []string{"id"},
						ToColumns:   []string{"file_id"},
					},
				},
			}, FileuploadCSVS.Columns.Names(), opts...)
		},

		// CommitterUser: fileupload.file.committer = users.id (nullable FK).
		CommitterUser: func(opts ...psql.PreloadOption) psql.Preloader {
			return psql.Preload[*User, UserSlice](psql.PreloadRel{
				Name: "CommitterUser",
				Sides: []psql.PreloadSide{
					{
						From:        FileuploadFiles,
						To:          Users,
						FromColumns: []string{"committer"},
						ToColumns:   []string{"id"},
					},
				},
			}, Users.Columns.Names(), opts...)
		},

		// CreatorUser: fileupload.file.creator_id = users.id.
		CreatorUser: func(opts ...psql.PreloadOption) psql.Preloader {
			return psql.Preload[*User, UserSlice](psql.PreloadRel{
				Name: "CreatorUser",
				Sides: []psql.PreloadSide{
					{
						From:        FileuploadFiles,
						To:          Users,
						FromColumns: []string{"creator_id"},
						ToColumns:   []string{"id"},
					},
				},
			}, Users.Columns.Names(), opts...)
		},

		// Organization: fileupload.file.organization_id = organizations.id.
		Organization: func(opts ...psql.PreloadOption) psql.Preloader {
			return psql.Preload[*Organization, OrganizationSlice](psql.PreloadRel{
				Name: "Organization",
				Sides: []psql.PreloadSide{
					{
						From:        FileuploadFiles,
						To:          Organizations,
						FromColumns: []string{"organization_id"},
						ToColumns:   []string{"id"},
					},
				},
			}, Organizations.Columns.Names(), opts...)
		},
	}
}
|
|
|
|
// fileuploadFileThenLoader holds factories that build orm.Loader mods which
// run a second query after the main one ("then load") for each relationship
// of fileupload.file.
type fileuploadFileThenLoader[Q orm.Loadable] struct {
	CSV func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]

	ErrorFiles func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]

	CommitterUser func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]

	CreatorUser func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]

	Organization func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]

	Sites func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
}
|
|
|
|
// buildFileuploadFileThenLoader constructs the then-loader factories.
// Each local interface describes the single LoadXxx method a retrieved value
// must implement; thenLoadBuilder dispatches to it after the main query runs.
func buildFileuploadFileThenLoader[Q orm.Loadable]() fileuploadFileThenLoader[Q] {
	type CSVLoadInterface interface {
		LoadCSV(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
	}
	type ErrorFilesLoadInterface interface {
		LoadErrorFiles(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
	}
	type CommitterUserLoadInterface interface {
		LoadCommitterUser(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
	}
	type CreatorUserLoadInterface interface {
		LoadCreatorUser(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
	}
	type OrganizationLoadInterface interface {
		LoadOrganization(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
	}
	type SitesLoadInterface interface {
		LoadSites(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
	}

	return fileuploadFileThenLoader[Q]{
		CSV: thenLoadBuilder[Q](
			"CSV",
			func(ctx context.Context, exec bob.Executor, retrieved CSVLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
				return retrieved.LoadCSV(ctx, exec, mods...)
			},
		),
		ErrorFiles: thenLoadBuilder[Q](
			"ErrorFiles",
			func(ctx context.Context, exec bob.Executor, retrieved ErrorFilesLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
				return retrieved.LoadErrorFiles(ctx, exec, mods...)
			},
		),
		CommitterUser: thenLoadBuilder[Q](
			"CommitterUser",
			func(ctx context.Context, exec bob.Executor, retrieved CommitterUserLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
				return retrieved.LoadCommitterUser(ctx, exec, mods...)
			},
		),
		CreatorUser: thenLoadBuilder[Q](
			"CreatorUser",
			func(ctx context.Context, exec bob.Executor, retrieved CreatorUserLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
				return retrieved.LoadCreatorUser(ctx, exec, mods...)
			},
		),
		Organization: thenLoadBuilder[Q](
			"Organization",
			func(ctx context.Context, exec bob.Executor, retrieved OrganizationLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
				return retrieved.LoadOrganization(ctx, exec, mods...)
			},
		),
		Sites: thenLoadBuilder[Q](
			"Sites",
			func(ctx context.Context, exec bob.Executor, retrieved SitesLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
				return retrieved.LoadSites(ctx, exec, mods...)
			},
		),
	}
}
|
|
|
|
// LoadCSV loads the fileuploadFile's CSV into the .R struct
|
|
func (o *FileuploadFile) LoadCSV(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
|
if o == nil {
|
|
return nil
|
|
}
|
|
|
|
// Reset the relationship
|
|
o.R.CSV = nil
|
|
|
|
related, err := o.CSV(mods...).One(ctx, exec)
|
|
if err != nil {
|
|
return err
|
|
}
|
|
|
|
related.R.File = o
|
|
|
|
o.R.CSV = related
|
|
return nil
|
|
}
|
|
|
|
// LoadCSV loads the fileuploadFile's CSV into the .R struct
|
|
func (os FileuploadFileSlice) LoadCSV(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
|
if len(os) == 0 {
|
|
return nil
|
|
}
|
|
|
|
fileuploadCSVS, err := os.CSV(mods...).All(ctx, exec)
|
|
if err != nil {
|
|
return err
|
|
}
|
|
|
|
for _, o := range os {
|
|
if o == nil {
|
|
continue
|
|
}
|
|
|
|
for _, rel := range fileuploadCSVS {
|
|
|
|
if !(o.ID == rel.FileID) {
|
|
continue
|
|
}
|
|
|
|
rel.R.File = o
|
|
|
|
o.R.CSV = rel
|
|
break
|
|
}
|
|
}
|
|
|
|
return nil
|
|
}
|
|
|
|
// LoadErrorFiles loads the fileuploadFile's ErrorFiles into the .R struct
|
|
func (o *FileuploadFile) LoadErrorFiles(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
|
if o == nil {
|
|
return nil
|
|
}
|
|
|
|
// Reset the relationship
|
|
o.R.ErrorFiles = nil
|
|
|
|
related, err := o.ErrorFiles(mods...).All(ctx, exec)
|
|
if err != nil {
|
|
return err
|
|
}
|
|
|
|
for _, rel := range related {
|
|
rel.R.File = o
|
|
}
|
|
|
|
o.R.ErrorFiles = related
|
|
return nil
|
|
}
|
|
|
|
// LoadErrorFiles loads the fileuploadFile's ErrorFiles into the .R struct
|
|
func (os FileuploadFileSlice) LoadErrorFiles(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
|
if len(os) == 0 {
|
|
return nil
|
|
}
|
|
|
|
fileuploadErrorFiles, err := os.ErrorFiles(mods...).All(ctx, exec)
|
|
if err != nil {
|
|
return err
|
|
}
|
|
|
|
for _, o := range os {
|
|
if o == nil {
|
|
continue
|
|
}
|
|
|
|
o.R.ErrorFiles = nil
|
|
}
|
|
|
|
for _, o := range os {
|
|
if o == nil {
|
|
continue
|
|
}
|
|
|
|
for _, rel := range fileuploadErrorFiles {
|
|
|
|
if !(o.ID == rel.FileID) {
|
|
continue
|
|
}
|
|
|
|
rel.R.File = o
|
|
|
|
o.R.ErrorFiles = append(o.R.ErrorFiles, rel)
|
|
}
|
|
}
|
|
|
|
return nil
|
|
}
|
|
|
|
// LoadCommitterUser loads the fileuploadFile's CommitterUser into the .R struct
|
|
func (o *FileuploadFile) LoadCommitterUser(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
|
if o == nil {
|
|
return nil
|
|
}
|
|
|
|
// Reset the relationship
|
|
o.R.CommitterUser = nil
|
|
|
|
related, err := o.CommitterUser(mods...).One(ctx, exec)
|
|
if err != nil {
|
|
return err
|
|
}
|
|
|
|
related.R.CommitterFiles = FileuploadFileSlice{o}
|
|
|
|
o.R.CommitterUser = related
|
|
return nil
|
|
}
|
|
|
|
// LoadCommitterUser loads the fileuploadFile's CommitterUser into the .R struct
|
|
func (os FileuploadFileSlice) LoadCommitterUser(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
|
if len(os) == 0 {
|
|
return nil
|
|
}
|
|
|
|
users, err := os.CommitterUser(mods...).All(ctx, exec)
|
|
if err != nil {
|
|
return err
|
|
}
|
|
|
|
for _, o := range os {
|
|
if o == nil {
|
|
continue
|
|
}
|
|
|
|
for _, rel := range users {
|
|
if !o.Committer.IsValue() {
|
|
continue
|
|
}
|
|
|
|
if !(o.Committer.IsValue() && o.Committer.MustGet() == rel.ID) {
|
|
continue
|
|
}
|
|
|
|
rel.R.CommitterFiles = append(rel.R.CommitterFiles, o)
|
|
|
|
o.R.CommitterUser = rel
|
|
break
|
|
}
|
|
}
|
|
|
|
return nil
|
|
}
|
|
|
|
// LoadCreatorUser loads the fileuploadFile's CreatorUser into the .R struct
|
|
func (o *FileuploadFile) LoadCreatorUser(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
|
if o == nil {
|
|
return nil
|
|
}
|
|
|
|
// Reset the relationship
|
|
o.R.CreatorUser = nil
|
|
|
|
related, err := o.CreatorUser(mods...).One(ctx, exec)
|
|
if err != nil {
|
|
return err
|
|
}
|
|
|
|
related.R.CreatorFiles = FileuploadFileSlice{o}
|
|
|
|
o.R.CreatorUser = related
|
|
return nil
|
|
}
|
|
|
|
// LoadCreatorUser loads the fileuploadFile's CreatorUser into the .R struct
|
|
func (os FileuploadFileSlice) LoadCreatorUser(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
|
if len(os) == 0 {
|
|
return nil
|
|
}
|
|
|
|
users, err := os.CreatorUser(mods...).All(ctx, exec)
|
|
if err != nil {
|
|
return err
|
|
}
|
|
|
|
for _, o := range os {
|
|
if o == nil {
|
|
continue
|
|
}
|
|
|
|
for _, rel := range users {
|
|
|
|
if !(o.CreatorID == rel.ID) {
|
|
continue
|
|
}
|
|
|
|
rel.R.CreatorFiles = append(rel.R.CreatorFiles, o)
|
|
|
|
o.R.CreatorUser = rel
|
|
break
|
|
}
|
|
}
|
|
|
|
return nil
|
|
}
|
|
|
|
// LoadOrganization loads the fileuploadFile's Organization into the .R struct
|
|
func (o *FileuploadFile) LoadOrganization(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
|
if o == nil {
|
|
return nil
|
|
}
|
|
|
|
// Reset the relationship
|
|
o.R.Organization = nil
|
|
|
|
related, err := o.Organization(mods...).One(ctx, exec)
|
|
if err != nil {
|
|
return err
|
|
}
|
|
|
|
related.R.Files = FileuploadFileSlice{o}
|
|
|
|
o.R.Organization = related
|
|
return nil
|
|
}
|
|
|
|
// LoadOrganization loads the fileuploadFile's Organization into the .R struct
|
|
func (os FileuploadFileSlice) LoadOrganization(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
|
if len(os) == 0 {
|
|
return nil
|
|
}
|
|
|
|
organizations, err := os.Organization(mods...).All(ctx, exec)
|
|
if err != nil {
|
|
return err
|
|
}
|
|
|
|
for _, o := range os {
|
|
if o == nil {
|
|
continue
|
|
}
|
|
|
|
for _, rel := range organizations {
|
|
|
|
if !(o.OrganizationID == rel.ID) {
|
|
continue
|
|
}
|
|
|
|
rel.R.Files = append(rel.R.Files, o)
|
|
|
|
o.R.Organization = rel
|
|
break
|
|
}
|
|
}
|
|
|
|
return nil
|
|
}
|
|
|
|
// LoadSites loads the fileuploadFile's Sites into the .R struct
|
|
func (o *FileuploadFile) LoadSites(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
|
if o == nil {
|
|
return nil
|
|
}
|
|
|
|
// Reset the relationship
|
|
o.R.Sites = nil
|
|
|
|
related, err := o.Sites(mods...).All(ctx, exec)
|
|
if err != nil {
|
|
return err
|
|
}
|
|
|
|
for _, rel := range related {
|
|
rel.R.File = o
|
|
}
|
|
|
|
o.R.Sites = related
|
|
return nil
|
|
}
|
|
|
|
// LoadSites loads the fileuploadFile's Sites into the .R struct
|
|
func (os FileuploadFileSlice) LoadSites(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
|
if len(os) == 0 {
|
|
return nil
|
|
}
|
|
|
|
sites, err := os.Sites(mods...).All(ctx, exec)
|
|
if err != nil {
|
|
return err
|
|
}
|
|
|
|
for _, o := range os {
|
|
if o == nil {
|
|
continue
|
|
}
|
|
|
|
o.R.Sites = nil
|
|
}
|
|
|
|
for _, o := range os {
|
|
if o == nil {
|
|
continue
|
|
}
|
|
|
|
for _, rel := range sites {
|
|
|
|
if !rel.FileID.IsValue() {
|
|
continue
|
|
}
|
|
if !(rel.FileID.IsValue() && o.ID == rel.FileID.MustGet()) {
|
|
continue
|
|
}
|
|
|
|
rel.R.File = o
|
|
|
|
o.R.Sites = append(o.R.Sites, rel)
|
|
}
|
|
}
|
|
|
|
return nil
|
|
}
|
|
|
|
// fileuploadFileC is where relationship counts are stored.
// A nil pointer means the count has not been loaded yet.
type fileuploadFileC struct {
	// ErrorFiles holds the number of related fileupload error-file rows.
	ErrorFiles *int64

	// Sites holds the number of related site rows.
	Sites *int64
}
|
|
|
|
// PreloadCount sets a count in the C struct by name
|
|
func (o *FileuploadFile) PreloadCount(name string, count int64) error {
|
|
if o == nil {
|
|
return nil
|
|
}
|
|
|
|
switch name {
|
|
case "ErrorFiles":
|
|
o.C.ErrorFiles = &count
|
|
case "Sites":
|
|
o.C.Sites = &count
|
|
}
|
|
return nil
|
|
}
|
|
|
|
// fileuploadFileCountPreloader holds factories that build preloaders which
// attach the count of a to-many relation (as a correlated subquery column)
// to the main query.
type fileuploadFileCountPreloader struct {
	ErrorFiles func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader

	Sites func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader
}
|
|
|
|
// buildFileuploadFileCountPreloader constructs count-preloader factories for
// the file's to-many relations. Each factory emits a correlated
// (SELECT count(*) ...) subquery keyed on the parent row's id; extra mods are
// appended to the subquery so callers can filter the counted rows.
func buildFileuploadFileCountPreloader() fileuploadFileCountPreloader {
	return fileuploadFileCountPreloader{
		ErrorFiles: func(mods ...bob.Mod[*dialect.SelectQuery]) psql.Preloader {
			return countPreloader[*FileuploadFile]("ErrorFiles", func(parent string) bob.Expression {
				// Build a correlated subquery: (SELECT COUNT(*) FROM related WHERE fk = parent.pk)
				if parent == "" {
					// No explicit parent alias: fall back to the table's own alias.
					parent = FileuploadFiles.Alias()
				}

				subqueryMods := []bob.Mod[*dialect.SelectQuery]{
					sm.Columns(psql.Raw("count(*)")),

					sm.From(FileuploadErrorFiles.Name()),
					sm.Where(psql.Quote(FileuploadErrorFiles.Alias(), "file_id").EQ(psql.Quote(parent, "id"))),
				}
				subqueryMods = append(subqueryMods, mods...)
				return psql.Group(psql.Select(subqueryMods...).Expression)
			})
		},
		Sites: func(mods ...bob.Mod[*dialect.SelectQuery]) psql.Preloader {
			return countPreloader[*FileuploadFile]("Sites", func(parent string) bob.Expression {
				// Build a correlated subquery: (SELECT COUNT(*) FROM related WHERE fk = parent.pk)
				if parent == "" {
					// No explicit parent alias: fall back to the table's own alias.
					parent = FileuploadFiles.Alias()
				}

				subqueryMods := []bob.Mod[*dialect.SelectQuery]{
					sm.Columns(psql.Raw("count(*)")),

					sm.From(Sites.Name()),
					sm.Where(psql.Quote(Sites.Alias(), "file_id").EQ(psql.Quote(parent, "id"))),
				}
				subqueryMods = append(subqueryMods, mods...)
				return psql.Group(psql.Select(subqueryMods...).Expression)
			})
		},
	}
}
|
|
|
|
// fileuploadFileCountThenLoader holds factories that build loaders which run
// a follow-up COUNT query ("then load") for each to-many relation.
type fileuploadFileCountThenLoader[Q orm.Loadable] struct {
	ErrorFiles func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]

	Sites func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
}
|
|
|
|
// buildFileuploadFileCountThenLoader constructs the count then-loader
// factories. Each local interface describes the LoadCountXxx method the
// retrieved value must implement; countThenLoadBuilder dispatches to it
// after the main query runs.
func buildFileuploadFileCountThenLoader[Q orm.Loadable]() fileuploadFileCountThenLoader[Q] {
	type ErrorFilesCountInterface interface {
		LoadCountErrorFiles(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
	}
	type SitesCountInterface interface {
		LoadCountSites(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
	}

	return fileuploadFileCountThenLoader[Q]{
		ErrorFiles: countThenLoadBuilder[Q](
			"ErrorFiles",
			func(ctx context.Context, exec bob.Executor, retrieved ErrorFilesCountInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
				return retrieved.LoadCountErrorFiles(ctx, exec, mods...)
			},
		),
		Sites: countThenLoadBuilder[Q](
			"Sites",
			func(ctx context.Context, exec bob.Executor, retrieved SitesCountInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
				return retrieved.LoadCountSites(ctx, exec, mods...)
			},
		),
	}
}
|
|
|
|
// LoadCountErrorFiles loads the count of ErrorFiles into the C struct
|
|
func (o *FileuploadFile) LoadCountErrorFiles(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
|
if o == nil {
|
|
return nil
|
|
}
|
|
|
|
count, err := o.ErrorFiles(mods...).Count(ctx, exec)
|
|
if err != nil {
|
|
return err
|
|
}
|
|
|
|
o.C.ErrorFiles = &count
|
|
return nil
|
|
}
|
|
|
|
// LoadCountErrorFiles loads the count of ErrorFiles for a slice
|
|
func (os FileuploadFileSlice) LoadCountErrorFiles(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
|
if len(os) == 0 {
|
|
return nil
|
|
}
|
|
|
|
for _, o := range os {
|
|
if err := o.LoadCountErrorFiles(ctx, exec, mods...); err != nil {
|
|
return err
|
|
}
|
|
}
|
|
|
|
return nil
|
|
}
|
|
|
|
// LoadCountSites loads the count of Sites into the C struct
|
|
func (o *FileuploadFile) LoadCountSites(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
|
if o == nil {
|
|
return nil
|
|
}
|
|
|
|
count, err := o.Sites(mods...).Count(ctx, exec)
|
|
if err != nil {
|
|
return err
|
|
}
|
|
|
|
o.C.Sites = &count
|
|
return nil
|
|
}
|
|
|
|
// LoadCountSites loads the count of Sites for a slice
|
|
func (os FileuploadFileSlice) LoadCountSites(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
|
if len(os) == 0 {
|
|
return nil
|
|
}
|
|
|
|
for _, o := range os {
|
|
if err := o.LoadCountSites(ctx, exec, mods...); err != nil {
|
|
return err
|
|
}
|
|
}
|
|
|
|
return nil
|
|
}
|
|
|
|
// fileuploadFileJoins exposes join mods for each relationship of
// fileupload.file. typ holds the join kind (e.g. inner/left) that
// buildFileuploadFileJoins bakes into every mod.
type fileuploadFileJoins[Q dialect.Joinable] struct {
	typ string

	CSV modAs[Q, fileuploadCSVColumns]

	ErrorFiles modAs[Q, fileuploadErrorFileColumns]

	CommitterUser modAs[Q, userColumns]

	CreatorUser modAs[Q, userColumns]

	Organization modAs[Q, organizationColumns]

	Sites modAs[Q, siteColumns]
}
|
|
|
|
func (j fileuploadFileJoins[Q]) aliasedAs(alias string) fileuploadFileJoins[Q] {
|
|
return buildFileuploadFileJoins[Q](buildFileuploadFileColumns(alias), j.typ)
|
|
}
|
|
|
|
// buildFileuploadFileJoins constructs join mods for every relationship of
// fileupload.file. cols supplies the (possibly aliased) parent columns used
// in each ON clause; typ selects the join kind passed to dialect.Join.
func buildFileuploadFileJoins[Q dialect.Joinable](cols fileuploadFileColumns, typ string) fileuploadFileJoins[Q] {
	return fileuploadFileJoins[Q]{
		typ: typ,

		// CSV: join fileupload csv ON csv.file_id = file.id.
		CSV: modAs[Q, fileuploadCSVColumns]{
			c: FileuploadCSVS.Columns,
			f: func(to fileuploadCSVColumns) bob.Mod[Q] {
				mods := make(mods.QueryMods[Q], 0, 1)

				{
					mods = append(mods, dialect.Join[Q](typ, FileuploadCSVS.Name().As(to.Alias())).On(
						to.FileID.EQ(cols.ID),
					))
				}

				return mods
			},
		},

		// ErrorFiles: join error files ON error_file.file_id = file.id.
		ErrorFiles: modAs[Q, fileuploadErrorFileColumns]{
			c: FileuploadErrorFiles.Columns,
			f: func(to fileuploadErrorFileColumns) bob.Mod[Q] {
				mods := make(mods.QueryMods[Q], 0, 1)

				{
					mods = append(mods, dialect.Join[Q](typ, FileuploadErrorFiles.Name().As(to.Alias())).On(
						to.FileID.EQ(cols.ID),
					))
				}

				return mods
			},
		},

		// CommitterUser: join users ON users.id = file.committer.
		CommitterUser: modAs[Q, userColumns]{
			c: Users.Columns,
			f: func(to userColumns) bob.Mod[Q] {
				mods := make(mods.QueryMods[Q], 0, 1)

				{
					mods = append(mods, dialect.Join[Q](typ, Users.Name().As(to.Alias())).On(
						to.ID.EQ(cols.Committer),
					))
				}

				return mods
			},
		},

		// CreatorUser: join users ON users.id = file.creator_id.
		CreatorUser: modAs[Q, userColumns]{
			c: Users.Columns,
			f: func(to userColumns) bob.Mod[Q] {
				mods := make(mods.QueryMods[Q], 0, 1)

				{
					mods = append(mods, dialect.Join[Q](typ, Users.Name().As(to.Alias())).On(
						to.ID.EQ(cols.CreatorID),
					))
				}

				return mods
			},
		},

		// Organization: join organizations ON org.id = file.organization_id.
		Organization: modAs[Q, organizationColumns]{
			c: Organizations.Columns,
			f: func(to organizationColumns) bob.Mod[Q] {
				mods := make(mods.QueryMods[Q], 0, 1)

				{
					mods = append(mods, dialect.Join[Q](typ, Organizations.Name().As(to.Alias())).On(
						to.ID.EQ(cols.OrganizationID),
					))
				}

				return mods
			},
		},

		// Sites: join sites ON site.file_id = file.id.
		Sites: modAs[Q, siteColumns]{
			c: Sites.Columns,
			f: func(to siteColumns) bob.Mod[Q] {
				mods := make(mods.QueryMods[Q], 0, 1)

				{
					mods = append(mods, dialect.Join[Q](typ, Sites.Name().As(to.Alias())).On(
						to.FileID.EQ(cols.ID),
					))
				}

				return mods
			},
		},
	}
}
|