From 6716bc68c95b5aac9582bd36a47e60d4dc517576 Mon Sep 17 00:00:00 2001 From: Eli Ribble Date: Sun, 8 Feb 2026 01:44:44 +0000 Subject: [PATCH] Make file uploads of CSV actually save to disk --- db/bobgen.yaml | 1 + db/dberrors/fileupload.csv.bob.go | 17 + db/dberrors/fileupload.error.bob.go | 17 + db/dberrors/fileupload.file.bob.go | 17 + db/dbinfo/fileupload.csv.bob.go | 117 +++ db/dbinfo/fileupload.error.bob.go | 137 +++ db/dbinfo/fileupload.file.bob.go | 187 ++++ db/enums/enums.bob.go | 143 +++ db/factory/bobfactory_context.bob.go | 15 + db/factory/bobfactory_main.bob.go | 135 +++ db/factory/bobfactory_random.bob.go | 20 + db/factory/fileupload.csv.bob.go | 370 ++++++++ db/factory/fileupload.error.bob.go | 453 ++++++++++ db/factory/fileupload.file.bob.go | 847 +++++++++++++++++ db/factory/user_.bob.go | 120 ++- db/models/bob_counts.bob.go | 4 + db/models/bob_joins.bob.go | 6 + db/models/bob_loaders.bob.go | 12 + db/models/bob_where.bob.go | 9 + db/models/fileupload.csv.bob.go | 603 +++++++++++++ db/models/fileupload.error.bob.go | 652 ++++++++++++++ db/models/fileupload.file.bob.go | 1252 ++++++++++++++++++++++++++ db/models/user_.bob.go | 270 +++++- platform/pool.go | 50 + rmo/image.go | 10 +- sync/pool.go | 17 +- userfile/upload.go | 15 +- 27 files changed, 5459 insertions(+), 37 deletions(-) create mode 100644 db/dberrors/fileupload.csv.bob.go create mode 100644 db/dberrors/fileupload.error.bob.go create mode 100644 db/dberrors/fileupload.file.bob.go create mode 100644 db/dbinfo/fileupload.csv.bob.go create mode 100644 db/dbinfo/fileupload.error.bob.go create mode 100644 db/dbinfo/fileupload.file.bob.go create mode 100644 db/factory/fileupload.csv.bob.go create mode 100644 db/factory/fileupload.error.bob.go create mode 100644 db/factory/fileupload.file.bob.go create mode 100644 db/models/fileupload.csv.bob.go create mode 100644 db/models/fileupload.error.bob.go create mode 100644 db/models/fileupload.file.bob.go create mode 100644 platform/pool.go diff --git 
a/db/bobgen.yaml b/db/bobgen.yaml index 92053a07..d4f748cb 100644 --- a/db/bobgen.yaml +++ b/db/bobgen.yaml @@ -17,6 +17,7 @@ psql: schemas: - "arcgis" - "comms" + - "fileupload" - "import" - "public" - "publicreport" diff --git a/db/dberrors/fileupload.csv.bob.go b/db/dberrors/fileupload.csv.bob.go new file mode 100644 index 00000000..57522a40 --- /dev/null +++ b/db/dberrors/fileupload.csv.bob.go @@ -0,0 +1,17 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package dberrors + +var FileuploadCSVErrors = &fileuploadCSVErrors{ + ErrUniqueCsvPkey: &UniqueConstraintError{ + schema: "fileupload", + table: "csv", + columns: []string{"file_id"}, + s: "csv_pkey", + }, +} + +type fileuploadCSVErrors struct { + ErrUniqueCsvPkey *UniqueConstraintError +} diff --git a/db/dberrors/fileupload.error.bob.go b/db/dberrors/fileupload.error.bob.go new file mode 100644 index 00000000..ecff0f3c --- /dev/null +++ b/db/dberrors/fileupload.error.bob.go @@ -0,0 +1,17 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package dberrors + +var FileuploadErrorErrors = &fileuploadErrorErrors{ + ErrUniqueErrorPkey: &UniqueConstraintError{ + schema: "fileupload", + table: "error", + columns: []string{"id"}, + s: "error_pkey", + }, +} + +type fileuploadErrorErrors struct { + ErrUniqueErrorPkey *UniqueConstraintError +} diff --git a/db/dberrors/fileupload.file.bob.go b/db/dberrors/fileupload.file.bob.go new file mode 100644 index 00000000..fb03e727 --- /dev/null +++ b/db/dberrors/fileupload.file.bob.go @@ -0,0 +1,17 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package dberrors + +var FileuploadFileErrors = &fileuploadFileErrors{ + ErrUniqueFilePkey: &UniqueConstraintError{ + schema: "fileupload", + table: "file", + columns: []string{"id"}, + s: "file_pkey", + }, +} + +type fileuploadFileErrors struct { + ErrUniqueFilePkey *UniqueConstraintError +} diff --git a/db/dbinfo/fileupload.csv.bob.go b/db/dbinfo/fileupload.csv.bob.go new file mode 100644 index 00000000..e46c03a2 --- /dev/null +++ b/db/dbinfo/fileupload.csv.bob.go @@ -0,0 +1,117 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package dbinfo + +import "github.com/aarondl/opt/null" + +var FileuploadCSVS = Table[ + fileuploadCSVColumns, + fileuploadCSVIndexes, + fileuploadCSVForeignKeys, + fileuploadCSVUniques, + fileuploadCSVChecks, +]{ + Schema: "fileupload", + Name: "csv", + Columns: fileuploadCSVColumns{ + FileID: column{ + Name: "file_id", + DBType: "integer", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + Type: column{ + Name: "type_", + DBType: "fileupload.csvtype", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + }, + Indexes: fileuploadCSVIndexes{ + CSVPkey: index{ + Type: "btree", + Name: "csv_pkey", + Columns: []indexColumn{ + { + Name: "file_id", + Desc: null.FromCond(false, true), + IsExpression: false, + }, + }, + Unique: true, + Comment: "", + NullsFirst: []bool{false}, + NullsDistinct: false, + Where: "", + Include: []string{}, + }, + }, + PrimaryKey: &constraint{ + Name: "csv_pkey", + Columns: []string{"file_id"}, + Comment: "", + }, + ForeignKeys: fileuploadCSVForeignKeys{ + FileuploadCSVCSVFileIDFkey: foreignKey{ + constraint: constraint{ + Name: "fileupload.csv.csv_file_id_fkey", + Columns: []string{"file_id"}, + Comment: "", + }, + ForeignTable: "fileupload.file", + ForeignColumns: []string{"id"}, + }, + }, + + Comment: "", +} + +type fileuploadCSVColumns 
struct { + FileID column + Type column +} + +func (c fileuploadCSVColumns) AsSlice() []column { + return []column{ + c.FileID, c.Type, + } +} + +type fileuploadCSVIndexes struct { + CSVPkey index +} + +func (i fileuploadCSVIndexes) AsSlice() []index { + return []index{ + i.CSVPkey, + } +} + +type fileuploadCSVForeignKeys struct { + FileuploadCSVCSVFileIDFkey foreignKey +} + +func (f fileuploadCSVForeignKeys) AsSlice() []foreignKey { + return []foreignKey{ + f.FileuploadCSVCSVFileIDFkey, + } +} + +type fileuploadCSVUniques struct{} + +func (u fileuploadCSVUniques) AsSlice() []constraint { + return []constraint{} +} + +type fileuploadCSVChecks struct{} + +func (c fileuploadCSVChecks) AsSlice() []check { + return []check{} +} diff --git a/db/dbinfo/fileupload.error.bob.go b/db/dbinfo/fileupload.error.bob.go new file mode 100644 index 00000000..747931c5 --- /dev/null +++ b/db/dbinfo/fileupload.error.bob.go @@ -0,0 +1,137 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package dbinfo + +import "github.com/aarondl/opt/null" + +var FileuploadErrors = Table[ + fileuploadErrorColumns, + fileuploadErrorIndexes, + fileuploadErrorForeignKeys, + fileuploadErrorUniques, + fileuploadErrorChecks, +]{ + Schema: "fileupload", + Name: "error", + Columns: fileuploadErrorColumns{ + FileID: column{ + Name: "file_id", + DBType: "integer", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + ID: column{ + Name: "id", + DBType: "integer", + Default: "nextval('fileupload.error_id_seq'::regclass)", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + Line: column{ + Name: "line", + DBType: "integer", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + Message: column{ + Name: "message", + DBType: "text", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + }, + Indexes: fileuploadErrorIndexes{ + ErrorPkey: index{ + Type: "btree", + Name: "error_pkey", + Columns: []indexColumn{ + { + Name: "id", + Desc: null.FromCond(false, true), + IsExpression: false, + }, + }, + Unique: true, + Comment: "", + NullsFirst: []bool{false}, + NullsDistinct: false, + Where: "", + Include: []string{}, + }, + }, + PrimaryKey: &constraint{ + Name: "error_pkey", + Columns: []string{"id"}, + Comment: "", + }, + ForeignKeys: fileuploadErrorForeignKeys{ + FileuploadErrorErrorFileIDFkey: foreignKey{ + constraint: constraint{ + Name: "fileupload.error.error_file_id_fkey", + Columns: []string{"file_id"}, + Comment: "", + }, + ForeignTable: "fileupload.file", + ForeignColumns: []string{"id"}, + }, + }, + + Comment: "", +} + +type fileuploadErrorColumns struct { + FileID column + ID column + Line column + Message column +} + +func (c fileuploadErrorColumns) AsSlice() []column { + return []column{ + c.FileID, c.ID, c.Line, c.Message, + } +} + +type fileuploadErrorIndexes struct { + ErrorPkey index +} + +func (i 
fileuploadErrorIndexes) AsSlice() []index { + return []index{ + i.ErrorPkey, + } +} + +type fileuploadErrorForeignKeys struct { + FileuploadErrorErrorFileIDFkey foreignKey +} + +func (f fileuploadErrorForeignKeys) AsSlice() []foreignKey { + return []foreignKey{ + f.FileuploadErrorErrorFileIDFkey, + } +} + +type fileuploadErrorUniques struct{} + +func (u fileuploadErrorUniques) AsSlice() []constraint { + return []constraint{} +} + +type fileuploadErrorChecks struct{} + +func (c fileuploadErrorChecks) AsSlice() []check { + return []check{} +} diff --git a/db/dbinfo/fileupload.file.bob.go b/db/dbinfo/fileupload.file.bob.go new file mode 100644 index 00000000..45ded8f9 --- /dev/null +++ b/db/dbinfo/fileupload.file.bob.go @@ -0,0 +1,187 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package dbinfo + +import "github.com/aarondl/opt/null" + +var FileuploadFiles = Table[ + fileuploadFileColumns, + fileuploadFileIndexes, + fileuploadFileForeignKeys, + fileuploadFileUniques, + fileuploadFileChecks, +]{ + Schema: "fileupload", + Name: "file", + Columns: fileuploadFileColumns{ + ID: column{ + Name: "id", + DBType: "integer", + Default: "nextval('fileupload.file_id_seq'::regclass)", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + ContentType: column{ + Name: "content_type", + DBType: "text", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + Created: column{ + Name: "created", + DBType: "timestamp without time zone", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + CreatorID: column{ + Name: "creator_id", + DBType: "integer", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + Deleted: column{ + Name: "deleted", + DBType: "timestamp without time zone", + Default: "NULL", + Comment: "", + Nullable: true, + Generated: false, 
+ AutoIncr: false, + }, + Name: column{ + Name: "name", + DBType: "text", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + Status: column{ + Name: "status", + DBType: "fileupload.filestatustype", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + SizeBytes: column{ + Name: "size_bytes", + DBType: "integer", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + FileUUID: column{ + Name: "file_uuid", + DBType: "uuid", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + }, + Indexes: fileuploadFileIndexes{ + FilePkey: index{ + Type: "btree", + Name: "file_pkey", + Columns: []indexColumn{ + { + Name: "id", + Desc: null.FromCond(false, true), + IsExpression: false, + }, + }, + Unique: true, + Comment: "", + NullsFirst: []bool{false}, + NullsDistinct: false, + Where: "", + Include: []string{}, + }, + }, + PrimaryKey: &constraint{ + Name: "file_pkey", + Columns: []string{"id"}, + Comment: "", + }, + ForeignKeys: fileuploadFileForeignKeys{ + FileuploadFileFileCreatorIDFkey: foreignKey{ + constraint: constraint{ + Name: "fileupload.file.file_creator_id_fkey", + Columns: []string{"creator_id"}, + Comment: "", + }, + ForeignTable: "user_", + ForeignColumns: []string{"id"}, + }, + }, + + Comment: "", +} + +type fileuploadFileColumns struct { + ID column + ContentType column + Created column + CreatorID column + Deleted column + Name column + Status column + SizeBytes column + FileUUID column +} + +func (c fileuploadFileColumns) AsSlice() []column { + return []column{ + c.ID, c.ContentType, c.Created, c.CreatorID, c.Deleted, c.Name, c.Status, c.SizeBytes, c.FileUUID, + } +} + +type fileuploadFileIndexes struct { + FilePkey index +} + +func (i fileuploadFileIndexes) AsSlice() []index { + return []index{ + i.FilePkey, + } +} + +type fileuploadFileForeignKeys struct { + FileuploadFileFileCreatorIDFkey 
foreignKey +} + +func (f fileuploadFileForeignKeys) AsSlice() []foreignKey { + return []foreignKey{ + f.FileuploadFileFileCreatorIDFkey, + } +} + +type fileuploadFileUniques struct{} + +func (u fileuploadFileUniques) AsSlice() []constraint { + return []constraint{} +} + +type fileuploadFileChecks struct{} + +func (c fileuploadFileChecks) AsSlice() []check { + return []check{} +} diff --git a/db/enums/enums.bob.go b/db/enums/enums.bob.go index bd3a03a9..213f1b82 100644 --- a/db/enums/enums.bob.go +++ b/db/enums/enums.bob.go @@ -579,6 +579,149 @@ func (e *CommsTextorigin) Scan(value any) error { return nil } +// Enum values for FileuploadCsvtype +const ( + FileuploadCsvtypePoollist FileuploadCsvtype = "PoolList" +) + +func AllFileuploadCsvtype() []FileuploadCsvtype { + return []FileuploadCsvtype{ + FileuploadCsvtypePoollist, + } +} + +type FileuploadCsvtype string + +func (e FileuploadCsvtype) String() string { + return string(e) +} + +func (e FileuploadCsvtype) Valid() bool { + switch e { + case FileuploadCsvtypePoollist: + return true + default: + return false + } +} + +// useful when testing in other packages +func (e FileuploadCsvtype) All() []FileuploadCsvtype { + return AllFileuploadCsvtype() +} + +func (e FileuploadCsvtype) MarshalText() ([]byte, error) { + return []byte(e), nil +} + +func (e *FileuploadCsvtype) UnmarshalText(text []byte) error { + return e.Scan(text) +} + +func (e FileuploadCsvtype) MarshalBinary() ([]byte, error) { + return []byte(e), nil +} + +func (e *FileuploadCsvtype) UnmarshalBinary(data []byte) error { + return e.Scan(data) +} + +func (e FileuploadCsvtype) Value() (driver.Value, error) { + return string(e), nil +} + +func (e *FileuploadCsvtype) Scan(value any) error { + switch x := value.(type) { + case string: + *e = FileuploadCsvtype(x) + case []byte: + *e = FileuploadCsvtype(x) + case nil: + return fmt.Errorf("cannot nil into FileuploadCsvtype") + default: + return fmt.Errorf("cannot scan type %T: %v", value, value) + } + + if 
!e.Valid() { + return fmt.Errorf("invalid FileuploadCsvtype value: %s", *e) + } + + return nil +} + +// Enum values for FileuploadFilestatustype +const ( + FileuploadFilestatustypeUploaded FileuploadFilestatustype = "uploaded" + FileuploadFilestatustypeParsed FileuploadFilestatustype = "parsed" +) + +func AllFileuploadFilestatustype() []FileuploadFilestatustype { + return []FileuploadFilestatustype{ + FileuploadFilestatustypeUploaded, + FileuploadFilestatustypeParsed, + } +} + +type FileuploadFilestatustype string + +func (e FileuploadFilestatustype) String() string { + return string(e) +} + +func (e FileuploadFilestatustype) Valid() bool { + switch e { + case FileuploadFilestatustypeUploaded, + FileuploadFilestatustypeParsed: + return true + default: + return false + } +} + +// useful when testing in other packages +func (e FileuploadFilestatustype) All() []FileuploadFilestatustype { + return AllFileuploadFilestatustype() +} + +func (e FileuploadFilestatustype) MarshalText() ([]byte, error) { + return []byte(e), nil +} + +func (e *FileuploadFilestatustype) UnmarshalText(text []byte) error { + return e.Scan(text) +} + +func (e FileuploadFilestatustype) MarshalBinary() ([]byte, error) { + return []byte(e), nil +} + +func (e *FileuploadFilestatustype) UnmarshalBinary(data []byte) error { + return e.Scan(data) +} + +func (e FileuploadFilestatustype) Value() (driver.Value, error) { + return string(e), nil +} + +func (e *FileuploadFilestatustype) Scan(value any) error { + switch x := value.(type) { + case string: + *e = FileuploadFilestatustype(x) + case []byte: + *e = FileuploadFilestatustype(x) + case nil: + return fmt.Errorf("cannot nil into FileuploadFilestatustype") + default: + return fmt.Errorf("cannot scan type %T: %v", value, value) + } + + if !e.Valid() { + return fmt.Errorf("invalid FileuploadFilestatustype value: %s", *e) + } + + return nil +} + // Enum values for H3aggregationtype const ( H3aggregationtypeMosquitosource H3aggregationtype = "MosquitoSource" 
diff --git a/db/factory/bobfactory_context.bob.go b/db/factory/bobfactory_context.bob.go index e65d0672..5706ee74 100644 --- a/db/factory/bobfactory_context.bob.go +++ b/db/factory/bobfactory_context.bob.go @@ -173,6 +173,20 @@ var ( fieldseekerSyncWithParentsCascadingCtx = newContextual[bool]("fieldseekerSyncWithParentsCascading") fieldseekerSyncRelOrganizationCtx = newContextual[bool]("fieldseeker_sync.organization.fieldseeker_sync.fieldseeker_sync_organization_id_fkey") + // Relationship Contexts for fileupload.csv + fileuploadCSVWithParentsCascadingCtx = newContextual[bool]("fileuploadCSVWithParentsCascading") + fileuploadCSVRelFileCtx = newContextual[bool]("fileupload.csv.fileupload.file.fileupload.csv.csv_file_id_fkey") + + // Relationship Contexts for fileupload.error + fileuploadErrorWithParentsCascadingCtx = newContextual[bool]("fileuploadErrorWithParentsCascading") + fileuploadErrorRelFileCtx = newContextual[bool]("fileupload.error.fileupload.file.fileupload.error.error_file_id_fkey") + + // Relationship Contexts for fileupload.file + fileuploadFileWithParentsCascadingCtx = newContextual[bool]("fileuploadFileWithParentsCascading") + fileuploadFileRelCSVCtx = newContextual[bool]("fileupload.csv.fileupload.file.fileupload.csv.csv_file_id_fkey") + fileuploadFileRelErrorsCtx = newContextual[bool]("fileupload.error.fileupload.file.fileupload.error.error_file_id_fkey") + fileuploadFileRelCreatorUserCtx = newContextual[bool]("fileupload.file.user_.fileupload.file.file_creator_id_fkey") + // Relationship Contexts for geography_columns geographyColumnWithParentsCascadingCtx = newContextual[bool]("geographyColumnWithParentsCascading") @@ -354,6 +368,7 @@ var ( // Relationship Contexts for user_ userWithParentsCascadingCtx = newContextual[bool]("userWithParentsCascading") userRelPublicUserUserCtx = newContextual[bool]("arcgis.user_.user_.arcgis.user_.user__public_user_id_fkey") + userRelCreatorFilesCtx = 
newContextual[bool]("fileupload.file.user_.fileupload.file.file_creator_id_fkey") userRelCreatorNoteAudiosCtx = newContextual[bool]("note_audio.user_.note_audio.note_audio_creator_id_fkey") userRelDeletorNoteAudiosCtx = newContextual[bool]("note_audio.user_.note_audio.note_audio_deletor_id_fkey") userRelCreatorNoteImagesCtx = newContextual[bool]("note_image.user_.note_image.note_image_creator_id_fkey") diff --git a/db/factory/bobfactory_main.bob.go b/db/factory/bobfactory_main.bob.go index 3bde7fc0..cbb6221a 100644 --- a/db/factory/bobfactory_main.bob.go +++ b/db/factory/bobfactory_main.bob.go @@ -57,6 +57,9 @@ type Factory struct { baseFieldseekerZoneMods FieldseekerZoneModSlice baseFieldseekerZones2Mods FieldseekerZones2ModSlice baseFieldseekerSyncMods FieldseekerSyncModSlice + baseFileuploadCSVMods FileuploadCSVModSlice + baseFileuploadErrorMods FileuploadErrorModSlice + baseFileuploadFileMods FileuploadFileModSlice baseGeographyColumnMods GeographyColumnModSlice baseGeometryColumnMods GeometryColumnModSlice baseGooseDBVersionMods GooseDBVersionModSlice @@ -2229,6 +2232,111 @@ func (f *Factory) FromExistingFieldseekerSync(m *models.FieldseekerSync) *Fields return o } +func (f *Factory) NewFileuploadCSV(mods ...FileuploadCSVMod) *FileuploadCSVTemplate { + return f.NewFileuploadCSVWithContext(context.Background(), mods...) 
+} + +func (f *Factory) NewFileuploadCSVWithContext(ctx context.Context, mods ...FileuploadCSVMod) *FileuploadCSVTemplate { + o := &FileuploadCSVTemplate{f: f} + + if f != nil { + f.baseFileuploadCSVMods.Apply(ctx, o) + } + + FileuploadCSVModSlice(mods).Apply(ctx, o) + + return o +} + +func (f *Factory) FromExistingFileuploadCSV(m *models.FileuploadCSV) *FileuploadCSVTemplate { + o := &FileuploadCSVTemplate{f: f, alreadyPersisted: true} + + o.FileID = func() int32 { return m.FileID } + o.Type = func() enums.FileuploadCsvtype { return m.Type } + + ctx := context.Background() + if m.R.File != nil { + FileuploadCSVMods.WithExistingFile(m.R.File).Apply(ctx, o) + } + + return o +} + +func (f *Factory) NewFileuploadError(mods ...FileuploadErrorMod) *FileuploadErrorTemplate { + return f.NewFileuploadErrorWithContext(context.Background(), mods...) +} + +func (f *Factory) NewFileuploadErrorWithContext(ctx context.Context, mods ...FileuploadErrorMod) *FileuploadErrorTemplate { + o := &FileuploadErrorTemplate{f: f} + + if f != nil { + f.baseFileuploadErrorMods.Apply(ctx, o) + } + + FileuploadErrorModSlice(mods).Apply(ctx, o) + + return o +} + +func (f *Factory) FromExistingFileuploadError(m *models.FileuploadError) *FileuploadErrorTemplate { + o := &FileuploadErrorTemplate{f: f, alreadyPersisted: true} + + o.FileID = func() int32 { return m.FileID } + o.ID = func() int32 { return m.ID } + o.Line = func() int32 { return m.Line } + o.Message = func() string { return m.Message } + + ctx := context.Background() + if m.R.File != nil { + FileuploadErrorMods.WithExistingFile(m.R.File).Apply(ctx, o) + } + + return o +} + +func (f *Factory) NewFileuploadFile(mods ...FileuploadFileMod) *FileuploadFileTemplate { + return f.NewFileuploadFileWithContext(context.Background(), mods...) 
+} + +func (f *Factory) NewFileuploadFileWithContext(ctx context.Context, mods ...FileuploadFileMod) *FileuploadFileTemplate { + o := &FileuploadFileTemplate{f: f} + + if f != nil { + f.baseFileuploadFileMods.Apply(ctx, o) + } + + FileuploadFileModSlice(mods).Apply(ctx, o) + + return o +} + +func (f *Factory) FromExistingFileuploadFile(m *models.FileuploadFile) *FileuploadFileTemplate { + o := &FileuploadFileTemplate{f: f, alreadyPersisted: true} + + o.ID = func() int32 { return m.ID } + o.ContentType = func() string { return m.ContentType } + o.Created = func() time.Time { return m.Created } + o.CreatorID = func() int32 { return m.CreatorID } + o.Deleted = func() null.Val[time.Time] { return m.Deleted } + o.Name = func() string { return m.Name } + o.Status = func() enums.FileuploadFilestatustype { return m.Status } + o.SizeBytes = func() int32 { return m.SizeBytes } + o.FileUUID = func() uuid.UUID { return m.FileUUID } + + ctx := context.Background() + if m.R.CSV != nil { + FileuploadFileMods.WithExistingCSV(m.R.CSV).Apply(ctx, o) + } + if len(m.R.Errors) > 0 { + FileuploadFileMods.AddExistingErrors(m.R.Errors...).Apply(ctx, o) + } + if m.R.CreatorUser != nil { + FileuploadFileMods.WithExistingCreatorUser(m.R.CreatorUser).Apply(ctx, o) + } + + return o +} + func (f *Factory) NewGeographyColumn(mods ...GeographyColumnMod) *GeographyColumnTemplate { return f.NewGeographyColumnWithContext(context.Background(), mods...) 
} @@ -3545,6 +3653,9 @@ func (f *Factory) FromExistingUser(m *models.User) *UserTemplate { if len(m.R.PublicUserUser) > 0 { UserMods.AddExistingPublicUserUser(m.R.PublicUserUser...).Apply(ctx, o) } + if len(m.R.CreatorFiles) > 0 { + UserMods.AddExistingCreatorFiles(m.R.CreatorFiles...).Apply(ctx, o) + } if len(m.R.CreatorNoteAudios) > 0 { UserMods.AddExistingCreatorNoteAudios(m.R.CreatorNoteAudios...).Apply(ctx, o) } @@ -3874,6 +3985,30 @@ func (f *Factory) AddBaseFieldseekerSyncMod(mods ...FieldseekerSyncMod) { f.baseFieldseekerSyncMods = append(f.baseFieldseekerSyncMods, mods...) } +func (f *Factory) ClearBaseFileuploadCSVMods() { + f.baseFileuploadCSVMods = nil +} + +func (f *Factory) AddBaseFileuploadCSVMod(mods ...FileuploadCSVMod) { + f.baseFileuploadCSVMods = append(f.baseFileuploadCSVMods, mods...) +} + +func (f *Factory) ClearBaseFileuploadErrorMods() { + f.baseFileuploadErrorMods = nil +} + +func (f *Factory) AddBaseFileuploadErrorMod(mods ...FileuploadErrorMod) { + f.baseFileuploadErrorMods = append(f.baseFileuploadErrorMods, mods...) +} + +func (f *Factory) ClearBaseFileuploadFileMods() { + f.baseFileuploadFileMods = nil +} + +func (f *Factory) AddBaseFileuploadFileMod(mods ...FileuploadFileMod) { + f.baseFileuploadFileMods = append(f.baseFileuploadFileMods, mods...) 
+} + func (f *Factory) ClearBaseGeographyColumnMods() { f.baseGeographyColumnMods = nil } diff --git a/db/factory/bobfactory_random.bob.go b/db/factory/bobfactory_random.bob.go index 6cae9966..af661428 100644 --- a/db/factory/bobfactory_random.bob.go +++ b/db/factory/bobfactory_random.bob.go @@ -141,6 +141,26 @@ func random_enums_CommsTextorigin(f *faker.Faker, limits ...string) enums.CommsT return all[f.IntBetween(0, len(all)-1)] } +func random_enums_FileuploadCsvtype(f *faker.Faker, limits ...string) enums.FileuploadCsvtype { + if f == nil { + f = &defaultFaker + } + + var e enums.FileuploadCsvtype + all := e.All() + return all[f.IntBetween(0, len(all)-1)] +} + +func random_enums_FileuploadFilestatustype(f *faker.Faker, limits ...string) enums.FileuploadFilestatustype { + if f == nil { + f = &defaultFaker + } + + var e enums.FileuploadFilestatustype + all := e.All() + return all[f.IntBetween(0, len(all)-1)] +} + func random_enums_H3aggregationtype(f *faker.Faker, limits ...string) enums.H3aggregationtype { if f == nil { f = &defaultFaker diff --git a/db/factory/fileupload.csv.bob.go b/db/factory/fileupload.csv.bob.go new file mode 100644 index 00000000..95969355 --- /dev/null +++ b/db/factory/fileupload.csv.bob.go @@ -0,0 +1,370 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package factory + +import ( + "context" + "testing" + + "github.com/Gleipnir-Technology/bob" + enums "github.com/Gleipnir-Technology/nidus-sync/db/enums" + models "github.com/Gleipnir-Technology/nidus-sync/db/models" + "github.com/aarondl/opt/omit" + "github.com/jaswdr/faker/v2" +) + +type FileuploadCSVMod interface { + Apply(context.Context, *FileuploadCSVTemplate) +} + +type FileuploadCSVModFunc func(context.Context, *FileuploadCSVTemplate) + +func (f FileuploadCSVModFunc) Apply(ctx context.Context, n *FileuploadCSVTemplate) { + f(ctx, n) +} + +type FileuploadCSVModSlice []FileuploadCSVMod + +func (mods FileuploadCSVModSlice) Apply(ctx context.Context, n *FileuploadCSVTemplate) { + for _, f := range mods { + f.Apply(ctx, n) + } +} + +// FileuploadCSVTemplate is an object representing the database table. +// all columns are optional and should be set by mods +type FileuploadCSVTemplate struct { + FileID func() int32 + Type func() enums.FileuploadCsvtype + + r fileuploadCSVR + f *Factory + + alreadyPersisted bool +} + +type fileuploadCSVR struct { + File *fileuploadCSVRFileR +} + +type fileuploadCSVRFileR struct { + o *FileuploadFileTemplate +} + +// Apply mods to the FileuploadCSVTemplate +func (o *FileuploadCSVTemplate) Apply(ctx context.Context, mods ...FileuploadCSVMod) { + for _, mod := range mods { + mod.Apply(ctx, o) + } +} + +// setModelRels creates and sets the relationships on *models.FileuploadCSV +// according to the relationships in the template. 
Nothing is inserted into the db +func (t FileuploadCSVTemplate) setModelRels(o *models.FileuploadCSV) { + if t.r.File != nil { + rel := t.r.File.o.Build() + rel.R.CSV = o + o.FileID = rel.ID // h2 + o.R.File = rel + } +} + +// BuildSetter returns an *models.FileuploadCSVSetter +// this does nothing with the relationship templates +func (o FileuploadCSVTemplate) BuildSetter() *models.FileuploadCSVSetter { + m := &models.FileuploadCSVSetter{} + + if o.FileID != nil { + val := o.FileID() + m.FileID = omit.From(val) + } + if o.Type != nil { + val := o.Type() + m.Type = omit.From(val) + } + + return m +} + +// BuildManySetter returns an []*models.FileuploadCSVSetter +// this does nothing with the relationship templates +func (o FileuploadCSVTemplate) BuildManySetter(number int) []*models.FileuploadCSVSetter { + m := make([]*models.FileuploadCSVSetter, number) + + for i := range m { + m[i] = o.BuildSetter() + } + + return m +} + +// Build returns an *models.FileuploadCSV +// Related objects are also created and placed in the .R field +// NOTE: Objects are not inserted into the database. Use FileuploadCSVTemplate.Create +func (o FileuploadCSVTemplate) Build() *models.FileuploadCSV { + m := &models.FileuploadCSV{} + + if o.FileID != nil { + m.FileID = o.FileID() + } + if o.Type != nil { + m.Type = o.Type() + } + + o.setModelRels(m) + + return m +} + +// BuildMany returns an models.FileuploadCSVSlice +// Related objects are also created and placed in the .R field +// NOTE: Objects are not inserted into the database. 
Use FileuploadCSVTemplate.CreateMany +func (o FileuploadCSVTemplate) BuildMany(number int) models.FileuploadCSVSlice { + m := make(models.FileuploadCSVSlice, number) + + for i := range m { + m[i] = o.Build() + } + + return m +} + +func ensureCreatableFileuploadCSV(m *models.FileuploadCSVSetter) { + if !(m.FileID.IsValue()) { + val := random_int32(nil) + m.FileID = omit.From(val) + } + if !(m.Type.IsValue()) { + val := random_enums_FileuploadCsvtype(nil) + m.Type = omit.From(val) + } +} + +// insertOptRels creates and inserts any optional the relationships on *models.FileuploadCSV +// according to the relationships in the template. +// any required relationship should have already exist on the model +func (o *FileuploadCSVTemplate) insertOptRels(ctx context.Context, exec bob.Executor, m *models.FileuploadCSV) error { + var err error + + return err +} + +// Create builds a fileuploadCSV and inserts it into the database +// Relations objects are also inserted and placed in the .R field +func (o *FileuploadCSVTemplate) Create(ctx context.Context, exec bob.Executor) (*models.FileuploadCSV, error) { + var err error + opt := o.BuildSetter() + ensureCreatableFileuploadCSV(opt) + + if o.r.File == nil { + FileuploadCSVMods.WithNewFile().Apply(ctx, o) + } + + var rel0 *models.FileuploadFile + + if o.r.File.o.alreadyPersisted { + rel0 = o.r.File.o.Build() + } else { + rel0, err = o.r.File.o.Create(ctx, exec) + if err != nil { + return nil, err + } + } + + opt.FileID = omit.From(rel0.ID) + + m, err := models.FileuploadCSVS.Insert(opt).One(ctx, exec) + if err != nil { + return nil, err + } + + m.R.File = rel0 + + if err := o.insertOptRels(ctx, exec, m); err != nil { + return nil, err + } + return m, err +} + +// MustCreate builds a fileuploadCSV and inserts it into the database +// Relations objects are also inserted and placed in the .R field +// panics if an error occurs +func (o *FileuploadCSVTemplate) MustCreate(ctx context.Context, exec bob.Executor) *models.FileuploadCSV { 
+ m, err := o.Create(ctx, exec) + if err != nil { + panic(err) + } + return m +} + +// CreateOrFail builds a fileuploadCSV and inserts it into the database +// Relations objects are also inserted and placed in the .R field +// It calls `tb.Fatal(err)` on the test/benchmark if an error occurs +func (o *FileuploadCSVTemplate) CreateOrFail(ctx context.Context, tb testing.TB, exec bob.Executor) *models.FileuploadCSV { + tb.Helper() + m, err := o.Create(ctx, exec) + if err != nil { + tb.Fatal(err) + return nil + } + return m +} + +// CreateMany builds multiple fileuploadCSVS and inserts them into the database +// Relations objects are also inserted and placed in the .R field +func (o FileuploadCSVTemplate) CreateMany(ctx context.Context, exec bob.Executor, number int) (models.FileuploadCSVSlice, error) { + var err error + m := make(models.FileuploadCSVSlice, number) + + for i := range m { + m[i], err = o.Create(ctx, exec) + if err != nil { + return nil, err + } + } + + return m, nil +} + +// MustCreateMany builds multiple fileuploadCSVS and inserts them into the database +// Relations objects are also inserted and placed in the .R field +// panics if an error occurs +func (o FileuploadCSVTemplate) MustCreateMany(ctx context.Context, exec bob.Executor, number int) models.FileuploadCSVSlice { + m, err := o.CreateMany(ctx, exec, number) + if err != nil { + panic(err) + } + return m +} + +// CreateManyOrFail builds multiple fileuploadCSVS and inserts them into the database +// Relations objects are also inserted and placed in the .R field +// It calls `tb.Fatal(err)` on the test/benchmark if an error occurs +func (o FileuploadCSVTemplate) CreateManyOrFail(ctx context.Context, tb testing.TB, exec bob.Executor, number int) models.FileuploadCSVSlice { + tb.Helper() + m, err := o.CreateMany(ctx, exec, number) + if err != nil { + tb.Fatal(err) + return nil + } + return m +} + +// FileuploadCSV has methods that act as mods for the FileuploadCSVTemplate +var FileuploadCSVMods 
fileuploadCSVMods + +type fileuploadCSVMods struct{} + +func (m fileuploadCSVMods) RandomizeAllColumns(f *faker.Faker) FileuploadCSVMod { + return FileuploadCSVModSlice{ + FileuploadCSVMods.RandomFileID(f), + FileuploadCSVMods.RandomType(f), + } +} + +// Set the model columns to this value +func (m fileuploadCSVMods) FileID(val int32) FileuploadCSVMod { + return FileuploadCSVModFunc(func(_ context.Context, o *FileuploadCSVTemplate) { + o.FileID = func() int32 { return val } + }) +} + +// Set the Column from the function +func (m fileuploadCSVMods) FileIDFunc(f func() int32) FileuploadCSVMod { + return FileuploadCSVModFunc(func(_ context.Context, o *FileuploadCSVTemplate) { + o.FileID = f + }) +} + +// Clear any values for the column +func (m fileuploadCSVMods) UnsetFileID() FileuploadCSVMod { + return FileuploadCSVModFunc(func(_ context.Context, o *FileuploadCSVTemplate) { + o.FileID = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m fileuploadCSVMods) RandomFileID(f *faker.Faker) FileuploadCSVMod { + return FileuploadCSVModFunc(func(_ context.Context, o *FileuploadCSVTemplate) { + o.FileID = func() int32 { + return random_int32(f) + } + }) +} + +// Set the model columns to this value +func (m fileuploadCSVMods) Type(val enums.FileuploadCsvtype) FileuploadCSVMod { + return FileuploadCSVModFunc(func(_ context.Context, o *FileuploadCSVTemplate) { + o.Type = func() enums.FileuploadCsvtype { return val } + }) +} + +// Set the Column from the function +func (m fileuploadCSVMods) TypeFunc(f func() enums.FileuploadCsvtype) FileuploadCSVMod { + return FileuploadCSVModFunc(func(_ context.Context, o *FileuploadCSVTemplate) { + o.Type = f + }) +} + +// Clear any values for the column +func (m fileuploadCSVMods) UnsetType() FileuploadCSVMod { + return FileuploadCSVModFunc(func(_ context.Context, o *FileuploadCSVTemplate) { + o.Type = nil + }) +} + +// Generates a random value for the column 
using the given faker +// if faker is nil, a default faker is used +func (m fileuploadCSVMods) RandomType(f *faker.Faker) FileuploadCSVMod { + return FileuploadCSVModFunc(func(_ context.Context, o *FileuploadCSVTemplate) { + o.Type = func() enums.FileuploadCsvtype { + return random_enums_FileuploadCsvtype(f) + } + }) +} + +func (m fileuploadCSVMods) WithParentsCascading() FileuploadCSVMod { + return FileuploadCSVModFunc(func(ctx context.Context, o *FileuploadCSVTemplate) { + if isDone, _ := fileuploadCSVWithParentsCascadingCtx.Value(ctx); isDone { + return + } + ctx = fileuploadCSVWithParentsCascadingCtx.WithValue(ctx, true) + { + + related := o.f.NewFileuploadFileWithContext(ctx, FileuploadFileMods.WithParentsCascading()) + m.WithFile(related).Apply(ctx, o) + } + }) +} + +func (m fileuploadCSVMods) WithFile(rel *FileuploadFileTemplate) FileuploadCSVMod { + return FileuploadCSVModFunc(func(ctx context.Context, o *FileuploadCSVTemplate) { + o.r.File = &fileuploadCSVRFileR{ + o: rel, + } + }) +} + +func (m fileuploadCSVMods) WithNewFile(mods ...FileuploadFileMod) FileuploadCSVMod { + return FileuploadCSVModFunc(func(ctx context.Context, o *FileuploadCSVTemplate) { + related := o.f.NewFileuploadFileWithContext(ctx, mods...) + + m.WithFile(related).Apply(ctx, o) + }) +} + +func (m fileuploadCSVMods) WithExistingFile(em *models.FileuploadFile) FileuploadCSVMod { + return FileuploadCSVModFunc(func(ctx context.Context, o *FileuploadCSVTemplate) { + o.r.File = &fileuploadCSVRFileR{ + o: o.f.FromExistingFileuploadFile(em), + } + }) +} + +func (m fileuploadCSVMods) WithoutFile() FileuploadCSVMod { + return FileuploadCSVModFunc(func(ctx context.Context, o *FileuploadCSVTemplate) { + o.r.File = nil + }) +} diff --git a/db/factory/fileupload.error.bob.go b/db/factory/fileupload.error.bob.go new file mode 100644 index 00000000..f6ec243f --- /dev/null +++ b/db/factory/fileupload.error.bob.go @@ -0,0 +1,453 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. 
+// This file is meant to be re-generated in place and/or deleted at any time. + +package factory + +import ( + "context" + "testing" + + "github.com/Gleipnir-Technology/bob" + models "github.com/Gleipnir-Technology/nidus-sync/db/models" + "github.com/aarondl/opt/omit" + "github.com/jaswdr/faker/v2" +) + +type FileuploadErrorMod interface { + Apply(context.Context, *FileuploadErrorTemplate) +} + +type FileuploadErrorModFunc func(context.Context, *FileuploadErrorTemplate) + +func (f FileuploadErrorModFunc) Apply(ctx context.Context, n *FileuploadErrorTemplate) { + f(ctx, n) +} + +type FileuploadErrorModSlice []FileuploadErrorMod + +func (mods FileuploadErrorModSlice) Apply(ctx context.Context, n *FileuploadErrorTemplate) { + for _, f := range mods { + f.Apply(ctx, n) + } +} + +// FileuploadErrorTemplate is an object representing the database table. +// all columns are optional and should be set by mods +type FileuploadErrorTemplate struct { + FileID func() int32 + ID func() int32 + Line func() int32 + Message func() string + + r fileuploadErrorR + f *Factory + + alreadyPersisted bool +} + +type fileuploadErrorR struct { + File *fileuploadErrorRFileR +} + +type fileuploadErrorRFileR struct { + o *FileuploadFileTemplate +} + +// Apply mods to the FileuploadErrorTemplate +func (o *FileuploadErrorTemplate) Apply(ctx context.Context, mods ...FileuploadErrorMod) { + for _, mod := range mods { + mod.Apply(ctx, o) + } +} + +// setModelRels creates and sets the relationships on *models.FileuploadError +// according to the relationships in the template. 
Nothing is inserted into the db +func (t FileuploadErrorTemplate) setModelRels(o *models.FileuploadError) { + if t.r.File != nil { + rel := t.r.File.o.Build() + rel.R.Errors = append(rel.R.Errors, o) + o.FileID = rel.ID // h2 + o.R.File = rel + } +} + +// BuildSetter returns an *models.FileuploadErrorSetter +// this does nothing with the relationship templates +func (o FileuploadErrorTemplate) BuildSetter() *models.FileuploadErrorSetter { + m := &models.FileuploadErrorSetter{} + + if o.FileID != nil { + val := o.FileID() + m.FileID = omit.From(val) + } + if o.ID != nil { + val := o.ID() + m.ID = omit.From(val) + } + if o.Line != nil { + val := o.Line() + m.Line = omit.From(val) + } + if o.Message != nil { + val := o.Message() + m.Message = omit.From(val) + } + + return m +} + +// BuildManySetter returns an []*models.FileuploadErrorSetter +// this does nothing with the relationship templates +func (o FileuploadErrorTemplate) BuildManySetter(number int) []*models.FileuploadErrorSetter { + m := make([]*models.FileuploadErrorSetter, number) + + for i := range m { + m[i] = o.BuildSetter() + } + + return m +} + +// Build returns an *models.FileuploadError +// Related objects are also created and placed in the .R field +// NOTE: Objects are not inserted into the database. Use FileuploadErrorTemplate.Create +func (o FileuploadErrorTemplate) Build() *models.FileuploadError { + m := &models.FileuploadError{} + + if o.FileID != nil { + m.FileID = o.FileID() + } + if o.ID != nil { + m.ID = o.ID() + } + if o.Line != nil { + m.Line = o.Line() + } + if o.Message != nil { + m.Message = o.Message() + } + + o.setModelRels(m) + + return m +} + +// BuildMany returns an models.FileuploadErrorSlice +// Related objects are also created and placed in the .R field +// NOTE: Objects are not inserted into the database. 
Use FileuploadErrorTemplate.CreateMany +func (o FileuploadErrorTemplate) BuildMany(number int) models.FileuploadErrorSlice { + m := make(models.FileuploadErrorSlice, number) + + for i := range m { + m[i] = o.Build() + } + + return m +} + +func ensureCreatableFileuploadError(m *models.FileuploadErrorSetter) { + if !(m.FileID.IsValue()) { + val := random_int32(nil) + m.FileID = omit.From(val) + } + if !(m.Line.IsValue()) { + val := random_int32(nil) + m.Line = omit.From(val) + } + if !(m.Message.IsValue()) { + val := random_string(nil) + m.Message = omit.From(val) + } +} + +// insertOptRels creates and inserts any optional the relationships on *models.FileuploadError +// according to the relationships in the template. +// any required relationship should have already exist on the model +func (o *FileuploadErrorTemplate) insertOptRels(ctx context.Context, exec bob.Executor, m *models.FileuploadError) error { + var err error + + return err +} + +// Create builds a fileuploadError and inserts it into the database +// Relations objects are also inserted and placed in the .R field +func (o *FileuploadErrorTemplate) Create(ctx context.Context, exec bob.Executor) (*models.FileuploadError, error) { + var err error + opt := o.BuildSetter() + ensureCreatableFileuploadError(opt) + + if o.r.File == nil { + FileuploadErrorMods.WithNewFile().Apply(ctx, o) + } + + var rel0 *models.FileuploadFile + + if o.r.File.o.alreadyPersisted { + rel0 = o.r.File.o.Build() + } else { + rel0, err = o.r.File.o.Create(ctx, exec) + if err != nil { + return nil, err + } + } + + opt.FileID = omit.From(rel0.ID) + + m, err := models.FileuploadErrors.Insert(opt).One(ctx, exec) + if err != nil { + return nil, err + } + + m.R.File = rel0 + + if err := o.insertOptRels(ctx, exec, m); err != nil { + return nil, err + } + return m, err +} + +// MustCreate builds a fileuploadError and inserts it into the database +// Relations objects are also inserted and placed in the .R field +// panics if an error occurs 
+func (o *FileuploadErrorTemplate) MustCreate(ctx context.Context, exec bob.Executor) *models.FileuploadError { + m, err := o.Create(ctx, exec) + if err != nil { + panic(err) + } + return m +} + +// CreateOrFail builds a fileuploadError and inserts it into the database +// Relations objects are also inserted and placed in the .R field +// It calls `tb.Fatal(err)` on the test/benchmark if an error occurs +func (o *FileuploadErrorTemplate) CreateOrFail(ctx context.Context, tb testing.TB, exec bob.Executor) *models.FileuploadError { + tb.Helper() + m, err := o.Create(ctx, exec) + if err != nil { + tb.Fatal(err) + return nil + } + return m +} + +// CreateMany builds multiple fileuploadErrors and inserts them into the database +// Relations objects are also inserted and placed in the .R field +func (o FileuploadErrorTemplate) CreateMany(ctx context.Context, exec bob.Executor, number int) (models.FileuploadErrorSlice, error) { + var err error + m := make(models.FileuploadErrorSlice, number) + + for i := range m { + m[i], err = o.Create(ctx, exec) + if err != nil { + return nil, err + } + } + + return m, nil +} + +// MustCreateMany builds multiple fileuploadErrors and inserts them into the database +// Relations objects are also inserted and placed in the .R field +// panics if an error occurs +func (o FileuploadErrorTemplate) MustCreateMany(ctx context.Context, exec bob.Executor, number int) models.FileuploadErrorSlice { + m, err := o.CreateMany(ctx, exec, number) + if err != nil { + panic(err) + } + return m +} + +// CreateManyOrFail builds multiple fileuploadErrors and inserts them into the database +// Relations objects are also inserted and placed in the .R field +// It calls `tb.Fatal(err)` on the test/benchmark if an error occurs +func (o FileuploadErrorTemplate) CreateManyOrFail(ctx context.Context, tb testing.TB, exec bob.Executor, number int) models.FileuploadErrorSlice { + tb.Helper() + m, err := o.CreateMany(ctx, exec, number) + if err != nil { + tb.Fatal(err) 
+ return nil + } + return m +} + +// FileuploadError has methods that act as mods for the FileuploadErrorTemplate +var FileuploadErrorMods fileuploadErrorMods + +type fileuploadErrorMods struct{} + +func (m fileuploadErrorMods) RandomizeAllColumns(f *faker.Faker) FileuploadErrorMod { + return FileuploadErrorModSlice{ + FileuploadErrorMods.RandomFileID(f), + FileuploadErrorMods.RandomID(f), + FileuploadErrorMods.RandomLine(f), + FileuploadErrorMods.RandomMessage(f), + } +} + +// Set the model columns to this value +func (m fileuploadErrorMods) FileID(val int32) FileuploadErrorMod { + return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) { + o.FileID = func() int32 { return val } + }) +} + +// Set the Column from the function +func (m fileuploadErrorMods) FileIDFunc(f func() int32) FileuploadErrorMod { + return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) { + o.FileID = f + }) +} + +// Clear any values for the column +func (m fileuploadErrorMods) UnsetFileID() FileuploadErrorMod { + return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) { + o.FileID = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m fileuploadErrorMods) RandomFileID(f *faker.Faker) FileuploadErrorMod { + return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) { + o.FileID = func() int32 { + return random_int32(f) + } + }) +} + +// Set the model columns to this value +func (m fileuploadErrorMods) ID(val int32) FileuploadErrorMod { + return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) { + o.ID = func() int32 { return val } + }) +} + +// Set the Column from the function +func (m fileuploadErrorMods) IDFunc(f func() int32) FileuploadErrorMod { + return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) { + o.ID = f + }) +} + +// Clear any values for the 
column +func (m fileuploadErrorMods) UnsetID() FileuploadErrorMod { + return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) { + o.ID = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m fileuploadErrorMods) RandomID(f *faker.Faker) FileuploadErrorMod { + return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) { + o.ID = func() int32 { + return random_int32(f) + } + }) +} + +// Set the model columns to this value +func (m fileuploadErrorMods) Line(val int32) FileuploadErrorMod { + return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) { + o.Line = func() int32 { return val } + }) +} + +// Set the Column from the function +func (m fileuploadErrorMods) LineFunc(f func() int32) FileuploadErrorMod { + return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) { + o.Line = f + }) +} + +// Clear any values for the column +func (m fileuploadErrorMods) UnsetLine() FileuploadErrorMod { + return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) { + o.Line = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m fileuploadErrorMods) RandomLine(f *faker.Faker) FileuploadErrorMod { + return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) { + o.Line = func() int32 { + return random_int32(f) + } + }) +} + +// Set the model columns to this value +func (m fileuploadErrorMods) Message(val string) FileuploadErrorMod { + return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) { + o.Message = func() string { return val } + }) +} + +// Set the Column from the function +func (m fileuploadErrorMods) MessageFunc(f func() string) FileuploadErrorMod { + return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) { + o.Message = f + 
}) +} + +// Clear any values for the column +func (m fileuploadErrorMods) UnsetMessage() FileuploadErrorMod { + return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) { + o.Message = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m fileuploadErrorMods) RandomMessage(f *faker.Faker) FileuploadErrorMod { + return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) { + o.Message = func() string { + return random_string(f) + } + }) +} + +func (m fileuploadErrorMods) WithParentsCascading() FileuploadErrorMod { + return FileuploadErrorModFunc(func(ctx context.Context, o *FileuploadErrorTemplate) { + if isDone, _ := fileuploadErrorWithParentsCascadingCtx.Value(ctx); isDone { + return + } + ctx = fileuploadErrorWithParentsCascadingCtx.WithValue(ctx, true) + { + + related := o.f.NewFileuploadFileWithContext(ctx, FileuploadFileMods.WithParentsCascading()) + m.WithFile(related).Apply(ctx, o) + } + }) +} + +func (m fileuploadErrorMods) WithFile(rel *FileuploadFileTemplate) FileuploadErrorMod { + return FileuploadErrorModFunc(func(ctx context.Context, o *FileuploadErrorTemplate) { + o.r.File = &fileuploadErrorRFileR{ + o: rel, + } + }) +} + +func (m fileuploadErrorMods) WithNewFile(mods ...FileuploadFileMod) FileuploadErrorMod { + return FileuploadErrorModFunc(func(ctx context.Context, o *FileuploadErrorTemplate) { + related := o.f.NewFileuploadFileWithContext(ctx, mods...) 
+ + m.WithFile(related).Apply(ctx, o) + }) +} + +func (m fileuploadErrorMods) WithExistingFile(em *models.FileuploadFile) FileuploadErrorMod { + return FileuploadErrorModFunc(func(ctx context.Context, o *FileuploadErrorTemplate) { + o.r.File = &fileuploadErrorRFileR{ + o: o.f.FromExistingFileuploadFile(em), + } + }) +} + +func (m fileuploadErrorMods) WithoutFile() FileuploadErrorMod { + return FileuploadErrorModFunc(func(ctx context.Context, o *FileuploadErrorTemplate) { + o.r.File = nil + }) +} diff --git a/db/factory/fileupload.file.bob.go b/db/factory/fileupload.file.bob.go new file mode 100644 index 00000000..7390b2cc --- /dev/null +++ b/db/factory/fileupload.file.bob.go @@ -0,0 +1,847 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package factory + +import ( + "context" + "testing" + "time" + + "github.com/Gleipnir-Technology/bob" + enums "github.com/Gleipnir-Technology/nidus-sync/db/enums" + models "github.com/Gleipnir-Technology/nidus-sync/db/models" + "github.com/aarondl/opt/null" + "github.com/aarondl/opt/omit" + "github.com/aarondl/opt/omitnull" + "github.com/google/uuid" + "github.com/jaswdr/faker/v2" +) + +type FileuploadFileMod interface { + Apply(context.Context, *FileuploadFileTemplate) +} + +type FileuploadFileModFunc func(context.Context, *FileuploadFileTemplate) + +func (f FileuploadFileModFunc) Apply(ctx context.Context, n *FileuploadFileTemplate) { + f(ctx, n) +} + +type FileuploadFileModSlice []FileuploadFileMod + +func (mods FileuploadFileModSlice) Apply(ctx context.Context, n *FileuploadFileTemplate) { + for _, f := range mods { + f.Apply(ctx, n) + } +} + +// FileuploadFileTemplate is an object representing the database table. 
+// all columns are optional and should be set by mods +type FileuploadFileTemplate struct { + ID func() int32 + ContentType func() string + Created func() time.Time + CreatorID func() int32 + Deleted func() null.Val[time.Time] + Name func() string + Status func() enums.FileuploadFilestatustype + SizeBytes func() int32 + FileUUID func() uuid.UUID + + r fileuploadFileR + f *Factory + + alreadyPersisted bool +} + +type fileuploadFileR struct { + CSV *fileuploadFileRCSVR + Errors []*fileuploadFileRErrorsR + CreatorUser *fileuploadFileRCreatorUserR +} + +type fileuploadFileRCSVR struct { + o *FileuploadCSVTemplate +} +type fileuploadFileRErrorsR struct { + number int + o *FileuploadErrorTemplate +} +type fileuploadFileRCreatorUserR struct { + o *UserTemplate +} + +// Apply mods to the FileuploadFileTemplate +func (o *FileuploadFileTemplate) Apply(ctx context.Context, mods ...FileuploadFileMod) { + for _, mod := range mods { + mod.Apply(ctx, o) + } +} + +// setModelRels creates and sets the relationships on *models.FileuploadFile +// according to the relationships in the template. Nothing is inserted into the db +func (t FileuploadFileTemplate) setModelRels(o *models.FileuploadFile) { + if t.r.CSV != nil { + rel := t.r.CSV.o.Build() + rel.R.File = o + rel.FileID = o.ID // h2 + o.R.CSV = rel + } + + if t.r.Errors != nil { + rel := models.FileuploadErrorSlice{} + for _, r := range t.r.Errors { + related := r.o.BuildMany(r.number) + for _, rel := range related { + rel.FileID = o.ID // h2 + rel.R.File = o + } + rel = append(rel, related...) 
+ } + o.R.Errors = rel + } + + if t.r.CreatorUser != nil { + rel := t.r.CreatorUser.o.Build() + rel.R.CreatorFiles = append(rel.R.CreatorFiles, o) + o.CreatorID = rel.ID // h2 + o.R.CreatorUser = rel + } +} + +// BuildSetter returns an *models.FileuploadFileSetter +// this does nothing with the relationship templates +func (o FileuploadFileTemplate) BuildSetter() *models.FileuploadFileSetter { + m := &models.FileuploadFileSetter{} + + if o.ID != nil { + val := o.ID() + m.ID = omit.From(val) + } + if o.ContentType != nil { + val := o.ContentType() + m.ContentType = omit.From(val) + } + if o.Created != nil { + val := o.Created() + m.Created = omit.From(val) + } + if o.CreatorID != nil { + val := o.CreatorID() + m.CreatorID = omit.From(val) + } + if o.Deleted != nil { + val := o.Deleted() + m.Deleted = omitnull.FromNull(val) + } + if o.Name != nil { + val := o.Name() + m.Name = omit.From(val) + } + if o.Status != nil { + val := o.Status() + m.Status = omit.From(val) + } + if o.SizeBytes != nil { + val := o.SizeBytes() + m.SizeBytes = omit.From(val) + } + if o.FileUUID != nil { + val := o.FileUUID() + m.FileUUID = omit.From(val) + } + + return m +} + +// BuildManySetter returns an []*models.FileuploadFileSetter +// this does nothing with the relationship templates +func (o FileuploadFileTemplate) BuildManySetter(number int) []*models.FileuploadFileSetter { + m := make([]*models.FileuploadFileSetter, number) + + for i := range m { + m[i] = o.BuildSetter() + } + + return m +} + +// Build returns an *models.FileuploadFile +// Related objects are also created and placed in the .R field +// NOTE: Objects are not inserted into the database. 
Use FileuploadFileTemplate.Create +func (o FileuploadFileTemplate) Build() *models.FileuploadFile { + m := &models.FileuploadFile{} + + if o.ID != nil { + m.ID = o.ID() + } + if o.ContentType != nil { + m.ContentType = o.ContentType() + } + if o.Created != nil { + m.Created = o.Created() + } + if o.CreatorID != nil { + m.CreatorID = o.CreatorID() + } + if o.Deleted != nil { + m.Deleted = o.Deleted() + } + if o.Name != nil { + m.Name = o.Name() + } + if o.Status != nil { + m.Status = o.Status() + } + if o.SizeBytes != nil { + m.SizeBytes = o.SizeBytes() + } + if o.FileUUID != nil { + m.FileUUID = o.FileUUID() + } + + o.setModelRels(m) + + return m +} + +// BuildMany returns an models.FileuploadFileSlice +// Related objects are also created and placed in the .R field +// NOTE: Objects are not inserted into the database. Use FileuploadFileTemplate.CreateMany +func (o FileuploadFileTemplate) BuildMany(number int) models.FileuploadFileSlice { + m := make(models.FileuploadFileSlice, number) + + for i := range m { + m[i] = o.Build() + } + + return m +} + +func ensureCreatableFileuploadFile(m *models.FileuploadFileSetter) { + if !(m.ContentType.IsValue()) { + val := random_string(nil) + m.ContentType = omit.From(val) + } + if !(m.Created.IsValue()) { + val := random_time_Time(nil) + m.Created = omit.From(val) + } + if !(m.CreatorID.IsValue()) { + val := random_int32(nil) + m.CreatorID = omit.From(val) + } + if !(m.Name.IsValue()) { + val := random_string(nil) + m.Name = omit.From(val) + } + if !(m.Status.IsValue()) { + val := random_enums_FileuploadFilestatustype(nil) + m.Status = omit.From(val) + } + if !(m.SizeBytes.IsValue()) { + val := random_int32(nil) + m.SizeBytes = omit.From(val) + } + if !(m.FileUUID.IsValue()) { + val := random_uuid_UUID(nil) + m.FileUUID = omit.From(val) + } +} + +// insertOptRels creates and inserts any optional the relationships on *models.FileuploadFile +// according to the relationships in the template. 
+// any required relationship should have already exist on the model +func (o *FileuploadFileTemplate) insertOptRels(ctx context.Context, exec bob.Executor, m *models.FileuploadFile) error { + var err error + + isCSVDone, _ := fileuploadFileRelCSVCtx.Value(ctx) + if !isCSVDone && o.r.CSV != nil { + ctx = fileuploadFileRelCSVCtx.WithValue(ctx, true) + if o.r.CSV.o.alreadyPersisted { + m.R.CSV = o.r.CSV.o.Build() + } else { + var rel0 *models.FileuploadCSV + rel0, err = o.r.CSV.o.Create(ctx, exec) + if err != nil { + return err + } + err = m.AttachCSV(ctx, exec, rel0) + if err != nil { + return err + } + } + + } + + isErrorsDone, _ := fileuploadFileRelErrorsCtx.Value(ctx) + if !isErrorsDone && o.r.Errors != nil { + ctx = fileuploadFileRelErrorsCtx.WithValue(ctx, true) + for _, r := range o.r.Errors { + if r.o.alreadyPersisted { + m.R.Errors = append(m.R.Errors, r.o.Build()) + } else { + rel1, err := r.o.CreateMany(ctx, exec, r.number) + if err != nil { + return err + } + + err = m.AttachErrors(ctx, exec, rel1...) 
+ if err != nil { + return err + } + } + } + } + + return err +} + +// Create builds a fileuploadFile and inserts it into the database +// Relations objects are also inserted and placed in the .R field +func (o *FileuploadFileTemplate) Create(ctx context.Context, exec bob.Executor) (*models.FileuploadFile, error) { + var err error + opt := o.BuildSetter() + ensureCreatableFileuploadFile(opt) + + if o.r.CreatorUser == nil { + FileuploadFileMods.WithNewCreatorUser().Apply(ctx, o) + } + + var rel2 *models.User + + if o.r.CreatorUser.o.alreadyPersisted { + rel2 = o.r.CreatorUser.o.Build() + } else { + rel2, err = o.r.CreatorUser.o.Create(ctx, exec) + if err != nil { + return nil, err + } + } + + opt.CreatorID = omit.From(rel2.ID) + + m, err := models.FileuploadFiles.Insert(opt).One(ctx, exec) + if err != nil { + return nil, err + } + + m.R.CreatorUser = rel2 + + if err := o.insertOptRels(ctx, exec, m); err != nil { + return nil, err + } + return m, err +} + +// MustCreate builds a fileuploadFile and inserts it into the database +// Relations objects are also inserted and placed in the .R field +// panics if an error occurs +func (o *FileuploadFileTemplate) MustCreate(ctx context.Context, exec bob.Executor) *models.FileuploadFile { + m, err := o.Create(ctx, exec) + if err != nil { + panic(err) + } + return m +} + +// CreateOrFail builds a fileuploadFile and inserts it into the database +// Relations objects are also inserted and placed in the .R field +// It calls `tb.Fatal(err)` on the test/benchmark if an error occurs +func (o *FileuploadFileTemplate) CreateOrFail(ctx context.Context, tb testing.TB, exec bob.Executor) *models.FileuploadFile { + tb.Helper() + m, err := o.Create(ctx, exec) + if err != nil { + tb.Fatal(err) + return nil + } + return m +} + +// CreateMany builds multiple fileuploadFiles and inserts them into the database +// Relations objects are also inserted and placed in the .R field +func (o FileuploadFileTemplate) CreateMany(ctx context.Context, exec 
bob.Executor, number int) (models.FileuploadFileSlice, error) { + var err error + m := make(models.FileuploadFileSlice, number) + + for i := range m { + m[i], err = o.Create(ctx, exec) + if err != nil { + return nil, err + } + } + + return m, nil +} + +// MustCreateMany builds multiple fileuploadFiles and inserts them into the database +// Relations objects are also inserted and placed in the .R field +// panics if an error occurs +func (o FileuploadFileTemplate) MustCreateMany(ctx context.Context, exec bob.Executor, number int) models.FileuploadFileSlice { + m, err := o.CreateMany(ctx, exec, number) + if err != nil { + panic(err) + } + return m +} + +// CreateManyOrFail builds multiple fileuploadFiles and inserts them into the database +// Relations objects are also inserted and placed in the .R field +// It calls `tb.Fatal(err)` on the test/benchmark if an error occurs +func (o FileuploadFileTemplate) CreateManyOrFail(ctx context.Context, tb testing.TB, exec bob.Executor, number int) models.FileuploadFileSlice { + tb.Helper() + m, err := o.CreateMany(ctx, exec, number) + if err != nil { + tb.Fatal(err) + return nil + } + return m +} + +// FileuploadFile has methods that act as mods for the FileuploadFileTemplate +var FileuploadFileMods fileuploadFileMods + +type fileuploadFileMods struct{} + +func (m fileuploadFileMods) RandomizeAllColumns(f *faker.Faker) FileuploadFileMod { + return FileuploadFileModSlice{ + FileuploadFileMods.RandomID(f), + FileuploadFileMods.RandomContentType(f), + FileuploadFileMods.RandomCreated(f), + FileuploadFileMods.RandomCreatorID(f), + FileuploadFileMods.RandomDeleted(f), + FileuploadFileMods.RandomName(f), + FileuploadFileMods.RandomStatus(f), + FileuploadFileMods.RandomSizeBytes(f), + FileuploadFileMods.RandomFileUUID(f), + } +} + +// Set the model columns to this value +func (m fileuploadFileMods) ID(val int32) FileuploadFileMod { + return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) { + o.ID = func() 
int32 { return val } + }) +} + +// Set the Column from the function +func (m fileuploadFileMods) IDFunc(f func() int32) FileuploadFileMod { + return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) { + o.ID = f + }) +} + +// Clear any values for the column +func (m fileuploadFileMods) UnsetID() FileuploadFileMod { + return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) { + o.ID = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m fileuploadFileMods) RandomID(f *faker.Faker) FileuploadFileMod { + return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) { + o.ID = func() int32 { + return random_int32(f) + } + }) +} + +// Set the model columns to this value +func (m fileuploadFileMods) ContentType(val string) FileuploadFileMod { + return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) { + o.ContentType = func() string { return val } + }) +} + +// Set the Column from the function +func (m fileuploadFileMods) ContentTypeFunc(f func() string) FileuploadFileMod { + return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) { + o.ContentType = f + }) +} + +// Clear any values for the column +func (m fileuploadFileMods) UnsetContentType() FileuploadFileMod { + return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) { + o.ContentType = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m fileuploadFileMods) RandomContentType(f *faker.Faker) FileuploadFileMod { + return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) { + o.ContentType = func() string { + return random_string(f) + } + }) +} + +// Set the model columns to this value +func (m fileuploadFileMods) Created(val time.Time) FileuploadFileMod { + return FileuploadFileModFunc(func(_ 
context.Context, o *FileuploadFileTemplate) { + o.Created = func() time.Time { return val } + }) +} + +// Set the Column from the function +func (m fileuploadFileMods) CreatedFunc(f func() time.Time) FileuploadFileMod { + return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) { + o.Created = f + }) +} + +// Clear any values for the column +func (m fileuploadFileMods) UnsetCreated() FileuploadFileMod { + return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) { + o.Created = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m fileuploadFileMods) RandomCreated(f *faker.Faker) FileuploadFileMod { + return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) { + o.Created = func() time.Time { + return random_time_Time(f) + } + }) +} + +// Set the model columns to this value +func (m fileuploadFileMods) CreatorID(val int32) FileuploadFileMod { + return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) { + o.CreatorID = func() int32 { return val } + }) +} + +// Set the Column from the function +func (m fileuploadFileMods) CreatorIDFunc(f func() int32) FileuploadFileMod { + return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) { + o.CreatorID = f + }) +} + +// Clear any values for the column +func (m fileuploadFileMods) UnsetCreatorID() FileuploadFileMod { + return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) { + o.CreatorID = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m fileuploadFileMods) RandomCreatorID(f *faker.Faker) FileuploadFileMod { + return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) { + o.CreatorID = func() int32 { + return random_int32(f) + } + }) +} + +// Set the model columns to this value +func (m fileuploadFileMods) 
Deleted(val null.Val[time.Time]) FileuploadFileMod { + return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) { + o.Deleted = func() null.Val[time.Time] { return val } + }) +} + +// Set the Column from the function +func (m fileuploadFileMods) DeletedFunc(f func() null.Val[time.Time]) FileuploadFileMod { + return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) { + o.Deleted = f + }) +} + +// Clear any values for the column +func (m fileuploadFileMods) UnsetDeleted() FileuploadFileMod { + return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) { + o.Deleted = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +// The generated value is sometimes null +func (m fileuploadFileMods) RandomDeleted(f *faker.Faker) FileuploadFileMod { + return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) { + o.Deleted = func() null.Val[time.Time] { + if f == nil { + f = &defaultFaker + } + + val := random_time_Time(f) + return null.From(val) + } + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +// The generated value is never null +func (m fileuploadFileMods) RandomDeletedNotNull(f *faker.Faker) FileuploadFileMod { + return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) { + o.Deleted = func() null.Val[time.Time] { + if f == nil { + f = &defaultFaker + } + + val := random_time_Time(f) + return null.From(val) + } + }) +} + +// Set the model columns to this value +func (m fileuploadFileMods) Name(val string) FileuploadFileMod { + return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) { + o.Name = func() string { return val } + }) +} + +// Set the Column from the function +func (m fileuploadFileMods) NameFunc(f func() string) FileuploadFileMod { + return FileuploadFileModFunc(func(_ context.Context, 
o *FileuploadFileTemplate) { + o.Name = f + }) +} + +// Clear any values for the column +func (m fileuploadFileMods) UnsetName() FileuploadFileMod { + return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) { + o.Name = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m fileuploadFileMods) RandomName(f *faker.Faker) FileuploadFileMod { + return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) { + o.Name = func() string { + return random_string(f) + } + }) +} + +// Set the model columns to this value +func (m fileuploadFileMods) Status(val enums.FileuploadFilestatustype) FileuploadFileMod { + return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) { + o.Status = func() enums.FileuploadFilestatustype { return val } + }) +} + +// Set the Column from the function +func (m fileuploadFileMods) StatusFunc(f func() enums.FileuploadFilestatustype) FileuploadFileMod { + return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) { + o.Status = f + }) +} + +// Clear any values for the column +func (m fileuploadFileMods) UnsetStatus() FileuploadFileMod { + return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) { + o.Status = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m fileuploadFileMods) RandomStatus(f *faker.Faker) FileuploadFileMod { + return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) { + o.Status = func() enums.FileuploadFilestatustype { + return random_enums_FileuploadFilestatustype(f) + } + }) +} + +// Set the model columns to this value +func (m fileuploadFileMods) SizeBytes(val int32) FileuploadFileMod { + return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) { + o.SizeBytes = func() int32 { return val } + }) +} + +// Set the 
Column from the function +func (m fileuploadFileMods) SizeBytesFunc(f func() int32) FileuploadFileMod { + return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) { + o.SizeBytes = f + }) +} + +// Clear any values for the column +func (m fileuploadFileMods) UnsetSizeBytes() FileuploadFileMod { + return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) { + o.SizeBytes = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m fileuploadFileMods) RandomSizeBytes(f *faker.Faker) FileuploadFileMod { + return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) { + o.SizeBytes = func() int32 { + return random_int32(f) + } + }) +} + +// Set the model columns to this value +func (m fileuploadFileMods) FileUUID(val uuid.UUID) FileuploadFileMod { + return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) { + o.FileUUID = func() uuid.UUID { return val } + }) +} + +// Set the Column from the function +func (m fileuploadFileMods) FileUUIDFunc(f func() uuid.UUID) FileuploadFileMod { + return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) { + o.FileUUID = f + }) +} + +// Clear any values for the column +func (m fileuploadFileMods) UnsetFileUUID() FileuploadFileMod { + return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) { + o.FileUUID = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m fileuploadFileMods) RandomFileUUID(f *faker.Faker) FileuploadFileMod { + return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) { + o.FileUUID = func() uuid.UUID { + return random_uuid_UUID(f) + } + }) +} + +func (m fileuploadFileMods) WithParentsCascading() FileuploadFileMod { + return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) { + if isDone, 
_ := fileuploadFileWithParentsCascadingCtx.Value(ctx); isDone { + return + } + ctx = fileuploadFileWithParentsCascadingCtx.WithValue(ctx, true) + { + + related := o.f.NewFileuploadCSVWithContext(ctx, FileuploadCSVMods.WithParentsCascading()) + m.WithCSV(related).Apply(ctx, o) + } + { + + related := o.f.NewUserWithContext(ctx, UserMods.WithParentsCascading()) + m.WithCreatorUser(related).Apply(ctx, o) + } + }) +} + +func (m fileuploadFileMods) WithCSV(rel *FileuploadCSVTemplate) FileuploadFileMod { + return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) { + o.r.CSV = &fileuploadFileRCSVR{ + o: rel, + } + }) +} + +func (m fileuploadFileMods) WithNewCSV(mods ...FileuploadCSVMod) FileuploadFileMod { + return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) { + related := o.f.NewFileuploadCSVWithContext(ctx, mods...) + + m.WithCSV(related).Apply(ctx, o) + }) +} + +func (m fileuploadFileMods) WithExistingCSV(em *models.FileuploadCSV) FileuploadFileMod { + return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) { + o.r.CSV = &fileuploadFileRCSVR{ + o: o.f.FromExistingFileuploadCSV(em), + } + }) +} + +func (m fileuploadFileMods) WithoutCSV() FileuploadFileMod { + return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) { + o.r.CSV = nil + }) +} + +func (m fileuploadFileMods) WithCreatorUser(rel *UserTemplate) FileuploadFileMod { + return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) { + o.r.CreatorUser = &fileuploadFileRCreatorUserR{ + o: rel, + } + }) +} + +func (m fileuploadFileMods) WithNewCreatorUser(mods ...UserMod) FileuploadFileMod { + return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) { + related := o.f.NewUserWithContext(ctx, mods...) 
+ + m.WithCreatorUser(related).Apply(ctx, o) + }) +} + +func (m fileuploadFileMods) WithExistingCreatorUser(em *models.User) FileuploadFileMod { + return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) { + o.r.CreatorUser = &fileuploadFileRCreatorUserR{ + o: o.f.FromExistingUser(em), + } + }) +} + +func (m fileuploadFileMods) WithoutCreatorUser() FileuploadFileMod { + return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) { + o.r.CreatorUser = nil + }) +} + +func (m fileuploadFileMods) WithErrors(number int, related *FileuploadErrorTemplate) FileuploadFileMod { + return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) { + o.r.Errors = []*fileuploadFileRErrorsR{{ + number: number, + o: related, + }} + }) +} + +func (m fileuploadFileMods) WithNewErrors(number int, mods ...FileuploadErrorMod) FileuploadFileMod { + return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) { + related := o.f.NewFileuploadErrorWithContext(ctx, mods...) + m.WithErrors(number, related).Apply(ctx, o) + }) +} + +func (m fileuploadFileMods) AddErrors(number int, related *FileuploadErrorTemplate) FileuploadFileMod { + return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) { + o.r.Errors = append(o.r.Errors, &fileuploadFileRErrorsR{ + number: number, + o: related, + }) + }) +} + +func (m fileuploadFileMods) AddNewErrors(number int, mods ...FileuploadErrorMod) FileuploadFileMod { + return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) { + related := o.f.NewFileuploadErrorWithContext(ctx, mods...) 
+ m.AddErrors(number, related).Apply(ctx, o) + }) +} + +func (m fileuploadFileMods) AddExistingErrors(existingModels ...*models.FileuploadError) FileuploadFileMod { + return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) { + for _, em := range existingModels { + o.r.Errors = append(o.r.Errors, &fileuploadFileRErrorsR{ + o: o.f.FromExistingFileuploadError(em), + }) + } + }) +} + +func (m fileuploadFileMods) WithoutErrors() FileuploadFileMod { + return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) { + o.r.Errors = nil + }) +} diff --git a/db/factory/user_.bob.go b/db/factory/user_.bob.go index 5112ee5b..df65186b 100644 --- a/db/factory/user_.bob.go +++ b/db/factory/user_.bob.go @@ -59,6 +59,7 @@ type UserTemplate struct { type userR struct { PublicUserUser []*userRPublicUserUserR + CreatorFiles []*userRCreatorFilesR CreatorNoteAudios []*userRCreatorNoteAudiosR DeletorNoteAudios []*userRDeletorNoteAudiosR CreatorNoteImages []*userRCreatorNoteImagesR @@ -72,6 +73,10 @@ type userRPublicUserUserR struct { number int o *ArcgisUserTemplate } +type userRCreatorFilesR struct { + number int + o *FileuploadFileTemplate +} type userRCreatorNoteAudiosR struct { number int o *NoteAudioTemplate @@ -123,6 +128,19 @@ func (t UserTemplate) setModelRels(o *models.User) { o.R.PublicUserUser = rel } + if t.r.CreatorFiles != nil { + rel := models.FileuploadFileSlice{} + for _, r := range t.r.CreatorFiles { + related := r.o.BuildMany(r.number) + for _, rel := range related { + rel.CreatorID = o.ID // h2 + rel.R.CreatorUser = o + } + rel = append(rel, related...) 
+ } + o.R.CreatorFiles = rel + } + if t.r.CreatorNoteAudios != nil { rel := models.NoteAudioSlice{} for _, r := range t.r.CreatorNoteAudios { @@ -388,6 +406,26 @@ func (o *UserTemplate) insertOptRels(ctx context.Context, exec bob.Executor, m * } } + isCreatorFilesDone, _ := userRelCreatorFilesCtx.Value(ctx) + if !isCreatorFilesDone && o.r.CreatorFiles != nil { + ctx = userRelCreatorFilesCtx.WithValue(ctx, true) + for _, r := range o.r.CreatorFiles { + if r.o.alreadyPersisted { + m.R.CreatorFiles = append(m.R.CreatorFiles, r.o.Build()) + } else { + rel1, err := r.o.CreateMany(ctx, exec, r.number) + if err != nil { + return err + } + + err = m.AttachCreatorFiles(ctx, exec, rel1...) + if err != nil { + return err + } + } + } + } + isCreatorNoteAudiosDone, _ := userRelCreatorNoteAudiosCtx.Value(ctx) if !isCreatorNoteAudiosDone && o.r.CreatorNoteAudios != nil { ctx = userRelCreatorNoteAudiosCtx.WithValue(ctx, true) @@ -395,12 +433,12 @@ func (o *UserTemplate) insertOptRels(ctx context.Context, exec bob.Executor, m * if r.o.alreadyPersisted { m.R.CreatorNoteAudios = append(m.R.CreatorNoteAudios, r.o.Build()) } else { - rel1, err := r.o.CreateMany(ctx, exec, r.number) + rel2, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachCreatorNoteAudios(ctx, exec, rel1...) + err = m.AttachCreatorNoteAudios(ctx, exec, rel2...) if err != nil { return err } @@ -415,12 +453,12 @@ func (o *UserTemplate) insertOptRels(ctx context.Context, exec bob.Executor, m * if r.o.alreadyPersisted { m.R.DeletorNoteAudios = append(m.R.DeletorNoteAudios, r.o.Build()) } else { - rel2, err := r.o.CreateMany(ctx, exec, r.number) + rel3, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachDeletorNoteAudios(ctx, exec, rel2...) + err = m.AttachDeletorNoteAudios(ctx, exec, rel3...) 
if err != nil { return err } @@ -435,12 +473,12 @@ func (o *UserTemplate) insertOptRels(ctx context.Context, exec bob.Executor, m * if r.o.alreadyPersisted { m.R.CreatorNoteImages = append(m.R.CreatorNoteImages, r.o.Build()) } else { - rel3, err := r.o.CreateMany(ctx, exec, r.number) + rel4, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachCreatorNoteImages(ctx, exec, rel3...) + err = m.AttachCreatorNoteImages(ctx, exec, rel4...) if err != nil { return err } @@ -455,12 +493,12 @@ func (o *UserTemplate) insertOptRels(ctx context.Context, exec bob.Executor, m * if r.o.alreadyPersisted { m.R.DeletorNoteImages = append(m.R.DeletorNoteImages, r.o.Build()) } else { - rel4, err := r.o.CreateMany(ctx, exec, r.number) + rel5, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachDeletorNoteImages(ctx, exec, rel4...) + err = m.AttachDeletorNoteImages(ctx, exec, rel5...) if err != nil { return err } @@ -475,12 +513,12 @@ func (o *UserTemplate) insertOptRels(ctx context.Context, exec bob.Executor, m * if r.o.alreadyPersisted { m.R.UserNotifications = append(m.R.UserNotifications, r.o.Build()) } else { - rel5, err := r.o.CreateMany(ctx, exec, r.number) + rel6, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachUserNotifications(ctx, exec, rel5...) + err = m.AttachUserNotifications(ctx, exec, rel6...) if err != nil { return err } @@ -495,12 +533,12 @@ func (o *UserTemplate) insertOptRels(ctx context.Context, exec bob.Executor, m * if r.o.alreadyPersisted { m.R.UserOauthTokens = append(m.R.UserOauthTokens, r.o.Build()) } else { - rel6, err := r.o.CreateMany(ctx, exec, r.number) + rel7, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachUserOauthTokens(ctx, exec, rel6...) + err = m.AttachUserOauthTokens(ctx, exec, rel7...) 
if err != nil { return err } @@ -522,25 +560,25 @@ func (o *UserTemplate) Create(ctx context.Context, exec bob.Executor) (*models.U UserMods.WithNewOrganization().Apply(ctx, o) } - var rel7 *models.Organization + var rel8 *models.Organization if o.r.Organization.o.alreadyPersisted { - rel7 = o.r.Organization.o.Build() + rel8 = o.r.Organization.o.Build() } else { - rel7, err = o.r.Organization.o.Create(ctx, exec) + rel8, err = o.r.Organization.o.Create(ctx, exec) if err != nil { return nil, err } } - opt.OrganizationID = omit.From(rel7.ID) + opt.OrganizationID = omit.From(rel8.ID) m, err := models.Users.Insert(opt).One(ctx, exec) if err != nil { return nil, err } - m.R.Organization = rel7 + m.R.Organization = rel8 if err := o.insertOptRels(ctx, exec, m); err != nil { return nil, err @@ -1230,6 +1268,54 @@ func (m userMods) WithoutPublicUserUser() UserMod { }) } +func (m userMods) WithCreatorFiles(number int, related *FileuploadFileTemplate) UserMod { + return UserModFunc(func(ctx context.Context, o *UserTemplate) { + o.r.CreatorFiles = []*userRCreatorFilesR{{ + number: number, + o: related, + }} + }) +} + +func (m userMods) WithNewCreatorFiles(number int, mods ...FileuploadFileMod) UserMod { + return UserModFunc(func(ctx context.Context, o *UserTemplate) { + related := o.f.NewFileuploadFileWithContext(ctx, mods...) + m.WithCreatorFiles(number, related).Apply(ctx, o) + }) +} + +func (m userMods) AddCreatorFiles(number int, related *FileuploadFileTemplate) UserMod { + return UserModFunc(func(ctx context.Context, o *UserTemplate) { + o.r.CreatorFiles = append(o.r.CreatorFiles, &userRCreatorFilesR{ + number: number, + o: related, + }) + }) +} + +func (m userMods) AddNewCreatorFiles(number int, mods ...FileuploadFileMod) UserMod { + return UserModFunc(func(ctx context.Context, o *UserTemplate) { + related := o.f.NewFileuploadFileWithContext(ctx, mods...) 
+ m.AddCreatorFiles(number, related).Apply(ctx, o) + }) +} + +func (m userMods) AddExistingCreatorFiles(existingModels ...*models.FileuploadFile) UserMod { + return UserModFunc(func(ctx context.Context, o *UserTemplate) { + for _, em := range existingModels { + o.r.CreatorFiles = append(o.r.CreatorFiles, &userRCreatorFilesR{ + o: o.f.FromExistingFileuploadFile(em), + }) + } + }) +} + +func (m userMods) WithoutCreatorFiles() UserMod { + return UserModFunc(func(ctx context.Context, o *UserTemplate) { + o.r.CreatorFiles = nil + }) +} + func (m userMods) WithCreatorNoteAudios(number int, related *NoteAudioTemplate) UserMod { return UserModFunc(func(ctx context.Context, o *UserTemplate) { o.r.CreatorNoteAudios = []*userRCreatorNoteAudiosR{{ diff --git a/db/models/bob_counts.bob.go b/db/models/bob_counts.bob.go index d8760c06..9d8d7027 100644 --- a/db/models/bob_counts.bob.go +++ b/db/models/bob_counts.bob.go @@ -25,6 +25,7 @@ type preloadCounts struct { CommsEmailContact commsEmailContactCountPreloader CommsEmailTemplate commsEmailTemplateCountPreloader CommsPhone commsPhoneCountPreloader + FileuploadFile fileuploadFileCountPreloader NoteAudio noteAudioCountPreloader NoteImage noteImageCountPreloader Organization organizationCountPreloader @@ -41,6 +42,7 @@ func getPreloadCount() preloadCounts { CommsEmailContact: buildCommsEmailContactCountPreloader(), CommsEmailTemplate: buildCommsEmailTemplateCountPreloader(), CommsPhone: buildCommsPhoneCountPreloader(), + FileuploadFile: buildFileuploadFileCountPreloader(), NoteAudio: buildNoteAudioCountPreloader(), NoteImage: buildNoteImageCountPreloader(), Organization: buildOrganizationCountPreloader(), @@ -57,6 +59,7 @@ type thenLoadCounts[Q orm.Loadable] struct { CommsEmailContact commsEmailContactCountThenLoader[Q] CommsEmailTemplate commsEmailTemplateCountThenLoader[Q] CommsPhone commsPhoneCountThenLoader[Q] + FileuploadFile fileuploadFileCountThenLoader[Q] NoteAudio noteAudioCountThenLoader[Q] NoteImage 
noteImageCountThenLoader[Q] Organization organizationCountThenLoader[Q] @@ -73,6 +76,7 @@ func getThenLoadCount[Q orm.Loadable]() thenLoadCounts[Q] { CommsEmailContact: buildCommsEmailContactCountThenLoader[Q](), CommsEmailTemplate: buildCommsEmailTemplateCountThenLoader[Q](), CommsPhone: buildCommsPhoneCountThenLoader[Q](), + FileuploadFile: buildFileuploadFileCountThenLoader[Q](), NoteAudio: buildNoteAudioCountThenLoader[Q](), NoteImage: buildNoteImageCountThenLoader[Q](), Organization: buildOrganizationCountThenLoader[Q](), diff --git a/db/models/bob_joins.bob.go b/db/models/bob_joins.bob.go index e49886fb..13bacc2e 100644 --- a/db/models/bob_joins.bob.go +++ b/db/models/bob_joins.bob.go @@ -70,6 +70,9 @@ type joins[Q dialect.Joinable] struct { FieldseekerZones joinSet[fieldseekerZoneJoins[Q]] FieldseekerZones2s joinSet[fieldseekerZones2Joins[Q]] FieldseekerSyncs joinSet[fieldseekerSyncJoins[Q]] + FileuploadCSVS joinSet[fileuploadCSVJoins[Q]] + FileuploadErrors joinSet[fileuploadErrorJoins[Q]] + FileuploadFiles joinSet[fileuploadFileJoins[Q]] H3Aggregations joinSet[h3AggregationJoins[Q]] ImportDistricts joinSet[importDistrictJoins[Q]] NoteAudios joinSet[noteAudioJoins[Q]] @@ -144,6 +147,9 @@ func getJoins[Q dialect.Joinable]() joins[Q] { FieldseekerZones: buildJoinSet[fieldseekerZoneJoins[Q]](FieldseekerZones.Columns, buildFieldseekerZoneJoins), FieldseekerZones2s: buildJoinSet[fieldseekerZones2Joins[Q]](FieldseekerZones2s.Columns, buildFieldseekerZones2Joins), FieldseekerSyncs: buildJoinSet[fieldseekerSyncJoins[Q]](FieldseekerSyncs.Columns, buildFieldseekerSyncJoins), + FileuploadCSVS: buildJoinSet[fileuploadCSVJoins[Q]](FileuploadCSVS.Columns, buildFileuploadCSVJoins), + FileuploadErrors: buildJoinSet[fileuploadErrorJoins[Q]](FileuploadErrors.Columns, buildFileuploadErrorJoins), + FileuploadFiles: buildJoinSet[fileuploadFileJoins[Q]](FileuploadFiles.Columns, buildFileuploadFileJoins), H3Aggregations: buildJoinSet[h3AggregationJoins[Q]](H3Aggregations.Columns, 
buildH3AggregationJoins), ImportDistricts: buildJoinSet[importDistrictJoins[Q]](ImportDistricts.Columns, buildImportDistrictJoins), NoteAudios: buildJoinSet[noteAudioJoins[Q]](NoteAudios.Columns, buildNoteAudioJoins), diff --git a/db/models/bob_loaders.bob.go b/db/models/bob_loaders.bob.go index bf7bd00d..914eefff 100644 --- a/db/models/bob_loaders.bob.go +++ b/db/models/bob_loaders.bob.go @@ -55,6 +55,9 @@ type preloaders struct { FieldseekerZone fieldseekerZonePreloader FieldseekerZones2 fieldseekerZones2Preloader FieldseekerSync fieldseekerSyncPreloader + FileuploadCSV fileuploadCSVPreloader + FileuploadError fileuploadErrorPreloader + FileuploadFile fileuploadFilePreloader H3Aggregation h3AggregationPreloader ImportDistrict importDistrictPreloader NoteAudio noteAudioPreloader @@ -121,6 +124,9 @@ func getPreloaders() preloaders { FieldseekerZone: buildFieldseekerZonePreloader(), FieldseekerZones2: buildFieldseekerZones2Preloader(), FieldseekerSync: buildFieldseekerSyncPreloader(), + FileuploadCSV: buildFileuploadCSVPreloader(), + FileuploadError: buildFileuploadErrorPreloader(), + FileuploadFile: buildFileuploadFilePreloader(), H3Aggregation: buildH3AggregationPreloader(), ImportDistrict: buildImportDistrictPreloader(), NoteAudio: buildNoteAudioPreloader(), @@ -193,6 +199,9 @@ type thenLoaders[Q orm.Loadable] struct { FieldseekerZone fieldseekerZoneThenLoader[Q] FieldseekerZones2 fieldseekerZones2ThenLoader[Q] FieldseekerSync fieldseekerSyncThenLoader[Q] + FileuploadCSV fileuploadCSVThenLoader[Q] + FileuploadError fileuploadErrorThenLoader[Q] + FileuploadFile fileuploadFileThenLoader[Q] H3Aggregation h3AggregationThenLoader[Q] ImportDistrict importDistrictThenLoader[Q] NoteAudio noteAudioThenLoader[Q] @@ -259,6 +268,9 @@ func getThenLoaders[Q orm.Loadable]() thenLoaders[Q] { FieldseekerZone: buildFieldseekerZoneThenLoader[Q](), FieldseekerZones2: buildFieldseekerZones2ThenLoader[Q](), FieldseekerSync: buildFieldseekerSyncThenLoader[Q](), + FileuploadCSV: 
buildFileuploadCSVThenLoader[Q](), + FileuploadError: buildFileuploadErrorThenLoader[Q](), + FileuploadFile: buildFileuploadFileThenLoader[Q](), H3Aggregation: buildH3AggregationThenLoader[Q](), ImportDistrict: buildImportDistrictThenLoader[Q](), NoteAudio: buildNoteAudioThenLoader[Q](), diff --git a/db/models/bob_where.bob.go b/db/models/bob_where.bob.go index 027167f9..453435f1 100644 --- a/db/models/bob_where.bob.go +++ b/db/models/bob_where.bob.go @@ -55,6 +55,9 @@ func Where[Q psql.Filterable]() struct { FieldseekerZones fieldseekerZoneWhere[Q] FieldseekerZones2s fieldseekerZones2Where[Q] FieldseekerSyncs fieldseekerSyncWhere[Q] + FileuploadCSVS fileuploadCSVWhere[Q] + FileuploadErrors fileuploadErrorWhere[Q] + FileuploadFiles fileuploadFileWhere[Q] GeographyColumns geographyColumnWhere[Q] GeometryColumns geometryColumnWhere[Q] GooseDBVersions gooseDBVersionWhere[Q] @@ -127,6 +130,9 @@ func Where[Q psql.Filterable]() struct { FieldseekerZones fieldseekerZoneWhere[Q] FieldseekerZones2s fieldseekerZones2Where[Q] FieldseekerSyncs fieldseekerSyncWhere[Q] + FileuploadCSVS fileuploadCSVWhere[Q] + FileuploadErrors fileuploadErrorWhere[Q] + FileuploadFiles fileuploadFileWhere[Q] GeographyColumns geographyColumnWhere[Q] GeometryColumns geometryColumnWhere[Q] GooseDBVersions gooseDBVersionWhere[Q] @@ -198,6 +204,9 @@ func Where[Q psql.Filterable]() struct { FieldseekerZones: buildFieldseekerZoneWhere[Q](FieldseekerZones.Columns), FieldseekerZones2s: buildFieldseekerZones2Where[Q](FieldseekerZones2s.Columns), FieldseekerSyncs: buildFieldseekerSyncWhere[Q](FieldseekerSyncs.Columns), + FileuploadCSVS: buildFileuploadCSVWhere[Q](FileuploadCSVS.Columns), + FileuploadErrors: buildFileuploadErrorWhere[Q](FileuploadErrors.Columns), + FileuploadFiles: buildFileuploadFileWhere[Q](FileuploadFiles.Columns), GeographyColumns: buildGeographyColumnWhere[Q](GeographyColumns.Columns), GeometryColumns: buildGeometryColumnWhere[Q](GeometryColumns.Columns), GooseDBVersions: 
buildGooseDBVersionWhere[Q](GooseDBVersions.Columns), diff --git a/db/models/fileupload.csv.bob.go b/db/models/fileupload.csv.bob.go new file mode 100644 index 00000000..01fa18f4 --- /dev/null +++ b/db/models/fileupload.csv.bob.go @@ -0,0 +1,603 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "fmt" + "io" + + "github.com/Gleipnir-Technology/bob" + "github.com/Gleipnir-Technology/bob/dialect/psql" + "github.com/Gleipnir-Technology/bob/dialect/psql/dialect" + "github.com/Gleipnir-Technology/bob/dialect/psql/dm" + "github.com/Gleipnir-Technology/bob/dialect/psql/sm" + "github.com/Gleipnir-Technology/bob/dialect/psql/um" + "github.com/Gleipnir-Technology/bob/expr" + "github.com/Gleipnir-Technology/bob/mods" + "github.com/Gleipnir-Technology/bob/orm" + "github.com/Gleipnir-Technology/bob/types/pgtypes" + enums "github.com/Gleipnir-Technology/nidus-sync/db/enums" + "github.com/aarondl/opt/omit" +) + +// FileuploadCSV is an object representing the database table. +type FileuploadCSV struct { + FileID int32 `db:"file_id,pk" ` + Type enums.FileuploadCsvtype `db:"type_" ` + + R fileuploadCSVR `db:"-" ` +} + +// FileuploadCSVSlice is an alias for a slice of pointers to FileuploadCSV. +// This should almost always be used instead of []*FileuploadCSV. +type FileuploadCSVSlice []*FileuploadCSV + +// FileuploadCSVS contains methods to work with the csv table +var FileuploadCSVS = psql.NewTablex[*FileuploadCSV, FileuploadCSVSlice, *FileuploadCSVSetter]("fileupload", "csv", buildFileuploadCSVColumns("fileupload.csv")) + +// FileuploadCSVSQuery is a query on the csv table +type FileuploadCSVSQuery = *psql.ViewQuery[*FileuploadCSV, FileuploadCSVSlice] + +// fileuploadCSVR is where relationships are stored. 
+type fileuploadCSVR struct { + File *FileuploadFile // fileupload.csv.csv_file_id_fkey +} + +func buildFileuploadCSVColumns(alias string) fileuploadCSVColumns { + return fileuploadCSVColumns{ + ColumnsExpr: expr.NewColumnsExpr( + "file_id", "type_", + ).WithParent("fileupload.csv"), + tableAlias: alias, + FileID: psql.Quote(alias, "file_id"), + Type: psql.Quote(alias, "type_"), + } +} + +type fileuploadCSVColumns struct { + expr.ColumnsExpr + tableAlias string + FileID psql.Expression + Type psql.Expression +} + +func (c fileuploadCSVColumns) Alias() string { + return c.tableAlias +} + +func (fileuploadCSVColumns) AliasedAs(alias string) fileuploadCSVColumns { + return buildFileuploadCSVColumns(alias) +} + +// FileuploadCSVSetter is used for insert/upsert/update operations +// All values are optional, and do not have to be set +// Generated columns are not included +type FileuploadCSVSetter struct { + FileID omit.Val[int32] `db:"file_id,pk" ` + Type omit.Val[enums.FileuploadCsvtype] `db:"type_" ` +} + +func (s FileuploadCSVSetter) SetColumns() []string { + vals := make([]string, 0, 2) + if s.FileID.IsValue() { + vals = append(vals, "file_id") + } + if s.Type.IsValue() { + vals = append(vals, "type_") + } + return vals +} + +func (s FileuploadCSVSetter) Overwrite(t *FileuploadCSV) { + if s.FileID.IsValue() { + t.FileID = s.FileID.MustGet() + } + if s.Type.IsValue() { + t.Type = s.Type.MustGet() + } +} + +func (s *FileuploadCSVSetter) Apply(q *dialect.InsertQuery) { + q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) { + return FileuploadCSVS.BeforeInsertHooks.RunHooks(ctx, exec, s) + }) + + q.AppendValues(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) { + vals := make([]bob.Expression, 2) + if s.FileID.IsValue() { + vals[0] = psql.Arg(s.FileID.MustGet()) + } else { + vals[0] = psql.Raw("DEFAULT") + } + + if s.Type.IsValue() { + vals[1] = psql.Arg(s.Type.MustGet()) + } else { 
+ vals[1] = psql.Raw("DEFAULT") + } + + return bob.ExpressSlice(ctx, w, d, start, vals, "", ", ", "") + })) +} + +func (s FileuploadCSVSetter) UpdateMod() bob.Mod[*dialect.UpdateQuery] { + return um.Set(s.Expressions()...) +} + +func (s FileuploadCSVSetter) Expressions(prefix ...string) []bob.Expression { + exprs := make([]bob.Expression, 0, 2) + + if s.FileID.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "file_id")...), + psql.Arg(s.FileID), + }}) + } + + if s.Type.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "type_")...), + psql.Arg(s.Type), + }}) + } + + return exprs +} + +// FindFileuploadCSV retrieves a single record by primary key +// If cols is empty Find will return all columns. +func FindFileuploadCSV(ctx context.Context, exec bob.Executor, FileIDPK int32, cols ...string) (*FileuploadCSV, error) { + if len(cols) == 0 { + return FileuploadCSVS.Query( + sm.Where(FileuploadCSVS.Columns.FileID.EQ(psql.Arg(FileIDPK))), + ).One(ctx, exec) + } + + return FileuploadCSVS.Query( + sm.Where(FileuploadCSVS.Columns.FileID.EQ(psql.Arg(FileIDPK))), + sm.Columns(FileuploadCSVS.Columns.Only(cols...)), + ).One(ctx, exec) +} + +// FileuploadCSVExists checks the presence of a single record by primary key +func FileuploadCSVExists(ctx context.Context, exec bob.Executor, FileIDPK int32) (bool, error) { + return FileuploadCSVS.Query( + sm.Where(FileuploadCSVS.Columns.FileID.EQ(psql.Arg(FileIDPK))), + ).Exists(ctx, exec) +} + +// AfterQueryHook is called after FileuploadCSV is retrieved from the database +func (o *FileuploadCSV) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error { + var err error + + switch queryType { + case bob.QueryTypeSelect: + ctx, err = FileuploadCSVS.AfterSelectHooks.RunHooks(ctx, exec, FileuploadCSVSlice{o}) + case bob.QueryTypeInsert: + ctx, err = 
FileuploadCSVS.AfterInsertHooks.RunHooks(ctx, exec, FileuploadCSVSlice{o}) + case bob.QueryTypeUpdate: + ctx, err = FileuploadCSVS.AfterUpdateHooks.RunHooks(ctx, exec, FileuploadCSVSlice{o}) + case bob.QueryTypeDelete: + ctx, err = FileuploadCSVS.AfterDeleteHooks.RunHooks(ctx, exec, FileuploadCSVSlice{o}) + } + + return err +} + +// primaryKeyVals returns the primary key values of the FileuploadCSV +func (o *FileuploadCSV) primaryKeyVals() bob.Expression { + return psql.Arg(o.FileID) +} + +func (o *FileuploadCSV) pkEQ() dialect.Expression { + return psql.Quote("fileupload.csv", "file_id").EQ(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) { + return o.primaryKeyVals().WriteSQL(ctx, w, d, start) + })) +} + +// Update uses an executor to update the FileuploadCSV +func (o *FileuploadCSV) Update(ctx context.Context, exec bob.Executor, s *FileuploadCSVSetter) error { + v, err := FileuploadCSVS.Update(s.UpdateMod(), um.Where(o.pkEQ())).One(ctx, exec) + if err != nil { + return err + } + + o.R = v.R + *o = *v + + return nil +} + +// Delete deletes a single FileuploadCSV record with an executor +func (o *FileuploadCSV) Delete(ctx context.Context, exec bob.Executor) error { + _, err := FileuploadCSVS.Delete(dm.Where(o.pkEQ())).Exec(ctx, exec) + return err +} + +// Reload refreshes the FileuploadCSV using the executor +func (o *FileuploadCSV) Reload(ctx context.Context, exec bob.Executor) error { + o2, err := FileuploadCSVS.Query( + sm.Where(FileuploadCSVS.Columns.FileID.EQ(psql.Arg(o.FileID))), + ).One(ctx, exec) + if err != nil { + return err + } + o2.R = o.R + *o = *o2 + + return nil +} + +// AfterQueryHook is called after FileuploadCSVSlice is retrieved from the database +func (o FileuploadCSVSlice) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error { + var err error + + switch queryType { + case bob.QueryTypeSelect: + ctx, err = FileuploadCSVS.AfterSelectHooks.RunHooks(ctx, exec, 
o) + case bob.QueryTypeInsert: + ctx, err = FileuploadCSVS.AfterInsertHooks.RunHooks(ctx, exec, o) + case bob.QueryTypeUpdate: + ctx, err = FileuploadCSVS.AfterUpdateHooks.RunHooks(ctx, exec, o) + case bob.QueryTypeDelete: + ctx, err = FileuploadCSVS.AfterDeleteHooks.RunHooks(ctx, exec, o) + } + + return err +} + +func (o FileuploadCSVSlice) pkIN() dialect.Expression { + if len(o) == 0 { + return psql.Raw("NULL") + } + + return psql.Quote("fileupload.csv", "file_id").In(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) { + pkPairs := make([]bob.Expression, len(o)) + for i, row := range o { + pkPairs[i] = row.primaryKeyVals() + } + return bob.ExpressSlice(ctx, w, d, start, pkPairs, "", ", ", "") + })) +} + +// copyMatchingRows finds models in the given slice that have the same primary key +// then it first copies the existing relationships from the old model to the new model +// and then replaces the old model in the slice with the new model +func (o FileuploadCSVSlice) copyMatchingRows(from ...*FileuploadCSV) { + for i, old := range o { + for _, new := range from { + if new.FileID != old.FileID { + continue + } + new.R = old.R + o[i] = new + break + } + } +} + +// UpdateMod modifies an update query with "WHERE primary_key IN (o...)" +func (o FileuploadCSVSlice) UpdateMod() bob.Mod[*dialect.UpdateQuery] { + return bob.ModFunc[*dialect.UpdateQuery](func(q *dialect.UpdateQuery) { + q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) { + return FileuploadCSVS.BeforeUpdateHooks.RunHooks(ctx, exec, o) + }) + + q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error { + var err error + switch retrieved := retrieved.(type) { + case *FileuploadCSV: + o.copyMatchingRows(retrieved) + case []*FileuploadCSV: + o.copyMatchingRows(retrieved...) + case FileuploadCSVSlice: + o.copyMatchingRows(retrieved...) 
+ default: + // If the retrieved value is not a FileuploadCSV or a slice of FileuploadCSV + // then run the AfterUpdateHooks on the slice + _, err = FileuploadCSVS.AfterUpdateHooks.RunHooks(ctx, exec, o) + } + + return err + })) + + q.AppendWhere(o.pkIN()) + }) +} + +// DeleteMod modifies an delete query with "WHERE primary_key IN (o...)" +func (o FileuploadCSVSlice) DeleteMod() bob.Mod[*dialect.DeleteQuery] { + return bob.ModFunc[*dialect.DeleteQuery](func(q *dialect.DeleteQuery) { + q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) { + return FileuploadCSVS.BeforeDeleteHooks.RunHooks(ctx, exec, o) + }) + + q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error { + var err error + switch retrieved := retrieved.(type) { + case *FileuploadCSV: + o.copyMatchingRows(retrieved) + case []*FileuploadCSV: + o.copyMatchingRows(retrieved...) + case FileuploadCSVSlice: + o.copyMatchingRows(retrieved...) + default: + // If the retrieved value is not a FileuploadCSV or a slice of FileuploadCSV + // then run the AfterDeleteHooks on the slice + _, err = FileuploadCSVS.AfterDeleteHooks.RunHooks(ctx, exec, o) + } + + return err + })) + + q.AppendWhere(o.pkIN()) + }) +} + +func (o FileuploadCSVSlice) UpdateAll(ctx context.Context, exec bob.Executor, vals FileuploadCSVSetter) error { + if len(o) == 0 { + return nil + } + + _, err := FileuploadCSVS.Update(vals.UpdateMod(), o.UpdateMod()).All(ctx, exec) + return err +} + +func (o FileuploadCSVSlice) DeleteAll(ctx context.Context, exec bob.Executor) error { + if len(o) == 0 { + return nil + } + + _, err := FileuploadCSVS.Delete(o.DeleteMod()).Exec(ctx, exec) + return err +} + +func (o FileuploadCSVSlice) ReloadAll(ctx context.Context, exec bob.Executor) error { + if len(o) == 0 { + return nil + } + + o2, err := FileuploadCSVS.Query(sm.Where(o.pkIN())).All(ctx, exec) + if err != nil { + return err + } + + o.copyMatchingRows(o2...) 
+ + return nil +} + +// File starts a query for related objects on fileupload.file +func (o *FileuploadCSV) File(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadFilesQuery { + return FileuploadFiles.Query(append(mods, + sm.Where(FileuploadFiles.Columns.ID.EQ(psql.Arg(o.FileID))), + )...) +} + +func (os FileuploadCSVSlice) File(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadFilesQuery { + pkFileID := make(pgtypes.Array[int32], 0, len(os)) + for _, o := range os { + if o == nil { + continue + } + pkFileID = append(pkFileID, o.FileID) + } + PKArgExpr := psql.Select(sm.Columns( + psql.F("unnest", psql.Cast(psql.Arg(pkFileID), "integer[]")), + )) + + return FileuploadFiles.Query(append(mods, + sm.Where(psql.Group(FileuploadFiles.Columns.ID).OP("IN", PKArgExpr)), + )...) +} + +func attachFileuploadCSVFile0(ctx context.Context, exec bob.Executor, count int, fileuploadCSV0 *FileuploadCSV, fileuploadFile1 *FileuploadFile) (*FileuploadCSV, error) { + setter := &FileuploadCSVSetter{ + FileID: omit.From(fileuploadFile1.ID), + } + + err := fileuploadCSV0.Update(ctx, exec, setter) + if err != nil { + return nil, fmt.Errorf("attachFileuploadCSVFile0: %w", err) + } + + return fileuploadCSV0, nil +} + +func (fileuploadCSV0 *FileuploadCSV) InsertFile(ctx context.Context, exec bob.Executor, related *FileuploadFileSetter) error { + var err error + + fileuploadFile1, err := FileuploadFiles.Insert(related).One(ctx, exec) + if err != nil { + return fmt.Errorf("inserting related objects: %w", err) + } + + _, err = attachFileuploadCSVFile0(ctx, exec, 1, fileuploadCSV0, fileuploadFile1) + if err != nil { + return err + } + + fileuploadCSV0.R.File = fileuploadFile1 + + fileuploadFile1.R.CSV = fileuploadCSV0 + + return nil +} + +func (fileuploadCSV0 *FileuploadCSV) AttachFile(ctx context.Context, exec bob.Executor, fileuploadFile1 *FileuploadFile) error { + var err error + + _, err = attachFileuploadCSVFile0(ctx, exec, 1, fileuploadCSV0, fileuploadFile1) + if err != nil { + return err + } 
+ + fileuploadCSV0.R.File = fileuploadFile1 + + fileuploadFile1.R.CSV = fileuploadCSV0 + + return nil +} + +type fileuploadCSVWhere[Q psql.Filterable] struct { + FileID psql.WhereMod[Q, int32] + Type psql.WhereMod[Q, enums.FileuploadCsvtype] +} + +func (fileuploadCSVWhere[Q]) AliasedAs(alias string) fileuploadCSVWhere[Q] { + return buildFileuploadCSVWhere[Q](buildFileuploadCSVColumns(alias)) +} + +func buildFileuploadCSVWhere[Q psql.Filterable](cols fileuploadCSVColumns) fileuploadCSVWhere[Q] { + return fileuploadCSVWhere[Q]{ + FileID: psql.Where[Q, int32](cols.FileID), + Type: psql.Where[Q, enums.FileuploadCsvtype](cols.Type), + } +} + +func (o *FileuploadCSV) Preload(name string, retrieved any) error { + if o == nil { + return nil + } + + switch name { + case "File": + rel, ok := retrieved.(*FileuploadFile) + if !ok { + return fmt.Errorf("fileuploadCSV cannot load %T as %q", retrieved, name) + } + + o.R.File = rel + + if rel != nil { + rel.R.CSV = o + } + return nil + default: + return fmt.Errorf("fileuploadCSV has no relationship %q", name) + } +} + +type fileuploadCSVPreloader struct { + File func(...psql.PreloadOption) psql.Preloader +} + +func buildFileuploadCSVPreloader() fileuploadCSVPreloader { + return fileuploadCSVPreloader{ + File: func(opts ...psql.PreloadOption) psql.Preloader { + return psql.Preload[*FileuploadFile, FileuploadFileSlice](psql.PreloadRel{ + Name: "File", + Sides: []psql.PreloadSide{ + { + From: FileuploadCSVS, + To: FileuploadFiles, + FromColumns: []string{"file_id"}, + ToColumns: []string{"id"}, + }, + }, + }, FileuploadFiles.Columns.Names(), opts...) 
+ }, + } +} + +type fileuploadCSVThenLoader[Q orm.Loadable] struct { + File func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] +} + +func buildFileuploadCSVThenLoader[Q orm.Loadable]() fileuploadCSVThenLoader[Q] { + type FileLoadInterface interface { + LoadFile(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error + } + + return fileuploadCSVThenLoader[Q]{ + File: thenLoadBuilder[Q]( + "File", + func(ctx context.Context, exec bob.Executor, retrieved FileLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { + return retrieved.LoadFile(ctx, exec, mods...) + }, + ), + } +} + +// LoadFile loads the fileuploadCSV's File into the .R struct +func (o *FileuploadCSV) LoadFile(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if o == nil { + return nil + } + + // Reset the relationship + o.R.File = nil + + related, err := o.File(mods...).One(ctx, exec) + if err != nil { + return err + } + + related.R.CSV = o + + o.R.File = related + return nil +} + +// LoadFile loads the fileuploadCSV's File into the .R struct +func (os FileuploadCSVSlice) LoadFile(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if len(os) == 0 { + return nil + } + + fileuploadFiles, err := os.File(mods...).All(ctx, exec) + if err != nil { + return err + } + + for _, o := range os { + if o == nil { + continue + } + + for _, rel := range fileuploadFiles { + + if !(o.FileID == rel.ID) { + continue + } + + rel.R.CSV = o + + o.R.File = rel + break + } + } + + return nil +} + +type fileuploadCSVJoins[Q dialect.Joinable] struct { + typ string + File modAs[Q, fileuploadFileColumns] +} + +func (j fileuploadCSVJoins[Q]) aliasedAs(alias string) fileuploadCSVJoins[Q] { + return buildFileuploadCSVJoins[Q](buildFileuploadCSVColumns(alias), j.typ) +} + +func buildFileuploadCSVJoins[Q dialect.Joinable](cols fileuploadCSVColumns, typ string) fileuploadCSVJoins[Q] { + return fileuploadCSVJoins[Q]{ + typ: typ, + File: 
modAs[Q, fileuploadFileColumns]{ + c: FileuploadFiles.Columns, + f: func(to fileuploadFileColumns) bob.Mod[Q] { + mods := make(mods.QueryMods[Q], 0, 1) + + { + mods = append(mods, dialect.Join[Q](typ, FileuploadFiles.Name().As(to.Alias())).On( + to.ID.EQ(cols.FileID), + )) + } + + return mods + }, + }, + } +} diff --git a/db/models/fileupload.error.bob.go b/db/models/fileupload.error.bob.go new file mode 100644 index 00000000..1170cedc --- /dev/null +++ b/db/models/fileupload.error.bob.go @@ -0,0 +1,652 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "fmt" + "io" + + "github.com/Gleipnir-Technology/bob" + "github.com/Gleipnir-Technology/bob/dialect/psql" + "github.com/Gleipnir-Technology/bob/dialect/psql/dialect" + "github.com/Gleipnir-Technology/bob/dialect/psql/dm" + "github.com/Gleipnir-Technology/bob/dialect/psql/sm" + "github.com/Gleipnir-Technology/bob/dialect/psql/um" + "github.com/Gleipnir-Technology/bob/expr" + "github.com/Gleipnir-Technology/bob/mods" + "github.com/Gleipnir-Technology/bob/orm" + "github.com/Gleipnir-Technology/bob/types/pgtypes" + "github.com/aarondl/opt/omit" +) + +// FileuploadError is an object representing the database table. +type FileuploadError struct { + FileID int32 `db:"file_id" ` + ID int32 `db:"id,pk" ` + Line int32 `db:"line" ` + Message string `db:"message" ` + + R fileuploadErrorR `db:"-" ` +} + +// FileuploadErrorSlice is an alias for a slice of pointers to FileuploadError. +// This should almost always be used instead of []*FileuploadError. 
+type FileuploadErrorSlice []*FileuploadError + +// FileuploadErrors contains methods to work with the error table +var FileuploadErrors = psql.NewTablex[*FileuploadError, FileuploadErrorSlice, *FileuploadErrorSetter]("fileupload", "error", buildFileuploadErrorColumns("fileupload.error")) + +// FileuploadErrorsQuery is a query on the error table +type FileuploadErrorsQuery = *psql.ViewQuery[*FileuploadError, FileuploadErrorSlice] + +// fileuploadErrorR is where relationships are stored. +type fileuploadErrorR struct { + File *FileuploadFile // fileupload.error.error_file_id_fkey +} + +func buildFileuploadErrorColumns(alias string) fileuploadErrorColumns { + return fileuploadErrorColumns{ + ColumnsExpr: expr.NewColumnsExpr( + "file_id", "id", "line", "message", + ).WithParent("fileupload.error"), + tableAlias: alias, + FileID: psql.Quote(alias, "file_id"), + ID: psql.Quote(alias, "id"), + Line: psql.Quote(alias, "line"), + Message: psql.Quote(alias, "message"), + } +} + +type fileuploadErrorColumns struct { + expr.ColumnsExpr + tableAlias string + FileID psql.Expression + ID psql.Expression + Line psql.Expression + Message psql.Expression +} + +func (c fileuploadErrorColumns) Alias() string { + return c.tableAlias +} + +func (fileuploadErrorColumns) AliasedAs(alias string) fileuploadErrorColumns { + return buildFileuploadErrorColumns(alias) +} + +// FileuploadErrorSetter is used for insert/upsert/update operations +// All values are optional, and do not have to be set +// Generated columns are not included +type FileuploadErrorSetter struct { + FileID omit.Val[int32] `db:"file_id" ` + ID omit.Val[int32] `db:"id,pk" ` + Line omit.Val[int32] `db:"line" ` + Message omit.Val[string] `db:"message" ` +} + +func (s FileuploadErrorSetter) SetColumns() []string { + vals := make([]string, 0, 4) + if s.FileID.IsValue() { + vals = append(vals, "file_id") + } + if s.ID.IsValue() { + vals = append(vals, "id") + } + if s.Line.IsValue() { + vals = append(vals, "line") + } + if 
s.Message.IsValue() { + vals = append(vals, "message") + } + return vals +} + +func (s FileuploadErrorSetter) Overwrite(t *FileuploadError) { + if s.FileID.IsValue() { + t.FileID = s.FileID.MustGet() + } + if s.ID.IsValue() { + t.ID = s.ID.MustGet() + } + if s.Line.IsValue() { + t.Line = s.Line.MustGet() + } + if s.Message.IsValue() { + t.Message = s.Message.MustGet() + } +} + +func (s *FileuploadErrorSetter) Apply(q *dialect.InsertQuery) { + q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) { + return FileuploadErrors.BeforeInsertHooks.RunHooks(ctx, exec, s) + }) + + q.AppendValues(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) { + vals := make([]bob.Expression, 4) + if s.FileID.IsValue() { + vals[0] = psql.Arg(s.FileID.MustGet()) + } else { + vals[0] = psql.Raw("DEFAULT") + } + + if s.ID.IsValue() { + vals[1] = psql.Arg(s.ID.MustGet()) + } else { + vals[1] = psql.Raw("DEFAULT") + } + + if s.Line.IsValue() { + vals[2] = psql.Arg(s.Line.MustGet()) + } else { + vals[2] = psql.Raw("DEFAULT") + } + + if s.Message.IsValue() { + vals[3] = psql.Arg(s.Message.MustGet()) + } else { + vals[3] = psql.Raw("DEFAULT") + } + + return bob.ExpressSlice(ctx, w, d, start, vals, "", ", ", "") + })) +} + +func (s FileuploadErrorSetter) UpdateMod() bob.Mod[*dialect.UpdateQuery] { + return um.Set(s.Expressions()...) 
+} + +func (s FileuploadErrorSetter) Expressions(prefix ...string) []bob.Expression { + exprs := make([]bob.Expression, 0, 4) + + if s.FileID.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "file_id")...), + psql.Arg(s.FileID), + }}) + } + + if s.ID.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "id")...), + psql.Arg(s.ID), + }}) + } + + if s.Line.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "line")...), + psql.Arg(s.Line), + }}) + } + + if s.Message.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "message")...), + psql.Arg(s.Message), + }}) + } + + return exprs +} + +// FindFileuploadError retrieves a single record by primary key +// If cols is empty Find will return all columns. +func FindFileuploadError(ctx context.Context, exec bob.Executor, IDPK int32, cols ...string) (*FileuploadError, error) { + if len(cols) == 0 { + return FileuploadErrors.Query( + sm.Where(FileuploadErrors.Columns.ID.EQ(psql.Arg(IDPK))), + ).One(ctx, exec) + } + + return FileuploadErrors.Query( + sm.Where(FileuploadErrors.Columns.ID.EQ(psql.Arg(IDPK))), + sm.Columns(FileuploadErrors.Columns.Only(cols...)), + ).One(ctx, exec) +} + +// FileuploadErrorExists checks the presence of a single record by primary key +func FileuploadErrorExists(ctx context.Context, exec bob.Executor, IDPK int32) (bool, error) { + return FileuploadErrors.Query( + sm.Where(FileuploadErrors.Columns.ID.EQ(psql.Arg(IDPK))), + ).Exists(ctx, exec) +} + +// AfterQueryHook is called after FileuploadError is retrieved from the database +func (o *FileuploadError) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error { + var err error + + switch queryType { + case bob.QueryTypeSelect: + ctx, err = FileuploadErrors.AfterSelectHooks.RunHooks(ctx, 
exec, FileuploadErrorSlice{o}) + case bob.QueryTypeInsert: + ctx, err = FileuploadErrors.AfterInsertHooks.RunHooks(ctx, exec, FileuploadErrorSlice{o}) + case bob.QueryTypeUpdate: + ctx, err = FileuploadErrors.AfterUpdateHooks.RunHooks(ctx, exec, FileuploadErrorSlice{o}) + case bob.QueryTypeDelete: + ctx, err = FileuploadErrors.AfterDeleteHooks.RunHooks(ctx, exec, FileuploadErrorSlice{o}) + } + + return err +} + +// primaryKeyVals returns the primary key values of the FileuploadError +func (o *FileuploadError) primaryKeyVals() bob.Expression { + return psql.Arg(o.ID) +} + +func (o *FileuploadError) pkEQ() dialect.Expression { + return psql.Quote("fileupload.error", "id").EQ(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) { + return o.primaryKeyVals().WriteSQL(ctx, w, d, start) + })) +} + +// Update uses an executor to update the FileuploadError +func (o *FileuploadError) Update(ctx context.Context, exec bob.Executor, s *FileuploadErrorSetter) error { + v, err := FileuploadErrors.Update(s.UpdateMod(), um.Where(o.pkEQ())).One(ctx, exec) + if err != nil { + return err + } + + o.R = v.R + *o = *v + + return nil +} + +// Delete deletes a single FileuploadError record with an executor +func (o *FileuploadError) Delete(ctx context.Context, exec bob.Executor) error { + _, err := FileuploadErrors.Delete(dm.Where(o.pkEQ())).Exec(ctx, exec) + return err +} + +// Reload refreshes the FileuploadError using the executor +func (o *FileuploadError) Reload(ctx context.Context, exec bob.Executor) error { + o2, err := FileuploadErrors.Query( + sm.Where(FileuploadErrors.Columns.ID.EQ(psql.Arg(o.ID))), + ).One(ctx, exec) + if err != nil { + return err + } + o2.R = o.R + *o = *o2 + + return nil +} + +// AfterQueryHook is called after FileuploadErrorSlice is retrieved from the database +func (o FileuploadErrorSlice) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error { + var err error + + switch 
queryType { + case bob.QueryTypeSelect: + ctx, err = FileuploadErrors.AfterSelectHooks.RunHooks(ctx, exec, o) + case bob.QueryTypeInsert: + ctx, err = FileuploadErrors.AfterInsertHooks.RunHooks(ctx, exec, o) + case bob.QueryTypeUpdate: + ctx, err = FileuploadErrors.AfterUpdateHooks.RunHooks(ctx, exec, o) + case bob.QueryTypeDelete: + ctx, err = FileuploadErrors.AfterDeleteHooks.RunHooks(ctx, exec, o) + } + + return err +} + +func (o FileuploadErrorSlice) pkIN() dialect.Expression { + if len(o) == 0 { + return psql.Raw("NULL") + } + + return psql.Quote("fileupload.error", "id").In(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) { + pkPairs := make([]bob.Expression, len(o)) + for i, row := range o { + pkPairs[i] = row.primaryKeyVals() + } + return bob.ExpressSlice(ctx, w, d, start, pkPairs, "", ", ", "") + })) +} + +// copyMatchingRows finds models in the given slice that have the same primary key +// then it first copies the existing relationships from the old model to the new model +// and then replaces the old model in the slice with the new model +func (o FileuploadErrorSlice) copyMatchingRows(from ...*FileuploadError) { + for i, old := range o { + for _, new := range from { + if new.ID != old.ID { + continue + } + new.R = old.R + o[i] = new + break + } + } +} + +// UpdateMod modifies an update query with "WHERE primary_key IN (o...)" +func (o FileuploadErrorSlice) UpdateMod() bob.Mod[*dialect.UpdateQuery] { + return bob.ModFunc[*dialect.UpdateQuery](func(q *dialect.UpdateQuery) { + q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) { + return FileuploadErrors.BeforeUpdateHooks.RunHooks(ctx, exec, o) + }) + + q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error { + var err error + switch retrieved := retrieved.(type) { + case *FileuploadError: + o.copyMatchingRows(retrieved) + case []*FileuploadError: + 
o.copyMatchingRows(retrieved...) + case FileuploadErrorSlice: + o.copyMatchingRows(retrieved...) + default: + // If the retrieved value is not a FileuploadError or a slice of FileuploadError + // then run the AfterUpdateHooks on the slice + _, err = FileuploadErrors.AfterUpdateHooks.RunHooks(ctx, exec, o) + } + + return err + })) + + q.AppendWhere(o.pkIN()) + }) +} + +// DeleteMod modifies an delete query with "WHERE primary_key IN (o...)" +func (o FileuploadErrorSlice) DeleteMod() bob.Mod[*dialect.DeleteQuery] { + return bob.ModFunc[*dialect.DeleteQuery](func(q *dialect.DeleteQuery) { + q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) { + return FileuploadErrors.BeforeDeleteHooks.RunHooks(ctx, exec, o) + }) + + q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error { + var err error + switch retrieved := retrieved.(type) { + case *FileuploadError: + o.copyMatchingRows(retrieved) + case []*FileuploadError: + o.copyMatchingRows(retrieved...) + case FileuploadErrorSlice: + o.copyMatchingRows(retrieved...) 
+ default: + // If the retrieved value is not a FileuploadError or a slice of FileuploadError + // then run the AfterDeleteHooks on the slice + _, err = FileuploadErrors.AfterDeleteHooks.RunHooks(ctx, exec, o) + } + + return err + })) + + q.AppendWhere(o.pkIN()) + }) +} + +func (o FileuploadErrorSlice) UpdateAll(ctx context.Context, exec bob.Executor, vals FileuploadErrorSetter) error { + if len(o) == 0 { + return nil + } + + _, err := FileuploadErrors.Update(vals.UpdateMod(), o.UpdateMod()).All(ctx, exec) + return err +} + +func (o FileuploadErrorSlice) DeleteAll(ctx context.Context, exec bob.Executor) error { + if len(o) == 0 { + return nil + } + + _, err := FileuploadErrors.Delete(o.DeleteMod()).Exec(ctx, exec) + return err +} + +func (o FileuploadErrorSlice) ReloadAll(ctx context.Context, exec bob.Executor) error { + if len(o) == 0 { + return nil + } + + o2, err := FileuploadErrors.Query(sm.Where(o.pkIN())).All(ctx, exec) + if err != nil { + return err + } + + o.copyMatchingRows(o2...) + + return nil +} + +// File starts a query for related objects on fileupload.file +func (o *FileuploadError) File(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadFilesQuery { + return FileuploadFiles.Query(append(mods, + sm.Where(FileuploadFiles.Columns.ID.EQ(psql.Arg(o.FileID))), + )...) +} + +func (os FileuploadErrorSlice) File(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadFilesQuery { + pkFileID := make(pgtypes.Array[int32], 0, len(os)) + for _, o := range os { + if o == nil { + continue + } + pkFileID = append(pkFileID, o.FileID) + } + PKArgExpr := psql.Select(sm.Columns( + psql.F("unnest", psql.Cast(psql.Arg(pkFileID), "integer[]")), + )) + + return FileuploadFiles.Query(append(mods, + sm.Where(psql.Group(FileuploadFiles.Columns.ID).OP("IN", PKArgExpr)), + )...) 
+} + +func attachFileuploadErrorFile0(ctx context.Context, exec bob.Executor, count int, fileuploadError0 *FileuploadError, fileuploadFile1 *FileuploadFile) (*FileuploadError, error) { + setter := &FileuploadErrorSetter{ + FileID: omit.From(fileuploadFile1.ID), + } + + err := fileuploadError0.Update(ctx, exec, setter) + if err != nil { + return nil, fmt.Errorf("attachFileuploadErrorFile0: %w", err) + } + + return fileuploadError0, nil +} + +func (fileuploadError0 *FileuploadError) InsertFile(ctx context.Context, exec bob.Executor, related *FileuploadFileSetter) error { + var err error + + fileuploadFile1, err := FileuploadFiles.Insert(related).One(ctx, exec) + if err != nil { + return fmt.Errorf("inserting related objects: %w", err) + } + + _, err = attachFileuploadErrorFile0(ctx, exec, 1, fileuploadError0, fileuploadFile1) + if err != nil { + return err + } + + fileuploadError0.R.File = fileuploadFile1 + + fileuploadFile1.R.Errors = append(fileuploadFile1.R.Errors, fileuploadError0) + + return nil +} + +func (fileuploadError0 *FileuploadError) AttachFile(ctx context.Context, exec bob.Executor, fileuploadFile1 *FileuploadFile) error { + var err error + + _, err = attachFileuploadErrorFile0(ctx, exec, 1, fileuploadError0, fileuploadFile1) + if err != nil { + return err + } + + fileuploadError0.R.File = fileuploadFile1 + + fileuploadFile1.R.Errors = append(fileuploadFile1.R.Errors, fileuploadError0) + + return nil +} + +type fileuploadErrorWhere[Q psql.Filterable] struct { + FileID psql.WhereMod[Q, int32] + ID psql.WhereMod[Q, int32] + Line psql.WhereMod[Q, int32] + Message psql.WhereMod[Q, string] +} + +func (fileuploadErrorWhere[Q]) AliasedAs(alias string) fileuploadErrorWhere[Q] { + return buildFileuploadErrorWhere[Q](buildFileuploadErrorColumns(alias)) +} + +func buildFileuploadErrorWhere[Q psql.Filterable](cols fileuploadErrorColumns) fileuploadErrorWhere[Q] { + return fileuploadErrorWhere[Q]{ + FileID: psql.Where[Q, int32](cols.FileID), + ID: psql.Where[Q, 
int32](cols.ID), + Line: psql.Where[Q, int32](cols.Line), + Message: psql.Where[Q, string](cols.Message), + } +} + +func (o *FileuploadError) Preload(name string, retrieved any) error { + if o == nil { + return nil + } + + switch name { + case "File": + rel, ok := retrieved.(*FileuploadFile) + if !ok { + return fmt.Errorf("fileuploadError cannot load %T as %q", retrieved, name) + } + + o.R.File = rel + + if rel != nil { + rel.R.Errors = FileuploadErrorSlice{o} + } + return nil + default: + return fmt.Errorf("fileuploadError has no relationship %q", name) + } +} + +type fileuploadErrorPreloader struct { + File func(...psql.PreloadOption) psql.Preloader +} + +func buildFileuploadErrorPreloader() fileuploadErrorPreloader { + return fileuploadErrorPreloader{ + File: func(opts ...psql.PreloadOption) psql.Preloader { + return psql.Preload[*FileuploadFile, FileuploadFileSlice](psql.PreloadRel{ + Name: "File", + Sides: []psql.PreloadSide{ + { + From: FileuploadErrors, + To: FileuploadFiles, + FromColumns: []string{"file_id"}, + ToColumns: []string{"id"}, + }, + }, + }, FileuploadFiles.Columns.Names(), opts...) + }, + } +} + +type fileuploadErrorThenLoader[Q orm.Loadable] struct { + File func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] +} + +func buildFileuploadErrorThenLoader[Q orm.Loadable]() fileuploadErrorThenLoader[Q] { + type FileLoadInterface interface { + LoadFile(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error + } + + return fileuploadErrorThenLoader[Q]{ + File: thenLoadBuilder[Q]( + "File", + func(ctx context.Context, exec bob.Executor, retrieved FileLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { + return retrieved.LoadFile(ctx, exec, mods...) 
+ }, + ), + } +} + +// LoadFile loads the fileuploadError's File into the .R struct +func (o *FileuploadError) LoadFile(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if o == nil { + return nil + } + + // Reset the relationship + o.R.File = nil + + related, err := o.File(mods...).One(ctx, exec) + if err != nil { + return err + } + + related.R.Errors = FileuploadErrorSlice{o} + + o.R.File = related + return nil +} + +// LoadFile loads the fileuploadError's File into the .R struct +func (os FileuploadErrorSlice) LoadFile(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if len(os) == 0 { + return nil + } + + fileuploadFiles, err := os.File(mods...).All(ctx, exec) + if err != nil { + return err + } + + for _, o := range os { + if o == nil { + continue + } + + for _, rel := range fileuploadFiles { + + if !(o.FileID == rel.ID) { + continue + } + + rel.R.Errors = append(rel.R.Errors, o) + + o.R.File = rel + break + } + } + + return nil +} + +type fileuploadErrorJoins[Q dialect.Joinable] struct { + typ string + File modAs[Q, fileuploadFileColumns] +} + +func (j fileuploadErrorJoins[Q]) aliasedAs(alias string) fileuploadErrorJoins[Q] { + return buildFileuploadErrorJoins[Q](buildFileuploadErrorColumns(alias), j.typ) +} + +func buildFileuploadErrorJoins[Q dialect.Joinable](cols fileuploadErrorColumns, typ string) fileuploadErrorJoins[Q] { + return fileuploadErrorJoins[Q]{ + typ: typ, + File: modAs[Q, fileuploadFileColumns]{ + c: FileuploadFiles.Columns, + f: func(to fileuploadFileColumns) bob.Mod[Q] { + mods := make(mods.QueryMods[Q], 0, 1) + + { + mods = append(mods, dialect.Join[Q](typ, FileuploadFiles.Name().As(to.Alias())).On( + to.ID.EQ(cols.FileID), + )) + } + + return mods + }, + }, + } +} diff --git a/db/models/fileupload.file.bob.go b/db/models/fileupload.file.bob.go new file mode 100644 index 00000000..86ffc12e --- /dev/null +++ b/db/models/fileupload.file.bob.go @@ -0,0 +1,1252 @@ +// 
Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "fmt" + "io" + "time" + + "github.com/Gleipnir-Technology/bob" + "github.com/Gleipnir-Technology/bob/dialect/psql" + "github.com/Gleipnir-Technology/bob/dialect/psql/dialect" + "github.com/Gleipnir-Technology/bob/dialect/psql/dm" + "github.com/Gleipnir-Technology/bob/dialect/psql/sm" + "github.com/Gleipnir-Technology/bob/dialect/psql/um" + "github.com/Gleipnir-Technology/bob/expr" + "github.com/Gleipnir-Technology/bob/mods" + "github.com/Gleipnir-Technology/bob/orm" + "github.com/Gleipnir-Technology/bob/types/pgtypes" + enums "github.com/Gleipnir-Technology/nidus-sync/db/enums" + "github.com/aarondl/opt/null" + "github.com/aarondl/opt/omit" + "github.com/aarondl/opt/omitnull" + "github.com/google/uuid" +) + +// FileuploadFile is an object representing the database table. +type FileuploadFile struct { + ID int32 `db:"id,pk" ` + ContentType string `db:"content_type" ` + Created time.Time `db:"created" ` + CreatorID int32 `db:"creator_id" ` + Deleted null.Val[time.Time] `db:"deleted" ` + Name string `db:"name" ` + Status enums.FileuploadFilestatustype `db:"status" ` + SizeBytes int32 `db:"size_bytes" ` + FileUUID uuid.UUID `db:"file_uuid" ` + + R fileuploadFileR `db:"-" ` + + C fileuploadFileC `db:"-" ` +} + +// FileuploadFileSlice is an alias for a slice of pointers to FileuploadFile. +// This should almost always be used instead of []*FileuploadFile. 
+type FileuploadFileSlice []*FileuploadFile + +// FileuploadFiles contains methods to work with the file table +var FileuploadFiles = psql.NewTablex[*FileuploadFile, FileuploadFileSlice, *FileuploadFileSetter]("fileupload", "file", buildFileuploadFileColumns("fileupload.file")) + +// FileuploadFilesQuery is a query on the file table +type FileuploadFilesQuery = *psql.ViewQuery[*FileuploadFile, FileuploadFileSlice] + +// fileuploadFileR is where relationships are stored. +type fileuploadFileR struct { + CSV *FileuploadCSV // fileupload.csv.csv_file_id_fkey + Errors FileuploadErrorSlice // fileupload.error.error_file_id_fkey + CreatorUser *User // fileupload.file.file_creator_id_fkey +} + +func buildFileuploadFileColumns(alias string) fileuploadFileColumns { + return fileuploadFileColumns{ + ColumnsExpr: expr.NewColumnsExpr( + "id", "content_type", "created", "creator_id", "deleted", "name", "status", "size_bytes", "file_uuid", + ).WithParent("fileupload.file"), + tableAlias: alias, + ID: psql.Quote(alias, "id"), + ContentType: psql.Quote(alias, "content_type"), + Created: psql.Quote(alias, "created"), + CreatorID: psql.Quote(alias, "creator_id"), + Deleted: psql.Quote(alias, "deleted"), + Name: psql.Quote(alias, "name"), + Status: psql.Quote(alias, "status"), + SizeBytes: psql.Quote(alias, "size_bytes"), + FileUUID: psql.Quote(alias, "file_uuid"), + } +} + +type fileuploadFileColumns struct { + expr.ColumnsExpr + tableAlias string + ID psql.Expression + ContentType psql.Expression + Created psql.Expression + CreatorID psql.Expression + Deleted psql.Expression + Name psql.Expression + Status psql.Expression + SizeBytes psql.Expression + FileUUID psql.Expression +} + +func (c fileuploadFileColumns) Alias() string { + return c.tableAlias +} + +func (fileuploadFileColumns) AliasedAs(alias string) fileuploadFileColumns { + return buildFileuploadFileColumns(alias) +} + +// FileuploadFileSetter is used for insert/upsert/update operations +// All values are optional, and do 
not have to be set +// Generated columns are not included +type FileuploadFileSetter struct { + ID omit.Val[int32] `db:"id,pk" ` + ContentType omit.Val[string] `db:"content_type" ` + Created omit.Val[time.Time] `db:"created" ` + CreatorID omit.Val[int32] `db:"creator_id" ` + Deleted omitnull.Val[time.Time] `db:"deleted" ` + Name omit.Val[string] `db:"name" ` + Status omit.Val[enums.FileuploadFilestatustype] `db:"status" ` + SizeBytes omit.Val[int32] `db:"size_bytes" ` + FileUUID omit.Val[uuid.UUID] `db:"file_uuid" ` +} + +func (s FileuploadFileSetter) SetColumns() []string { + vals := make([]string, 0, 9) + if s.ID.IsValue() { + vals = append(vals, "id") + } + if s.ContentType.IsValue() { + vals = append(vals, "content_type") + } + if s.Created.IsValue() { + vals = append(vals, "created") + } + if s.CreatorID.IsValue() { + vals = append(vals, "creator_id") + } + if !s.Deleted.IsUnset() { + vals = append(vals, "deleted") + } + if s.Name.IsValue() { + vals = append(vals, "name") + } + if s.Status.IsValue() { + vals = append(vals, "status") + } + if s.SizeBytes.IsValue() { + vals = append(vals, "size_bytes") + } + if s.FileUUID.IsValue() { + vals = append(vals, "file_uuid") + } + return vals +} + +func (s FileuploadFileSetter) Overwrite(t *FileuploadFile) { + if s.ID.IsValue() { + t.ID = s.ID.MustGet() + } + if s.ContentType.IsValue() { + t.ContentType = s.ContentType.MustGet() + } + if s.Created.IsValue() { + t.Created = s.Created.MustGet() + } + if s.CreatorID.IsValue() { + t.CreatorID = s.CreatorID.MustGet() + } + if !s.Deleted.IsUnset() { + t.Deleted = s.Deleted.MustGetNull() + } + if s.Name.IsValue() { + t.Name = s.Name.MustGet() + } + if s.Status.IsValue() { + t.Status = s.Status.MustGet() + } + if s.SizeBytes.IsValue() { + t.SizeBytes = s.SizeBytes.MustGet() + } + if s.FileUUID.IsValue() { + t.FileUUID = s.FileUUID.MustGet() + } +} + +func (s *FileuploadFileSetter) Apply(q *dialect.InsertQuery) { + q.AppendHooks(func(ctx context.Context, exec bob.Executor) 
(context.Context, error) { + return FileuploadFiles.BeforeInsertHooks.RunHooks(ctx, exec, s) + }) + + q.AppendValues(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) { + vals := make([]bob.Expression, 9) + if s.ID.IsValue() { + vals[0] = psql.Arg(s.ID.MustGet()) + } else { + vals[0] = psql.Raw("DEFAULT") + } + + if s.ContentType.IsValue() { + vals[1] = psql.Arg(s.ContentType.MustGet()) + } else { + vals[1] = psql.Raw("DEFAULT") + } + + if s.Created.IsValue() { + vals[2] = psql.Arg(s.Created.MustGet()) + } else { + vals[2] = psql.Raw("DEFAULT") + } + + if s.CreatorID.IsValue() { + vals[3] = psql.Arg(s.CreatorID.MustGet()) + } else { + vals[3] = psql.Raw("DEFAULT") + } + + if !s.Deleted.IsUnset() { + vals[4] = psql.Arg(s.Deleted.MustGetNull()) + } else { + vals[4] = psql.Raw("DEFAULT") + } + + if s.Name.IsValue() { + vals[5] = psql.Arg(s.Name.MustGet()) + } else { + vals[5] = psql.Raw("DEFAULT") + } + + if s.Status.IsValue() { + vals[6] = psql.Arg(s.Status.MustGet()) + } else { + vals[6] = psql.Raw("DEFAULT") + } + + if s.SizeBytes.IsValue() { + vals[7] = psql.Arg(s.SizeBytes.MustGet()) + } else { + vals[7] = psql.Raw("DEFAULT") + } + + if s.FileUUID.IsValue() { + vals[8] = psql.Arg(s.FileUUID.MustGet()) + } else { + vals[8] = psql.Raw("DEFAULT") + } + + return bob.ExpressSlice(ctx, w, d, start, vals, "", ", ", "") + })) +} + +func (s FileuploadFileSetter) UpdateMod() bob.Mod[*dialect.UpdateQuery] { + return um.Set(s.Expressions()...) 
+} + +func (s FileuploadFileSetter) Expressions(prefix ...string) []bob.Expression { + exprs := make([]bob.Expression, 0, 9) + + if s.ID.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "id")...), + psql.Arg(s.ID), + }}) + } + + if s.ContentType.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "content_type")...), + psql.Arg(s.ContentType), + }}) + } + + if s.Created.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "created")...), + psql.Arg(s.Created), + }}) + } + + if s.CreatorID.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "creator_id")...), + psql.Arg(s.CreatorID), + }}) + } + + if !s.Deleted.IsUnset() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "deleted")...), + psql.Arg(s.Deleted), + }}) + } + + if s.Name.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "name")...), + psql.Arg(s.Name), + }}) + } + + if s.Status.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "status")...), + psql.Arg(s.Status), + }}) + } + + if s.SizeBytes.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "size_bytes")...), + psql.Arg(s.SizeBytes), + }}) + } + + if s.FileUUID.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "file_uuid")...), + psql.Arg(s.FileUUID), + }}) + } + + return exprs +} + +// FindFileuploadFile retrieves a single record by primary key +// If cols is empty Find will return all columns. 
+func FindFileuploadFile(ctx context.Context, exec bob.Executor, IDPK int32, cols ...string) (*FileuploadFile, error) { + if len(cols) == 0 { + return FileuploadFiles.Query( + sm.Where(FileuploadFiles.Columns.ID.EQ(psql.Arg(IDPK))), + ).One(ctx, exec) + } + + return FileuploadFiles.Query( + sm.Where(FileuploadFiles.Columns.ID.EQ(psql.Arg(IDPK))), + sm.Columns(FileuploadFiles.Columns.Only(cols...)), + ).One(ctx, exec) +} + +// FileuploadFileExists checks the presence of a single record by primary key +func FileuploadFileExists(ctx context.Context, exec bob.Executor, IDPK int32) (bool, error) { + return FileuploadFiles.Query( + sm.Where(FileuploadFiles.Columns.ID.EQ(psql.Arg(IDPK))), + ).Exists(ctx, exec) +} + +// AfterQueryHook is called after FileuploadFile is retrieved from the database +func (o *FileuploadFile) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error { + var err error + + switch queryType { + case bob.QueryTypeSelect: + ctx, err = FileuploadFiles.AfterSelectHooks.RunHooks(ctx, exec, FileuploadFileSlice{o}) + case bob.QueryTypeInsert: + ctx, err = FileuploadFiles.AfterInsertHooks.RunHooks(ctx, exec, FileuploadFileSlice{o}) + case bob.QueryTypeUpdate: + ctx, err = FileuploadFiles.AfterUpdateHooks.RunHooks(ctx, exec, FileuploadFileSlice{o}) + case bob.QueryTypeDelete: + ctx, err = FileuploadFiles.AfterDeleteHooks.RunHooks(ctx, exec, FileuploadFileSlice{o}) + } + + return err +} + +// primaryKeyVals returns the primary key values of the FileuploadFile +func (o *FileuploadFile) primaryKeyVals() bob.Expression { + return psql.Arg(o.ID) +} + +func (o *FileuploadFile) pkEQ() dialect.Expression { + return psql.Quote("fileupload.file", "id").EQ(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) { + return o.primaryKeyVals().WriteSQL(ctx, w, d, start) + })) +} + +// Update uses an executor to update the FileuploadFile +func (o *FileuploadFile) Update(ctx context.Context, exec 
bob.Executor, s *FileuploadFileSetter) error { + v, err := FileuploadFiles.Update(s.UpdateMod(), um.Where(o.pkEQ())).One(ctx, exec) + if err != nil { + return err + } + + o.R = v.R + *o = *v + + return nil +} + +// Delete deletes a single FileuploadFile record with an executor +func (o *FileuploadFile) Delete(ctx context.Context, exec bob.Executor) error { + _, err := FileuploadFiles.Delete(dm.Where(o.pkEQ())).Exec(ctx, exec) + return err +} + +// Reload refreshes the FileuploadFile using the executor +func (o *FileuploadFile) Reload(ctx context.Context, exec bob.Executor) error { + o2, err := FileuploadFiles.Query( + sm.Where(FileuploadFiles.Columns.ID.EQ(psql.Arg(o.ID))), + ).One(ctx, exec) + if err != nil { + return err + } + o2.R = o.R + *o = *o2 + + return nil +} + +// AfterQueryHook is called after FileuploadFileSlice is retrieved from the database +func (o FileuploadFileSlice) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error { + var err error + + switch queryType { + case bob.QueryTypeSelect: + ctx, err = FileuploadFiles.AfterSelectHooks.RunHooks(ctx, exec, o) + case bob.QueryTypeInsert: + ctx, err = FileuploadFiles.AfterInsertHooks.RunHooks(ctx, exec, o) + case bob.QueryTypeUpdate: + ctx, err = FileuploadFiles.AfterUpdateHooks.RunHooks(ctx, exec, o) + case bob.QueryTypeDelete: + ctx, err = FileuploadFiles.AfterDeleteHooks.RunHooks(ctx, exec, o) + } + + return err +} + +func (o FileuploadFileSlice) pkIN() dialect.Expression { + if len(o) == 0 { + return psql.Raw("NULL") + } + + return psql.Quote("fileupload.file", "id").In(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) { + pkPairs := make([]bob.Expression, len(o)) + for i, row := range o { + pkPairs[i] = row.primaryKeyVals() + } + return bob.ExpressSlice(ctx, w, d, start, pkPairs, "", ", ", "") + })) +} + +// copyMatchingRows finds models in the given slice that have the same primary key +// then it first copies 
the existing relationships from the old model to the new model +// and then replaces the old model in the slice with the new model +func (o FileuploadFileSlice) copyMatchingRows(from ...*FileuploadFile) { + for i, old := range o { + for _, new := range from { + if new.ID != old.ID { + continue + } + new.R = old.R + o[i] = new + break + } + } +} + +// UpdateMod modifies an update query with "WHERE primary_key IN (o...)" +func (o FileuploadFileSlice) UpdateMod() bob.Mod[*dialect.UpdateQuery] { + return bob.ModFunc[*dialect.UpdateQuery](func(q *dialect.UpdateQuery) { + q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) { + return FileuploadFiles.BeforeUpdateHooks.RunHooks(ctx, exec, o) + }) + + q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error { + var err error + switch retrieved := retrieved.(type) { + case *FileuploadFile: + o.copyMatchingRows(retrieved) + case []*FileuploadFile: + o.copyMatchingRows(retrieved...) + case FileuploadFileSlice: + o.copyMatchingRows(retrieved...) + default: + // If the retrieved value is not a FileuploadFile or a slice of FileuploadFile + // then run the AfterUpdateHooks on the slice + _, err = FileuploadFiles.AfterUpdateHooks.RunHooks(ctx, exec, o) + } + + return err + })) + + q.AppendWhere(o.pkIN()) + }) +} + +// DeleteMod modifies an delete query with "WHERE primary_key IN (o...)" +func (o FileuploadFileSlice) DeleteMod() bob.Mod[*dialect.DeleteQuery] { + return bob.ModFunc[*dialect.DeleteQuery](func(q *dialect.DeleteQuery) { + q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) { + return FileuploadFiles.BeforeDeleteHooks.RunHooks(ctx, exec, o) + }) + + q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error { + var err error + switch retrieved := retrieved.(type) { + case *FileuploadFile: + o.copyMatchingRows(retrieved) + case []*FileuploadFile: + o.copyMatchingRows(retrieved...) 
+ case FileuploadFileSlice: + o.copyMatchingRows(retrieved...) + default: + // If the retrieved value is not a FileuploadFile or a slice of FileuploadFile + // then run the AfterDeleteHooks on the slice + _, err = FileuploadFiles.AfterDeleteHooks.RunHooks(ctx, exec, o) + } + + return err + })) + + q.AppendWhere(o.pkIN()) + }) +} + +func (o FileuploadFileSlice) UpdateAll(ctx context.Context, exec bob.Executor, vals FileuploadFileSetter) error { + if len(o) == 0 { + return nil + } + + _, err := FileuploadFiles.Update(vals.UpdateMod(), o.UpdateMod()).All(ctx, exec) + return err +} + +func (o FileuploadFileSlice) DeleteAll(ctx context.Context, exec bob.Executor) error { + if len(o) == 0 { + return nil + } + + _, err := FileuploadFiles.Delete(o.DeleteMod()).Exec(ctx, exec) + return err +} + +func (o FileuploadFileSlice) ReloadAll(ctx context.Context, exec bob.Executor) error { + if len(o) == 0 { + return nil + } + + o2, err := FileuploadFiles.Query(sm.Where(o.pkIN())).All(ctx, exec) + if err != nil { + return err + } + + o.copyMatchingRows(o2...) + + return nil +} + +// CSV starts a query for related objects on fileupload.csv +func (o *FileuploadFile) CSV(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadCSVSQuery { + return FileuploadCSVS.Query(append(mods, + sm.Where(FileuploadCSVS.Columns.FileID.EQ(psql.Arg(o.ID))), + )...) +} + +func (os FileuploadFileSlice) CSV(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadCSVSQuery { + pkID := make(pgtypes.Array[int32], 0, len(os)) + for _, o := range os { + if o == nil { + continue + } + pkID = append(pkID, o.ID) + } + PKArgExpr := psql.Select(sm.Columns( + psql.F("unnest", psql.Cast(psql.Arg(pkID), "integer[]")), + )) + + return FileuploadCSVS.Query(append(mods, + sm.Where(psql.Group(FileuploadCSVS.Columns.FileID).OP("IN", PKArgExpr)), + )...) 
+} + +// Errors starts a query for related objects on fileupload.error +func (o *FileuploadFile) Errors(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadErrorsQuery { + return FileuploadErrors.Query(append(mods, + sm.Where(FileuploadErrors.Columns.FileID.EQ(psql.Arg(o.ID))), + )...) +} + +func (os FileuploadFileSlice) Errors(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadErrorsQuery { + pkID := make(pgtypes.Array[int32], 0, len(os)) + for _, o := range os { + if o == nil { + continue + } + pkID = append(pkID, o.ID) + } + PKArgExpr := psql.Select(sm.Columns( + psql.F("unnest", psql.Cast(psql.Arg(pkID), "integer[]")), + )) + + return FileuploadErrors.Query(append(mods, + sm.Where(psql.Group(FileuploadErrors.Columns.FileID).OP("IN", PKArgExpr)), + )...) +} + +// CreatorUser starts a query for related objects on user_ +func (o *FileuploadFile) CreatorUser(mods ...bob.Mod[*dialect.SelectQuery]) UsersQuery { + return Users.Query(append(mods, + sm.Where(Users.Columns.ID.EQ(psql.Arg(o.CreatorID))), + )...) +} + +func (os FileuploadFileSlice) CreatorUser(mods ...bob.Mod[*dialect.SelectQuery]) UsersQuery { + pkCreatorID := make(pgtypes.Array[int32], 0, len(os)) + for _, o := range os { + if o == nil { + continue + } + pkCreatorID = append(pkCreatorID, o.CreatorID) + } + PKArgExpr := psql.Select(sm.Columns( + psql.F("unnest", psql.Cast(psql.Arg(pkCreatorID), "integer[]")), + )) + + return Users.Query(append(mods, + sm.Where(psql.Group(Users.Columns.ID).OP("IN", PKArgExpr)), + )...) 
+} + +func insertFileuploadFileCSV0(ctx context.Context, exec bob.Executor, fileuploadCSV1 *FileuploadCSVSetter, fileuploadFile0 *FileuploadFile) (*FileuploadCSV, error) { + fileuploadCSV1.FileID = omit.From(fileuploadFile0.ID) + + ret, err := FileuploadCSVS.Insert(fileuploadCSV1).One(ctx, exec) + if err != nil { + return ret, fmt.Errorf("insertFileuploadFileCSV0: %w", err) + } + + return ret, nil +} + +func attachFileuploadFileCSV0(ctx context.Context, exec bob.Executor, count int, fileuploadCSV1 *FileuploadCSV, fileuploadFile0 *FileuploadFile) (*FileuploadCSV, error) { + setter := &FileuploadCSVSetter{ + FileID: omit.From(fileuploadFile0.ID), + } + + err := fileuploadCSV1.Update(ctx, exec, setter) + if err != nil { + return nil, fmt.Errorf("attachFileuploadFileCSV0: %w", err) + } + + return fileuploadCSV1, nil +} + +func (fileuploadFile0 *FileuploadFile) InsertCSV(ctx context.Context, exec bob.Executor, related *FileuploadCSVSetter) error { + var err error + + fileuploadCSV1, err := insertFileuploadFileCSV0(ctx, exec, related, fileuploadFile0) + if err != nil { + return err + } + + fileuploadFile0.R.CSV = fileuploadCSV1 + + fileuploadCSV1.R.File = fileuploadFile0 + + return nil +} + +func (fileuploadFile0 *FileuploadFile) AttachCSV(ctx context.Context, exec bob.Executor, fileuploadCSV1 *FileuploadCSV) error { + var err error + + _, err = attachFileuploadFileCSV0(ctx, exec, 1, fileuploadCSV1, fileuploadFile0) + if err != nil { + return err + } + + fileuploadFile0.R.CSV = fileuploadCSV1 + + fileuploadCSV1.R.File = fileuploadFile0 + + return nil +} + +func insertFileuploadFileErrors0(ctx context.Context, exec bob.Executor, fileuploadErrors1 []*FileuploadErrorSetter, fileuploadFile0 *FileuploadFile) (FileuploadErrorSlice, error) { + for i := range fileuploadErrors1 { + fileuploadErrors1[i].FileID = omit.From(fileuploadFile0.ID) + } + + ret, err := FileuploadErrors.Insert(bob.ToMods(fileuploadErrors1...)).All(ctx, exec) + if err != nil { + return ret, 
fmt.Errorf("insertFileuploadFileErrors0: %w", err) + } + + return ret, nil +} + +func attachFileuploadFileErrors0(ctx context.Context, exec bob.Executor, count int, fileuploadErrors1 FileuploadErrorSlice, fileuploadFile0 *FileuploadFile) (FileuploadErrorSlice, error) { + setter := &FileuploadErrorSetter{ + FileID: omit.From(fileuploadFile0.ID), + } + + err := fileuploadErrors1.UpdateAll(ctx, exec, *setter) + if err != nil { + return nil, fmt.Errorf("attachFileuploadFileErrors0: %w", err) + } + + return fileuploadErrors1, nil +} + +func (fileuploadFile0 *FileuploadFile) InsertErrors(ctx context.Context, exec bob.Executor, related ...*FileuploadErrorSetter) error { + if len(related) == 0 { + return nil + } + + var err error + + fileuploadErrors1, err := insertFileuploadFileErrors0(ctx, exec, related, fileuploadFile0) + if err != nil { + return err + } + + fileuploadFile0.R.Errors = append(fileuploadFile0.R.Errors, fileuploadErrors1...) + + for _, rel := range fileuploadErrors1 { + rel.R.File = fileuploadFile0 + } + return nil +} + +func (fileuploadFile0 *FileuploadFile) AttachErrors(ctx context.Context, exec bob.Executor, related ...*FileuploadError) error { + if len(related) == 0 { + return nil + } + + var err error + fileuploadErrors1 := FileuploadErrorSlice(related) + + _, err = attachFileuploadFileErrors0(ctx, exec, len(related), fileuploadErrors1, fileuploadFile0) + if err != nil { + return err + } + + fileuploadFile0.R.Errors = append(fileuploadFile0.R.Errors, fileuploadErrors1...) 
+ + for _, rel := range related { + rel.R.File = fileuploadFile0 + } + + return nil +} + +func attachFileuploadFileCreatorUser0(ctx context.Context, exec bob.Executor, count int, fileuploadFile0 *FileuploadFile, user1 *User) (*FileuploadFile, error) { + setter := &FileuploadFileSetter{ + CreatorID: omit.From(user1.ID), + } + + err := fileuploadFile0.Update(ctx, exec, setter) + if err != nil { + return nil, fmt.Errorf("attachFileuploadFileCreatorUser0: %w", err) + } + + return fileuploadFile0, nil +} + +func (fileuploadFile0 *FileuploadFile) InsertCreatorUser(ctx context.Context, exec bob.Executor, related *UserSetter) error { + var err error + + user1, err := Users.Insert(related).One(ctx, exec) + if err != nil { + return fmt.Errorf("inserting related objects: %w", err) + } + + _, err = attachFileuploadFileCreatorUser0(ctx, exec, 1, fileuploadFile0, user1) + if err != nil { + return err + } + + fileuploadFile0.R.CreatorUser = user1 + + user1.R.CreatorFiles = append(user1.R.CreatorFiles, fileuploadFile0) + + return nil +} + +func (fileuploadFile0 *FileuploadFile) AttachCreatorUser(ctx context.Context, exec bob.Executor, user1 *User) error { + var err error + + _, err = attachFileuploadFileCreatorUser0(ctx, exec, 1, fileuploadFile0, user1) + if err != nil { + return err + } + + fileuploadFile0.R.CreatorUser = user1 + + user1.R.CreatorFiles = append(user1.R.CreatorFiles, fileuploadFile0) + + return nil +} + +type fileuploadFileWhere[Q psql.Filterable] struct { + ID psql.WhereMod[Q, int32] + ContentType psql.WhereMod[Q, string] + Created psql.WhereMod[Q, time.Time] + CreatorID psql.WhereMod[Q, int32] + Deleted psql.WhereNullMod[Q, time.Time] + Name psql.WhereMod[Q, string] + Status psql.WhereMod[Q, enums.FileuploadFilestatustype] + SizeBytes psql.WhereMod[Q, int32] + FileUUID psql.WhereMod[Q, uuid.UUID] +} + +func (fileuploadFileWhere[Q]) AliasedAs(alias string) fileuploadFileWhere[Q] { + return buildFileuploadFileWhere[Q](buildFileuploadFileColumns(alias)) +} + +func 
buildFileuploadFileWhere[Q psql.Filterable](cols fileuploadFileColumns) fileuploadFileWhere[Q] { + return fileuploadFileWhere[Q]{ + ID: psql.Where[Q, int32](cols.ID), + ContentType: psql.Where[Q, string](cols.ContentType), + Created: psql.Where[Q, time.Time](cols.Created), + CreatorID: psql.Where[Q, int32](cols.CreatorID), + Deleted: psql.WhereNull[Q, time.Time](cols.Deleted), + Name: psql.Where[Q, string](cols.Name), + Status: psql.Where[Q, enums.FileuploadFilestatustype](cols.Status), + SizeBytes: psql.Where[Q, int32](cols.SizeBytes), + FileUUID: psql.Where[Q, uuid.UUID](cols.FileUUID), + } +} + +func (o *FileuploadFile) Preload(name string, retrieved any) error { + if o == nil { + return nil + } + + switch name { + case "CSV": + rel, ok := retrieved.(*FileuploadCSV) + if !ok { + return fmt.Errorf("fileuploadFile cannot load %T as %q", retrieved, name) + } + + o.R.CSV = rel + + if rel != nil { + rel.R.File = o + } + return nil + case "Errors": + rels, ok := retrieved.(FileuploadErrorSlice) + if !ok { + return fmt.Errorf("fileuploadFile cannot load %T as %q", retrieved, name) + } + + o.R.Errors = rels + + for _, rel := range rels { + if rel != nil { + rel.R.File = o + } + } + return nil + case "CreatorUser": + rel, ok := retrieved.(*User) + if !ok { + return fmt.Errorf("fileuploadFile cannot load %T as %q", retrieved, name) + } + + o.R.CreatorUser = rel + + if rel != nil { + rel.R.CreatorFiles = FileuploadFileSlice{o} + } + return nil + default: + return fmt.Errorf("fileuploadFile has no relationship %q", name) + } +} + +type fileuploadFilePreloader struct { + CSV func(...psql.PreloadOption) psql.Preloader + CreatorUser func(...psql.PreloadOption) psql.Preloader +} + +func buildFileuploadFilePreloader() fileuploadFilePreloader { + return fileuploadFilePreloader{ + CSV: func(opts ...psql.PreloadOption) psql.Preloader { + return psql.Preload[*FileuploadCSV, FileuploadCSVSlice](psql.PreloadRel{ + Name: "CSV", + Sides: []psql.PreloadSide{ + { + From: FileuploadFiles, 
+ To: FileuploadCSVS, + FromColumns: []string{"id"}, + ToColumns: []string{"file_id"}, + }, + }, + }, FileuploadCSVS.Columns.Names(), opts...) + }, + CreatorUser: func(opts ...psql.PreloadOption) psql.Preloader { + return psql.Preload[*User, UserSlice](psql.PreloadRel{ + Name: "CreatorUser", + Sides: []psql.PreloadSide{ + { + From: FileuploadFiles, + To: Users, + FromColumns: []string{"creator_id"}, + ToColumns: []string{"id"}, + }, + }, + }, Users.Columns.Names(), opts...) + }, + } +} + +type fileuploadFileThenLoader[Q orm.Loadable] struct { + CSV func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] + Errors func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] + CreatorUser func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] +} + +func buildFileuploadFileThenLoader[Q orm.Loadable]() fileuploadFileThenLoader[Q] { + type CSVLoadInterface interface { + LoadCSV(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error + } + type ErrorsLoadInterface interface { + LoadErrors(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error + } + type CreatorUserLoadInterface interface { + LoadCreatorUser(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error + } + + return fileuploadFileThenLoader[Q]{ + CSV: thenLoadBuilder[Q]( + "CSV", + func(ctx context.Context, exec bob.Executor, retrieved CSVLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { + return retrieved.LoadCSV(ctx, exec, mods...) + }, + ), + Errors: thenLoadBuilder[Q]( + "Errors", + func(ctx context.Context, exec bob.Executor, retrieved ErrorsLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { + return retrieved.LoadErrors(ctx, exec, mods...) + }, + ), + CreatorUser: thenLoadBuilder[Q]( + "CreatorUser", + func(ctx context.Context, exec bob.Executor, retrieved CreatorUserLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { + return retrieved.LoadCreatorUser(ctx, exec, mods...) 
+ }, + ), + } +} + +// LoadCSV loads the fileuploadFile's CSV into the .R struct +func (o *FileuploadFile) LoadCSV(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if o == nil { + return nil + } + + // Reset the relationship + o.R.CSV = nil + + related, err := o.CSV(mods...).One(ctx, exec) + if err != nil { + return err + } + + related.R.File = o + + o.R.CSV = related + return nil +} + +// LoadCSV loads the fileuploadFile's CSV into the .R struct +func (os FileuploadFileSlice) LoadCSV(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if len(os) == 0 { + return nil + } + + fileuploadCSVS, err := os.CSV(mods...).All(ctx, exec) + if err != nil { + return err + } + + for _, o := range os { + if o == nil { + continue + } + + for _, rel := range fileuploadCSVS { + + if !(o.ID == rel.FileID) { + continue + } + + rel.R.File = o + + o.R.CSV = rel + break + } + } + + return nil +} + +// LoadErrors loads the fileuploadFile's Errors into the .R struct +func (o *FileuploadFile) LoadErrors(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if o == nil { + return nil + } + + // Reset the relationship + o.R.Errors = nil + + related, err := o.Errors(mods...).All(ctx, exec) + if err != nil { + return err + } + + for _, rel := range related { + rel.R.File = o + } + + o.R.Errors = related + return nil +} + +// LoadErrors loads the fileuploadFile's Errors into the .R struct +func (os FileuploadFileSlice) LoadErrors(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if len(os) == 0 { + return nil + } + + fileuploadErrors, err := os.Errors(mods...).All(ctx, exec) + if err != nil { + return err + } + + for _, o := range os { + if o == nil { + continue + } + + o.R.Errors = nil + } + + for _, o := range os { + if o == nil { + continue + } + + for _, rel := range fileuploadErrors { + + if !(o.ID == rel.FileID) { + continue + } + + 
rel.R.File = o + + o.R.Errors = append(o.R.Errors, rel) + } + } + + return nil +} + +// LoadCreatorUser loads the fileuploadFile's CreatorUser into the .R struct +func (o *FileuploadFile) LoadCreatorUser(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if o == nil { + return nil + } + + // Reset the relationship + o.R.CreatorUser = nil + + related, err := o.CreatorUser(mods...).One(ctx, exec) + if err != nil { + return err + } + + related.R.CreatorFiles = FileuploadFileSlice{o} + + o.R.CreatorUser = related + return nil +} + +// LoadCreatorUser loads the fileuploadFile's CreatorUser into the .R struct +func (os FileuploadFileSlice) LoadCreatorUser(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if len(os) == 0 { + return nil + } + + users, err := os.CreatorUser(mods...).All(ctx, exec) + if err != nil { + return err + } + + for _, o := range os { + if o == nil { + continue + } + + for _, rel := range users { + + if !(o.CreatorID == rel.ID) { + continue + } + + rel.R.CreatorFiles = append(rel.R.CreatorFiles, o) + + o.R.CreatorUser = rel + break + } + } + + return nil +} + +// fileuploadFileC is where relationship counts are stored. 
+type fileuploadFileC struct { + Errors *int64 +} + +// PreloadCount sets a count in the C struct by name +func (o *FileuploadFile) PreloadCount(name string, count int64) error { + if o == nil { + return nil + } + + switch name { + case "Errors": + o.C.Errors = &count + } + return nil +} + +type fileuploadFileCountPreloader struct { + Errors func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader +} + +func buildFileuploadFileCountPreloader() fileuploadFileCountPreloader { + return fileuploadFileCountPreloader{ + Errors: func(mods ...bob.Mod[*dialect.SelectQuery]) psql.Preloader { + return countPreloader[*FileuploadFile]("Errors", func(parent string) bob.Expression { + // Build a correlated subquery: (SELECT COUNT(*) FROM related WHERE fk = parent.pk) + if parent == "" { + parent = FileuploadFiles.Alias() + } + + subqueryMods := []bob.Mod[*dialect.SelectQuery]{ + sm.Columns(psql.Raw("count(*)")), + + sm.From(FileuploadErrors.Name()), + sm.Where(psql.Quote(FileuploadErrors.Alias(), "file_id").EQ(psql.Quote(parent, "id"))), + } + subqueryMods = append(subqueryMods, mods...) + return psql.Group(psql.Select(subqueryMods...).Expression) + }) + }, + } +} + +type fileuploadFileCountThenLoader[Q orm.Loadable] struct { + Errors func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] +} + +func buildFileuploadFileCountThenLoader[Q orm.Loadable]() fileuploadFileCountThenLoader[Q] { + type ErrorsCountInterface interface { + LoadCountErrors(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error + } + + return fileuploadFileCountThenLoader[Q]{ + Errors: countThenLoadBuilder[Q]( + "Errors", + func(ctx context.Context, exec bob.Executor, retrieved ErrorsCountInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { + return retrieved.LoadCountErrors(ctx, exec, mods...) 
+ }, + ), + } +} + +// LoadCountErrors loads the count of Errors into the C struct +func (o *FileuploadFile) LoadCountErrors(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if o == nil { + return nil + } + + count, err := o.Errors(mods...).Count(ctx, exec) + if err != nil { + return err + } + + o.C.Errors = &count + return nil +} + +// LoadCountErrors loads the count of Errors for a slice +func (os FileuploadFileSlice) LoadCountErrors(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if len(os) == 0 { + return nil + } + + for _, o := range os { + if err := o.LoadCountErrors(ctx, exec, mods...); err != nil { + return err + } + } + + return nil +} + +type fileuploadFileJoins[Q dialect.Joinable] struct { + typ string + CSV modAs[Q, fileuploadCSVColumns] + Errors modAs[Q, fileuploadErrorColumns] + CreatorUser modAs[Q, userColumns] +} + +func (j fileuploadFileJoins[Q]) aliasedAs(alias string) fileuploadFileJoins[Q] { + return buildFileuploadFileJoins[Q](buildFileuploadFileColumns(alias), j.typ) +} + +func buildFileuploadFileJoins[Q dialect.Joinable](cols fileuploadFileColumns, typ string) fileuploadFileJoins[Q] { + return fileuploadFileJoins[Q]{ + typ: typ, + CSV: modAs[Q, fileuploadCSVColumns]{ + c: FileuploadCSVS.Columns, + f: func(to fileuploadCSVColumns) bob.Mod[Q] { + mods := make(mods.QueryMods[Q], 0, 1) + + { + mods = append(mods, dialect.Join[Q](typ, FileuploadCSVS.Name().As(to.Alias())).On( + to.FileID.EQ(cols.ID), + )) + } + + return mods + }, + }, + Errors: modAs[Q, fileuploadErrorColumns]{ + c: FileuploadErrors.Columns, + f: func(to fileuploadErrorColumns) bob.Mod[Q] { + mods := make(mods.QueryMods[Q], 0, 1) + + { + mods = append(mods, dialect.Join[Q](typ, FileuploadErrors.Name().As(to.Alias())).On( + to.FileID.EQ(cols.ID), + )) + } + + return mods + }, + }, + CreatorUser: modAs[Q, userColumns]{ + c: Users.Columns, + f: func(to userColumns) bob.Mod[Q] { + mods := 
make(mods.QueryMods[Q], 0, 1) + + { + mods = append(mods, dialect.Join[Q](typ, Users.Name().As(to.Alias())).On( + to.ID.EQ(cols.CreatorID), + )) + } + + return mods + }, + }, + } +} diff --git a/db/models/user_.bob.go b/db/models/user_.bob.go index 9b74ebf1..4738d743 100644 --- a/db/models/user_.bob.go +++ b/db/models/user_.bob.go @@ -57,14 +57,15 @@ type UsersQuery = *psql.ViewQuery[*User, UserSlice] // userR is where relationships are stored. type userR struct { - PublicUserUser ArcgisUserSlice // arcgis.user_.user__public_user_id_fkey - CreatorNoteAudios NoteAudioSlice // note_audio.note_audio_creator_id_fkey - DeletorNoteAudios NoteAudioSlice // note_audio.note_audio_deletor_id_fkey - CreatorNoteImages NoteImageSlice // note_image.note_image_creator_id_fkey - DeletorNoteImages NoteImageSlice // note_image.note_image_deletor_id_fkey - UserNotifications NotificationSlice // notification.notification_user_id_fkey - UserOauthTokens OauthTokenSlice // oauth_token.oauth_token_user_id_fkey - Organization *Organization // user_.user__organization_id_fkey + PublicUserUser ArcgisUserSlice // arcgis.user_.user__public_user_id_fkey + CreatorFiles FileuploadFileSlice // fileupload.file.file_creator_id_fkey + CreatorNoteAudios NoteAudioSlice // note_audio.note_audio_creator_id_fkey + DeletorNoteAudios NoteAudioSlice // note_audio.note_audio_deletor_id_fkey + CreatorNoteImages NoteImageSlice // note_image.note_image_creator_id_fkey + DeletorNoteImages NoteImageSlice // note_image.note_image_deletor_id_fkey + UserNotifications NotificationSlice // notification.notification_user_id_fkey + UserOauthTokens OauthTokenSlice // oauth_token.oauth_token_user_id_fkey + Organization *Organization // user_.user__organization_id_fkey } func buildUserColumns(alias string) userColumns { @@ -635,6 +636,30 @@ func (os UserSlice) PublicUserUser(mods ...bob.Mod[*dialect.SelectQuery]) Arcgis )...) 
} +// CreatorFiles starts a query for related objects on fileupload.file +func (o *User) CreatorFiles(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadFilesQuery { + return FileuploadFiles.Query(append(mods, + sm.Where(FileuploadFiles.Columns.CreatorID.EQ(psql.Arg(o.ID))), + )...) +} + +func (os UserSlice) CreatorFiles(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadFilesQuery { + pkID := make(pgtypes.Array[int32], 0, len(os)) + for _, o := range os { + if o == nil { + continue + } + pkID = append(pkID, o.ID) + } + PKArgExpr := psql.Select(sm.Columns( + psql.F("unnest", psql.Cast(psql.Arg(pkID), "integer[]")), + )) + + return FileuploadFiles.Query(append(mods, + sm.Where(psql.Group(FileuploadFiles.Columns.CreatorID).OP("IN", PKArgExpr)), + )...) +} + // CreatorNoteAudios starts a query for related objects on note_audio func (o *User) CreatorNoteAudios(mods ...bob.Mod[*dialect.SelectQuery]) NoteAudiosQuery { return NoteAudios.Query(append(mods, @@ -871,6 +896,74 @@ func (user0 *User) AttachPublicUserUser(ctx context.Context, exec bob.Executor, return nil } +func insertUserCreatorFiles0(ctx context.Context, exec bob.Executor, fileuploadFiles1 []*FileuploadFileSetter, user0 *User) (FileuploadFileSlice, error) { + for i := range fileuploadFiles1 { + fileuploadFiles1[i].CreatorID = omit.From(user0.ID) + } + + ret, err := FileuploadFiles.Insert(bob.ToMods(fileuploadFiles1...)).All(ctx, exec) + if err != nil { + return ret, fmt.Errorf("insertUserCreatorFiles0: %w", err) + } + + return ret, nil +} + +func attachUserCreatorFiles0(ctx context.Context, exec bob.Executor, count int, fileuploadFiles1 FileuploadFileSlice, user0 *User) (FileuploadFileSlice, error) { + setter := &FileuploadFileSetter{ + CreatorID: omit.From(user0.ID), + } + + err := fileuploadFiles1.UpdateAll(ctx, exec, *setter) + if err != nil { + return nil, fmt.Errorf("attachUserCreatorFiles0: %w", err) + } + + return fileuploadFiles1, nil +} + +func (user0 *User) InsertCreatorFiles(ctx context.Context, exec 
bob.Executor, related ...*FileuploadFileSetter) error { + if len(related) == 0 { + return nil + } + + var err error + + fileuploadFiles1, err := insertUserCreatorFiles0(ctx, exec, related, user0) + if err != nil { + return err + } + + user0.R.CreatorFiles = append(user0.R.CreatorFiles, fileuploadFiles1...) + + for _, rel := range fileuploadFiles1 { + rel.R.CreatorUser = user0 + } + return nil +} + +func (user0 *User) AttachCreatorFiles(ctx context.Context, exec bob.Executor, related ...*FileuploadFile) error { + if len(related) == 0 { + return nil + } + + var err error + fileuploadFiles1 := FileuploadFileSlice(related) + + _, err = attachUserCreatorFiles0(ctx, exec, len(related), fileuploadFiles1, user0) + if err != nil { + return err + } + + user0.R.CreatorFiles = append(user0.R.CreatorFiles, fileuploadFiles1...) + + for _, rel := range related { + rel.R.CreatorUser = user0 + } + + return nil +} + func insertUserCreatorNoteAudios0(ctx context.Context, exec bob.Executor, noteAudios1 []*NoteAudioSetter, user0 *User) (NoteAudioSlice, error) { for i := range noteAudios1 { noteAudios1[i].CreatorID = omit.From(user0.ID) @@ -1383,6 +1476,20 @@ func (o *User) Preload(name string, retrieved any) error { } } return nil + case "CreatorFiles": + rels, ok := retrieved.(FileuploadFileSlice) + if !ok { + return fmt.Errorf("user cannot load %T as %q", retrieved, name) + } + + o.R.CreatorFiles = rels + + for _, rel := range rels { + if rel != nil { + rel.R.CreatorUser = o + } + } + return nil case "CreatorNoteAudios": rels, ok := retrieved.(NoteAudioSlice) if !ok { @@ -1508,6 +1615,7 @@ func buildUserPreloader() userPreloader { type userThenLoader[Q orm.Loadable] struct { PublicUserUser func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] + CreatorFiles func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] CreatorNoteAudios func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] DeletorNoteAudios func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] CreatorNoteImages 
func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] @@ -1521,6 +1629,9 @@ func buildUserThenLoader[Q orm.Loadable]() userThenLoader[Q] { type PublicUserUserLoadInterface interface { LoadPublicUserUser(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error } + type CreatorFilesLoadInterface interface { + LoadCreatorFiles(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error + } type CreatorNoteAudiosLoadInterface interface { LoadCreatorNoteAudios(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error } @@ -1550,6 +1661,12 @@ func buildUserThenLoader[Q orm.Loadable]() userThenLoader[Q] { return retrieved.LoadPublicUserUser(ctx, exec, mods...) }, ), + CreatorFiles: thenLoadBuilder[Q]( + "CreatorFiles", + func(ctx context.Context, exec bob.Executor, retrieved CreatorFilesLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { + return retrieved.LoadCreatorFiles(ctx, exec, mods...) + }, + ), CreatorNoteAudios: thenLoadBuilder[Q]( "CreatorNoteAudios", func(ctx context.Context, exec bob.Executor, retrieved CreatorNoteAudiosLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { @@ -1656,6 +1773,67 @@ func (os UserSlice) LoadPublicUserUser(ctx context.Context, exec bob.Executor, m return nil } +// LoadCreatorFiles loads the user's CreatorFiles into the .R struct +func (o *User) LoadCreatorFiles(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if o == nil { + return nil + } + + // Reset the relationship + o.R.CreatorFiles = nil + + related, err := o.CreatorFiles(mods...).All(ctx, exec) + if err != nil { + return err + } + + for _, rel := range related { + rel.R.CreatorUser = o + } + + o.R.CreatorFiles = related + return nil +} + +// LoadCreatorFiles loads the user's CreatorFiles into the .R struct +func (os UserSlice) LoadCreatorFiles(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if len(os) == 0 { + return nil + } + + 
fileuploadFiles, err := os.CreatorFiles(mods...).All(ctx, exec) + if err != nil { + return err + } + + for _, o := range os { + if o == nil { + continue + } + + o.R.CreatorFiles = nil + } + + for _, o := range os { + if o == nil { + continue + } + + for _, rel := range fileuploadFiles { + + if !(o.ID == rel.CreatorID) { + continue + } + + rel.R.CreatorUser = o + + o.R.CreatorFiles = append(o.R.CreatorFiles, rel) + } + } + + return nil +} + // LoadCreatorNoteAudios loads the user's CreatorNoteAudios into the .R struct func (o *User) LoadCreatorNoteAudios(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { if o == nil { @@ -2083,6 +2261,7 @@ func (os UserSlice) LoadOrganization(ctx context.Context, exec bob.Executor, mod // userC is where relationship counts are stored. type userC struct { PublicUserUser *int64 + CreatorFiles *int64 CreatorNoteAudios *int64 DeletorNoteAudios *int64 CreatorNoteImages *int64 @@ -2100,6 +2279,8 @@ func (o *User) PreloadCount(name string, count int64) error { switch name { case "PublicUserUser": o.C.PublicUserUser = &count + case "CreatorFiles": + o.C.CreatorFiles = &count case "CreatorNoteAudios": o.C.CreatorNoteAudios = &count case "DeletorNoteAudios": @@ -2118,6 +2299,7 @@ func (o *User) PreloadCount(name string, count int64) error { type userCountPreloader struct { PublicUserUser func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader + CreatorFiles func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader CreatorNoteAudios func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader DeletorNoteAudios func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader CreatorNoteImages func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader @@ -2145,6 +2327,23 @@ func buildUserCountPreloader() userCountPreloader { return psql.Group(psql.Select(subqueryMods...).Expression) }) }, + CreatorFiles: func(mods ...bob.Mod[*dialect.SelectQuery]) psql.Preloader { + return countPreloader[*User]("CreatorFiles", func(parent string) 
bob.Expression { + // Build a correlated subquery: (SELECT COUNT(*) FROM related WHERE fk = parent.pk) + if parent == "" { + parent = Users.Alias() + } + + subqueryMods := []bob.Mod[*dialect.SelectQuery]{ + sm.Columns(psql.Raw("count(*)")), + + sm.From(FileuploadFiles.Name()), + sm.Where(psql.Quote(FileuploadFiles.Alias(), "creator_id").EQ(psql.Quote(parent, "id"))), + } + subqueryMods = append(subqueryMods, mods...) + return psql.Group(psql.Select(subqueryMods...).Expression) + }) + }, CreatorNoteAudios: func(mods ...bob.Mod[*dialect.SelectQuery]) psql.Preloader { return countPreloader[*User]("CreatorNoteAudios", func(parent string) bob.Expression { // Build a correlated subquery: (SELECT COUNT(*) FROM related WHERE fk = parent.pk) @@ -2252,6 +2451,7 @@ func buildUserCountPreloader() userCountPreloader { type userCountThenLoader[Q orm.Loadable] struct { PublicUserUser func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] + CreatorFiles func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] CreatorNoteAudios func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] DeletorNoteAudios func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] CreatorNoteImages func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] @@ -2264,6 +2464,9 @@ func buildUserCountThenLoader[Q orm.Loadable]() userCountThenLoader[Q] { type PublicUserUserCountInterface interface { LoadCountPublicUserUser(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error } + type CreatorFilesCountInterface interface { + LoadCountCreatorFiles(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error + } type CreatorNoteAudiosCountInterface interface { LoadCountCreatorNoteAudios(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error } @@ -2290,6 +2493,12 @@ func buildUserCountThenLoader[Q orm.Loadable]() userCountThenLoader[Q] { return retrieved.LoadCountPublicUserUser(ctx, exec, mods...) 
}, ), + CreatorFiles: countThenLoadBuilder[Q]( + "CreatorFiles", + func(ctx context.Context, exec bob.Executor, retrieved CreatorFilesCountInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { + return retrieved.LoadCountCreatorFiles(ctx, exec, mods...) + }, + ), CreatorNoteAudios: countThenLoadBuilder[Q]( "CreatorNoteAudios", func(ctx context.Context, exec bob.Executor, retrieved CreatorNoteAudiosCountInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { @@ -2359,6 +2568,36 @@ func (os UserSlice) LoadCountPublicUserUser(ctx context.Context, exec bob.Execut return nil } +// LoadCountCreatorFiles loads the count of CreatorFiles into the C struct +func (o *User) LoadCountCreatorFiles(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if o == nil { + return nil + } + + count, err := o.CreatorFiles(mods...).Count(ctx, exec) + if err != nil { + return err + } + + o.C.CreatorFiles = &count + return nil +} + +// LoadCountCreatorFiles loads the count of CreatorFiles for a slice +func (os UserSlice) LoadCountCreatorFiles(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if len(os) == 0 { + return nil + } + + for _, o := range os { + if err := o.LoadCountCreatorFiles(ctx, exec, mods...); err != nil { + return err + } + } + + return nil +} + // LoadCountCreatorNoteAudios loads the count of CreatorNoteAudios into the C struct func (o *User) LoadCountCreatorNoteAudios(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { if o == nil { @@ -2542,6 +2781,7 @@ func (os UserSlice) LoadCountUserOauthTokens(ctx context.Context, exec bob.Execu type userJoins[Q dialect.Joinable] struct { typ string PublicUserUser modAs[Q, arcgisuserColumns] + CreatorFiles modAs[Q, fileuploadFileColumns] CreatorNoteAudios modAs[Q, noteAudioColumns] DeletorNoteAudios modAs[Q, noteAudioColumns] CreatorNoteImages modAs[Q, noteImageColumns] @@ -2572,6 +2812,20 @@ func buildUserJoins[Q 
dialect.Joinable](cols userColumns, typ string) userJoins[ return mods }, }, + CreatorFiles: modAs[Q, fileuploadFileColumns]{ + c: FileuploadFiles.Columns, + f: func(to fileuploadFileColumns) bob.Mod[Q] { + mods := make(mods.QueryMods[Q], 0, 1) + + { + mods = append(mods, dialect.Join[Q](typ, FileuploadFiles.Name().As(to.Alias())).On( + to.CreatorID.EQ(cols.ID), + )) + } + + return mods + }, + }, CreatorNoteAudios: modAs[Q, noteAudioColumns]{ c: NoteAudios.Columns, f: func(to noteAudioColumns) bob.Mod[Q] { diff --git a/platform/pool.go b/platform/pool.go new file mode 100644 index 00000000..61adbc1f --- /dev/null +++ b/platform/pool.go @@ -0,0 +1,54 @@ +package platform + +import ( + "context" + "fmt" + "time" + + "github.com/Gleipnir-Technology/nidus-sync/db" + "github.com/Gleipnir-Technology/nidus-sync/db/enums" + "github.com/Gleipnir-Technology/nidus-sync/db/models" + "github.com/Gleipnir-Technology/nidus-sync/userfile" + "github.com/aarondl/opt/omit" + "github.com/aarondl/opt/omitnull" +) + +type PoolUpload struct { + ID int32 +} + +func NewPoolUpload(ctx context.Context, u *models.User, upload userfile.FileUpload) (PoolUpload, error) { + txn, err := db.PGInstance.BobDB.BeginTx(ctx, nil) + if err != nil { + return PoolUpload{}, fmt.Errorf("Failed to begin transaction: %w", err) + } + // Roll back on any early-return path; rolling back after a successful commit is a no-op. + defer txn.Rollback(ctx) + + file, err := models.FileuploadFiles.Insert(&models.FileuploadFileSetter{ + ContentType: omit.From(upload.ContentType), + Created: omit.From(time.Now()), + CreatorID: omit.From(u.ID), + Deleted: omitnull.FromPtr[time.Time](nil), + Name: omit.From(upload.Name), + Status: omit.From(enums.FileuploadFilestatustypeUploaded), + SizeBytes: omit.From(int32(upload.SizeBytes)), + FileUUID: omit.From(upload.UUID), + }).One(ctx, txn) + if err != nil { + return PoolUpload{}, fmt.Errorf("Failed to create file upload: %w", err) + } + _, err = models.FileuploadCSVS.Insert(&models.FileuploadCSVSetter{ + FileID: omit.From(file.ID), + Type: omit.From(enums.FileuploadCsvtypePoollist), + }).One(ctx,
txn) + if err != nil { + return PoolUpload{}, fmt.Errorf("Failed to create csv: %w", err) + } + if err = txn.Commit(ctx); err != nil { + return PoolUpload{}, fmt.Errorf("Failed to commit transaction: %w", err) + } + return PoolUpload{ + ID: file.ID, + }, nil +} diff --git a/rmo/image.go b/rmo/image.go index 550956d3..5c965228 100644 --- a/rmo/image.go +++ b/rmo/image.go @@ -5,14 +5,20 @@ import ( "github.com/Gleipnir-Technology/nidus-sync/userfile" "github.com/go-chi/chi/v5" + "github.com/google/uuid" ) // ServeImageByUUID reads an image with the given UUID from disk and writes it to the HTTP response func getImageByUUID(w http.ResponseWriter, r *http.Request) { - uid := chi.URLParam(r, "uuid") - if uid == "" { + u := chi.URLParam(r, "uuid") + if u == "" { http.NotFound(w, r) return } + uid, err := uuid.Parse(u) + if err != nil { + http.Error(w, "Failed to parse uuid", http.StatusBadRequest) + return + } userfile.PublicImageFileToResponse(w, uid) } diff --git a/sync/pool.go b/sync/pool.go index b1968a68..03b4c2bc 100644 --- a/sync/pool.go +++ b/sync/pool.go @@ -1,10 +1,13 @@ package sync import ( + "fmt" + "net/http" + "github.com/Gleipnir-Technology/nidus-sync/db/models" "github.com/Gleipnir-Technology/nidus-sync/html" + "github.com/Gleipnir-Technology/nidus-sync/platform" "github.com/Gleipnir-Technology/nidus-sync/userfile" - "net/http" ) type ContentPoolList struct { @@ -52,5 +55,19 @@ func postPoolUpload(w http.ResponseWriter, r *http.Request, u *models.User) { respondError(w, "Failed to extract image uploads", err, http.StatusInternalServerError) return } - images, err := saveImageUploads(r.Context(), tx, uploads) + if len(uploads) == 0 { + respondError(w, "No upload found", nil, http.StatusBadRequest) + return + } + if len(uploads) != 1 { + respondError(w, "You must only submit one file at a time", nil, http.StatusBadRequest) + return + } + upload := uploads[0] + poolUpload, err := platform.NewPoolUpload(r.Context(), u, upload) + if err != nil { + respondError(w, "Failed to create pool upload", err, http.StatusInternalServerError) + return + } + http.Redirect(w, r, fmt.Sprintf("/pool/upload/%d", poolUpload.ID), http.StatusFound) } diff --git a/userfile/upload.go
b/userfile/upload.go index 1e8a577c..caa21db9 100644 --- a/userfile/upload.go +++ b/userfile/upload.go @@ -13,10 +13,9 @@ import ( type FileUpload struct { ContentType string - - UploadFilesize int - UploadFilename string - UUID uuid.UUID + Name string + SizeBytes int + UUID uuid.UUID } func SaveFileUpload(r *http.Request, name string, subdir string, extension string) ([]FileUpload, error) { @@ -69,9 +68,9 @@ func saveFileUpload(headers *multipart.FileHeader, subdir string, extension stri } log.Info().Int("size", len(file_bytes)).Str("uploaded_filename", headers.Filename).Str("content-type", content_type).Str("uuid", u.String()).Msg("Saved an uploaded file to disk") return FileUpload{ - ContentType: content_type, - UploadFilename: headers.Filename, - UploadFilesize: len(file_bytes), - UUID: u, + ContentType: content_type, + Name: headers.Filename, + SizeBytes: len(file_bytes), + UUID: u, }, nil }