Make file uploads of CSV actually save to disk

This commit is contained in:
Eli Ribble 2026-02-08 01:44:44 +00:00
parent 0d55eb1da4
commit 6716bc68c9
No known key found for this signature in database
27 changed files with 5459 additions and 37 deletions

View file

@ -17,6 +17,7 @@ psql:
schemas:
- "arcgis"
- "comms"
- "fileupload"
- "import"
- "public"
- "publicreport"

View file

@ -0,0 +1,17 @@
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.
package dberrors
// FileuploadCSVErrors exposes the unique-constraint violations Postgres can
// raise for the fileupload.csv table, keyed by constraint name.
var FileuploadCSVErrors = &fileuploadCSVErrors{
	// csv_pkey: primary key over file_id.
	ErrUniqueCsvPkey: &UniqueConstraintError{
		schema:  "fileupload",
		table:   "csv",
		columns: []string{"file_id"},
		s:       "csv_pkey",
	},
}
// fileuploadCSVErrors groups the constraint errors for fileupload.csv.
type fileuploadCSVErrors struct {
	ErrUniqueCsvPkey *UniqueConstraintError
}

View file

@ -0,0 +1,17 @@
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.
package dberrors
// FileuploadErrorErrors exposes the unique-constraint violations Postgres can
// raise for the fileupload.error table, keyed by constraint name.
var FileuploadErrorErrors = &fileuploadErrorErrors{
	// error_pkey: primary key over id.
	ErrUniqueErrorPkey: &UniqueConstraintError{
		schema:  "fileupload",
		table:   "error",
		columns: []string{"id"},
		s:       "error_pkey",
	},
}
// fileuploadErrorErrors groups the constraint errors for fileupload.error.
type fileuploadErrorErrors struct {
	ErrUniqueErrorPkey *UniqueConstraintError
}

View file

@ -0,0 +1,17 @@
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.
package dberrors
// FileuploadFileErrors exposes the unique-constraint violations Postgres can
// raise for the fileupload.file table, keyed by constraint name.
var FileuploadFileErrors = &fileuploadFileErrors{
	// file_pkey: primary key over id.
	ErrUniqueFilePkey: &UniqueConstraintError{
		schema:  "fileupload",
		table:   "file",
		columns: []string{"id"},
		s:       "file_pkey",
	},
}
// fileuploadFileErrors groups the constraint errors for fileupload.file.
type fileuploadFileErrors struct {
	ErrUniqueFilePkey *UniqueConstraintError
}

View file

@ -0,0 +1,117 @@
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.
package dbinfo
import "github.com/aarondl/opt/null"
// FileuploadCSVS describes the fileupload.csv table — its columns, indexes,
// primary key, and foreign keys — as introspected by BobGen.
var FileuploadCSVS = Table[
	fileuploadCSVColumns,
	fileuploadCSVIndexes,
	fileuploadCSVForeignKeys,
	fileuploadCSVUniques,
	fileuploadCSVChecks,
]{
	Schema: "fileupload",
	Name:   "csv",
	Columns: fileuploadCSVColumns{
		FileID: column{
			Name:      "file_id",
			DBType:    "integer",
			Default:   "",
			Comment:   "",
			Nullable:  false,
			Generated: false,
			AutoIncr:  false,
		},
		// The Go field is Type but the DB column is "type_" (trailing
		// underscore), typed as the fileupload.csvtype enum.
		Type: column{
			Name:      "type_",
			DBType:    "fileupload.csvtype",
			Default:   "",
			Comment:   "",
			Nullable:  false,
			Generated: false,
			AutoIncr:  false,
		},
	},
	Indexes: fileuploadCSVIndexes{
		// Unique btree index backing the primary key.
		CSVPkey: index{
			Type: "btree",
			Name: "csv_pkey",
			Columns: []indexColumn{
				{
					Name:         "file_id",
					Desc:         null.FromCond(false, true),
					IsExpression: false,
				},
			},
			Unique:        true,
			Comment:       "",
			NullsFirst:    []bool{false},
			NullsDistinct: false,
			Where:         "",
			Include:       []string{},
		},
	},
	PrimaryKey: &constraint{
		Name:    "csv_pkey",
		Columns: []string{"file_id"},
		Comment: "",
	},
	ForeignKeys: fileuploadCSVForeignKeys{
		// file_id references fileupload.file(id): one csv row per file.
		FileuploadCSVCSVFileIDFkey: foreignKey{
			constraint: constraint{
				Name:    "fileupload.csv.csv_file_id_fkey",
				Columns: []string{"file_id"},
				Comment: "",
			},
			ForeignTable:   "fileupload.file",
			ForeignColumns: []string{"id"},
		},
	},
	Comment: "",
}
// fileuploadCSVColumns names the columns of fileupload.csv.
type fileuploadCSVColumns struct {
	FileID column
	Type   column
}
// AsSlice returns the columns in declaration order.
func (c fileuploadCSVColumns) AsSlice() []column {
	return []column{
		c.FileID, c.Type,
	}
}
// fileuploadCSVIndexes names the indexes of fileupload.csv.
type fileuploadCSVIndexes struct {
	CSVPkey index
}
// AsSlice returns the indexes in declaration order.
func (i fileuploadCSVIndexes) AsSlice() []index {
	return []index{
		i.CSVPkey,
	}
}
// fileuploadCSVForeignKeys names the foreign keys of fileupload.csv.
type fileuploadCSVForeignKeys struct {
	FileuploadCSVCSVFileIDFkey foreignKey
}
// AsSlice returns the foreign keys in declaration order.
func (f fileuploadCSVForeignKeys) AsSlice() []foreignKey {
	return []foreignKey{
		f.FileuploadCSVCSVFileIDFkey,
	}
}
// fileuploadCSVUniques is empty: the table declares no extra unique constraints.
type fileuploadCSVUniques struct{}
func (u fileuploadCSVUniques) AsSlice() []constraint {
	return []constraint{}
}
// fileuploadCSVChecks is empty: the table declares no check constraints.
type fileuploadCSVChecks struct{}
func (c fileuploadCSVChecks) AsSlice() []check {
	return []check{}
}

View file

@ -0,0 +1,137 @@
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.
package dbinfo
import "github.com/aarondl/opt/null"
// FileuploadErrors describes the fileupload.error table — per-line parse
// errors attached to an uploaded file — as introspected by BobGen.
var FileuploadErrors = Table[
	fileuploadErrorColumns,
	fileuploadErrorIndexes,
	fileuploadErrorForeignKeys,
	fileuploadErrorUniques,
	fileuploadErrorChecks,
]{
	Schema: "fileupload",
	Name:   "error",
	Columns: fileuploadErrorColumns{
		FileID: column{
			Name:      "file_id",
			DBType:    "integer",
			Default:   "",
			Comment:   "",
			Nullable:  false,
			Generated: false,
			AutoIncr:  false,
		},
		// id is sequence-assigned via nextval, not marked AutoIncr.
		ID: column{
			Name:      "id",
			DBType:    "integer",
			Default:   "nextval('fileupload.error_id_seq'::regclass)",
			Comment:   "",
			Nullable:  false,
			Generated: false,
			AutoIncr:  false,
		},
		Line: column{
			Name:      "line",
			DBType:    "integer",
			Default:   "",
			Comment:   "",
			Nullable:  false,
			Generated: false,
			AutoIncr:  false,
		},
		Message: column{
			Name:      "message",
			DBType:    "text",
			Default:   "",
			Comment:   "",
			Nullable:  false,
			Generated: false,
			AutoIncr:  false,
		},
	},
	Indexes: fileuploadErrorIndexes{
		// Unique btree index backing the primary key.
		ErrorPkey: index{
			Type: "btree",
			Name: "error_pkey",
			Columns: []indexColumn{
				{
					Name:         "id",
					Desc:         null.FromCond(false, true),
					IsExpression: false,
				},
			},
			Unique:        true,
			Comment:       "",
			NullsFirst:    []bool{false},
			NullsDistinct: false,
			Where:         "",
			Include:       []string{},
		},
	},
	PrimaryKey: &constraint{
		Name:    "error_pkey",
		Columns: []string{"id"},
		Comment: "",
	},
	ForeignKeys: fileuploadErrorForeignKeys{
		// file_id references fileupload.file(id): many errors per file.
		FileuploadErrorErrorFileIDFkey: foreignKey{
			constraint: constraint{
				Name:    "fileupload.error.error_file_id_fkey",
				Columns: []string{"file_id"},
				Comment: "",
			},
			ForeignTable:   "fileupload.file",
			ForeignColumns: []string{"id"},
		},
	},
	Comment: "",
}
// fileuploadErrorColumns names the columns of fileupload.error.
type fileuploadErrorColumns struct {
	FileID  column
	ID      column
	Line    column
	Message column
}
// AsSlice returns the columns in declaration order.
func (c fileuploadErrorColumns) AsSlice() []column {
	return []column{
		c.FileID, c.ID, c.Line, c.Message,
	}
}
// fileuploadErrorIndexes names the indexes of fileupload.error.
type fileuploadErrorIndexes struct {
	ErrorPkey index
}
// AsSlice returns the indexes in declaration order.
func (i fileuploadErrorIndexes) AsSlice() []index {
	return []index{
		i.ErrorPkey,
	}
}
// fileuploadErrorForeignKeys names the foreign keys of fileupload.error.
type fileuploadErrorForeignKeys struct {
	FileuploadErrorErrorFileIDFkey foreignKey
}
// AsSlice returns the foreign keys in declaration order.
func (f fileuploadErrorForeignKeys) AsSlice() []foreignKey {
	return []foreignKey{
		f.FileuploadErrorErrorFileIDFkey,
	}
}
// fileuploadErrorUniques is empty: no extra unique constraints.
type fileuploadErrorUniques struct{}
func (u fileuploadErrorUniques) AsSlice() []constraint {
	return []constraint{}
}
// fileuploadErrorChecks is empty: no check constraints.
type fileuploadErrorChecks struct{}
func (c fileuploadErrorChecks) AsSlice() []check {
	return []check{}
}

View file

@ -0,0 +1,187 @@
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.
package dbinfo
import "github.com/aarondl/opt/null"
// FileuploadFiles describes the fileupload.file table — one row per uploaded
// file, owned by a creator user — as introspected by BobGen.
var FileuploadFiles = Table[
	fileuploadFileColumns,
	fileuploadFileIndexes,
	fileuploadFileForeignKeys,
	fileuploadFileUniques,
	fileuploadFileChecks,
]{
	Schema: "fileupload",
	Name:   "file",
	Columns: fileuploadFileColumns{
		// id is sequence-assigned via nextval, not marked AutoIncr.
		ID: column{
			Name:      "id",
			DBType:    "integer",
			Default:   "nextval('fileupload.file_id_seq'::regclass)",
			Comment:   "",
			Nullable:  false,
			Generated: false,
			AutoIncr:  false,
		},
		ContentType: column{
			Name:      "content_type",
			DBType:    "text",
			Default:   "",
			Comment:   "",
			Nullable:  false,
			Generated: false,
			AutoIncr:  false,
		},
		Created: column{
			Name:      "created",
			DBType:    "timestamp without time zone",
			Default:   "",
			Comment:   "",
			Nullable:  false,
			Generated: false,
			AutoIncr:  false,
		},
		CreatorID: column{
			Name:      "creator_id",
			DBType:    "integer",
			Default:   "",
			Comment:   "",
			Nullable:  false,
			Generated: false,
			AutoIncr:  false,
		},
		// Nullable timestamp defaulting to NULL — presumably a soft-delete
		// marker; confirm against application code.
		Deleted: column{
			Name:      "deleted",
			DBType:    "timestamp without time zone",
			Default:   "NULL",
			Comment:   "",
			Nullable:  true,
			Generated: false,
			AutoIncr:  false,
		},
		Name: column{
			Name:      "name",
			DBType:    "text",
			Default:   "",
			Comment:   "",
			Nullable:  false,
			Generated: false,
			AutoIncr:  false,
		},
		// status uses the fileupload.filestatustype enum.
		Status: column{
			Name:      "status",
			DBType:    "fileupload.filestatustype",
			Default:   "",
			Comment:   "",
			Nullable:  false,
			Generated: false,
			AutoIncr:  false,
		},
		SizeBytes: column{
			Name:      "size_bytes",
			DBType:    "integer",
			Default:   "",
			Comment:   "",
			Nullable:  false,
			Generated: false,
			AutoIncr:  false,
		},
		FileUUID: column{
			Name:      "file_uuid",
			DBType:    "uuid",
			Default:   "",
			Comment:   "",
			Nullable:  false,
			Generated: false,
			AutoIncr:  false,
		},
	},
	Indexes: fileuploadFileIndexes{
		// Unique btree index backing the primary key.
		FilePkey: index{
			Type: "btree",
			Name: "file_pkey",
			Columns: []indexColumn{
				{
					Name:         "id",
					Desc:         null.FromCond(false, true),
					IsExpression: false,
				},
			},
			Unique:        true,
			Comment:       "",
			NullsFirst:    []bool{false},
			NullsDistinct: false,
			Where:         "",
			Include:       []string{},
		},
	},
	PrimaryKey: &constraint{
		Name:    "file_pkey",
		Columns: []string{"id"},
		Comment: "",
	},
	ForeignKeys: fileuploadFileForeignKeys{
		// creator_id references user_(id) in the default schema.
		FileuploadFileFileCreatorIDFkey: foreignKey{
			constraint: constraint{
				Name:    "fileupload.file.file_creator_id_fkey",
				Columns: []string{"creator_id"},
				Comment: "",
			},
			ForeignTable:   "user_",
			ForeignColumns: []string{"id"},
		},
	},
	Comment: "",
}
// fileuploadFileColumns names the columns of fileupload.file.
type fileuploadFileColumns struct {
	ID          column
	ContentType column
	Created     column
	CreatorID   column
	Deleted     column
	Name        column
	Status      column
	SizeBytes   column
	FileUUID    column
}
// AsSlice returns the columns in declaration order.
func (c fileuploadFileColumns) AsSlice() []column {
	return []column{
		c.ID, c.ContentType, c.Created, c.CreatorID, c.Deleted, c.Name, c.Status, c.SizeBytes, c.FileUUID,
	}
}
// fileuploadFileIndexes names the indexes of fileupload.file.
type fileuploadFileIndexes struct {
	FilePkey index
}
// AsSlice returns the indexes in declaration order.
func (i fileuploadFileIndexes) AsSlice() []index {
	return []index{
		i.FilePkey,
	}
}
// fileuploadFileForeignKeys names the foreign keys of fileupload.file.
type fileuploadFileForeignKeys struct {
	FileuploadFileFileCreatorIDFkey foreignKey
}
// AsSlice returns the foreign keys in declaration order.
func (f fileuploadFileForeignKeys) AsSlice() []foreignKey {
	return []foreignKey{
		f.FileuploadFileFileCreatorIDFkey,
	}
}
// fileuploadFileUniques is empty: no extra unique constraints.
type fileuploadFileUniques struct{}
func (u fileuploadFileUniques) AsSlice() []constraint {
	return []constraint{}
}
// fileuploadFileChecks is empty: no check constraints.
type fileuploadFileChecks struct{}
func (c fileuploadFileChecks) AsSlice() []check {
	return []check{}
}

View file

@ -579,6 +579,149 @@ func (e *CommsTextorigin) Scan(value any) error {
	return nil
}
// Enum values for FileuploadCsvtype
const (
FileuploadCsvtypePoollist FileuploadCsvtype = "PoolList"
)
func AllFileuploadCsvtype() []FileuploadCsvtype {
return []FileuploadCsvtype{
FileuploadCsvtypePoollist,
}
}
type FileuploadCsvtype string
func (e FileuploadCsvtype) String() string {
return string(e)
}
func (e FileuploadCsvtype) Valid() bool {
switch e {
case FileuploadCsvtypePoollist:
return true
default:
return false
}
}
// useful when testing in other packages
func (e FileuploadCsvtype) All() []FileuploadCsvtype {
return AllFileuploadCsvtype()
}
func (e FileuploadCsvtype) MarshalText() ([]byte, error) {
return []byte(e), nil
}
func (e *FileuploadCsvtype) UnmarshalText(text []byte) error {
return e.Scan(text)
}
func (e FileuploadCsvtype) MarshalBinary() ([]byte, error) {
return []byte(e), nil
}
func (e *FileuploadCsvtype) UnmarshalBinary(data []byte) error {
return e.Scan(data)
}
func (e FileuploadCsvtype) Value() (driver.Value, error) {
return string(e), nil
}
func (e *FileuploadCsvtype) Scan(value any) error {
switch x := value.(type) {
case string:
*e = FileuploadCsvtype(x)
case []byte:
*e = FileuploadCsvtype(x)
case nil:
return fmt.Errorf("cannot nil into FileuploadCsvtype")
default:
return fmt.Errorf("cannot scan type %T: %v", value, value)
}
if !e.Valid() {
return fmt.Errorf("invalid FileuploadCsvtype value: %s", *e)
}
return nil
}
// FileuploadFilestatustype is the Go mapping of the Postgres enum
// fileupload.filestatustype.
type FileuploadFilestatustype string

// Enum values for FileuploadFilestatustype.
const (
	FileuploadFilestatustypeUploaded FileuploadFilestatustype = "uploaded"
	FileuploadFilestatustypeParsed   FileuploadFilestatustype = "parsed"
)

// AllFileuploadFilestatustype returns every declared value, in declaration order.
func AllFileuploadFilestatustype() []FileuploadFilestatustype {
	return []FileuploadFilestatustype{
		FileuploadFilestatustypeUploaded,
		FileuploadFilestatustypeParsed,
	}
}

// String returns the enum's database representation.
func (e FileuploadFilestatustype) String() string {
	return string(e)
}

// Valid reports whether e is one of the declared enum values.
func (e FileuploadFilestatustype) Valid() bool {
	for _, known := range AllFileuploadFilestatustype() {
		if e == known {
			return true
		}
	}
	return false
}

// All returns every declared value; useful when testing in other packages.
func (e FileuploadFilestatustype) All() []FileuploadFilestatustype {
	return AllFileuploadFilestatustype()
}

// MarshalText implements encoding.TextMarshaler.
func (e FileuploadFilestatustype) MarshalText() ([]byte, error) {
	return []byte(e), nil
}

// UnmarshalText implements encoding.TextUnmarshaler.
func (e *FileuploadFilestatustype) UnmarshalText(text []byte) error {
	return e.Scan(text)
}

// MarshalBinary implements encoding.BinaryMarshaler.
func (e FileuploadFilestatustype) MarshalBinary() ([]byte, error) {
	return []byte(e), nil
}

// UnmarshalBinary implements encoding.BinaryUnmarshaler.
func (e *FileuploadFilestatustype) UnmarshalBinary(data []byte) error {
	return e.Scan(data)
}

// Value implements driver.Valuer for writing the enum to the database.
func (e FileuploadFilestatustype) Value() (driver.Value, error) {
	return string(e), nil
}

// Scan implements sql.Scanner: it accepts string or []byte, rejects nil and
// every other type, and validates the result against the declared values.
func (e *FileuploadFilestatustype) Scan(value any) error {
	var raw string
	switch x := value.(type) {
	case string:
		raw = x
	case []byte:
		raw = string(x)
	case nil:
		return fmt.Errorf("cannot nil into FileuploadFilestatustype")
	default:
		return fmt.Errorf("cannot scan type %T: %v", value, value)
	}
	*e = FileuploadFilestatustype(raw)
	if !e.Valid() {
		return fmt.Errorf("invalid FileuploadFilestatustype value: %s", *e)
	}
	return nil
}
// Enum values for H3aggregationtype
const (
	H3aggregationtypeMosquitosource H3aggregationtype = "MosquitoSource"

View file

@ -173,6 +173,20 @@ var (
fieldseekerSyncWithParentsCascadingCtx = newContextual[bool]("fieldseekerSyncWithParentsCascading") fieldseekerSyncWithParentsCascadingCtx = newContextual[bool]("fieldseekerSyncWithParentsCascading")
fieldseekerSyncRelOrganizationCtx = newContextual[bool]("fieldseeker_sync.organization.fieldseeker_sync.fieldseeker_sync_organization_id_fkey") fieldseekerSyncRelOrganizationCtx = newContextual[bool]("fieldseeker_sync.organization.fieldseeker_sync.fieldseeker_sync_organization_id_fkey")
// Relationship Contexts for fileupload.csv
fileuploadCSVWithParentsCascadingCtx = newContextual[bool]("fileuploadCSVWithParentsCascading")
fileuploadCSVRelFileCtx = newContextual[bool]("fileupload.csv.fileupload.file.fileupload.csv.csv_file_id_fkey")
// Relationship Contexts for fileupload.error
fileuploadErrorWithParentsCascadingCtx = newContextual[bool]("fileuploadErrorWithParentsCascading")
fileuploadErrorRelFileCtx = newContextual[bool]("fileupload.error.fileupload.file.fileupload.error.error_file_id_fkey")
// Relationship Contexts for fileupload.file
fileuploadFileWithParentsCascadingCtx = newContextual[bool]("fileuploadFileWithParentsCascading")
fileuploadFileRelCSVCtx = newContextual[bool]("fileupload.csv.fileupload.file.fileupload.csv.csv_file_id_fkey")
fileuploadFileRelErrorsCtx = newContextual[bool]("fileupload.error.fileupload.file.fileupload.error.error_file_id_fkey")
fileuploadFileRelCreatorUserCtx = newContextual[bool]("fileupload.file.user_.fileupload.file.file_creator_id_fkey")
// Relationship Contexts for geography_columns // Relationship Contexts for geography_columns
geographyColumnWithParentsCascadingCtx = newContextual[bool]("geographyColumnWithParentsCascading") geographyColumnWithParentsCascadingCtx = newContextual[bool]("geographyColumnWithParentsCascading")
@ -354,6 +368,7 @@ var (
// Relationship Contexts for user_ // Relationship Contexts for user_
userWithParentsCascadingCtx = newContextual[bool]("userWithParentsCascading") userWithParentsCascadingCtx = newContextual[bool]("userWithParentsCascading")
userRelPublicUserUserCtx = newContextual[bool]("arcgis.user_.user_.arcgis.user_.user__public_user_id_fkey") userRelPublicUserUserCtx = newContextual[bool]("arcgis.user_.user_.arcgis.user_.user__public_user_id_fkey")
userRelCreatorFilesCtx = newContextual[bool]("fileupload.file.user_.fileupload.file.file_creator_id_fkey")
userRelCreatorNoteAudiosCtx = newContextual[bool]("note_audio.user_.note_audio.note_audio_creator_id_fkey") userRelCreatorNoteAudiosCtx = newContextual[bool]("note_audio.user_.note_audio.note_audio_creator_id_fkey")
userRelDeletorNoteAudiosCtx = newContextual[bool]("note_audio.user_.note_audio.note_audio_deletor_id_fkey") userRelDeletorNoteAudiosCtx = newContextual[bool]("note_audio.user_.note_audio.note_audio_deletor_id_fkey")
userRelCreatorNoteImagesCtx = newContextual[bool]("note_image.user_.note_image.note_image_creator_id_fkey") userRelCreatorNoteImagesCtx = newContextual[bool]("note_image.user_.note_image.note_image_creator_id_fkey")

View file

@ -57,6 +57,9 @@ type Factory struct {
baseFieldseekerZoneMods FieldseekerZoneModSlice baseFieldseekerZoneMods FieldseekerZoneModSlice
baseFieldseekerZones2Mods FieldseekerZones2ModSlice baseFieldseekerZones2Mods FieldseekerZones2ModSlice
baseFieldseekerSyncMods FieldseekerSyncModSlice baseFieldseekerSyncMods FieldseekerSyncModSlice
baseFileuploadCSVMods FileuploadCSVModSlice
baseFileuploadErrorMods FileuploadErrorModSlice
baseFileuploadFileMods FileuploadFileModSlice
baseGeographyColumnMods GeographyColumnModSlice baseGeographyColumnMods GeographyColumnModSlice
baseGeometryColumnMods GeometryColumnModSlice baseGeometryColumnMods GeometryColumnModSlice
baseGooseDBVersionMods GooseDBVersionModSlice baseGooseDBVersionMods GooseDBVersionModSlice
@ -2229,6 +2232,111 @@ func (f *Factory) FromExistingFieldseekerSync(m *models.FieldseekerSync) *Fields
return o return o
} }
// NewFileuploadCSV builds a template for a fileupload.csv row using a
// background context.
func (f *Factory) NewFileuploadCSV(mods ...FileuploadCSVMod) *FileuploadCSVTemplate {
	return f.NewFileuploadCSVWithContext(context.Background(), mods...)
}
// NewFileuploadCSVWithContext builds a template, applying the factory's base
// mods first and then the caller's mods, in order.
func (f *Factory) NewFileuploadCSVWithContext(ctx context.Context, mods ...FileuploadCSVMod) *FileuploadCSVTemplate {
	o := &FileuploadCSVTemplate{f: f}
	// Nil receiver is tolerated: base mods are simply skipped.
	if f != nil {
		f.baseFileuploadCSVMods.Apply(ctx, o)
	}
	FileuploadCSVModSlice(mods).Apply(ctx, o)
	return o
}
// FromExistingFileuploadCSV wraps an already-persisted model in a template so
// related templates can reference it without re-inserting it.
func (f *Factory) FromExistingFileuploadCSV(m *models.FileuploadCSV) *FileuploadCSVTemplate {
	o := &FileuploadCSVTemplate{f: f, alreadyPersisted: true}
	o.FileID = func() int32 { return m.FileID }
	o.Type = func() enums.FileuploadCsvtype { return m.Type }
	ctx := context.Background()
	// Carry over any loaded relationship.
	if m.R.File != nil {
		FileuploadCSVMods.WithExistingFile(m.R.File).Apply(ctx, o)
	}
	return o
}
// NewFileuploadError builds a template for a fileupload.error row using a
// background context.
func (f *Factory) NewFileuploadError(mods ...FileuploadErrorMod) *FileuploadErrorTemplate {
	return f.NewFileuploadErrorWithContext(context.Background(), mods...)
}
// NewFileuploadErrorWithContext builds a template, applying the factory's base
// mods first and then the caller's mods, in order.
func (f *Factory) NewFileuploadErrorWithContext(ctx context.Context, mods ...FileuploadErrorMod) *FileuploadErrorTemplate {
	o := &FileuploadErrorTemplate{f: f}
	if f != nil {
		f.baseFileuploadErrorMods.Apply(ctx, o)
	}
	FileuploadErrorModSlice(mods).Apply(ctx, o)
	return o
}
// FromExistingFileuploadError wraps an already-persisted model in a template.
func (f *Factory) FromExistingFileuploadError(m *models.FileuploadError) *FileuploadErrorTemplate {
	o := &FileuploadErrorTemplate{f: f, alreadyPersisted: true}
	o.FileID = func() int32 { return m.FileID }
	o.ID = func() int32 { return m.ID }
	o.Line = func() int32 { return m.Line }
	o.Message = func() string { return m.Message }
	ctx := context.Background()
	if m.R.File != nil {
		FileuploadErrorMods.WithExistingFile(m.R.File).Apply(ctx, o)
	}
	return o
}
// NewFileuploadFile builds a template for a fileupload.file row using a
// background context.
func (f *Factory) NewFileuploadFile(mods ...FileuploadFileMod) *FileuploadFileTemplate {
	return f.NewFileuploadFileWithContext(context.Background(), mods...)
}
// NewFileuploadFileWithContext builds a template, applying the factory's base
// mods first and then the caller's mods, in order.
func (f *Factory) NewFileuploadFileWithContext(ctx context.Context, mods ...FileuploadFileMod) *FileuploadFileTemplate {
	o := &FileuploadFileTemplate{f: f}
	if f != nil {
		f.baseFileuploadFileMods.Apply(ctx, o)
	}
	FileuploadFileModSlice(mods).Apply(ctx, o)
	return o
}
// FromExistingFileuploadFile wraps an already-persisted model in a template,
// copying every column value and any loaded relationships (csv, errors,
// creator user).
func (f *Factory) FromExistingFileuploadFile(m *models.FileuploadFile) *FileuploadFileTemplate {
	o := &FileuploadFileTemplate{f: f, alreadyPersisted: true}
	o.ID = func() int32 { return m.ID }
	o.ContentType = func() string { return m.ContentType }
	o.Created = func() time.Time { return m.Created }
	o.CreatorID = func() int32 { return m.CreatorID }
	o.Deleted = func() null.Val[time.Time] { return m.Deleted }
	o.Name = func() string { return m.Name }
	o.Status = func() enums.FileuploadFilestatustype { return m.Status }
	o.SizeBytes = func() int32 { return m.SizeBytes }
	o.FileUUID = func() uuid.UUID { return m.FileUUID }
	ctx := context.Background()
	if m.R.CSV != nil {
		FileuploadFileMods.WithExistingCSV(m.R.CSV).Apply(ctx, o)
	}
	if len(m.R.Errors) > 0 {
		FileuploadFileMods.AddExistingErrors(m.R.Errors...).Apply(ctx, o)
	}
	if m.R.CreatorUser != nil {
		FileuploadFileMods.WithExistingCreatorUser(m.R.CreatorUser).Apply(ctx, o)
	}
	return o
}
func (f *Factory) NewGeographyColumn(mods ...GeographyColumnMod) *GeographyColumnTemplate { func (f *Factory) NewGeographyColumn(mods ...GeographyColumnMod) *GeographyColumnTemplate {
return f.NewGeographyColumnWithContext(context.Background(), mods...) return f.NewGeographyColumnWithContext(context.Background(), mods...)
} }
@ -3545,6 +3653,9 @@ func (f *Factory) FromExistingUser(m *models.User) *UserTemplate {
if len(m.R.PublicUserUser) > 0 { if len(m.R.PublicUserUser) > 0 {
UserMods.AddExistingPublicUserUser(m.R.PublicUserUser...).Apply(ctx, o) UserMods.AddExistingPublicUserUser(m.R.PublicUserUser...).Apply(ctx, o)
} }
if len(m.R.CreatorFiles) > 0 {
UserMods.AddExistingCreatorFiles(m.R.CreatorFiles...).Apply(ctx, o)
}
if len(m.R.CreatorNoteAudios) > 0 { if len(m.R.CreatorNoteAudios) > 0 {
UserMods.AddExistingCreatorNoteAudios(m.R.CreatorNoteAudios...).Apply(ctx, o) UserMods.AddExistingCreatorNoteAudios(m.R.CreatorNoteAudios...).Apply(ctx, o)
} }
@ -3874,6 +3985,30 @@ func (f *Factory) AddBaseFieldseekerSyncMod(mods ...FieldseekerSyncMod) {
f.baseFieldseekerSyncMods = append(f.baseFieldseekerSyncMods, mods...) f.baseFieldseekerSyncMods = append(f.baseFieldseekerSyncMods, mods...)
} }
// ClearBaseFileuploadCSVMods removes all base mods applied to every new
// FileuploadCSV template.
func (f *Factory) ClearBaseFileuploadCSVMods() {
	f.baseFileuploadCSVMods = nil
}
// AddBaseFileuploadCSVMod appends mods that run before caller-supplied mods on
// every new FileuploadCSV template.
func (f *Factory) AddBaseFileuploadCSVMod(mods ...FileuploadCSVMod) {
	f.baseFileuploadCSVMods = append(f.baseFileuploadCSVMods, mods...)
}
// ClearBaseFileuploadErrorMods removes all base mods for FileuploadError templates.
func (f *Factory) ClearBaseFileuploadErrorMods() {
	f.baseFileuploadErrorMods = nil
}
// AddBaseFileuploadErrorMod appends base mods for FileuploadError templates.
func (f *Factory) AddBaseFileuploadErrorMod(mods ...FileuploadErrorMod) {
	f.baseFileuploadErrorMods = append(f.baseFileuploadErrorMods, mods...)
}
// ClearBaseFileuploadFileMods removes all base mods for FileuploadFile templates.
func (f *Factory) ClearBaseFileuploadFileMods() {
	f.baseFileuploadFileMods = nil
}
// AddBaseFileuploadFileMod appends base mods for FileuploadFile templates.
func (f *Factory) AddBaseFileuploadFileMod(mods ...FileuploadFileMod) {
	f.baseFileuploadFileMods = append(f.baseFileuploadFileMods, mods...)
}
func (f *Factory) ClearBaseGeographyColumnMods() { func (f *Factory) ClearBaseGeographyColumnMods() {
f.baseGeographyColumnMods = nil f.baseGeographyColumnMods = nil
} }

View file

@ -141,6 +141,26 @@ func random_enums_CommsTextorigin(f *faker.Faker, limits ...string) enums.CommsT
return all[f.IntBetween(0, len(all)-1)] return all[f.IntBetween(0, len(all)-1)]
} }
// random_enums_FileuploadCsvtype picks a uniformly random FileuploadCsvtype
// value; a nil faker falls back to the package default. limits is currently
// unused (kept for generator signature uniformity).
func random_enums_FileuploadCsvtype(f *faker.Faker, limits ...string) enums.FileuploadCsvtype {
	if f == nil {
		f = &defaultFaker
	}
	var e enums.FileuploadCsvtype
	all := e.All()
	return all[f.IntBetween(0, len(all)-1)]
}
// random_enums_FileuploadFilestatustype picks a uniformly random
// FileuploadFilestatustype value; a nil faker falls back to the package default.
func random_enums_FileuploadFilestatustype(f *faker.Faker, limits ...string) enums.FileuploadFilestatustype {
	if f == nil {
		f = &defaultFaker
	}
	var e enums.FileuploadFilestatustype
	all := e.All()
	return all[f.IntBetween(0, len(all)-1)]
}
func random_enums_H3aggregationtype(f *faker.Faker, limits ...string) enums.H3aggregationtype { func random_enums_H3aggregationtype(f *faker.Faker, limits ...string) enums.H3aggregationtype {
if f == nil { if f == nil {
f = &defaultFaker f = &defaultFaker

View file

@ -0,0 +1,370 @@
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.
package factory
import (
"context"
"testing"
"github.com/Gleipnir-Technology/bob"
enums "github.com/Gleipnir-Technology/nidus-sync/db/enums"
models "github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/aarondl/opt/omit"
"github.com/jaswdr/faker/v2"
)
// FileuploadCSVMod mutates a FileuploadCSVTemplate.
type FileuploadCSVMod interface {
	Apply(context.Context, *FileuploadCSVTemplate)
}
// FileuploadCSVModFunc adapts a plain function to the FileuploadCSVMod interface.
type FileuploadCSVModFunc func(context.Context, *FileuploadCSVTemplate)
func (f FileuploadCSVModFunc) Apply(ctx context.Context, n *FileuploadCSVTemplate) {
	f(ctx, n)
}
// FileuploadCSVModSlice applies each mod in order.
type FileuploadCSVModSlice []FileuploadCSVMod
func (mods FileuploadCSVModSlice) Apply(ctx context.Context, n *FileuploadCSVTemplate) {
	for _, f := range mods {
		f.Apply(ctx, n)
	}
}
// FileuploadCSVTemplate is an object representing the database table.
// all columns are optional and should be set by mods
type FileuploadCSVTemplate struct {
	// Column value generators; a nil func means "unset".
	FileID func() int32
	Type   func() enums.FileuploadCsvtype
	// r holds relationship templates; f is the owning factory.
	r fileuploadCSVR
	f *Factory
	// alreadyPersisted marks templates wrapped from existing rows so Create
	// paths can skip re-inserting them.
	alreadyPersisted bool
}
// fileuploadCSVR holds the template's relationship slots.
type fileuploadCSVR struct {
	File *fileuploadCSVRFileR
}
// fileuploadCSVRFileR wraps the related file template.
type fileuploadCSVRFileR struct {
	o *FileuploadFileTemplate
}
// Apply mods to the FileuploadCSVTemplate
func (o *FileuploadCSVTemplate) Apply(ctx context.Context, mods ...FileuploadCSVMod) {
	for _, mod := range mods {
		mod.Apply(ctx, o)
	}
}
// setModelRels creates and sets the relationships on *models.FileuploadCSV
// according to the relationships in the template. Nothing is inserted into the db
func (t FileuploadCSVTemplate) setModelRels(o *models.FileuploadCSV) {
	if t.r.File != nil {
		rel := t.r.File.o.Build()
		rel.R.CSV = o
		o.FileID = rel.ID // keep the FK column in sync with the related row's PK
		o.R.File = rel
	}
}
// BuildSetter returns an *models.FileuploadCSVSetter
// this does nothing with the relationship templates
func (o FileuploadCSVTemplate) BuildSetter() *models.FileuploadCSVSetter {
	m := &models.FileuploadCSVSetter{}
	// Only columns with a generator func are set; others stay omitted.
	if o.FileID != nil {
		val := o.FileID()
		m.FileID = omit.From(val)
	}
	if o.Type != nil {
		val := o.Type()
		m.Type = omit.From(val)
	}
	return m
}
// BuildManySetter returns an []*models.FileuploadCSVSetter
// this does nothing with the relationship templates
func (o FileuploadCSVTemplate) BuildManySetter(number int) []*models.FileuploadCSVSetter {
	m := make([]*models.FileuploadCSVSetter, number)
	for i := range m {
		m[i] = o.BuildSetter()
	}
	return m
}
// Build returns an *models.FileuploadCSV
// Related objects are also created and placed in the .R field
// NOTE: Objects are not inserted into the database. Use FileuploadCSVTemplate.Create
func (o FileuploadCSVTemplate) Build() *models.FileuploadCSV {
	m := &models.FileuploadCSV{}
	if o.FileID != nil {
		m.FileID = o.FileID()
	}
	if o.Type != nil {
		m.Type = o.Type()
	}
	o.setModelRels(m)
	return m
}
// BuildMany returns an models.FileuploadCSVSlice
// Related objects are also created and placed in the .R field
// NOTE: Objects are not inserted into the database. Use FileuploadCSVTemplate.CreateMany
func (o FileuploadCSVTemplate) BuildMany(number int) models.FileuploadCSVSlice {
	m := make(models.FileuploadCSVSlice, number)
	for i := range m {
		m[i] = o.Build()
	}
	return m
}
// ensureCreatableFileuploadCSV fills any unset non-nullable column with a
// random value so the setter can be inserted without violating NOT NULL.
func ensureCreatableFileuploadCSV(m *models.FileuploadCSVSetter) {
	if !m.FileID.IsValue() {
		val := random_int32(nil)
		m.FileID = omit.From(val)
	}
	if !m.Type.IsValue() {
		val := random_enums_FileuploadCsvtype(nil)
		m.Type = omit.From(val)
	}
}
// insertOptRels creates and inserts any optional the relationships on *models.FileuploadCSV
// according to the relationships in the template.
// any required relationship should have already exist on the model
//
// fileupload.csv has no optional relationships, so this is a no-op; the
// original `var err error; return err` always returned nil.
func (o *FileuploadCSVTemplate) insertOptRels(ctx context.Context, exec bob.Executor, m *models.FileuploadCSV) error {
	return nil
}
// Create builds a fileuploadCSV and inserts it into the database
// Relations objects are also inserted and placed in the .R field
func (o *FileuploadCSVTemplate) Create(ctx context.Context, exec bob.Executor) (*models.FileuploadCSV, error) {
	var err error
	opt := o.BuildSetter()
	ensureCreatableFileuploadCSV(opt)
	// The file relationship is required (file_id FK); synthesize one if the
	// template does not already carry it.
	if o.r.File == nil {
		FileuploadCSVMods.WithNewFile().Apply(ctx, o)
	}
	var rel0 *models.FileuploadFile
	// Reuse already-persisted related rows instead of re-inserting them.
	if o.r.File.o.alreadyPersisted {
		rel0 = o.r.File.o.Build()
	} else {
		rel0, err = o.r.File.o.Create(ctx, exec)
		if err != nil {
			return nil, err
		}
	}
	// Overwrite any random FK with the real parent id before inserting.
	opt.FileID = omit.From(rel0.ID)
	m, err := models.FileuploadCSVS.Insert(opt).One(ctx, exec)
	if err != nil {
		return nil, err
	}
	m.R.File = rel0
	if err := o.insertOptRels(ctx, exec, m); err != nil {
		return nil, err
	}
	return m, err
}
// MustCreate builds a fileuploadCSV and inserts it into the database
// Relations objects are also inserted and placed in the .R field
// panics if an error occurs
func (o *FileuploadCSVTemplate) MustCreate(ctx context.Context, exec bob.Executor) *models.FileuploadCSV {
	m, err := o.Create(ctx, exec)
	if err != nil {
		panic(err)
	}
	return m
}
// CreateOrFail builds a fileuploadCSV and inserts it into the database
// Relations objects are also inserted and placed in the .R field
// It calls `tb.Fatal(err)` on the test/benchmark if an error occurs
func (o *FileuploadCSVTemplate) CreateOrFail(ctx context.Context, tb testing.TB, exec bob.Executor) *models.FileuploadCSV {
	tb.Helper()
	m, err := o.Create(ctx, exec)
	if err != nil {
		tb.Fatal(err)
		return nil
	}
	return m
}
// CreateMany builds multiple fileuploadCSVS and inserts them into the database
// Relations objects are also inserted and placed in the .R field
func (o FileuploadCSVTemplate) CreateMany(ctx context.Context, exec bob.Executor, number int) (models.FileuploadCSVSlice, error) {
	var err error
	m := make(models.FileuploadCSVSlice, number)
	for i := range m {
		m[i], err = o.Create(ctx, exec)
		if err != nil {
			return nil, err
		}
	}
	return m, nil
}
// MustCreateMany builds multiple fileuploadCSVS and inserts them into the database
// Relations objects are also inserted and placed in the .R field
// panics if an error occurs
func (o FileuploadCSVTemplate) MustCreateMany(ctx context.Context, exec bob.Executor, number int) models.FileuploadCSVSlice {
	m, err := o.CreateMany(ctx, exec, number)
	if err != nil {
		panic(err)
	}
	return m
}
// CreateManyOrFail builds multiple fileuploadCSVS and inserts them into the database
// Relations objects are also inserted and placed in the .R field
// It calls `tb.Fatal(err)` on the test/benchmark if an error occurs
func (o FileuploadCSVTemplate) CreateManyOrFail(ctx context.Context, tb testing.TB, exec bob.Executor, number int) models.FileuploadCSVSlice {
	tb.Helper()
	m, err := o.CreateMany(ctx, exec, number)
	if err != nil {
		tb.Fatal(err)
		return nil
	}
	return m
}
// FileuploadCSV has methods that act as mods for the FileuploadCSVTemplate
var FileuploadCSVMods fileuploadCSVMods
type fileuploadCSVMods struct{}
func (m fileuploadCSVMods) RandomizeAllColumns(f *faker.Faker) FileuploadCSVMod {
return FileuploadCSVModSlice{
FileuploadCSVMods.RandomFileID(f),
FileuploadCSVMods.RandomType(f),
}
}
// Set the model columns to this value
func (m fileuploadCSVMods) FileID(val int32) FileuploadCSVMod {
return FileuploadCSVModFunc(func(_ context.Context, o *FileuploadCSVTemplate) {
o.FileID = func() int32 { return val }
})
}
// Set the Column from the function
func (m fileuploadCSVMods) FileIDFunc(f func() int32) FileuploadCSVMod {
return FileuploadCSVModFunc(func(_ context.Context, o *FileuploadCSVTemplate) {
o.FileID = f
})
}
// Clear any values for the column
func (m fileuploadCSVMods) UnsetFileID() FileuploadCSVMod {
return FileuploadCSVModFunc(func(_ context.Context, o *FileuploadCSVTemplate) {
o.FileID = nil
})
}
// Generates a random value for the column using the given faker
// if faker is nil, a default faker is used
func (m fileuploadCSVMods) RandomFileID(f *faker.Faker) FileuploadCSVMod {
return FileuploadCSVModFunc(func(_ context.Context, o *FileuploadCSVTemplate) {
o.FileID = func() int32 {
return random_int32(f)
}
})
}
// Set the model columns to this value
func (m fileuploadCSVMods) Type(val enums.FileuploadCsvtype) FileuploadCSVMod {
return FileuploadCSVModFunc(func(_ context.Context, o *FileuploadCSVTemplate) {
o.Type = func() enums.FileuploadCsvtype { return val }
})
}
// Set the Column from the function
func (m fileuploadCSVMods) TypeFunc(f func() enums.FileuploadCsvtype) FileuploadCSVMod {
return FileuploadCSVModFunc(func(_ context.Context, o *FileuploadCSVTemplate) {
o.Type = f
})
}
// Clear any values for the column
func (m fileuploadCSVMods) UnsetType() FileuploadCSVMod {
return FileuploadCSVModFunc(func(_ context.Context, o *FileuploadCSVTemplate) {
o.Type = nil
})
}
// Generates a random value for the column using the given faker
// if faker is nil, a default faker is used
func (m fileuploadCSVMods) RandomType(f *faker.Faker) FileuploadCSVMod {
return FileuploadCSVModFunc(func(_ context.Context, o *FileuploadCSVTemplate) {
o.Type = func() enums.FileuploadCsvtype {
return random_enums_FileuploadCsvtype(f)
}
})
}
// WithParentsCascading creates the required parent (file) template, itself
// built with its own cascading parents.
func (m fileuploadCSVMods) WithParentsCascading() FileuploadCSVMod {
	return FileuploadCSVModFunc(func(ctx context.Context, o *FileuploadCSVTemplate) {
		// Context flag breaks recursion if this mod is re-entered for the same chain.
		if isDone, _ := fileuploadCSVWithParentsCascadingCtx.Value(ctx); isDone {
			return
		}
		ctx = fileuploadCSVWithParentsCascadingCtx.WithValue(ctx, true)
		{
			related := o.f.NewFileuploadFileWithContext(ctx, FileuploadFileMods.WithParentsCascading())
			m.WithFile(related).Apply(ctx, o)
		}
	})
}
// WithFile attaches the given file template as the related File.
func (m fileuploadCSVMods) WithFile(rel *FileuploadFileTemplate) FileuploadCSVMod {
	return FileuploadCSVModFunc(func(ctx context.Context, o *FileuploadCSVTemplate) {
		o.r.File = &fileuploadCSVRFileR{
			o: rel,
		}
	})
}
// WithNewFile builds a fresh file template from the mods and attaches it.
func (m fileuploadCSVMods) WithNewFile(mods ...FileuploadFileMod) FileuploadCSVMod {
	return FileuploadCSVModFunc(func(ctx context.Context, o *FileuploadCSVTemplate) {
		related := o.f.NewFileuploadFileWithContext(ctx, mods...)
		m.WithFile(related).Apply(ctx, o)
	})
}
// WithExistingFile attaches an already-persisted model as the related File.
func (m fileuploadCSVMods) WithExistingFile(em *models.FileuploadFile) FileuploadCSVMod {
	return FileuploadCSVModFunc(func(ctx context.Context, o *FileuploadCSVTemplate) {
		o.r.File = &fileuploadCSVRFileR{
			o: o.f.FromExistingFileuploadFile(em),
		}
	})
}
// WithoutFile clears any related File template.
func (m fileuploadCSVMods) WithoutFile() FileuploadCSVMod {
	return FileuploadCSVModFunc(func(ctx context.Context, o *FileuploadCSVTemplate) {
		o.r.File = nil
	})
}

// ---- file boundary: next generated file (factory for fileupload.error) ----
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.
package factory
import (
"context"
"testing"
"github.com/Gleipnir-Technology/bob"
models "github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/aarondl/opt/omit"
"github.com/jaswdr/faker/v2"
)
// FileuploadErrorMod is a modification applied to a FileuploadErrorTemplate.
type FileuploadErrorMod interface {
	Apply(context.Context, *FileuploadErrorTemplate)
}
// FileuploadErrorModFunc adapts a plain function to the FileuploadErrorMod interface.
type FileuploadErrorModFunc func(context.Context, *FileuploadErrorTemplate)
func (f FileuploadErrorModFunc) Apply(ctx context.Context, n *FileuploadErrorTemplate) {
	f(ctx, n)
}
// FileuploadErrorModSlice is a list of mods applied in order.
type FileuploadErrorModSlice []FileuploadErrorMod
func (mods FileuploadErrorModSlice) Apply(ctx context.Context, n *FileuploadErrorTemplate) {
	for _, f := range mods {
		f.Apply(ctx, n)
	}
}
// FileuploadErrorTemplate is an object representing the database table.
// all columns are optional and should be set by mods
// Each column is a generator func; a nil func means "leave unset".
type FileuploadErrorTemplate struct {
	FileID func() int32
	ID func() int32
	Line func() int32
	Message func() string
	r fileuploadErrorR
	f *Factory
	alreadyPersisted bool
}
// fileuploadErrorR holds relationship templates for the error row.
type fileuploadErrorR struct {
	File *fileuploadErrorRFileR
}
type fileuploadErrorRFileR struct {
	o *FileuploadFileTemplate
}
// Apply runs each of the given mods against the template, in order.
func (o *FileuploadErrorTemplate) Apply(ctx context.Context, mods ...FileuploadErrorMod) {
	for i := range mods {
		mods[i].Apply(ctx, o)
	}
}
// setModelRels creates and sets the relationships on *models.FileuploadError
// according to the relationships in the template. Nothing is inserted into the db
func (t FileuploadErrorTemplate) setModelRels(o *models.FileuploadError) {
	if t.r.File != nil {
		rel := t.r.File.o.Build()
		// Wire both directions of the relation and copy the foreign key.
		rel.R.Errors = append(rel.R.Errors, o)
		o.FileID = rel.ID // h2
		o.R.File = rel
	}
}
// BuildSetter returns an *models.FileuploadErrorSetter
// this does nothing with the relationship templates
// Only columns whose generator func is non-nil are included in the setter.
func (o FileuploadErrorTemplate) BuildSetter() *models.FileuploadErrorSetter {
	m := &models.FileuploadErrorSetter{}
	if o.FileID != nil {
		val := o.FileID()
		m.FileID = omit.From(val)
	}
	if o.ID != nil {
		val := o.ID()
		m.ID = omit.From(val)
	}
	if o.Line != nil {
		val := o.Line()
		m.Line = omit.From(val)
	}
	if o.Message != nil {
		val := o.Message()
		m.Message = omit.From(val)
	}
	return m
}
// BuildManySetter returns an []*models.FileuploadErrorSetter
// this does nothing with the relationship templates
// Note: each setter re-invokes the column generator funcs.
func (o FileuploadErrorTemplate) BuildManySetter(number int) []*models.FileuploadErrorSetter {
	m := make([]*models.FileuploadErrorSetter, number)
	for i := range m {
		m[i] = o.BuildSetter()
	}
	return m
}
// Build returns an *models.FileuploadError
// Related objects are also created and placed in the .R field
// NOTE: Objects are not inserted into the database. Use FileuploadErrorTemplate.Create
func (o FileuploadErrorTemplate) Build() *models.FileuploadError {
	m := &models.FileuploadError{}
	if o.FileID != nil {
		m.FileID = o.FileID()
	}
	if o.ID != nil {
		m.ID = o.ID()
	}
	if o.Line != nil {
		m.Line = o.Line()
	}
	if o.Message != nil {
		m.Message = o.Message()
	}
	// Attach in-memory relationship models built from the templates.
	o.setModelRels(m)
	return m
}
// BuildMany returns an models.FileuploadErrorSlice
// Related objects are also created and placed in the .R field
// NOTE: Objects are not inserted into the database. Use FileuploadErrorTemplate.CreateMany
func (o FileuploadErrorTemplate) BuildMany(number int) models.FileuploadErrorSlice {
	m := make(models.FileuploadErrorSlice, number)
	for i := range m {
		m[i] = o.Build()
	}
	return m
}
// ensureCreatableFileuploadError fills any unset required columns with random
// values so an insert can succeed.
// ID is not filled here — presumably database-generated; confirm against the schema.
func ensureCreatableFileuploadError(m *models.FileuploadErrorSetter) {
	if !(m.FileID.IsValue()) {
		val := random_int32(nil)
		m.FileID = omit.From(val)
	}
	if !(m.Line.IsValue()) {
		val := random_int32(nil)
		m.Line = omit.From(val)
	}
	if !(m.Message.IsValue()) {
		val := random_string(nil)
		m.Message = omit.From(val)
	}
}
// insertOptRels creates and inserts any optional the relationships on *models.FileuploadError
// according to the relationships in the template.
// any required relationship should have already exist on the model
// FileuploadError's only relation (File) is required and is handled in Create,
// so there is nothing optional to insert; this always succeeds.
func (o *FileuploadErrorTemplate) insertOptRels(ctx context.Context, exec bob.Executor, m *models.FileuploadError) error {
	return nil
}
// Create builds a fileuploadError and inserts it into the database
// Relations objects are also inserted and placed in the .R field
func (o *FileuploadErrorTemplate) Create(ctx context.Context, exec bob.Executor) (*models.FileuploadError, error) {
	var err error
	opt := o.BuildSetter()
	ensureCreatableFileuploadError(opt)
	// File is a required relation: fabricate one if the template has none.
	if o.r.File == nil {
		FileuploadErrorMods.WithNewFile().Apply(ctx, o)
	}
	var rel0 *models.FileuploadFile
	if o.r.File.o.alreadyPersisted {
		rel0 = o.r.File.o.Build()
	} else {
		rel0, err = o.r.File.o.Create(ctx, exec)
		if err != nil {
			return nil, err
		}
	}
	// The parent must be inserted first so its generated ID can be used as the FK.
	opt.FileID = omit.From(rel0.ID)
	m, err := models.FileuploadErrors.Insert(opt).One(ctx, exec)
	if err != nil {
		return nil, err
	}
	m.R.File = rel0
	if err := o.insertOptRels(ctx, exec, m); err != nil {
		return nil, err
	}
	return m, err
}
// MustCreate builds a fileuploadError and inserts it into the database
// Relations objects are also inserted and placed in the .R field
// panics if an error occurs
func (o *FileuploadErrorTemplate) MustCreate(ctx context.Context, exec bob.Executor) *models.FileuploadError {
	m, err := o.Create(ctx, exec)
	if err != nil {
		panic(err)
	}
	return m
}
// CreateOrFail builds a fileuploadError and inserts it into the database
// Relations objects are also inserted and placed in the .R field
// It calls `tb.Fatal(err)` on the test/benchmark if an error occurs
func (o *FileuploadErrorTemplate) CreateOrFail(ctx context.Context, tb testing.TB, exec bob.Executor) *models.FileuploadError {
	tb.Helper()
	m, err := o.Create(ctx, exec)
	if err != nil {
		tb.Fatal(err)
		return nil
	}
	return m
}
// CreateMany builds multiple fileuploadErrors and inserts them into the database
// Relations objects are also inserted and placed in the .R field
// Each row is created individually; a new related File is created per row
// unless the template already carries one.
func (o FileuploadErrorTemplate) CreateMany(ctx context.Context, exec bob.Executor, number int) (models.FileuploadErrorSlice, error) {
	var err error
	m := make(models.FileuploadErrorSlice, number)
	for i := range m {
		m[i], err = o.Create(ctx, exec)
		if err != nil {
			return nil, err
		}
	}
	return m, nil
}
// MustCreateMany builds multiple fileuploadErrors and inserts them into the database
// Relations objects are also inserted and placed in the .R field
// panics if an error occurs
func (o FileuploadErrorTemplate) MustCreateMany(ctx context.Context, exec bob.Executor, number int) models.FileuploadErrorSlice {
	m, err := o.CreateMany(ctx, exec, number)
	if err != nil {
		panic(err)
	}
	return m
}
// CreateManyOrFail builds multiple fileuploadErrors and inserts them into the database
// Relations objects are also inserted and placed in the .R field
// It calls `tb.Fatal(err)` on the test/benchmark if an error occurs
func (o FileuploadErrorTemplate) CreateManyOrFail(ctx context.Context, tb testing.TB, exec bob.Executor, number int) models.FileuploadErrorSlice {
	tb.Helper()
	m, err := o.CreateMany(ctx, exec, number)
	if err != nil {
		tb.Fatal(err)
		return nil
	}
	return m
}
// FileuploadError has methods that act as mods for the FileuploadErrorTemplate
var FileuploadErrorMods fileuploadErrorMods
type fileuploadErrorMods struct{}
// RandomizeAllColumns returns a mod that randomizes every column of the template.
func (m fileuploadErrorMods) RandomizeAllColumns(f *faker.Faker) FileuploadErrorMod {
	return FileuploadErrorModSlice{
		FileuploadErrorMods.RandomFileID(f),
		FileuploadErrorMods.RandomID(f),
		FileuploadErrorMods.RandomLine(f),
		FileuploadErrorMods.RandomMessage(f),
	}
}
// Column mods for the fileupload.error template. Each method returns a
// FileuploadErrorMod that sets, clears, or randomizes one column generator.
// Set the model columns to this value
func (m fileuploadErrorMods) FileID(val int32) FileuploadErrorMod {
	return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) {
		o.FileID = func() int32 { return val }
	})
}
// Set the Column from the function
func (m fileuploadErrorMods) FileIDFunc(f func() int32) FileuploadErrorMod {
	return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) {
		o.FileID = f
	})
}
// Clear any values for the column
func (m fileuploadErrorMods) UnsetFileID() FileuploadErrorMod {
	return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) {
		o.FileID = nil
	})
}
// Generates a random value for the column using the given faker
// if faker is nil, a default faker is used
func (m fileuploadErrorMods) RandomFileID(f *faker.Faker) FileuploadErrorMod {
	return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) {
		o.FileID = func() int32 {
			return random_int32(f)
		}
	})
}
// Set the model columns to this value
func (m fileuploadErrorMods) ID(val int32) FileuploadErrorMod {
	return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) {
		o.ID = func() int32 { return val }
	})
}
// Set the Column from the function
func (m fileuploadErrorMods) IDFunc(f func() int32) FileuploadErrorMod {
	return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) {
		o.ID = f
	})
}
// Clear any values for the column
func (m fileuploadErrorMods) UnsetID() FileuploadErrorMod {
	return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) {
		o.ID = nil
	})
}
// Generates a random value for the column using the given faker
// if faker is nil, a default faker is used
func (m fileuploadErrorMods) RandomID(f *faker.Faker) FileuploadErrorMod {
	return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) {
		o.ID = func() int32 {
			return random_int32(f)
		}
	})
}
// Set the model columns to this value
func (m fileuploadErrorMods) Line(val int32) FileuploadErrorMod {
	return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) {
		o.Line = func() int32 { return val }
	})
}
// Set the Column from the function
func (m fileuploadErrorMods) LineFunc(f func() int32) FileuploadErrorMod {
	return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) {
		o.Line = f
	})
}
// Clear any values for the column
func (m fileuploadErrorMods) UnsetLine() FileuploadErrorMod {
	return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) {
		o.Line = nil
	})
}
// Generates a random value for the column using the given faker
// if faker is nil, a default faker is used
func (m fileuploadErrorMods) RandomLine(f *faker.Faker) FileuploadErrorMod {
	return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) {
		o.Line = func() int32 {
			return random_int32(f)
		}
	})
}
// Set the model columns to this value
func (m fileuploadErrorMods) Message(val string) FileuploadErrorMod {
	return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) {
		o.Message = func() string { return val }
	})
}
// Set the Column from the function
func (m fileuploadErrorMods) MessageFunc(f func() string) FileuploadErrorMod {
	return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) {
		o.Message = f
	})
}
// Clear any values for the column
func (m fileuploadErrorMods) UnsetMessage() FileuploadErrorMod {
	return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) {
		o.Message = nil
	})
}
// Generates a random value for the column using the given faker
// if faker is nil, a default faker is used
func (m fileuploadErrorMods) RandomMessage(f *faker.Faker) FileuploadErrorMod {
	return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) {
		o.Message = func() string {
			return random_string(f)
		}
	})
}
// WithParentsCascading creates the required parent (file) template, itself
// built with its own cascading parents.
func (m fileuploadErrorMods) WithParentsCascading() FileuploadErrorMod {
	return FileuploadErrorModFunc(func(ctx context.Context, o *FileuploadErrorTemplate) {
		// Context flag breaks recursion if this mod is re-entered for the same chain.
		if isDone, _ := fileuploadErrorWithParentsCascadingCtx.Value(ctx); isDone {
			return
		}
		ctx = fileuploadErrorWithParentsCascadingCtx.WithValue(ctx, true)
		{
			related := o.f.NewFileuploadFileWithContext(ctx, FileuploadFileMods.WithParentsCascading())
			m.WithFile(related).Apply(ctx, o)
		}
	})
}
// WithFile attaches the given file template as the related File.
func (m fileuploadErrorMods) WithFile(rel *FileuploadFileTemplate) FileuploadErrorMod {
	return FileuploadErrorModFunc(func(ctx context.Context, o *FileuploadErrorTemplate) {
		o.r.File = &fileuploadErrorRFileR{
			o: rel,
		}
	})
}
// WithNewFile builds a fresh file template from the mods and attaches it.
func (m fileuploadErrorMods) WithNewFile(mods ...FileuploadFileMod) FileuploadErrorMod {
	return FileuploadErrorModFunc(func(ctx context.Context, o *FileuploadErrorTemplate) {
		related := o.f.NewFileuploadFileWithContext(ctx, mods...)
		m.WithFile(related).Apply(ctx, o)
	})
}
// WithExistingFile attaches an already-persisted model as the related File.
func (m fileuploadErrorMods) WithExistingFile(em *models.FileuploadFile) FileuploadErrorMod {
	return FileuploadErrorModFunc(func(ctx context.Context, o *FileuploadErrorTemplate) {
		o.r.File = &fileuploadErrorRFileR{
			o: o.f.FromExistingFileuploadFile(em),
		}
	})
}
// WithoutFile clears any related File template.
func (m fileuploadErrorMods) WithoutFile() FileuploadErrorMod {
	return FileuploadErrorModFunc(func(ctx context.Context, o *FileuploadErrorTemplate) {
		o.r.File = nil
	})
}

// ---- file boundary: next generated file (factory for fileupload.file) ----
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.
package factory
import (
"context"
"testing"
"time"
"github.com/Gleipnir-Technology/bob"
enums "github.com/Gleipnir-Technology/nidus-sync/db/enums"
models "github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/aarondl/opt/null"
"github.com/aarondl/opt/omit"
"github.com/aarondl/opt/omitnull"
"github.com/google/uuid"
"github.com/jaswdr/faker/v2"
)
// FileuploadFileMod is a modification applied to a FileuploadFileTemplate.
type FileuploadFileMod interface {
	Apply(context.Context, *FileuploadFileTemplate)
}
// FileuploadFileModFunc adapts a plain function to the FileuploadFileMod interface.
type FileuploadFileModFunc func(context.Context, *FileuploadFileTemplate)
func (f FileuploadFileModFunc) Apply(ctx context.Context, n *FileuploadFileTemplate) {
	f(ctx, n)
}
// FileuploadFileModSlice is a list of mods applied in order.
type FileuploadFileModSlice []FileuploadFileMod
func (mods FileuploadFileModSlice) Apply(ctx context.Context, n *FileuploadFileTemplate) {
	for _, f := range mods {
		f.Apply(ctx, n)
	}
}
// FileuploadFileTemplate is an object representing the database table.
// all columns are optional and should be set by mods
// Each column is a generator func; a nil func means "leave unset".
type FileuploadFileTemplate struct {
	ID func() int32
	ContentType func() string
	Created func() time.Time
	CreatorID func() int32
	Deleted func() null.Val[time.Time]
	Name func() string
	Status func() enums.FileuploadFilestatustype
	SizeBytes func() int32
	FileUUID func() uuid.UUID
	r fileuploadFileR
	f *Factory
	alreadyPersisted bool
}
// fileuploadFileR holds relationship templates for the file row.
type fileuploadFileR struct {
	CSV *fileuploadFileRCSVR
	Errors []*fileuploadFileRErrorsR
	CreatorUser *fileuploadFileRCreatorUserR
}
type fileuploadFileRCSVR struct {
	o *FileuploadCSVTemplate
}
type fileuploadFileRErrorsR struct {
	number int
	o *FileuploadErrorTemplate
}
type fileuploadFileRCreatorUserR struct {
	o *UserTemplate
}
// Apply runs each of the given mods against the template, in order.
func (o *FileuploadFileTemplate) Apply(ctx context.Context, mods ...FileuploadFileMod) {
	for i := range mods {
		mods[i].Apply(ctx, o)
	}
}
// setModelRels creates and sets the relationships on *models.FileuploadFile
// according to the relationships in the template. Nothing is inserted into the db
func (t FileuploadFileTemplate) setModelRels(o *models.FileuploadFile) {
	if t.r.CSV != nil {
		rel := t.r.CSV.o.Build()
		rel.R.File = o
		rel.FileID = o.ID // h2
		o.R.CSV = rel
	}
	if t.r.Errors != nil {
		rel := models.FileuploadErrorSlice{}
		for _, r := range t.r.Errors {
			related := r.o.BuildMany(r.number)
			// NOTE(review): the inner loop variable deliberately shadows the
			// accumulator `rel`; it only sets FK/back-references on each row.
			for _, rel := range related {
				rel.FileID = o.ID // h2
				rel.R.File = o
			}
			rel = append(rel, related...)
		}
		o.R.Errors = rel
	}
	if t.r.CreatorUser != nil {
		rel := t.r.CreatorUser.o.Build()
		rel.R.CreatorFiles = append(rel.R.CreatorFiles, o)
		o.CreatorID = rel.ID // h2
		o.R.CreatorUser = rel
	}
}
// BuildSetter returns an *models.FileuploadFileSetter
// this does nothing with the relationship templates
// Only columns whose generator func is non-nil are included in the setter.
func (o FileuploadFileTemplate) BuildSetter() *models.FileuploadFileSetter {
	m := &models.FileuploadFileSetter{}
	if o.ID != nil {
		val := o.ID()
		m.ID = omit.From(val)
	}
	if o.ContentType != nil {
		val := o.ContentType()
		m.ContentType = omit.From(val)
	}
	if o.Created != nil {
		val := o.Created()
		m.Created = omit.From(val)
	}
	if o.CreatorID != nil {
		val := o.CreatorID()
		m.CreatorID = omit.From(val)
	}
	if o.Deleted != nil {
		val := o.Deleted()
		m.Deleted = omitnull.FromNull(val)
	}
	if o.Name != nil {
		val := o.Name()
		m.Name = omit.From(val)
	}
	if o.Status != nil {
		val := o.Status()
		m.Status = omit.From(val)
	}
	if o.SizeBytes != nil {
		val := o.SizeBytes()
		m.SizeBytes = omit.From(val)
	}
	if o.FileUUID != nil {
		val := o.FileUUID()
		m.FileUUID = omit.From(val)
	}
	return m
}
// BuildManySetter returns an []*models.FileuploadFileSetter
// this does nothing with the relationship templates
// Note: each setter re-invokes the column generator funcs.
func (o FileuploadFileTemplate) BuildManySetter(number int) []*models.FileuploadFileSetter {
	m := make([]*models.FileuploadFileSetter, number)
	for i := range m {
		m[i] = o.BuildSetter()
	}
	return m
}
// Build returns an *models.FileuploadFile
// Related objects are also created and placed in the .R field
// NOTE: Objects are not inserted into the database. Use FileuploadFileTemplate.Create
func (o FileuploadFileTemplate) Build() *models.FileuploadFile {
	m := &models.FileuploadFile{}
	if o.ID != nil {
		m.ID = o.ID()
	}
	if o.ContentType != nil {
		m.ContentType = o.ContentType()
	}
	if o.Created != nil {
		m.Created = o.Created()
	}
	if o.CreatorID != nil {
		m.CreatorID = o.CreatorID()
	}
	if o.Deleted != nil {
		m.Deleted = o.Deleted()
	}
	if o.Name != nil {
		m.Name = o.Name()
	}
	if o.Status != nil {
		m.Status = o.Status()
	}
	if o.SizeBytes != nil {
		m.SizeBytes = o.SizeBytes()
	}
	if o.FileUUID != nil {
		m.FileUUID = o.FileUUID()
	}
	// Attach in-memory relationship models built from the templates.
	o.setModelRels(m)
	return m
}
// BuildMany returns an models.FileuploadFileSlice
// Related objects are also created and placed in the .R field
// NOTE: Objects are not inserted into the database. Use FileuploadFileTemplate.CreateMany
func (o FileuploadFileTemplate) BuildMany(number int) models.FileuploadFileSlice {
	m := make(models.FileuploadFileSlice, number)
	for i := range m {
		m[i] = o.Build()
	}
	return m
}
// ensureCreatableFileuploadFile fills any unset required columns with random
// values so an insert can succeed.
// ID is not filled here — presumably database-generated — and Deleted is
// skipped because it is nullable; confirm against the schema.
func ensureCreatableFileuploadFile(m *models.FileuploadFileSetter) {
	if !(m.ContentType.IsValue()) {
		val := random_string(nil)
		m.ContentType = omit.From(val)
	}
	if !(m.Created.IsValue()) {
		val := random_time_Time(nil)
		m.Created = omit.From(val)
	}
	if !(m.CreatorID.IsValue()) {
		val := random_int32(nil)
		m.CreatorID = omit.From(val)
	}
	if !(m.Name.IsValue()) {
		val := random_string(nil)
		m.Name = omit.From(val)
	}
	if !(m.Status.IsValue()) {
		val := random_enums_FileuploadFilestatustype(nil)
		m.Status = omit.From(val)
	}
	if !(m.SizeBytes.IsValue()) {
		val := random_int32(nil)
		m.SizeBytes = omit.From(val)
	}
	if !(m.FileUUID.IsValue()) {
		val := random_uuid_UUID(nil)
		m.FileUUID = omit.From(val)
	}
}
// insertOptRels creates and inserts any optional the relationships on *models.FileuploadFile
// according to the relationships in the template.
// any required relationship should have already exist on the model
func (o *FileuploadFileTemplate) insertOptRels(ctx context.Context, exec bob.Executor, m *models.FileuploadFile) error {
	var err error
	// Context flags prevent re-inserting the same relation when templates recurse.
	isCSVDone, _ := fileuploadFileRelCSVCtx.Value(ctx)
	if !isCSVDone && o.r.CSV != nil {
		ctx = fileuploadFileRelCSVCtx.WithValue(ctx, true)
		if o.r.CSV.o.alreadyPersisted {
			// Already in the database: only attach the in-memory model.
			m.R.CSV = o.r.CSV.o.Build()
		} else {
			var rel0 *models.FileuploadCSV
			rel0, err = o.r.CSV.o.Create(ctx, exec)
			if err != nil {
				return err
			}
			err = m.AttachCSV(ctx, exec, rel0)
			if err != nil {
				return err
			}
		}
	}
	isErrorsDone, _ := fileuploadFileRelErrorsCtx.Value(ctx)
	if !isErrorsDone && o.r.Errors != nil {
		ctx = fileuploadFileRelErrorsCtx.WithValue(ctx, true)
		for _, r := range o.r.Errors {
			if r.o.alreadyPersisted {
				m.R.Errors = append(m.R.Errors, r.o.Build())
			} else {
				rel1, err := r.o.CreateMany(ctx, exec, r.number)
				if err != nil {
					return err
				}
				err = m.AttachErrors(ctx, exec, rel1...)
				if err != nil {
					return err
				}
			}
		}
	}
	return err
}
// Create builds a fileuploadFile and inserts it into the database
// Relations objects are also inserted and placed in the .R field
func (o *FileuploadFileTemplate) Create(ctx context.Context, exec bob.Executor) (*models.FileuploadFile, error) {
	var err error
	opt := o.BuildSetter()
	ensureCreatableFileuploadFile(opt)
	// CreatorUser is a required relation: fabricate one if the template has none.
	if o.r.CreatorUser == nil {
		FileuploadFileMods.WithNewCreatorUser().Apply(ctx, o)
	}
	var rel2 *models.User
	if o.r.CreatorUser.o.alreadyPersisted {
		rel2 = o.r.CreatorUser.o.Build()
	} else {
		rel2, err = o.r.CreatorUser.o.Create(ctx, exec)
		if err != nil {
			return nil, err
		}
	}
	// The parent must be inserted first so its generated ID can be used as the FK.
	opt.CreatorID = omit.From(rel2.ID)
	m, err := models.FileuploadFiles.Insert(opt).One(ctx, exec)
	if err != nil {
		return nil, err
	}
	m.R.CreatorUser = rel2
	if err := o.insertOptRels(ctx, exec, m); err != nil {
		return nil, err
	}
	return m, err
}
// MustCreate builds a fileuploadFile and inserts it into the database
// Relations objects are also inserted and placed in the .R field
// panics if an error occurs
func (o *FileuploadFileTemplate) MustCreate(ctx context.Context, exec bob.Executor) *models.FileuploadFile {
	m, err := o.Create(ctx, exec)
	if err != nil {
		panic(err)
	}
	return m
}
// CreateOrFail builds a fileuploadFile and inserts it into the database
// Relations objects are also inserted and placed in the .R field
// It calls `tb.Fatal(err)` on the test/benchmark if an error occurs
func (o *FileuploadFileTemplate) CreateOrFail(ctx context.Context, tb testing.TB, exec bob.Executor) *models.FileuploadFile {
	tb.Helper()
	m, err := o.Create(ctx, exec)
	if err != nil {
		tb.Fatal(err)
		return nil
	}
	return m
}
// CreateMany builds multiple fileuploadFiles and inserts them into the database
// Relations objects are also inserted and placed in the .R field
// Each row is created individually, including its required CreatorUser.
func (o FileuploadFileTemplate) CreateMany(ctx context.Context, exec bob.Executor, number int) (models.FileuploadFileSlice, error) {
	var err error
	m := make(models.FileuploadFileSlice, number)
	for i := range m {
		m[i], err = o.Create(ctx, exec)
		if err != nil {
			return nil, err
		}
	}
	return m, nil
}
// MustCreateMany builds multiple fileuploadFiles and inserts them into the database
// Relations objects are also inserted and placed in the .R field
// panics if an error occurs
func (o FileuploadFileTemplate) MustCreateMany(ctx context.Context, exec bob.Executor, number int) models.FileuploadFileSlice {
	m, err := o.CreateMany(ctx, exec, number)
	if err != nil {
		panic(err)
	}
	return m
}
// CreateManyOrFail builds multiple fileuploadFiles and inserts them into the database
// Relations objects are also inserted and placed in the .R field
// It calls `tb.Fatal(err)` on the test/benchmark if an error occurs
func (o FileuploadFileTemplate) CreateManyOrFail(ctx context.Context, tb testing.TB, exec bob.Executor, number int) models.FileuploadFileSlice {
	tb.Helper()
	m, err := o.CreateMany(ctx, exec, number)
	if err != nil {
		tb.Fatal(err)
		return nil
	}
	return m
}
// FileuploadFile has methods that act as mods for the FileuploadFileTemplate
var FileuploadFileMods fileuploadFileMods
type fileuploadFileMods struct{}
// RandomizeAllColumns returns a mod that randomizes every column of the template.
func (m fileuploadFileMods) RandomizeAllColumns(f *faker.Faker) FileuploadFileMod {
	return FileuploadFileModSlice{
		FileuploadFileMods.RandomID(f),
		FileuploadFileMods.RandomContentType(f),
		FileuploadFileMods.RandomCreated(f),
		FileuploadFileMods.RandomCreatorID(f),
		FileuploadFileMods.RandomDeleted(f),
		FileuploadFileMods.RandomName(f),
		FileuploadFileMods.RandomStatus(f),
		FileuploadFileMods.RandomSizeBytes(f),
		FileuploadFileMods.RandomFileUUID(f),
	}
}
// Column mods for fileupload.file (ID, ContentType, Created, CreatorID). Each
// method returns a FileuploadFileMod that sets, clears, or randomizes one column.
// Set the model columns to this value
func (m fileuploadFileMods) ID(val int32) FileuploadFileMod {
	return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) {
		o.ID = func() int32 { return val }
	})
}
// Set the Column from the function
func (m fileuploadFileMods) IDFunc(f func() int32) FileuploadFileMod {
	return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) {
		o.ID = f
	})
}
// Clear any values for the column
func (m fileuploadFileMods) UnsetID() FileuploadFileMod {
	return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) {
		o.ID = nil
	})
}
// Generates a random value for the column using the given faker
// if faker is nil, a default faker is used
func (m fileuploadFileMods) RandomID(f *faker.Faker) FileuploadFileMod {
	return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) {
		o.ID = func() int32 {
			return random_int32(f)
		}
	})
}
// Set the model columns to this value
func (m fileuploadFileMods) ContentType(val string) FileuploadFileMod {
	return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) {
		o.ContentType = func() string { return val }
	})
}
// Set the Column from the function
func (m fileuploadFileMods) ContentTypeFunc(f func() string) FileuploadFileMod {
	return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) {
		o.ContentType = f
	})
}
// Clear any values for the column
func (m fileuploadFileMods) UnsetContentType() FileuploadFileMod {
	return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) {
		o.ContentType = nil
	})
}
// Generates a random value for the column using the given faker
// if faker is nil, a default faker is used
func (m fileuploadFileMods) RandomContentType(f *faker.Faker) FileuploadFileMod {
	return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) {
		o.ContentType = func() string {
			return random_string(f)
		}
	})
}
// Set the model columns to this value
func (m fileuploadFileMods) Created(val time.Time) FileuploadFileMod {
	return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) {
		o.Created = func() time.Time { return val }
	})
}
// Set the Column from the function
func (m fileuploadFileMods) CreatedFunc(f func() time.Time) FileuploadFileMod {
	return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) {
		o.Created = f
	})
}
// Clear any values for the column
func (m fileuploadFileMods) UnsetCreated() FileuploadFileMod {
	return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) {
		o.Created = nil
	})
}
// Generates a random value for the column using the given faker
// if faker is nil, a default faker is used
func (m fileuploadFileMods) RandomCreated(f *faker.Faker) FileuploadFileMod {
	return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) {
		o.Created = func() time.Time {
			return random_time_Time(f)
		}
	})
}
// Set the model columns to this value
func (m fileuploadFileMods) CreatorID(val int32) FileuploadFileMod {
	return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) {
		o.CreatorID = func() int32 { return val }
	})
}
// Set the Column from the function
func (m fileuploadFileMods) CreatorIDFunc(f func() int32) FileuploadFileMod {
	return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) {
		o.CreatorID = f
	})
}
// Clear any values for the column
func (m fileuploadFileMods) UnsetCreatorID() FileuploadFileMod {
	return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) {
		o.CreatorID = nil
	})
}
// Generates a random value for the column using the given faker
// if faker is nil, a default faker is used
func (m fileuploadFileMods) RandomCreatorID(f *faker.Faker) FileuploadFileMod {
	return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) {
		o.CreatorID = func() int32 {
			return random_int32(f)
		}
	})
}
// Mods for the nullable Deleted column (soft-delete timestamp — TODO confirm semantics).
// Set the model columns to this value
func (m fileuploadFileMods) Deleted(val null.Val[time.Time]) FileuploadFileMod {
	return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) {
		o.Deleted = func() null.Val[time.Time] { return val }
	})
}
// Set the Column from the function
func (m fileuploadFileMods) DeletedFunc(f func() null.Val[time.Time]) FileuploadFileMod {
	return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) {
		o.Deleted = f
	})
}
// Clear any values for the column
func (m fileuploadFileMods) UnsetDeleted() FileuploadFileMod {
	return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) {
		o.Deleted = nil
	})
}
// Generates a random value for the column using the given faker
// if faker is nil, a default faker is used
// The generated value is sometimes null
func (m fileuploadFileMods) RandomDeleted(f *faker.Faker) FileuploadFileMod {
	return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) {
		o.Deleted = func() null.Val[time.Time] {
			if f == nil {
				f = &defaultFaker
			}
			// The doc comment promises a sometimes-null value, but the previous
			// body always returned non-null (identical to RandomDeletedNotNull).
			// Flip a coin so the nullable path is actually exercised.
			if f.Boolean().Bool() {
				return null.FromPtr[time.Time](nil)
			}
			val := random_time_Time(f)
			return null.From(val)
		}
	})
}
// Generates a random value for the column using the given faker
// if faker is nil, a default faker is used
// The generated value is never null
func (m fileuploadFileMods) RandomDeletedNotNull(f *faker.Faker) FileuploadFileMod {
	return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) {
		o.Deleted = func() null.Val[time.Time] {
			if f == nil {
				f = &defaultFaker
			}
			// Always wraps a concrete timestamp; null.From never produces a null value.
			val := random_time_Time(f)
			return null.From(val)
		}
	})
}
// Set the model columns to this value
func (m fileuploadFileMods) Name(val string) FileuploadFileMod {
return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) {
o.Name = func() string { return val }
})
}
// Set the Column from the function
func (m fileuploadFileMods) NameFunc(f func() string) FileuploadFileMod {
return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) {
o.Name = f
})
}
// Clear any values for the column
func (m fileuploadFileMods) UnsetName() FileuploadFileMod {
return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) {
o.Name = nil
})
}
// Generates a random value for the column using the given faker
// if faker is nil, a default faker is used
func (m fileuploadFileMods) RandomName(f *faker.Faker) FileuploadFileMod {
return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) {
o.Name = func() string {
return random_string(f)
}
})
}
// Set the model columns to this value
func (m fileuploadFileMods) Status(val enums.FileuploadFilestatustype) FileuploadFileMod {
return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) {
o.Status = func() enums.FileuploadFilestatustype { return val }
})
}
// Set the Column from the function
func (m fileuploadFileMods) StatusFunc(f func() enums.FileuploadFilestatustype) FileuploadFileMod {
return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) {
o.Status = f
})
}
// Clear any values for the column
func (m fileuploadFileMods) UnsetStatus() FileuploadFileMod {
return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) {
o.Status = nil
})
}
// Generates a random value for the column using the given faker
// if faker is nil, a default faker is used
func (m fileuploadFileMods) RandomStatus(f *faker.Faker) FileuploadFileMod {
return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) {
o.Status = func() enums.FileuploadFilestatustype {
return random_enums_FileuploadFilestatustype(f)
}
})
}
// SizeBytes returns a mod that pins the template's SizeBytes column to the given value.
func (m fileuploadFileMods) SizeBytes(val int32) FileuploadFileMod {
	return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) {
		o.SizeBytes = func() int32 { return val }
	})
}

// SizeBytesFunc returns a mod that sets the SizeBytes column from the given
// function, which is evaluated when the template is built.
func (m fileuploadFileMods) SizeBytesFunc(f func() int32) FileuploadFileMod {
	return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) {
		o.SizeBytes = f
	})
}

// UnsetSizeBytes returns a mod that clears any previously set value for SizeBytes.
func (m fileuploadFileMods) UnsetSizeBytes() FileuploadFileMod {
	return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) {
		o.SizeBytes = nil
	})
}

// RandomSizeBytes returns a mod that generates a random value for SizeBytes
// using the given faker; if faker is nil, a default faker is used.
func (m fileuploadFileMods) RandomSizeBytes(f *faker.Faker) FileuploadFileMod {
	return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) {
		o.SizeBytes = func() int32 {
			return random_int32(f)
		}
	})
}
// FileUUID returns a mod that pins the template's FileUUID column to the given value.
func (m fileuploadFileMods) FileUUID(val uuid.UUID) FileuploadFileMod {
	return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) {
		o.FileUUID = func() uuid.UUID { return val }
	})
}

// FileUUIDFunc returns a mod that sets the FileUUID column from the given
// function, which is evaluated when the template is built.
func (m fileuploadFileMods) FileUUIDFunc(f func() uuid.UUID) FileuploadFileMod {
	return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) {
		o.FileUUID = f
	})
}

// UnsetFileUUID returns a mod that clears any previously set value for FileUUID.
func (m fileuploadFileMods) UnsetFileUUID() FileuploadFileMod {
	return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) {
		o.FileUUID = nil
	})
}

// RandomFileUUID returns a mod that generates a random value for FileUUID
// using the given faker; if faker is nil, a default faker is used.
func (m fileuploadFileMods) RandomFileUUID(f *faker.Faker) FileuploadFileMod {
	return FileuploadFileModFunc(func(_ context.Context, o *FileuploadFileTemplate) {
		o.FileUUID = func() uuid.UUID {
			return random_uuid_UUID(f)
		}
	})
}
// WithParentsCascading returns a mod that attaches newly built related
// templates (a CSV template and a creator User template) to this file
// template, with each related template itself cascading to its own parents.
// A boolean flag stored in the context guards against infinite recursion
// when related factories cascade back into this template.
func (m fileuploadFileMods) WithParentsCascading() FileuploadFileMod {
	return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) {
		// If this template type has already been visited on this cascade
		// chain, stop here to break the cycle.
		if isDone, _ := fileuploadFileWithParentsCascadingCtx.Value(ctx); isDone {
			return
		}
		ctx = fileuploadFileWithParentsCascadingCtx.WithValue(ctx, true)
		{
			// Build and attach the related CSV template, cascading further.
			related := o.f.NewFileuploadCSVWithContext(ctx, FileuploadCSVMods.WithParentsCascading())
			m.WithCSV(related).Apply(ctx, o)
		}
		{
			// Build and attach the creator User template, cascading further.
			related := o.f.NewUserWithContext(ctx, UserMods.WithParentsCascading())
			m.WithCreatorUser(related).Apply(ctx, o)
		}
	})
}
// WithCSV returns a mod that attaches the given CSV template as this file's
// CSV relation, replacing any relation set earlier.
func (m fileuploadFileMods) WithCSV(rel *FileuploadCSVTemplate) FileuploadFileMod {
	return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) {
		wrapped := &fileuploadFileRCSVR{o: rel}
		o.r.CSV = wrapped
	})
}

// WithNewCSV returns a mod that builds a fresh CSV template from the given
// mods and attaches it as this file's CSV relation.
func (m fileuploadFileMods) WithNewCSV(mods ...FileuploadCSVMod) FileuploadFileMod {
	return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) {
		m.WithCSV(o.f.NewFileuploadCSVWithContext(ctx, mods...)).Apply(ctx, o)
	})
}

// WithExistingCSV returns a mod that wraps an existing CSV model in a
// template and attaches it as this file's CSV relation.
func (m fileuploadFileMods) WithExistingCSV(em *models.FileuploadCSV) FileuploadFileMod {
	return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) {
		existing := o.f.FromExistingFileuploadCSV(em)
		o.r.CSV = &fileuploadFileRCSVR{o: existing}
	})
}

// WithoutCSV returns a mod that detaches any CSV relation from the template.
func (m fileuploadFileMods) WithoutCSV() FileuploadFileMod {
	return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) {
		o.r.CSV = nil
	})
}
// WithCreatorUser returns a mod that attaches the given user template as
// this file's creator relation, replacing any relation set earlier.
func (m fileuploadFileMods) WithCreatorUser(rel *UserTemplate) FileuploadFileMod {
	return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) {
		wrapped := &fileuploadFileRCreatorUserR{o: rel}
		o.r.CreatorUser = wrapped
	})
}

// WithNewCreatorUser returns a mod that builds a fresh user template from
// the given mods and attaches it as this file's creator relation.
func (m fileuploadFileMods) WithNewCreatorUser(mods ...UserMod) FileuploadFileMod {
	return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) {
		m.WithCreatorUser(o.f.NewUserWithContext(ctx, mods...)).Apply(ctx, o)
	})
}

// WithExistingCreatorUser returns a mod that wraps an existing user model in
// a template and attaches it as this file's creator relation.
func (m fileuploadFileMods) WithExistingCreatorUser(em *models.User) FileuploadFileMod {
	return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) {
		existing := o.f.FromExistingUser(em)
		o.r.CreatorUser = &fileuploadFileRCreatorUserR{o: existing}
	})
}

// WithoutCreatorUser returns a mod that detaches any creator relation from
// the template.
func (m fileuploadFileMods) WithoutCreatorUser() FileuploadFileMod {
	return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) {
		o.r.CreatorUser = nil
	})
}
// WithErrors returns a mod that replaces the Errors relation with a single
// entry that builds `number` rows from the given template.
func (m fileuploadFileMods) WithErrors(number int, related *FileuploadErrorTemplate) FileuploadFileMod {
	return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) {
		entry := &fileuploadFileRErrorsR{number: number, o: related}
		o.r.Errors = []*fileuploadFileRErrorsR{entry}
	})
}

// WithNewErrors returns a mod that builds a fresh error template from the
// given mods and replaces the Errors relation with it.
func (m fileuploadFileMods) WithNewErrors(number int, mods ...FileuploadErrorMod) FileuploadFileMod {
	return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) {
		m.WithErrors(number, o.f.NewFileuploadErrorWithContext(ctx, mods...)).Apply(ctx, o)
	})
}

// AddErrors returns a mod that appends an entry building `number` rows from
// the given template to the Errors relation.
func (m fileuploadFileMods) AddErrors(number int, related *FileuploadErrorTemplate) FileuploadFileMod {
	return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) {
		entry := &fileuploadFileRErrorsR{number: number, o: related}
		o.r.Errors = append(o.r.Errors, entry)
	})
}

// AddNewErrors returns a mod that builds a fresh error template from the
// given mods and appends it to the Errors relation.
func (m fileuploadFileMods) AddNewErrors(number int, mods ...FileuploadErrorMod) FileuploadFileMod {
	return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) {
		m.AddErrors(number, o.f.NewFileuploadErrorWithContext(ctx, mods...)).Apply(ctx, o)
	})
}

// AddExistingErrors returns a mod that wraps each given existing error model
// in a template and appends it to the Errors relation.
func (m fileuploadFileMods) AddExistingErrors(existingModels ...*models.FileuploadError) FileuploadFileMod {
	return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) {
		for _, em := range existingModels {
			entry := &fileuploadFileRErrorsR{o: o.f.FromExistingFileuploadError(em)}
			o.r.Errors = append(o.r.Errors, entry)
		}
	})
}

// WithoutErrors returns a mod that detaches all Errors entries from the
// template.
func (m fileuploadFileMods) WithoutErrors() FileuploadFileMod {
	return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) {
		o.r.Errors = nil
	})
}

View file

@ -59,6 +59,7 @@ type UserTemplate struct {
type userR struct { type userR struct {
PublicUserUser []*userRPublicUserUserR PublicUserUser []*userRPublicUserUserR
CreatorFiles []*userRCreatorFilesR
CreatorNoteAudios []*userRCreatorNoteAudiosR CreatorNoteAudios []*userRCreatorNoteAudiosR
DeletorNoteAudios []*userRDeletorNoteAudiosR DeletorNoteAudios []*userRDeletorNoteAudiosR
CreatorNoteImages []*userRCreatorNoteImagesR CreatorNoteImages []*userRCreatorNoteImagesR
@ -72,6 +73,10 @@ type userRPublicUserUserR struct {
number int number int
o *ArcgisUserTemplate o *ArcgisUserTemplate
} }
type userRCreatorFilesR struct {
number int
o *FileuploadFileTemplate
}
type userRCreatorNoteAudiosR struct { type userRCreatorNoteAudiosR struct {
number int number int
o *NoteAudioTemplate o *NoteAudioTemplate
@ -123,6 +128,19 @@ func (t UserTemplate) setModelRels(o *models.User) {
o.R.PublicUserUser = rel o.R.PublicUserUser = rel
} }
if t.r.CreatorFiles != nil {
rel := models.FileuploadFileSlice{}
for _, r := range t.r.CreatorFiles {
related := r.o.BuildMany(r.number)
for _, rel := range related {
rel.CreatorID = o.ID // h2
rel.R.CreatorUser = o
}
rel = append(rel, related...)
}
o.R.CreatorFiles = rel
}
if t.r.CreatorNoteAudios != nil { if t.r.CreatorNoteAudios != nil {
rel := models.NoteAudioSlice{} rel := models.NoteAudioSlice{}
for _, r := range t.r.CreatorNoteAudios { for _, r := range t.r.CreatorNoteAudios {
@ -388,6 +406,26 @@ func (o *UserTemplate) insertOptRels(ctx context.Context, exec bob.Executor, m *
} }
} }
isCreatorFilesDone, _ := userRelCreatorFilesCtx.Value(ctx)
if !isCreatorFilesDone && o.r.CreatorFiles != nil {
ctx = userRelCreatorFilesCtx.WithValue(ctx, true)
for _, r := range o.r.CreatorFiles {
if r.o.alreadyPersisted {
m.R.CreatorFiles = append(m.R.CreatorFiles, r.o.Build())
} else {
rel1, err := r.o.CreateMany(ctx, exec, r.number)
if err != nil {
return err
}
err = m.AttachCreatorFiles(ctx, exec, rel1...)
if err != nil {
return err
}
}
}
}
isCreatorNoteAudiosDone, _ := userRelCreatorNoteAudiosCtx.Value(ctx) isCreatorNoteAudiosDone, _ := userRelCreatorNoteAudiosCtx.Value(ctx)
if !isCreatorNoteAudiosDone && o.r.CreatorNoteAudios != nil { if !isCreatorNoteAudiosDone && o.r.CreatorNoteAudios != nil {
ctx = userRelCreatorNoteAudiosCtx.WithValue(ctx, true) ctx = userRelCreatorNoteAudiosCtx.WithValue(ctx, true)
@ -395,12 +433,12 @@ func (o *UserTemplate) insertOptRels(ctx context.Context, exec bob.Executor, m *
if r.o.alreadyPersisted { if r.o.alreadyPersisted {
m.R.CreatorNoteAudios = append(m.R.CreatorNoteAudios, r.o.Build()) m.R.CreatorNoteAudios = append(m.R.CreatorNoteAudios, r.o.Build())
} else { } else {
rel1, err := r.o.CreateMany(ctx, exec, r.number) rel2, err := r.o.CreateMany(ctx, exec, r.number)
if err != nil { if err != nil {
return err return err
} }
err = m.AttachCreatorNoteAudios(ctx, exec, rel1...) err = m.AttachCreatorNoteAudios(ctx, exec, rel2...)
if err != nil { if err != nil {
return err return err
} }
@ -415,12 +453,12 @@ func (o *UserTemplate) insertOptRels(ctx context.Context, exec bob.Executor, m *
if r.o.alreadyPersisted { if r.o.alreadyPersisted {
m.R.DeletorNoteAudios = append(m.R.DeletorNoteAudios, r.o.Build()) m.R.DeletorNoteAudios = append(m.R.DeletorNoteAudios, r.o.Build())
} else { } else {
rel2, err := r.o.CreateMany(ctx, exec, r.number) rel3, err := r.o.CreateMany(ctx, exec, r.number)
if err != nil { if err != nil {
return err return err
} }
err = m.AttachDeletorNoteAudios(ctx, exec, rel2...) err = m.AttachDeletorNoteAudios(ctx, exec, rel3...)
if err != nil { if err != nil {
return err return err
} }
@ -435,12 +473,12 @@ func (o *UserTemplate) insertOptRels(ctx context.Context, exec bob.Executor, m *
if r.o.alreadyPersisted { if r.o.alreadyPersisted {
m.R.CreatorNoteImages = append(m.R.CreatorNoteImages, r.o.Build()) m.R.CreatorNoteImages = append(m.R.CreatorNoteImages, r.o.Build())
} else { } else {
rel3, err := r.o.CreateMany(ctx, exec, r.number) rel4, err := r.o.CreateMany(ctx, exec, r.number)
if err != nil { if err != nil {
return err return err
} }
err = m.AttachCreatorNoteImages(ctx, exec, rel3...) err = m.AttachCreatorNoteImages(ctx, exec, rel4...)
if err != nil { if err != nil {
return err return err
} }
@ -455,12 +493,12 @@ func (o *UserTemplate) insertOptRels(ctx context.Context, exec bob.Executor, m *
if r.o.alreadyPersisted { if r.o.alreadyPersisted {
m.R.DeletorNoteImages = append(m.R.DeletorNoteImages, r.o.Build()) m.R.DeletorNoteImages = append(m.R.DeletorNoteImages, r.o.Build())
} else { } else {
rel4, err := r.o.CreateMany(ctx, exec, r.number) rel5, err := r.o.CreateMany(ctx, exec, r.number)
if err != nil { if err != nil {
return err return err
} }
err = m.AttachDeletorNoteImages(ctx, exec, rel4...) err = m.AttachDeletorNoteImages(ctx, exec, rel5...)
if err != nil { if err != nil {
return err return err
} }
@ -475,12 +513,12 @@ func (o *UserTemplate) insertOptRels(ctx context.Context, exec bob.Executor, m *
if r.o.alreadyPersisted { if r.o.alreadyPersisted {
m.R.UserNotifications = append(m.R.UserNotifications, r.o.Build()) m.R.UserNotifications = append(m.R.UserNotifications, r.o.Build())
} else { } else {
rel5, err := r.o.CreateMany(ctx, exec, r.number) rel6, err := r.o.CreateMany(ctx, exec, r.number)
if err != nil { if err != nil {
return err return err
} }
err = m.AttachUserNotifications(ctx, exec, rel5...) err = m.AttachUserNotifications(ctx, exec, rel6...)
if err != nil { if err != nil {
return err return err
} }
@ -495,12 +533,12 @@ func (o *UserTemplate) insertOptRels(ctx context.Context, exec bob.Executor, m *
if r.o.alreadyPersisted { if r.o.alreadyPersisted {
m.R.UserOauthTokens = append(m.R.UserOauthTokens, r.o.Build()) m.R.UserOauthTokens = append(m.R.UserOauthTokens, r.o.Build())
} else { } else {
rel6, err := r.o.CreateMany(ctx, exec, r.number) rel7, err := r.o.CreateMany(ctx, exec, r.number)
if err != nil { if err != nil {
return err return err
} }
err = m.AttachUserOauthTokens(ctx, exec, rel6...) err = m.AttachUserOauthTokens(ctx, exec, rel7...)
if err != nil { if err != nil {
return err return err
} }
@ -522,25 +560,25 @@ func (o *UserTemplate) Create(ctx context.Context, exec bob.Executor) (*models.U
UserMods.WithNewOrganization().Apply(ctx, o) UserMods.WithNewOrganization().Apply(ctx, o)
} }
var rel7 *models.Organization var rel8 *models.Organization
if o.r.Organization.o.alreadyPersisted { if o.r.Organization.o.alreadyPersisted {
rel7 = o.r.Organization.o.Build() rel8 = o.r.Organization.o.Build()
} else { } else {
rel7, err = o.r.Organization.o.Create(ctx, exec) rel8, err = o.r.Organization.o.Create(ctx, exec)
if err != nil { if err != nil {
return nil, err return nil, err
} }
} }
opt.OrganizationID = omit.From(rel7.ID) opt.OrganizationID = omit.From(rel8.ID)
m, err := models.Users.Insert(opt).One(ctx, exec) m, err := models.Users.Insert(opt).One(ctx, exec)
if err != nil { if err != nil {
return nil, err return nil, err
} }
m.R.Organization = rel7 m.R.Organization = rel8
if err := o.insertOptRels(ctx, exec, m); err != nil { if err := o.insertOptRels(ctx, exec, m); err != nil {
return nil, err return nil, err
@ -1230,6 +1268,54 @@ func (m userMods) WithoutPublicUserUser() UserMod {
}) })
} }
func (m userMods) WithCreatorFiles(number int, related *FileuploadFileTemplate) UserMod {
return UserModFunc(func(ctx context.Context, o *UserTemplate) {
o.r.CreatorFiles = []*userRCreatorFilesR{{
number: number,
o: related,
}}
})
}
func (m userMods) WithNewCreatorFiles(number int, mods ...FileuploadFileMod) UserMod {
return UserModFunc(func(ctx context.Context, o *UserTemplate) {
related := o.f.NewFileuploadFileWithContext(ctx, mods...)
m.WithCreatorFiles(number, related).Apply(ctx, o)
})
}
func (m userMods) AddCreatorFiles(number int, related *FileuploadFileTemplate) UserMod {
return UserModFunc(func(ctx context.Context, o *UserTemplate) {
o.r.CreatorFiles = append(o.r.CreatorFiles, &userRCreatorFilesR{
number: number,
o: related,
})
})
}
func (m userMods) AddNewCreatorFiles(number int, mods ...FileuploadFileMod) UserMod {
return UserModFunc(func(ctx context.Context, o *UserTemplate) {
related := o.f.NewFileuploadFileWithContext(ctx, mods...)
m.AddCreatorFiles(number, related).Apply(ctx, o)
})
}
func (m userMods) AddExistingCreatorFiles(existingModels ...*models.FileuploadFile) UserMod {
return UserModFunc(func(ctx context.Context, o *UserTemplate) {
for _, em := range existingModels {
o.r.CreatorFiles = append(o.r.CreatorFiles, &userRCreatorFilesR{
o: o.f.FromExistingFileuploadFile(em),
})
}
})
}
func (m userMods) WithoutCreatorFiles() UserMod {
return UserModFunc(func(ctx context.Context, o *UserTemplate) {
o.r.CreatorFiles = nil
})
}
func (m userMods) WithCreatorNoteAudios(number int, related *NoteAudioTemplate) UserMod { func (m userMods) WithCreatorNoteAudios(number int, related *NoteAudioTemplate) UserMod {
return UserModFunc(func(ctx context.Context, o *UserTemplate) { return UserModFunc(func(ctx context.Context, o *UserTemplate) {
o.r.CreatorNoteAudios = []*userRCreatorNoteAudiosR{{ o.r.CreatorNoteAudios = []*userRCreatorNoteAudiosR{{

View file

@ -25,6 +25,7 @@ type preloadCounts struct {
CommsEmailContact commsEmailContactCountPreloader CommsEmailContact commsEmailContactCountPreloader
CommsEmailTemplate commsEmailTemplateCountPreloader CommsEmailTemplate commsEmailTemplateCountPreloader
CommsPhone commsPhoneCountPreloader CommsPhone commsPhoneCountPreloader
FileuploadFile fileuploadFileCountPreloader
NoteAudio noteAudioCountPreloader NoteAudio noteAudioCountPreloader
NoteImage noteImageCountPreloader NoteImage noteImageCountPreloader
Organization organizationCountPreloader Organization organizationCountPreloader
@ -41,6 +42,7 @@ func getPreloadCount() preloadCounts {
CommsEmailContact: buildCommsEmailContactCountPreloader(), CommsEmailContact: buildCommsEmailContactCountPreloader(),
CommsEmailTemplate: buildCommsEmailTemplateCountPreloader(), CommsEmailTemplate: buildCommsEmailTemplateCountPreloader(),
CommsPhone: buildCommsPhoneCountPreloader(), CommsPhone: buildCommsPhoneCountPreloader(),
FileuploadFile: buildFileuploadFileCountPreloader(),
NoteAudio: buildNoteAudioCountPreloader(), NoteAudio: buildNoteAudioCountPreloader(),
NoteImage: buildNoteImageCountPreloader(), NoteImage: buildNoteImageCountPreloader(),
Organization: buildOrganizationCountPreloader(), Organization: buildOrganizationCountPreloader(),
@ -57,6 +59,7 @@ type thenLoadCounts[Q orm.Loadable] struct {
CommsEmailContact commsEmailContactCountThenLoader[Q] CommsEmailContact commsEmailContactCountThenLoader[Q]
CommsEmailTemplate commsEmailTemplateCountThenLoader[Q] CommsEmailTemplate commsEmailTemplateCountThenLoader[Q]
CommsPhone commsPhoneCountThenLoader[Q] CommsPhone commsPhoneCountThenLoader[Q]
FileuploadFile fileuploadFileCountThenLoader[Q]
NoteAudio noteAudioCountThenLoader[Q] NoteAudio noteAudioCountThenLoader[Q]
NoteImage noteImageCountThenLoader[Q] NoteImage noteImageCountThenLoader[Q]
Organization organizationCountThenLoader[Q] Organization organizationCountThenLoader[Q]
@ -73,6 +76,7 @@ func getThenLoadCount[Q orm.Loadable]() thenLoadCounts[Q] {
CommsEmailContact: buildCommsEmailContactCountThenLoader[Q](), CommsEmailContact: buildCommsEmailContactCountThenLoader[Q](),
CommsEmailTemplate: buildCommsEmailTemplateCountThenLoader[Q](), CommsEmailTemplate: buildCommsEmailTemplateCountThenLoader[Q](),
CommsPhone: buildCommsPhoneCountThenLoader[Q](), CommsPhone: buildCommsPhoneCountThenLoader[Q](),
FileuploadFile: buildFileuploadFileCountThenLoader[Q](),
NoteAudio: buildNoteAudioCountThenLoader[Q](), NoteAudio: buildNoteAudioCountThenLoader[Q](),
NoteImage: buildNoteImageCountThenLoader[Q](), NoteImage: buildNoteImageCountThenLoader[Q](),
Organization: buildOrganizationCountThenLoader[Q](), Organization: buildOrganizationCountThenLoader[Q](),

View file

@ -70,6 +70,9 @@ type joins[Q dialect.Joinable] struct {
FieldseekerZones joinSet[fieldseekerZoneJoins[Q]] FieldseekerZones joinSet[fieldseekerZoneJoins[Q]]
FieldseekerZones2s joinSet[fieldseekerZones2Joins[Q]] FieldseekerZones2s joinSet[fieldseekerZones2Joins[Q]]
FieldseekerSyncs joinSet[fieldseekerSyncJoins[Q]] FieldseekerSyncs joinSet[fieldseekerSyncJoins[Q]]
FileuploadCSVS joinSet[fileuploadCSVJoins[Q]]
FileuploadErrors joinSet[fileuploadErrorJoins[Q]]
FileuploadFiles joinSet[fileuploadFileJoins[Q]]
H3Aggregations joinSet[h3AggregationJoins[Q]] H3Aggregations joinSet[h3AggregationJoins[Q]]
ImportDistricts joinSet[importDistrictJoins[Q]] ImportDistricts joinSet[importDistrictJoins[Q]]
NoteAudios joinSet[noteAudioJoins[Q]] NoteAudios joinSet[noteAudioJoins[Q]]
@ -144,6 +147,9 @@ func getJoins[Q dialect.Joinable]() joins[Q] {
FieldseekerZones: buildJoinSet[fieldseekerZoneJoins[Q]](FieldseekerZones.Columns, buildFieldseekerZoneJoins), FieldseekerZones: buildJoinSet[fieldseekerZoneJoins[Q]](FieldseekerZones.Columns, buildFieldseekerZoneJoins),
FieldseekerZones2s: buildJoinSet[fieldseekerZones2Joins[Q]](FieldseekerZones2s.Columns, buildFieldseekerZones2Joins), FieldseekerZones2s: buildJoinSet[fieldseekerZones2Joins[Q]](FieldseekerZones2s.Columns, buildFieldseekerZones2Joins),
FieldseekerSyncs: buildJoinSet[fieldseekerSyncJoins[Q]](FieldseekerSyncs.Columns, buildFieldseekerSyncJoins), FieldseekerSyncs: buildJoinSet[fieldseekerSyncJoins[Q]](FieldseekerSyncs.Columns, buildFieldseekerSyncJoins),
FileuploadCSVS: buildJoinSet[fileuploadCSVJoins[Q]](FileuploadCSVS.Columns, buildFileuploadCSVJoins),
FileuploadErrors: buildJoinSet[fileuploadErrorJoins[Q]](FileuploadErrors.Columns, buildFileuploadErrorJoins),
FileuploadFiles: buildJoinSet[fileuploadFileJoins[Q]](FileuploadFiles.Columns, buildFileuploadFileJoins),
H3Aggregations: buildJoinSet[h3AggregationJoins[Q]](H3Aggregations.Columns, buildH3AggregationJoins), H3Aggregations: buildJoinSet[h3AggregationJoins[Q]](H3Aggregations.Columns, buildH3AggregationJoins),
ImportDistricts: buildJoinSet[importDistrictJoins[Q]](ImportDistricts.Columns, buildImportDistrictJoins), ImportDistricts: buildJoinSet[importDistrictJoins[Q]](ImportDistricts.Columns, buildImportDistrictJoins),
NoteAudios: buildJoinSet[noteAudioJoins[Q]](NoteAudios.Columns, buildNoteAudioJoins), NoteAudios: buildJoinSet[noteAudioJoins[Q]](NoteAudios.Columns, buildNoteAudioJoins),

View file

@ -55,6 +55,9 @@ type preloaders struct {
FieldseekerZone fieldseekerZonePreloader FieldseekerZone fieldseekerZonePreloader
FieldseekerZones2 fieldseekerZones2Preloader FieldseekerZones2 fieldseekerZones2Preloader
FieldseekerSync fieldseekerSyncPreloader FieldseekerSync fieldseekerSyncPreloader
FileuploadCSV fileuploadCSVPreloader
FileuploadError fileuploadErrorPreloader
FileuploadFile fileuploadFilePreloader
H3Aggregation h3AggregationPreloader H3Aggregation h3AggregationPreloader
ImportDistrict importDistrictPreloader ImportDistrict importDistrictPreloader
NoteAudio noteAudioPreloader NoteAudio noteAudioPreloader
@ -121,6 +124,9 @@ func getPreloaders() preloaders {
FieldseekerZone: buildFieldseekerZonePreloader(), FieldseekerZone: buildFieldseekerZonePreloader(),
FieldseekerZones2: buildFieldseekerZones2Preloader(), FieldseekerZones2: buildFieldseekerZones2Preloader(),
FieldseekerSync: buildFieldseekerSyncPreloader(), FieldseekerSync: buildFieldseekerSyncPreloader(),
FileuploadCSV: buildFileuploadCSVPreloader(),
FileuploadError: buildFileuploadErrorPreloader(),
FileuploadFile: buildFileuploadFilePreloader(),
H3Aggregation: buildH3AggregationPreloader(), H3Aggregation: buildH3AggregationPreloader(),
ImportDistrict: buildImportDistrictPreloader(), ImportDistrict: buildImportDistrictPreloader(),
NoteAudio: buildNoteAudioPreloader(), NoteAudio: buildNoteAudioPreloader(),
@ -193,6 +199,9 @@ type thenLoaders[Q orm.Loadable] struct {
FieldseekerZone fieldseekerZoneThenLoader[Q] FieldseekerZone fieldseekerZoneThenLoader[Q]
FieldseekerZones2 fieldseekerZones2ThenLoader[Q] FieldseekerZones2 fieldseekerZones2ThenLoader[Q]
FieldseekerSync fieldseekerSyncThenLoader[Q] FieldseekerSync fieldseekerSyncThenLoader[Q]
FileuploadCSV fileuploadCSVThenLoader[Q]
FileuploadError fileuploadErrorThenLoader[Q]
FileuploadFile fileuploadFileThenLoader[Q]
H3Aggregation h3AggregationThenLoader[Q] H3Aggregation h3AggregationThenLoader[Q]
ImportDistrict importDistrictThenLoader[Q] ImportDistrict importDistrictThenLoader[Q]
NoteAudio noteAudioThenLoader[Q] NoteAudio noteAudioThenLoader[Q]
@ -259,6 +268,9 @@ func getThenLoaders[Q orm.Loadable]() thenLoaders[Q] {
FieldseekerZone: buildFieldseekerZoneThenLoader[Q](), FieldseekerZone: buildFieldseekerZoneThenLoader[Q](),
FieldseekerZones2: buildFieldseekerZones2ThenLoader[Q](), FieldseekerZones2: buildFieldseekerZones2ThenLoader[Q](),
FieldseekerSync: buildFieldseekerSyncThenLoader[Q](), FieldseekerSync: buildFieldseekerSyncThenLoader[Q](),
FileuploadCSV: buildFileuploadCSVThenLoader[Q](),
FileuploadError: buildFileuploadErrorThenLoader[Q](),
FileuploadFile: buildFileuploadFileThenLoader[Q](),
H3Aggregation: buildH3AggregationThenLoader[Q](), H3Aggregation: buildH3AggregationThenLoader[Q](),
ImportDistrict: buildImportDistrictThenLoader[Q](), ImportDistrict: buildImportDistrictThenLoader[Q](),
NoteAudio: buildNoteAudioThenLoader[Q](), NoteAudio: buildNoteAudioThenLoader[Q](),

View file

@ -55,6 +55,9 @@ func Where[Q psql.Filterable]() struct {
FieldseekerZones fieldseekerZoneWhere[Q] FieldseekerZones fieldseekerZoneWhere[Q]
FieldseekerZones2s fieldseekerZones2Where[Q] FieldseekerZones2s fieldseekerZones2Where[Q]
FieldseekerSyncs fieldseekerSyncWhere[Q] FieldseekerSyncs fieldseekerSyncWhere[Q]
FileuploadCSVS fileuploadCSVWhere[Q]
FileuploadErrors fileuploadErrorWhere[Q]
FileuploadFiles fileuploadFileWhere[Q]
GeographyColumns geographyColumnWhere[Q] GeographyColumns geographyColumnWhere[Q]
GeometryColumns geometryColumnWhere[Q] GeometryColumns geometryColumnWhere[Q]
GooseDBVersions gooseDBVersionWhere[Q] GooseDBVersions gooseDBVersionWhere[Q]
@ -127,6 +130,9 @@ func Where[Q psql.Filterable]() struct {
FieldseekerZones fieldseekerZoneWhere[Q] FieldseekerZones fieldseekerZoneWhere[Q]
FieldseekerZones2s fieldseekerZones2Where[Q] FieldseekerZones2s fieldseekerZones2Where[Q]
FieldseekerSyncs fieldseekerSyncWhere[Q] FieldseekerSyncs fieldseekerSyncWhere[Q]
FileuploadCSVS fileuploadCSVWhere[Q]
FileuploadErrors fileuploadErrorWhere[Q]
FileuploadFiles fileuploadFileWhere[Q]
GeographyColumns geographyColumnWhere[Q] GeographyColumns geographyColumnWhere[Q]
GeometryColumns geometryColumnWhere[Q] GeometryColumns geometryColumnWhere[Q]
GooseDBVersions gooseDBVersionWhere[Q] GooseDBVersions gooseDBVersionWhere[Q]
@ -198,6 +204,9 @@ func Where[Q psql.Filterable]() struct {
FieldseekerZones: buildFieldseekerZoneWhere[Q](FieldseekerZones.Columns), FieldseekerZones: buildFieldseekerZoneWhere[Q](FieldseekerZones.Columns),
FieldseekerZones2s: buildFieldseekerZones2Where[Q](FieldseekerZones2s.Columns), FieldseekerZones2s: buildFieldseekerZones2Where[Q](FieldseekerZones2s.Columns),
FieldseekerSyncs: buildFieldseekerSyncWhere[Q](FieldseekerSyncs.Columns), FieldseekerSyncs: buildFieldseekerSyncWhere[Q](FieldseekerSyncs.Columns),
FileuploadCSVS: buildFileuploadCSVWhere[Q](FileuploadCSVS.Columns),
FileuploadErrors: buildFileuploadErrorWhere[Q](FileuploadErrors.Columns),
FileuploadFiles: buildFileuploadFileWhere[Q](FileuploadFiles.Columns),
GeographyColumns: buildGeographyColumnWhere[Q](GeographyColumns.Columns), GeographyColumns: buildGeographyColumnWhere[Q](GeographyColumns.Columns),
GeometryColumns: buildGeometryColumnWhere[Q](GeometryColumns.Columns), GeometryColumns: buildGeometryColumnWhere[Q](GeometryColumns.Columns),
GooseDBVersions: buildGooseDBVersionWhere[Q](GooseDBVersions.Columns), GooseDBVersions: buildGooseDBVersionWhere[Q](GooseDBVersions.Columns),

View file

@ -0,0 +1,603 @@
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.
package models
import (
"context"
"fmt"
"io"
"github.com/Gleipnir-Technology/bob"
"github.com/Gleipnir-Technology/bob/dialect/psql"
"github.com/Gleipnir-Technology/bob/dialect/psql/dialect"
"github.com/Gleipnir-Technology/bob/dialect/psql/dm"
"github.com/Gleipnir-Technology/bob/dialect/psql/sm"
"github.com/Gleipnir-Technology/bob/dialect/psql/um"
"github.com/Gleipnir-Technology/bob/expr"
"github.com/Gleipnir-Technology/bob/mods"
"github.com/Gleipnir-Technology/bob/orm"
"github.com/Gleipnir-Technology/bob/types/pgtypes"
enums "github.com/Gleipnir-Technology/nidus-sync/db/enums"
"github.com/aarondl/opt/omit"
)
// FileuploadCSV is an object representing a row of the database table
// fileupload.csv: per-file CSV metadata keyed by the owning file's id.
type FileuploadCSV struct {
	FileID int32 `db:"file_id,pk" `
	Type enums.FileuploadCsvtype `db:"type_" `
	R fileuploadCSVR `db:"-" `
}

// FileuploadCSVSlice is an alias for a slice of pointers to FileuploadCSV.
// This should almost always be used instead of []*FileuploadCSV.
type FileuploadCSVSlice []*FileuploadCSV

// FileuploadCSVS contains methods to work with the fileupload.csv table.
var FileuploadCSVS = psql.NewTablex[*FileuploadCSV, FileuploadCSVSlice, *FileuploadCSVSetter]("fileupload", "csv", buildFileuploadCSVColumns("fileupload.csv"))

// FileuploadCSVSQuery is a query on the csv table.
type FileuploadCSVSQuery = *psql.ViewQuery[*FileuploadCSV, FileuploadCSVSlice]

// fileuploadCSVR is where relationships are stored: the owning file row,
// reached via the fileupload.csv.csv_file_id_fkey foreign key.
type fileuploadCSVR struct {
	File *FileuploadFile // fileupload.csv.csv_file_id_fkey
}
// buildFileuploadCSVColumns constructs the column expressions for the
// fileupload.csv table, qualified by the given table alias.
func buildFileuploadCSVColumns(alias string) fileuploadCSVColumns {
	return fileuploadCSVColumns{
		ColumnsExpr: expr.NewColumnsExpr(
			"file_id", "type_",
		).WithParent("fileupload.csv"),
		tableAlias: alias,
		FileID: psql.Quote(alias, "file_id"),
		Type: psql.Quote(alias, "type_"),
	}
}

// fileuploadCSVColumns holds alias-qualified expressions for each column of
// fileupload.csv.
type fileuploadCSVColumns struct {
	expr.ColumnsExpr
	tableAlias string
	FileID psql.Expression
	Type psql.Expression
}

// Alias returns the table alias these column expressions were built against.
func (c fileuploadCSVColumns) Alias() string {
	return c.tableAlias
}

// AliasedAs builds a fresh set of column expressions under the new alias.
func (fileuploadCSVColumns) AliasedAs(alias string) fileuploadCSVColumns {
	return buildFileuploadCSVColumns(alias)
}
// FileuploadCSVSetter is used for insert/upsert/update operations.
// All values are optional, and do not have to be set.
// Generated columns are not included.
type FileuploadCSVSetter struct {
	FileID omit.Val[int32] `db:"file_id,pk" `
	Type omit.Val[enums.FileuploadCsvtype] `db:"type_" `
}

// SetColumns reports the names of the columns that currently hold a value
// in this setter, in declaration order.
func (s FileuploadCSVSetter) SetColumns() []string {
	vals := make([]string, 0, 2)
	if s.FileID.IsValue() {
		vals = append(vals, "file_id")
	}
	if s.Type.IsValue() {
		vals = append(vals, "type_")
	}
	return vals
}

// Overwrite copies every set value from the setter onto the model,
// leaving fields that are unset in the setter untouched.
func (s FileuploadCSVSetter) Overwrite(t *FileuploadCSV) {
	if s.FileID.IsValue() {
		t.FileID = s.FileID.MustGet()
	}
	if s.Type.IsValue() {
		t.Type = s.Type.MustGet()
	}
}
// Apply adds this setter's values to an INSERT query. Before-insert hooks
// are registered first; each unset column is written as DEFAULT so the
// database supplies its own value.
func (s *FileuploadCSVSetter) Apply(q *dialect.InsertQuery) {
	q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
		return FileuploadCSVS.BeforeInsertHooks.RunHooks(ctx, exec, s)
	})
	q.AppendValues(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
		// One expression per table column, in declaration order.
		vals := make([]bob.Expression, 2)
		if s.FileID.IsValue() {
			vals[0] = psql.Arg(s.FileID.MustGet())
		} else {
			// No value set: let the database apply the column default.
			vals[0] = psql.Raw("DEFAULT")
		}
		if s.Type.IsValue() {
			vals[1] = psql.Arg(s.Type.MustGet())
		} else {
			vals[1] = psql.Raw("DEFAULT")
		}
		return bob.ExpressSlice(ctx, w, d, start, vals, "", ", ", "")
	}))
}
// UpdateMod converts the setter into a SET clause for an UPDATE query.
func (s FileuploadCSVSetter) UpdateMod() bob.Mod[*dialect.UpdateQuery] {
	return um.Set(s.Expressions()...)
}

// Expressions returns one "column = value" expression per set field,
// optionally qualifying the column name with the given prefix parts.
func (s FileuploadCSVSetter) Expressions(prefix ...string) []bob.Expression {
	exprs := make([]bob.Expression, 0, 2)
	if s.FileID.IsValue() {
		exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
			psql.Quote(append(prefix, "file_id")...),
			psql.Arg(s.FileID),
		}})
	}
	if s.Type.IsValue() {
		exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
			psql.Quote(append(prefix, "type_")...),
			psql.Arg(s.Type),
		}})
	}
	return exprs
}
// FindFileuploadCSV retrieves a single record by primary key.
// If cols is empty Find will return all columns; otherwise only the named
// columns are selected.
func FindFileuploadCSV(ctx context.Context, exec bob.Executor, FileIDPK int32, cols ...string) (*FileuploadCSV, error) {
	if len(cols) == 0 {
		return FileuploadCSVS.Query(
			sm.Where(FileuploadCSVS.Columns.FileID.EQ(psql.Arg(FileIDPK))),
		).One(ctx, exec)
	}
	return FileuploadCSVS.Query(
		sm.Where(FileuploadCSVS.Columns.FileID.EQ(psql.Arg(FileIDPK))),
		sm.Columns(FileuploadCSVS.Columns.Only(cols...)),
	).One(ctx, exec)
}

// FileuploadCSVExists checks the presence of a single record by primary key.
func FileuploadCSVExists(ctx context.Context, exec bob.Executor, FileIDPK int32) (bool, error) {
	return FileuploadCSVS.Query(
		sm.Where(FileuploadCSVS.Columns.FileID.EQ(psql.Arg(FileIDPK))),
	).Exists(ctx, exec)
}
// AfterQueryHook is called after a FileuploadCSV is retrieved from the
// database; it dispatches to the table's after-select/insert/update/delete
// hooks according to the query type that produced the row.
func (o *FileuploadCSV) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error {
	var err error
	// NOTE(review): the ctx returned by RunHooks is assigned but unused after
	// the switch — presumably intentional in the generator; confirm.
	switch queryType {
	case bob.QueryTypeSelect:
		ctx, err = FileuploadCSVS.AfterSelectHooks.RunHooks(ctx, exec, FileuploadCSVSlice{o})
	case bob.QueryTypeInsert:
		ctx, err = FileuploadCSVS.AfterInsertHooks.RunHooks(ctx, exec, FileuploadCSVSlice{o})
	case bob.QueryTypeUpdate:
		ctx, err = FileuploadCSVS.AfterUpdateHooks.RunHooks(ctx, exec, FileuploadCSVSlice{o})
	case bob.QueryTypeDelete:
		ctx, err = FileuploadCSVS.AfterDeleteHooks.RunHooks(ctx, exec, FileuploadCSVSlice{o})
	}
	return err
}
// primaryKeyVals returns the primary key values of the FileuploadCSV
// as a bound SQL argument.
func (o *FileuploadCSV) primaryKeyVals() bob.Expression {
	return psql.Arg(o.FileID)
}

// pkEQ builds the WHERE expression matching this row by primary key:
// "fileupload.csv"."file_id" = $pk.
func (o *FileuploadCSV) pkEQ() dialect.Expression {
	return psql.Quote("fileupload.csv", "file_id").EQ(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
		return o.primaryKeyVals().WriteSQL(ctx, w, d, start)
	}))
}
// Update uses an executor to update the FileuploadCSV row matching this
// model's primary key, then replaces the receiver with the returned row.
func (o *FileuploadCSV) Update(ctx context.Context, exec bob.Executor, s *FileuploadCSVSetter) error {
	v, err := FileuploadCSVS.Update(s.UpdateMod(), um.Where(o.pkEQ())).One(ctx, exec)
	if err != nil {
		return err
	}
	// NOTE(review): this assignment is immediately overwritten by *o = *v,
	// so o ends up with v's (fresh) relationships — unlike Reload, which
	// preserves o.R. Looks like a generator quirk; confirm before changing.
	o.R = v.R
	*o = *v
	return nil
}

// Delete deletes the single FileuploadCSV record matching this model's
// primary key with an executor.
func (o *FileuploadCSV) Delete(ctx context.Context, exec bob.Executor) error {
	_, err := FileuploadCSVS.Delete(dm.Where(o.pkEQ())).Exec(ctx, exec)
	return err
}
// Reload refreshes the FileuploadCSV using the executor
func (o *FileuploadCSV) Reload(ctx context.Context, exec bob.Executor) error {
o2, err := FileuploadCSVS.Query(
sm.Where(FileuploadCSVS.Columns.FileID.EQ(psql.Arg(o.FileID))),
).One(ctx, exec)
if err != nil {
return err
}
o2.R = o.R
*o = *o2
return nil
}
// AfterQueryHook is called after FileuploadCSVSlice is retrieved from the database
func (o FileuploadCSVSlice) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error {
var err error
switch queryType {
case bob.QueryTypeSelect:
ctx, err = FileuploadCSVS.AfterSelectHooks.RunHooks(ctx, exec, o)
case bob.QueryTypeInsert:
ctx, err = FileuploadCSVS.AfterInsertHooks.RunHooks(ctx, exec, o)
case bob.QueryTypeUpdate:
ctx, err = FileuploadCSVS.AfterUpdateHooks.RunHooks(ctx, exec, o)
case bob.QueryTypeDelete:
ctx, err = FileuploadCSVS.AfterDeleteHooks.RunHooks(ctx, exec, o)
}
return err
}
func (o FileuploadCSVSlice) pkIN() dialect.Expression {
if len(o) == 0 {
return psql.Raw("NULL")
}
return psql.Quote("fileupload.csv", "file_id").In(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
pkPairs := make([]bob.Expression, len(o))
for i, row := range o {
pkPairs[i] = row.primaryKeyVals()
}
return bob.ExpressSlice(ctx, w, d, start, pkPairs, "", ", ", "")
}))
}
// copyMatchingRows finds models in the given slice that have the same primary key
// then it first copies the existing relationships from the old model to the new model
// and then replaces the old model in the slice with the new model
func (o FileuploadCSVSlice) copyMatchingRows(from ...*FileuploadCSV) {
for i, old := range o {
for _, new := range from {
if new.FileID != old.FileID {
continue
}
new.R = old.R
o[i] = new
break
}
}
}
// UpdateMod modifies an update query with "WHERE primary_key IN (o...)"
func (o FileuploadCSVSlice) UpdateMod() bob.Mod[*dialect.UpdateQuery] {
return bob.ModFunc[*dialect.UpdateQuery](func(q *dialect.UpdateQuery) {
q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
return FileuploadCSVS.BeforeUpdateHooks.RunHooks(ctx, exec, o)
})
q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error {
var err error
switch retrieved := retrieved.(type) {
case *FileuploadCSV:
o.copyMatchingRows(retrieved)
case []*FileuploadCSV:
o.copyMatchingRows(retrieved...)
case FileuploadCSVSlice:
o.copyMatchingRows(retrieved...)
default:
// If the retrieved value is not a FileuploadCSV or a slice of FileuploadCSV
// then run the AfterUpdateHooks on the slice
_, err = FileuploadCSVS.AfterUpdateHooks.RunHooks(ctx, exec, o)
}
return err
}))
q.AppendWhere(o.pkIN())
})
}
// DeleteMod modifies an delete query with "WHERE primary_key IN (o...)"
func (o FileuploadCSVSlice) DeleteMod() bob.Mod[*dialect.DeleteQuery] {
return bob.ModFunc[*dialect.DeleteQuery](func(q *dialect.DeleteQuery) {
q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
return FileuploadCSVS.BeforeDeleteHooks.RunHooks(ctx, exec, o)
})
q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error {
var err error
switch retrieved := retrieved.(type) {
case *FileuploadCSV:
o.copyMatchingRows(retrieved)
case []*FileuploadCSV:
o.copyMatchingRows(retrieved...)
case FileuploadCSVSlice:
o.copyMatchingRows(retrieved...)
default:
// If the retrieved value is not a FileuploadCSV or a slice of FileuploadCSV
// then run the AfterDeleteHooks on the slice
_, err = FileuploadCSVS.AfterDeleteHooks.RunHooks(ctx, exec, o)
}
return err
}))
q.AppendWhere(o.pkIN())
})
}
func (o FileuploadCSVSlice) UpdateAll(ctx context.Context, exec bob.Executor, vals FileuploadCSVSetter) error {
if len(o) == 0 {
return nil
}
_, err := FileuploadCSVS.Update(vals.UpdateMod(), o.UpdateMod()).All(ctx, exec)
return err
}
func (o FileuploadCSVSlice) DeleteAll(ctx context.Context, exec bob.Executor) error {
if len(o) == 0 {
return nil
}
_, err := FileuploadCSVS.Delete(o.DeleteMod()).Exec(ctx, exec)
return err
}
func (o FileuploadCSVSlice) ReloadAll(ctx context.Context, exec bob.Executor) error {
if len(o) == 0 {
return nil
}
o2, err := FileuploadCSVS.Query(sm.Where(o.pkIN())).All(ctx, exec)
if err != nil {
return err
}
o.copyMatchingRows(o2...)
return nil
}
// File starts a query for related objects on fileupload.file
func (o *FileuploadCSV) File(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadFilesQuery {
return FileuploadFiles.Query(append(mods,
sm.Where(FileuploadFiles.Columns.ID.EQ(psql.Arg(o.FileID))),
)...)
}
func (os FileuploadCSVSlice) File(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadFilesQuery {
pkFileID := make(pgtypes.Array[int32], 0, len(os))
for _, o := range os {
if o == nil {
continue
}
pkFileID = append(pkFileID, o.FileID)
}
PKArgExpr := psql.Select(sm.Columns(
psql.F("unnest", psql.Cast(psql.Arg(pkFileID), "integer[]")),
))
return FileuploadFiles.Query(append(mods,
sm.Where(psql.Group(FileuploadFiles.Columns.ID).OP("IN", PKArgExpr)),
)...)
}
func attachFileuploadCSVFile0(ctx context.Context, exec bob.Executor, count int, fileuploadCSV0 *FileuploadCSV, fileuploadFile1 *FileuploadFile) (*FileuploadCSV, error) {
setter := &FileuploadCSVSetter{
FileID: omit.From(fileuploadFile1.ID),
}
err := fileuploadCSV0.Update(ctx, exec, setter)
if err != nil {
return nil, fmt.Errorf("attachFileuploadCSVFile0: %w", err)
}
return fileuploadCSV0, nil
}
func (fileuploadCSV0 *FileuploadCSV) InsertFile(ctx context.Context, exec bob.Executor, related *FileuploadFileSetter) error {
var err error
fileuploadFile1, err := FileuploadFiles.Insert(related).One(ctx, exec)
if err != nil {
return fmt.Errorf("inserting related objects: %w", err)
}
_, err = attachFileuploadCSVFile0(ctx, exec, 1, fileuploadCSV0, fileuploadFile1)
if err != nil {
return err
}
fileuploadCSV0.R.File = fileuploadFile1
fileuploadFile1.R.CSV = fileuploadCSV0
return nil
}
func (fileuploadCSV0 *FileuploadCSV) AttachFile(ctx context.Context, exec bob.Executor, fileuploadFile1 *FileuploadFile) error {
var err error
_, err = attachFileuploadCSVFile0(ctx, exec, 1, fileuploadCSV0, fileuploadFile1)
if err != nil {
return err
}
fileuploadCSV0.R.File = fileuploadFile1
fileuploadFile1.R.CSV = fileuploadCSV0
return nil
}
type fileuploadCSVWhere[Q psql.Filterable] struct {
FileID psql.WhereMod[Q, int32]
Type psql.WhereMod[Q, enums.FileuploadCsvtype]
}
func (fileuploadCSVWhere[Q]) AliasedAs(alias string) fileuploadCSVWhere[Q] {
return buildFileuploadCSVWhere[Q](buildFileuploadCSVColumns(alias))
}
func buildFileuploadCSVWhere[Q psql.Filterable](cols fileuploadCSVColumns) fileuploadCSVWhere[Q] {
return fileuploadCSVWhere[Q]{
FileID: psql.Where[Q, int32](cols.FileID),
Type: psql.Where[Q, enums.FileuploadCsvtype](cols.Type),
}
}
func (o *FileuploadCSV) Preload(name string, retrieved any) error {
if o == nil {
return nil
}
switch name {
case "File":
rel, ok := retrieved.(*FileuploadFile)
if !ok {
return fmt.Errorf("fileuploadCSV cannot load %T as %q", retrieved, name)
}
o.R.File = rel
if rel != nil {
rel.R.CSV = o
}
return nil
default:
return fmt.Errorf("fileuploadCSV has no relationship %q", name)
}
}
type fileuploadCSVPreloader struct {
File func(...psql.PreloadOption) psql.Preloader
}
func buildFileuploadCSVPreloader() fileuploadCSVPreloader {
return fileuploadCSVPreloader{
File: func(opts ...psql.PreloadOption) psql.Preloader {
return psql.Preload[*FileuploadFile, FileuploadFileSlice](psql.PreloadRel{
Name: "File",
Sides: []psql.PreloadSide{
{
From: FileuploadCSVS,
To: FileuploadFiles,
FromColumns: []string{"file_id"},
ToColumns: []string{"id"},
},
},
}, FileuploadFiles.Columns.Names(), opts...)
},
}
}
type fileuploadCSVThenLoader[Q orm.Loadable] struct {
File func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
}
func buildFileuploadCSVThenLoader[Q orm.Loadable]() fileuploadCSVThenLoader[Q] {
type FileLoadInterface interface {
LoadFile(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
}
return fileuploadCSVThenLoader[Q]{
File: thenLoadBuilder[Q](
"File",
func(ctx context.Context, exec bob.Executor, retrieved FileLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
return retrieved.LoadFile(ctx, exec, mods...)
},
),
}
}
// LoadFile loads the fileuploadCSV's File into the .R struct
func (o *FileuploadCSV) LoadFile(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
if o == nil {
return nil
}
// Reset the relationship
o.R.File = nil
related, err := o.File(mods...).One(ctx, exec)
if err != nil {
return err
}
related.R.CSV = o
o.R.File = related
return nil
}
// LoadFile loads the fileuploadCSV's File into the .R struct
func (os FileuploadCSVSlice) LoadFile(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
if len(os) == 0 {
return nil
}
fileuploadFiles, err := os.File(mods...).All(ctx, exec)
if err != nil {
return err
}
for _, o := range os {
if o == nil {
continue
}
for _, rel := range fileuploadFiles {
if !(o.FileID == rel.ID) {
continue
}
rel.R.CSV = o
o.R.File = rel
break
}
}
return nil
}
type fileuploadCSVJoins[Q dialect.Joinable] struct {
typ string
File modAs[Q, fileuploadFileColumns]
}
func (j fileuploadCSVJoins[Q]) aliasedAs(alias string) fileuploadCSVJoins[Q] {
return buildFileuploadCSVJoins[Q](buildFileuploadCSVColumns(alias), j.typ)
}
func buildFileuploadCSVJoins[Q dialect.Joinable](cols fileuploadCSVColumns, typ string) fileuploadCSVJoins[Q] {
return fileuploadCSVJoins[Q]{
typ: typ,
File: modAs[Q, fileuploadFileColumns]{
c: FileuploadFiles.Columns,
f: func(to fileuploadFileColumns) bob.Mod[Q] {
mods := make(mods.QueryMods[Q], 0, 1)
{
mods = append(mods, dialect.Join[Q](typ, FileuploadFiles.Name().As(to.Alias())).On(
to.ID.EQ(cols.FileID),
))
}
return mods
},
},
}
}

View file

@ -0,0 +1,652 @@
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.
package models
import (
"context"
"fmt"
"io"
"github.com/Gleipnir-Technology/bob"
"github.com/Gleipnir-Technology/bob/dialect/psql"
"github.com/Gleipnir-Technology/bob/dialect/psql/dialect"
"github.com/Gleipnir-Technology/bob/dialect/psql/dm"
"github.com/Gleipnir-Technology/bob/dialect/psql/sm"
"github.com/Gleipnir-Technology/bob/dialect/psql/um"
"github.com/Gleipnir-Technology/bob/expr"
"github.com/Gleipnir-Technology/bob/mods"
"github.com/Gleipnir-Technology/bob/orm"
"github.com/Gleipnir-Technology/bob/types/pgtypes"
"github.com/aarondl/opt/omit"
)
// FileuploadError is an object representing the database table.
type FileuploadError struct {
FileID int32 `db:"file_id" `
ID int32 `db:"id,pk" `
Line int32 `db:"line" `
Message string `db:"message" `
R fileuploadErrorR `db:"-" `
}
// FileuploadErrorSlice is an alias for a slice of pointers to FileuploadError.
// This should almost always be used instead of []*FileuploadError.
type FileuploadErrorSlice []*FileuploadError
// FileuploadErrors contains methods to work with the error table
var FileuploadErrors = psql.NewTablex[*FileuploadError, FileuploadErrorSlice, *FileuploadErrorSetter]("fileupload", "error", buildFileuploadErrorColumns("fileupload.error"))
// FileuploadErrorsQuery is a query on the error table
type FileuploadErrorsQuery = *psql.ViewQuery[*FileuploadError, FileuploadErrorSlice]
// fileuploadErrorR is where relationships are stored.
type fileuploadErrorR struct {
File *FileuploadFile // fileupload.error.error_file_id_fkey
}
func buildFileuploadErrorColumns(alias string) fileuploadErrorColumns {
return fileuploadErrorColumns{
ColumnsExpr: expr.NewColumnsExpr(
"file_id", "id", "line", "message",
).WithParent("fileupload.error"),
tableAlias: alias,
FileID: psql.Quote(alias, "file_id"),
ID: psql.Quote(alias, "id"),
Line: psql.Quote(alias, "line"),
Message: psql.Quote(alias, "message"),
}
}
type fileuploadErrorColumns struct {
expr.ColumnsExpr
tableAlias string
FileID psql.Expression
ID psql.Expression
Line psql.Expression
Message psql.Expression
}
func (c fileuploadErrorColumns) Alias() string {
return c.tableAlias
}
func (fileuploadErrorColumns) AliasedAs(alias string) fileuploadErrorColumns {
return buildFileuploadErrorColumns(alias)
}
// FileuploadErrorSetter is used for insert/upsert/update operations
// All values are optional, and do not have to be set
// Generated columns are not included
type FileuploadErrorSetter struct {
FileID omit.Val[int32] `db:"file_id" `
ID omit.Val[int32] `db:"id,pk" `
Line omit.Val[int32] `db:"line" `
Message omit.Val[string] `db:"message" `
}
func (s FileuploadErrorSetter) SetColumns() []string {
vals := make([]string, 0, 4)
if s.FileID.IsValue() {
vals = append(vals, "file_id")
}
if s.ID.IsValue() {
vals = append(vals, "id")
}
if s.Line.IsValue() {
vals = append(vals, "line")
}
if s.Message.IsValue() {
vals = append(vals, "message")
}
return vals
}
func (s FileuploadErrorSetter) Overwrite(t *FileuploadError) {
if s.FileID.IsValue() {
t.FileID = s.FileID.MustGet()
}
if s.ID.IsValue() {
t.ID = s.ID.MustGet()
}
if s.Line.IsValue() {
t.Line = s.Line.MustGet()
}
if s.Message.IsValue() {
t.Message = s.Message.MustGet()
}
}
func (s *FileuploadErrorSetter) Apply(q *dialect.InsertQuery) {
q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
return FileuploadErrors.BeforeInsertHooks.RunHooks(ctx, exec, s)
})
q.AppendValues(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
vals := make([]bob.Expression, 4)
if s.FileID.IsValue() {
vals[0] = psql.Arg(s.FileID.MustGet())
} else {
vals[0] = psql.Raw("DEFAULT")
}
if s.ID.IsValue() {
vals[1] = psql.Arg(s.ID.MustGet())
} else {
vals[1] = psql.Raw("DEFAULT")
}
if s.Line.IsValue() {
vals[2] = psql.Arg(s.Line.MustGet())
} else {
vals[2] = psql.Raw("DEFAULT")
}
if s.Message.IsValue() {
vals[3] = psql.Arg(s.Message.MustGet())
} else {
vals[3] = psql.Raw("DEFAULT")
}
return bob.ExpressSlice(ctx, w, d, start, vals, "", ", ", "")
}))
}
func (s FileuploadErrorSetter) UpdateMod() bob.Mod[*dialect.UpdateQuery] {
return um.Set(s.Expressions()...)
}
func (s FileuploadErrorSetter) Expressions(prefix ...string) []bob.Expression {
exprs := make([]bob.Expression, 0, 4)
if s.FileID.IsValue() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "file_id")...),
psql.Arg(s.FileID),
}})
}
if s.ID.IsValue() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "id")...),
psql.Arg(s.ID),
}})
}
if s.Line.IsValue() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "line")...),
psql.Arg(s.Line),
}})
}
if s.Message.IsValue() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "message")...),
psql.Arg(s.Message),
}})
}
return exprs
}
// FindFileuploadError retrieves a single record by primary key
// If cols is empty Find will return all columns.
func FindFileuploadError(ctx context.Context, exec bob.Executor, IDPK int32, cols ...string) (*FileuploadError, error) {
if len(cols) == 0 {
return FileuploadErrors.Query(
sm.Where(FileuploadErrors.Columns.ID.EQ(psql.Arg(IDPK))),
).One(ctx, exec)
}
return FileuploadErrors.Query(
sm.Where(FileuploadErrors.Columns.ID.EQ(psql.Arg(IDPK))),
sm.Columns(FileuploadErrors.Columns.Only(cols...)),
).One(ctx, exec)
}
// FileuploadErrorExists checks the presence of a single record by primary key
func FileuploadErrorExists(ctx context.Context, exec bob.Executor, IDPK int32) (bool, error) {
return FileuploadErrors.Query(
sm.Where(FileuploadErrors.Columns.ID.EQ(psql.Arg(IDPK))),
).Exists(ctx, exec)
}
// AfterQueryHook is called after FileuploadError is retrieved from the database
func (o *FileuploadError) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error {
var err error
switch queryType {
case bob.QueryTypeSelect:
ctx, err = FileuploadErrors.AfterSelectHooks.RunHooks(ctx, exec, FileuploadErrorSlice{o})
case bob.QueryTypeInsert:
ctx, err = FileuploadErrors.AfterInsertHooks.RunHooks(ctx, exec, FileuploadErrorSlice{o})
case bob.QueryTypeUpdate:
ctx, err = FileuploadErrors.AfterUpdateHooks.RunHooks(ctx, exec, FileuploadErrorSlice{o})
case bob.QueryTypeDelete:
ctx, err = FileuploadErrors.AfterDeleteHooks.RunHooks(ctx, exec, FileuploadErrorSlice{o})
}
return err
}
// primaryKeyVals returns the primary key values of the FileuploadError
func (o *FileuploadError) primaryKeyVals() bob.Expression {
return psql.Arg(o.ID)
}
func (o *FileuploadError) pkEQ() dialect.Expression {
return psql.Quote("fileupload.error", "id").EQ(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
return o.primaryKeyVals().WriteSQL(ctx, w, d, start)
}))
}
// Update uses an executor to update the FileuploadError
func (o *FileuploadError) Update(ctx context.Context, exec bob.Executor, s *FileuploadErrorSetter) error {
v, err := FileuploadErrors.Update(s.UpdateMod(), um.Where(o.pkEQ())).One(ctx, exec)
if err != nil {
return err
}
o.R = v.R
*o = *v
return nil
}
// Delete deletes a single FileuploadError record with an executor
func (o *FileuploadError) Delete(ctx context.Context, exec bob.Executor) error {
_, err := FileuploadErrors.Delete(dm.Where(o.pkEQ())).Exec(ctx, exec)
return err
}
// Reload refreshes the FileuploadError using the executor
func (o *FileuploadError) Reload(ctx context.Context, exec bob.Executor) error {
o2, err := FileuploadErrors.Query(
sm.Where(FileuploadErrors.Columns.ID.EQ(psql.Arg(o.ID))),
).One(ctx, exec)
if err != nil {
return err
}
o2.R = o.R
*o = *o2
return nil
}
// AfterQueryHook is called after FileuploadErrorSlice is retrieved from the database
func (o FileuploadErrorSlice) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error {
var err error
switch queryType {
case bob.QueryTypeSelect:
ctx, err = FileuploadErrors.AfterSelectHooks.RunHooks(ctx, exec, o)
case bob.QueryTypeInsert:
ctx, err = FileuploadErrors.AfterInsertHooks.RunHooks(ctx, exec, o)
case bob.QueryTypeUpdate:
ctx, err = FileuploadErrors.AfterUpdateHooks.RunHooks(ctx, exec, o)
case bob.QueryTypeDelete:
ctx, err = FileuploadErrors.AfterDeleteHooks.RunHooks(ctx, exec, o)
}
return err
}
func (o FileuploadErrorSlice) pkIN() dialect.Expression {
if len(o) == 0 {
return psql.Raw("NULL")
}
return psql.Quote("fileupload.error", "id").In(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
pkPairs := make([]bob.Expression, len(o))
for i, row := range o {
pkPairs[i] = row.primaryKeyVals()
}
return bob.ExpressSlice(ctx, w, d, start, pkPairs, "", ", ", "")
}))
}
// copyMatchingRows finds models in the given slice that have the same primary key
// then it first copies the existing relationships from the old model to the new model
// and then replaces the old model in the slice with the new model
func (o FileuploadErrorSlice) copyMatchingRows(from ...*FileuploadError) {
for i, old := range o {
for _, new := range from {
if new.ID != old.ID {
continue
}
new.R = old.R
o[i] = new
break
}
}
}
// UpdateMod modifies an update query with "WHERE primary_key IN (o...)"
func (o FileuploadErrorSlice) UpdateMod() bob.Mod[*dialect.UpdateQuery] {
return bob.ModFunc[*dialect.UpdateQuery](func(q *dialect.UpdateQuery) {
q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
return FileuploadErrors.BeforeUpdateHooks.RunHooks(ctx, exec, o)
})
q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error {
var err error
switch retrieved := retrieved.(type) {
case *FileuploadError:
o.copyMatchingRows(retrieved)
case []*FileuploadError:
o.copyMatchingRows(retrieved...)
case FileuploadErrorSlice:
o.copyMatchingRows(retrieved...)
default:
// If the retrieved value is not a FileuploadError or a slice of FileuploadError
// then run the AfterUpdateHooks on the slice
_, err = FileuploadErrors.AfterUpdateHooks.RunHooks(ctx, exec, o)
}
return err
}))
q.AppendWhere(o.pkIN())
})
}
// DeleteMod modifies an delete query with "WHERE primary_key IN (o...)"
func (o FileuploadErrorSlice) DeleteMod() bob.Mod[*dialect.DeleteQuery] {
return bob.ModFunc[*dialect.DeleteQuery](func(q *dialect.DeleteQuery) {
q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
return FileuploadErrors.BeforeDeleteHooks.RunHooks(ctx, exec, o)
})
q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error {
var err error
switch retrieved := retrieved.(type) {
case *FileuploadError:
o.copyMatchingRows(retrieved)
case []*FileuploadError:
o.copyMatchingRows(retrieved...)
case FileuploadErrorSlice:
o.copyMatchingRows(retrieved...)
default:
// If the retrieved value is not a FileuploadError or a slice of FileuploadError
// then run the AfterDeleteHooks on the slice
_, err = FileuploadErrors.AfterDeleteHooks.RunHooks(ctx, exec, o)
}
return err
}))
q.AppendWhere(o.pkIN())
})
}
func (o FileuploadErrorSlice) UpdateAll(ctx context.Context, exec bob.Executor, vals FileuploadErrorSetter) error {
if len(o) == 0 {
return nil
}
_, err := FileuploadErrors.Update(vals.UpdateMod(), o.UpdateMod()).All(ctx, exec)
return err
}
func (o FileuploadErrorSlice) DeleteAll(ctx context.Context, exec bob.Executor) error {
if len(o) == 0 {
return nil
}
_, err := FileuploadErrors.Delete(o.DeleteMod()).Exec(ctx, exec)
return err
}
func (o FileuploadErrorSlice) ReloadAll(ctx context.Context, exec bob.Executor) error {
if len(o) == 0 {
return nil
}
o2, err := FileuploadErrors.Query(sm.Where(o.pkIN())).All(ctx, exec)
if err != nil {
return err
}
o.copyMatchingRows(o2...)
return nil
}
// File starts a query for related objects on fileupload.file
func (o *FileuploadError) File(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadFilesQuery {
return FileuploadFiles.Query(append(mods,
sm.Where(FileuploadFiles.Columns.ID.EQ(psql.Arg(o.FileID))),
)...)
}
func (os FileuploadErrorSlice) File(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadFilesQuery {
pkFileID := make(pgtypes.Array[int32], 0, len(os))
for _, o := range os {
if o == nil {
continue
}
pkFileID = append(pkFileID, o.FileID)
}
PKArgExpr := psql.Select(sm.Columns(
psql.F("unnest", psql.Cast(psql.Arg(pkFileID), "integer[]")),
))
return FileuploadFiles.Query(append(mods,
sm.Where(psql.Group(FileuploadFiles.Columns.ID).OP("IN", PKArgExpr)),
)...)
}
func attachFileuploadErrorFile0(ctx context.Context, exec bob.Executor, count int, fileuploadError0 *FileuploadError, fileuploadFile1 *FileuploadFile) (*FileuploadError, error) {
setter := &FileuploadErrorSetter{
FileID: omit.From(fileuploadFile1.ID),
}
err := fileuploadError0.Update(ctx, exec, setter)
if err != nil {
return nil, fmt.Errorf("attachFileuploadErrorFile0: %w", err)
}
return fileuploadError0, nil
}
func (fileuploadError0 *FileuploadError) InsertFile(ctx context.Context, exec bob.Executor, related *FileuploadFileSetter) error {
var err error
fileuploadFile1, err := FileuploadFiles.Insert(related).One(ctx, exec)
if err != nil {
return fmt.Errorf("inserting related objects: %w", err)
}
_, err = attachFileuploadErrorFile0(ctx, exec, 1, fileuploadError0, fileuploadFile1)
if err != nil {
return err
}
fileuploadError0.R.File = fileuploadFile1
fileuploadFile1.R.Errors = append(fileuploadFile1.R.Errors, fileuploadError0)
return nil
}
func (fileuploadError0 *FileuploadError) AttachFile(ctx context.Context, exec bob.Executor, fileuploadFile1 *FileuploadFile) error {
var err error
_, err = attachFileuploadErrorFile0(ctx, exec, 1, fileuploadError0, fileuploadFile1)
if err != nil {
return err
}
fileuploadError0.R.File = fileuploadFile1
fileuploadFile1.R.Errors = append(fileuploadFile1.R.Errors, fileuploadError0)
return nil
}
type fileuploadErrorWhere[Q psql.Filterable] struct {
FileID psql.WhereMod[Q, int32]
ID psql.WhereMod[Q, int32]
Line psql.WhereMod[Q, int32]
Message psql.WhereMod[Q, string]
}
func (fileuploadErrorWhere[Q]) AliasedAs(alias string) fileuploadErrorWhere[Q] {
return buildFileuploadErrorWhere[Q](buildFileuploadErrorColumns(alias))
}
func buildFileuploadErrorWhere[Q psql.Filterable](cols fileuploadErrorColumns) fileuploadErrorWhere[Q] {
return fileuploadErrorWhere[Q]{
FileID: psql.Where[Q, int32](cols.FileID),
ID: psql.Where[Q, int32](cols.ID),
Line: psql.Where[Q, int32](cols.Line),
Message: psql.Where[Q, string](cols.Message),
}
}
func (o *FileuploadError) Preload(name string, retrieved any) error {
if o == nil {
return nil
}
switch name {
case "File":
rel, ok := retrieved.(*FileuploadFile)
if !ok {
return fmt.Errorf("fileuploadError cannot load %T as %q", retrieved, name)
}
o.R.File = rel
if rel != nil {
rel.R.Errors = FileuploadErrorSlice{o}
}
return nil
default:
return fmt.Errorf("fileuploadError has no relationship %q", name)
}
}
type fileuploadErrorPreloader struct {
File func(...psql.PreloadOption) psql.Preloader
}
func buildFileuploadErrorPreloader() fileuploadErrorPreloader {
return fileuploadErrorPreloader{
File: func(opts ...psql.PreloadOption) psql.Preloader {
return psql.Preload[*FileuploadFile, FileuploadFileSlice](psql.PreloadRel{
Name: "File",
Sides: []psql.PreloadSide{
{
From: FileuploadErrors,
To: FileuploadFiles,
FromColumns: []string{"file_id"},
ToColumns: []string{"id"},
},
},
}, FileuploadFiles.Columns.Names(), opts...)
},
}
}
type fileuploadErrorThenLoader[Q orm.Loadable] struct {
File func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
}
func buildFileuploadErrorThenLoader[Q orm.Loadable]() fileuploadErrorThenLoader[Q] {
type FileLoadInterface interface {
LoadFile(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
}
return fileuploadErrorThenLoader[Q]{
File: thenLoadBuilder[Q](
"File",
func(ctx context.Context, exec bob.Executor, retrieved FileLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
return retrieved.LoadFile(ctx, exec, mods...)
},
),
}
}
// LoadFile loads the fileuploadError's File into the .R struct
func (o *FileuploadError) LoadFile(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
if o == nil {
return nil
}
// Reset the relationship
o.R.File = nil
related, err := o.File(mods...).One(ctx, exec)
if err != nil {
return err
}
related.R.Errors = FileuploadErrorSlice{o}
o.R.File = related
return nil
}
// LoadFile loads the fileuploadError's File into the .R struct
func (os FileuploadErrorSlice) LoadFile(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
if len(os) == 0 {
return nil
}
fileuploadFiles, err := os.File(mods...).All(ctx, exec)
if err != nil {
return err
}
for _, o := range os {
if o == nil {
continue
}
for _, rel := range fileuploadFiles {
if !(o.FileID == rel.ID) {
continue
}
rel.R.Errors = append(rel.R.Errors, o)
o.R.File = rel
break
}
}
return nil
}
type fileuploadErrorJoins[Q dialect.Joinable] struct {
typ string
File modAs[Q, fileuploadFileColumns]
}
func (j fileuploadErrorJoins[Q]) aliasedAs(alias string) fileuploadErrorJoins[Q] {
return buildFileuploadErrorJoins[Q](buildFileuploadErrorColumns(alias), j.typ)
}
func buildFileuploadErrorJoins[Q dialect.Joinable](cols fileuploadErrorColumns, typ string) fileuploadErrorJoins[Q] {
return fileuploadErrorJoins[Q]{
typ: typ,
File: modAs[Q, fileuploadFileColumns]{
c: FileuploadFiles.Columns,
f: func(to fileuploadFileColumns) bob.Mod[Q] {
mods := make(mods.QueryMods[Q], 0, 1)
{
mods = append(mods, dialect.Join[Q](typ, FileuploadFiles.Name().As(to.Alias())).On(
to.ID.EQ(cols.FileID),
))
}
return mods
},
},
}
}

File diff suppressed because it is too large Load diff

View file

@ -57,14 +57,15 @@ type UsersQuery = *psql.ViewQuery[*User, UserSlice]
// userR is where relationships are stored. // userR is where relationships are stored.
type userR struct { type userR struct {
PublicUserUser ArcgisUserSlice // arcgis.user_.user__public_user_id_fkey PublicUserUser ArcgisUserSlice // arcgis.user_.user__public_user_id_fkey
CreatorNoteAudios NoteAudioSlice // note_audio.note_audio_creator_id_fkey CreatorFiles FileuploadFileSlice // fileupload.file.file_creator_id_fkey
DeletorNoteAudios NoteAudioSlice // note_audio.note_audio_deletor_id_fkey CreatorNoteAudios NoteAudioSlice // note_audio.note_audio_creator_id_fkey
CreatorNoteImages NoteImageSlice // note_image.note_image_creator_id_fkey DeletorNoteAudios NoteAudioSlice // note_audio.note_audio_deletor_id_fkey
DeletorNoteImages NoteImageSlice // note_image.note_image_deletor_id_fkey CreatorNoteImages NoteImageSlice // note_image.note_image_creator_id_fkey
UserNotifications NotificationSlice // notification.notification_user_id_fkey DeletorNoteImages NoteImageSlice // note_image.note_image_deletor_id_fkey
UserOauthTokens OauthTokenSlice // oauth_token.oauth_token_user_id_fkey UserNotifications NotificationSlice // notification.notification_user_id_fkey
Organization *Organization // user_.user__organization_id_fkey UserOauthTokens OauthTokenSlice // oauth_token.oauth_token_user_id_fkey
Organization *Organization // user_.user__organization_id_fkey
} }
func buildUserColumns(alias string) userColumns { func buildUserColumns(alias string) userColumns {
@ -635,6 +636,30 @@ func (os UserSlice) PublicUserUser(mods ...bob.Mod[*dialect.SelectQuery]) Arcgis
)...) )...)
} }
// CreatorFiles starts a query for related objects on fileupload.file
func (o *User) CreatorFiles(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadFilesQuery {
return FileuploadFiles.Query(append(mods,
sm.Where(FileuploadFiles.Columns.CreatorID.EQ(psql.Arg(o.ID))),
)...)
}
func (os UserSlice) CreatorFiles(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadFilesQuery {
pkID := make(pgtypes.Array[int32], 0, len(os))
for _, o := range os {
if o == nil {
continue
}
pkID = append(pkID, o.ID)
}
PKArgExpr := psql.Select(sm.Columns(
psql.F("unnest", psql.Cast(psql.Arg(pkID), "integer[]")),
))
return FileuploadFiles.Query(append(mods,
sm.Where(psql.Group(FileuploadFiles.Columns.CreatorID).OP("IN", PKArgExpr)),
)...)
}
// CreatorNoteAudios starts a query for related objects on note_audio // CreatorNoteAudios starts a query for related objects on note_audio
func (o *User) CreatorNoteAudios(mods ...bob.Mod[*dialect.SelectQuery]) NoteAudiosQuery { func (o *User) CreatorNoteAudios(mods ...bob.Mod[*dialect.SelectQuery]) NoteAudiosQuery {
return NoteAudios.Query(append(mods, return NoteAudios.Query(append(mods,
@ -871,6 +896,74 @@ func (user0 *User) AttachPublicUserUser(ctx context.Context, exec bob.Executor,
return nil return nil
} }
func insertUserCreatorFiles0(ctx context.Context, exec bob.Executor, fileuploadFiles1 []*FileuploadFileSetter, user0 *User) (FileuploadFileSlice, error) {
for i := range fileuploadFiles1 {
fileuploadFiles1[i].CreatorID = omit.From(user0.ID)
}
ret, err := FileuploadFiles.Insert(bob.ToMods(fileuploadFiles1...)).All(ctx, exec)
if err != nil {
return ret, fmt.Errorf("insertUserCreatorFiles0: %w", err)
}
return ret, nil
}
func attachUserCreatorFiles0(ctx context.Context, exec bob.Executor, count int, fileuploadFiles1 FileuploadFileSlice, user0 *User) (FileuploadFileSlice, error) {
setter := &FileuploadFileSetter{
CreatorID: omit.From(user0.ID),
}
err := fileuploadFiles1.UpdateAll(ctx, exec, *setter)
if err != nil {
return nil, fmt.Errorf("attachUserCreatorFiles0: %w", err)
}
return fileuploadFiles1, nil
}
func (user0 *User) InsertCreatorFiles(ctx context.Context, exec bob.Executor, related ...*FileuploadFileSetter) error {
if len(related) == 0 {
return nil
}
var err error
fileuploadFiles1, err := insertUserCreatorFiles0(ctx, exec, related, user0)
if err != nil {
return err
}
user0.R.CreatorFiles = append(user0.R.CreatorFiles, fileuploadFiles1...)
for _, rel := range fileuploadFiles1 {
rel.R.CreatorUser = user0
}
return nil
}
// AttachCreatorFiles makes user0 the creator of the given existing files,
// updating the database rows and the in-memory relationship structs on both
// sides. A call with no files is a no-op.
func (user0 *User) AttachCreatorFiles(ctx context.Context, exec bob.Executor, related ...*FileuploadFile) error {
	if len(related) == 0 {
		return nil
	}
	files := FileuploadFileSlice(related)
	if _, err := attachUserCreatorFiles0(ctx, exec, len(related), files, user0); err != nil {
		return err
	}
	for _, rel := range related {
		rel.R.CreatorUser = user0
	}
	user0.R.CreatorFiles = append(user0.R.CreatorFiles, files...)
	return nil
}
func insertUserCreatorNoteAudios0(ctx context.Context, exec bob.Executor, noteAudios1 []*NoteAudioSetter, user0 *User) (NoteAudioSlice, error) { func insertUserCreatorNoteAudios0(ctx context.Context, exec bob.Executor, noteAudios1 []*NoteAudioSetter, user0 *User) (NoteAudioSlice, error) {
for i := range noteAudios1 { for i := range noteAudios1 {
noteAudios1[i].CreatorID = omit.From(user0.ID) noteAudios1[i].CreatorID = omit.From(user0.ID)
@ -1383,6 +1476,20 @@ func (o *User) Preload(name string, retrieved any) error {
} }
} }
return nil return nil
case "CreatorFiles":
rels, ok := retrieved.(FileuploadFileSlice)
if !ok {
return fmt.Errorf("user cannot load %T as %q", retrieved, name)
}
o.R.CreatorFiles = rels
for _, rel := range rels {
if rel != nil {
rel.R.CreatorUser = o
}
}
return nil
case "CreatorNoteAudios": case "CreatorNoteAudios":
rels, ok := retrieved.(NoteAudioSlice) rels, ok := retrieved.(NoteAudioSlice)
if !ok { if !ok {
@ -1508,6 +1615,7 @@ func buildUserPreloader() userPreloader {
type userThenLoader[Q orm.Loadable] struct { type userThenLoader[Q orm.Loadable] struct {
PublicUserUser func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] PublicUserUser func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
CreatorFiles func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
CreatorNoteAudios func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] CreatorNoteAudios func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
DeletorNoteAudios func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] DeletorNoteAudios func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
CreatorNoteImages func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] CreatorNoteImages func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
@ -1521,6 +1629,9 @@ func buildUserThenLoader[Q orm.Loadable]() userThenLoader[Q] {
type PublicUserUserLoadInterface interface { type PublicUserUserLoadInterface interface {
LoadPublicUserUser(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error LoadPublicUserUser(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
} }
// CreatorFilesLoadInterface is satisfied by any model that can eager-load its
// CreatorFiles relationship; the generated then-loader dispatches through it.
type CreatorFilesLoadInterface interface {
	LoadCreatorFiles(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
}
type CreatorNoteAudiosLoadInterface interface { type CreatorNoteAudiosLoadInterface interface {
LoadCreatorNoteAudios(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error LoadCreatorNoteAudios(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
} }
@ -1550,6 +1661,12 @@ func buildUserThenLoader[Q orm.Loadable]() userThenLoader[Q] {
return retrieved.LoadPublicUserUser(ctx, exec, mods...) return retrieved.LoadPublicUserUser(ctx, exec, mods...)
}, },
), ),
CreatorFiles: thenLoadBuilder[Q](
"CreatorFiles",
func(ctx context.Context, exec bob.Executor, retrieved CreatorFilesLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
return retrieved.LoadCreatorFiles(ctx, exec, mods...)
},
),
CreatorNoteAudios: thenLoadBuilder[Q]( CreatorNoteAudios: thenLoadBuilder[Q](
"CreatorNoteAudios", "CreatorNoteAudios",
func(ctx context.Context, exec bob.Executor, retrieved CreatorNoteAudiosLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { func(ctx context.Context, exec bob.Executor, retrieved CreatorNoteAudiosLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
@ -1656,6 +1773,67 @@ func (os UserSlice) LoadPublicUserUser(ctx context.Context, exec bob.Executor, m
return nil return nil
} }
// LoadCreatorFiles loads the user's CreatorFiles into the .R struct
func (o *User) LoadCreatorFiles(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
	if o == nil {
		return nil
	}
	// Drop any previously loaded relations before querying fresh ones.
	o.R.CreatorFiles = nil
	related, err := o.CreatorFiles(mods...).All(ctx, exec)
	if err != nil {
		return err
	}
	o.R.CreatorFiles = related
	// Wire the back-reference on each loaded file.
	for _, rel := range related {
		rel.R.CreatorUser = o
	}
	return nil
}
// LoadCreatorFiles loads the user's CreatorFiles into the .R struct
func (os UserSlice) LoadCreatorFiles(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
	if len(os) == 0 {
		return nil
	}
	// One query fetches the files for every user in the slice.
	related, err := os.CreatorFiles(mods...).All(ctx, exec)
	if err != nil {
		return err
	}
	// Reset previously loaded relations on every non-nil user.
	for _, o := range os {
		if o != nil {
			o.R.CreatorFiles = nil
		}
	}
	// Distribute each file to its creator and wire the back-reference.
	for _, o := range os {
		if o == nil {
			continue
		}
		for _, rel := range related {
			if o.ID != rel.CreatorID {
				continue
			}
			rel.R.CreatorUser = o
			o.R.CreatorFiles = append(o.R.CreatorFiles, rel)
		}
	}
	return nil
}
// LoadCreatorNoteAudios loads the user's CreatorNoteAudios into the .R struct // LoadCreatorNoteAudios loads the user's CreatorNoteAudios into the .R struct
func (o *User) LoadCreatorNoteAudios(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { func (o *User) LoadCreatorNoteAudios(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
if o == nil { if o == nil {
@ -2083,6 +2261,7 @@ func (os UserSlice) LoadOrganization(ctx context.Context, exec bob.Executor, mod
// userC is where relationship counts are stored. // userC is where relationship counts are stored.
type userC struct { type userC struct {
PublicUserUser *int64 PublicUserUser *int64
CreatorFiles *int64
CreatorNoteAudios *int64 CreatorNoteAudios *int64
DeletorNoteAudios *int64 DeletorNoteAudios *int64
CreatorNoteImages *int64 CreatorNoteImages *int64
@ -2100,6 +2279,8 @@ func (o *User) PreloadCount(name string, count int64) error {
switch name { switch name {
case "PublicUserUser": case "PublicUserUser":
o.C.PublicUserUser = &count o.C.PublicUserUser = &count
case "CreatorFiles":
o.C.CreatorFiles = &count
case "CreatorNoteAudios": case "CreatorNoteAudios":
o.C.CreatorNoteAudios = &count o.C.CreatorNoteAudios = &count
case "DeletorNoteAudios": case "DeletorNoteAudios":
@ -2118,6 +2299,7 @@ func (o *User) PreloadCount(name string, count int64) error {
type userCountPreloader struct { type userCountPreloader struct {
PublicUserUser func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader PublicUserUser func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader
CreatorFiles func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader
CreatorNoteAudios func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader CreatorNoteAudios func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader
DeletorNoteAudios func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader DeletorNoteAudios func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader
CreatorNoteImages func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader CreatorNoteImages func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader
@ -2145,6 +2327,23 @@ func buildUserCountPreloader() userCountPreloader {
return psql.Group(psql.Select(subqueryMods...).Expression) return psql.Group(psql.Select(subqueryMods...).Expression)
}) })
}, },
CreatorFiles: func(mods ...bob.Mod[*dialect.SelectQuery]) psql.Preloader {
return countPreloader[*User]("CreatorFiles", func(parent string) bob.Expression {
// Build a correlated subquery: (SELECT COUNT(*) FROM related WHERE fk = parent.pk)
if parent == "" {
parent = Users.Alias()
}
subqueryMods := []bob.Mod[*dialect.SelectQuery]{
sm.Columns(psql.Raw("count(*)")),
sm.From(FileuploadFiles.Name()),
sm.Where(psql.Quote(FileuploadFiles.Alias(), "creator_id").EQ(psql.Quote(parent, "id"))),
}
subqueryMods = append(subqueryMods, mods...)
return psql.Group(psql.Select(subqueryMods...).Expression)
})
},
CreatorNoteAudios: func(mods ...bob.Mod[*dialect.SelectQuery]) psql.Preloader { CreatorNoteAudios: func(mods ...bob.Mod[*dialect.SelectQuery]) psql.Preloader {
return countPreloader[*User]("CreatorNoteAudios", func(parent string) bob.Expression { return countPreloader[*User]("CreatorNoteAudios", func(parent string) bob.Expression {
// Build a correlated subquery: (SELECT COUNT(*) FROM related WHERE fk = parent.pk) // Build a correlated subquery: (SELECT COUNT(*) FROM related WHERE fk = parent.pk)
@ -2252,6 +2451,7 @@ func buildUserCountPreloader() userCountPreloader {
type userCountThenLoader[Q orm.Loadable] struct { type userCountThenLoader[Q orm.Loadable] struct {
PublicUserUser func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] PublicUserUser func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
CreatorFiles func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
CreatorNoteAudios func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] CreatorNoteAudios func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
DeletorNoteAudios func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] DeletorNoteAudios func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
CreatorNoteImages func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] CreatorNoteImages func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
@ -2264,6 +2464,9 @@ func buildUserCountThenLoader[Q orm.Loadable]() userCountThenLoader[Q] {
type PublicUserUserCountInterface interface { type PublicUserUserCountInterface interface {
LoadCountPublicUserUser(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error LoadCountPublicUserUser(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
} }
// CreatorFilesCountInterface is satisfied by any model that can load the
// count of its CreatorFiles relationship; the generated count then-loader
// dispatches through it.
type CreatorFilesCountInterface interface {
	LoadCountCreatorFiles(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
}
type CreatorNoteAudiosCountInterface interface { type CreatorNoteAudiosCountInterface interface {
LoadCountCreatorNoteAudios(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error LoadCountCreatorNoteAudios(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
} }
@ -2290,6 +2493,12 @@ func buildUserCountThenLoader[Q orm.Loadable]() userCountThenLoader[Q] {
return retrieved.LoadCountPublicUserUser(ctx, exec, mods...) return retrieved.LoadCountPublicUserUser(ctx, exec, mods...)
}, },
), ),
CreatorFiles: countThenLoadBuilder[Q](
"CreatorFiles",
func(ctx context.Context, exec bob.Executor, retrieved CreatorFilesCountInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
return retrieved.LoadCountCreatorFiles(ctx, exec, mods...)
},
),
CreatorNoteAudios: countThenLoadBuilder[Q]( CreatorNoteAudios: countThenLoadBuilder[Q](
"CreatorNoteAudios", "CreatorNoteAudios",
func(ctx context.Context, exec bob.Executor, retrieved CreatorNoteAudiosCountInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { func(ctx context.Context, exec bob.Executor, retrieved CreatorNoteAudiosCountInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
@ -2359,6 +2568,36 @@ func (os UserSlice) LoadCountPublicUserUser(ctx context.Context, exec bob.Execut
return nil return nil
} }
// LoadCountCreatorFiles loads the count of CreatorFiles into the C struct
func (o *User) LoadCountCreatorFiles(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
	if o == nil {
		return nil
	}
	n, err := o.CreatorFiles(mods...).Count(ctx, exec)
	if err != nil {
		return err
	}
	o.C.CreatorFiles = &n
	return nil
}
// LoadCountCreatorFiles loads the count of CreatorFiles for a slice
func (os UserSlice) LoadCountCreatorFiles(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
	if len(os) == 0 {
		return nil
	}
	// Counts are loaded per-user; one query each.
	for _, o := range os {
		err := o.LoadCountCreatorFiles(ctx, exec, mods...)
		if err != nil {
			return err
		}
	}
	return nil
}
// LoadCountCreatorNoteAudios loads the count of CreatorNoteAudios into the C struct // LoadCountCreatorNoteAudios loads the count of CreatorNoteAudios into the C struct
func (o *User) LoadCountCreatorNoteAudios(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { func (o *User) LoadCountCreatorNoteAudios(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
if o == nil { if o == nil {
@ -2542,6 +2781,7 @@ func (os UserSlice) LoadCountUserOauthTokens(ctx context.Context, exec bob.Execu
type userJoins[Q dialect.Joinable] struct { type userJoins[Q dialect.Joinable] struct {
typ string typ string
PublicUserUser modAs[Q, arcgisuserColumns] PublicUserUser modAs[Q, arcgisuserColumns]
CreatorFiles modAs[Q, fileuploadFileColumns]
CreatorNoteAudios modAs[Q, noteAudioColumns] CreatorNoteAudios modAs[Q, noteAudioColumns]
DeletorNoteAudios modAs[Q, noteAudioColumns] DeletorNoteAudios modAs[Q, noteAudioColumns]
CreatorNoteImages modAs[Q, noteImageColumns] CreatorNoteImages modAs[Q, noteImageColumns]
@ -2572,6 +2812,20 @@ func buildUserJoins[Q dialect.Joinable](cols userColumns, typ string) userJoins[
return mods return mods
}, },
}, },
CreatorFiles: modAs[Q, fileuploadFileColumns]{
c: FileuploadFiles.Columns,
f: func(to fileuploadFileColumns) bob.Mod[Q] {
mods := make(mods.QueryMods[Q], 0, 1)
{
mods = append(mods, dialect.Join[Q](typ, FileuploadFiles.Name().As(to.Alias())).On(
to.CreatorID.EQ(cols.ID),
))
}
return mods
},
},
CreatorNoteAudios: modAs[Q, noteAudioColumns]{ CreatorNoteAudios: modAs[Q, noteAudioColumns]{
c: NoteAudios.Columns, c: NoteAudios.Columns,
f: func(to noteAudioColumns) bob.Mod[Q] { f: func(to noteAudioColumns) bob.Mod[Q] {

50
platform/pool.go Normal file
View file

@ -0,0 +1,50 @@
package platform
import (
"context"
"fmt"
"time"
"github.com/Gleipnir-Technology/nidus-sync/db"
"github.com/Gleipnir-Technology/nidus-sync/db/enums"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/userfile"
"github.com/aarondl/opt/omit"
"github.com/aarondl/opt/omitnull"
)
// PoolUpload identifies a stored pool-list CSV upload.
type PoolUpload struct {
	// ID is the primary key of the fileupload.file row backing this upload.
	ID int32
}
// NewPoolUpload records a pool-list CSV upload in the database: it inserts a
// fileupload.file row owned by u plus the fileupload.csv row tagging the file
// as a pool list, atomically in a single transaction. It returns the new
// upload's identifier, or an error if any step of the transaction fails.
func NewPoolUpload(ctx context.Context, u *models.User, upload userfile.FileUpload) (PoolUpload, error) {
	txn, err := db.PGInstance.BobDB.BeginTx(ctx, nil)
	if err != nil {
		return PoolUpload{}, fmt.Errorf("Failed to begin transaction: %w", err)
	}
	// Ensure the transaction is released on every error path; rolling back
	// after a successful commit is a harmless no-op.
	defer func() { _ = txn.Rollback(ctx) }()
	file, err := models.FileuploadFiles.Insert(&models.FileuploadFileSetter{
		ContentType: omit.From(upload.ContentType),
		Created:     omit.From(time.Now()),
		CreatorID:   omit.From(u.ID),
		Deleted:     omitnull.FromPtr[time.Time](nil),
		Name:        omit.From(upload.Name),
		Status:      omit.From(enums.FileuploadFilestatustypeUploaded),
		SizeBytes:   omit.From(int32(upload.SizeBytes)),
		FileUUID:    omit.From(upload.UUID),
	}).One(ctx, txn)
	if err != nil {
		return PoolUpload{}, fmt.Errorf("Failed to create file upload: %w", err)
	}
	_, err = models.FileuploadCSVS.Insert(&models.FileuploadCSVSetter{
		FileID: omit.From(file.ID),
		Type:   omit.From(enums.FileuploadCsvtypePoollist),
	}).One(ctx, txn)
	if err != nil {
		return PoolUpload{}, fmt.Errorf("Failed to create csv: %w", err)
	}
	// A commit can fail (e.g. connection loss); surface it instead of
	// reporting success for an upload that was never persisted.
	if err := txn.Commit(ctx); err != nil {
		return PoolUpload{}, fmt.Errorf("Failed to commit transaction: %w", err)
	}
	return PoolUpload{
		ID: file.ID,
	}, nil
}

View file

@ -5,14 +5,20 @@ import (
"github.com/Gleipnir-Technology/nidus-sync/userfile" "github.com/Gleipnir-Technology/nidus-sync/userfile"
"github.com/go-chi/chi/v5" "github.com/go-chi/chi/v5"
"github.com/google/uuid"
) )
// ServeImageByUUID reads an image with the given UUID from disk and writes it to the HTTP response // ServeImageByUUID reads an image with the given UUID from disk and writes it to the HTTP response
func getImageByUUID(w http.ResponseWriter, r *http.Request) { func getImageByUUID(w http.ResponseWriter, r *http.Request) {
uid := chi.URLParam(r, "uuid") u := chi.URLParam(r, "uuid")
if uid == "" { if u == "" {
http.NotFound(w, r) http.NotFound(w, r)
return return
} }
uid, err := uuid.Parse(u)
if err != nil {
http.Error(w, "Failed to parse uuid", http.StatusBadRequest)
return
}
userfile.PublicImageFileToResponse(w, uid) userfile.PublicImageFileToResponse(w, uid)
} }

View file

@ -1,10 +1,13 @@
package sync package sync
import ( import (
"fmt"
"net/http"
"github.com/Gleipnir-Technology/nidus-sync/db/models" "github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/html" "github.com/Gleipnir-Technology/nidus-sync/html"
"github.com/Gleipnir-Technology/nidus-sync/platform"
"github.com/Gleipnir-Technology/nidus-sync/userfile" "github.com/Gleipnir-Technology/nidus-sync/userfile"
"net/http"
) )
type ContentPoolList struct { type ContentPoolList struct {
@ -52,5 +55,15 @@ func postPoolUpload(w http.ResponseWriter, r *http.Request, u *models.User) {
respondError(w, "Failed to extract image uploads", err, http.StatusInternalServerError) respondError(w, "Failed to extract image uploads", err, http.StatusInternalServerError)
return return
} }
images, err := saveImageUploads(r.Context(), tx, uploads) if len(uploads) == 0 {
respondError(w, "No upload found", nil, http.StatusBadRequest)
return
}
if len(uploads) != 1 {
respondError(w, "You must only submit one file at a time", nil, http.StatusBadRequest)
return
}
upload := uploads[0]
pool_upload, err := platform.NewPoolUpload(r.Context(), u, upload)
http.Redirect(w, r, fmt.Sprintf("/pool/upload/%d", pool_upload.ID), http.StatusFound)
} }

View file

@ -13,10 +13,9 @@ import (
type FileUpload struct { type FileUpload struct {
ContentType string ContentType string
Name string
UploadFilesize int SizeBytes int
UploadFilename string UUID uuid.UUID
UUID uuid.UUID
} }
func SaveFileUpload(r *http.Request, name string, subdir string, extension string) ([]FileUpload, error) { func SaveFileUpload(r *http.Request, name string, subdir string, extension string) ([]FileUpload, error) {
@ -69,9 +68,9 @@ func saveFileUpload(headers *multipart.FileHeader, subdir string, extension stri
} }
log.Info().Int("size", len(file_bytes)).Str("uploaded_filename", headers.Filename).Str("content-type", content_type).Str("uuid", u.String()).Msg("Saved an uploaded file to disk") log.Info().Int("size", len(file_bytes)).Str("uploaded_filename", headers.Filename).Str("content-type", content_type).Str("uuid", u.String()).Msg("Saved an uploaded file to disk")
return FileUpload{ return FileUpload{
ContentType: content_type, ContentType: content_type,
UploadFilename: headers.Filename, Name: headers.Filename,
UploadFilesize: len(file_bytes), SizeBytes: len(file_bytes),
UUID: u, UUID: u,
}, nil }, nil
} }