Do file upload, show list of uploads, do initial processing.

This commit is contained in:
Eli Ribble 2026-02-09 18:25:44 +00:00
parent 8d4195a024
commit 135ad2b73e
No known key found for this signature in database
45 changed files with 7126 additions and 1464 deletions

View file

@ -7,6 +7,7 @@ aliases:
organization:
relationships:
publicreport.pool.pool_organization_id_fkey: "PublicreportPool"
fieldseeker.pool.pool_organization_id_fkey: "FieldseekerPool"
user_:
up_plural: "Users"
up_singular: "User"

View file

@ -1,17 +0,0 @@
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.
package dberrors
var FileuploadErrorErrors = &fileuploadErrorErrors{
ErrUniqueErrorPkey: &UniqueConstraintError{
schema: "fileupload",
table: "error",
columns: []string{"id"},
s: "error_pkey",
},
}
type fileuploadErrorErrors struct {
ErrUniqueErrorPkey *UniqueConstraintError
}

View file

@ -0,0 +1,17 @@
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.
package dberrors
var FileuploadErrorCSVErrors = &fileuploadErrorCSVErrors{
ErrUniqueErrorCsvPkey: &UniqueConstraintError{
schema: "fileupload",
table: "error_csv",
columns: []string{"id"},
s: "error_csv_pkey",
},
}
type fileuploadErrorCSVErrors struct {
ErrUniqueErrorCsvPkey *UniqueConstraintError
}

View file

@ -0,0 +1,17 @@
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.
package dberrors
var FileuploadErrorFileErrors = &fileuploadErrorFileErrors{
ErrUniqueErrorFilePkey: &UniqueConstraintError{
schema: "fileupload",
table: "error_file",
columns: []string{"id"},
s: "error_file_pkey",
},
}
type fileuploadErrorFileErrors struct {
ErrUniqueErrorFilePkey *UniqueConstraintError
}

17
db/dberrors/pool.bob.go Normal file
View file

@ -0,0 +1,17 @@
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.
package dberrors
var PoolErrors = &poolErrors{
ErrUniquePoolPkey: &UniqueConstraintError{
schema: "",
table: "pool",
columns: []string{"id", "version"},
s: "pool_pkey",
},
}
type poolErrors struct {
ErrUniquePoolPkey *UniqueConstraintError
}

View file

@ -15,6 +15,15 @@ var FileuploadCSVS = Table[
Schema: "fileupload",
Name: "csv",
Columns: fileuploadCSVColumns{
Committed: column{
Name: "committed",
DBType: "timestamp without time zone",
Default: "NULL",
Comment: "",
Nullable: true,
Generated: false,
AutoIncr: false,
},
FileID: column{
Name: "file_id",
DBType: "integer",
@ -24,6 +33,15 @@ var FileuploadCSVS = Table[
Generated: false,
AutoIncr: false,
},
Rowcount: column{
Name: "rowcount",
DBType: "integer",
Default: "",
Comment: "",
Nullable: false,
Generated: false,
AutoIncr: false,
},
Type: column{
Name: "type_",
DBType: "fileupload.csvtype",
@ -74,13 +92,15 @@ var FileuploadCSVS = Table[
}
type fileuploadCSVColumns struct {
FileID column
Type column
Committed column
FileID column
Rowcount column
Type column
}
func (c fileuploadCSVColumns) AsSlice() []column {
return []column{
c.FileID, c.Type,
c.Committed, c.FileID, c.Rowcount, c.Type,
}
}

View file

@ -0,0 +1,147 @@
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.
package dbinfo
import "github.com/aarondl/opt/null"
var FileuploadErrorCSVS = Table[
fileuploadErrorCSVColumns,
fileuploadErrorCSVIndexes,
fileuploadErrorCSVForeignKeys,
fileuploadErrorCSVUniques,
fileuploadErrorCSVChecks,
]{
Schema: "fileupload",
Name: "error_csv",
Columns: fileuploadErrorCSVColumns{
Col: column{
Name: "col",
DBType: "integer",
Default: "",
Comment: "",
Nullable: false,
Generated: false,
AutoIncr: false,
},
CSVFileID: column{
Name: "csv_file_id",
DBType: "integer",
Default: "",
Comment: "",
Nullable: false,
Generated: false,
AutoIncr: false,
},
ID: column{
Name: "id",
DBType: "integer",
Default: "nextval('fileupload.error_csv_id_seq'::regclass)",
Comment: "",
Nullable: false,
Generated: false,
AutoIncr: false,
},
Line: column{
Name: "line",
DBType: "integer",
Default: "",
Comment: "",
Nullable: false,
Generated: false,
AutoIncr: false,
},
Message: column{
Name: "message",
DBType: "text",
Default: "",
Comment: "",
Nullable: false,
Generated: false,
AutoIncr: false,
},
},
Indexes: fileuploadErrorCSVIndexes{
ErrorCSVPkey: index{
Type: "btree",
Name: "error_csv_pkey",
Columns: []indexColumn{
{
Name: "id",
Desc: null.FromCond(false, true),
IsExpression: false,
},
},
Unique: true,
Comment: "",
NullsFirst: []bool{false},
NullsDistinct: false,
Where: "",
Include: []string{},
},
},
PrimaryKey: &constraint{
Name: "error_csv_pkey",
Columns: []string{"id"},
Comment: "",
},
ForeignKeys: fileuploadErrorCSVForeignKeys{
FileuploadErrorCSVErrorCSVCSVFileIDFkey: foreignKey{
constraint: constraint{
Name: "fileupload.error_csv.error_csv_csv_file_id_fkey",
Columns: []string{"csv_file_id"},
Comment: "",
},
ForeignTable: "fileupload.csv",
ForeignColumns: []string{"file_id"},
},
},
Comment: "",
}
type fileuploadErrorCSVColumns struct {
Col column
CSVFileID column
ID column
Line column
Message column
}
func (c fileuploadErrorCSVColumns) AsSlice() []column {
return []column{
c.Col, c.CSVFileID, c.ID, c.Line, c.Message,
}
}
type fileuploadErrorCSVIndexes struct {
ErrorCSVPkey index
}
func (i fileuploadErrorCSVIndexes) AsSlice() []index {
return []index{
i.ErrorCSVPkey,
}
}
type fileuploadErrorCSVForeignKeys struct {
FileuploadErrorCSVErrorCSVCSVFileIDFkey foreignKey
}
func (f fileuploadErrorCSVForeignKeys) AsSlice() []foreignKey {
return []foreignKey{
f.FileuploadErrorCSVErrorCSVCSVFileIDFkey,
}
}
type fileuploadErrorCSVUniques struct{}
func (u fileuploadErrorCSVUniques) AsSlice() []constraint {
return []constraint{}
}
type fileuploadErrorCSVChecks struct{}
func (c fileuploadErrorCSVChecks) AsSlice() []check {
return []check{}
}

View file

@ -5,16 +5,16 @@ package dbinfo
import "github.com/aarondl/opt/null"
var FileuploadErrors = Table[
fileuploadErrorColumns,
fileuploadErrorIndexes,
fileuploadErrorForeignKeys,
fileuploadErrorUniques,
fileuploadErrorChecks,
var FileuploadErrorFiles = Table[
fileuploadErrorFileColumns,
fileuploadErrorFileIndexes,
fileuploadErrorFileForeignKeys,
fileuploadErrorFileUniques,
fileuploadErrorFileChecks,
]{
Schema: "fileupload",
Name: "error",
Columns: fileuploadErrorColumns{
Name: "error_file",
Columns: fileuploadErrorFileColumns{
FileID: column{
Name: "file_id",
DBType: "integer",
@ -27,16 +27,7 @@ var FileuploadErrors = Table[
ID: column{
Name: "id",
DBType: "integer",
Default: "nextval('fileupload.error_id_seq'::regclass)",
Comment: "",
Nullable: false,
Generated: false,
AutoIncr: false,
},
Line: column{
Name: "line",
DBType: "integer",
Default: "",
Default: "nextval('fileupload.error_file_id_seq'::regclass)",
Comment: "",
Nullable: false,
Generated: false,
@ -52,10 +43,10 @@ var FileuploadErrors = Table[
AutoIncr: false,
},
},
Indexes: fileuploadErrorIndexes{
ErrorPkey: index{
Indexes: fileuploadErrorFileIndexes{
ErrorFilePkey: index{
Type: "btree",
Name: "error_pkey",
Name: "error_file_pkey",
Columns: []indexColumn{
{
Name: "id",
@ -72,14 +63,14 @@ var FileuploadErrors = Table[
},
},
PrimaryKey: &constraint{
Name: "error_pkey",
Name: "error_file_pkey",
Columns: []string{"id"},
Comment: "",
},
ForeignKeys: fileuploadErrorForeignKeys{
FileuploadErrorErrorFileIDFkey: foreignKey{
ForeignKeys: fileuploadErrorFileForeignKeys{
FileuploadErrorFileErrorFileFileIDFkey: foreignKey{
constraint: constraint{
Name: "fileupload.error.error_file_id_fkey",
Name: "fileupload.error_file.error_file_file_id_fkey",
Columns: []string{"file_id"},
Comment: "",
},
@ -91,47 +82,46 @@ var FileuploadErrors = Table[
Comment: "",
}
type fileuploadErrorColumns struct {
type fileuploadErrorFileColumns struct {
FileID column
ID column
Line column
Message column
}
func (c fileuploadErrorColumns) AsSlice() []column {
func (c fileuploadErrorFileColumns) AsSlice() []column {
return []column{
c.FileID, c.ID, c.Line, c.Message,
c.FileID, c.ID, c.Message,
}
}
type fileuploadErrorIndexes struct {
ErrorPkey index
type fileuploadErrorFileIndexes struct {
ErrorFilePkey index
}
func (i fileuploadErrorIndexes) AsSlice() []index {
func (i fileuploadErrorFileIndexes) AsSlice() []index {
return []index{
i.ErrorPkey,
i.ErrorFilePkey,
}
}
type fileuploadErrorForeignKeys struct {
FileuploadErrorErrorFileIDFkey foreignKey
type fileuploadErrorFileForeignKeys struct {
FileuploadErrorFileErrorFileFileIDFkey foreignKey
}
func (f fileuploadErrorForeignKeys) AsSlice() []foreignKey {
func (f fileuploadErrorFileForeignKeys) AsSlice() []foreignKey {
return []foreignKey{
f.FileuploadErrorErrorFileIDFkey,
f.FileuploadErrorFileErrorFileFileIDFkey,
}
}
type fileuploadErrorUniques struct{}
type fileuploadErrorFileUniques struct{}
func (u fileuploadErrorUniques) AsSlice() []constraint {
func (u fileuploadErrorFileUniques) AsSlice() []constraint {
return []constraint{}
}
type fileuploadErrorChecks struct{}
type fileuploadErrorFileChecks struct{}
func (c fileuploadErrorChecks) AsSlice() []check {
func (c fileuploadErrorFileChecks) AsSlice() []check {
return []check{}
}

272
db/dbinfo/pool.bob.go Normal file
View file

@ -0,0 +1,272 @@
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.
package dbinfo
import "github.com/aarondl/opt/null"
var Pools = Table[
poolColumns,
poolIndexes,
poolForeignKeys,
poolUniques,
poolChecks,
]{
Schema: "",
Name: "pool",
Columns: poolColumns{
AddressCity: column{
Name: "address_city",
DBType: "text",
Default: "",
Comment: "",
Nullable: false,
Generated: false,
AutoIncr: false,
},
AddressPostalCode: column{
Name: "address_postal_code",
DBType: "text",
Default: "",
Comment: "",
Nullable: false,
Generated: false,
AutoIncr: false,
},
AddressStreet: column{
Name: "address_street",
DBType: "text",
Default: "",
Comment: "",
Nullable: false,
Generated: false,
AutoIncr: false,
},
Condition: column{
Name: "condition",
DBType: "public.poolconditiontype",
Default: "",
Comment: "",
Nullable: false,
Generated: false,
AutoIncr: false,
},
Created: column{
Name: "created",
DBType: "timestamp without time zone",
Default: "",
Comment: "",
Nullable: false,
Generated: false,
AutoIncr: false,
},
CreatorID: column{
Name: "creator_id",
DBType: "integer",
Default: "",
Comment: "",
Nullable: false,
Generated: false,
AutoIncr: false,
},
Deleted: column{
Name: "deleted",
DBType: "timestamp without time zone",
Default: "NULL",
Comment: "",
Nullable: true,
Generated: false,
AutoIncr: false,
},
Committed: column{
Name: "committed",
DBType: "boolean",
Default: "",
Comment: "",
Nullable: false,
Generated: false,
AutoIncr: false,
},
ID: column{
Name: "id",
DBType: "integer",
Default: "nextval('pool_id_seq'::regclass)",
Comment: "",
Nullable: false,
Generated: false,
AutoIncr: false,
},
Notes: column{
Name: "notes",
DBType: "text",
Default: "",
Comment: "",
Nullable: false,
Generated: false,
AutoIncr: false,
},
OrganizationID: column{
Name: "organization_id",
DBType: "integer",
Default: "",
Comment: "",
Nullable: false,
Generated: false,
AutoIncr: false,
},
PropertyOwnerName: column{
Name: "property_owner_name",
DBType: "text",
Default: "",
Comment: "",
Nullable: false,
Generated: false,
AutoIncr: false,
},
PropertyOwnerPhone: column{
Name: "property_owner_phone",
DBType: "phone",
Default: "NULL",
Comment: "",
Nullable: true,
Generated: false,
AutoIncr: false,
},
ResidentOwned: column{
Name: "resident_owned",
DBType: "boolean",
Default: "NULL",
Comment: "",
Nullable: true,
Generated: false,
AutoIncr: false,
},
ResidentPhone: column{
Name: "resident_phone",
DBType: "phone",
Default: "NULL",
Comment: "",
Nullable: true,
Generated: false,
AutoIncr: false,
},
Version: column{
Name: "version",
DBType: "integer",
Default: "",
Comment: "",
Nullable: false,
Generated: false,
AutoIncr: false,
},
},
Indexes: poolIndexes{
PoolPkey: index{
Type: "btree",
Name: "pool_pkey",
Columns: []indexColumn{
{
Name: "id",
Desc: null.FromCond(false, true),
IsExpression: false,
},
{
Name: "version",
Desc: null.FromCond(false, true),
IsExpression: false,
},
},
Unique: true,
Comment: "",
NullsFirst: []bool{false, false},
NullsDistinct: false,
Where: "",
Include: []string{},
},
},
PrimaryKey: &constraint{
Name: "pool_pkey",
Columns: []string{"id", "version"},
Comment: "",
},
ForeignKeys: poolForeignKeys{
PoolPoolCreatorIDFkey: foreignKey{
constraint: constraint{
Name: "pool.pool_creator_id_fkey",
Columns: []string{"creator_id"},
Comment: "",
},
ForeignTable: "user_",
ForeignColumns: []string{"id"},
},
PoolPoolOrganizationIDFkey: foreignKey{
constraint: constraint{
Name: "pool.pool_organization_id_fkey",
Columns: []string{"organization_id"},
Comment: "",
},
ForeignTable: "organization",
ForeignColumns: []string{"id"},
},
},
Comment: "",
}
type poolColumns struct {
AddressCity column
AddressPostalCode column
AddressStreet column
Condition column
Created column
CreatorID column
Deleted column
Committed column
ID column
Notes column
OrganizationID column
PropertyOwnerName column
PropertyOwnerPhone column
ResidentOwned column
ResidentPhone column
Version column
}
func (c poolColumns) AsSlice() []column {
return []column{
c.AddressCity, c.AddressPostalCode, c.AddressStreet, c.Condition, c.Created, c.CreatorID, c.Deleted, c.Committed, c.ID, c.Notes, c.OrganizationID, c.PropertyOwnerName, c.PropertyOwnerPhone, c.ResidentOwned, c.ResidentPhone, c.Version,
}
}
type poolIndexes struct {
PoolPkey index
}
func (i poolIndexes) AsSlice() []index {
return []index{
i.PoolPkey,
}
}
type poolForeignKeys struct {
PoolPoolCreatorIDFkey foreignKey
PoolPoolOrganizationIDFkey foreignKey
}
func (f poolForeignKeys) AsSlice() []foreignKey {
return []foreignKey{
f.PoolPoolCreatorIDFkey, f.PoolPoolOrganizationIDFkey,
}
}
type poolUniques struct{}
func (u poolUniques) AsSlice() []constraint {
return []constraint{}
}
type poolChecks struct{}
func (c poolChecks) AsSlice() []check {
return []check{}
}

View file

@ -651,14 +651,16 @@ func (e *FileuploadCsvtype) Scan(value any) error {
// Enum values for FileuploadFilestatustype
const (
FileuploadFilestatustypeUploaded FileuploadFilestatustype = "uploaded"
FileuploadFilestatustypeError FileuploadFilestatustype = "error"
FileuploadFilestatustypeParsed FileuploadFilestatustype = "parsed"
FileuploadFilestatustypeUploaded FileuploadFilestatustype = "uploaded"
)
func AllFileuploadFilestatustype() []FileuploadFilestatustype {
return []FileuploadFilestatustype{
FileuploadFilestatustypeUploaded,
FileuploadFilestatustypeError,
FileuploadFilestatustypeParsed,
FileuploadFilestatustypeUploaded,
}
}
@ -670,8 +672,9 @@ func (e FileuploadFilestatustype) String() string {
func (e FileuploadFilestatustype) Valid() bool {
switch e {
case FileuploadFilestatustypeUploaded,
FileuploadFilestatustypeParsed:
case FileuploadFilestatustypeError,
FileuploadFilestatustypeParsed,
FileuploadFilestatustypeUploaded:
return true
default:
return false
@ -1010,6 +1013,85 @@ func (e *Notificationtype) Scan(value any) error {
return nil
}
// Enum values for Poolconditiontype
const (
PoolconditiontypeGreen Poolconditiontype = "green"
PoolconditiontypeMurky Poolconditiontype = "murky"
PoolconditiontypeBlue Poolconditiontype = "blue"
PoolconditiontypeUnknown Poolconditiontype = "unknown"
)
func AllPoolconditiontype() []Poolconditiontype {
return []Poolconditiontype{
PoolconditiontypeGreen,
PoolconditiontypeMurky,
PoolconditiontypeBlue,
PoolconditiontypeUnknown,
}
}
type Poolconditiontype string
func (e Poolconditiontype) String() string {
return string(e)
}
func (e Poolconditiontype) Valid() bool {
switch e {
case PoolconditiontypeGreen,
PoolconditiontypeMurky,
PoolconditiontypeBlue,
PoolconditiontypeUnknown:
return true
default:
return false
}
}
// useful when testing in other packages
func (e Poolconditiontype) All() []Poolconditiontype {
return AllPoolconditiontype()
}
func (e Poolconditiontype) MarshalText() ([]byte, error) {
return []byte(e), nil
}
func (e *Poolconditiontype) UnmarshalText(text []byte) error {
return e.Scan(text)
}
func (e Poolconditiontype) MarshalBinary() ([]byte, error) {
return []byte(e), nil
}
func (e *Poolconditiontype) UnmarshalBinary(data []byte) error {
return e.Scan(data)
}
func (e Poolconditiontype) Value() (driver.Value, error) {
return string(e), nil
}
func (e *Poolconditiontype) Scan(value any) error {
switch x := value.(type) {
case string:
*e = Poolconditiontype(x)
case []byte:
*e = Poolconditiontype(x)
case nil:
return fmt.Errorf("cannot nil into Poolconditiontype")
default:
return fmt.Errorf("cannot scan type %T: %v", value, value)
}
if !e.Valid() {
return fmt.Errorf("invalid Poolconditiontype value: %s", *e)
}
return nil
}
// Enum values for PublicreportAccuracytype
const (
PublicreportAccuracytypeRooftop PublicreportAccuracytype = "rooftop"

View file

@ -176,15 +176,20 @@ var (
// Relationship Contexts for fileupload.csv
fileuploadCSVWithParentsCascadingCtx = newContextual[bool]("fileuploadCSVWithParentsCascading")
fileuploadCSVRelFileCtx = newContextual[bool]("fileupload.csv.fileupload.file.fileupload.csv.csv_file_id_fkey")
fileuploadCSVRelCSVFileErrorCSVSCtx = newContextual[bool]("fileupload.csv.fileupload.error_csv.fileupload.error_csv.error_csv_csv_file_id_fkey")
// Relationship Contexts for fileupload.error
fileuploadErrorWithParentsCascadingCtx = newContextual[bool]("fileuploadErrorWithParentsCascading")
fileuploadErrorRelFileCtx = newContextual[bool]("fileupload.error.fileupload.file.fileupload.error.error_file_id_fkey")
// Relationship Contexts for fileupload.error_csv
fileuploadErrorCSVWithParentsCascadingCtx = newContextual[bool]("fileuploadErrorCSVWithParentsCascading")
fileuploadErrorCSVRelCSVFileCSVCtx = newContextual[bool]("fileupload.csv.fileupload.error_csv.fileupload.error_csv.error_csv_csv_file_id_fkey")
// Relationship Contexts for fileupload.error_file
fileuploadErrorFileWithParentsCascadingCtx = newContextual[bool]("fileuploadErrorFileWithParentsCascading")
fileuploadErrorFileRelFileCtx = newContextual[bool]("fileupload.error_file.fileupload.file.fileupload.error_file.error_file_file_id_fkey")
// Relationship Contexts for fileupload.file
fileuploadFileWithParentsCascadingCtx = newContextual[bool]("fileuploadFileWithParentsCascading")
fileuploadFileRelCSVCtx = newContextual[bool]("fileupload.csv.fileupload.file.fileupload.csv.csv_file_id_fkey")
fileuploadFileRelErrorsCtx = newContextual[bool]("fileupload.error.fileupload.file.fileupload.error.error_file_id_fkey")
fileuploadFileRelErrorFilesCtx = newContextual[bool]("fileupload.error_file.fileupload.file.fileupload.error_file.error_file_file_id_fkey")
fileuploadFileRelCreatorUserCtx = newContextual[bool]("fileupload.file.user_.fileupload.file.file_creator_id_fkey")
fileuploadFileRelOrganizationCtx = newContextual[bool]("fileupload.file.organization.fileupload.file.file_organization_id_fkey")
@ -259,7 +264,7 @@ var (
organizationRelMosquitoinspectionsCtx = newContextual[bool]("fieldseeker.mosquitoinspection.organization.fieldseeker.mosquitoinspection.mosquitoinspection_organization_id_fkey")
organizationRelPointlocationsCtx = newContextual[bool]("fieldseeker.pointlocation.organization.fieldseeker.pointlocation.pointlocation_organization_id_fkey")
organizationRelPolygonlocationsCtx = newContextual[bool]("fieldseeker.polygonlocation.organization.fieldseeker.polygonlocation.polygonlocation_organization_id_fkey")
organizationRelPoolsCtx = newContextual[bool]("fieldseeker.pool.organization.fieldseeker.pool.pool_organization_id_fkey")
organizationRelFieldseekerPoolCtx = newContextual[bool]("fieldseeker.pool.organization.fieldseeker.pool.pool_organization_id_fkey")
organizationRelPooldetailsCtx = newContextual[bool]("fieldseeker.pooldetail.organization.fieldseeker.pooldetail.pooldetail_organization_id_fkey")
organizationRelProposedtreatmentareasCtx = newContextual[bool]("fieldseeker.proposedtreatmentarea.organization.fieldseeker.proposedtreatmentarea.proposedtreatmentarea_organization_id_fkey")
organizationRelQamosquitoinspectionsCtx = newContextual[bool]("fieldseeker.qamosquitoinspection.organization.fieldseeker.qamosquitoinspection.qamosquitoinspection_organization_id_fkey")
@ -282,11 +287,17 @@ var (
organizationRelNoteAudiosCtx = newContextual[bool]("note_audio.organization.note_audio.note_audio_organization_id_fkey")
organizationRelNoteImagesCtx = newContextual[bool]("note_image.organization.note_image.note_image_organization_id_fkey")
organizationRelImportDistrictGidDistrictCtx = newContextual[bool]("import.district.organization.organization.organization_import_district_gid_fkey")
organizationRelPoolsCtx = newContextual[bool]("organization.pool.pool.pool_organization_id_fkey")
organizationRelNuisancesCtx = newContextual[bool]("organization.publicreport.nuisance.publicreport.nuisance.nuisance_organization_id_fkey")
organizationRelPublicreportPoolCtx = newContextual[bool]("organization.publicreport.pool.publicreport.pool.pool_organization_id_fkey")
organizationRelQuicksCtx = newContextual[bool]("organization.publicreport.quick.publicreport.quick.quick_organization_id_fkey")
organizationRelUserCtx = newContextual[bool]("organization.user_.user_.user__organization_id_fkey")
// Relationship Contexts for pool
poolWithParentsCascadingCtx = newContextual[bool]("poolWithParentsCascading")
poolRelCreatorUserCtx = newContextual[bool]("pool.user_.pool.pool_creator_id_fkey")
poolRelOrganizationCtx = newContextual[bool]("organization.pool.pool.pool_organization_id_fkey")
// Relationship Contexts for publicreport.image
publicreportImageWithParentsCascadingCtx = newContextual[bool]("publicreportImageWithParentsCascading")
publicreportImageRelImageExifsCtx = newContextual[bool]("publicreport.image.publicreport.image_exif.publicreport.image_exif.image_exif_image_id_fkey")
@ -377,6 +388,7 @@ var (
userRelDeletorNoteImagesCtx = newContextual[bool]("note_image.user_.note_image.note_image_deletor_id_fkey")
userRelUserNotificationsCtx = newContextual[bool]("notification.user_.notification.notification_user_id_fkey")
userRelUserOauthTokensCtx = newContextual[bool]("oauth_token.user_.oauth_token.oauth_token_user_id_fkey")
userRelCreatorPoolsCtx = newContextual[bool]("pool.user_.pool.pool_creator_id_fkey")
userRelOrganizationCtx = newContextual[bool]("organization.user_.user_.user__organization_id_fkey")
)

View file

@ -58,7 +58,8 @@ type Factory struct {
baseFieldseekerZones2Mods FieldseekerZones2ModSlice
baseFieldseekerSyncMods FieldseekerSyncModSlice
baseFileuploadCSVMods FileuploadCSVModSlice
baseFileuploadErrorMods FileuploadErrorModSlice
baseFileuploadErrorCSVMods FileuploadErrorCSVModSlice
baseFileuploadErrorFileMods FileuploadErrorFileModSlice
baseFileuploadFileMods FileuploadFileModSlice
baseGeographyColumnMods GeographyColumnModSlice
baseGeometryColumnMods GeometryColumnModSlice
@ -74,6 +75,7 @@ type Factory struct {
baseNotificationMods NotificationModSlice
baseOauthTokenMods OauthTokenModSlice
baseOrganizationMods OrganizationModSlice
basePoolMods PoolModSlice
basePublicreportImageMods PublicreportImageModSlice
basePublicreportImageExifMods PublicreportImageExifModSlice
basePublicreportNotifyEmailNuisanceMods PublicreportNotifyEmailNuisanceModSlice
@ -2251,44 +2253,81 @@ func (f *Factory) NewFileuploadCSVWithContext(ctx context.Context, mods ...Fileu
func (f *Factory) FromExistingFileuploadCSV(m *models.FileuploadCSV) *FileuploadCSVTemplate {
o := &FileuploadCSVTemplate{f: f, alreadyPersisted: true}
o.Committed = func() null.Val[time.Time] { return m.Committed }
o.FileID = func() int32 { return m.FileID }
o.Rowcount = func() int32 { return m.Rowcount }
o.Type = func() enums.FileuploadCsvtype { return m.Type }
ctx := context.Background()
if m.R.File != nil {
FileuploadCSVMods.WithExistingFile(m.R.File).Apply(ctx, o)
}
return o
}
func (f *Factory) NewFileuploadError(mods ...FileuploadErrorMod) *FileuploadErrorTemplate {
return f.NewFileuploadErrorWithContext(context.Background(), mods...)
}
func (f *Factory) NewFileuploadErrorWithContext(ctx context.Context, mods ...FileuploadErrorMod) *FileuploadErrorTemplate {
o := &FileuploadErrorTemplate{f: f}
if f != nil {
f.baseFileuploadErrorMods.Apply(ctx, o)
if len(m.R.CSVFileErrorCSVS) > 0 {
FileuploadCSVMods.AddExistingCSVFileErrorCSVS(m.R.CSVFileErrorCSVS...).Apply(ctx, o)
}
FileuploadErrorModSlice(mods).Apply(ctx, o)
return o
}
func (f *Factory) NewFileuploadErrorCSV(mods ...FileuploadErrorCSVMod) *FileuploadErrorCSVTemplate {
return f.NewFileuploadErrorCSVWithContext(context.Background(), mods...)
}
func (f *Factory) NewFileuploadErrorCSVWithContext(ctx context.Context, mods ...FileuploadErrorCSVMod) *FileuploadErrorCSVTemplate {
o := &FileuploadErrorCSVTemplate{f: f}
if f != nil {
f.baseFileuploadErrorCSVMods.Apply(ctx, o)
}
FileuploadErrorCSVModSlice(mods).Apply(ctx, o)
return o
}
func (f *Factory) FromExistingFileuploadError(m *models.FileuploadError) *FileuploadErrorTemplate {
o := &FileuploadErrorTemplate{f: f, alreadyPersisted: true}
func (f *Factory) FromExistingFileuploadErrorCSV(m *models.FileuploadErrorCSV) *FileuploadErrorCSVTemplate {
o := &FileuploadErrorCSVTemplate{f: f, alreadyPersisted: true}
o.FileID = func() int32 { return m.FileID }
o.Col = func() int32 { return m.Col }
o.CSVFileID = func() int32 { return m.CSVFileID }
o.ID = func() int32 { return m.ID }
o.Line = func() int32 { return m.Line }
o.Message = func() string { return m.Message }
ctx := context.Background()
if m.R.CSVFileCSV != nil {
FileuploadErrorCSVMods.WithExistingCSVFileCSV(m.R.CSVFileCSV).Apply(ctx, o)
}
return o
}
func (f *Factory) NewFileuploadErrorFile(mods ...FileuploadErrorFileMod) *FileuploadErrorFileTemplate {
return f.NewFileuploadErrorFileWithContext(context.Background(), mods...)
}
func (f *Factory) NewFileuploadErrorFileWithContext(ctx context.Context, mods ...FileuploadErrorFileMod) *FileuploadErrorFileTemplate {
o := &FileuploadErrorFileTemplate{f: f}
if f != nil {
f.baseFileuploadErrorFileMods.Apply(ctx, o)
}
FileuploadErrorFileModSlice(mods).Apply(ctx, o)
return o
}
func (f *Factory) FromExistingFileuploadErrorFile(m *models.FileuploadErrorFile) *FileuploadErrorFileTemplate {
o := &FileuploadErrorFileTemplate{f: f, alreadyPersisted: true}
o.FileID = func() int32 { return m.FileID }
o.ID = func() int32 { return m.ID }
o.Message = func() string { return m.Message }
ctx := context.Background()
if m.R.File != nil {
FileuploadErrorMods.WithExistingFile(m.R.File).Apply(ctx, o)
FileuploadErrorFileMods.WithExistingFile(m.R.File).Apply(ctx, o)
}
return o
@ -2328,8 +2367,8 @@ func (f *Factory) FromExistingFileuploadFile(m *models.FileuploadFile) *Fileuplo
if m.R.CSV != nil {
FileuploadFileMods.WithExistingCSV(m.R.CSV).Apply(ctx, o)
}
if len(m.R.Errors) > 0 {
FileuploadFileMods.AddExistingErrors(m.R.Errors...).Apply(ctx, o)
if len(m.R.ErrorFiles) > 0 {
FileuploadFileMods.AddExistingErrorFiles(m.R.ErrorFiles...).Apply(ctx, o)
}
if m.R.CreatorUser != nil {
FileuploadFileMods.WithExistingCreatorUser(m.R.CreatorUser).Apply(ctx, o)
@ -2882,8 +2921,8 @@ func (f *Factory) FromExistingOrganization(m *models.Organization) *Organization
if len(m.R.Polygonlocations) > 0 {
OrganizationMods.AddExistingPolygonlocations(m.R.Polygonlocations...).Apply(ctx, o)
}
if len(m.R.Pools) > 0 {
OrganizationMods.AddExistingPools(m.R.Pools...).Apply(ctx, o)
if len(m.R.FieldseekerPool) > 0 {
OrganizationMods.AddExistingFieldseekerPool(m.R.FieldseekerPool...).Apply(ctx, o)
}
if len(m.R.Pooldetails) > 0 {
OrganizationMods.AddExistingPooldetails(m.R.Pooldetails...).Apply(ctx, o)
@ -2951,6 +2990,9 @@ func (f *Factory) FromExistingOrganization(m *models.Organization) *Organization
if m.R.ImportDistrictGidDistrict != nil {
OrganizationMods.WithExistingImportDistrictGidDistrict(m.R.ImportDistrictGidDistrict).Apply(ctx, o)
}
if len(m.R.Pools) > 0 {
OrganizationMods.AddExistingPools(m.R.Pools...).Apply(ctx, o)
}
if len(m.R.Nuisances) > 0 {
OrganizationMods.AddExistingNuisances(m.R.Nuisances...).Apply(ctx, o)
}
@ -2967,6 +3009,53 @@ func (f *Factory) FromExistingOrganization(m *models.Organization) *Organization
return o
}
func (f *Factory) NewPool(mods ...PoolMod) *PoolTemplate {
return f.NewPoolWithContext(context.Background(), mods...)
}
func (f *Factory) NewPoolWithContext(ctx context.Context, mods ...PoolMod) *PoolTemplate {
o := &PoolTemplate{f: f}
if f != nil {
f.basePoolMods.Apply(ctx, o)
}
PoolModSlice(mods).Apply(ctx, o)
return o
}
func (f *Factory) FromExistingPool(m *models.Pool) *PoolTemplate {
o := &PoolTemplate{f: f, alreadyPersisted: true}
o.AddressCity = func() string { return m.AddressCity }
o.AddressPostalCode = func() string { return m.AddressPostalCode }
o.AddressStreet = func() string { return m.AddressStreet }
o.Condition = func() enums.Poolconditiontype { return m.Condition }
o.Created = func() time.Time { return m.Created }
o.CreatorID = func() int32 { return m.CreatorID }
o.Deleted = func() null.Val[time.Time] { return m.Deleted }
o.Committed = func() bool { return m.Committed }
o.ID = func() int32 { return m.ID }
o.Notes = func() string { return m.Notes }
o.OrganizationID = func() int32 { return m.OrganizationID }
o.PropertyOwnerName = func() string { return m.PropertyOwnerName }
o.PropertyOwnerPhone = func() null.Val[string] { return m.PropertyOwnerPhone }
o.ResidentOwned = func() null.Val[bool] { return m.ResidentOwned }
o.ResidentPhone = func() null.Val[string] { return m.ResidentPhone }
o.Version = func() int32 { return m.Version }
ctx := context.Background()
if m.R.CreatorUser != nil {
PoolMods.WithExistingCreatorUser(m.R.CreatorUser).Apply(ctx, o)
}
if m.R.Organization != nil {
PoolMods.WithExistingOrganization(m.R.Organization).Apply(ctx, o)
}
return o
}
func (f *Factory) NewPublicreportImage(mods ...PublicreportImageMod) *PublicreportImageTemplate {
return f.NewPublicreportImageWithContext(context.Background(), mods...)
}
@ -3681,6 +3770,9 @@ func (f *Factory) FromExistingUser(m *models.User) *UserTemplate {
if len(m.R.UserOauthTokens) > 0 {
UserMods.AddExistingUserOauthTokens(m.R.UserOauthTokens...).Apply(ctx, o)
}
if len(m.R.CreatorPools) > 0 {
UserMods.AddExistingCreatorPools(m.R.CreatorPools...).Apply(ctx, o)
}
if m.R.Organization != nil {
UserMods.WithExistingOrganization(m.R.Organization).Apply(ctx, o)
}
@ -4000,12 +4092,20 @@ func (f *Factory) AddBaseFileuploadCSVMod(mods ...FileuploadCSVMod) {
f.baseFileuploadCSVMods = append(f.baseFileuploadCSVMods, mods...)
}
func (f *Factory) ClearBaseFileuploadErrorMods() {
f.baseFileuploadErrorMods = nil
func (f *Factory) ClearBaseFileuploadErrorCSVMods() {
f.baseFileuploadErrorCSVMods = nil
}
func (f *Factory) AddBaseFileuploadErrorMod(mods ...FileuploadErrorMod) {
f.baseFileuploadErrorMods = append(f.baseFileuploadErrorMods, mods...)
func (f *Factory) AddBaseFileuploadErrorCSVMod(mods ...FileuploadErrorCSVMod) {
f.baseFileuploadErrorCSVMods = append(f.baseFileuploadErrorCSVMods, mods...)
}
func (f *Factory) ClearBaseFileuploadErrorFileMods() {
f.baseFileuploadErrorFileMods = nil
}
func (f *Factory) AddBaseFileuploadErrorFileMod(mods ...FileuploadErrorFileMod) {
f.baseFileuploadErrorFileMods = append(f.baseFileuploadErrorFileMods, mods...)
}
func (f *Factory) ClearBaseFileuploadFileMods() {
@ -4128,6 +4228,14 @@ func (f *Factory) AddBaseOrganizationMod(mods ...OrganizationMod) {
f.baseOrganizationMods = append(f.baseOrganizationMods, mods...)
}
func (f *Factory) ClearBasePoolMods() {
f.basePoolMods = nil
}
func (f *Factory) AddBasePoolMod(mods ...PoolMod) {
f.basePoolMods = append(f.basePoolMods, mods...)
}
func (f *Factory) ClearBasePublicreportImageMods() {
f.basePublicreportImageMods = nil
}

View file

@ -191,6 +191,16 @@ func random_enums_Notificationtype(f *faker.Faker, limits ...string) enums.Notif
return all[f.IntBetween(0, len(all)-1)]
}
// random_enums_Poolconditiontype produces a pseudo-random Poolconditiontype.
// A nil faker falls back to the package-level default faker. The limits
// variadic is accepted only for signature parity with the other random_*
// helpers and is ignored here.
func random_enums_Poolconditiontype(f *faker.Faker, limits ...string) enums.Poolconditiontype {
	if f == nil {
		f = &defaultFaker
	}
	var zero enums.Poolconditiontype
	choices := zero.All()
	return choices[f.IntBetween(0, len(choices)-1)]
}
func random_enums_PublicreportAccuracytype(f *faker.Faker, limits ...string) enums.PublicreportAccuracytype {
if f == nil {
f = &defaultFaker

View file

@ -99,7 +99,7 @@ func (o *FieldseekerPoolTemplate) Apply(ctx context.Context, mods ...Fieldseeker
func (t FieldseekerPoolTemplate) setModelRels(o *models.FieldseekerPool) {
if t.r.Organization != nil {
rel := t.r.Organization.o.Build()
rel.R.Pools = append(rel.R.Pools, o)
rel.R.FieldseekerPool = append(rel.R.FieldseekerPool, o)
o.OrganizationID = rel.ID // h2
o.R.Organization = rel
}

View file

@ -6,11 +6,14 @@ package factory
import (
"context"
"testing"
"time"
"github.com/Gleipnir-Technology/bob"
enums "github.com/Gleipnir-Technology/nidus-sync/db/enums"
models "github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/aarondl/opt/null"
"github.com/aarondl/opt/omit"
"github.com/aarondl/opt/omitnull"
"github.com/jaswdr/faker/v2"
)
@ -35,8 +38,10 @@ func (mods FileuploadCSVModSlice) Apply(ctx context.Context, n *FileuploadCSVTem
// FileuploadCSVTemplate is an object representing the database table.
// all columns are optional and should be set by mods
type FileuploadCSVTemplate struct {
FileID func() int32
Type func() enums.FileuploadCsvtype
Committed func() null.Val[time.Time]
FileID func() int32
Rowcount func() int32
Type func() enums.FileuploadCsvtype
r fileuploadCSVR
f *Factory
@ -45,12 +50,17 @@ type FileuploadCSVTemplate struct {
}
type fileuploadCSVR struct {
File *fileuploadCSVRFileR
File *fileuploadCSVRFileR
CSVFileErrorCSVS []*fileuploadCSVRCSVFileErrorCSVSR
}
type fileuploadCSVRFileR struct {
o *FileuploadFileTemplate
}
type fileuploadCSVRCSVFileErrorCSVSR struct {
number int
o *FileuploadErrorCSVTemplate
}
// Apply mods to the FileuploadCSVTemplate
func (o *FileuploadCSVTemplate) Apply(ctx context.Context, mods ...FileuploadCSVMod) {
@ -68,6 +78,19 @@ func (t FileuploadCSVTemplate) setModelRels(o *models.FileuploadCSV) {
o.FileID = rel.ID // h2
o.R.File = rel
}
if t.r.CSVFileErrorCSVS != nil {
rel := models.FileuploadErrorCSVSlice{}
for _, r := range t.r.CSVFileErrorCSVS {
related := r.o.BuildMany(r.number)
for _, rel := range related {
rel.CSVFileID = o.FileID // h2
rel.R.CSVFileCSV = o
}
rel = append(rel, related...)
}
o.R.CSVFileErrorCSVS = rel
}
}
// BuildSetter returns an *models.FileuploadCSVSetter
@ -75,10 +98,18 @@ func (t FileuploadCSVTemplate) setModelRels(o *models.FileuploadCSV) {
func (o FileuploadCSVTemplate) BuildSetter() *models.FileuploadCSVSetter {
m := &models.FileuploadCSVSetter{}
if o.Committed != nil {
val := o.Committed()
m.Committed = omitnull.FromNull(val)
}
if o.FileID != nil {
val := o.FileID()
m.FileID = omit.From(val)
}
if o.Rowcount != nil {
val := o.Rowcount()
m.Rowcount = omit.From(val)
}
if o.Type != nil {
val := o.Type()
m.Type = omit.From(val)
@ -105,9 +136,15 @@ func (o FileuploadCSVTemplate) BuildManySetter(number int) []*models.FileuploadC
func (o FileuploadCSVTemplate) Build() *models.FileuploadCSV {
m := &models.FileuploadCSV{}
if o.Committed != nil {
m.Committed = o.Committed()
}
if o.FileID != nil {
m.FileID = o.FileID()
}
if o.Rowcount != nil {
m.Rowcount = o.Rowcount()
}
if o.Type != nil {
m.Type = o.Type()
}
@ -135,6 +172,10 @@ func ensureCreatableFileuploadCSV(m *models.FileuploadCSVSetter) {
val := random_int32(nil)
m.FileID = omit.From(val)
}
if !(m.Rowcount.IsValue()) {
val := random_int32(nil)
m.Rowcount = omit.From(val)
}
if !(m.Type.IsValue()) {
val := random_enums_FileuploadCsvtype(nil)
m.Type = omit.From(val)
@ -147,6 +188,26 @@ func ensureCreatableFileuploadCSV(m *models.FileuploadCSVSetter) {
func (o *FileuploadCSVTemplate) insertOptRels(ctx context.Context, exec bob.Executor, m *models.FileuploadCSV) error {
var err error
isCSVFileErrorCSVSDone, _ := fileuploadCSVRelCSVFileErrorCSVSCtx.Value(ctx)
if !isCSVFileErrorCSVSDone && o.r.CSVFileErrorCSVS != nil {
ctx = fileuploadCSVRelCSVFileErrorCSVSCtx.WithValue(ctx, true)
for _, r := range o.r.CSVFileErrorCSVS {
if r.o.alreadyPersisted {
m.R.CSVFileErrorCSVS = append(m.R.CSVFileErrorCSVS, r.o.Build())
} else {
rel1, err := r.o.CreateMany(ctx, exec, r.number)
if err != nil {
return err
}
err = m.AttachCSVFileErrorCSVS(ctx, exec, rel1...)
if err != nil {
return err
}
}
}
}
return err
}
@ -258,11 +319,66 @@ type fileuploadCSVMods struct{}
// RandomizeAllColumns returns a compound mod that assigns a random value to
// every column of the CSV template (Committed, FileID, Rowcount, Type) using
// the given faker; a nil faker makes each sub-mod fall back to the default.
func (m fileuploadCSVMods) RandomizeAllColumns(f *faker.Faker) FileuploadCSVMod {
	return FileuploadCSVModSlice{
		FileuploadCSVMods.RandomCommitted(f),
		FileuploadCSVMods.RandomFileID(f),
		FileuploadCSVMods.RandomRowcount(f),
		FileuploadCSVMods.RandomType(f),
	}
}
// Committed sets the Committed column to this fixed (possibly null) value.
func (m fileuploadCSVMods) Committed(val null.Val[time.Time]) FileuploadCSVMod {
	return FileuploadCSVModFunc(func(_ context.Context, o *FileuploadCSVTemplate) {
		o.Committed = func() null.Val[time.Time] { return val }
	})
}

// CommittedFunc sets the Committed column from the given function, which is
// re-evaluated on every Build/BuildSetter call.
func (m fileuploadCSVMods) CommittedFunc(f func() null.Val[time.Time]) FileuploadCSVMod {
	return FileuploadCSVModFunc(func(_ context.Context, o *FileuploadCSVTemplate) {
		o.Committed = f
	})
}

// UnsetCommitted clears any value previously set for the Committed column.
func (m fileuploadCSVMods) UnsetCommitted() FileuploadCSVMod {
	return FileuploadCSVModFunc(func(_ context.Context, o *FileuploadCSVTemplate) {
		o.Committed = nil
	})
}

// RandomCommitted generates a random value for the column using the given
// faker; if faker is nil, a default faker is used.
// NOTE(review): although documented as "sometimes null", this body always
// wraps a random time in null.From and so never yields null — identical to
// RandomCommittedNotNull. Likely a generator quirk; confirm against BobGen.
func (m fileuploadCSVMods) RandomCommitted(f *faker.Faker) FileuploadCSVMod {
	return FileuploadCSVModFunc(func(_ context.Context, o *FileuploadCSVTemplate) {
		o.Committed = func() null.Val[time.Time] {
			if f == nil {
				f = &defaultFaker
			}
			val := random_time_Time(f)
			return null.From(val)
		}
	})
}

// RandomCommittedNotNull generates a random, never-null value for the column
// using the given faker; if faker is nil, a default faker is used.
func (m fileuploadCSVMods) RandomCommittedNotNull(f *faker.Faker) FileuploadCSVMod {
	return FileuploadCSVModFunc(func(_ context.Context, o *FileuploadCSVTemplate) {
		o.Committed = func() null.Val[time.Time] {
			if f == nil {
				f = &defaultFaker
			}
			val := random_time_Time(f)
			return null.From(val)
		}
	})
}
// Set the model columns to this value
func (m fileuploadCSVMods) FileID(val int32) FileuploadCSVMod {
return FileuploadCSVModFunc(func(_ context.Context, o *FileuploadCSVTemplate) {
@ -294,6 +410,37 @@ func (m fileuploadCSVMods) RandomFileID(f *faker.Faker) FileuploadCSVMod {
})
}
// Rowcount sets the Rowcount column to this fixed value.
func (m fileuploadCSVMods) Rowcount(val int32) FileuploadCSVMod {
	return FileuploadCSVModFunc(func(_ context.Context, o *FileuploadCSVTemplate) {
		o.Rowcount = func() int32 { return val }
	})
}

// RowcountFunc sets the Rowcount column from the given function, re-evaluated
// on every Build/BuildSetter call.
func (m fileuploadCSVMods) RowcountFunc(f func() int32) FileuploadCSVMod {
	return FileuploadCSVModFunc(func(_ context.Context, o *FileuploadCSVTemplate) {
		o.Rowcount = f
	})
}

// UnsetRowcount clears any value previously set for the Rowcount column.
func (m fileuploadCSVMods) UnsetRowcount() FileuploadCSVMod {
	return FileuploadCSVModFunc(func(_ context.Context, o *FileuploadCSVTemplate) {
		o.Rowcount = nil
	})
}

// RandomRowcount generates a random value for the column using the given
// faker; if faker is nil, a default faker is used.
func (m fileuploadCSVMods) RandomRowcount(f *faker.Faker) FileuploadCSVMod {
	return FileuploadCSVModFunc(func(_ context.Context, o *FileuploadCSVTemplate) {
		o.Rowcount = func() int32 {
			return random_int32(f)
		}
	})
}
// Set the model columns to this value
func (m fileuploadCSVMods) Type(val enums.FileuploadCsvtype) FileuploadCSVMod {
return FileuploadCSVModFunc(func(_ context.Context, o *FileuploadCSVTemplate) {
@ -368,3 +515,51 @@ func (m fileuploadCSVMods) WithoutFile() FileuploadCSVMod {
o.r.File = nil
})
}
// WithCSVFileErrorCSVS REPLACES any existing error_csv relationship entries
// with a single entry that builds `number` rows from the given template.
func (m fileuploadCSVMods) WithCSVFileErrorCSVS(number int, related *FileuploadErrorCSVTemplate) FileuploadCSVMod {
	return FileuploadCSVModFunc(func(ctx context.Context, o *FileuploadCSVTemplate) {
		o.r.CSVFileErrorCSVS = []*fileuploadCSVRCSVFileErrorCSVSR{{
			number: number,
			o:      related,
		}}
	})
}

// WithNewCSVFileErrorCSVS is WithCSVFileErrorCSVS with a freshly constructed
// related template built from the given mods.
func (m fileuploadCSVMods) WithNewCSVFileErrorCSVS(number int, mods ...FileuploadErrorCSVMod) FileuploadCSVMod {
	return FileuploadCSVModFunc(func(ctx context.Context, o *FileuploadCSVTemplate) {
		related := o.f.NewFileuploadErrorCSVWithContext(ctx, mods...)
		m.WithCSVFileErrorCSVS(number, related).Apply(ctx, o)
	})
}

// AddCSVFileErrorCSVS APPENDS an entry (template + row count) to the existing
// error_csv relationship entries instead of replacing them.
func (m fileuploadCSVMods) AddCSVFileErrorCSVS(number int, related *FileuploadErrorCSVTemplate) FileuploadCSVMod {
	return FileuploadCSVModFunc(func(ctx context.Context, o *FileuploadCSVTemplate) {
		o.r.CSVFileErrorCSVS = append(o.r.CSVFileErrorCSVS, &fileuploadCSVRCSVFileErrorCSVSR{
			number: number,
			o:      related,
		})
	})
}

// AddNewCSVFileErrorCSVS is AddCSVFileErrorCSVS with a freshly constructed
// related template built from the given mods.
func (m fileuploadCSVMods) AddNewCSVFileErrorCSVS(number int, mods ...FileuploadErrorCSVMod) FileuploadCSVMod {
	return FileuploadCSVModFunc(func(ctx context.Context, o *FileuploadCSVTemplate) {
		related := o.f.NewFileuploadErrorCSVWithContext(ctx, mods...)
		m.AddCSVFileErrorCSVS(number, related).Apply(ctx, o)
	})
}

// AddExistingCSVFileErrorCSVS appends one entry per already-persisted model.
// Note the `number` field is intentionally left at its zero value: templates
// wrapped from existing models are emitted via Build, not BuildMany.
func (m fileuploadCSVMods) AddExistingCSVFileErrorCSVS(existingModels ...*models.FileuploadErrorCSV) FileuploadCSVMod {
	return FileuploadCSVModFunc(func(ctx context.Context, o *FileuploadCSVTemplate) {
		for _, em := range existingModels {
			o.r.CSVFileErrorCSVS = append(o.r.CSVFileErrorCSVS, &fileuploadCSVRCSVFileErrorCSVSR{
				o: o.f.FromExistingFileuploadErrorCSV(em),
			})
		}
	})
}

// WithoutCSVFileErrorCSVS removes all error_csv relationship entries.
func (m fileuploadCSVMods) WithoutCSVFileErrorCSVS() FileuploadCSVMod {
	return FileuploadCSVModFunc(func(ctx context.Context, o *FileuploadCSVTemplate) {
		o.r.CSVFileErrorCSVS = nil
	})
}

View file

@ -1,453 +0,0 @@
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.
package factory
import (
"context"
"testing"
"github.com/Gleipnir-Technology/bob"
models "github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/aarondl/opt/omit"
"github.com/jaswdr/faker/v2"
)
type FileuploadErrorMod interface {
Apply(context.Context, *FileuploadErrorTemplate)
}
type FileuploadErrorModFunc func(context.Context, *FileuploadErrorTemplate)
func (f FileuploadErrorModFunc) Apply(ctx context.Context, n *FileuploadErrorTemplate) {
f(ctx, n)
}
type FileuploadErrorModSlice []FileuploadErrorMod
func (mods FileuploadErrorModSlice) Apply(ctx context.Context, n *FileuploadErrorTemplate) {
for _, f := range mods {
f.Apply(ctx, n)
}
}
// FileuploadErrorTemplate is an object representing the database table.
// all columns are optional and should be set by mods
type FileuploadErrorTemplate struct {
FileID func() int32
ID func() int32
Line func() int32
Message func() string
r fileuploadErrorR
f *Factory
alreadyPersisted bool
}
type fileuploadErrorR struct {
File *fileuploadErrorRFileR
}
type fileuploadErrorRFileR struct {
o *FileuploadFileTemplate
}
// Apply mods to the FileuploadErrorTemplate
func (o *FileuploadErrorTemplate) Apply(ctx context.Context, mods ...FileuploadErrorMod) {
for _, mod := range mods {
mod.Apply(ctx, o)
}
}
// setModelRels creates and sets the relationships on *models.FileuploadError
// according to the relationships in the template. Nothing is inserted into the db
func (t FileuploadErrorTemplate) setModelRels(o *models.FileuploadError) {
if t.r.File != nil {
rel := t.r.File.o.Build()
rel.R.Errors = append(rel.R.Errors, o)
o.FileID = rel.ID // h2
o.R.File = rel
}
}
// BuildSetter returns an *models.FileuploadErrorSetter
// this does nothing with the relationship templates
func (o FileuploadErrorTemplate) BuildSetter() *models.FileuploadErrorSetter {
m := &models.FileuploadErrorSetter{}
if o.FileID != nil {
val := o.FileID()
m.FileID = omit.From(val)
}
if o.ID != nil {
val := o.ID()
m.ID = omit.From(val)
}
if o.Line != nil {
val := o.Line()
m.Line = omit.From(val)
}
if o.Message != nil {
val := o.Message()
m.Message = omit.From(val)
}
return m
}
// BuildManySetter returns an []*models.FileuploadErrorSetter
// this does nothing with the relationship templates
func (o FileuploadErrorTemplate) BuildManySetter(number int) []*models.FileuploadErrorSetter {
m := make([]*models.FileuploadErrorSetter, number)
for i := range m {
m[i] = o.BuildSetter()
}
return m
}
// Build returns an *models.FileuploadError
// Related objects are also created and placed in the .R field
// NOTE: Objects are not inserted into the database. Use FileuploadErrorTemplate.Create
func (o FileuploadErrorTemplate) Build() *models.FileuploadError {
m := &models.FileuploadError{}
if o.FileID != nil {
m.FileID = o.FileID()
}
if o.ID != nil {
m.ID = o.ID()
}
if o.Line != nil {
m.Line = o.Line()
}
if o.Message != nil {
m.Message = o.Message()
}
o.setModelRels(m)
return m
}
// BuildMany returns an models.FileuploadErrorSlice
// Related objects are also created and placed in the .R field
// NOTE: Objects are not inserted into the database. Use FileuploadErrorTemplate.CreateMany
func (o FileuploadErrorTemplate) BuildMany(number int) models.FileuploadErrorSlice {
m := make(models.FileuploadErrorSlice, number)
for i := range m {
m[i] = o.Build()
}
return m
}
func ensureCreatableFileuploadError(m *models.FileuploadErrorSetter) {
if !(m.FileID.IsValue()) {
val := random_int32(nil)
m.FileID = omit.From(val)
}
if !(m.Line.IsValue()) {
val := random_int32(nil)
m.Line = omit.From(val)
}
if !(m.Message.IsValue()) {
val := random_string(nil)
m.Message = omit.From(val)
}
}
// insertOptRels creates and inserts any optional the relationships on *models.FileuploadError
// according to the relationships in the template.
// any required relationship should have already exist on the model
func (o *FileuploadErrorTemplate) insertOptRels(ctx context.Context, exec bob.Executor, m *models.FileuploadError) error {
var err error
return err
}
// Create builds a fileuploadError and inserts it into the database
// Relations objects are also inserted and placed in the .R field
func (o *FileuploadErrorTemplate) Create(ctx context.Context, exec bob.Executor) (*models.FileuploadError, error) {
var err error
opt := o.BuildSetter()
ensureCreatableFileuploadError(opt)
if o.r.File == nil {
FileuploadErrorMods.WithNewFile().Apply(ctx, o)
}
var rel0 *models.FileuploadFile
if o.r.File.o.alreadyPersisted {
rel0 = o.r.File.o.Build()
} else {
rel0, err = o.r.File.o.Create(ctx, exec)
if err != nil {
return nil, err
}
}
opt.FileID = omit.From(rel0.ID)
m, err := models.FileuploadErrors.Insert(opt).One(ctx, exec)
if err != nil {
return nil, err
}
m.R.File = rel0
if err := o.insertOptRels(ctx, exec, m); err != nil {
return nil, err
}
return m, err
}
// MustCreate builds a fileuploadError and inserts it into the database
// Relations objects are also inserted and placed in the .R field
// panics if an error occurs
func (o *FileuploadErrorTemplate) MustCreate(ctx context.Context, exec bob.Executor) *models.FileuploadError {
m, err := o.Create(ctx, exec)
if err != nil {
panic(err)
}
return m
}
// CreateOrFail builds a fileuploadError and inserts it into the database
// Relations objects are also inserted and placed in the .R field
// It calls `tb.Fatal(err)` on the test/benchmark if an error occurs
func (o *FileuploadErrorTemplate) CreateOrFail(ctx context.Context, tb testing.TB, exec bob.Executor) *models.FileuploadError {
tb.Helper()
m, err := o.Create(ctx, exec)
if err != nil {
tb.Fatal(err)
return nil
}
return m
}
// CreateMany builds multiple fileuploadErrors and inserts them into the database
// Relations objects are also inserted and placed in the .R field
func (o FileuploadErrorTemplate) CreateMany(ctx context.Context, exec bob.Executor, number int) (models.FileuploadErrorSlice, error) {
var err error
m := make(models.FileuploadErrorSlice, number)
for i := range m {
m[i], err = o.Create(ctx, exec)
if err != nil {
return nil, err
}
}
return m, nil
}
// MustCreateMany builds multiple fileuploadErrors and inserts them into the database
// Relations objects are also inserted and placed in the .R field
// panics if an error occurs
func (o FileuploadErrorTemplate) MustCreateMany(ctx context.Context, exec bob.Executor, number int) models.FileuploadErrorSlice {
m, err := o.CreateMany(ctx, exec, number)
if err != nil {
panic(err)
}
return m
}
// CreateManyOrFail builds multiple fileuploadErrors and inserts them into the database
// Relations objects are also inserted and placed in the .R field
// It calls `tb.Fatal(err)` on the test/benchmark if an error occurs
func (o FileuploadErrorTemplate) CreateManyOrFail(ctx context.Context, tb testing.TB, exec bob.Executor, number int) models.FileuploadErrorSlice {
tb.Helper()
m, err := o.CreateMany(ctx, exec, number)
if err != nil {
tb.Fatal(err)
return nil
}
return m
}
// FileuploadError has methods that act as mods for the FileuploadErrorTemplate
var FileuploadErrorMods fileuploadErrorMods
type fileuploadErrorMods struct{}
func (m fileuploadErrorMods) RandomizeAllColumns(f *faker.Faker) FileuploadErrorMod {
return FileuploadErrorModSlice{
FileuploadErrorMods.RandomFileID(f),
FileuploadErrorMods.RandomID(f),
FileuploadErrorMods.RandomLine(f),
FileuploadErrorMods.RandomMessage(f),
}
}
// Set the model columns to this value
func (m fileuploadErrorMods) FileID(val int32) FileuploadErrorMod {
return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) {
o.FileID = func() int32 { return val }
})
}
// Set the Column from the function
func (m fileuploadErrorMods) FileIDFunc(f func() int32) FileuploadErrorMod {
return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) {
o.FileID = f
})
}
// Clear any values for the column
func (m fileuploadErrorMods) UnsetFileID() FileuploadErrorMod {
return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) {
o.FileID = nil
})
}
// Generates a random value for the column using the given faker
// if faker is nil, a default faker is used
func (m fileuploadErrorMods) RandomFileID(f *faker.Faker) FileuploadErrorMod {
return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) {
o.FileID = func() int32 {
return random_int32(f)
}
})
}
// Set the model columns to this value
func (m fileuploadErrorMods) ID(val int32) FileuploadErrorMod {
return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) {
o.ID = func() int32 { return val }
})
}
// Set the Column from the function
func (m fileuploadErrorMods) IDFunc(f func() int32) FileuploadErrorMod {
return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) {
o.ID = f
})
}
// Clear any values for the column
func (m fileuploadErrorMods) UnsetID() FileuploadErrorMod {
return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) {
o.ID = nil
})
}
// Generates a random value for the column using the given faker
// if faker is nil, a default faker is used
func (m fileuploadErrorMods) RandomID(f *faker.Faker) FileuploadErrorMod {
return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) {
o.ID = func() int32 {
return random_int32(f)
}
})
}
// Set the model columns to this value
func (m fileuploadErrorMods) Line(val int32) FileuploadErrorMod {
return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) {
o.Line = func() int32 { return val }
})
}
// Set the Column from the function
func (m fileuploadErrorMods) LineFunc(f func() int32) FileuploadErrorMod {
return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) {
o.Line = f
})
}
// Clear any values for the column
func (m fileuploadErrorMods) UnsetLine() FileuploadErrorMod {
return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) {
o.Line = nil
})
}
// Generates a random value for the column using the given faker
// if faker is nil, a default faker is used
func (m fileuploadErrorMods) RandomLine(f *faker.Faker) FileuploadErrorMod {
return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) {
o.Line = func() int32 {
return random_int32(f)
}
})
}
// Set the model columns to this value
func (m fileuploadErrorMods) Message(val string) FileuploadErrorMod {
return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) {
o.Message = func() string { return val }
})
}
// Set the Column from the function
func (m fileuploadErrorMods) MessageFunc(f func() string) FileuploadErrorMod {
return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) {
o.Message = f
})
}
// Clear any values for the column
func (m fileuploadErrorMods) UnsetMessage() FileuploadErrorMod {
return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) {
o.Message = nil
})
}
// Generates a random value for the column using the given faker
// if faker is nil, a default faker is used
func (m fileuploadErrorMods) RandomMessage(f *faker.Faker) FileuploadErrorMod {
return FileuploadErrorModFunc(func(_ context.Context, o *FileuploadErrorTemplate) {
o.Message = func() string {
return random_string(f)
}
})
}
func (m fileuploadErrorMods) WithParentsCascading() FileuploadErrorMod {
return FileuploadErrorModFunc(func(ctx context.Context, o *FileuploadErrorTemplate) {
if isDone, _ := fileuploadErrorWithParentsCascadingCtx.Value(ctx); isDone {
return
}
ctx = fileuploadErrorWithParentsCascadingCtx.WithValue(ctx, true)
{
related := o.f.NewFileuploadFileWithContext(ctx, FileuploadFileMods.WithParentsCascading())
m.WithFile(related).Apply(ctx, o)
}
})
}
func (m fileuploadErrorMods) WithFile(rel *FileuploadFileTemplate) FileuploadErrorMod {
return FileuploadErrorModFunc(func(ctx context.Context, o *FileuploadErrorTemplate) {
o.r.File = &fileuploadErrorRFileR{
o: rel,
}
})
}
func (m fileuploadErrorMods) WithNewFile(mods ...FileuploadFileMod) FileuploadErrorMod {
return FileuploadErrorModFunc(func(ctx context.Context, o *FileuploadErrorTemplate) {
related := o.f.NewFileuploadFileWithContext(ctx, mods...)
m.WithFile(related).Apply(ctx, o)
})
}
func (m fileuploadErrorMods) WithExistingFile(em *models.FileuploadFile) FileuploadErrorMod {
return FileuploadErrorModFunc(func(ctx context.Context, o *FileuploadErrorTemplate) {
o.r.File = &fileuploadErrorRFileR{
o: o.f.FromExistingFileuploadFile(em),
}
})
}
func (m fileuploadErrorMods) WithoutFile() FileuploadErrorMod {
return FileuploadErrorModFunc(func(ctx context.Context, o *FileuploadErrorTemplate) {
o.r.File = nil
})
}

View file

@ -0,0 +1,497 @@
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.
package factory
import (
"context"
"testing"
"github.com/Gleipnir-Technology/bob"
models "github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/aarondl/opt/omit"
"github.com/jaswdr/faker/v2"
)
// FileuploadErrorCSVMod is a modification applied to a FileuploadErrorCSVTemplate.
type FileuploadErrorCSVMod interface {
	Apply(context.Context, *FileuploadErrorCSVTemplate)
}

// FileuploadErrorCSVModFunc adapts a plain function to FileuploadErrorCSVMod.
type FileuploadErrorCSVModFunc func(context.Context, *FileuploadErrorCSVTemplate)

// Apply invokes the function itself, satisfying FileuploadErrorCSVMod.
func (f FileuploadErrorCSVModFunc) Apply(ctx context.Context, n *FileuploadErrorCSVTemplate) {
	f(ctx, n)
}

// FileuploadErrorCSVModSlice is an ordered list of mods applied in sequence.
type FileuploadErrorCSVModSlice []FileuploadErrorCSVMod

// Apply runs each mod in the slice, in order, against the template.
func (mods FileuploadErrorCSVModSlice) Apply(ctx context.Context, n *FileuploadErrorCSVTemplate) {
	for _, f := range mods {
		f.Apply(ctx, n)
	}
}
// FileuploadErrorCSVTemplate is an object representing the database table
// fileupload.error_csv. All columns are optional and should be set by mods;
// a nil column function means "not set".
type FileuploadErrorCSVTemplate struct {
	Col       func() int32  // column number the error refers to
	CSVFileID func() int32  // FK to fileupload.csv (whose PK is file_id)
	ID        func() int32  // primary key
	Line      func() int32  // line number the error refers to
	Message   func() string // human-readable error text
	r         fileuploadErrorCSVR
	f         *Factory
	// alreadyPersisted marks templates wrapped from existing DB rows so
	// Create paths reuse them instead of inserting again.
	alreadyPersisted bool
}

// fileuploadErrorCSVR holds the template's relationship settings.
type fileuploadErrorCSVR struct {
	CSVFileCSV *fileuploadErrorCSVRCSVFileCSVR
}

// fileuploadErrorCSVRCSVFileCSVR points at the parent CSV template.
type fileuploadErrorCSVRCSVFileCSVR struct {
	o *FileuploadCSVTemplate
}
// Apply runs every given mod, in order, against this template.
func (o *FileuploadErrorCSVTemplate) Apply(ctx context.Context, mods ...FileuploadErrorCSVMod) {
	for i := range mods {
		mods[i].Apply(ctx, o)
	}
}
// setModelRels creates and sets the relationships on *models.FileuploadErrorCSV
// according to the relationships in the template. Nothing is inserted into the db.
func (t FileuploadErrorCSVTemplate) setModelRels(o *models.FileuploadErrorCSV) {
	if t.r.CSVFileCSV != nil {
		rel := t.r.CSVFileCSV.o.Build()
		// Link both directions: parent's error list and child's parent pointer.
		rel.R.CSVFileErrorCSVS = append(rel.R.CSVFileErrorCSVS, o)
		// The parent CSV row is keyed by FileID, so that is the FK we copy.
		o.CSVFileID = rel.FileID // h2
		o.R.CSVFileCSV = rel
	}
}
// BuildSetter returns an *models.FileuploadErrorCSVSetter.
// Each non-nil column function is evaluated exactly once and wrapped with
// omit.From; nil columns are left absent. This does nothing with the
// relationship templates.
func (o FileuploadErrorCSVTemplate) BuildSetter() *models.FileuploadErrorCSVSetter {
	m := &models.FileuploadErrorCSVSetter{}
	if o.Col != nil {
		val := o.Col()
		m.Col = omit.From(val)
	}
	if o.CSVFileID != nil {
		val := o.CSVFileID()
		m.CSVFileID = omit.From(val)
	}
	if o.ID != nil {
		val := o.ID()
		m.ID = omit.From(val)
	}
	if o.Line != nil {
		val := o.Line()
		m.Line = omit.From(val)
	}
	if o.Message != nil {
		val := o.Message()
		m.Message = omit.From(val)
	}
	return m
}
// BuildManySetter returns a slice of `number` independently built setters.
// Relationship templates are ignored, exactly as in BuildSetter.
func (o FileuploadErrorCSVTemplate) BuildManySetter(number int) []*models.FileuploadErrorCSVSetter {
	setters := make([]*models.FileuploadErrorCSVSetter, number)
	for i := 0; i < number; i++ {
		setters[i] = o.BuildSetter()
	}
	return setters
}
// Build returns an *models.FileuploadErrorCSV.
// Related objects are also created and placed in the .R field.
// NOTE: Objects are not inserted into the database. Use FileuploadErrorCSVTemplate.Create.
func (o FileuploadErrorCSVTemplate) Build() *models.FileuploadErrorCSV {
	m := &models.FileuploadErrorCSV{}
	if o.Col != nil {
		m.Col = o.Col()
	}
	if o.CSVFileID != nil {
		m.CSVFileID = o.CSVFileID()
	}
	if o.ID != nil {
		m.ID = o.ID()
	}
	if o.Line != nil {
		m.Line = o.Line()
	}
	if o.Message != nil {
		m.Message = o.Message()
	}
	// Wire up relationship objects last so they can read the column values.
	o.setModelRels(m)
	return m
}
// BuildMany returns a models.FileuploadErrorCSVSlice of `number` built models.
// Related objects are also created and placed in each .R field.
// NOTE: nothing is inserted into the database; use CreateMany for that.
func (o FileuploadErrorCSVTemplate) BuildMany(number int) models.FileuploadErrorCSVSlice {
	built := make(models.FileuploadErrorCSVSlice, number)
	for i := 0; i < number; i++ {
		built[i] = o.Build()
	}
	return built
}
// ensureCreatableFileuploadErrorCSV fills in random values for any required
// column the setter has not set, so the INSERT can satisfy NOT NULL
// constraints. ID is deliberately left alone so the database can assign it.
func ensureCreatableFileuploadErrorCSV(m *models.FileuploadErrorCSVSetter) {
	if !(m.Col.IsValue()) {
		val := random_int32(nil)
		m.Col = omit.From(val)
	}
	if !(m.CSVFileID.IsValue()) {
		val := random_int32(nil)
		m.CSVFileID = omit.From(val)
	}
	if !(m.Line.IsValue()) {
		val := random_int32(nil)
		m.Line = omit.From(val)
	}
	if !(m.Message.IsValue()) {
		val := random_string(nil)
		m.Message = omit.From(val)
	}
}
// insertOptRels inserts any optional relationships on *models.FileuploadErrorCSV
// according to the template. FileuploadErrorCSV has no optional relationships,
// so this is a no-op kept for structural parity with the other templates; any
// required relationship must already exist on the model.
func (o *FileuploadErrorCSVTemplate) insertOptRels(ctx context.Context, exec bob.Executor, m *models.FileuploadErrorCSV) error {
	return nil
}
// Create builds a fileuploadErrorCSV and inserts it into the database.
// Relations objects are also inserted and placed in the .R field.
// Order matters: the parent CSV row must exist before the error row so the
// CSVFileID foreign key is valid at insert time.
func (o *FileuploadErrorCSVTemplate) Create(ctx context.Context, exec bob.Executor) (*models.FileuploadErrorCSV, error) {
	var err error
	opt := o.BuildSetter()
	ensureCreatableFileuploadErrorCSV(opt)
	// The parent CSV relationship is required; synthesize one if absent.
	if o.r.CSVFileCSV == nil {
		FileuploadErrorCSVMods.WithNewCSVFileCSV().Apply(ctx, o)
	}
	var rel0 *models.FileuploadCSV
	if o.r.CSVFileCSV.o.alreadyPersisted {
		// Already in the DB: just materialize the model, do not re-insert.
		rel0 = o.r.CSVFileCSV.o.Build()
	} else {
		rel0, err = o.r.CSVFileCSV.o.Create(ctx, exec)
		if err != nil {
			return nil, err
		}
	}
	// Override whatever random FK ensureCreatable may have set with the real one.
	opt.CSVFileID = omit.From(rel0.FileID)
	m, err := models.FileuploadErrorCSVS.Insert(opt).One(ctx, exec)
	if err != nil {
		return nil, err
	}
	m.R.CSVFileCSV = rel0
	if err := o.insertOptRels(ctx, exec, m); err != nil {
		return nil, err
	}
	return m, err
}
// MustCreate builds a fileuploadErrorCSV and inserts it into the database.
// Relation objects are also inserted and placed in the .R field.
// It panics if the insert fails.
func (o *FileuploadErrorCSVTemplate) MustCreate(ctx context.Context, exec bob.Executor) *models.FileuploadErrorCSV {
	created, err := o.Create(ctx, exec)
	if err != nil {
		panic(err)
	}
	return created
}
// CreateOrFail builds a fileuploadErrorCSV and inserts it into the database.
// Relations objects are also inserted and placed in the .R field.
// It calls `tb.Fatal(err)` on the test/benchmark if an error occurs.
func (o *FileuploadErrorCSVTemplate) CreateOrFail(ctx context.Context, tb testing.TB, exec bob.Executor) *models.FileuploadErrorCSV {
	// Mark this function as a helper so failures point at the caller's line.
	tb.Helper()
	m, err := o.Create(ctx, exec)
	if err != nil {
		tb.Fatal(err)
		return nil
	}
	return m
}
// CreateMany builds `number` fileuploadErrorCSVS and inserts them into the
// database one at a time, stopping at the first failure. Relation objects are
// also inserted and placed in each .R field.
func (o FileuploadErrorCSVTemplate) CreateMany(ctx context.Context, exec bob.Executor, number int) (models.FileuploadErrorCSVSlice, error) {
	created := make(models.FileuploadErrorCSVSlice, 0, number)
	for i := 0; i < number; i++ {
		one, err := o.Create(ctx, exec)
		if err != nil {
			return nil, err
		}
		created = append(created, one)
	}
	return created, nil
}
// MustCreateMany builds multiple fileuploadErrorCSVS and inserts them into the database.
// Relations objects are also inserted and placed in the .R field.
// It panics if any insert fails.
func (o FileuploadErrorCSVTemplate) MustCreateMany(ctx context.Context, exec bob.Executor, number int) models.FileuploadErrorCSVSlice {
	m, err := o.CreateMany(ctx, exec, number)
	if err != nil {
		panic(err)
	}
	return m
}
// CreateManyOrFail builds multiple fileuploadErrorCSVS and inserts them into the database.
// Relations objects are also inserted and placed in the .R field.
// It calls `tb.Fatal(err)` on the test/benchmark if an error occurs.
func (o FileuploadErrorCSVTemplate) CreateManyOrFail(ctx context.Context, tb testing.TB, exec bob.Executor, number int) models.FileuploadErrorCSVSlice {
	// Mark this function as a helper so failures point at the caller's line.
	tb.Helper()
	m, err := o.CreateMany(ctx, exec, number)
	if err != nil {
		tb.Fatal(err)
		return nil
	}
	return m
}
// FileuploadErrorCSVMods has methods that act as mods for the
// FileuploadErrorCSVTemplate; use the package-level FileuploadErrorCSVMods var.
var FileuploadErrorCSVMods fileuploadErrorCSVMods

// fileuploadErrorCSVMods is a stateless namespace for the mod constructors.
type fileuploadErrorCSVMods struct{}
// RandomizeAllColumns returns a compound mod assigning a random value to every
// column using the given faker (nil falls back to the default faker).
// NOTE(review): this includes RandomID, which randomizes the primary key —
// confirm this is intended where the DB would otherwise assign a serial ID.
func (m fileuploadErrorCSVMods) RandomizeAllColumns(f *faker.Faker) FileuploadErrorCSVMod {
	return FileuploadErrorCSVModSlice{
		FileuploadErrorCSVMods.RandomCol(f),
		FileuploadErrorCSVMods.RandomCSVFileID(f),
		FileuploadErrorCSVMods.RandomID(f),
		FileuploadErrorCSVMods.RandomLine(f),
		FileuploadErrorCSVMods.RandomMessage(f),
	}
}
// Col sets the Col column to this fixed value.
func (m fileuploadErrorCSVMods) Col(val int32) FileuploadErrorCSVMod {
	return FileuploadErrorCSVModFunc(func(_ context.Context, o *FileuploadErrorCSVTemplate) {
		o.Col = func() int32 { return val }
	})
}

// ColFunc sets the Col column from the given function, re-evaluated on every
// Build/BuildSetter call.
func (m fileuploadErrorCSVMods) ColFunc(f func() int32) FileuploadErrorCSVMod {
	return FileuploadErrorCSVModFunc(func(_ context.Context, o *FileuploadErrorCSVTemplate) {
		o.Col = f
	})
}

// UnsetCol clears any value previously set for the Col column.
func (m fileuploadErrorCSVMods) UnsetCol() FileuploadErrorCSVMod {
	return FileuploadErrorCSVModFunc(func(_ context.Context, o *FileuploadErrorCSVTemplate) {
		o.Col = nil
	})
}

// RandomCol generates a random value for the column using the given faker;
// if faker is nil, a default faker is used.
func (m fileuploadErrorCSVMods) RandomCol(f *faker.Faker) FileuploadErrorCSVMod {
	return FileuploadErrorCSVModFunc(func(_ context.Context, o *FileuploadErrorCSVTemplate) {
		o.Col = func() int32 {
			return random_int32(f)
		}
	})
}
// CSVFileID sets the CSVFileID foreign-key column to this fixed value.
// Note Create overrides this with the real parent CSV's FileID at insert time.
func (m fileuploadErrorCSVMods) CSVFileID(val int32) FileuploadErrorCSVMod {
	return FileuploadErrorCSVModFunc(func(_ context.Context, o *FileuploadErrorCSVTemplate) {
		o.CSVFileID = func() int32 { return val }
	})
}

// CSVFileIDFunc sets the CSVFileID column from the given function,
// re-evaluated on every Build/BuildSetter call.
func (m fileuploadErrorCSVMods) CSVFileIDFunc(f func() int32) FileuploadErrorCSVMod {
	return FileuploadErrorCSVModFunc(func(_ context.Context, o *FileuploadErrorCSVTemplate) {
		o.CSVFileID = f
	})
}

// UnsetCSVFileID clears any value previously set for the CSVFileID column.
func (m fileuploadErrorCSVMods) UnsetCSVFileID() FileuploadErrorCSVMod {
	return FileuploadErrorCSVModFunc(func(_ context.Context, o *FileuploadErrorCSVTemplate) {
		o.CSVFileID = nil
	})
}

// RandomCSVFileID generates a random value for the column using the given
// faker; if faker is nil, a default faker is used.
func (m fileuploadErrorCSVMods) RandomCSVFileID(f *faker.Faker) FileuploadErrorCSVMod {
	return FileuploadErrorCSVModFunc(func(_ context.Context, o *FileuploadErrorCSVTemplate) {
		o.CSVFileID = func() int32 {
			return random_int32(f)
		}
	})
}
// Set the model columns to this value
func (m fileuploadErrorCSVMods) ID(val int32) FileuploadErrorCSVMod {
return FileuploadErrorCSVModFunc(func(_ context.Context, o *FileuploadErrorCSVTemplate) {
o.ID = func() int32 { return val }
})
}
// Set the Column from the function
func (m fileuploadErrorCSVMods) IDFunc(f func() int32) FileuploadErrorCSVMod {
return FileuploadErrorCSVModFunc(func(_ context.Context, o *FileuploadErrorCSVTemplate) {
o.ID = f
})
}
// Clear any values for the column
func (m fileuploadErrorCSVMods) UnsetID() FileuploadErrorCSVMod {
return FileuploadErrorCSVModFunc(func(_ context.Context, o *FileuploadErrorCSVTemplate) {
o.ID = nil
})
}
// Generates a random value for the column using the given faker
// if faker is nil, a default faker is used
func (m fileuploadErrorCSVMods) RandomID(f *faker.Faker) FileuploadErrorCSVMod {
return FileuploadErrorCSVModFunc(func(_ context.Context, o *FileuploadErrorCSVTemplate) {
o.ID = func() int32 {
return random_int32(f)
}
})
}
// Set the model columns to this value
func (m fileuploadErrorCSVMods) Line(val int32) FileuploadErrorCSVMod {
return FileuploadErrorCSVModFunc(func(_ context.Context, o *FileuploadErrorCSVTemplate) {
o.Line = func() int32 { return val }
})
}
// Set the Column from the function
func (m fileuploadErrorCSVMods) LineFunc(f func() int32) FileuploadErrorCSVMod {
return FileuploadErrorCSVModFunc(func(_ context.Context, o *FileuploadErrorCSVTemplate) {
o.Line = f
})
}
// Clear any values for the column
func (m fileuploadErrorCSVMods) UnsetLine() FileuploadErrorCSVMod {
return FileuploadErrorCSVModFunc(func(_ context.Context, o *FileuploadErrorCSVTemplate) {
o.Line = nil
})
}
// Generates a random value for the column using the given faker
// if faker is nil, a default faker is used
func (m fileuploadErrorCSVMods) RandomLine(f *faker.Faker) FileuploadErrorCSVMod {
return FileuploadErrorCSVModFunc(func(_ context.Context, o *FileuploadErrorCSVTemplate) {
o.Line = func() int32 {
return random_int32(f)
}
})
}
// Set the model columns to this value
func (m fileuploadErrorCSVMods) Message(val string) FileuploadErrorCSVMod {
return FileuploadErrorCSVModFunc(func(_ context.Context, o *FileuploadErrorCSVTemplate) {
o.Message = func() string { return val }
})
}
// Set the Column from the function
func (m fileuploadErrorCSVMods) MessageFunc(f func() string) FileuploadErrorCSVMod {
return FileuploadErrorCSVModFunc(func(_ context.Context, o *FileuploadErrorCSVTemplate) {
o.Message = f
})
}
// Clear any values for the column
func (m fileuploadErrorCSVMods) UnsetMessage() FileuploadErrorCSVMod {
return FileuploadErrorCSVModFunc(func(_ context.Context, o *FileuploadErrorCSVTemplate) {
o.Message = nil
})
}
// Generates a random value for the column using the given faker
// if faker is nil, a default faker is used
func (m fileuploadErrorCSVMods) RandomMessage(f *faker.Faker) FileuploadErrorCSVMod {
return FileuploadErrorCSVModFunc(func(_ context.Context, o *FileuploadErrorCSVTemplate) {
o.Message = func() string {
return random_string(f)
}
})
}
func (m fileuploadErrorCSVMods) WithParentsCascading() FileuploadErrorCSVMod {
return FileuploadErrorCSVModFunc(func(ctx context.Context, o *FileuploadErrorCSVTemplate) {
if isDone, _ := fileuploadErrorCSVWithParentsCascadingCtx.Value(ctx); isDone {
return
}
ctx = fileuploadErrorCSVWithParentsCascadingCtx.WithValue(ctx, true)
{
related := o.f.NewFileuploadCSVWithContext(ctx, FileuploadCSVMods.WithParentsCascading())
m.WithCSVFileCSV(related).Apply(ctx, o)
}
})
}
func (m fileuploadErrorCSVMods) WithCSVFileCSV(rel *FileuploadCSVTemplate) FileuploadErrorCSVMod {
return FileuploadErrorCSVModFunc(func(ctx context.Context, o *FileuploadErrorCSVTemplate) {
o.r.CSVFileCSV = &fileuploadErrorCSVRCSVFileCSVR{
o: rel,
}
})
}
func (m fileuploadErrorCSVMods) WithNewCSVFileCSV(mods ...FileuploadCSVMod) FileuploadErrorCSVMod {
return FileuploadErrorCSVModFunc(func(ctx context.Context, o *FileuploadErrorCSVTemplate) {
related := o.f.NewFileuploadCSVWithContext(ctx, mods...)
m.WithCSVFileCSV(related).Apply(ctx, o)
})
}
func (m fileuploadErrorCSVMods) WithExistingCSVFileCSV(em *models.FileuploadCSV) FileuploadErrorCSVMod {
return FileuploadErrorCSVModFunc(func(ctx context.Context, o *FileuploadErrorCSVTemplate) {
o.r.CSVFileCSV = &fileuploadErrorCSVRCSVFileCSVR{
o: o.f.FromExistingFileuploadCSV(em),
}
})
}
func (m fileuploadErrorCSVMods) WithoutCSVFileCSV() FileuploadErrorCSVMod {
return FileuploadErrorCSVModFunc(func(ctx context.Context, o *FileuploadErrorCSVTemplate) {
o.r.CSVFileCSV = nil
})
}

View file

@ -0,0 +1,409 @@
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.
package factory
import (
"context"
"testing"
"github.com/Gleipnir-Technology/bob"
models "github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/aarondl/opt/omit"
"github.com/jaswdr/faker/v2"
)
type FileuploadErrorFileMod interface {
Apply(context.Context, *FileuploadErrorFileTemplate)
}
type FileuploadErrorFileModFunc func(context.Context, *FileuploadErrorFileTemplate)
func (f FileuploadErrorFileModFunc) Apply(ctx context.Context, n *FileuploadErrorFileTemplate) {
f(ctx, n)
}
type FileuploadErrorFileModSlice []FileuploadErrorFileMod
func (mods FileuploadErrorFileModSlice) Apply(ctx context.Context, n *FileuploadErrorFileTemplate) {
for _, f := range mods {
f.Apply(ctx, n)
}
}
// FileuploadErrorFileTemplate is an object representing the database table.
// all columns are optional and should be set by mods
type FileuploadErrorFileTemplate struct {
FileID func() int32
ID func() int32
Message func() string
r fileuploadErrorFileR
f *Factory
alreadyPersisted bool
}
type fileuploadErrorFileR struct {
File *fileuploadErrorFileRFileR
}
type fileuploadErrorFileRFileR struct {
o *FileuploadFileTemplate
}
// Apply mods to the FileuploadErrorFileTemplate
func (o *FileuploadErrorFileTemplate) Apply(ctx context.Context, mods ...FileuploadErrorFileMod) {
for _, mod := range mods {
mod.Apply(ctx, o)
}
}
// setModelRels creates and sets the relationships on *models.FileuploadErrorFile
// according to the relationships in the template. Nothing is inserted into the db
func (t FileuploadErrorFileTemplate) setModelRels(o *models.FileuploadErrorFile) {
if t.r.File != nil {
rel := t.r.File.o.Build()
rel.R.ErrorFiles = append(rel.R.ErrorFiles, o)
o.FileID = rel.ID // h2
o.R.File = rel
}
}
// BuildSetter returns an *models.FileuploadErrorFileSetter
// this does nothing with the relationship templates
func (o FileuploadErrorFileTemplate) BuildSetter() *models.FileuploadErrorFileSetter {
m := &models.FileuploadErrorFileSetter{}
if o.FileID != nil {
val := o.FileID()
m.FileID = omit.From(val)
}
if o.ID != nil {
val := o.ID()
m.ID = omit.From(val)
}
if o.Message != nil {
val := o.Message()
m.Message = omit.From(val)
}
return m
}
// BuildManySetter returns an []*models.FileuploadErrorFileSetter
// this does nothing with the relationship templates
func (o FileuploadErrorFileTemplate) BuildManySetter(number int) []*models.FileuploadErrorFileSetter {
m := make([]*models.FileuploadErrorFileSetter, number)
for i := range m {
m[i] = o.BuildSetter()
}
return m
}
// Build returns an *models.FileuploadErrorFile
// Related objects are also created and placed in the .R field
// NOTE: Objects are not inserted into the database. Use FileuploadErrorFileTemplate.Create
func (o FileuploadErrorFileTemplate) Build() *models.FileuploadErrorFile {
m := &models.FileuploadErrorFile{}
if o.FileID != nil {
m.FileID = o.FileID()
}
if o.ID != nil {
m.ID = o.ID()
}
if o.Message != nil {
m.Message = o.Message()
}
o.setModelRels(m)
return m
}
// BuildMany returns an models.FileuploadErrorFileSlice
// Related objects are also created and placed in the .R field
// NOTE: Objects are not inserted into the database. Use FileuploadErrorFileTemplate.CreateMany
func (o FileuploadErrorFileTemplate) BuildMany(number int) models.FileuploadErrorFileSlice {
m := make(models.FileuploadErrorFileSlice, number)
for i := range m {
m[i] = o.Build()
}
return m
}
func ensureCreatableFileuploadErrorFile(m *models.FileuploadErrorFileSetter) {
if !(m.FileID.IsValue()) {
val := random_int32(nil)
m.FileID = omit.From(val)
}
if !(m.Message.IsValue()) {
val := random_string(nil)
m.Message = omit.From(val)
}
}
// insertOptRels creates and inserts any optional the relationships on *models.FileuploadErrorFile
// according to the relationships in the template.
// any required relationship should have already exist on the model
func (o *FileuploadErrorFileTemplate) insertOptRels(ctx context.Context, exec bob.Executor, m *models.FileuploadErrorFile) error {
var err error
return err
}
// Create builds a fileuploadErrorFile and inserts it into the database
// Relations objects are also inserted and placed in the .R field
func (o *FileuploadErrorFileTemplate) Create(ctx context.Context, exec bob.Executor) (*models.FileuploadErrorFile, error) {
var err error
opt := o.BuildSetter()
ensureCreatableFileuploadErrorFile(opt)
if o.r.File == nil {
FileuploadErrorFileMods.WithNewFile().Apply(ctx, o)
}
var rel0 *models.FileuploadFile
if o.r.File.o.alreadyPersisted {
rel0 = o.r.File.o.Build()
} else {
rel0, err = o.r.File.o.Create(ctx, exec)
if err != nil {
return nil, err
}
}
opt.FileID = omit.From(rel0.ID)
m, err := models.FileuploadErrorFiles.Insert(opt).One(ctx, exec)
if err != nil {
return nil, err
}
m.R.File = rel0
if err := o.insertOptRels(ctx, exec, m); err != nil {
return nil, err
}
return m, err
}
// MustCreate builds a fileuploadErrorFile and inserts it into the database
// Relations objects are also inserted and placed in the .R field
// panics if an error occurs
func (o *FileuploadErrorFileTemplate) MustCreate(ctx context.Context, exec bob.Executor) *models.FileuploadErrorFile {
m, err := o.Create(ctx, exec)
if err != nil {
panic(err)
}
return m
}
// CreateOrFail builds a fileuploadErrorFile and inserts it into the database
// Relations objects are also inserted and placed in the .R field
// It calls `tb.Fatal(err)` on the test/benchmark if an error occurs
func (o *FileuploadErrorFileTemplate) CreateOrFail(ctx context.Context, tb testing.TB, exec bob.Executor) *models.FileuploadErrorFile {
tb.Helper()
m, err := o.Create(ctx, exec)
if err != nil {
tb.Fatal(err)
return nil
}
return m
}
// CreateMany builds multiple fileuploadErrorFiles and inserts them into the database
// Relations objects are also inserted and placed in the .R field
func (o FileuploadErrorFileTemplate) CreateMany(ctx context.Context, exec bob.Executor, number int) (models.FileuploadErrorFileSlice, error) {
var err error
m := make(models.FileuploadErrorFileSlice, number)
for i := range m {
m[i], err = o.Create(ctx, exec)
if err != nil {
return nil, err
}
}
return m, nil
}
// MustCreateMany builds multiple fileuploadErrorFiles and inserts them into the database
// Relations objects are also inserted and placed in the .R field
// panics if an error occurs
func (o FileuploadErrorFileTemplate) MustCreateMany(ctx context.Context, exec bob.Executor, number int) models.FileuploadErrorFileSlice {
m, err := o.CreateMany(ctx, exec, number)
if err != nil {
panic(err)
}
return m
}
// CreateManyOrFail builds multiple fileuploadErrorFiles and inserts them into the database
// Relations objects are also inserted and placed in the .R field
// It calls `tb.Fatal(err)` on the test/benchmark if an error occurs
func (o FileuploadErrorFileTemplate) CreateManyOrFail(ctx context.Context, tb testing.TB, exec bob.Executor, number int) models.FileuploadErrorFileSlice {
tb.Helper()
m, err := o.CreateMany(ctx, exec, number)
if err != nil {
tb.Fatal(err)
return nil
}
return m
}
// FileuploadErrorFile has methods that act as mods for the FileuploadErrorFileTemplate
var FileuploadErrorFileMods fileuploadErrorFileMods
type fileuploadErrorFileMods struct{}
func (m fileuploadErrorFileMods) RandomizeAllColumns(f *faker.Faker) FileuploadErrorFileMod {
return FileuploadErrorFileModSlice{
FileuploadErrorFileMods.RandomFileID(f),
FileuploadErrorFileMods.RandomID(f),
FileuploadErrorFileMods.RandomMessage(f),
}
}
// Set the model columns to this value
func (m fileuploadErrorFileMods) FileID(val int32) FileuploadErrorFileMod {
return FileuploadErrorFileModFunc(func(_ context.Context, o *FileuploadErrorFileTemplate) {
o.FileID = func() int32 { return val }
})
}
// Set the Column from the function
func (m fileuploadErrorFileMods) FileIDFunc(f func() int32) FileuploadErrorFileMod {
return FileuploadErrorFileModFunc(func(_ context.Context, o *FileuploadErrorFileTemplate) {
o.FileID = f
})
}
// Clear any values for the column
func (m fileuploadErrorFileMods) UnsetFileID() FileuploadErrorFileMod {
return FileuploadErrorFileModFunc(func(_ context.Context, o *FileuploadErrorFileTemplate) {
o.FileID = nil
})
}
// Generates a random value for the column using the given faker
// if faker is nil, a default faker is used
func (m fileuploadErrorFileMods) RandomFileID(f *faker.Faker) FileuploadErrorFileMod {
return FileuploadErrorFileModFunc(func(_ context.Context, o *FileuploadErrorFileTemplate) {
o.FileID = func() int32 {
return random_int32(f)
}
})
}
// Set the model columns to this value
func (m fileuploadErrorFileMods) ID(val int32) FileuploadErrorFileMod {
return FileuploadErrorFileModFunc(func(_ context.Context, o *FileuploadErrorFileTemplate) {
o.ID = func() int32 { return val }
})
}
// Set the Column from the function
func (m fileuploadErrorFileMods) IDFunc(f func() int32) FileuploadErrorFileMod {
return FileuploadErrorFileModFunc(func(_ context.Context, o *FileuploadErrorFileTemplate) {
o.ID = f
})
}
// Clear any values for the column
func (m fileuploadErrorFileMods) UnsetID() FileuploadErrorFileMod {
return FileuploadErrorFileModFunc(func(_ context.Context, o *FileuploadErrorFileTemplate) {
o.ID = nil
})
}
// Generates a random value for the column using the given faker
// if faker is nil, a default faker is used
func (m fileuploadErrorFileMods) RandomID(f *faker.Faker) FileuploadErrorFileMod {
return FileuploadErrorFileModFunc(func(_ context.Context, o *FileuploadErrorFileTemplate) {
o.ID = func() int32 {
return random_int32(f)
}
})
}
// Set the model columns to this value
func (m fileuploadErrorFileMods) Message(val string) FileuploadErrorFileMod {
return FileuploadErrorFileModFunc(func(_ context.Context, o *FileuploadErrorFileTemplate) {
o.Message = func() string { return val }
})
}
// Set the Column from the function
func (m fileuploadErrorFileMods) MessageFunc(f func() string) FileuploadErrorFileMod {
return FileuploadErrorFileModFunc(func(_ context.Context, o *FileuploadErrorFileTemplate) {
o.Message = f
})
}
// Clear any values for the column
func (m fileuploadErrorFileMods) UnsetMessage() FileuploadErrorFileMod {
return FileuploadErrorFileModFunc(func(_ context.Context, o *FileuploadErrorFileTemplate) {
o.Message = nil
})
}
// Generates a random value for the column using the given faker
// if faker is nil, a default faker is used
func (m fileuploadErrorFileMods) RandomMessage(f *faker.Faker) FileuploadErrorFileMod {
return FileuploadErrorFileModFunc(func(_ context.Context, o *FileuploadErrorFileTemplate) {
o.Message = func() string {
return random_string(f)
}
})
}
func (m fileuploadErrorFileMods) WithParentsCascading() FileuploadErrorFileMod {
return FileuploadErrorFileModFunc(func(ctx context.Context, o *FileuploadErrorFileTemplate) {
if isDone, _ := fileuploadErrorFileWithParentsCascadingCtx.Value(ctx); isDone {
return
}
ctx = fileuploadErrorFileWithParentsCascadingCtx.WithValue(ctx, true)
{
related := o.f.NewFileuploadFileWithContext(ctx, FileuploadFileMods.WithParentsCascading())
m.WithFile(related).Apply(ctx, o)
}
})
}
func (m fileuploadErrorFileMods) WithFile(rel *FileuploadFileTemplate) FileuploadErrorFileMod {
return FileuploadErrorFileModFunc(func(ctx context.Context, o *FileuploadErrorFileTemplate) {
o.r.File = &fileuploadErrorFileRFileR{
o: rel,
}
})
}
func (m fileuploadErrorFileMods) WithNewFile(mods ...FileuploadFileMod) FileuploadErrorFileMod {
return FileuploadErrorFileModFunc(func(ctx context.Context, o *FileuploadErrorFileTemplate) {
related := o.f.NewFileuploadFileWithContext(ctx, mods...)
m.WithFile(related).Apply(ctx, o)
})
}
func (m fileuploadErrorFileMods) WithExistingFile(em *models.FileuploadFile) FileuploadErrorFileMod {
return FileuploadErrorFileModFunc(func(ctx context.Context, o *FileuploadErrorFileTemplate) {
o.r.File = &fileuploadErrorFileRFileR{
o: o.f.FromExistingFileuploadFile(em),
}
})
}
func (m fileuploadErrorFileMods) WithoutFile() FileuploadErrorFileMod {
return FileuploadErrorFileModFunc(func(ctx context.Context, o *FileuploadErrorFileTemplate) {
o.r.File = nil
})
}

View file

@ -58,7 +58,7 @@ type FileuploadFileTemplate struct {
type fileuploadFileR struct {
CSV *fileuploadFileRCSVR
Errors []*fileuploadFileRErrorsR
ErrorFiles []*fileuploadFileRErrorFilesR
CreatorUser *fileuploadFileRCreatorUserR
Organization *fileuploadFileROrganizationR
}
@ -66,9 +66,9 @@ type fileuploadFileR struct {
type fileuploadFileRCSVR struct {
o *FileuploadCSVTemplate
}
type fileuploadFileRErrorsR struct {
type fileuploadFileRErrorFilesR struct {
number int
o *FileuploadErrorTemplate
o *FileuploadErrorFileTemplate
}
type fileuploadFileRCreatorUserR struct {
o *UserTemplate
@ -94,9 +94,9 @@ func (t FileuploadFileTemplate) setModelRels(o *models.FileuploadFile) {
o.R.CSV = rel
}
if t.r.Errors != nil {
rel := models.FileuploadErrorSlice{}
for _, r := range t.r.Errors {
if t.r.ErrorFiles != nil {
rel := models.FileuploadErrorFileSlice{}
for _, r := range t.r.ErrorFiles {
related := r.o.BuildMany(r.number)
for _, rel := range related {
rel.FileID = o.ID // h2
@ -104,7 +104,7 @@ func (t FileuploadFileTemplate) setModelRels(o *models.FileuploadFile) {
}
rel = append(rel, related...)
}
o.R.Errors = rel
o.R.ErrorFiles = rel
}
if t.r.CreatorUser != nil {
@ -298,19 +298,19 @@ func (o *FileuploadFileTemplate) insertOptRels(ctx context.Context, exec bob.Exe
}
isErrorsDone, _ := fileuploadFileRelErrorsCtx.Value(ctx)
if !isErrorsDone && o.r.Errors != nil {
ctx = fileuploadFileRelErrorsCtx.WithValue(ctx, true)
for _, r := range o.r.Errors {
isErrorFilesDone, _ := fileuploadFileRelErrorFilesCtx.Value(ctx)
if !isErrorFilesDone && o.r.ErrorFiles != nil {
ctx = fileuploadFileRelErrorFilesCtx.WithValue(ctx, true)
for _, r := range o.r.ErrorFiles {
if r.o.alreadyPersisted {
m.R.Errors = append(m.R.Errors, r.o.Build())
m.R.ErrorFiles = append(m.R.ErrorFiles, r.o.Build())
} else {
rel1, err := r.o.CreateMany(ctx, exec, r.number)
if err != nil {
return err
}
err = m.AttachErrors(ctx, exec, rel1...)
err = m.AttachErrorFiles(ctx, exec, rel1...)
if err != nil {
return err
}
@ -906,50 +906,50 @@ func (m fileuploadFileMods) WithoutOrganization() FileuploadFileMod {
})
}
func (m fileuploadFileMods) WithErrors(number int, related *FileuploadErrorTemplate) FileuploadFileMod {
func (m fileuploadFileMods) WithErrorFiles(number int, related *FileuploadErrorFileTemplate) FileuploadFileMod {
return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) {
o.r.Errors = []*fileuploadFileRErrorsR{{
o.r.ErrorFiles = []*fileuploadFileRErrorFilesR{{
number: number,
o: related,
}}
})
}
func (m fileuploadFileMods) WithNewErrors(number int, mods ...FileuploadErrorMod) FileuploadFileMod {
func (m fileuploadFileMods) WithNewErrorFiles(number int, mods ...FileuploadErrorFileMod) FileuploadFileMod {
return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) {
related := o.f.NewFileuploadErrorWithContext(ctx, mods...)
m.WithErrors(number, related).Apply(ctx, o)
related := o.f.NewFileuploadErrorFileWithContext(ctx, mods...)
m.WithErrorFiles(number, related).Apply(ctx, o)
})
}
func (m fileuploadFileMods) AddErrors(number int, related *FileuploadErrorTemplate) FileuploadFileMod {
func (m fileuploadFileMods) AddErrorFiles(number int, related *FileuploadErrorFileTemplate) FileuploadFileMod {
return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) {
o.r.Errors = append(o.r.Errors, &fileuploadFileRErrorsR{
o.r.ErrorFiles = append(o.r.ErrorFiles, &fileuploadFileRErrorFilesR{
number: number,
o: related,
})
})
}
func (m fileuploadFileMods) AddNewErrors(number int, mods ...FileuploadErrorMod) FileuploadFileMod {
func (m fileuploadFileMods) AddNewErrorFiles(number int, mods ...FileuploadErrorFileMod) FileuploadFileMod {
return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) {
related := o.f.NewFileuploadErrorWithContext(ctx, mods...)
m.AddErrors(number, related).Apply(ctx, o)
related := o.f.NewFileuploadErrorFileWithContext(ctx, mods...)
m.AddErrorFiles(number, related).Apply(ctx, o)
})
}
func (m fileuploadFileMods) AddExistingErrors(existingModels ...*models.FileuploadError) FileuploadFileMod {
func (m fileuploadFileMods) AddExistingErrorFiles(existingModels ...*models.FileuploadErrorFile) FileuploadFileMod {
return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) {
for _, em := range existingModels {
o.r.Errors = append(o.r.Errors, &fileuploadFileRErrorsR{
o: o.f.FromExistingFileuploadError(em),
o.r.ErrorFiles = append(o.r.ErrorFiles, &fileuploadFileRErrorFilesR{
o: o.f.FromExistingFileuploadErrorFile(em),
})
}
})
}
func (m fileuploadFileMods) WithoutErrors() FileuploadFileMod {
func (m fileuploadFileMods) WithoutErrorFiles() FileuploadFileMod {
return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) {
o.r.Errors = nil
o.r.ErrorFiles = nil
})
}

View file

@ -66,7 +66,7 @@ type organizationR struct {
Mosquitoinspections []*organizationRMosquitoinspectionsR
Pointlocations []*organizationRPointlocationsR
Polygonlocations []*organizationRPolygonlocationsR
Pools []*organizationRPoolsR
FieldseekerPool []*organizationRFieldseekerPoolR
Pooldetails []*organizationRPooldetailsR
Proposedtreatmentareas []*organizationRProposedtreatmentareasR
Qamosquitoinspections []*organizationRQamosquitoinspectionsR
@ -89,6 +89,7 @@ type organizationR struct {
NoteAudios []*organizationRNoteAudiosR
NoteImages []*organizationRNoteImagesR
ImportDistrictGidDistrict *organizationRImportDistrictGidDistrictR
Pools []*organizationRPoolsR
Nuisances []*organizationRNuisancesR
PublicreportPool []*organizationRPublicreportPoolR
Quicks []*organizationRQuicksR
@ -143,7 +144,7 @@ type organizationRPolygonlocationsR struct {
number int
o *FieldseekerPolygonlocationTemplate
}
type organizationRPoolsR struct {
type organizationRFieldseekerPoolR struct {
number int
o *FieldseekerPoolTemplate
}
@ -234,6 +235,10 @@ type organizationRNoteImagesR struct {
type organizationRImportDistrictGidDistrictR struct {
o *ImportDistrictTemplate
}
type organizationRPoolsR struct {
number int
o *PoolTemplate
}
type organizationRNuisancesR struct {
number int
o *PublicreportNuisanceTemplate
@ -415,9 +420,9 @@ func (t OrganizationTemplate) setModelRels(o *models.Organization) {
o.R.Polygonlocations = rel
}
if t.r.Pools != nil {
if t.r.FieldseekerPool != nil {
rel := models.FieldseekerPoolSlice{}
for _, r := range t.r.Pools {
for _, r := range t.r.FieldseekerPool {
related := r.o.BuildMany(r.number)
for _, rel := range related {
rel.OrganizationID = o.ID // h2
@ -425,7 +430,7 @@ func (t OrganizationTemplate) setModelRels(o *models.Organization) {
}
rel = append(rel, related...)
}
o.R.Pools = rel
o.R.FieldseekerPool = rel
}
if t.r.Pooldetails != nil {
@ -708,6 +713,19 @@ func (t OrganizationTemplate) setModelRels(o *models.Organization) {
o.R.ImportDistrictGidDistrict = rel
}
if t.r.Pools != nil {
rel := models.PoolSlice{}
for _, r := range t.r.Pools {
related := r.o.BuildMany(r.number)
for _, rel := range related {
rel.OrganizationID = o.ID // h2
rel.R.Organization = o
}
rel = append(rel, related...)
}
o.R.Pools = rel
}
if t.r.Nuisances != nil {
rel := models.PublicreportNuisanceSlice{}
for _, r := range t.r.Nuisances {
@ -1123,19 +1141,19 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu
}
}
isPoolsDone, _ := organizationRelPoolsCtx.Value(ctx)
if !isPoolsDone && o.r.Pools != nil {
ctx = organizationRelPoolsCtx.WithValue(ctx, true)
for _, r := range o.r.Pools {
isFieldseekerPoolDone, _ := organizationRelFieldseekerPoolCtx.Value(ctx)
if !isFieldseekerPoolDone && o.r.FieldseekerPool != nil {
ctx = organizationRelFieldseekerPoolCtx.WithValue(ctx, true)
for _, r := range o.r.FieldseekerPool {
if r.o.alreadyPersisted {
m.R.Pools = append(m.R.Pools, r.o.Build())
m.R.FieldseekerPool = append(m.R.FieldseekerPool, r.o.Build())
} else {
rel12, err := r.o.CreateMany(ctx, exec, r.number)
if err != nil {
return err
}
err = m.AttachPools(ctx, exec, rel12...)
err = m.AttachFieldseekerPool(ctx, exec, rel12...)
if err != nil {
return err
}
@ -1582,6 +1600,26 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu
}
isPoolsDone, _ := organizationRelPoolsCtx.Value(ctx)
if !isPoolsDone && o.r.Pools != nil {
ctx = organizationRelPoolsCtx.WithValue(ctx, true)
for _, r := range o.r.Pools {
if r.o.alreadyPersisted {
m.R.Pools = append(m.R.Pools, r.o.Build())
} else {
rel35, err := r.o.CreateMany(ctx, exec, r.number)
if err != nil {
return err
}
err = m.AttachPools(ctx, exec, rel35...)
if err != nil {
return err
}
}
}
}
isNuisancesDone, _ := organizationRelNuisancesCtx.Value(ctx)
if !isNuisancesDone && o.r.Nuisances != nil {
ctx = organizationRelNuisancesCtx.WithValue(ctx, true)
@ -1589,12 +1627,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu
if r.o.alreadyPersisted {
m.R.Nuisances = append(m.R.Nuisances, r.o.Build())
} else {
rel35, err := r.o.CreateMany(ctx, exec, r.number)
rel36, err := r.o.CreateMany(ctx, exec, r.number)
if err != nil {
return err
}
err = m.AttachNuisances(ctx, exec, rel35...)
err = m.AttachNuisances(ctx, exec, rel36...)
if err != nil {
return err
}
@ -1609,12 +1647,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu
if r.o.alreadyPersisted {
m.R.PublicreportPool = append(m.R.PublicreportPool, r.o.Build())
} else {
rel36, err := r.o.CreateMany(ctx, exec, r.number)
rel37, err := r.o.CreateMany(ctx, exec, r.number)
if err != nil {
return err
}
err = m.AttachPublicreportPool(ctx, exec, rel36...)
err = m.AttachPublicreportPool(ctx, exec, rel37...)
if err != nil {
return err
}
@ -1629,12 +1667,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu
if r.o.alreadyPersisted {
m.R.Quicks = append(m.R.Quicks, r.o.Build())
} else {
rel37, err := r.o.CreateMany(ctx, exec, r.number)
rel38, err := r.o.CreateMany(ctx, exec, r.number)
if err != nil {
return err
}
err = m.AttachQuicks(ctx, exec, rel37...)
err = m.AttachQuicks(ctx, exec, rel38...)
if err != nil {
return err
}
@ -1649,12 +1687,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu
if r.o.alreadyPersisted {
m.R.User = append(m.R.User, r.o.Build())
} else {
rel38, err := r.o.CreateMany(ctx, exec, r.number)
rel39, err := r.o.CreateMany(ctx, exec, r.number)
if err != nil {
return err
}
err = m.AttachUser(ctx, exec, rel38...)
err = m.AttachUser(ctx, exec, rel39...)
if err != nil {
return err
}
@ -2819,51 +2857,51 @@ func (m organizationMods) WithoutPolygonlocations() OrganizationMod {
})
}
func (m organizationMods) WithPools(number int, related *FieldseekerPoolTemplate) OrganizationMod {
func (m organizationMods) WithFieldseekerPool(number int, related *FieldseekerPoolTemplate) OrganizationMod {
return OrganizationModFunc(func(ctx context.Context, o *OrganizationTemplate) {
o.r.Pools = []*organizationRPoolsR{{
o.r.FieldseekerPool = []*organizationRFieldseekerPoolR{{
number: number,
o: related,
}}
})
}
func (m organizationMods) WithNewPools(number int, mods ...FieldseekerPoolMod) OrganizationMod {
func (m organizationMods) WithNewFieldseekerPool(number int, mods ...FieldseekerPoolMod) OrganizationMod {
return OrganizationModFunc(func(ctx context.Context, o *OrganizationTemplate) {
related := o.f.NewFieldseekerPoolWithContext(ctx, mods...)
m.WithPools(number, related).Apply(ctx, o)
m.WithFieldseekerPool(number, related).Apply(ctx, o)
})
}
func (m organizationMods) AddPools(number int, related *FieldseekerPoolTemplate) OrganizationMod {
func (m organizationMods) AddFieldseekerPool(number int, related *FieldseekerPoolTemplate) OrganizationMod {
return OrganizationModFunc(func(ctx context.Context, o *OrganizationTemplate) {
o.r.Pools = append(o.r.Pools, &organizationRPoolsR{
o.r.FieldseekerPool = append(o.r.FieldseekerPool, &organizationRFieldseekerPoolR{
number: number,
o: related,
})
})
}
func (m organizationMods) AddNewPools(number int, mods ...FieldseekerPoolMod) OrganizationMod {
func (m organizationMods) AddNewFieldseekerPool(number int, mods ...FieldseekerPoolMod) OrganizationMod {
return OrganizationModFunc(func(ctx context.Context, o *OrganizationTemplate) {
related := o.f.NewFieldseekerPoolWithContext(ctx, mods...)
m.AddPools(number, related).Apply(ctx, o)
m.AddFieldseekerPool(number, related).Apply(ctx, o)
})
}
func (m organizationMods) AddExistingPools(existingModels ...*models.FieldseekerPool) OrganizationMod {
func (m organizationMods) AddExistingFieldseekerPool(existingModels ...*models.FieldseekerPool) OrganizationMod {
return OrganizationModFunc(func(ctx context.Context, o *OrganizationTemplate) {
for _, em := range existingModels {
o.r.Pools = append(o.r.Pools, &organizationRPoolsR{
o.r.FieldseekerPool = append(o.r.FieldseekerPool, &organizationRFieldseekerPoolR{
o: o.f.FromExistingFieldseekerPool(em),
})
}
})
}
func (m organizationMods) WithoutPools() OrganizationMod {
func (m organizationMods) WithoutFieldseekerPool() OrganizationMod {
return OrganizationModFunc(func(ctx context.Context, o *OrganizationTemplate) {
o.r.Pools = nil
o.r.FieldseekerPool = nil
})
}
@ -3875,6 +3913,54 @@ func (m organizationMods) WithoutNoteImages() OrganizationMod {
})
}
func (m organizationMods) WithPools(number int, related *PoolTemplate) OrganizationMod {
return OrganizationModFunc(func(ctx context.Context, o *OrganizationTemplate) {
o.r.Pools = []*organizationRPoolsR{{
number: number,
o: related,
}}
})
}
func (m organizationMods) WithNewPools(number int, mods ...PoolMod) OrganizationMod {
return OrganizationModFunc(func(ctx context.Context, o *OrganizationTemplate) {
related := o.f.NewPoolWithContext(ctx, mods...)
m.WithPools(number, related).Apply(ctx, o)
})
}
func (m organizationMods) AddPools(number int, related *PoolTemplate) OrganizationMod {
return OrganizationModFunc(func(ctx context.Context, o *OrganizationTemplate) {
o.r.Pools = append(o.r.Pools, &organizationRPoolsR{
number: number,
o: related,
})
})
}
func (m organizationMods) AddNewPools(number int, mods ...PoolMod) OrganizationMod {
return OrganizationModFunc(func(ctx context.Context, o *OrganizationTemplate) {
related := o.f.NewPoolWithContext(ctx, mods...)
m.AddPools(number, related).Apply(ctx, o)
})
}
func (m organizationMods) AddExistingPools(existingModels ...*models.Pool) OrganizationMod {
return OrganizationModFunc(func(ctx context.Context, o *OrganizationTemplate) {
for _, em := range existingModels {
o.r.Pools = append(o.r.Pools, &organizationRPoolsR{
o: o.f.FromExistingPool(em),
})
}
})
}
func (m organizationMods) WithoutPools() OrganizationMod {
return OrganizationModFunc(func(ctx context.Context, o *OrganizationTemplate) {
o.r.Pools = nil
})
}
func (m organizationMods) WithNuisances(number int, related *PublicreportNuisanceTemplate) OrganizationMod {
return OrganizationModFunc(func(ctx context.Context, o *OrganizationTemplate) {
o.r.Nuisances = []*organizationRNuisancesR{{

1121
db/factory/pool.bob.go Normal file

File diff suppressed because it is too large Load diff

View file

@ -66,6 +66,7 @@ type userR struct {
DeletorNoteImages []*userRDeletorNoteImagesR
UserNotifications []*userRUserNotificationsR
UserOauthTokens []*userRUserOauthTokensR
CreatorPools []*userRCreatorPoolsR
Organization *userROrganizationR
}
@ -101,6 +102,10 @@ type userRUserOauthTokensR struct {
number int
o *OauthTokenTemplate
}
// userRCreatorPoolsR describes a batch of `number` pools to build from the
// template `o`, attached to a user as that user's created pools.
type userRCreatorPoolsR struct {
	number int
	o *PoolTemplate
}
// userROrganizationR holds the template for the organization a user belongs to.
type userROrganizationR struct {
	o *OrganizationTemplate
}
@ -219,6 +224,19 @@ func (t UserTemplate) setModelRels(o *models.User) {
o.R.UserOauthTokens = rel
}
if t.r.CreatorPools != nil {
rel := models.PoolSlice{}
for _, r := range t.r.CreatorPools {
related := r.o.BuildMany(r.number)
for _, rel := range related {
rel.CreatorID = o.ID // h2
rel.R.CreatorUser = o
}
rel = append(rel, related...)
}
o.R.CreatorPools = rel
}
if t.r.Organization != nil {
rel := t.r.Organization.o.Build()
rel.R.User = append(rel.R.User, o)
@ -546,6 +564,26 @@ func (o *UserTemplate) insertOptRels(ctx context.Context, exec bob.Executor, m *
}
}
isCreatorPoolsDone, _ := userRelCreatorPoolsCtx.Value(ctx)
if !isCreatorPoolsDone && o.r.CreatorPools != nil {
ctx = userRelCreatorPoolsCtx.WithValue(ctx, true)
for _, r := range o.r.CreatorPools {
if r.o.alreadyPersisted {
m.R.CreatorPools = append(m.R.CreatorPools, r.o.Build())
} else {
rel8, err := r.o.CreateMany(ctx, exec, r.number)
if err != nil {
return err
}
err = m.AttachCreatorPools(ctx, exec, rel8...)
if err != nil {
return err
}
}
}
}
return err
}
@ -560,25 +598,25 @@ func (o *UserTemplate) Create(ctx context.Context, exec bob.Executor) (*models.U
UserMods.WithNewOrganization().Apply(ctx, o)
}
var rel8 *models.Organization
var rel9 *models.Organization
if o.r.Organization.o.alreadyPersisted {
rel8 = o.r.Organization.o.Build()
rel9 = o.r.Organization.o.Build()
} else {
rel8, err = o.r.Organization.o.Create(ctx, exec)
rel9, err = o.r.Organization.o.Create(ctx, exec)
if err != nil {
return nil, err
}
}
opt.OrganizationID = omit.From(rel8.ID)
opt.OrganizationID = omit.From(rel9.ID)
m, err := models.Users.Insert(opt).One(ctx, exec)
if err != nil {
return nil, err
}
m.R.Organization = rel8
m.R.Organization = rel9
if err := o.insertOptRels(ctx, exec, m); err != nil {
return nil, err
@ -1603,3 +1641,51 @@ func (m userMods) WithoutUserOauthTokens() UserMod {
o.r.UserOauthTokens = nil
})
}
// WithCreatorPools replaces the template's CreatorPools relation with a single
// entry that builds `number` pools from the given pool template.
func (m userMods) WithCreatorPools(number int, related *PoolTemplate) UserMod {
	return UserModFunc(func(ctx context.Context, o *UserTemplate) {
		entry := &userRCreatorPoolsR{number: number, o: related}
		o.r.CreatorPools = []*userRCreatorPoolsR{entry}
	})
}
// WithNewCreatorPools builds a fresh PoolTemplate from the given mods and
// installs it as the sole CreatorPools relation, building `number` pools.
func (m userMods) WithNewCreatorPools(number int, mods ...PoolMod) UserMod {
	return UserModFunc(func(ctx context.Context, o *UserTemplate) {
		m.WithCreatorPools(number, o.f.NewPoolWithContext(ctx, mods...)).Apply(ctx, o)
	})
}
// AddCreatorPools appends one more CreatorPools relation entry that builds
// `number` pools from the given pool template.
func (m userMods) AddCreatorPools(number int, related *PoolTemplate) UserMod {
	return UserModFunc(func(ctx context.Context, o *UserTemplate) {
		entry := &userRCreatorPoolsR{number: number, o: related}
		o.r.CreatorPools = append(o.r.CreatorPools, entry)
	})
}
// AddNewCreatorPools builds a fresh PoolTemplate from the given mods and
// appends it as an additional CreatorPools entry building `number` pools.
func (m userMods) AddNewCreatorPools(number int, mods ...PoolMod) UserMod {
	return UserModFunc(func(ctx context.Context, o *UserTemplate) {
		m.AddCreatorPools(number, o.f.NewPoolWithContext(ctx, mods...)).Apply(ctx, o)
	})
}
// AddExistingCreatorPools appends already-persisted pool models to the
// template's CreatorPools relation, wrapping each in a model-backed template.
func (m userMods) AddExistingCreatorPools(existingModels ...*models.Pool) UserMod {
	return UserModFunc(func(ctx context.Context, o *UserTemplate) {
		for _, model := range existingModels {
			entry := &userRCreatorPoolsR{o: o.f.FromExistingPool(model)}
			o.r.CreatorPools = append(o.r.CreatorPools, entry)
		}
	})
}
// WithoutCreatorPools clears any CreatorPools relations from the user template.
func (m userMods) WithoutCreatorPools() UserMod {
	return UserModFunc(func(ctx context.Context, o *UserTemplate) {
		o.r.CreatorPools = nil
	})
}

View file

@ -1,8 +1,9 @@
-- +goose Up
CREATE SCHEMA fileupload;
-- Lifecycle states for an uploaded file.
-- Diff residue removed: the old two-value list ('uploaded','parsed') was
-- interleaved with the new three-value list; only the new list is kept.
CREATE TYPE fileupload.FileStatusType AS ENUM (
	'error',
	'parsed',
	'uploaded'
);
CREATE TYPE fileupload.CSVType AS ENUM (
'PoolList'
@ -21,19 +22,56 @@ CREATE TABLE fileupload.file (
PRIMARY KEY(id)
);
-- CSV metadata for an uploaded file (one row per file; PK is the file id).
CREATE TABLE fileupload.csv (
-- NOTE(review): presumably set when the CSV's proposed rows are committed
-- (see pool.committed) — confirm with the upload workflow.
committed TIMESTAMP WITHOUT TIME ZONE,
file_id INTEGER REFERENCES fileupload.file(id) NOT NULL,
rowcount INTEGER NOT NULL,
type_ fileupload.CSVType NOT NULL,
PRIMARY KEY (file_id)
);
-- Errors attached to an uploaded file as a whole (row-level CSV errors live
-- in fileupload.error_csv).
-- Diff residue removed: the stale table header `fileupload.error (` from the
-- pre-rename schema preceded the new `error_file` header.
CREATE TABLE fileupload.error_file (
	file_id INTEGER REFERENCES fileupload.file(id) NOT NULL,
	id SERIAL,
	message TEXT NOT NULL,
	PRIMARY KEY (id)
);
-- Errors tied to a specific cell of an uploaded CSV, addressed by line/col.
CREATE TABLE fileupload.error_csv (
col INTEGER NOT NULL,
csv_file_id INTEGER REFERENCES fileupload.csv(file_id) NOT NULL,
id SERIAL,
line INTEGER NOT NULL,
message TEXT NOT NULL,
PRIMARY KEY (id)
);
-- Observed water condition of a pool.
CREATE TYPE PoolConditionType AS ENUM (
'green',
'murky',
'blue',
'unknown'
);
-- Pools tracked per organization; versioned rows (PRIMARY KEY id + version).
CREATE TABLE pool (
address_city TEXT NOT NULL,
address_postal_code TEXT NOT NULL,
address_street TEXT NOT NULL,
condition PoolConditionType NOT NULL,
created TIMESTAMP WITHOUT TIME ZONE NOT NULL,
creator_id INTEGER REFERENCES user_(id) NOT NULL,
-- Soft delete marker; NULL means the row is live.
deleted TIMESTAMP WITHOUT TIME ZONE,
committed BOOLEAN NOT NULL, -- Whether this row is committed, or merely proposed pending a CSV file commit
id SERIAL,
notes TEXT NOT NULL,
organization_id INTEGER REFERENCES organization(id) NOT NULL,
property_owner_name TEXT NOT NULL,
property_owner_phone comms.phone,
resident_owned BOOLEAN,
resident_phone comms.phone,
version integer,
PRIMARY KEY (id, version)
);
-- +goose Down
-- Drop in reverse dependency order.
-- Diff residue removed: stale `DROP TABLE fileupload.error;` from before the
-- error_file/error_csv split.
DROP TABLE pool;
DROP TYPE poolconditiontype;
DROP TABLE fileupload.error_csv;
DROP TABLE fileupload.error_file;
DROP TABLE fileupload.csv;
DROP TABLE fileupload.file;
DROP TYPE fileupload.CSVType;

View file

@ -25,6 +25,7 @@ type preloadCounts struct {
CommsEmailContact commsEmailContactCountPreloader
CommsEmailTemplate commsEmailTemplateCountPreloader
CommsPhone commsPhoneCountPreloader
FileuploadCSV fileuploadCSVCountPreloader
FileuploadFile fileuploadFileCountPreloader
NoteAudio noteAudioCountPreloader
NoteImage noteImageCountPreloader
@ -42,6 +43,7 @@ func getPreloadCount() preloadCounts {
CommsEmailContact: buildCommsEmailContactCountPreloader(),
CommsEmailTemplate: buildCommsEmailTemplateCountPreloader(),
CommsPhone: buildCommsPhoneCountPreloader(),
FileuploadCSV: buildFileuploadCSVCountPreloader(),
FileuploadFile: buildFileuploadFileCountPreloader(),
NoteAudio: buildNoteAudioCountPreloader(),
NoteImage: buildNoteImageCountPreloader(),
@ -59,6 +61,7 @@ type thenLoadCounts[Q orm.Loadable] struct {
CommsEmailContact commsEmailContactCountThenLoader[Q]
CommsEmailTemplate commsEmailTemplateCountThenLoader[Q]
CommsPhone commsPhoneCountThenLoader[Q]
FileuploadCSV fileuploadCSVCountThenLoader[Q]
FileuploadFile fileuploadFileCountThenLoader[Q]
NoteAudio noteAudioCountThenLoader[Q]
NoteImage noteImageCountThenLoader[Q]
@ -76,6 +79,7 @@ func getThenLoadCount[Q orm.Loadable]() thenLoadCounts[Q] {
CommsEmailContact: buildCommsEmailContactCountThenLoader[Q](),
CommsEmailTemplate: buildCommsEmailTemplateCountThenLoader[Q](),
CommsPhone: buildCommsPhoneCountThenLoader[Q](),
FileuploadCSV: buildFileuploadCSVCountThenLoader[Q](),
FileuploadFile: buildFileuploadFileCountThenLoader[Q](),
NoteAudio: buildNoteAudioCountThenLoader[Q](),
NoteImage: buildNoteImageCountThenLoader[Q](),

View file

@ -71,7 +71,8 @@ type joins[Q dialect.Joinable] struct {
FieldseekerZones2s joinSet[fieldseekerZones2Joins[Q]]
FieldseekerSyncs joinSet[fieldseekerSyncJoins[Q]]
FileuploadCSVS joinSet[fileuploadCSVJoins[Q]]
FileuploadErrors joinSet[fileuploadErrorJoins[Q]]
FileuploadErrorCSVS joinSet[fileuploadErrorCSVJoins[Q]]
FileuploadErrorFiles joinSet[fileuploadErrorFileJoins[Q]]
FileuploadFiles joinSet[fileuploadFileJoins[Q]]
H3Aggregations joinSet[h3AggregationJoins[Q]]
ImportDistricts joinSet[importDistrictJoins[Q]]
@ -84,6 +85,7 @@ type joins[Q dialect.Joinable] struct {
Notifications joinSet[notificationJoins[Q]]
OauthTokens joinSet[oauthTokenJoins[Q]]
Organizations joinSet[organizationJoins[Q]]
Pools joinSet[poolJoins[Q]]
PublicreportImages joinSet[publicreportImageJoins[Q]]
PublicreportImageExifs joinSet[publicreportImageExifJoins[Q]]
PublicreportNotifyEmailNuisances joinSet[publicreportNotifyEmailNuisanceJoins[Q]]
@ -148,7 +150,8 @@ func getJoins[Q dialect.Joinable]() joins[Q] {
FieldseekerZones2s: buildJoinSet[fieldseekerZones2Joins[Q]](FieldseekerZones2s.Columns, buildFieldseekerZones2Joins),
FieldseekerSyncs: buildJoinSet[fieldseekerSyncJoins[Q]](FieldseekerSyncs.Columns, buildFieldseekerSyncJoins),
FileuploadCSVS: buildJoinSet[fileuploadCSVJoins[Q]](FileuploadCSVS.Columns, buildFileuploadCSVJoins),
FileuploadErrors: buildJoinSet[fileuploadErrorJoins[Q]](FileuploadErrors.Columns, buildFileuploadErrorJoins),
FileuploadErrorCSVS: buildJoinSet[fileuploadErrorCSVJoins[Q]](FileuploadErrorCSVS.Columns, buildFileuploadErrorCSVJoins),
FileuploadErrorFiles: buildJoinSet[fileuploadErrorFileJoins[Q]](FileuploadErrorFiles.Columns, buildFileuploadErrorFileJoins),
FileuploadFiles: buildJoinSet[fileuploadFileJoins[Q]](FileuploadFiles.Columns, buildFileuploadFileJoins),
H3Aggregations: buildJoinSet[h3AggregationJoins[Q]](H3Aggregations.Columns, buildH3AggregationJoins),
ImportDistricts: buildJoinSet[importDistrictJoins[Q]](ImportDistricts.Columns, buildImportDistrictJoins),
@ -161,6 +164,7 @@ func getJoins[Q dialect.Joinable]() joins[Q] {
Notifications: buildJoinSet[notificationJoins[Q]](Notifications.Columns, buildNotificationJoins),
OauthTokens: buildJoinSet[oauthTokenJoins[Q]](OauthTokens.Columns, buildOauthTokenJoins),
Organizations: buildJoinSet[organizationJoins[Q]](Organizations.Columns, buildOrganizationJoins),
Pools: buildJoinSet[poolJoins[Q]](Pools.Columns, buildPoolJoins),
PublicreportImages: buildJoinSet[publicreportImageJoins[Q]](PublicreportImages.Columns, buildPublicreportImageJoins),
PublicreportImageExifs: buildJoinSet[publicreportImageExifJoins[Q]](PublicreportImageExifs.Columns, buildPublicreportImageExifJoins),
PublicreportNotifyEmailNuisances: buildJoinSet[publicreportNotifyEmailNuisanceJoins[Q]](PublicreportNotifyEmailNuisances.Columns, buildPublicreportNotifyEmailNuisanceJoins),

View file

@ -56,7 +56,8 @@ type preloaders struct {
FieldseekerZones2 fieldseekerZones2Preloader
FieldseekerSync fieldseekerSyncPreloader
FileuploadCSV fileuploadCSVPreloader
FileuploadError fileuploadErrorPreloader
FileuploadErrorCSV fileuploadErrorCSVPreloader
FileuploadErrorFile fileuploadErrorFilePreloader
FileuploadFile fileuploadFilePreloader
H3Aggregation h3AggregationPreloader
ImportDistrict importDistrictPreloader
@ -69,6 +70,7 @@ type preloaders struct {
Notification notificationPreloader
OauthToken oauthTokenPreloader
Organization organizationPreloader
Pool poolPreloader
PublicreportImage publicreportImagePreloader
PublicreportImageExif publicreportImageExifPreloader
PublicreportNotifyEmailNuisance publicreportNotifyEmailNuisancePreloader
@ -125,7 +127,8 @@ func getPreloaders() preloaders {
FieldseekerZones2: buildFieldseekerZones2Preloader(),
FieldseekerSync: buildFieldseekerSyncPreloader(),
FileuploadCSV: buildFileuploadCSVPreloader(),
FileuploadError: buildFileuploadErrorPreloader(),
FileuploadErrorCSV: buildFileuploadErrorCSVPreloader(),
FileuploadErrorFile: buildFileuploadErrorFilePreloader(),
FileuploadFile: buildFileuploadFilePreloader(),
H3Aggregation: buildH3AggregationPreloader(),
ImportDistrict: buildImportDistrictPreloader(),
@ -138,6 +141,7 @@ func getPreloaders() preloaders {
Notification: buildNotificationPreloader(),
OauthToken: buildOauthTokenPreloader(),
Organization: buildOrganizationPreloader(),
Pool: buildPoolPreloader(),
PublicreportImage: buildPublicreportImagePreloader(),
PublicreportImageExif: buildPublicreportImageExifPreloader(),
PublicreportNotifyEmailNuisance: buildPublicreportNotifyEmailNuisancePreloader(),
@ -200,7 +204,8 @@ type thenLoaders[Q orm.Loadable] struct {
FieldseekerZones2 fieldseekerZones2ThenLoader[Q]
FieldseekerSync fieldseekerSyncThenLoader[Q]
FileuploadCSV fileuploadCSVThenLoader[Q]
FileuploadError fileuploadErrorThenLoader[Q]
FileuploadErrorCSV fileuploadErrorCSVThenLoader[Q]
FileuploadErrorFile fileuploadErrorFileThenLoader[Q]
FileuploadFile fileuploadFileThenLoader[Q]
H3Aggregation h3AggregationThenLoader[Q]
ImportDistrict importDistrictThenLoader[Q]
@ -213,6 +218,7 @@ type thenLoaders[Q orm.Loadable] struct {
Notification notificationThenLoader[Q]
OauthToken oauthTokenThenLoader[Q]
Organization organizationThenLoader[Q]
Pool poolThenLoader[Q]
PublicreportImage publicreportImageThenLoader[Q]
PublicreportImageExif publicreportImageExifThenLoader[Q]
PublicreportNotifyEmailNuisance publicreportNotifyEmailNuisanceThenLoader[Q]
@ -269,7 +275,8 @@ func getThenLoaders[Q orm.Loadable]() thenLoaders[Q] {
FieldseekerZones2: buildFieldseekerZones2ThenLoader[Q](),
FieldseekerSync: buildFieldseekerSyncThenLoader[Q](),
FileuploadCSV: buildFileuploadCSVThenLoader[Q](),
FileuploadError: buildFileuploadErrorThenLoader[Q](),
FileuploadErrorCSV: buildFileuploadErrorCSVThenLoader[Q](),
FileuploadErrorFile: buildFileuploadErrorFileThenLoader[Q](),
FileuploadFile: buildFileuploadFileThenLoader[Q](),
H3Aggregation: buildH3AggregationThenLoader[Q](),
ImportDistrict: buildImportDistrictThenLoader[Q](),
@ -282,6 +289,7 @@ func getThenLoaders[Q orm.Loadable]() thenLoaders[Q] {
Notification: buildNotificationThenLoader[Q](),
OauthToken: buildOauthTokenThenLoader[Q](),
Organization: buildOrganizationThenLoader[Q](),
Pool: buildPoolThenLoader[Q](),
PublicreportImage: buildPublicreportImageThenLoader[Q](),
PublicreportImageExif: buildPublicreportImageExifThenLoader[Q](),
PublicreportNotifyEmailNuisance: buildPublicreportNotifyEmailNuisanceThenLoader[Q](),

View file

@ -56,7 +56,8 @@ func Where[Q psql.Filterable]() struct {
FieldseekerZones2s fieldseekerZones2Where[Q]
FieldseekerSyncs fieldseekerSyncWhere[Q]
FileuploadCSVS fileuploadCSVWhere[Q]
FileuploadErrors fileuploadErrorWhere[Q]
FileuploadErrorCSVS fileuploadErrorCSVWhere[Q]
FileuploadErrorFiles fileuploadErrorFileWhere[Q]
FileuploadFiles fileuploadFileWhere[Q]
GeographyColumns geographyColumnWhere[Q]
GeometryColumns geometryColumnWhere[Q]
@ -72,6 +73,7 @@ func Where[Q psql.Filterable]() struct {
Notifications notificationWhere[Q]
OauthTokens oauthTokenWhere[Q]
Organizations organizationWhere[Q]
Pools poolWhere[Q]
PublicreportImages publicreportImageWhere[Q]
PublicreportImageExifs publicreportImageExifWhere[Q]
PublicreportNotifyEmailNuisances publicreportNotifyEmailNuisanceWhere[Q]
@ -131,7 +133,8 @@ func Where[Q psql.Filterable]() struct {
FieldseekerZones2s fieldseekerZones2Where[Q]
FieldseekerSyncs fieldseekerSyncWhere[Q]
FileuploadCSVS fileuploadCSVWhere[Q]
FileuploadErrors fileuploadErrorWhere[Q]
FileuploadErrorCSVS fileuploadErrorCSVWhere[Q]
FileuploadErrorFiles fileuploadErrorFileWhere[Q]
FileuploadFiles fileuploadFileWhere[Q]
GeographyColumns geographyColumnWhere[Q]
GeometryColumns geometryColumnWhere[Q]
@ -147,6 +150,7 @@ func Where[Q psql.Filterable]() struct {
Notifications notificationWhere[Q]
OauthTokens oauthTokenWhere[Q]
Organizations organizationWhere[Q]
Pools poolWhere[Q]
PublicreportImages publicreportImageWhere[Q]
PublicreportImageExifs publicreportImageExifWhere[Q]
PublicreportNotifyEmailNuisances publicreportNotifyEmailNuisanceWhere[Q]
@ -205,7 +209,8 @@ func Where[Q psql.Filterable]() struct {
FieldseekerZones2s: buildFieldseekerZones2Where[Q](FieldseekerZones2s.Columns),
FieldseekerSyncs: buildFieldseekerSyncWhere[Q](FieldseekerSyncs.Columns),
FileuploadCSVS: buildFileuploadCSVWhere[Q](FileuploadCSVS.Columns),
FileuploadErrors: buildFileuploadErrorWhere[Q](FileuploadErrors.Columns),
FileuploadErrorCSVS: buildFileuploadErrorCSVWhere[Q](FileuploadErrorCSVS.Columns),
FileuploadErrorFiles: buildFileuploadErrorFileWhere[Q](FileuploadErrorFiles.Columns),
FileuploadFiles: buildFileuploadFileWhere[Q](FileuploadFiles.Columns),
GeographyColumns: buildGeographyColumnWhere[Q](GeographyColumns.Columns),
GeometryColumns: buildGeometryColumnWhere[Q](GeometryColumns.Columns),
@ -221,6 +226,7 @@ func Where[Q psql.Filterable]() struct {
Notifications: buildNotificationWhere[Q](Notifications.Columns),
OauthTokens: buildOauthTokenWhere[Q](OauthTokens.Columns),
Organizations: buildOrganizationWhere[Q](Organizations.Columns),
Pools: buildPoolWhere[Q](Pools.Columns),
PublicreportImages: buildPublicreportImageWhere[Q](PublicreportImages.Columns),
PublicreportImageExifs: buildPublicreportImageExifWhere[Q](PublicreportImageExifs.Columns),
PublicreportNotifyEmailNuisances: buildPublicreportNotifyEmailNuisanceWhere[Q](PublicreportNotifyEmailNuisances.Columns),

View file

@ -1153,7 +1153,7 @@ func (fieldseekerPool0 *FieldseekerPool) InsertOrganization(ctx context.Context,
fieldseekerPool0.R.Organization = organization1
organization1.R.Pools = append(organization1.R.Pools, fieldseekerPool0)
organization1.R.FieldseekerPool = append(organization1.R.FieldseekerPool, fieldseekerPool0)
return nil
}
@ -1168,7 +1168,7 @@ func (fieldseekerPool0 *FieldseekerPool) AttachOrganization(ctx context.Context,
fieldseekerPool0.R.Organization = organization1
organization1.R.Pools = append(organization1.R.Pools, fieldseekerPool0)
organization1.R.FieldseekerPool = append(organization1.R.FieldseekerPool, fieldseekerPool0)
return nil
}
@ -1264,7 +1264,7 @@ func (o *FieldseekerPool) Preload(name string, retrieved any) error {
o.R.Organization = rel
if rel != nil {
rel.R.Pools = FieldseekerPoolSlice{o}
rel.R.FieldseekerPool = FieldseekerPoolSlice{o}
}
return nil
default:
@ -1327,7 +1327,7 @@ func (o *FieldseekerPool) LoadOrganization(ctx context.Context, exec bob.Executo
return err
}
related.R.Pools = FieldseekerPoolSlice{o}
related.R.FieldseekerPool = FieldseekerPoolSlice{o}
o.R.Organization = related
return nil
@ -1355,7 +1355,7 @@ func (os FieldseekerPoolSlice) LoadOrganization(ctx context.Context, exec bob.Ex
continue
}
rel.R.Pools = append(rel.R.Pools, o)
rel.R.FieldseekerPool = append(rel.R.FieldseekerPool, o)
o.R.Organization = rel
break

View file

@ -7,6 +7,7 @@ import (
"context"
"fmt"
"io"
"time"
"github.com/Gleipnir-Technology/bob"
"github.com/Gleipnir-Technology/bob/dialect/psql"
@ -19,15 +20,21 @@ import (
"github.com/Gleipnir-Technology/bob/orm"
"github.com/Gleipnir-Technology/bob/types/pgtypes"
enums "github.com/Gleipnir-Technology/nidus-sync/db/enums"
"github.com/aarondl/opt/null"
"github.com/aarondl/opt/omit"
"github.com/aarondl/opt/omitnull"
)
// FileuploadCSV is an object representing the database table.
type FileuploadCSV struct {
FileID int32 `db:"file_id,pk" `
Type enums.FileuploadCsvtype `db:"type_" `
Committed null.Val[time.Time] `db:"committed" `
FileID int32 `db:"file_id,pk" `
Rowcount int32 `db:"rowcount" `
Type enums.FileuploadCsvtype `db:"type_" `
R fileuploadCSVR `db:"-" `
C fileuploadCSVC `db:"-" `
}
// FileuploadCSVSlice is an alias for a slice of pointers to FileuploadCSV.
@ -42,16 +49,19 @@ type FileuploadCSVSQuery = *psql.ViewQuery[*FileuploadCSV, FileuploadCSVSlice]
// fileuploadCSVR is where relationships are stored.
// Diff residue removed: the File field appeared twice (old and new line).
type fileuploadCSVR struct {
	File             *FileuploadFile         // fileupload.csv.csv_file_id_fkey
	CSVFileErrorCSVS FileuploadErrorCSVSlice // fileupload.error_csv.error_csv_csv_file_id_fkey
}
// buildFileuploadCSVColumns builds the column expressions for fileupload.csv
// qualified by the given table alias.
// Diff residue removed: both the old two-column and new four-column
// NewColumnsExpr argument lists were present; only the new list is kept.
func buildFileuploadCSVColumns(alias string) fileuploadCSVColumns {
	return fileuploadCSVColumns{
		ColumnsExpr: expr.NewColumnsExpr(
			"committed", "file_id", "rowcount", "type_",
		).WithParent("fileupload.csv"),
		tableAlias: alias,
		Committed:  psql.Quote(alias, "committed"),
		FileID:     psql.Quote(alias, "file_id"),
		Rowcount:   psql.Quote(alias, "rowcount"),
		Type:       psql.Quote(alias, "type_"),
	}
}
@ -59,7 +69,9 @@ func buildFileuploadCSVColumns(alias string) fileuploadCSVColumns {
// fileuploadCSVColumns exposes one psql.Expression per column of
// fileupload.csv, each quoted with tableAlias.
type fileuploadCSVColumns struct {
	expr.ColumnsExpr
	tableAlias string
	Committed psql.Expression
	FileID psql.Expression
	Rowcount psql.Expression
	Type psql.Expression
}
@ -75,15 +87,23 @@ func (fileuploadCSVColumns) AliasedAs(alias string) fileuploadCSVColumns {
// All values are optional, and do not have to be set
// Generated columns are not included
type FileuploadCSVSetter struct {
FileID omit.Val[int32] `db:"file_id,pk" `
Type omit.Val[enums.FileuploadCsvtype] `db:"type_" `
Committed omitnull.Val[time.Time] `db:"committed" `
FileID omit.Val[int32] `db:"file_id,pk" `
Rowcount omit.Val[int32] `db:"rowcount" `
Type omit.Val[enums.FileuploadCsvtype] `db:"type_" `
}
func (s FileuploadCSVSetter) SetColumns() []string {
vals := make([]string, 0, 2)
vals := make([]string, 0, 4)
if !s.Committed.IsUnset() {
vals = append(vals, "committed")
}
if s.FileID.IsValue() {
vals = append(vals, "file_id")
}
if s.Rowcount.IsValue() {
vals = append(vals, "rowcount")
}
if s.Type.IsValue() {
vals = append(vals, "type_")
}
@ -91,9 +111,15 @@ func (s FileuploadCSVSetter) SetColumns() []string {
}
func (s FileuploadCSVSetter) Overwrite(t *FileuploadCSV) {
if !s.Committed.IsUnset() {
t.Committed = s.Committed.MustGetNull()
}
if s.FileID.IsValue() {
t.FileID = s.FileID.MustGet()
}
if s.Rowcount.IsValue() {
t.Rowcount = s.Rowcount.MustGet()
}
if s.Type.IsValue() {
t.Type = s.Type.MustGet()
}
@ -105,19 +131,31 @@ func (s *FileuploadCSVSetter) Apply(q *dialect.InsertQuery) {
})
q.AppendValues(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
vals := make([]bob.Expression, 2)
if s.FileID.IsValue() {
vals[0] = psql.Arg(s.FileID.MustGet())
vals := make([]bob.Expression, 4)
if !s.Committed.IsUnset() {
vals[0] = psql.Arg(s.Committed.MustGetNull())
} else {
vals[0] = psql.Raw("DEFAULT")
}
if s.Type.IsValue() {
vals[1] = psql.Arg(s.Type.MustGet())
if s.FileID.IsValue() {
vals[1] = psql.Arg(s.FileID.MustGet())
} else {
vals[1] = psql.Raw("DEFAULT")
}
if s.Rowcount.IsValue() {
vals[2] = psql.Arg(s.Rowcount.MustGet())
} else {
vals[2] = psql.Raw("DEFAULT")
}
if s.Type.IsValue() {
vals[3] = psql.Arg(s.Type.MustGet())
} else {
vals[3] = psql.Raw("DEFAULT")
}
return bob.ExpressSlice(ctx, w, d, start, vals, "", ", ", "")
}))
}
@ -127,7 +165,14 @@ func (s FileuploadCSVSetter) UpdateMod() bob.Mod[*dialect.UpdateQuery] {
}
func (s FileuploadCSVSetter) Expressions(prefix ...string) []bob.Expression {
exprs := make([]bob.Expression, 0, 2)
exprs := make([]bob.Expression, 0, 4)
if !s.Committed.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "committed")...),
psql.Arg(s.Committed),
}})
}
if s.FileID.IsValue() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
@ -136,6 +181,13 @@ func (s FileuploadCSVSetter) Expressions(prefix ...string) []bob.Expression {
}})
}
if s.Rowcount.IsValue() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "rowcount")...),
psql.Arg(s.Rowcount),
}})
}
if s.Type.IsValue() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "type_")...),
@ -393,6 +445,30 @@ func (os FileuploadCSVSlice) File(mods ...bob.Mod[*dialect.SelectQuery]) Fileupl
)...)
}
// CSVFileErrorCSVS starts a query for related objects on fileupload.error_csv
// scoped to this CSV's file id.
func (o *FileuploadCSV) CSVFileErrorCSVS(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadErrorCSVSQuery {
	byParent := sm.Where(FileuploadErrorCSVS.Columns.CSVFileID.EQ(psql.Arg(o.FileID)))
	return FileuploadErrorCSVS.Query(append(mods, byParent)...)
}
// CSVFileErrorCSVS starts a query for the error_csv rows belonging to any
// CSV in the slice, matching csv_file_id against the collected file ids.
func (os FileuploadCSVSlice) CSVFileErrorCSVS(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadErrorCSVSQuery {
	// Gather the primary key of every non-nil parent.
	ids := make(pgtypes.Array[int32], 0, len(os))
	for _, parent := range os {
		if parent != nil {
			ids = append(ids, parent.FileID)
		}
	}

	// Unnest the collected ids server-side and filter with IN.
	keySet := psql.Select(sm.Columns(
		psql.F("unnest", psql.Cast(psql.Arg(ids), "integer[]")),
	))
	inKeySet := sm.Where(psql.Group(FileuploadErrorCSVS.Columns.CSVFileID).OP("IN", keySet))
	return FileuploadErrorCSVS.Query(append(mods, inKeySet)...)
}
func attachFileuploadCSVFile0(ctx context.Context, exec bob.Executor, count int, fileuploadCSV0 *FileuploadCSV, fileuploadFile1 *FileuploadFile) (*FileuploadCSV, error) {
setter := &FileuploadCSVSetter{
FileID: omit.From(fileuploadFile1.ID),
@ -441,9 +517,79 @@ func (fileuploadCSV0 *FileuploadCSV) AttachFile(ctx context.Context, exec bob.Ex
return nil
}
// insertFileuploadCSVCSVFileErrorCSVS0 inserts error_csv rows as children of
// the given CSV, stamping each setter with the parent's file id first.
func insertFileuploadCSVCSVFileErrorCSVS0(ctx context.Context, exec bob.Executor, fileuploadErrorCSVS1 []*FileuploadErrorCSVSetter, fileuploadCSV0 *FileuploadCSV) (FileuploadErrorCSVSlice, error) {
	parentID := omit.From(fileuploadCSV0.FileID)
	for _, setter := range fileuploadErrorCSVS1 {
		setter.CSVFileID = parentID
	}

	inserted, err := FileuploadErrorCSVS.Insert(bob.ToMods(fileuploadErrorCSVS1...)).All(ctx, exec)
	if err != nil {
		return inserted, fmt.Errorf("insertFileuploadCSVCSVFileErrorCSVS0: %w", err)
	}
	return inserted, nil
}
// attachFileuploadCSVCSVFileErrorCSVS0 re-points existing error_csv rows at
// the given CSV by bulk-updating their csv_file_id.
func attachFileuploadCSVCSVFileErrorCSVS0(ctx context.Context, exec bob.Executor, count int, fileuploadErrorCSVS1 FileuploadErrorCSVSlice, fileuploadCSV0 *FileuploadCSV) (FileuploadErrorCSVSlice, error) {
	setter := FileuploadErrorCSVSetter{
		CSVFileID: omit.From(fileuploadCSV0.FileID),
	}
	if err := fileuploadErrorCSVS1.UpdateAll(ctx, exec, setter); err != nil {
		return nil, fmt.Errorf("attachFileuploadCSVCSVFileErrorCSVS0: %w", err)
	}
	return fileuploadErrorCSVS1, nil
}
// InsertCSVFileErrorCSVS inserts the given error_csv setters as children of
// this CSV and wires both sides of the relationship in memory.
func (fileuploadCSV0 *FileuploadCSV) InsertCSVFileErrorCSVS(ctx context.Context, exec bob.Executor, related ...*FileuploadErrorCSVSetter) error {
	if len(related) == 0 {
		return nil
	}

	inserted, err := insertFileuploadCSVCSVFileErrorCSVS0(ctx, exec, related, fileuploadCSV0)
	if err != nil {
		return err
	}

	fileuploadCSV0.R.CSVFileErrorCSVS = append(fileuploadCSV0.R.CSVFileErrorCSVS, inserted...)
	for _, child := range inserted {
		child.R.CSVFileCSV = fileuploadCSV0
	}
	return nil
}
// AttachCSVFileErrorCSVS attaches existing error_csv rows to this CSV and
// wires both sides of the relationship in memory.
func (fileuploadCSV0 *FileuploadCSV) AttachCSVFileErrorCSVS(ctx context.Context, exec bob.Executor, related ...*FileuploadErrorCSV) error {
	if len(related) == 0 {
		return nil
	}

	children := FileuploadErrorCSVSlice(related)
	if _, err := attachFileuploadCSVCSVFileErrorCSVS0(ctx, exec, len(related), children, fileuploadCSV0); err != nil {
		return err
	}

	fileuploadCSV0.R.CSVFileErrorCSVS = append(fileuploadCSV0.R.CSVFileErrorCSVS, children...)
	for _, child := range related {
		child.R.CSVFileCSV = fileuploadCSV0
	}
	return nil
}
// fileuploadCSVWhere provides typed WHERE helpers for each fileupload.csv column.
// Diff residue removed: the old FileID/Type-only field list was interleaved
// with the new four-field layout; only the new layout is kept.
type fileuploadCSVWhere[Q psql.Filterable] struct {
	Committed psql.WhereNullMod[Q, time.Time]
	FileID    psql.WhereMod[Q, int32]
	Rowcount  psql.WhereMod[Q, int32]
	Type      psql.WhereMod[Q, enums.FileuploadCsvtype]
}
func (fileuploadCSVWhere[Q]) AliasedAs(alias string) fileuploadCSVWhere[Q] {
@ -452,8 +598,10 @@ func (fileuploadCSVWhere[Q]) AliasedAs(alias string) fileuploadCSVWhere[Q] {
// buildFileuploadCSVWhere constructs the typed WHERE helpers from the given
// column expressions.
// Diff residue removed: old FileID/Type entries duplicated the new ones.
func buildFileuploadCSVWhere[Q psql.Filterable](cols fileuploadCSVColumns) fileuploadCSVWhere[Q] {
	return fileuploadCSVWhere[Q]{
		Committed: psql.WhereNull[Q, time.Time](cols.Committed),
		FileID:    psql.Where[Q, int32](cols.FileID),
		Rowcount:  psql.Where[Q, int32](cols.Rowcount),
		Type:      psql.Where[Q, enums.FileuploadCsvtype](cols.Type),
	}
}
@ -475,6 +623,20 @@ func (o *FileuploadCSV) Preload(name string, retrieved any) error {
rel.R.CSV = o
}
return nil
case "CSVFileErrorCSVS":
rels, ok := retrieved.(FileuploadErrorCSVSlice)
if !ok {
return fmt.Errorf("fileuploadCSV cannot load %T as %q", retrieved, name)
}
o.R.CSVFileErrorCSVS = rels
for _, rel := range rels {
if rel != nil {
rel.R.CSVFileCSV = o
}
}
return nil
default:
return fmt.Errorf("fileuploadCSV has no relationship %q", name)
}
@ -503,13 +665,17 @@ func buildFileuploadCSVPreloader() fileuploadCSVPreloader {
}
// fileuploadCSVThenLoader builds then-load loaders for each FileuploadCSV
// relationship.
// Diff residue removed: the File field appeared twice (old and new line).
type fileuploadCSVThenLoader[Q orm.Loadable] struct {
	File             func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
	CSVFileErrorCSVS func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
}
func buildFileuploadCSVThenLoader[Q orm.Loadable]() fileuploadCSVThenLoader[Q] {
type FileLoadInterface interface {
LoadFile(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
}
type CSVFileErrorCSVSLoadInterface interface {
LoadCSVFileErrorCSVS(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
}
return fileuploadCSVThenLoader[Q]{
File: thenLoadBuilder[Q](
@ -518,6 +684,12 @@ func buildFileuploadCSVThenLoader[Q orm.Loadable]() fileuploadCSVThenLoader[Q] {
return retrieved.LoadFile(ctx, exec, mods...)
},
),
CSVFileErrorCSVS: thenLoadBuilder[Q](
"CSVFileErrorCSVS",
func(ctx context.Context, exec bob.Executor, retrieved CSVFileErrorCSVSLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
return retrieved.LoadCSVFileErrorCSVS(ctx, exec, mods...)
},
),
}
}
@ -573,9 +745,164 @@ func (os FileuploadCSVSlice) LoadFile(ctx context.Context, exec bob.Executor, mo
return nil
}
// LoadCSVFileErrorCSVS loads the fileuploadCSV's CSVFileErrorCSVS into the .R struct
func (o *FileuploadCSV) LoadCSVFileErrorCSVS(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
	if o == nil {
		return nil
	}

	// Drop any previously loaded relations before refetching.
	o.R.CSVFileErrorCSVS = nil

	children, err := o.CSVFileErrorCSVS(mods...).All(ctx, exec)
	if err != nil {
		return err
	}
	for _, child := range children {
		child.R.CSVFileCSV = o
	}
	o.R.CSVFileErrorCSVS = children
	return nil
}
// LoadCSVFileErrorCSVS loads the fileuploadCSV's CSVFileErrorCSVS into the .R struct
// for every element of the slice using a single query, then distributes the
// fetched rows to their parents by matching csv_file_id to file_id.
func (os FileuploadCSVSlice) LoadCSVFileErrorCSVS(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
	if len(os) == 0 {
		return nil
	}

	// One query covering the children of every parent in the slice.
	fileuploadErrorCSVS, err := os.CSVFileErrorCSVS(mods...).All(ctx, exec)
	if err != nil {
		return err
	}

	// Reset all parents in a separate pass before appending; resetting inside
	// the matching loop could drop rows if the same parent pointer occurs
	// more than once in os.
	for _, o := range os {
		if o == nil {
			continue
		}
		o.R.CSVFileErrorCSVS = nil
	}

	for _, o := range os {
		if o == nil {
			continue
		}

		for _, rel := range fileuploadErrorCSVS {
			// Child belongs to this parent when its FK matches the parent PK.
			if !(o.FileID == rel.CSVFileID) {
				continue
			}

			rel.R.CSVFileCSV = o

			o.R.CSVFileErrorCSVS = append(o.R.CSVFileErrorCSVS, rel)
		}
	}

	return nil
}
// fileuploadCSVC is where relationship counts are stored.
type fileuploadCSVC struct {
	// CSVFileErrorCSVS is the count of related fileupload.error_csv rows,
	// or nil when the count has not been loaded.
	CSVFileErrorCSVS *int64
}
// PreloadCount sets a count in the C struct by name
func (o *FileuploadCSV) PreloadCount(name string, count int64) error {
	if o == nil {
		return nil
	}

	if name == "CSVFileErrorCSVS" {
		o.C.CSVFileErrorCSVS = &count
	}
	return nil
}
// fileuploadCSVCountPreloader builds preloaders that attach relation counts
// to FileuploadCSV rows as part of a parent query.
type fileuploadCSVCountPreloader struct {
	CSVFileErrorCSVS func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader
}
// buildFileuploadCSVCountPreloader constructs the count preloader for the
// CSVFileErrorCSVS relationship of FileuploadCSV.
func buildFileuploadCSVCountPreloader() fileuploadCSVCountPreloader {
	return fileuploadCSVCountPreloader{
		CSVFileErrorCSVS: func(mods ...bob.Mod[*dialect.SelectQuery]) psql.Preloader {
			return countPreloader[*FileuploadCSV]("CSVFileErrorCSVS", func(parent string) bob.Expression {
				// Build a correlated subquery: (SELECT COUNT(*) FROM related WHERE fk = parent.pk)
				// Fall back to the table's default alias when no parent alias is given.
				if parent == "" {
					parent = FileuploadCSVS.Alias()
				}
				subqueryMods := []bob.Mod[*dialect.SelectQuery]{
					sm.Columns(psql.Raw("count(*)")),
					sm.From(FileuploadErrorCSVS.Name()),
					sm.Where(psql.Quote(FileuploadErrorCSVS.Alias(), "csv_file_id").EQ(psql.Quote(parent, "file_id"))),
				}
				// Caller-supplied mods further constrain the counted rows.
				subqueryMods = append(subqueryMods, mods...)
				return psql.Group(psql.Select(subqueryMods...).Expression)
			})
		},
	}
}
// fileuploadCSVCountThenLoader builds then-loaders that fetch relation counts
// for already-loaded FileuploadCSV rows.
type fileuploadCSVCountThenLoader[Q orm.Loadable] struct {
	CSVFileErrorCSVS func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
}
// buildFileuploadCSVCountThenLoader constructs the count then-loader for the
// CSVFileErrorCSVS relationship, dispatching to LoadCountCSVFileErrorCSVS on
// the retrieved value.
func buildFileuploadCSVCountThenLoader[Q orm.Loadable]() fileuploadCSVCountThenLoader[Q] {
	// Local interface so the then-loader can accept both a single row and a
	// slice — each implements LoadCountCSVFileErrorCSVS.
	type CSVFileErrorCSVSCountInterface interface {
		LoadCountCSVFileErrorCSVS(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
	}

	return fileuploadCSVCountThenLoader[Q]{
		CSVFileErrorCSVS: countThenLoadBuilder[Q](
			"CSVFileErrorCSVS",
			func(ctx context.Context, exec bob.Executor, retrieved CSVFileErrorCSVSCountInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
				return retrieved.LoadCountCSVFileErrorCSVS(ctx, exec, mods...)
			},
		),
	}
}
// LoadCountCSVFileErrorCSVS loads the count of CSVFileErrorCSVS into the C struct
func (o *FileuploadCSV) LoadCountCSVFileErrorCSVS(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
if o == nil {
return nil
}
count, err := o.CSVFileErrorCSVS(mods...).Count(ctx, exec)
if err != nil {
return err
}
o.C.CSVFileErrorCSVS = &count
return nil
}
// LoadCountCSVFileErrorCSVS loads the count of CSVFileErrorCSVS for a slice
func (os FileuploadCSVSlice) LoadCountCSVFileErrorCSVS(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
if len(os) == 0 {
return nil
}
for _, o := range os {
if err := o.LoadCountCSVFileErrorCSVS(ctx, exec, mods...); err != nil {
return err
}
}
return nil
}
// fileuploadCSVJoins holds join builders for FileuploadCSV relationships.
// NOTE(review): the rendered diff showed the typ/File fields twice (old and
// new hunk copies fused together); a Go struct may declare each field only
// once, so the duplicates are dropped here.
type fileuploadCSVJoins[Q dialect.Joinable] struct {
	typ              string
	File             modAs[Q, fileuploadFileColumns]
	CSVFileErrorCSVS modAs[Q, fileuploadErrorCSVColumns]
}
func (j fileuploadCSVJoins[Q]) aliasedAs(alias string) fileuploadCSVJoins[Q] {
@ -596,6 +923,20 @@ func buildFileuploadCSVJoins[Q dialect.Joinable](cols fileuploadCSVColumns, typ
))
}
return mods
},
},
CSVFileErrorCSVS: modAs[Q, fileuploadErrorCSVColumns]{
c: FileuploadErrorCSVS.Columns,
f: func(to fileuploadErrorCSVColumns) bob.Mod[Q] {
mods := make(mods.QueryMods[Q], 0, 1)
{
mods = append(mods, dialect.Join[Q](typ, FileuploadErrorCSVS.Name().As(to.Alias())).On(
to.CSVFileID.EQ(cols.FileID),
))
}
return mods
},
},

View file

@ -1,652 +0,0 @@
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.
package models
import (
"context"
"fmt"
"io"
"github.com/Gleipnir-Technology/bob"
"github.com/Gleipnir-Technology/bob/dialect/psql"
"github.com/Gleipnir-Technology/bob/dialect/psql/dialect"
"github.com/Gleipnir-Technology/bob/dialect/psql/dm"
"github.com/Gleipnir-Technology/bob/dialect/psql/sm"
"github.com/Gleipnir-Technology/bob/dialect/psql/um"
"github.com/Gleipnir-Technology/bob/expr"
"github.com/Gleipnir-Technology/bob/mods"
"github.com/Gleipnir-Technology/bob/orm"
"github.com/Gleipnir-Technology/bob/types/pgtypes"
"github.com/aarondl/opt/omit"
)
// FileuploadError is an object representing the database table.
type FileuploadError struct {
FileID int32 `db:"file_id" `
ID int32 `db:"id,pk" `
Line int32 `db:"line" `
Message string `db:"message" `
R fileuploadErrorR `db:"-" `
}
// FileuploadErrorSlice is an alias for a slice of pointers to FileuploadError.
// This should almost always be used instead of []*FileuploadError.
type FileuploadErrorSlice []*FileuploadError
// FileuploadErrors contains methods to work with the error table
var FileuploadErrors = psql.NewTablex[*FileuploadError, FileuploadErrorSlice, *FileuploadErrorSetter]("fileupload", "error", buildFileuploadErrorColumns("fileupload.error"))
// FileuploadErrorsQuery is a query on the error table
type FileuploadErrorsQuery = *psql.ViewQuery[*FileuploadError, FileuploadErrorSlice]
// fileuploadErrorR is where relationships are stored.
type fileuploadErrorR struct {
File *FileuploadFile // fileupload.error.error_file_id_fkey
}
func buildFileuploadErrorColumns(alias string) fileuploadErrorColumns {
return fileuploadErrorColumns{
ColumnsExpr: expr.NewColumnsExpr(
"file_id", "id", "line", "message",
).WithParent("fileupload.error"),
tableAlias: alias,
FileID: psql.Quote(alias, "file_id"),
ID: psql.Quote(alias, "id"),
Line: psql.Quote(alias, "line"),
Message: psql.Quote(alias, "message"),
}
}
type fileuploadErrorColumns struct {
expr.ColumnsExpr
tableAlias string
FileID psql.Expression
ID psql.Expression
Line psql.Expression
Message psql.Expression
}
func (c fileuploadErrorColumns) Alias() string {
return c.tableAlias
}
func (fileuploadErrorColumns) AliasedAs(alias string) fileuploadErrorColumns {
return buildFileuploadErrorColumns(alias)
}
// FileuploadErrorSetter is used for insert/upsert/update operations
// All values are optional, and do not have to be set
// Generated columns are not included
type FileuploadErrorSetter struct {
FileID omit.Val[int32] `db:"file_id" `
ID omit.Val[int32] `db:"id,pk" `
Line omit.Val[int32] `db:"line" `
Message omit.Val[string] `db:"message" `
}
func (s FileuploadErrorSetter) SetColumns() []string {
vals := make([]string, 0, 4)
if s.FileID.IsValue() {
vals = append(vals, "file_id")
}
if s.ID.IsValue() {
vals = append(vals, "id")
}
if s.Line.IsValue() {
vals = append(vals, "line")
}
if s.Message.IsValue() {
vals = append(vals, "message")
}
return vals
}
func (s FileuploadErrorSetter) Overwrite(t *FileuploadError) {
if s.FileID.IsValue() {
t.FileID = s.FileID.MustGet()
}
if s.ID.IsValue() {
t.ID = s.ID.MustGet()
}
if s.Line.IsValue() {
t.Line = s.Line.MustGet()
}
if s.Message.IsValue() {
t.Message = s.Message.MustGet()
}
}
func (s *FileuploadErrorSetter) Apply(q *dialect.InsertQuery) {
q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
return FileuploadErrors.BeforeInsertHooks.RunHooks(ctx, exec, s)
})
q.AppendValues(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
vals := make([]bob.Expression, 4)
if s.FileID.IsValue() {
vals[0] = psql.Arg(s.FileID.MustGet())
} else {
vals[0] = psql.Raw("DEFAULT")
}
if s.ID.IsValue() {
vals[1] = psql.Arg(s.ID.MustGet())
} else {
vals[1] = psql.Raw("DEFAULT")
}
if s.Line.IsValue() {
vals[2] = psql.Arg(s.Line.MustGet())
} else {
vals[2] = psql.Raw("DEFAULT")
}
if s.Message.IsValue() {
vals[3] = psql.Arg(s.Message.MustGet())
} else {
vals[3] = psql.Raw("DEFAULT")
}
return bob.ExpressSlice(ctx, w, d, start, vals, "", ", ", "")
}))
}
func (s FileuploadErrorSetter) UpdateMod() bob.Mod[*dialect.UpdateQuery] {
return um.Set(s.Expressions()...)
}
func (s FileuploadErrorSetter) Expressions(prefix ...string) []bob.Expression {
exprs := make([]bob.Expression, 0, 4)
if s.FileID.IsValue() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "file_id")...),
psql.Arg(s.FileID),
}})
}
if s.ID.IsValue() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "id")...),
psql.Arg(s.ID),
}})
}
if s.Line.IsValue() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "line")...),
psql.Arg(s.Line),
}})
}
if s.Message.IsValue() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "message")...),
psql.Arg(s.Message),
}})
}
return exprs
}
// FindFileuploadError retrieves a single record by primary key
// If cols is empty Find will return all columns.
func FindFileuploadError(ctx context.Context, exec bob.Executor, IDPK int32, cols ...string) (*FileuploadError, error) {
if len(cols) == 0 {
return FileuploadErrors.Query(
sm.Where(FileuploadErrors.Columns.ID.EQ(psql.Arg(IDPK))),
).One(ctx, exec)
}
return FileuploadErrors.Query(
sm.Where(FileuploadErrors.Columns.ID.EQ(psql.Arg(IDPK))),
sm.Columns(FileuploadErrors.Columns.Only(cols...)),
).One(ctx, exec)
}
// FileuploadErrorExists checks the presence of a single record by primary key
func FileuploadErrorExists(ctx context.Context, exec bob.Executor, IDPK int32) (bool, error) {
return FileuploadErrors.Query(
sm.Where(FileuploadErrors.Columns.ID.EQ(psql.Arg(IDPK))),
).Exists(ctx, exec)
}
// AfterQueryHook is called after FileuploadError is retrieved from the database
func (o *FileuploadError) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error {
var err error
switch queryType {
case bob.QueryTypeSelect:
ctx, err = FileuploadErrors.AfterSelectHooks.RunHooks(ctx, exec, FileuploadErrorSlice{o})
case bob.QueryTypeInsert:
ctx, err = FileuploadErrors.AfterInsertHooks.RunHooks(ctx, exec, FileuploadErrorSlice{o})
case bob.QueryTypeUpdate:
ctx, err = FileuploadErrors.AfterUpdateHooks.RunHooks(ctx, exec, FileuploadErrorSlice{o})
case bob.QueryTypeDelete:
ctx, err = FileuploadErrors.AfterDeleteHooks.RunHooks(ctx, exec, FileuploadErrorSlice{o})
}
return err
}
// primaryKeyVals returns the primary key values of the FileuploadError
func (o *FileuploadError) primaryKeyVals() bob.Expression {
return psql.Arg(o.ID)
}
func (o *FileuploadError) pkEQ() dialect.Expression {
return psql.Quote("fileupload.error", "id").EQ(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
return o.primaryKeyVals().WriteSQL(ctx, w, d, start)
}))
}
// Update uses an executor to update the FileuploadError
func (o *FileuploadError) Update(ctx context.Context, exec bob.Executor, s *FileuploadErrorSetter) error {
v, err := FileuploadErrors.Update(s.UpdateMod(), um.Where(o.pkEQ())).One(ctx, exec)
if err != nil {
return err
}
o.R = v.R
*o = *v
return nil
}
// Delete deletes a single FileuploadError record with an executor
func (o *FileuploadError) Delete(ctx context.Context, exec bob.Executor) error {
_, err := FileuploadErrors.Delete(dm.Where(o.pkEQ())).Exec(ctx, exec)
return err
}
// Reload refreshes the FileuploadError using the executor
func (o *FileuploadError) Reload(ctx context.Context, exec bob.Executor) error {
o2, err := FileuploadErrors.Query(
sm.Where(FileuploadErrors.Columns.ID.EQ(psql.Arg(o.ID))),
).One(ctx, exec)
if err != nil {
return err
}
o2.R = o.R
*o = *o2
return nil
}
// AfterQueryHook is called after FileuploadErrorSlice is retrieved from the database
func (o FileuploadErrorSlice) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error {
var err error
switch queryType {
case bob.QueryTypeSelect:
ctx, err = FileuploadErrors.AfterSelectHooks.RunHooks(ctx, exec, o)
case bob.QueryTypeInsert:
ctx, err = FileuploadErrors.AfterInsertHooks.RunHooks(ctx, exec, o)
case bob.QueryTypeUpdate:
ctx, err = FileuploadErrors.AfterUpdateHooks.RunHooks(ctx, exec, o)
case bob.QueryTypeDelete:
ctx, err = FileuploadErrors.AfterDeleteHooks.RunHooks(ctx, exec, o)
}
return err
}
func (o FileuploadErrorSlice) pkIN() dialect.Expression {
if len(o) == 0 {
return psql.Raw("NULL")
}
return psql.Quote("fileupload.error", "id").In(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
pkPairs := make([]bob.Expression, len(o))
for i, row := range o {
pkPairs[i] = row.primaryKeyVals()
}
return bob.ExpressSlice(ctx, w, d, start, pkPairs, "", ", ", "")
}))
}
// copyMatchingRows finds models in the given slice that have the same primary key
// then it first copies the existing relationships from the old model to the new model
// and then replaces the old model in the slice with the new model
func (o FileuploadErrorSlice) copyMatchingRows(from ...*FileuploadError) {
for i, old := range o {
for _, new := range from {
if new.ID != old.ID {
continue
}
new.R = old.R
o[i] = new
break
}
}
}
// UpdateMod modifies an update query with "WHERE primary_key IN (o...)"
func (o FileuploadErrorSlice) UpdateMod() bob.Mod[*dialect.UpdateQuery] {
return bob.ModFunc[*dialect.UpdateQuery](func(q *dialect.UpdateQuery) {
q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
return FileuploadErrors.BeforeUpdateHooks.RunHooks(ctx, exec, o)
})
q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error {
var err error
switch retrieved := retrieved.(type) {
case *FileuploadError:
o.copyMatchingRows(retrieved)
case []*FileuploadError:
o.copyMatchingRows(retrieved...)
case FileuploadErrorSlice:
o.copyMatchingRows(retrieved...)
default:
// If the retrieved value is not a FileuploadError or a slice of FileuploadError
// then run the AfterUpdateHooks on the slice
_, err = FileuploadErrors.AfterUpdateHooks.RunHooks(ctx, exec, o)
}
return err
}))
q.AppendWhere(o.pkIN())
})
}
// DeleteMod modifies an delete query with "WHERE primary_key IN (o...)"
func (o FileuploadErrorSlice) DeleteMod() bob.Mod[*dialect.DeleteQuery] {
return bob.ModFunc[*dialect.DeleteQuery](func(q *dialect.DeleteQuery) {
q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
return FileuploadErrors.BeforeDeleteHooks.RunHooks(ctx, exec, o)
})
q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error {
var err error
switch retrieved := retrieved.(type) {
case *FileuploadError:
o.copyMatchingRows(retrieved)
case []*FileuploadError:
o.copyMatchingRows(retrieved...)
case FileuploadErrorSlice:
o.copyMatchingRows(retrieved...)
default:
// If the retrieved value is not a FileuploadError or a slice of FileuploadError
// then run the AfterDeleteHooks on the slice
_, err = FileuploadErrors.AfterDeleteHooks.RunHooks(ctx, exec, o)
}
return err
}))
q.AppendWhere(o.pkIN())
})
}
func (o FileuploadErrorSlice) UpdateAll(ctx context.Context, exec bob.Executor, vals FileuploadErrorSetter) error {
if len(o) == 0 {
return nil
}
_, err := FileuploadErrors.Update(vals.UpdateMod(), o.UpdateMod()).All(ctx, exec)
return err
}
func (o FileuploadErrorSlice) DeleteAll(ctx context.Context, exec bob.Executor) error {
if len(o) == 0 {
return nil
}
_, err := FileuploadErrors.Delete(o.DeleteMod()).Exec(ctx, exec)
return err
}
func (o FileuploadErrorSlice) ReloadAll(ctx context.Context, exec bob.Executor) error {
if len(o) == 0 {
return nil
}
o2, err := FileuploadErrors.Query(sm.Where(o.pkIN())).All(ctx, exec)
if err != nil {
return err
}
o.copyMatchingRows(o2...)
return nil
}
// File starts a query for related objects on fileupload.file
func (o *FileuploadError) File(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadFilesQuery {
return FileuploadFiles.Query(append(mods,
sm.Where(FileuploadFiles.Columns.ID.EQ(psql.Arg(o.FileID))),
)...)
}
func (os FileuploadErrorSlice) File(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadFilesQuery {
pkFileID := make(pgtypes.Array[int32], 0, len(os))
for _, o := range os {
if o == nil {
continue
}
pkFileID = append(pkFileID, o.FileID)
}
PKArgExpr := psql.Select(sm.Columns(
psql.F("unnest", psql.Cast(psql.Arg(pkFileID), "integer[]")),
))
return FileuploadFiles.Query(append(mods,
sm.Where(psql.Group(FileuploadFiles.Columns.ID).OP("IN", PKArgExpr)),
)...)
}
func attachFileuploadErrorFile0(ctx context.Context, exec bob.Executor, count int, fileuploadError0 *FileuploadError, fileuploadFile1 *FileuploadFile) (*FileuploadError, error) {
setter := &FileuploadErrorSetter{
FileID: omit.From(fileuploadFile1.ID),
}
err := fileuploadError0.Update(ctx, exec, setter)
if err != nil {
return nil, fmt.Errorf("attachFileuploadErrorFile0: %w", err)
}
return fileuploadError0, nil
}
func (fileuploadError0 *FileuploadError) InsertFile(ctx context.Context, exec bob.Executor, related *FileuploadFileSetter) error {
var err error
fileuploadFile1, err := FileuploadFiles.Insert(related).One(ctx, exec)
if err != nil {
return fmt.Errorf("inserting related objects: %w", err)
}
_, err = attachFileuploadErrorFile0(ctx, exec, 1, fileuploadError0, fileuploadFile1)
if err != nil {
return err
}
fileuploadError0.R.File = fileuploadFile1
fileuploadFile1.R.Errors = append(fileuploadFile1.R.Errors, fileuploadError0)
return nil
}
func (fileuploadError0 *FileuploadError) AttachFile(ctx context.Context, exec bob.Executor, fileuploadFile1 *FileuploadFile) error {
var err error
_, err = attachFileuploadErrorFile0(ctx, exec, 1, fileuploadError0, fileuploadFile1)
if err != nil {
return err
}
fileuploadError0.R.File = fileuploadFile1
fileuploadFile1.R.Errors = append(fileuploadFile1.R.Errors, fileuploadError0)
return nil
}
type fileuploadErrorWhere[Q psql.Filterable] struct {
FileID psql.WhereMod[Q, int32]
ID psql.WhereMod[Q, int32]
Line psql.WhereMod[Q, int32]
Message psql.WhereMod[Q, string]
}
func (fileuploadErrorWhere[Q]) AliasedAs(alias string) fileuploadErrorWhere[Q] {
return buildFileuploadErrorWhere[Q](buildFileuploadErrorColumns(alias))
}
func buildFileuploadErrorWhere[Q psql.Filterable](cols fileuploadErrorColumns) fileuploadErrorWhere[Q] {
return fileuploadErrorWhere[Q]{
FileID: psql.Where[Q, int32](cols.FileID),
ID: psql.Where[Q, int32](cols.ID),
Line: psql.Where[Q, int32](cols.Line),
Message: psql.Where[Q, string](cols.Message),
}
}
func (o *FileuploadError) Preload(name string, retrieved any) error {
if o == nil {
return nil
}
switch name {
case "File":
rel, ok := retrieved.(*FileuploadFile)
if !ok {
return fmt.Errorf("fileuploadError cannot load %T as %q", retrieved, name)
}
o.R.File = rel
if rel != nil {
rel.R.Errors = FileuploadErrorSlice{o}
}
return nil
default:
return fmt.Errorf("fileuploadError has no relationship %q", name)
}
}
type fileuploadErrorPreloader struct {
File func(...psql.PreloadOption) psql.Preloader
}
func buildFileuploadErrorPreloader() fileuploadErrorPreloader {
return fileuploadErrorPreloader{
File: func(opts ...psql.PreloadOption) psql.Preloader {
return psql.Preload[*FileuploadFile, FileuploadFileSlice](psql.PreloadRel{
Name: "File",
Sides: []psql.PreloadSide{
{
From: FileuploadErrors,
To: FileuploadFiles,
FromColumns: []string{"file_id"},
ToColumns: []string{"id"},
},
},
}, FileuploadFiles.Columns.Names(), opts...)
},
}
}
type fileuploadErrorThenLoader[Q orm.Loadable] struct {
File func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
}
func buildFileuploadErrorThenLoader[Q orm.Loadable]() fileuploadErrorThenLoader[Q] {
type FileLoadInterface interface {
LoadFile(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
}
return fileuploadErrorThenLoader[Q]{
File: thenLoadBuilder[Q](
"File",
func(ctx context.Context, exec bob.Executor, retrieved FileLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
return retrieved.LoadFile(ctx, exec, mods...)
},
),
}
}
// LoadFile loads the fileuploadError's File into the .R struct
func (o *FileuploadError) LoadFile(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
if o == nil {
return nil
}
// Reset the relationship
o.R.File = nil
related, err := o.File(mods...).One(ctx, exec)
if err != nil {
return err
}
related.R.Errors = FileuploadErrorSlice{o}
o.R.File = related
return nil
}
// LoadFile loads the fileuploadError's File into the .R struct
func (os FileuploadErrorSlice) LoadFile(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
if len(os) == 0 {
return nil
}
fileuploadFiles, err := os.File(mods...).All(ctx, exec)
if err != nil {
return err
}
for _, o := range os {
if o == nil {
continue
}
for _, rel := range fileuploadFiles {
if !(o.FileID == rel.ID) {
continue
}
rel.R.Errors = append(rel.R.Errors, o)
o.R.File = rel
break
}
}
return nil
}
type fileuploadErrorJoins[Q dialect.Joinable] struct {
typ string
File modAs[Q, fileuploadFileColumns]
}
func (j fileuploadErrorJoins[Q]) aliasedAs(alias string) fileuploadErrorJoins[Q] {
return buildFileuploadErrorJoins[Q](buildFileuploadErrorColumns(alias), j.typ)
}
func buildFileuploadErrorJoins[Q dialect.Joinable](cols fileuploadErrorColumns, typ string) fileuploadErrorJoins[Q] {
return fileuploadErrorJoins[Q]{
typ: typ,
File: modAs[Q, fileuploadFileColumns]{
c: FileuploadFiles.Columns,
f: func(to fileuploadFileColumns) bob.Mod[Q] {
mods := make(mods.QueryMods[Q], 0, 1)
{
mods = append(mods, dialect.Join[Q](typ, FileuploadFiles.Name().As(to.Alias())).On(
to.ID.EQ(cols.FileID),
))
}
return mods
},
},
}
}

View file

@ -0,0 +1,677 @@
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.
package models
import (
"context"
"fmt"
"io"
"github.com/Gleipnir-Technology/bob"
"github.com/Gleipnir-Technology/bob/dialect/psql"
"github.com/Gleipnir-Technology/bob/dialect/psql/dialect"
"github.com/Gleipnir-Technology/bob/dialect/psql/dm"
"github.com/Gleipnir-Technology/bob/dialect/psql/sm"
"github.com/Gleipnir-Technology/bob/dialect/psql/um"
"github.com/Gleipnir-Technology/bob/expr"
"github.com/Gleipnir-Technology/bob/mods"
"github.com/Gleipnir-Technology/bob/orm"
"github.com/Gleipnir-Technology/bob/types/pgtypes"
"github.com/aarondl/opt/omit"
)
// FileuploadErrorCSV is an object representing the database table.
// A row appears to record a single error against an uploaded CSV file,
// located by line and column — confirm against the fileupload schema.
type FileuploadErrorCSV struct {
	Col       int32               `db:"col" `
	CSVFileID int32               `db:"csv_file_id" ` // FK: matched against fileupload.csv's file_id by CSVFileCSV below
	ID        int32               `db:"id,pk" `
	Line      int32               `db:"line" `
	Message   string              `db:"message" `
	R         fileuploadErrorCSVR `db:"-" ` // in-memory relationships, not a DB column
}
// FileuploadErrorCSVSlice is an alias for a slice of pointers to FileuploadErrorCSV.
// This should almost always be used instead of []*FileuploadErrorCSV.
type FileuploadErrorCSVSlice []*FileuploadErrorCSV
// FileuploadErrorCSVS contains methods to work with the error_csv table
var FileuploadErrorCSVS = psql.NewTablex[*FileuploadErrorCSV, FileuploadErrorCSVSlice, *FileuploadErrorCSVSetter]("fileupload", "error_csv", buildFileuploadErrorCSVColumns("fileupload.error_csv"))
// FileuploadErrorCSVSQuery is a query on the error_csv table
type FileuploadErrorCSVSQuery = *psql.ViewQuery[*FileuploadErrorCSV, FileuploadErrorCSVSlice]
// fileuploadErrorCSVR is where relationships are stored.
type fileuploadErrorCSVR struct {
CSVFileCSV *FileuploadCSV // fileupload.error_csv.error_csv_csv_file_id_fkey
}
// buildFileuploadErrorCSVColumns builds the column expressions for
// fileupload.error_csv, each quoted under the given table alias.
func buildFileuploadErrorCSVColumns(alias string) fileuploadErrorCSVColumns {
	return fileuploadErrorCSVColumns{
		ColumnsExpr: expr.NewColumnsExpr(
			"col", "csv_file_id", "id", "line", "message",
		).WithParent("fileupload.error_csv"),
		tableAlias: alias,
		Col:        psql.Quote(alias, "col"),
		CSVFileID:  psql.Quote(alias, "csv_file_id"),
		ID:         psql.Quote(alias, "id"),
		Line:       psql.Quote(alias, "line"),
		Message:    psql.Quote(alias, "message"),
	}
}
// fileuploadErrorCSVColumns exposes each table column as a quoted SQL
// expression, remembering the alias it was built under.
type fileuploadErrorCSVColumns struct {
	expr.ColumnsExpr
	tableAlias string
	Col        psql.Expression
	CSVFileID  psql.Expression
	ID         psql.Expression
	Line       psql.Expression
	Message    psql.Expression
}
// Alias returns the table alias these column expressions were built with.
func (c fileuploadErrorCSVColumns) Alias() string {
	return c.tableAlias
}
// AliasedAs rebuilds the whole column set under a different table alias.
func (fileuploadErrorCSVColumns) AliasedAs(alias string) fileuploadErrorCSVColumns {
	return buildFileuploadErrorCSVColumns(alias)
}
// FileuploadErrorCSVSetter is used for insert/upsert/update operations
// All values are optional, and do not have to be set
// Generated columns are not included
type FileuploadErrorCSVSetter struct {
Col omit.Val[int32] `db:"col" `
CSVFileID omit.Val[int32] `db:"csv_file_id" `
ID omit.Val[int32] `db:"id,pk" `
Line omit.Val[int32] `db:"line" `
Message omit.Val[string] `db:"message" `
}
// SetColumns reports, in declaration order, the names of every column whose
// setter field carries a value.
func (s FileuploadErrorCSVSetter) SetColumns() []string {
	vals := make([]string, 0, 5)
	for _, col := range []struct {
		name string
		set  bool
	}{
		{"col", s.Col.IsValue()},
		{"csv_file_id", s.CSVFileID.IsValue()},
		{"id", s.ID.IsValue()},
		{"line", s.Line.IsValue()},
		{"message", s.Message.IsValue()},
	} {
		if col.set {
			vals = append(vals, col.name)
		}
	}
	return vals
}
// Overwrite copies every set value from the setter onto t, leaving fields
// whose setter value is unset untouched.
func (s FileuploadErrorCSVSetter) Overwrite(t *FileuploadErrorCSV) {
	if s.Col.IsValue() {
		t.Col = s.Col.MustGet()
	}
	if s.CSVFileID.IsValue() {
		t.CSVFileID = s.CSVFileID.MustGet()
	}
	if s.ID.IsValue() {
		t.ID = s.ID.MustGet()
	}
	if s.Line.IsValue() {
		t.Line = s.Line.MustGet()
	}
	if s.Message.IsValue() {
		t.Message = s.Message.MustGet()
	}
}
// Apply wires the setter into an INSERT query: it registers the before-insert
// hooks and appends one VALUES tuple, emitting a bound argument for each set
// field and the SQL keyword DEFAULT for each unset one.
func (s *FileuploadErrorCSVSetter) Apply(q *dialect.InsertQuery) {
	q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
		return FileuploadErrorCSVS.BeforeInsertHooks.RunHooks(ctx, exec, s)
	})
	q.AppendValues(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
		// Slots follow the column order: col, csv_file_id, id, line, message.
		vals := make([]bob.Expression, 5)
		if s.Col.IsValue() {
			vals[0] = psql.Arg(s.Col.MustGet())
		} else {
			vals[0] = psql.Raw("DEFAULT")
		}
		if s.CSVFileID.IsValue() {
			vals[1] = psql.Arg(s.CSVFileID.MustGet())
		} else {
			vals[1] = psql.Raw("DEFAULT")
		}
		if s.ID.IsValue() {
			vals[2] = psql.Arg(s.ID.MustGet())
		} else {
			vals[2] = psql.Raw("DEFAULT")
		}
		if s.Line.IsValue() {
			vals[3] = psql.Arg(s.Line.MustGet())
		} else {
			vals[3] = psql.Raw("DEFAULT")
		}
		if s.Message.IsValue() {
			vals[4] = psql.Arg(s.Message.MustGet())
		} else {
			vals[4] = psql.Raw("DEFAULT")
		}
		return bob.ExpressSlice(ctx, w, d, start, vals, "", ", ", "")
	}))
}
// UpdateMod turns the setter into an UPDATE mod that SETs every set field.
func (s FileuploadErrorCSVSetter) UpdateMod() bob.Mod[*dialect.UpdateQuery] {
	return um.Set(s.Expressions()...)
}
// Expressions returns one `column = value` assignment expression per set
// field. An optional prefix (e.g. a table alias) qualifies the column names.
func (s FileuploadErrorCSVSetter) Expressions(prefix ...string) []bob.Expression {
	exprs := make([]bob.Expression, 0, 5)
	if s.Col.IsValue() {
		exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
			psql.Quote(append(prefix, "col")...),
			psql.Arg(s.Col),
		}})
	}
	if s.CSVFileID.IsValue() {
		exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
			psql.Quote(append(prefix, "csv_file_id")...),
			psql.Arg(s.CSVFileID),
		}})
	}
	if s.ID.IsValue() {
		exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
			psql.Quote(append(prefix, "id")...),
			psql.Arg(s.ID),
		}})
	}
	if s.Line.IsValue() {
		exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
			psql.Quote(append(prefix, "line")...),
			psql.Arg(s.Line),
		}})
	}
	if s.Message.IsValue() {
		exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
			psql.Quote(append(prefix, "message")...),
			psql.Arg(s.Message),
		}})
	}
	return exprs
}
// FindFileuploadErrorCSV retrieves a single record by primary key.
// If cols is empty Find will return all columns.
func FindFileuploadErrorCSV(ctx context.Context, exec bob.Executor, IDPK int32, cols ...string) (*FileuploadErrorCSV, error) {
	mods := []bob.Mod[*dialect.SelectQuery]{
		sm.Where(FileuploadErrorCSVS.Columns.ID.EQ(psql.Arg(IDPK))),
	}
	if len(cols) > 0 {
		// Restrict the SELECT list when specific columns were requested.
		mods = append(mods, sm.Columns(FileuploadErrorCSVS.Columns.Only(cols...)))
	}
	return FileuploadErrorCSVS.Query(mods...).One(ctx, exec)
}
// FileuploadErrorCSVExists checks the presence of a single record by primary key
// without fetching the row itself.
func FileuploadErrorCSVExists(ctx context.Context, exec bob.Executor, IDPK int32) (bool, error) {
	return FileuploadErrorCSVS.Query(
		sm.Where(FileuploadErrorCSVS.Columns.ID.EQ(psql.Arg(IDPK))),
	).Exists(ctx, exec)
}
// AfterQueryHook is called after FileuploadErrorCSV is retrieved from the database
// It dispatches to the hook set matching the query type, wrapping the single
// row in a one-element slice so the slice-based hooks can run.
// NOTE(review): the context returned by RunHooks is discarded here.
func (o *FileuploadErrorCSV) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error {
	var err error
	switch queryType {
	case bob.QueryTypeSelect:
		ctx, err = FileuploadErrorCSVS.AfterSelectHooks.RunHooks(ctx, exec, FileuploadErrorCSVSlice{o})
	case bob.QueryTypeInsert:
		ctx, err = FileuploadErrorCSVS.AfterInsertHooks.RunHooks(ctx, exec, FileuploadErrorCSVSlice{o})
	case bob.QueryTypeUpdate:
		ctx, err = FileuploadErrorCSVS.AfterUpdateHooks.RunHooks(ctx, exec, FileuploadErrorCSVSlice{o})
	case bob.QueryTypeDelete:
		ctx, err = FileuploadErrorCSVS.AfterDeleteHooks.RunHooks(ctx, exec, FileuploadErrorCSVSlice{o})
	}
	return err
}
// primaryKeyVals returns the primary key values of the FileuploadErrorCSV
// as a bound SQL argument.
func (o *FileuploadErrorCSV) primaryKeyVals() bob.Expression {
	return psql.Arg(o.ID)
}
// pkEQ builds the predicate `fileupload.error_csv.id = <pk>` for this row.
func (o *FileuploadErrorCSV) pkEQ() dialect.Expression {
	return psql.Quote("fileupload.error_csv", "id").EQ(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
		return o.primaryKeyVals().WriteSQL(ctx, w, d, start)
	}))
}
// Update uses an executor to update the FileuploadErrorCSV and copies the
// returned row back into o.
//
// NOTE(review): the generated body did `o.R = v.R` immediately before
// `*o = *v` — a dead store that also discarded relationships already loaded
// on o. Reload preserves them (`o2.R = o.R`), so Update now does the same;
// confirm this matches the generator's intent before regenerating.
func (o *FileuploadErrorCSV) Update(ctx context.Context, exec bob.Executor, s *FileuploadErrorCSVSetter) error {
	v, err := FileuploadErrorCSVS.Update(s.UpdateMod(), um.Where(o.pkEQ())).One(ctx, exec)
	if err != nil {
		return err
	}
	// Keep previously-loaded relationships across the refresh.
	v.R = o.R
	*o = *v
	return nil
}
// Delete deletes a single FileuploadErrorCSV record with an executor
// The row is matched by primary key; the affected-row count is discarded.
func (o *FileuploadErrorCSV) Delete(ctx context.Context, exec bob.Executor) error {
	_, err := FileuploadErrorCSVS.Delete(dm.Where(o.pkEQ())).Exec(ctx, exec)
	return err
}
// Reload refreshes the FileuploadErrorCSV using the executor
// Previously-loaded relationships are carried over to the fresh row.
func (o *FileuploadErrorCSV) Reload(ctx context.Context, exec bob.Executor) error {
	o2, err := FileuploadErrorCSVS.Query(
		sm.Where(FileuploadErrorCSVS.Columns.ID.EQ(psql.Arg(o.ID))),
	).One(ctx, exec)
	if err != nil {
		return err
	}
	o2.R = o.R
	*o = *o2
	return nil
}
// AfterQueryHook is called after FileuploadErrorCSVSlice is retrieved from the database
// It runs the hook set matching the query type over the whole slice.
// NOTE(review): the context returned by RunHooks is discarded here.
func (o FileuploadErrorCSVSlice) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error {
	var err error
	switch queryType {
	case bob.QueryTypeSelect:
		ctx, err = FileuploadErrorCSVS.AfterSelectHooks.RunHooks(ctx, exec, o)
	case bob.QueryTypeInsert:
		ctx, err = FileuploadErrorCSVS.AfterInsertHooks.RunHooks(ctx, exec, o)
	case bob.QueryTypeUpdate:
		ctx, err = FileuploadErrorCSVS.AfterUpdateHooks.RunHooks(ctx, exec, o)
	case bob.QueryTypeDelete:
		ctx, err = FileuploadErrorCSVS.AfterDeleteHooks.RunHooks(ctx, exec, o)
	}
	return err
}
// pkIN builds `fileupload.error_csv.id IN (...)` over every row's primary
// key. An empty slice yields the literal NULL, which matches no rows.
func (o FileuploadErrorCSVSlice) pkIN() dialect.Expression {
	if len(o) == 0 {
		return psql.Raw("NULL")
	}
	return psql.Quote("fileupload.error_csv", "id").In(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
		pkPairs := make([]bob.Expression, len(o))
		for i, row := range o {
			pkPairs[i] = row.primaryKeyVals()
		}
		return bob.ExpressSlice(ctx, w, d, start, pkPairs, "", ", ", "")
	}))
}
// copyMatchingRows finds models in the given slice that have the same primary
// key, copies the existing relationships from the old model onto the incoming
// one, and replaces the old model in the slice with it.
// (Renamed the loop variable from `new`, which shadowed the predeclared
// identifier; behavior is unchanged.)
func (o FileuploadErrorCSVSlice) copyMatchingRows(from ...*FileuploadErrorCSV) {
	for i, existing := range o {
		for _, incoming := range from {
			if incoming.ID != existing.ID {
				continue
			}
			// Carry loaded relationships over to the fresh row.
			incoming.R = existing.R
			o[i] = incoming
			break
		}
	}
}
// UpdateMod modifies an update query with "WHERE primary_key IN (o...)"
// It also registers the before-update hooks and a loader that copies any
// returned rows back into this slice; when the retrieved value is not a
// FileuploadErrorCSV (or slice of them), the after-update hooks run instead.
func (o FileuploadErrorCSVSlice) UpdateMod() bob.Mod[*dialect.UpdateQuery] {
	return bob.ModFunc[*dialect.UpdateQuery](func(q *dialect.UpdateQuery) {
		q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
			return FileuploadErrorCSVS.BeforeUpdateHooks.RunHooks(ctx, exec, o)
		})
		q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error {
			var err error
			switch retrieved := retrieved.(type) {
			case *FileuploadErrorCSV:
				o.copyMatchingRows(retrieved)
			case []*FileuploadErrorCSV:
				o.copyMatchingRows(retrieved...)
			case FileuploadErrorCSVSlice:
				o.copyMatchingRows(retrieved...)
			default:
				// If the retrieved value is not a FileuploadErrorCSV or a slice of FileuploadErrorCSV
				// then run the AfterUpdateHooks on the slice
				_, err = FileuploadErrorCSVS.AfterUpdateHooks.RunHooks(ctx, exec, o)
			}
			return err
		}))
		q.AppendWhere(o.pkIN())
	})
}
// DeleteMod modifies an delete query with "WHERE primary_key IN (o...)"
// It also registers the before-delete hooks and a loader that copies any
// returned rows back into this slice; when the retrieved value is not a
// FileuploadErrorCSV (or slice of them), the after-delete hooks run instead.
func (o FileuploadErrorCSVSlice) DeleteMod() bob.Mod[*dialect.DeleteQuery] {
	return bob.ModFunc[*dialect.DeleteQuery](func(q *dialect.DeleteQuery) {
		q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
			return FileuploadErrorCSVS.BeforeDeleteHooks.RunHooks(ctx, exec, o)
		})
		q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error {
			var err error
			switch retrieved := retrieved.(type) {
			case *FileuploadErrorCSV:
				o.copyMatchingRows(retrieved)
			case []*FileuploadErrorCSV:
				o.copyMatchingRows(retrieved...)
			case FileuploadErrorCSVSlice:
				o.copyMatchingRows(retrieved...)
			default:
				// If the retrieved value is not a FileuploadErrorCSV or a slice of FileuploadErrorCSV
				// then run the AfterDeleteHooks on the slice
				_, err = FileuploadErrorCSVS.AfterDeleteHooks.RunHooks(ctx, exec, o)
			}
			return err
		}))
		q.AppendWhere(o.pkIN())
	})
}
// UpdateAll applies the given setter to every row in the slice in one query.
// An empty slice is a no-op.
func (o FileuploadErrorCSVSlice) UpdateAll(ctx context.Context, exec bob.Executor, vals FileuploadErrorCSVSetter) error {
	if len(o) == 0 {
		return nil
	}
	_, err := FileuploadErrorCSVS.Update(vals.UpdateMod(), o.UpdateMod()).All(ctx, exec)
	return err
}
// DeleteAll deletes every row in the slice in one query, matched by primary
// key. An empty slice is a no-op.
func (o FileuploadErrorCSVSlice) DeleteAll(ctx context.Context, exec bob.Executor) error {
	if len(o) == 0 {
		return nil
	}
	_, err := FileuploadErrorCSVS.Delete(o.DeleteMod()).Exec(ctx, exec)
	return err
}
// ReloadAll re-fetches every row in the slice by primary key in one query and
// copies the fresh data back in place (relationships are preserved by
// copyMatchingRows). An empty slice is a no-op.
func (o FileuploadErrorCSVSlice) ReloadAll(ctx context.Context, exec bob.Executor) error {
	if len(o) == 0 {
		return nil
	}
	o2, err := FileuploadErrorCSVS.Query(sm.Where(o.pkIN())).All(ctx, exec)
	if err != nil {
		return err
	}
	o.copyMatchingRows(o2...)
	return nil
}
// CSVFileCSV starts a query for related objects on fileupload.csv
// (the parent CSV row whose file_id equals this error's csv_file_id).
func (o *FileuploadErrorCSV) CSVFileCSV(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadCSVSQuery {
	return FileuploadCSVS.Query(append(mods,
		sm.Where(FileuploadCSVS.Columns.FileID.EQ(psql.Arg(o.CSVFileID))),
	)...)
}
func (os FileuploadErrorCSVSlice) CSVFileCSV(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadCSVSQuery {
pkCSVFileID := make(pgtypes.Array[int32], 0, len(os))
for _, o := range os {
if o == nil {
continue
}
pkCSVFileID = append(pkCSVFileID, o.CSVFileID)
}
PKArgExpr := psql.Select(sm.Columns(
psql.F("unnest", psql.Cast(psql.Arg(pkCSVFileID), "integer[]")),
))
return FileuploadCSVS.Query(append(mods,
sm.Where(psql.Group(FileuploadCSVS.Columns.FileID).OP("IN", PKArgExpr)),
)...)
}
// attachFileuploadErrorCSVCSVFileCSV0 points fileuploadErrorCSV0 at
// fileuploadCSV1 by updating its csv_file_id column in the database.
// The count parameter is unused here; it is part of the generated signature.
func attachFileuploadErrorCSVCSVFileCSV0(ctx context.Context, exec bob.Executor, count int, fileuploadErrorCSV0 *FileuploadErrorCSV, fileuploadCSV1 *FileuploadCSV) (*FileuploadErrorCSV, error) {
	setter := &FileuploadErrorCSVSetter{
		CSVFileID: omit.From(fileuploadCSV1.FileID),
	}
	err := fileuploadErrorCSV0.Update(ctx, exec, setter)
	if err != nil {
		return nil, fmt.Errorf("attachFileuploadErrorCSVCSVFileCSV0: %w", err)
	}
	return fileuploadErrorCSV0, nil
}

// InsertCSVFileCSV inserts a new related fileupload.csv row and attaches it
// to this error record, updating both sides of the in-memory relationship.
func (fileuploadErrorCSV0 *FileuploadErrorCSV) InsertCSVFileCSV(ctx context.Context, exec bob.Executor, related *FileuploadCSVSetter) error {
	var err error
	fileuploadCSV1, err := FileuploadCSVS.Insert(related).One(ctx, exec)
	if err != nil {
		return fmt.Errorf("inserting related objects: %w", err)
	}
	_, err = attachFileuploadErrorCSVCSVFileCSV0(ctx, exec, 1, fileuploadErrorCSV0, fileuploadCSV1)
	if err != nil {
		return err
	}
	// Keep both sides of the relationship consistent in memory.
	fileuploadErrorCSV0.R.CSVFileCSV = fileuploadCSV1
	fileuploadCSV1.R.CSVFileErrorCSVS = append(fileuploadCSV1.R.CSVFileErrorCSVS, fileuploadErrorCSV0)
	return nil
}

// AttachCSVFileCSV links an existing fileupload.csv row to this error record,
// updating both sides of the in-memory relationship.
func (fileuploadErrorCSV0 *FileuploadErrorCSV) AttachCSVFileCSV(ctx context.Context, exec bob.Executor, fileuploadCSV1 *FileuploadCSV) error {
	var err error
	_, err = attachFileuploadErrorCSVCSVFileCSV0(ctx, exec, 1, fileuploadErrorCSV0, fileuploadCSV1)
	if err != nil {
		return err
	}
	fileuploadErrorCSV0.R.CSVFileCSV = fileuploadCSV1
	fileuploadCSV1.R.CSVFileErrorCSVS = append(fileuploadCSV1.R.CSVFileErrorCSVS, fileuploadErrorCSV0)
	return nil
}
// fileuploadErrorCSVWhere provides type-safe WHERE filter builders for each
// column of fileupload.error_csv.
type fileuploadErrorCSVWhere[Q psql.Filterable] struct {
	Col psql.WhereMod[Q, int32]
	CSVFileID psql.WhereMod[Q, int32]
	ID psql.WhereMod[Q, int32]
	Line psql.WhereMod[Q, int32]
	Message psql.WhereMod[Q, string]
}

// AliasedAs returns the same filters rebound to the given table alias.
func (fileuploadErrorCSVWhere[Q]) AliasedAs(alias string) fileuploadErrorCSVWhere[Q] {
	return buildFileuploadErrorCSVWhere[Q](buildFileuploadErrorCSVColumns(alias))
}

// buildFileuploadErrorCSVWhere wires each column expression into a WhereMod.
func buildFileuploadErrorCSVWhere[Q psql.Filterable](cols fileuploadErrorCSVColumns) fileuploadErrorCSVWhere[Q] {
	return fileuploadErrorCSVWhere[Q]{
		Col: psql.Where[Q, int32](cols.Col),
		CSVFileID: psql.Where[Q, int32](cols.CSVFileID),
		ID: psql.Where[Q, int32](cols.ID),
		Line: psql.Where[Q, int32](cols.Line),
		Message: psql.Where[Q, string](cols.Message),
	}
}

// Preload stores an eagerly-retrieved related model on this row under the
// given relationship name. Only "CSVFileCSV" is supported.
func (o *FileuploadErrorCSV) Preload(name string, retrieved any) error {
	if o == nil {
		return nil
	}
	switch name {
	case "CSVFileCSV":
		rel, ok := retrieved.(*FileuploadCSV)
		if !ok {
			return fmt.Errorf("fileuploadErrorCSV cannot load %T as %q", retrieved, name)
		}
		o.R.CSVFileCSV = rel
		// Also set the inverse side of the relationship.
		if rel != nil {
			rel.R.CSVFileErrorCSVS = FileuploadErrorCSVSlice{o}
		}
		return nil
	default:
		return fmt.Errorf("fileuploadErrorCSV has no relationship %q", name)
	}
}
// fileuploadErrorCSVPreloader builds JOIN-based eager loaders for this
// model's relationships.
type fileuploadErrorCSVPreloader struct {
	CSVFileCSV func(...psql.PreloadOption) psql.Preloader
}

// buildFileuploadErrorCSVPreloader describes how to join fileupload.csv
// (csv_file_id -> file_id) so the relation can be preloaded in one query.
func buildFileuploadErrorCSVPreloader() fileuploadErrorCSVPreloader {
	return fileuploadErrorCSVPreloader{
		CSVFileCSV: func(opts ...psql.PreloadOption) psql.Preloader {
			return psql.Preload[*FileuploadCSV, FileuploadCSVSlice](psql.PreloadRel{
				Name: "CSVFileCSV",
				Sides: []psql.PreloadSide{
					{
						From: FileuploadErrorCSVS,
						To: FileuploadCSVS,
						FromColumns: []string{"csv_file_id"},
						ToColumns: []string{"file_id"},
					},
				},
			}, FileuploadCSVS.Columns.Names(), opts...)
		},
	}
}

// fileuploadErrorCSVThenLoader builds loaders that run a second query after
// the main one to populate relationships.
type fileuploadErrorCSVThenLoader[Q orm.Loadable] struct {
	CSVFileCSV func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
}

// buildFileuploadErrorCSVThenLoader adapts LoadCSVFileCSV into a then-loader.
func buildFileuploadErrorCSVThenLoader[Q orm.Loadable]() fileuploadErrorCSVThenLoader[Q] {
	type CSVFileCSVLoadInterface interface {
		LoadCSVFileCSV(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
	}
	return fileuploadErrorCSVThenLoader[Q]{
		CSVFileCSV: thenLoadBuilder[Q](
			"CSVFileCSV",
			func(ctx context.Context, exec bob.Executor, retrieved CSVFileCSVLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
				return retrieved.LoadCSVFileCSV(ctx, exec, mods...)
			},
		),
	}
}
// LoadCSVFileCSV loads the fileuploadErrorCSV's CSVFileCSV into the .R struct.
func (o *FileuploadErrorCSV) LoadCSVFileCSV(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
	if o == nil {
		return nil
	}
	// Reset the relationship
	o.R.CSVFileCSV = nil
	related, err := o.CSVFileCSV(mods...).One(ctx, exec)
	if err != nil {
		return err
	}
	// Set both sides of the relationship.
	related.R.CSVFileErrorCSVS = FileuploadErrorCSVSlice{o}
	o.R.CSVFileCSV = related
	return nil
}

// LoadCSVFileCSV loads the CSVFileCSV relation for every model in the slice
// with a single query, then matches rows by csv_file_id == file_id.
func (os FileuploadErrorCSVSlice) LoadCSVFileCSV(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
	if len(os) == 0 {
		return nil
	}
	fileuploadCSVS, err := os.CSVFileCSV(mods...).All(ctx, exec)
	if err != nil {
		return err
	}
	for _, o := range os {
		if o == nil {
			continue
		}
		for _, rel := range fileuploadCSVS {
			if !(o.CSVFileID == rel.FileID) {
				continue
			}
			rel.R.CSVFileErrorCSVS = append(rel.R.CSVFileErrorCSVS, o)
			o.R.CSVFileCSV = rel
			break
		}
	}
	return nil
}
// fileuploadErrorCSVJoins builds JOIN clauses (of kind typ, e.g. INNER/LEFT)
// from fileupload.error_csv to its related tables.
type fileuploadErrorCSVJoins[Q dialect.Joinable] struct {
	typ string
	CSVFileCSV modAs[Q, fileuploadCSVColumns]
}

// aliasedAs rebuilds the join helpers with columns bound to a new alias.
func (j fileuploadErrorCSVJoins[Q]) aliasedAs(alias string) fileuploadErrorCSVJoins[Q] {
	return buildFileuploadErrorCSVJoins[Q](buildFileuploadErrorCSVColumns(alias), j.typ)
}

// buildFileuploadErrorCSVJoins emits a join to fileupload.csv on
// csv.file_id = error_csv.csv_file_id.
func buildFileuploadErrorCSVJoins[Q dialect.Joinable](cols fileuploadErrorCSVColumns, typ string) fileuploadErrorCSVJoins[Q] {
	return fileuploadErrorCSVJoins[Q]{
		typ: typ,
		CSVFileCSV: modAs[Q, fileuploadCSVColumns]{
			c: FileuploadCSVS.Columns,
			f: func(to fileuploadCSVColumns) bob.Mod[Q] {
				mods := make(mods.QueryMods[Q], 0, 1)
				{
					mods = append(mods, dialect.Join[Q](typ, FileuploadCSVS.Name().As(to.Alias())).On(
						to.FileID.EQ(cols.CSVFileID),
					))
				}
				return mods
			},
		},
	}
}

View file

@ -0,0 +1,627 @@
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.
package models
import (
"context"
"fmt"
"io"
"github.com/Gleipnir-Technology/bob"
"github.com/Gleipnir-Technology/bob/dialect/psql"
"github.com/Gleipnir-Technology/bob/dialect/psql/dialect"
"github.com/Gleipnir-Technology/bob/dialect/psql/dm"
"github.com/Gleipnir-Technology/bob/dialect/psql/sm"
"github.com/Gleipnir-Technology/bob/dialect/psql/um"
"github.com/Gleipnir-Technology/bob/expr"
"github.com/Gleipnir-Technology/bob/mods"
"github.com/Gleipnir-Technology/bob/orm"
"github.com/Gleipnir-Technology/bob/types/pgtypes"
"github.com/aarondl/opt/omit"
)
// FileuploadErrorFile is an object representing the database table.
type FileuploadErrorFile struct {
	FileID int32 `db:"file_id" `
	ID int32 `db:"id,pk" `
	Message string `db:"message" `
	// R holds eagerly-loaded relationships; excluded from DB mapping.
	R fileuploadErrorFileR `db:"-" `
}

// FileuploadErrorFileSlice is an alias for a slice of pointers to FileuploadErrorFile.
// This should almost always be used instead of []*FileuploadErrorFile.
type FileuploadErrorFileSlice []*FileuploadErrorFile

// FileuploadErrorFiles contains methods to work with the error_file table
var FileuploadErrorFiles = psql.NewTablex[*FileuploadErrorFile, FileuploadErrorFileSlice, *FileuploadErrorFileSetter]("fileupload", "error_file", buildFileuploadErrorFileColumns("fileupload.error_file"))

// FileuploadErrorFilesQuery is a query on the error_file table
type FileuploadErrorFilesQuery = *psql.ViewQuery[*FileuploadErrorFile, FileuploadErrorFileSlice]

// fileuploadErrorFileR is where relationships are stored.
type fileuploadErrorFileR struct {
	File *FileuploadFile // fileupload.error_file.error_file_file_id_fkey
}

// buildFileuploadErrorFileColumns constructs the quoted column expressions
// for the table under the given alias.
func buildFileuploadErrorFileColumns(alias string) fileuploadErrorFileColumns {
	return fileuploadErrorFileColumns{
		ColumnsExpr: expr.NewColumnsExpr(
			"file_id", "id", "message",
		).WithParent("fileupload.error_file"),
		tableAlias: alias,
		FileID: psql.Quote(alias, "file_id"),
		ID: psql.Quote(alias, "id"),
		Message: psql.Quote(alias, "message"),
	}
}

// fileuploadErrorFileColumns exposes per-column expressions for query building.
type fileuploadErrorFileColumns struct {
	expr.ColumnsExpr
	tableAlias string
	FileID psql.Expression
	ID psql.Expression
	Message psql.Expression
}

// Alias returns the table alias these columns are bound to.
func (c fileuploadErrorFileColumns) Alias() string {
	return c.tableAlias
}

// AliasedAs rebuilds the columns under a new table alias.
func (fileuploadErrorFileColumns) AliasedAs(alias string) fileuploadErrorFileColumns {
	return buildFileuploadErrorFileColumns(alias)
}
// FileuploadErrorFileSetter is used for insert/upsert/update operations
// All values are optional, and do not have to be set
// Generated columns are not included
type FileuploadErrorFileSetter struct {
	FileID omit.Val[int32] `db:"file_id" `
	ID omit.Val[int32] `db:"id,pk" `
	Message omit.Val[string] `db:"message" `
}

// SetColumns returns the names of the columns that have a value set.
func (s FileuploadErrorFileSetter) SetColumns() []string {
	vals := make([]string, 0, 3)
	if s.FileID.IsValue() {
		vals = append(vals, "file_id")
	}
	if s.ID.IsValue() {
		vals = append(vals, "id")
	}
	if s.Message.IsValue() {
		vals = append(vals, "message")
	}
	return vals
}

// Overwrite copies every set value onto t, leaving unset fields untouched.
func (s FileuploadErrorFileSetter) Overwrite(t *FileuploadErrorFile) {
	if s.FileID.IsValue() {
		t.FileID = s.FileID.MustGet()
	}
	if s.ID.IsValue() {
		t.ID = s.ID.MustGet()
	}
	if s.Message.IsValue() {
		t.Message = s.Message.MustGet()
	}
}

// Apply appends this setter's values to an insert query — DEFAULT is used
// for any unset column — and registers the BeforeInsert hooks.
func (s *FileuploadErrorFileSetter) Apply(q *dialect.InsertQuery) {
	q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
		return FileuploadErrorFiles.BeforeInsertHooks.RunHooks(ctx, exec, s)
	})
	q.AppendValues(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
		vals := make([]bob.Expression, 3)
		if s.FileID.IsValue() {
			vals[0] = psql.Arg(s.FileID.MustGet())
		} else {
			vals[0] = psql.Raw("DEFAULT")
		}
		if s.ID.IsValue() {
			vals[1] = psql.Arg(s.ID.MustGet())
		} else {
			vals[1] = psql.Raw("DEFAULT")
		}
		if s.Message.IsValue() {
			vals[2] = psql.Arg(s.Message.MustGet())
		} else {
			vals[2] = psql.Raw("DEFAULT")
		}
		return bob.ExpressSlice(ctx, w, d, start, vals, "", ", ", "")
	}))
}

// UpdateMod converts the setter into a SET clause for an update query.
func (s FileuploadErrorFileSetter) UpdateMod() bob.Mod[*dialect.UpdateQuery] {
	return um.Set(s.Expressions()...)
}

// Expressions returns "col = value" expressions for every set field,
// optionally qualifying each column name with the given prefix.
func (s FileuploadErrorFileSetter) Expressions(prefix ...string) []bob.Expression {
	exprs := make([]bob.Expression, 0, 3)
	if s.FileID.IsValue() {
		exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
			psql.Quote(append(prefix, "file_id")...),
			psql.Arg(s.FileID),
		}})
	}
	if s.ID.IsValue() {
		exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
			psql.Quote(append(prefix, "id")...),
			psql.Arg(s.ID),
		}})
	}
	if s.Message.IsValue() {
		exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
			psql.Quote(append(prefix, "message")...),
			psql.Arg(s.Message),
		}})
	}
	return exprs
}
// FindFileuploadErrorFile retrieves a single record by primary key
// If cols is empty Find will return all columns.
func FindFileuploadErrorFile(ctx context.Context, exec bob.Executor, IDPK int32, cols ...string) (*FileuploadErrorFile, error) {
	if len(cols) == 0 {
		return FileuploadErrorFiles.Query(
			sm.Where(FileuploadErrorFiles.Columns.ID.EQ(psql.Arg(IDPK))),
		).One(ctx, exec)
	}
	return FileuploadErrorFiles.Query(
		sm.Where(FileuploadErrorFiles.Columns.ID.EQ(psql.Arg(IDPK))),
		sm.Columns(FileuploadErrorFiles.Columns.Only(cols...)),
	).One(ctx, exec)
}

// FileuploadErrorFileExists checks the presence of a single record by primary key
func FileuploadErrorFileExists(ctx context.Context, exec bob.Executor, IDPK int32) (bool, error) {
	return FileuploadErrorFiles.Query(
		sm.Where(FileuploadErrorFiles.Columns.ID.EQ(psql.Arg(IDPK))),
	).Exists(ctx, exec)
}

// AfterQueryHook is called after FileuploadErrorFile is retrieved from the database.
// It dispatches to the After* hook set matching the query type, wrapping the
// single model in a one-element slice.
func (o *FileuploadErrorFile) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error {
	var err error
	switch queryType {
	case bob.QueryTypeSelect:
		ctx, err = FileuploadErrorFiles.AfterSelectHooks.RunHooks(ctx, exec, FileuploadErrorFileSlice{o})
	case bob.QueryTypeInsert:
		ctx, err = FileuploadErrorFiles.AfterInsertHooks.RunHooks(ctx, exec, FileuploadErrorFileSlice{o})
	case bob.QueryTypeUpdate:
		ctx, err = FileuploadErrorFiles.AfterUpdateHooks.RunHooks(ctx, exec, FileuploadErrorFileSlice{o})
	case bob.QueryTypeDelete:
		ctx, err = FileuploadErrorFiles.AfterDeleteHooks.RunHooks(ctx, exec, FileuploadErrorFileSlice{o})
	}
	return err
}

// primaryKeyVals returns the primary key values of the FileuploadErrorFile
func (o *FileuploadErrorFile) primaryKeyVals() bob.Expression {
	return psql.Arg(o.ID)
}

// pkEQ builds the expression `fileupload.error_file.id = $pk` for this row.
func (o *FileuploadErrorFile) pkEQ() dialect.Expression {
	return psql.Quote("fileupload.error_file", "id").EQ(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
		return o.primaryKeyVals().WriteSQL(ctx, w, d, start)
	}))
}
// Update uses an executor to update the FileuploadErrorFile in place,
// copying the row returned by the database back into o.
func (o *FileuploadErrorFile) Update(ctx context.Context, exec bob.Executor, s *FileuploadErrorFileSetter) error {
	v, err := FileuploadErrorFiles.Update(s.UpdateMod(), um.Where(o.pkEQ())).One(ctx, exec)
	if err != nil {
		return err
	}
	// NOTE(review): `o.R = v.R` is immediately overwritten by `*o = *v`, so
	// any relationships previously loaded into o.R appear to be replaced by
	// v's — compare Reload below, which preserves them. Presumably a
	// generator quirk; confirm against BobGen before relying on o.R here.
	o.R = v.R
	*o = *v
	return nil
}

// Delete deletes a single FileuploadErrorFile record with an executor
func (o *FileuploadErrorFile) Delete(ctx context.Context, exec bob.Executor) error {
	_, err := FileuploadErrorFiles.Delete(dm.Where(o.pkEQ())).Exec(ctx, exec)
	return err
}

// Reload refreshes the FileuploadErrorFile using the executor, keeping the
// previously-loaded relationships (o.R) intact.
func (o *FileuploadErrorFile) Reload(ctx context.Context, exec bob.Executor) error {
	o2, err := FileuploadErrorFiles.Query(
		sm.Where(FileuploadErrorFiles.Columns.ID.EQ(psql.Arg(o.ID))),
	).One(ctx, exec)
	if err != nil {
		return err
	}
	// Carry the existing relationships over to the fresh copy.
	o2.R = o.R
	*o = *o2
	return nil
}
// AfterQueryHook is called after FileuploadErrorFileSlice is retrieved from
// the database; it dispatches to the After* hook set for the query type.
func (o FileuploadErrorFileSlice) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error {
	var err error
	switch queryType {
	case bob.QueryTypeSelect:
		ctx, err = FileuploadErrorFiles.AfterSelectHooks.RunHooks(ctx, exec, o)
	case bob.QueryTypeInsert:
		ctx, err = FileuploadErrorFiles.AfterInsertHooks.RunHooks(ctx, exec, o)
	case bob.QueryTypeUpdate:
		ctx, err = FileuploadErrorFiles.AfterUpdateHooks.RunHooks(ctx, exec, o)
	case bob.QueryTypeDelete:
		ctx, err = FileuploadErrorFiles.AfterDeleteHooks.RunHooks(ctx, exec, o)
	}
	return err
}

// pkIN builds `fileupload.error_file.id IN (pk, pk, ...)` for the slice.
// An empty slice yields the literal NULL, which matches no rows.
func (o FileuploadErrorFileSlice) pkIN() dialect.Expression {
	if len(o) == 0 {
		return psql.Raw("NULL")
	}
	return psql.Quote("fileupload.error_file", "id").In(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
		pkPairs := make([]bob.Expression, len(o))
		for i, row := range o {
			pkPairs[i] = row.primaryKeyVals()
		}
		return bob.ExpressSlice(ctx, w, d, start, pkPairs, "", ", ", "")
	}))
}

// copyMatchingRows finds models in the given slice that have the same primary key
// then it first copies the existing relationships from the old model to the new model
// and then replaces the old model in the slice with the new model
func (o FileuploadErrorFileSlice) copyMatchingRows(from ...*FileuploadErrorFile) {
	for i, old := range o {
		for _, new := range from {
			if new.ID != old.ID {
				continue
			}
			// Preserve relationships across the swap.
			new.R = old.R
			o[i] = new
			break
		}
	}
}
// UpdateMod modifies an update query with "WHERE primary_key IN (o...)"
func (o FileuploadErrorFileSlice) UpdateMod() bob.Mod[*dialect.UpdateQuery] {
	return bob.ModFunc[*dialect.UpdateQuery](func(q *dialect.UpdateQuery) {
		// Run the slice-level BeforeUpdate hooks before the query executes.
		q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
			return FileuploadErrorFiles.BeforeUpdateHooks.RunHooks(ctx, exec, o)
		})
		// After execution, sync any returned rows back into this slice.
		q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error {
			var err error
			switch retrieved := retrieved.(type) {
			case *FileuploadErrorFile:
				o.copyMatchingRows(retrieved)
			case []*FileuploadErrorFile:
				o.copyMatchingRows(retrieved...)
			case FileuploadErrorFileSlice:
				o.copyMatchingRows(retrieved...)
			default:
				// If the retrieved value is not a FileuploadErrorFile or a slice of FileuploadErrorFile
				// then run the AfterUpdateHooks on the slice
				_, err = FileuploadErrorFiles.AfterUpdateHooks.RunHooks(ctx, exec, o)
			}
			return err
		}))
		q.AppendWhere(o.pkIN())
	})
}

// DeleteMod modifies a delete query with "WHERE primary_key IN (o...)".
func (o FileuploadErrorFileSlice) DeleteMod() bob.Mod[*dialect.DeleteQuery] {
	return bob.ModFunc[*dialect.DeleteQuery](func(q *dialect.DeleteQuery) {
		q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
			return FileuploadErrorFiles.BeforeDeleteHooks.RunHooks(ctx, exec, o)
		})
		q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error {
			var err error
			switch retrieved := retrieved.(type) {
			case *FileuploadErrorFile:
				o.copyMatchingRows(retrieved)
			case []*FileuploadErrorFile:
				o.copyMatchingRows(retrieved...)
			case FileuploadErrorFileSlice:
				o.copyMatchingRows(retrieved...)
			default:
				// If the retrieved value is not a FileuploadErrorFile or a slice of FileuploadErrorFile
				// then run the AfterDeleteHooks on the slice
				_, err = FileuploadErrorFiles.AfterDeleteHooks.RunHooks(ctx, exec, o)
			}
			return err
		}))
		q.AppendWhere(o.pkIN())
	})
}

// UpdateAll applies vals to every row matching this slice's primary keys.
func (o FileuploadErrorFileSlice) UpdateAll(ctx context.Context, exec bob.Executor, vals FileuploadErrorFileSetter) error {
	// No-op on an empty slice.
	if len(o) == 0 {
		return nil
	}
	_, err := FileuploadErrorFiles.Update(vals.UpdateMod(), o.UpdateMod()).All(ctx, exec)
	return err
}

// DeleteAll deletes every row matching this slice's primary keys.
func (o FileuploadErrorFileSlice) DeleteAll(ctx context.Context, exec bob.Executor) error {
	if len(o) == 0 {
		return nil
	}
	_, err := FileuploadErrorFiles.Delete(o.DeleteMod()).Exec(ctx, exec)
	return err
}

// ReloadAll refetches all rows in the slice by primary key and copies the
// fresh values back into the existing models via copyMatchingRows.
func (o FileuploadErrorFileSlice) ReloadAll(ctx context.Context, exec bob.Executor) error {
	if len(o) == 0 {
		return nil
	}
	o2, err := FileuploadErrorFiles.Query(sm.Where(o.pkIN())).All(ctx, exec)
	if err != nil {
		return err
	}
	o.copyMatchingRows(o2...)
	return nil
}
// File starts a query for related objects on fileupload.file,
// filtered by this row's file_id foreign key.
func (o *FileuploadErrorFile) File(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadFilesQuery {
	return FileuploadFiles.Query(append(mods,
		sm.Where(FileuploadFiles.Columns.ID.EQ(psql.Arg(o.FileID))),
	)...)
}

// File starts a query for the fileupload.file rows related to any model in
// the slice, using a single "id IN (SELECT unnest(...))" filter.
func (os FileuploadErrorFileSlice) File(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadFilesQuery {
	// Collect the foreign-key values, skipping nil entries.
	pkFileID := make(pgtypes.Array[int32], 0, len(os))
	for _, o := range os {
		if o == nil {
			continue
		}
		pkFileID = append(pkFileID, o.FileID)
	}
	PKArgExpr := psql.Select(sm.Columns(
		psql.F("unnest", psql.Cast(psql.Arg(pkFileID), "integer[]")),
	))
	return FileuploadFiles.Query(append(mods,
		sm.Where(psql.Group(FileuploadFiles.Columns.ID).OP("IN", PKArgExpr)),
	)...)
}

// attachFileuploadErrorFileFile0 points fileuploadErrorFile0 at
// fileuploadFile1 by updating its file_id column in the database.
// The count parameter is unused here; it is part of the generated signature.
func attachFileuploadErrorFileFile0(ctx context.Context, exec bob.Executor, count int, fileuploadErrorFile0 *FileuploadErrorFile, fileuploadFile1 *FileuploadFile) (*FileuploadErrorFile, error) {
	setter := &FileuploadErrorFileSetter{
		FileID: omit.From(fileuploadFile1.ID),
	}
	err := fileuploadErrorFile0.Update(ctx, exec, setter)
	if err != nil {
		return nil, fmt.Errorf("attachFileuploadErrorFileFile0: %w", err)
	}
	return fileuploadErrorFile0, nil
}

// InsertFile inserts a new related fileupload.file row and attaches it to
// this error record, updating both sides of the in-memory relationship.
func (fileuploadErrorFile0 *FileuploadErrorFile) InsertFile(ctx context.Context, exec bob.Executor, related *FileuploadFileSetter) error {
	var err error
	fileuploadFile1, err := FileuploadFiles.Insert(related).One(ctx, exec)
	if err != nil {
		return fmt.Errorf("inserting related objects: %w", err)
	}
	_, err = attachFileuploadErrorFileFile0(ctx, exec, 1, fileuploadErrorFile0, fileuploadFile1)
	if err != nil {
		return err
	}
	// Keep both sides of the relationship consistent in memory.
	fileuploadErrorFile0.R.File = fileuploadFile1
	fileuploadFile1.R.ErrorFiles = append(fileuploadFile1.R.ErrorFiles, fileuploadErrorFile0)
	return nil
}

// AttachFile links an existing fileupload.file row to this error record,
// updating both sides of the in-memory relationship.
func (fileuploadErrorFile0 *FileuploadErrorFile) AttachFile(ctx context.Context, exec bob.Executor, fileuploadFile1 *FileuploadFile) error {
	var err error
	_, err = attachFileuploadErrorFileFile0(ctx, exec, 1, fileuploadErrorFile0, fileuploadFile1)
	if err != nil {
		return err
	}
	fileuploadErrorFile0.R.File = fileuploadFile1
	fileuploadFile1.R.ErrorFiles = append(fileuploadFile1.R.ErrorFiles, fileuploadErrorFile0)
	return nil
}
// fileuploadErrorFileWhere provides type-safe WHERE filter builders for each
// column of fileupload.error_file.
type fileuploadErrorFileWhere[Q psql.Filterable] struct {
	FileID psql.WhereMod[Q, int32]
	ID psql.WhereMod[Q, int32]
	Message psql.WhereMod[Q, string]
}

// AliasedAs returns the same filters rebound to the given table alias.
func (fileuploadErrorFileWhere[Q]) AliasedAs(alias string) fileuploadErrorFileWhere[Q] {
	return buildFileuploadErrorFileWhere[Q](buildFileuploadErrorFileColumns(alias))
}

// buildFileuploadErrorFileWhere wires each column expression into a WhereMod.
func buildFileuploadErrorFileWhere[Q psql.Filterable](cols fileuploadErrorFileColumns) fileuploadErrorFileWhere[Q] {
	return fileuploadErrorFileWhere[Q]{
		FileID: psql.Where[Q, int32](cols.FileID),
		ID: psql.Where[Q, int32](cols.ID),
		Message: psql.Where[Q, string](cols.Message),
	}
}

// Preload stores an eagerly-retrieved related model on this row under the
// given relationship name. Only "File" is supported.
func (o *FileuploadErrorFile) Preload(name string, retrieved any) error {
	if o == nil {
		return nil
	}
	switch name {
	case "File":
		rel, ok := retrieved.(*FileuploadFile)
		if !ok {
			return fmt.Errorf("fileuploadErrorFile cannot load %T as %q", retrieved, name)
		}
		o.R.File = rel
		// Also set the inverse side of the relationship.
		if rel != nil {
			rel.R.ErrorFiles = FileuploadErrorFileSlice{o}
		}
		return nil
	default:
		return fmt.Errorf("fileuploadErrorFile has no relationship %q", name)
	}
}
// fileuploadErrorFilePreloader builds JOIN-based eager loaders for this
// model's relationships.
type fileuploadErrorFilePreloader struct {
	File func(...psql.PreloadOption) psql.Preloader
}

// buildFileuploadErrorFilePreloader describes how to join fileupload.file
// (file_id -> id) so the relation can be preloaded in one query.
func buildFileuploadErrorFilePreloader() fileuploadErrorFilePreloader {
	return fileuploadErrorFilePreloader{
		File: func(opts ...psql.PreloadOption) psql.Preloader {
			return psql.Preload[*FileuploadFile, FileuploadFileSlice](psql.PreloadRel{
				Name: "File",
				Sides: []psql.PreloadSide{
					{
						From: FileuploadErrorFiles,
						To: FileuploadFiles,
						FromColumns: []string{"file_id"},
						ToColumns: []string{"id"},
					},
				},
			}, FileuploadFiles.Columns.Names(), opts...)
		},
	}
}

// fileuploadErrorFileThenLoader builds loaders that run a second query after
// the main one to populate relationships.
type fileuploadErrorFileThenLoader[Q orm.Loadable] struct {
	File func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
}

// buildFileuploadErrorFileThenLoader adapts LoadFile into a then-loader.
func buildFileuploadErrorFileThenLoader[Q orm.Loadable]() fileuploadErrorFileThenLoader[Q] {
	type FileLoadInterface interface {
		LoadFile(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
	}
	return fileuploadErrorFileThenLoader[Q]{
		File: thenLoadBuilder[Q](
			"File",
			func(ctx context.Context, exec bob.Executor, retrieved FileLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
				return retrieved.LoadFile(ctx, exec, mods...)
			},
		),
	}
}
// LoadFile loads the fileuploadErrorFile's File into the .R struct
func (o *FileuploadErrorFile) LoadFile(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
	if o == nil {
		return nil
	}
	// Reset the relationship
	o.R.File = nil
	related, err := o.File(mods...).One(ctx, exec)
	if err != nil {
		return err
	}
	// Set both sides of the relationship.
	related.R.ErrorFiles = FileuploadErrorFileSlice{o}
	o.R.File = related
	return nil
}

// LoadFile loads the File relation for every model in the slice with a
// single query, then matches rows by file_id == id.
func (os FileuploadErrorFileSlice) LoadFile(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
	if len(os) == 0 {
		return nil
	}
	fileuploadFiles, err := os.File(mods...).All(ctx, exec)
	if err != nil {
		return err
	}
	for _, o := range os {
		if o == nil {
			continue
		}
		for _, rel := range fileuploadFiles {
			if !(o.FileID == rel.ID) {
				continue
			}
			rel.R.ErrorFiles = append(rel.R.ErrorFiles, o)
			o.R.File = rel
			break
		}
	}
	return nil
}
// fileuploadErrorFileJoins builds JOIN clauses (of kind typ, e.g. INNER/LEFT)
// from fileupload.error_file to its related tables.
type fileuploadErrorFileJoins[Q dialect.Joinable] struct {
	typ string
	File modAs[Q, fileuploadFileColumns]
}

// aliasedAs rebuilds the join helpers with columns bound to a new alias.
func (j fileuploadErrorFileJoins[Q]) aliasedAs(alias string) fileuploadErrorFileJoins[Q] {
	return buildFileuploadErrorFileJoins[Q](buildFileuploadErrorFileColumns(alias), j.typ)
}

// buildFileuploadErrorFileJoins emits a join to fileupload.file on
// file.id = error_file.file_id.
func buildFileuploadErrorFileJoins[Q dialect.Joinable](cols fileuploadErrorFileColumns, typ string) fileuploadErrorFileJoins[Q] {
	return fileuploadErrorFileJoins[Q]{
		typ: typ,
		File: modAs[Q, fileuploadFileColumns]{
			c: FileuploadFiles.Columns,
			f: func(to fileuploadFileColumns) bob.Mod[Q] {
				mods := make(mods.QueryMods[Q], 0, 1)
				{
					mods = append(mods, dialect.Join[Q](typ, FileuploadFiles.Name().As(to.Alias())).On(
						to.ID.EQ(cols.FileID),
					))
				}
				return mods
			},
		},
	}
}

View file

@ -56,10 +56,10 @@ type FileuploadFilesQuery = *psql.ViewQuery[*FileuploadFile, FileuploadFileSlice
// fileuploadFileR is where relationships are stored.
type fileuploadFileR struct {
CSV *FileuploadCSV // fileupload.csv.csv_file_id_fkey
Errors FileuploadErrorSlice // fileupload.error.error_file_id_fkey
CreatorUser *User // fileupload.file.file_creator_id_fkey
Organization *Organization // fileupload.file.file_organization_id_fkey
CSV *FileuploadCSV // fileupload.csv.csv_file_id_fkey
ErrorFiles FileuploadErrorFileSlice // fileupload.error_file.error_file_file_id_fkey
CreatorUser *User // fileupload.file.file_creator_id_fkey
Organization *Organization // fileupload.file.file_organization_id_fkey
}
func buildFileuploadFileColumns(alias string) fileuploadFileColumns {
@ -586,14 +586,14 @@ func (os FileuploadFileSlice) CSV(mods ...bob.Mod[*dialect.SelectQuery]) Fileupl
)...)
}
// Errors starts a query for related objects on fileupload.error
func (o *FileuploadFile) Errors(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadErrorsQuery {
return FileuploadErrors.Query(append(mods,
sm.Where(FileuploadErrors.Columns.FileID.EQ(psql.Arg(o.ID))),
// ErrorFiles starts a query for related objects on fileupload.error_file
func (o *FileuploadFile) ErrorFiles(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadErrorFilesQuery {
return FileuploadErrorFiles.Query(append(mods,
sm.Where(FileuploadErrorFiles.Columns.FileID.EQ(psql.Arg(o.ID))),
)...)
}
func (os FileuploadFileSlice) Errors(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadErrorsQuery {
func (os FileuploadFileSlice) ErrorFiles(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadErrorFilesQuery {
pkID := make(pgtypes.Array[int32], 0, len(os))
for _, o := range os {
if o == nil {
@ -605,8 +605,8 @@ func (os FileuploadFileSlice) Errors(mods ...bob.Mod[*dialect.SelectQuery]) File
psql.F("unnest", psql.Cast(psql.Arg(pkID), "integer[]")),
))
return FileuploadErrors.Query(append(mods,
sm.Where(psql.Group(FileuploadErrors.Columns.FileID).OP("IN", PKArgExpr)),
return FileuploadErrorFiles.Query(append(mods,
sm.Where(psql.Group(FileuploadErrorFiles.Columns.FileID).OP("IN", PKArgExpr)),
)...)
}
@ -712,66 +712,66 @@ func (fileuploadFile0 *FileuploadFile) AttachCSV(ctx context.Context, exec bob.E
return nil
}
func insertFileuploadFileErrors0(ctx context.Context, exec bob.Executor, fileuploadErrors1 []*FileuploadErrorSetter, fileuploadFile0 *FileuploadFile) (FileuploadErrorSlice, error) {
for i := range fileuploadErrors1 {
fileuploadErrors1[i].FileID = omit.From(fileuploadFile0.ID)
func insertFileuploadFileErrorFiles0(ctx context.Context, exec bob.Executor, fileuploadErrorFiles1 []*FileuploadErrorFileSetter, fileuploadFile0 *FileuploadFile) (FileuploadErrorFileSlice, error) {
for i := range fileuploadErrorFiles1 {
fileuploadErrorFiles1[i].FileID = omit.From(fileuploadFile0.ID)
}
ret, err := FileuploadErrors.Insert(bob.ToMods(fileuploadErrors1...)).All(ctx, exec)
ret, err := FileuploadErrorFiles.Insert(bob.ToMods(fileuploadErrorFiles1...)).All(ctx, exec)
if err != nil {
return ret, fmt.Errorf("insertFileuploadFileErrors0: %w", err)
return ret, fmt.Errorf("insertFileuploadFileErrorFiles0: %w", err)
}
return ret, nil
}
func attachFileuploadFileErrors0(ctx context.Context, exec bob.Executor, count int, fileuploadErrors1 FileuploadErrorSlice, fileuploadFile0 *FileuploadFile) (FileuploadErrorSlice, error) {
setter := &FileuploadErrorSetter{
func attachFileuploadFileErrorFiles0(ctx context.Context, exec bob.Executor, count int, fileuploadErrorFiles1 FileuploadErrorFileSlice, fileuploadFile0 *FileuploadFile) (FileuploadErrorFileSlice, error) {
setter := &FileuploadErrorFileSetter{
FileID: omit.From(fileuploadFile0.ID),
}
err := fileuploadErrors1.UpdateAll(ctx, exec, *setter)
err := fileuploadErrorFiles1.UpdateAll(ctx, exec, *setter)
if err != nil {
return nil, fmt.Errorf("attachFileuploadFileErrors0: %w", err)
return nil, fmt.Errorf("attachFileuploadFileErrorFiles0: %w", err)
}
return fileuploadErrors1, nil
return fileuploadErrorFiles1, nil
}
func (fileuploadFile0 *FileuploadFile) InsertErrors(ctx context.Context, exec bob.Executor, related ...*FileuploadErrorSetter) error {
func (fileuploadFile0 *FileuploadFile) InsertErrorFiles(ctx context.Context, exec bob.Executor, related ...*FileuploadErrorFileSetter) error {
if len(related) == 0 {
return nil
}
var err error
fileuploadErrors1, err := insertFileuploadFileErrors0(ctx, exec, related, fileuploadFile0)
fileuploadErrorFiles1, err := insertFileuploadFileErrorFiles0(ctx, exec, related, fileuploadFile0)
if err != nil {
return err
}
fileuploadFile0.R.Errors = append(fileuploadFile0.R.Errors, fileuploadErrors1...)
fileuploadFile0.R.ErrorFiles = append(fileuploadFile0.R.ErrorFiles, fileuploadErrorFiles1...)
for _, rel := range fileuploadErrors1 {
for _, rel := range fileuploadErrorFiles1 {
rel.R.File = fileuploadFile0
}
return nil
}
func (fileuploadFile0 *FileuploadFile) AttachErrors(ctx context.Context, exec bob.Executor, related ...*FileuploadError) error {
func (fileuploadFile0 *FileuploadFile) AttachErrorFiles(ctx context.Context, exec bob.Executor, related ...*FileuploadErrorFile) error {
if len(related) == 0 {
return nil
}
var err error
fileuploadErrors1 := FileuploadErrorSlice(related)
fileuploadErrorFiles1 := FileuploadErrorFileSlice(related)
_, err = attachFileuploadFileErrors0(ctx, exec, len(related), fileuploadErrors1, fileuploadFile0)
_, err = attachFileuploadFileErrorFiles0(ctx, exec, len(related), fileuploadErrorFiles1, fileuploadFile0)
if err != nil {
return err
}
fileuploadFile0.R.Errors = append(fileuploadFile0.R.Errors, fileuploadErrors1...)
fileuploadFile0.R.ErrorFiles = append(fileuploadFile0.R.ErrorFiles, fileuploadErrorFiles1...)
for _, rel := range related {
rel.R.File = fileuploadFile0
@ -926,13 +926,13 @@ func (o *FileuploadFile) Preload(name string, retrieved any) error {
rel.R.File = o
}
return nil
case "Errors":
rels, ok := retrieved.(FileuploadErrorSlice)
case "ErrorFiles":
rels, ok := retrieved.(FileuploadErrorFileSlice)
if !ok {
return fmt.Errorf("fileuploadFile cannot load %T as %q", retrieved, name)
}
o.R.Errors = rels
o.R.ErrorFiles = rels
for _, rel := range rels {
if rel != nil {
@ -1021,7 +1021,7 @@ func buildFileuploadFilePreloader() fileuploadFilePreloader {
type fileuploadFileThenLoader[Q orm.Loadable] struct {
CSV func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
Errors func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
ErrorFiles func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
CreatorUser func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
Organization func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
}
@ -1030,8 +1030,8 @@ func buildFileuploadFileThenLoader[Q orm.Loadable]() fileuploadFileThenLoader[Q]
type CSVLoadInterface interface {
LoadCSV(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
}
type ErrorsLoadInterface interface {
LoadErrors(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
type ErrorFilesLoadInterface interface {
LoadErrorFiles(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
}
type CreatorUserLoadInterface interface {
LoadCreatorUser(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
@ -1047,10 +1047,10 @@ func buildFileuploadFileThenLoader[Q orm.Loadable]() fileuploadFileThenLoader[Q]
return retrieved.LoadCSV(ctx, exec, mods...)
},
),
Errors: thenLoadBuilder[Q](
"Errors",
func(ctx context.Context, exec bob.Executor, retrieved ErrorsLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
return retrieved.LoadErrors(ctx, exec, mods...)
ErrorFiles: thenLoadBuilder[Q](
"ErrorFiles",
func(ctx context.Context, exec bob.Executor, retrieved ErrorFilesLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
return retrieved.LoadErrorFiles(ctx, exec, mods...)
},
),
CreatorUser: thenLoadBuilder[Q](
@ -1120,16 +1120,16 @@ func (os FileuploadFileSlice) LoadCSV(ctx context.Context, exec bob.Executor, mo
return nil
}
// LoadErrors loads the fileuploadFile's Errors into the .R struct
func (o *FileuploadFile) LoadErrors(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
// LoadErrorFiles loads the fileuploadFile's ErrorFiles into the .R struct
func (o *FileuploadFile) LoadErrorFiles(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
if o == nil {
return nil
}
// Reset the relationship
o.R.Errors = nil
o.R.ErrorFiles = nil
related, err := o.Errors(mods...).All(ctx, exec)
related, err := o.ErrorFiles(mods...).All(ctx, exec)
if err != nil {
return err
}
@ -1138,17 +1138,17 @@ func (o *FileuploadFile) LoadErrors(ctx context.Context, exec bob.Executor, mods
rel.R.File = o
}
o.R.Errors = related
o.R.ErrorFiles = related
return nil
}
// LoadErrors loads the fileuploadFile's Errors into the .R struct
func (os FileuploadFileSlice) LoadErrors(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
// LoadErrorFiles loads the fileuploadFile's ErrorFiles into the .R struct
func (os FileuploadFileSlice) LoadErrorFiles(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
if len(os) == 0 {
return nil
}
fileuploadErrors, err := os.Errors(mods...).All(ctx, exec)
fileuploadErrorFiles, err := os.ErrorFiles(mods...).All(ctx, exec)
if err != nil {
return err
}
@ -1158,7 +1158,7 @@ func (os FileuploadFileSlice) LoadErrors(ctx context.Context, exec bob.Executor,
continue
}
o.R.Errors = nil
o.R.ErrorFiles = nil
}
for _, o := range os {
@ -1166,7 +1166,7 @@ func (os FileuploadFileSlice) LoadErrors(ctx context.Context, exec bob.Executor,
continue
}
for _, rel := range fileuploadErrors {
for _, rel := range fileuploadErrorFiles {
if !(o.ID == rel.FileID) {
continue
@ -1174,7 +1174,7 @@ func (os FileuploadFileSlice) LoadErrors(ctx context.Context, exec bob.Executor,
rel.R.File = o
o.R.Errors = append(o.R.Errors, rel)
o.R.ErrorFiles = append(o.R.ErrorFiles, rel)
}
}
@ -1287,7 +1287,7 @@ func (os FileuploadFileSlice) LoadOrganization(ctx context.Context, exec bob.Exe
// fileuploadFileC is where relationship counts are stored.
type fileuploadFileC struct {
Errors *int64
ErrorFiles *int64
}
// PreloadCount sets a count in the C struct by name
@ -1297,20 +1297,20 @@ func (o *FileuploadFile) PreloadCount(name string, count int64) error {
}
switch name {
case "Errors":
o.C.Errors = &count
case "ErrorFiles":
o.C.ErrorFiles = &count
}
return nil
}
type fileuploadFileCountPreloader struct {
Errors func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader
ErrorFiles func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader
}
func buildFileuploadFileCountPreloader() fileuploadFileCountPreloader {
return fileuploadFileCountPreloader{
Errors: func(mods ...bob.Mod[*dialect.SelectQuery]) psql.Preloader {
return countPreloader[*FileuploadFile]("Errors", func(parent string) bob.Expression {
ErrorFiles: func(mods ...bob.Mod[*dialect.SelectQuery]) psql.Preloader {
return countPreloader[*FileuploadFile]("ErrorFiles", func(parent string) bob.Expression {
// Build a correlated subquery: (SELECT COUNT(*) FROM related WHERE fk = parent.pk)
if parent == "" {
parent = FileuploadFiles.Alias()
@ -1319,8 +1319,8 @@ func buildFileuploadFileCountPreloader() fileuploadFileCountPreloader {
subqueryMods := []bob.Mod[*dialect.SelectQuery]{
sm.Columns(psql.Raw("count(*)")),
sm.From(FileuploadErrors.Name()),
sm.Where(psql.Quote(FileuploadErrors.Alias(), "file_id").EQ(psql.Quote(parent, "id"))),
sm.From(FileuploadErrorFiles.Name()),
sm.Where(psql.Quote(FileuploadErrorFiles.Alias(), "file_id").EQ(psql.Quote(parent, "id"))),
}
subqueryMods = append(subqueryMods, mods...)
return psql.Group(psql.Select(subqueryMods...).Expression)
@ -1330,47 +1330,47 @@ func buildFileuploadFileCountPreloader() fileuploadFileCountPreloader {
}
type fileuploadFileCountThenLoader[Q orm.Loadable] struct {
Errors func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
ErrorFiles func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
}
func buildFileuploadFileCountThenLoader[Q orm.Loadable]() fileuploadFileCountThenLoader[Q] {
type ErrorsCountInterface interface {
LoadCountErrors(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
type ErrorFilesCountInterface interface {
LoadCountErrorFiles(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
}
return fileuploadFileCountThenLoader[Q]{
Errors: countThenLoadBuilder[Q](
"Errors",
func(ctx context.Context, exec bob.Executor, retrieved ErrorsCountInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
return retrieved.LoadCountErrors(ctx, exec, mods...)
ErrorFiles: countThenLoadBuilder[Q](
"ErrorFiles",
func(ctx context.Context, exec bob.Executor, retrieved ErrorFilesCountInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
return retrieved.LoadCountErrorFiles(ctx, exec, mods...)
},
),
}
}
// LoadCountErrors loads the count of Errors into the C struct
func (o *FileuploadFile) LoadCountErrors(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
// LoadCountErrorFiles loads the count of ErrorFiles into the C struct
func (o *FileuploadFile) LoadCountErrorFiles(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
if o == nil {
return nil
}
count, err := o.Errors(mods...).Count(ctx, exec)
count, err := o.ErrorFiles(mods...).Count(ctx, exec)
if err != nil {
return err
}
o.C.Errors = &count
o.C.ErrorFiles = &count
return nil
}
// LoadCountErrors loads the count of Errors for a slice
func (os FileuploadFileSlice) LoadCountErrors(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
// LoadCountErrorFiles loads the count of ErrorFiles for a slice
func (os FileuploadFileSlice) LoadCountErrorFiles(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
if len(os) == 0 {
return nil
}
for _, o := range os {
if err := o.LoadCountErrors(ctx, exec, mods...); err != nil {
if err := o.LoadCountErrorFiles(ctx, exec, mods...); err != nil {
return err
}
}
@ -1381,7 +1381,7 @@ func (os FileuploadFileSlice) LoadCountErrors(ctx context.Context, exec bob.Exec
type fileuploadFileJoins[Q dialect.Joinable] struct {
typ string
CSV modAs[Q, fileuploadCSVColumns]
Errors modAs[Q, fileuploadErrorColumns]
ErrorFiles modAs[Q, fileuploadErrorFileColumns]
CreatorUser modAs[Q, userColumns]
Organization modAs[Q, organizationColumns]
}
@ -1407,13 +1407,13 @@ func buildFileuploadFileJoins[Q dialect.Joinable](cols fileuploadFileColumns, ty
return mods
},
},
Errors: modAs[Q, fileuploadErrorColumns]{
c: FileuploadErrors.Columns,
f: func(to fileuploadErrorColumns) bob.Mod[Q] {
ErrorFiles: modAs[Q, fileuploadErrorFileColumns]{
c: FileuploadErrorFiles.Columns,
f: func(to fileuploadErrorFileColumns) bob.Mod[Q] {
mods := make(mods.QueryMods[Q], 0, 1)
{
mods = append(mods, dialect.Join[Q](typ, FileuploadErrors.Name().As(to.Alias())).On(
mods = append(mods, dialect.Join[Q](typ, FileuploadErrorFiles.Name().As(to.Alias())).On(
to.FileID.EQ(cols.ID),
))
}

View file

@ -67,7 +67,7 @@ type organizationR struct {
Mosquitoinspections FieldseekerMosquitoinspectionSlice // fieldseeker.mosquitoinspection.mosquitoinspection_organization_id_fkey
Pointlocations FieldseekerPointlocationSlice // fieldseeker.pointlocation.pointlocation_organization_id_fkey
Polygonlocations FieldseekerPolygonlocationSlice // fieldseeker.polygonlocation.polygonlocation_organization_id_fkey
Pools FieldseekerPoolSlice // fieldseeker.pool.pool_organization_id_fkey
FieldseekerPool FieldseekerPoolSlice // fieldseeker.pool.pool_organization_id_fkey
Pooldetails FieldseekerPooldetailSlice // fieldseeker.pooldetail.pooldetail_organization_id_fkey
Proposedtreatmentareas FieldseekerProposedtreatmentareaSlice // fieldseeker.proposedtreatmentarea.proposedtreatmentarea_organization_id_fkey
Qamosquitoinspections FieldseekerQamosquitoinspectionSlice // fieldseeker.qamosquitoinspection.qamosquitoinspection_organization_id_fkey
@ -90,6 +90,7 @@ type organizationR struct {
NoteAudios NoteAudioSlice // note_audio.note_audio_organization_id_fkey
NoteImages NoteImageSlice // note_image.note_image_organization_id_fkey
ImportDistrictGidDistrict *ImportDistrict // organization.organization_import_district_gid_fkey
Pools PoolSlice // pool.pool_organization_id_fkey
Nuisances PublicreportNuisanceSlice // publicreport.nuisance.nuisance_organization_id_fkey
PublicreportPool PublicreportPoolSlice // publicreport.pool.pool_organization_id_fkey
Quicks PublicreportQuickSlice // publicreport.quick.quick_organization_id_fkey
@ -872,14 +873,14 @@ func (os OrganizationSlice) Polygonlocations(mods ...bob.Mod[*dialect.SelectQuer
)...)
}
// Pools starts a query for related objects on fieldseeker.pool
func (o *Organization) Pools(mods ...bob.Mod[*dialect.SelectQuery]) FieldseekerPoolsQuery {
// FieldseekerPool starts a query for related objects on fieldseeker.pool
func (o *Organization) FieldseekerPool(mods ...bob.Mod[*dialect.SelectQuery]) FieldseekerPoolsQuery {
return FieldseekerPools.Query(append(mods,
sm.Where(FieldseekerPools.Columns.OrganizationID.EQ(psql.Arg(o.ID))),
)...)
}
func (os OrganizationSlice) Pools(mods ...bob.Mod[*dialect.SelectQuery]) FieldseekerPoolsQuery {
func (os OrganizationSlice) FieldseekerPool(mods ...bob.Mod[*dialect.SelectQuery]) FieldseekerPoolsQuery {
pkID := make(pgtypes.Array[int32], 0, len(os))
for _, o := range os {
if o == nil {
@ -1424,6 +1425,30 @@ func (os OrganizationSlice) ImportDistrictGidDistrict(mods ...bob.Mod[*dialect.S
)...)
}
// Pools starts a query for related objects on pool
func (o *Organization) Pools(mods ...bob.Mod[*dialect.SelectQuery]) PoolsQuery {
return Pools.Query(append(mods,
sm.Where(Pools.Columns.OrganizationID.EQ(psql.Arg(o.ID))),
)...)
}
func (os OrganizationSlice) Pools(mods ...bob.Mod[*dialect.SelectQuery]) PoolsQuery {
pkID := make(pgtypes.Array[int32], 0, len(os))
for _, o := range os {
if o == nil {
continue
}
pkID = append(pkID, o.ID)
}
PKArgExpr := psql.Select(sm.Columns(
psql.F("unnest", psql.Cast(psql.Arg(pkID), "integer[]")),
))
return Pools.Query(append(mods,
sm.Where(psql.Group(Pools.Columns.OrganizationID).OP("IN", PKArgExpr)),
)...)
}
// Nuisances starts a query for related objects on publicreport.nuisance
func (o *Organization) Nuisances(mods ...bob.Mod[*dialect.SelectQuery]) PublicreportNuisancesQuery {
return PublicreportNuisances.Query(append(mods,
@ -2330,45 +2355,45 @@ func (organization0 *Organization) AttachPolygonlocations(ctx context.Context, e
return nil
}
func insertOrganizationPools0(ctx context.Context, exec bob.Executor, fieldseekerPools1 []*FieldseekerPoolSetter, organization0 *Organization) (FieldseekerPoolSlice, error) {
func insertOrganizationFieldseekerPool0(ctx context.Context, exec bob.Executor, fieldseekerPools1 []*FieldseekerPoolSetter, organization0 *Organization) (FieldseekerPoolSlice, error) {
for i := range fieldseekerPools1 {
fieldseekerPools1[i].OrganizationID = omit.From(organization0.ID)
}
ret, err := FieldseekerPools.Insert(bob.ToMods(fieldseekerPools1...)).All(ctx, exec)
if err != nil {
return ret, fmt.Errorf("insertOrganizationPools0: %w", err)
return ret, fmt.Errorf("insertOrganizationFieldseekerPool0: %w", err)
}
return ret, nil
}
func attachOrganizationPools0(ctx context.Context, exec bob.Executor, count int, fieldseekerPools1 FieldseekerPoolSlice, organization0 *Organization) (FieldseekerPoolSlice, error) {
func attachOrganizationFieldseekerPool0(ctx context.Context, exec bob.Executor, count int, fieldseekerPools1 FieldseekerPoolSlice, organization0 *Organization) (FieldseekerPoolSlice, error) {
setter := &FieldseekerPoolSetter{
OrganizationID: omit.From(organization0.ID),
}
err := fieldseekerPools1.UpdateAll(ctx, exec, *setter)
if err != nil {
return nil, fmt.Errorf("attachOrganizationPools0: %w", err)
return nil, fmt.Errorf("attachOrganizationFieldseekerPool0: %w", err)
}
return fieldseekerPools1, nil
}
func (organization0 *Organization) InsertPools(ctx context.Context, exec bob.Executor, related ...*FieldseekerPoolSetter) error {
func (organization0 *Organization) InsertFieldseekerPool(ctx context.Context, exec bob.Executor, related ...*FieldseekerPoolSetter) error {
if len(related) == 0 {
return nil
}
var err error
fieldseekerPools1, err := insertOrganizationPools0(ctx, exec, related, organization0)
fieldseekerPools1, err := insertOrganizationFieldseekerPool0(ctx, exec, related, organization0)
if err != nil {
return err
}
organization0.R.Pools = append(organization0.R.Pools, fieldseekerPools1...)
organization0.R.FieldseekerPool = append(organization0.R.FieldseekerPool, fieldseekerPools1...)
for _, rel := range fieldseekerPools1 {
rel.R.Organization = organization0
@ -2376,7 +2401,7 @@ func (organization0 *Organization) InsertPools(ctx context.Context, exec bob.Exe
return nil
}
func (organization0 *Organization) AttachPools(ctx context.Context, exec bob.Executor, related ...*FieldseekerPool) error {
func (organization0 *Organization) AttachFieldseekerPool(ctx context.Context, exec bob.Executor, related ...*FieldseekerPool) error {
if len(related) == 0 {
return nil
}
@ -2384,12 +2409,12 @@ func (organization0 *Organization) AttachPools(ctx context.Context, exec bob.Exe
var err error
fieldseekerPools1 := FieldseekerPoolSlice(related)
_, err = attachOrganizationPools0(ctx, exec, len(related), fieldseekerPools1, organization0)
_, err = attachOrganizationFieldseekerPool0(ctx, exec, len(related), fieldseekerPools1, organization0)
if err != nil {
return err
}
organization0.R.Pools = append(organization0.R.Pools, fieldseekerPools1...)
organization0.R.FieldseekerPool = append(organization0.R.FieldseekerPool, fieldseekerPools1...)
for _, rel := range related {
rel.R.Organization = organization0
@ -3874,6 +3899,74 @@ func (organization0 *Organization) AttachImportDistrictGidDistrict(ctx context.C
return nil
}
func insertOrganizationPools0(ctx context.Context, exec bob.Executor, pools1 []*PoolSetter, organization0 *Organization) (PoolSlice, error) {
for i := range pools1 {
pools1[i].OrganizationID = omit.From(organization0.ID)
}
ret, err := Pools.Insert(bob.ToMods(pools1...)).All(ctx, exec)
if err != nil {
return ret, fmt.Errorf("insertOrganizationPools0: %w", err)
}
return ret, nil
}
func attachOrganizationPools0(ctx context.Context, exec bob.Executor, count int, pools1 PoolSlice, organization0 *Organization) (PoolSlice, error) {
setter := &PoolSetter{
OrganizationID: omit.From(organization0.ID),
}
err := pools1.UpdateAll(ctx, exec, *setter)
if err != nil {
return nil, fmt.Errorf("attachOrganizationPools0: %w", err)
}
return pools1, nil
}
func (organization0 *Organization) InsertPools(ctx context.Context, exec bob.Executor, related ...*PoolSetter) error {
if len(related) == 0 {
return nil
}
var err error
pools1, err := insertOrganizationPools0(ctx, exec, related, organization0)
if err != nil {
return err
}
organization0.R.Pools = append(organization0.R.Pools, pools1...)
for _, rel := range pools1 {
rel.R.Organization = organization0
}
return nil
}
func (organization0 *Organization) AttachPools(ctx context.Context, exec bob.Executor, related ...*Pool) error {
if len(related) == 0 {
return nil
}
var err error
pools1 := PoolSlice(related)
_, err = attachOrganizationPools0(ctx, exec, len(related), pools1, organization0)
if err != nil {
return err
}
organization0.R.Pools = append(organization0.R.Pools, pools1...)
for _, rel := range related {
rel.R.Organization = organization0
}
return nil
}
func insertOrganizationNuisances0(ctx context.Context, exec bob.Executor, publicreportNuisances1 []*PublicreportNuisanceSetter, organization0 *Organization) (PublicreportNuisanceSlice, error) {
for i := range publicreportNuisances1 {
publicreportNuisances1[i].OrganizationID = omitnull.From(organization0.ID)
@ -4350,13 +4443,13 @@ func (o *Organization) Preload(name string, retrieved any) error {
}
}
return nil
case "Pools":
case "FieldseekerPool":
rels, ok := retrieved.(FieldseekerPoolSlice)
if !ok {
return fmt.Errorf("organization cannot load %T as %q", retrieved, name)
}
o.R.Pools = rels
o.R.FieldseekerPool = rels
for _, rel := range rels {
if rel != nil {
@ -4670,6 +4763,20 @@ func (o *Organization) Preload(name string, retrieved any) error {
rel.R.ImportDistrictGidOrganization = o
}
return nil
case "Pools":
rels, ok := retrieved.(PoolSlice)
if !ok {
return fmt.Errorf("organization cannot load %T as %q", retrieved, name)
}
o.R.Pools = rels
for _, rel := range rels {
if rel != nil {
rel.R.Organization = o
}
}
return nil
case "Nuisances":
rels, ok := retrieved.(PublicreportNuisanceSlice)
if !ok {
@ -4766,7 +4873,7 @@ type organizationThenLoader[Q orm.Loadable] struct {
Mosquitoinspections func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
Pointlocations func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
Polygonlocations func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
Pools func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
FieldseekerPool func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
Pooldetails func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
Proposedtreatmentareas func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
Qamosquitoinspections func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
@ -4789,6 +4896,7 @@ type organizationThenLoader[Q orm.Loadable] struct {
NoteAudios func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
NoteImages func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
ImportDistrictGidDistrict func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
Pools func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
Nuisances func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
PublicreportPool func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
Quicks func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
@ -4832,8 +4940,8 @@ func buildOrganizationThenLoader[Q orm.Loadable]() organizationThenLoader[Q] {
type PolygonlocationsLoadInterface interface {
LoadPolygonlocations(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
}
type PoolsLoadInterface interface {
LoadPools(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
type FieldseekerPoolLoadInterface interface {
LoadFieldseekerPool(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
}
type PooldetailsLoadInterface interface {
LoadPooldetails(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
@ -4901,6 +5009,9 @@ func buildOrganizationThenLoader[Q orm.Loadable]() organizationThenLoader[Q] {
type ImportDistrictGidDistrictLoadInterface interface {
LoadImportDistrictGidDistrict(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
}
type PoolsLoadInterface interface {
LoadPools(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
}
type NuisancesLoadInterface interface {
LoadNuisances(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
}
@ -4987,10 +5098,10 @@ func buildOrganizationThenLoader[Q orm.Loadable]() organizationThenLoader[Q] {
return retrieved.LoadPolygonlocations(ctx, exec, mods...)
},
),
Pools: thenLoadBuilder[Q](
"Pools",
func(ctx context.Context, exec bob.Executor, retrieved PoolsLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
return retrieved.LoadPools(ctx, exec, mods...)
FieldseekerPool: thenLoadBuilder[Q](
"FieldseekerPool",
func(ctx context.Context, exec bob.Executor, retrieved FieldseekerPoolLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
return retrieved.LoadFieldseekerPool(ctx, exec, mods...)
},
),
Pooldetails: thenLoadBuilder[Q](
@ -5125,6 +5236,12 @@ func buildOrganizationThenLoader[Q orm.Loadable]() organizationThenLoader[Q] {
return retrieved.LoadImportDistrictGidDistrict(ctx, exec, mods...)
},
),
Pools: thenLoadBuilder[Q](
"Pools",
func(ctx context.Context, exec bob.Executor, retrieved PoolsLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
return retrieved.LoadPools(ctx, exec, mods...)
},
),
Nuisances: thenLoadBuilder[Q](
"Nuisances",
func(ctx context.Context, exec bob.Executor, retrieved NuisancesLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
@ -5924,16 +6041,16 @@ func (os OrganizationSlice) LoadPolygonlocations(ctx context.Context, exec bob.E
return nil
}
// LoadPools loads the organization's Pools into the .R struct
func (o *Organization) LoadPools(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
// LoadFieldseekerPool loads the organization's FieldseekerPool into the .R struct
func (o *Organization) LoadFieldseekerPool(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
if o == nil {
return nil
}
// Reset the relationship
o.R.Pools = nil
o.R.FieldseekerPool = nil
related, err := o.Pools(mods...).All(ctx, exec)
related, err := o.FieldseekerPool(mods...).All(ctx, exec)
if err != nil {
return err
}
@ -5942,17 +6059,17 @@ func (o *Organization) LoadPools(ctx context.Context, exec bob.Executor, mods ..
rel.R.Organization = o
}
o.R.Pools = related
o.R.FieldseekerPool = related
return nil
}
// LoadPools loads the organization's Pools into the .R struct
func (os OrganizationSlice) LoadPools(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
// LoadFieldseekerPool loads the organization's FieldseekerPool into the .R struct
func (os OrganizationSlice) LoadFieldseekerPool(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
if len(os) == 0 {
return nil
}
fieldseekerPools, err := os.Pools(mods...).All(ctx, exec)
fieldseekerPools, err := os.FieldseekerPool(mods...).All(ctx, exec)
if err != nil {
return err
}
@ -5962,7 +6079,7 @@ func (os OrganizationSlice) LoadPools(ctx context.Context, exec bob.Executor, mo
continue
}
o.R.Pools = nil
o.R.FieldseekerPool = nil
}
for _, o := range os {
@ -5978,7 +6095,7 @@ func (os OrganizationSlice) LoadPools(ctx context.Context, exec bob.Executor, mo
rel.R.Organization = o
o.R.Pools = append(o.R.Pools, rel)
o.R.FieldseekerPool = append(o.R.FieldseekerPool, rel)
}
}
@ -7321,6 +7438,67 @@ func (os OrganizationSlice) LoadImportDistrictGidDistrict(ctx context.Context, e
return nil
}
// LoadPools loads the organization's Pools into the .R struct
func (o *Organization) LoadPools(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
if o == nil {
return nil
}
// Reset the relationship
o.R.Pools = nil
related, err := o.Pools(mods...).All(ctx, exec)
if err != nil {
return err
}
for _, rel := range related {
rel.R.Organization = o
}
o.R.Pools = related
return nil
}
// LoadPools loads the organization's Pools into the .R struct
func (os OrganizationSlice) LoadPools(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
if len(os) == 0 {
return nil
}
pools, err := os.Pools(mods...).All(ctx, exec)
if err != nil {
return err
}
for _, o := range os {
if o == nil {
continue
}
o.R.Pools = nil
}
for _, o := range os {
if o == nil {
continue
}
for _, rel := range pools {
if !(o.ID == rel.OrganizationID) {
continue
}
rel.R.Organization = o
o.R.Pools = append(o.R.Pools, rel)
}
}
return nil
}
// LoadNuisances loads the organization's Nuisances into the .R struct
func (o *Organization) LoadNuisances(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
if o == nil {
@ -7588,7 +7766,7 @@ type organizationC struct {
Mosquitoinspections *int64
Pointlocations *int64
Polygonlocations *int64
Pools *int64
FieldseekerPool *int64
Pooldetails *int64
Proposedtreatmentareas *int64
Qamosquitoinspections *int64
@ -7610,6 +7788,7 @@ type organizationC struct {
H3Aggregations *int64
NoteAudios *int64
NoteImages *int64
Pools *int64
Nuisances *int64
PublicreportPool *int64
Quicks *int64
@ -7647,8 +7826,8 @@ func (o *Organization) PreloadCount(name string, count int64) error {
o.C.Pointlocations = &count
case "Polygonlocations":
o.C.Polygonlocations = &count
case "Pools":
o.C.Pools = &count
case "FieldseekerPool":
o.C.FieldseekerPool = &count
case "Pooldetails":
o.C.Pooldetails = &count
case "Proposedtreatmentareas":
@ -7691,6 +7870,8 @@ func (o *Organization) PreloadCount(name string, count int64) error {
o.C.NoteAudios = &count
case "NoteImages":
o.C.NoteImages = &count
case "Pools":
o.C.Pools = &count
case "Nuisances":
o.C.Nuisances = &count
case "PublicreportPool":
@ -7716,7 +7897,7 @@ type organizationCountPreloader struct {
Mosquitoinspections func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader
Pointlocations func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader
Polygonlocations func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader
Pools func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader
FieldseekerPool func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader
Pooldetails func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader
Proposedtreatmentareas func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader
Qamosquitoinspections func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader
@ -7738,6 +7919,7 @@ type organizationCountPreloader struct {
H3Aggregations func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader
NoteAudios func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader
NoteImages func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader
Pools func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader
Nuisances func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader
PublicreportPool func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader
Quicks func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader
@ -7956,8 +8138,8 @@ func buildOrganizationCountPreloader() organizationCountPreloader {
return psql.Group(psql.Select(subqueryMods...).Expression)
})
},
Pools: func(mods ...bob.Mod[*dialect.SelectQuery]) psql.Preloader {
return countPreloader[*Organization]("Pools", func(parent string) bob.Expression {
FieldseekerPool: func(mods ...bob.Mod[*dialect.SelectQuery]) psql.Preloader {
return countPreloader[*Organization]("FieldseekerPool", func(parent string) bob.Expression {
// Build a correlated subquery: (SELECT COUNT(*) FROM related WHERE fk = parent.pk)
if parent == "" {
parent = Organizations.Alias()
@ -8330,6 +8512,23 @@ func buildOrganizationCountPreloader() organizationCountPreloader {
return psql.Group(psql.Select(subqueryMods...).Expression)
})
},
Pools: func(mods ...bob.Mod[*dialect.SelectQuery]) psql.Preloader {
return countPreloader[*Organization]("Pools", func(parent string) bob.Expression {
// Build a correlated subquery: (SELECT COUNT(*) FROM related WHERE fk = parent.pk)
if parent == "" {
parent = Organizations.Alias()
}
subqueryMods := []bob.Mod[*dialect.SelectQuery]{
sm.Columns(psql.Raw("count(*)")),
sm.From(Pools.Name()),
sm.Where(psql.Quote(Pools.Alias(), "organization_id").EQ(psql.Quote(parent, "id"))),
}
subqueryMods = append(subqueryMods, mods...)
return psql.Group(psql.Select(subqueryMods...).Expression)
})
},
Nuisances: func(mods ...bob.Mod[*dialect.SelectQuery]) psql.Preloader {
return countPreloader[*Organization]("Nuisances", func(parent string) bob.Expression {
// Build a correlated subquery: (SELECT COUNT(*) FROM related WHERE fk = parent.pk)
@ -8414,7 +8613,7 @@ type organizationCountThenLoader[Q orm.Loadable] struct {
Mosquitoinspections func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
Pointlocations func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
Polygonlocations func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
Pools func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
FieldseekerPool func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
Pooldetails func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
Proposedtreatmentareas func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
Qamosquitoinspections func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
@ -8436,6 +8635,7 @@ type organizationCountThenLoader[Q orm.Loadable] struct {
H3Aggregations func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
NoteAudios func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
NoteImages func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
Pools func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
Nuisances func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
PublicreportPool func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
Quicks func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
@ -8479,8 +8679,8 @@ func buildOrganizationCountThenLoader[Q orm.Loadable]() organizationCountThenLoa
type PolygonlocationsCountInterface interface {
LoadCountPolygonlocations(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
}
type PoolsCountInterface interface {
LoadCountPools(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
type FieldseekerPoolCountInterface interface {
LoadCountFieldseekerPool(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
}
type PooldetailsCountInterface interface {
LoadCountPooldetails(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
@ -8545,6 +8745,9 @@ func buildOrganizationCountThenLoader[Q orm.Loadable]() organizationCountThenLoa
type NoteImagesCountInterface interface {
LoadCountNoteImages(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
}
type PoolsCountInterface interface {
LoadCountPools(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
}
type NuisancesCountInterface interface {
LoadCountNuisances(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
}
@ -8631,10 +8834,10 @@ func buildOrganizationCountThenLoader[Q orm.Loadable]() organizationCountThenLoa
return retrieved.LoadCountPolygonlocations(ctx, exec, mods...)
},
),
Pools: countThenLoadBuilder[Q](
"Pools",
func(ctx context.Context, exec bob.Executor, retrieved PoolsCountInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
return retrieved.LoadCountPools(ctx, exec, mods...)
FieldseekerPool: countThenLoadBuilder[Q](
"FieldseekerPool",
func(ctx context.Context, exec bob.Executor, retrieved FieldseekerPoolCountInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
return retrieved.LoadCountFieldseekerPool(ctx, exec, mods...)
},
),
Pooldetails: countThenLoadBuilder[Q](
@ -8763,6 +8966,12 @@ func buildOrganizationCountThenLoader[Q orm.Loadable]() organizationCountThenLoa
return retrieved.LoadCountNoteImages(ctx, exec, mods...)
},
),
Pools: countThenLoadBuilder[Q](
"Pools",
func(ctx context.Context, exec bob.Executor, retrieved PoolsCountInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
return retrieved.LoadCountPools(ctx, exec, mods...)
},
),
Nuisances: countThenLoadBuilder[Q](
"Nuisances",
func(ctx context.Context, exec bob.Executor, retrieved NuisancesCountInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
@ -9150,29 +9359,29 @@ func (os OrganizationSlice) LoadCountPolygonlocations(ctx context.Context, exec
return nil
}
// LoadCountPools loads the count of Pools into the C struct
func (o *Organization) LoadCountPools(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
// LoadCountFieldseekerPool loads the count of FieldseekerPool into the C struct
func (o *Organization) LoadCountFieldseekerPool(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
if o == nil {
return nil
}
count, err := o.Pools(mods...).Count(ctx, exec)
count, err := o.FieldseekerPool(mods...).Count(ctx, exec)
if err != nil {
return err
}
o.C.Pools = &count
o.C.FieldseekerPool = &count
return nil
}
// LoadCountPools loads the count of Pools for a slice
func (os OrganizationSlice) LoadCountPools(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
// LoadCountFieldseekerPool loads the count of FieldseekerPool for a slice
func (os OrganizationSlice) LoadCountFieldseekerPool(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
if len(os) == 0 {
return nil
}
for _, o := range os {
if err := o.LoadCountPools(ctx, exec, mods...); err != nil {
if err := o.LoadCountFieldseekerPool(ctx, exec, mods...); err != nil {
return err
}
}
@ -9810,6 +10019,36 @@ func (os OrganizationSlice) LoadCountNoteImages(ctx context.Context, exec bob.Ex
return nil
}
// LoadCountPools counts this organization's related Pools rows and stores
// the result in o.C.Pools. A nil receiver is a no-op.
func (o *Organization) LoadCountPools(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
	if o == nil {
		return nil
	}

	n, err := o.Pools(mods...).Count(ctx, exec)
	if err != nil {
		return err
	}

	o.C.Pools = &n
	return nil
}
// LoadCountPools loads the count of Pools for every organization in the
// slice, issuing one count query per element.
func (os OrganizationSlice) LoadCountPools(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
	for _, org := range os {
		if err := org.LoadCountPools(ctx, exec, mods...); err != nil {
			return err
		}
	}
	return nil
}
// LoadCountNuisances loads the count of Nuisances into the C struct
func (o *Organization) LoadCountNuisances(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
if o == nil {
@ -9944,7 +10183,7 @@ type organizationJoins[Q dialect.Joinable] struct {
Mosquitoinspections modAs[Q, fieldseekerMosquitoinspectionColumns]
Pointlocations modAs[Q, fieldseekerPointlocationColumns]
Polygonlocations modAs[Q, fieldseekerPolygonlocationColumns]
Pools modAs[Q, fieldseekerPoolColumns]
FieldseekerPool modAs[Q, fieldseekerPoolColumns]
Pooldetails modAs[Q, fieldseekerPooldetailColumns]
Proposedtreatmentareas modAs[Q, fieldseekerProposedtreatmentareaColumns]
Qamosquitoinspections modAs[Q, fieldseekerQamosquitoinspectionColumns]
@ -9967,6 +10206,7 @@ type organizationJoins[Q dialect.Joinable] struct {
NoteAudios modAs[Q, noteAudioColumns]
NoteImages modAs[Q, noteImageColumns]
ImportDistrictGidDistrict modAs[Q, importDistrictColumns]
Pools modAs[Q, poolColumns]
Nuisances modAs[Q, publicreportNuisanceColumns]
PublicreportPool modAs[Q, publicreportPoolColumns]
Quicks modAs[Q, publicreportQuickColumns]
@ -10164,7 +10404,7 @@ func buildOrganizationJoins[Q dialect.Joinable](cols organizationColumns, typ st
return mods
},
},
Pools: modAs[Q, fieldseekerPoolColumns]{
FieldseekerPool: modAs[Q, fieldseekerPoolColumns]{
c: FieldseekerPools.Columns,
f: func(to fieldseekerPoolColumns) bob.Mod[Q] {
mods := make(mods.QueryMods[Q], 0, 1)
@ -10486,6 +10726,20 @@ func buildOrganizationJoins[Q dialect.Joinable](cols organizationColumns, typ st
return mods
},
},
Pools: modAs[Q, poolColumns]{
c: Pools.Columns,
f: func(to poolColumns) bob.Mod[Q] {
mods := make(mods.QueryMods[Q], 0, 1)
{
mods = append(mods, dialect.Join[Q](typ, Pools.Name().As(to.Alias())).On(
to.OrganizationID.EQ(cols.ID),
))
}
return mods
},
},
Nuisances: modAs[Q, publicreportNuisanceColumns]{
c: PublicreportNuisances.Columns,
f: func(to publicreportNuisanceColumns) bob.Mod[Q] {

1142
db/models/pool.bob.go Normal file

File diff suppressed because it is too large Load diff

View file

@ -65,6 +65,7 @@ type userR struct {
DeletorNoteImages NoteImageSlice // note_image.note_image_deletor_id_fkey
UserNotifications NotificationSlice // notification.notification_user_id_fkey
UserOauthTokens OauthTokenSlice // oauth_token.oauth_token_user_id_fkey
CreatorPools PoolSlice // pool.pool_creator_id_fkey
Organization *Organization // user_.user__organization_id_fkey
}
@ -804,6 +805,30 @@ func (os UserSlice) UserOauthTokens(mods ...bob.Mod[*dialect.SelectQuery]) Oauth
)...)
}
// CreatorPools starts a query for related objects on pool
func (o *User) CreatorPools(mods ...bob.Mod[*dialect.SelectQuery]) PoolsQuery {
return Pools.Query(append(mods,
sm.Where(Pools.Columns.CreatorID.EQ(psql.Arg(o.ID))),
)...)
}
// CreatorPools starts a query for the pool rows created by any non-nil user
// in the slice.
func (os UserSlice) CreatorPools(mods ...bob.Mod[*dialect.SelectQuery]) PoolsQuery {
	ids := make(pgtypes.Array[int32], 0, len(os))
	for _, o := range os {
		if o != nil {
			ids = append(ids, o.ID)
		}
	}

	// Expand the collected ids server-side: SELECT unnest($1::integer[])
	idsExpr := psql.Select(sm.Columns(
		psql.F("unnest", psql.Cast(psql.Arg(ids), "integer[]")),
	))

	return Pools.Query(append(mods,
		sm.Where(psql.Group(Pools.Columns.CreatorID).OP("IN", idsExpr)),
	)...)
}
// Organization starts a query for related objects on organization
func (o *User) Organization(mods ...bob.Mod[*dialect.SelectQuery]) OrganizationsQuery {
return Organizations.Query(append(mods,
@ -1372,6 +1397,74 @@ func (user0 *User) AttachUserOauthTokens(ctx context.Context, exec bob.Executor,
return nil
}
// insertUserCreatorPools0 forces CreatorID on every setter to user0.ID and
// inserts them, returning the inserted pool rows.
func insertUserCreatorPools0(ctx context.Context, exec bob.Executor, pools1 []*PoolSetter, user0 *User) (PoolSlice, error) {
	// Setters are pointers, so mutating the element mutates the caller's copy.
	for _, setter := range pools1 {
		setter.CreatorID = omit.From(user0.ID)
	}

	inserted, err := Pools.Insert(bob.ToMods(pools1...)).All(ctx, exec)
	if err != nil {
		return inserted, fmt.Errorf("insertUserCreatorPools0: %w", err)
	}
	return inserted, nil
}
// attachUserCreatorPools0 re-points every pool in pools1 at user0 with a bulk
// update of creator_id. The count parameter is unused here but kept to match
// the generated attach-helper signature.
func attachUserCreatorPools0(ctx context.Context, exec bob.Executor, count int, pools1 PoolSlice, user0 *User) (PoolSlice, error) {
	if err := pools1.UpdateAll(ctx, exec, PoolSetter{
		CreatorID: omit.From(user0.ID),
	}); err != nil {
		return nil, fmt.Errorf("attachUserCreatorPools0: %w", err)
	}
	return pools1, nil
}
// InsertCreatorPools inserts the given pool setters with this user as their
// creator, then wires up both sides of the relationship in memory.
func (user0 *User) InsertCreatorPools(ctx context.Context, exec bob.Executor, related ...*PoolSetter) error {
	if len(related) == 0 {
		return nil
	}

	inserted, err := insertUserCreatorPools0(ctx, exec, related, user0)
	if err != nil {
		return err
	}

	user0.R.CreatorPools = append(user0.R.CreatorPools, inserted...)
	for _, rel := range inserted {
		rel.R.CreatorUser = user0
	}
	return nil
}
// AttachCreatorPools updates the given existing pools to point at this user
// as their creator, then wires up both sides of the relationship in memory.
func (user0 *User) AttachCreatorPools(ctx context.Context, exec bob.Executor, related ...*Pool) error {
	if len(related) == 0 {
		return nil
	}

	pools1 := PoolSlice(related)
	if _, err := attachUserCreatorPools0(ctx, exec, len(related), pools1, user0); err != nil {
		return err
	}

	user0.R.CreatorPools = append(user0.R.CreatorPools, pools1...)
	for _, rel := range related {
		rel.R.CreatorUser = user0
	}
	return nil
}
func attachUserOrganization0(ctx context.Context, exec bob.Executor, count int, user0 *User, organization1 *Organization) (*User, error) {
setter := &UserSetter{
OrganizationID: omit.From(organization1.ID),
@ -1574,6 +1667,20 @@ func (o *User) Preload(name string, retrieved any) error {
}
}
return nil
case "CreatorPools":
rels, ok := retrieved.(PoolSlice)
if !ok {
return fmt.Errorf("user cannot load %T as %q", retrieved, name)
}
o.R.CreatorPools = rels
for _, rel := range rels {
if rel != nil {
rel.R.CreatorUser = o
}
}
return nil
case "Organization":
rel, ok := retrieved.(*Organization)
if !ok {
@ -1622,6 +1729,7 @@ type userThenLoader[Q orm.Loadable] struct {
DeletorNoteImages func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
UserNotifications func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
UserOauthTokens func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
CreatorPools func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
Organization func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
}
@ -1650,6 +1758,9 @@ func buildUserThenLoader[Q orm.Loadable]() userThenLoader[Q] {
type UserOauthTokensLoadInterface interface {
LoadUserOauthTokens(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
}
type CreatorPoolsLoadInterface interface {
LoadCreatorPools(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
}
type OrganizationLoadInterface interface {
LoadOrganization(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
}
@ -1703,6 +1814,12 @@ func buildUserThenLoader[Q orm.Loadable]() userThenLoader[Q] {
return retrieved.LoadUserOauthTokens(ctx, exec, mods...)
},
),
CreatorPools: thenLoadBuilder[Q](
"CreatorPools",
func(ctx context.Context, exec bob.Executor, retrieved CreatorPoolsLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
return retrieved.LoadCreatorPools(ctx, exec, mods...)
},
),
Organization: thenLoadBuilder[Q](
"Organization",
func(ctx context.Context, exec bob.Executor, retrieved OrganizationLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
@ -2206,6 +2323,67 @@ func (os UserSlice) LoadUserOauthTokens(ctx context.Context, exec bob.Executor,
return nil
}
// LoadCreatorPools loads the user's CreatorPools into the .R struct
func (o *User) LoadCreatorPools(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
if o == nil {
return nil
}
// Reset the relationship
o.R.CreatorPools = nil
related, err := o.CreatorPools(mods...).All(ctx, exec)
if err != nil {
return err
}
for _, rel := range related {
rel.R.CreatorUser = o
}
o.R.CreatorPools = related
return nil
}
// LoadCreatorPools loads the CreatorPools of every user in the slice into
// their .R structs using a single query, matching rows by creator_id.
func (os UserSlice) LoadCreatorPools(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
	if len(os) == 0 {
		return nil
	}

	pools, err := os.CreatorPools(mods...).All(ctx, exec)
	if err != nil {
		return err
	}

	for _, o := range os {
		if o == nil {
			continue
		}
		o.R.CreatorPools = nil // reset before re-matching
		for _, rel := range pools {
			if rel.CreatorID != o.ID {
				continue
			}
			rel.R.CreatorUser = o
			o.R.CreatorPools = append(o.R.CreatorPools, rel)
		}
	}
	return nil
}
// LoadOrganization loads the user's Organization into the .R struct
func (o *User) LoadOrganization(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
if o == nil {
@ -2268,6 +2446,7 @@ type userC struct {
DeletorNoteImages *int64
UserNotifications *int64
UserOauthTokens *int64
CreatorPools *int64
}
// PreloadCount sets a count in the C struct by name
@ -2293,6 +2472,8 @@ func (o *User) PreloadCount(name string, count int64) error {
o.C.UserNotifications = &count
case "UserOauthTokens":
o.C.UserOauthTokens = &count
case "CreatorPools":
o.C.CreatorPools = &count
}
return nil
}
@ -2306,6 +2487,7 @@ type userCountPreloader struct {
DeletorNoteImages func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader
UserNotifications func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader
UserOauthTokens func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader
CreatorPools func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader
}
func buildUserCountPreloader() userCountPreloader {
@ -2446,6 +2628,23 @@ func buildUserCountPreloader() userCountPreloader {
return psql.Group(psql.Select(subqueryMods...).Expression)
})
},
CreatorPools: func(mods ...bob.Mod[*dialect.SelectQuery]) psql.Preloader {
return countPreloader[*User]("CreatorPools", func(parent string) bob.Expression {
// Build a correlated subquery: (SELECT COUNT(*) FROM related WHERE fk = parent.pk)
if parent == "" {
parent = Users.Alias()
}
subqueryMods := []bob.Mod[*dialect.SelectQuery]{
sm.Columns(psql.Raw("count(*)")),
sm.From(Pools.Name()),
sm.Where(psql.Quote(Pools.Alias(), "creator_id").EQ(psql.Quote(parent, "id"))),
}
subqueryMods = append(subqueryMods, mods...)
return psql.Group(psql.Select(subqueryMods...).Expression)
})
},
}
}
@ -2458,6 +2657,7 @@ type userCountThenLoader[Q orm.Loadable] struct {
DeletorNoteImages func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
UserNotifications func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
UserOauthTokens func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
CreatorPools func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
}
func buildUserCountThenLoader[Q orm.Loadable]() userCountThenLoader[Q] {
@ -2485,6 +2685,9 @@ func buildUserCountThenLoader[Q orm.Loadable]() userCountThenLoader[Q] {
type UserOauthTokensCountInterface interface {
LoadCountUserOauthTokens(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
}
type CreatorPoolsCountInterface interface {
LoadCountCreatorPools(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
}
return userCountThenLoader[Q]{
PublicUserUser: countThenLoadBuilder[Q](
@ -2535,6 +2738,12 @@ func buildUserCountThenLoader[Q orm.Loadable]() userCountThenLoader[Q] {
return retrieved.LoadCountUserOauthTokens(ctx, exec, mods...)
},
),
CreatorPools: countThenLoadBuilder[Q](
"CreatorPools",
func(ctx context.Context, exec bob.Executor, retrieved CreatorPoolsCountInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
return retrieved.LoadCountCreatorPools(ctx, exec, mods...)
},
),
}
}
@ -2778,6 +2987,36 @@ func (os UserSlice) LoadCountUserOauthTokens(ctx context.Context, exec bob.Execu
return nil
}
// LoadCountCreatorPools loads the count of CreatorPools into the C struct
func (o *User) LoadCountCreatorPools(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
if o == nil {
return nil
}
count, err := o.CreatorPools(mods...).Count(ctx, exec)
if err != nil {
return err
}
o.C.CreatorPools = &count
return nil
}
// LoadCountCreatorPools loads the count of CreatorPools for a slice
func (os UserSlice) LoadCountCreatorPools(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
if len(os) == 0 {
return nil
}
for _, o := range os {
if err := o.LoadCountCreatorPools(ctx, exec, mods...); err != nil {
return err
}
}
return nil
}
type userJoins[Q dialect.Joinable] struct {
typ string
PublicUserUser modAs[Q, arcgisuserColumns]
@ -2788,6 +3027,7 @@ type userJoins[Q dialect.Joinable] struct {
DeletorNoteImages modAs[Q, noteImageColumns]
UserNotifications modAs[Q, notificationColumns]
UserOauthTokens modAs[Q, oauthTokenColumns]
CreatorPools modAs[Q, poolColumns]
Organization modAs[Q, organizationColumns]
}
@ -2910,6 +3150,20 @@ func buildUserJoins[Q dialect.Joinable](cols userColumns, typ string) userJoins[
return mods
},
},
CreatorPools: modAs[Q, poolColumns]{
c: Pools.Columns,
f: func(to poolColumns) bob.Mod[Q] {
mods := make(mods.QueryMods[Q], 0, 1)
{
mods = append(mods, dialect.Join[Q](typ, Pools.Name().As(to.Alias())).On(
to.CreatorID.EQ(cols.ID),
))
}
return mods
},
},
Organization: modAs[Q, organizationColumns]{
c: Organizations.Columns,
f: func(to organizationColumns) bob.Mod[Q] {