diff --git a/db/bobgen.yaml b/db/bobgen.yaml index 4effbb43..1e9cf27d 100644 --- a/db/bobgen.yaml +++ b/db/bobgen.yaml @@ -9,6 +9,8 @@ aliases: publicreport.pool.pool_organization_id_fkey: "PublicreportPool" fieldseeker.pool.pool_organization_id_fkey: "FieldseekerPool" user_: + relationships: + fileupload.pool.pool_creator_id_fkey: "FileuploadPool" up_plural: "Users" up_singular: "User" down_plural: "users" diff --git a/db/dberrors/address.bob.go b/db/dberrors/address.bob.go new file mode 100644 index 00000000..69797795 --- /dev/null +++ b/db/dberrors/address.bob.go @@ -0,0 +1,26 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package dberrors + +var AddressErrors = &addressErrors{ + ErrUniqueAddressPkey: &UniqueConstraintError{ + schema: "", + table: "address", + columns: []string{"id"}, + s: "address_pkey", + }, + + ErrUniqueAddressCountryLocalityNumber_StreetKey: &UniqueConstraintError{ + schema: "", + table: "address", + columns: []string{"country", "locality", "number_", "street"}, + s: "address_country_locality_number__street_key", + }, +} + +type addressErrors struct { + ErrUniqueAddressPkey *UniqueConstraintError + + ErrUniqueAddressCountryLocalityNumber_StreetKey *UniqueConstraintError +} diff --git a/db/dberrors/arcgis.address_mapping.bob.go b/db/dberrors/arcgis.address_mapping.bob.go new file mode 100644 index 00000000..530221b6 --- /dev/null +++ b/db/dberrors/arcgis.address_mapping.bob.go @@ -0,0 +1,17 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package dberrors + +var ArcgisAddressMappingErrors = &arcgisAddressMappingErrors{ + ErrUniqueAddressMappingPkey: &UniqueConstraintError{ + schema: "arcgis", + table: "address_mapping", + columns: []string{"organization_id", "destination"}, + s: "address_mapping_pkey", + }, +} + +type arcgisAddressMappingErrors struct { + ErrUniqueAddressMappingPkey *UniqueConstraintError +} diff --git a/db/dberrors/arcgis.feature_service.bob.go b/db/dberrors/arcgis.feature_service.bob.go new file mode 100644 index 00000000..d009a0e0 --- /dev/null +++ b/db/dberrors/arcgis.feature_service.bob.go @@ -0,0 +1,17 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package dberrors + +var ArcgisFeatureServiceErrors = &arcgisFeatureServiceErrors{ + ErrUniqueFeatureServicePkey: &UniqueConstraintError{ + schema: "arcgis", + table: "feature_service", + columns: []string{"item_id"}, + s: "feature_service_pkey", + }, +} + +type arcgisFeatureServiceErrors struct { + ErrUniqueFeatureServicePkey *UniqueConstraintError +} diff --git a/db/dberrors/arcgis.layer.bob.go b/db/dberrors/arcgis.layer.bob.go new file mode 100644 index 00000000..f07d57e6 --- /dev/null +++ b/db/dberrors/arcgis.layer.bob.go @@ -0,0 +1,17 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package dberrors + +var ArcgisLayerErrors = &arcgisLayerErrors{ + ErrUniqueLayerPkey: &UniqueConstraintError{ + schema: "arcgis", + table: "layer", + columns: []string{"feature_service_item_id", "index_"}, + s: "layer_pkey", + }, +} + +type arcgisLayerErrors struct { + ErrUniqueLayerPkey *UniqueConstraintError +} diff --git a/db/dberrors/arcgis.layer_field.bob.go b/db/dberrors/arcgis.layer_field.bob.go new file mode 100644 index 00000000..5fb95136 --- /dev/null +++ b/db/dberrors/arcgis.layer_field.bob.go @@ -0,0 +1,17 @@ +// Code generated by BobGen psql v0.42.5. 
DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package dberrors + +var ArcgisLayerFieldErrors = &arcgisLayerFieldErrors{ + ErrUniqueLayerFieldPkey: &UniqueConstraintError{ + schema: "arcgis", + table: "layer_field", + columns: []string{"layer_feature_service_item_id", "layer_index", "name"}, + s: "layer_field_pkey", + }, +} + +type arcgisLayerFieldErrors struct { + ErrUniqueLayerFieldPkey *UniqueConstraintError +} diff --git a/db/dberrors/arcgis.parcel_mapping.bob.go b/db/dberrors/arcgis.parcel_mapping.bob.go new file mode 100644 index 00000000..8d10a16c --- /dev/null +++ b/db/dberrors/arcgis.parcel_mapping.bob.go @@ -0,0 +1,17 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package dberrors + +var ArcgisParcelMappingErrors = &arcgisParcelMappingErrors{ + ErrUniqueParcelMappingPkey: &UniqueConstraintError{ + schema: "arcgis", + table: "parcel_mapping", + columns: []string{"organization_id", "destination"}, + s: "parcel_mapping_pkey", + }, +} + +type arcgisParcelMappingErrors struct { + ErrUniqueParcelMappingPkey *UniqueConstraintError +} diff --git a/db/dberrors/parcel.bob.go b/db/dberrors/parcel.bob.go new file mode 100644 index 00000000..b9cd511e --- /dev/null +++ b/db/dberrors/parcel.bob.go @@ -0,0 +1,17 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package dberrors + +var ParcelErrors = &parcelErrors{ + ErrUniqueParcelPkey: &UniqueConstraintError{ + schema: "", + table: "parcel", + columns: []string{"id"}, + s: "parcel_pkey", + }, +} + +type parcelErrors struct { + ErrUniqueParcelPkey *UniqueConstraintError +} diff --git a/db/dberrors/pool.bob.go b/db/dberrors/pool.bob.go new file mode 100644 index 00000000..8cc4b911 --- /dev/null +++ b/db/dberrors/pool.bob.go @@ -0,0 +1,17 @@ +// Code generated by BobGen psql v0.42.5. 
DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package dberrors + +var PoolErrors = &poolErrors{ + ErrUniquePoolPkey: &UniqueConstraintError{ + schema: "", + table: "pool", + columns: []string{"id"}, + s: "pool_pkey", + }, +} + +type poolErrors struct { + ErrUniquePoolPkey *UniqueConstraintError +} diff --git a/db/dberrors/site.bob.go b/db/dberrors/site.bob.go new file mode 100644 index 00000000..43e045fe --- /dev/null +++ b/db/dberrors/site.bob.go @@ -0,0 +1,26 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package dberrors + +var SiteErrors = &siteErrors{ + ErrUniqueSitePkey: &UniqueConstraintError{ + schema: "", + table: "site", + columns: []string{"id", "version"}, + s: "site_pkey", + }, + + ErrUniqueSiteAddressIdKey: &UniqueConstraintError{ + schema: "", + table: "site", + columns: []string{"address_id"}, + s: "site_address_id_key", + }, +} + +type siteErrors struct { + ErrUniqueSitePkey *UniqueConstraintError + + ErrUniqueSiteAddressIdKey *UniqueConstraintError +} diff --git a/db/dbinfo/address.bob.go b/db/dbinfo/address.bob.go new file mode 100644 index 00000000..cbcae265 --- /dev/null +++ b/db/dbinfo/address.bob.go @@ -0,0 +1,227 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package dbinfo + +import "github.com/aarondl/opt/null" + +var Addresses = Table[ + addressColumns, + addressIndexes, + addressForeignKeys, + addressUniques, + addressChecks, +]{ + Schema: "", + Name: "address", + Columns: addressColumns{ + Country: column{ + Name: "country", + DBType: "public.countrytype", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + Created: column{ + Name: "created", + DBType: "timestamp without time zone", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + Geom: column{ + Name: "geom", + DBType: "geometry", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + H3cell: column{ + Name: "h3cell", + DBType: "h3index", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + ID: column{ + Name: "id", + DBType: "integer", + Default: "nextval('address_id_seq'::regclass)", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + Locality: column{ + Name: "locality", + DBType: "text", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + Number: column{ + Name: "number_", + DBType: "integer", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + PostalCode: column{ + Name: "postal_code", + DBType: "text", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + Street: column{ + Name: "street", + DBType: "text", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + Unit: column{ + Name: "unit", + DBType: "text", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + }, + Indexes: addressIndexes{ + AddressPkey: index{ + Type: "btree", + Name: "address_pkey", + Columns: []indexColumn{ + { + Name: "id", + Desc: null.FromCond(false, true), + IsExpression: false, + 
}, + }, + Unique: true, + Comment: "", + NullsFirst: []bool{false}, + NullsDistinct: false, + Where: "", + Include: []string{}, + }, + AddressCountryLocalityNumberStreetKey: index{ + Type: "btree", + Name: "address_country_locality_number__street_key", + Columns: []indexColumn{ + { + Name: "country", + Desc: null.FromCond(false, true), + IsExpression: false, + }, + { + Name: "locality", + Desc: null.FromCond(false, true), + IsExpression: false, + }, + { + Name: "number_", + Desc: null.FromCond(false, true), + IsExpression: false, + }, + { + Name: "street", + Desc: null.FromCond(false, true), + IsExpression: false, + }, + }, + Unique: true, + Comment: "", + NullsFirst: []bool{false, false, false, false}, + NullsDistinct: false, + Where: "", + Include: []string{}, + }, + }, + PrimaryKey: &constraint{ + Name: "address_pkey", + Columns: []string{"id"}, + Comment: "", + }, + + Uniques: addressUniques{ + AddressCountryLocalityNumberStreetKey: constraint{ + Name: "address_country_locality_number__street_key", + Columns: []string{"country", "locality", "number_", "street"}, + Comment: "", + }, + }, + + Comment: "", +} + +type addressColumns struct { + Country column + Created column + Geom column + H3cell column + ID column + Locality column + Number column + PostalCode column + Street column + Unit column +} + +func (c addressColumns) AsSlice() []column { + return []column{ + c.Country, c.Created, c.Geom, c.H3cell, c.ID, c.Locality, c.Number, c.PostalCode, c.Street, c.Unit, + } +} + +type addressIndexes struct { + AddressPkey index + AddressCountryLocalityNumberStreetKey index +} + +func (i addressIndexes) AsSlice() []index { + return []index{ + i.AddressPkey, i.AddressCountryLocalityNumberStreetKey, + } +} + +type addressForeignKeys struct{} + +func (f addressForeignKeys) AsSlice() []foreignKey { + return []foreignKey{} +} + +type addressUniques struct { + AddressCountryLocalityNumberStreetKey constraint +} + +func (u addressUniques) AsSlice() []constraint { + return 
[]constraint{ + u.AddressCountryLocalityNumberStreetKey, + } +} + +type addressChecks struct{} + +func (c addressChecks) AsSlice() []check { + return []check{} +} diff --git a/db/dbinfo/arcgis.address_mapping.bob.go b/db/dbinfo/arcgis.address_mapping.bob.go new file mode 100644 index 00000000..29f83022 --- /dev/null +++ b/db/dbinfo/arcgis.address_mapping.bob.go @@ -0,0 +1,162 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package dbinfo + +import "github.com/aarondl/opt/null" + +var ArcgisAddressMappings = Table[ + arcgisAddressMappingColumns, + arcgisAddressMappingIndexes, + arcgisAddressMappingForeignKeys, + arcgisAddressMappingUniques, + arcgisAddressMappingChecks, +]{ + Schema: "arcgis", + Name: "address_mapping", + Columns: arcgisAddressMappingColumns{ + Destination: column{ + Name: "destination", + DBType: "arcgis.mappingdestinationaddress", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + LayerFeatureServiceItemID: column{ + Name: "layer_feature_service_item_id", + DBType: "text", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + LayerIndex: column{ + Name: "layer_index", + DBType: "integer", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + LayerFieldName: column{ + Name: "layer_field_name", + DBType: "text", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + OrganizationID: column{ + Name: "organization_id", + DBType: "integer", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + }, + Indexes: arcgisAddressMappingIndexes{ + AddressMappingPkey: index{ + Type: "btree", + Name: "address_mapping_pkey", + Columns: []indexColumn{ + { + Name: "organization_id", + Desc: null.FromCond(false, true), + IsExpression: false, + }, + { + Name: "destination", + 
Desc: null.FromCond(false, true), + IsExpression: false, + }, + }, + Unique: true, + Comment: "", + NullsFirst: []bool{false, false}, + NullsDistinct: false, + Where: "", + Include: []string{}, + }, + }, + PrimaryKey: &constraint{ + Name: "address_mapping_pkey", + Columns: []string{"organization_id", "destination"}, + Comment: "", + }, + ForeignKeys: arcgisAddressMappingForeignKeys{ + ArcgisAddressMappingAddressMappingLayerFeatureServiceItemIDLayerIndexFkey: foreignKey{ + constraint: constraint{ + Name: "arcgis.address_mapping.address_mapping_layer_feature_service_item_id_layer_index__fkey", + Columns: []string{"layer_feature_service_item_id", "layer_index", "layer_field_name"}, + Comment: "", + }, + ForeignTable: "arcgis.layer_field", + ForeignColumns: []string{"layer_feature_service_item_id", "layer_index", "name"}, + }, + ArcgisAddressMappingAddressMappingOrganizationIDFkey: foreignKey{ + constraint: constraint{ + Name: "arcgis.address_mapping.address_mapping_organization_id_fkey", + Columns: []string{"organization_id"}, + Comment: "", + }, + ForeignTable: "organization", + ForeignColumns: []string{"id"}, + }, + }, + + Comment: "", +} + +type arcgisAddressMappingColumns struct { + Destination column + LayerFeatureServiceItemID column + LayerIndex column + LayerFieldName column + OrganizationID column +} + +func (c arcgisAddressMappingColumns) AsSlice() []column { + return []column{ + c.Destination, c.LayerFeatureServiceItemID, c.LayerIndex, c.LayerFieldName, c.OrganizationID, + } +} + +type arcgisAddressMappingIndexes struct { + AddressMappingPkey index +} + +func (i arcgisAddressMappingIndexes) AsSlice() []index { + return []index{ + i.AddressMappingPkey, + } +} + +type arcgisAddressMappingForeignKeys struct { + ArcgisAddressMappingAddressMappingLayerFeatureServiceItemIDLayerIndexFkey foreignKey + ArcgisAddressMappingAddressMappingOrganizationIDFkey foreignKey +} + +func (f arcgisAddressMappingForeignKeys) AsSlice() []foreignKey { + return []foreignKey{ + 
f.ArcgisAddressMappingAddressMappingLayerFeatureServiceItemIDLayerIndexFkey, f.ArcgisAddressMappingAddressMappingOrganizationIDFkey, + } +} + +type arcgisAddressMappingUniques struct{} + +func (u arcgisAddressMappingUniques) AsSlice() []constraint { + return []constraint{} +} + +type arcgisAddressMappingChecks struct{} + +func (c arcgisAddressMappingChecks) AsSlice() []check { + return []check{} +} diff --git a/db/dbinfo/arcgis.feature_service.bob.go b/db/dbinfo/arcgis.feature_service.bob.go new file mode 100644 index 00000000..865aa154 --- /dev/null +++ b/db/dbinfo/arcgis.feature_service.bob.go @@ -0,0 +1,122 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package dbinfo + +import "github.com/aarondl/opt/null" + +var ArcgisFeatureServices = Table[ + arcgisFeatureServiceColumns, + arcgisFeatureServiceIndexes, + arcgisFeatureServiceForeignKeys, + arcgisFeatureServiceUniques, + arcgisFeatureServiceChecks, +]{ + Schema: "arcgis", + Name: "feature_service", + Columns: arcgisFeatureServiceColumns{ + Extent: column{ + Name: "extent", + DBType: "box2d", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + ItemID: column{ + Name: "item_id", + DBType: "text", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + SpatialReference: column{ + Name: "spatial_reference", + DBType: "integer", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + URL: column{ + Name: "url", + DBType: "text", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + }, + Indexes: arcgisFeatureServiceIndexes{ + FeatureServicePkey: index{ + Type: "btree", + Name: "feature_service_pkey", + Columns: []indexColumn{ + { + Name: "item_id", + Desc: null.FromCond(false, true), + IsExpression: false, + }, + }, + Unique: true, + Comment: "", + NullsFirst: 
[]bool{false}, + NullsDistinct: false, + Where: "", + Include: []string{}, + }, + }, + PrimaryKey: &constraint{ + Name: "feature_service_pkey", + Columns: []string{"item_id"}, + Comment: "", + }, + + Comment: "", +} + +type arcgisFeatureServiceColumns struct { + Extent column + ItemID column + SpatialReference column + URL column +} + +func (c arcgisFeatureServiceColumns) AsSlice() []column { + return []column{ + c.Extent, c.ItemID, c.SpatialReference, c.URL, + } +} + +type arcgisFeatureServiceIndexes struct { + FeatureServicePkey index +} + +func (i arcgisFeatureServiceIndexes) AsSlice() []index { + return []index{ + i.FeatureServicePkey, + } +} + +type arcgisFeatureServiceForeignKeys struct{} + +func (f arcgisFeatureServiceForeignKeys) AsSlice() []foreignKey { + return []foreignKey{} +} + +type arcgisFeatureServiceUniques struct{} + +func (u arcgisFeatureServiceUniques) AsSlice() []constraint { + return []constraint{} +} + +type arcgisFeatureServiceChecks struct{} + +func (c arcgisFeatureServiceChecks) AsSlice() []check { + return []check{} +} diff --git a/db/dbinfo/arcgis.layer.bob.go b/db/dbinfo/arcgis.layer.bob.go new file mode 100644 index 00000000..2c09a4d4 --- /dev/null +++ b/db/dbinfo/arcgis.layer.bob.go @@ -0,0 +1,132 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package dbinfo + +import "github.com/aarondl/opt/null" + +var ArcgisLayers = Table[ + arcgisLayerColumns, + arcgisLayerIndexes, + arcgisLayerForeignKeys, + arcgisLayerUniques, + arcgisLayerChecks, +]{ + Schema: "arcgis", + Name: "layer", + Columns: arcgisLayerColumns{ + Extent: column{ + Name: "extent", + DBType: "box2d", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + FeatureServiceItemID: column{ + Name: "feature_service_item_id", + DBType: "text", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + Index: column{ + Name: "index_", + DBType: "integer", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + }, + Indexes: arcgisLayerIndexes{ + LayerPkey: index{ + Type: "btree", + Name: "layer_pkey", + Columns: []indexColumn{ + { + Name: "feature_service_item_id", + Desc: null.FromCond(false, true), + IsExpression: false, + }, + { + Name: "index_", + Desc: null.FromCond(false, true), + IsExpression: false, + }, + }, + Unique: true, + Comment: "", + NullsFirst: []bool{false, false}, + NullsDistinct: false, + Where: "", + Include: []string{}, + }, + }, + PrimaryKey: &constraint{ + Name: "layer_pkey", + Columns: []string{"feature_service_item_id", "index_"}, + Comment: "", + }, + ForeignKeys: arcgisLayerForeignKeys{ + ArcgisLayerLayerFeatureServiceItemIDFkey: foreignKey{ + constraint: constraint{ + Name: "arcgis.layer.layer_feature_service_item_id_fkey", + Columns: []string{"feature_service_item_id"}, + Comment: "", + }, + ForeignTable: "arcgis.feature_service", + ForeignColumns: []string{"item_id"}, + }, + }, + + Comment: "", +} + +type arcgisLayerColumns struct { + Extent column + FeatureServiceItemID column + Index column +} + +func (c arcgisLayerColumns) AsSlice() []column { + return []column{ + c.Extent, c.FeatureServiceItemID, c.Index, + } +} + +type arcgisLayerIndexes struct { + LayerPkey index +} + +func (i arcgisLayerIndexes) 
AsSlice() []index { + return []index{ + i.LayerPkey, + } +} + +type arcgisLayerForeignKeys struct { + ArcgisLayerLayerFeatureServiceItemIDFkey foreignKey +} + +func (f arcgisLayerForeignKeys) AsSlice() []foreignKey { + return []foreignKey{ + f.ArcgisLayerLayerFeatureServiceItemIDFkey, + } +} + +type arcgisLayerUniques struct{} + +func (u arcgisLayerUniques) AsSlice() []constraint { + return []constraint{} +} + +type arcgisLayerChecks struct{} + +func (c arcgisLayerChecks) AsSlice() []check { + return []check{} +} diff --git a/db/dbinfo/arcgis.layer_field.bob.go b/db/dbinfo/arcgis.layer_field.bob.go new file mode 100644 index 00000000..4a1207d5 --- /dev/null +++ b/db/dbinfo/arcgis.layer_field.bob.go @@ -0,0 +1,147 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package dbinfo + +import "github.com/aarondl/opt/null" + +var ArcgisLayerFields = Table[ + arcgisLayerFieldColumns, + arcgisLayerFieldIndexes, + arcgisLayerFieldForeignKeys, + arcgisLayerFieldUniques, + arcgisLayerFieldChecks, +]{ + Schema: "arcgis", + Name: "layer_field", + Columns: arcgisLayerFieldColumns{ + LayerFeatureServiceItemID: column{ + Name: "layer_feature_service_item_id", + DBType: "text", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + LayerIndex: column{ + Name: "layer_index", + DBType: "integer", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + Name: column{ + Name: "name", + DBType: "text", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + Type: column{ + Name: "type_", + DBType: "arcgis.fieldtype", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + }, + Indexes: arcgisLayerFieldIndexes{ + LayerFieldPkey: index{ + Type: "btree", + Name: "layer_field_pkey", + Columns: []indexColumn{ + { + Name: 
"layer_feature_service_item_id", + Desc: null.FromCond(false, true), + IsExpression: false, + }, + { + Name: "layer_index", + Desc: null.FromCond(false, true), + IsExpression: false, + }, + { + Name: "name", + Desc: null.FromCond(false, true), + IsExpression: false, + }, + }, + Unique: true, + Comment: "", + NullsFirst: []bool{false, false, false}, + NullsDistinct: false, + Where: "", + Include: []string{}, + }, + }, + PrimaryKey: &constraint{ + Name: "layer_field_pkey", + Columns: []string{"layer_feature_service_item_id", "layer_index", "name"}, + Comment: "", + }, + ForeignKeys: arcgisLayerFieldForeignKeys{ + ArcgisLayerFieldLayerFieldLayerFeatureServiceItemIDLayerIndexFkey: foreignKey{ + constraint: constraint{ + Name: "arcgis.layer_field.layer_field_layer_feature_service_item_id_layer_index_fkey", + Columns: []string{"layer_feature_service_item_id", "layer_index"}, + Comment: "", + }, + ForeignTable: "arcgis.layer", + ForeignColumns: []string{"feature_service_item_id", "index_"}, + }, + }, + + Comment: "", +} + +type arcgisLayerFieldColumns struct { + LayerFeatureServiceItemID column + LayerIndex column + Name column + Type column +} + +func (c arcgisLayerFieldColumns) AsSlice() []column { + return []column{ + c.LayerFeatureServiceItemID, c.LayerIndex, c.Name, c.Type, + } +} + +type arcgisLayerFieldIndexes struct { + LayerFieldPkey index +} + +func (i arcgisLayerFieldIndexes) AsSlice() []index { + return []index{ + i.LayerFieldPkey, + } +} + +type arcgisLayerFieldForeignKeys struct { + ArcgisLayerFieldLayerFieldLayerFeatureServiceItemIDLayerIndexFkey foreignKey +} + +func (f arcgisLayerFieldForeignKeys) AsSlice() []foreignKey { + return []foreignKey{ + f.ArcgisLayerFieldLayerFieldLayerFeatureServiceItemIDLayerIndexFkey, + } +} + +type arcgisLayerFieldUniques struct{} + +func (u arcgisLayerFieldUniques) AsSlice() []constraint { + return []constraint{} +} + +type arcgisLayerFieldChecks struct{} + +func (c arcgisLayerFieldChecks) AsSlice() []check { + return 
[]check{} +} diff --git a/db/dbinfo/arcgis.parcel_mapping.bob.go b/db/dbinfo/arcgis.parcel_mapping.bob.go new file mode 100644 index 00000000..30433a27 --- /dev/null +++ b/db/dbinfo/arcgis.parcel_mapping.bob.go @@ -0,0 +1,162 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package dbinfo + +import "github.com/aarondl/opt/null" + +var ArcgisParcelMappings = Table[ + arcgisParcelMappingColumns, + arcgisParcelMappingIndexes, + arcgisParcelMappingForeignKeys, + arcgisParcelMappingUniques, + arcgisParcelMappingChecks, +]{ + Schema: "arcgis", + Name: "parcel_mapping", + Columns: arcgisParcelMappingColumns{ + Destination: column{ + Name: "destination", + DBType: "arcgis.mappingdestinationparcel", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + LayerFeatureServiceItemID: column{ + Name: "layer_feature_service_item_id", + DBType: "text", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + LayerIndex: column{ + Name: "layer_index", + DBType: "integer", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + LayerFieldName: column{ + Name: "layer_field_name", + DBType: "text", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + OrganizationID: column{ + Name: "organization_id", + DBType: "integer", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + }, + Indexes: arcgisParcelMappingIndexes{ + ParcelMappingPkey: index{ + Type: "btree", + Name: "parcel_mapping_pkey", + Columns: []indexColumn{ + { + Name: "organization_id", + Desc: null.FromCond(false, true), + IsExpression: false, + }, + { + Name: "destination", + Desc: null.FromCond(false, true), + IsExpression: false, + }, + }, + Unique: true, + Comment: "", + NullsFirst: []bool{false, false}, + NullsDistinct: false, + 
Where: "", + Include: []string{}, + }, + }, + PrimaryKey: &constraint{ + Name: "parcel_mapping_pkey", + Columns: []string{"organization_id", "destination"}, + Comment: "", + }, + ForeignKeys: arcgisParcelMappingForeignKeys{ + ArcgisParcelMappingParcelMappingLayerFeatureServiceItemIDLayerIndexLFkey: foreignKey{ + constraint: constraint{ + Name: "arcgis.parcel_mapping.parcel_mapping_layer_feature_service_item_id_layer_index_l_fkey", + Columns: []string{"layer_feature_service_item_id", "layer_index", "layer_field_name"}, + Comment: "", + }, + ForeignTable: "arcgis.layer_field", + ForeignColumns: []string{"layer_feature_service_item_id", "layer_index", "name"}, + }, + ArcgisParcelMappingParcelMappingOrganizationIDFkey: foreignKey{ + constraint: constraint{ + Name: "arcgis.parcel_mapping.parcel_mapping_organization_id_fkey", + Columns: []string{"organization_id"}, + Comment: "", + }, + ForeignTable: "organization", + ForeignColumns: []string{"id"}, + }, + }, + + Comment: "", +} + +type arcgisParcelMappingColumns struct { + Destination column + LayerFeatureServiceItemID column + LayerIndex column + LayerFieldName column + OrganizationID column +} + +func (c arcgisParcelMappingColumns) AsSlice() []column { + return []column{ + c.Destination, c.LayerFeatureServiceItemID, c.LayerIndex, c.LayerFieldName, c.OrganizationID, + } +} + +type arcgisParcelMappingIndexes struct { + ParcelMappingPkey index +} + +func (i arcgisParcelMappingIndexes) AsSlice() []index { + return []index{ + i.ParcelMappingPkey, + } +} + +type arcgisParcelMappingForeignKeys struct { + ArcgisParcelMappingParcelMappingLayerFeatureServiceItemIDLayerIndexLFkey foreignKey + ArcgisParcelMappingParcelMappingOrganizationIDFkey foreignKey +} + +func (f arcgisParcelMappingForeignKeys) AsSlice() []foreignKey { + return []foreignKey{ + f.ArcgisParcelMappingParcelMappingLayerFeatureServiceItemIDLayerIndexLFkey, f.ArcgisParcelMappingParcelMappingOrganizationIDFkey, + } +} + +type arcgisParcelMappingUniques struct{} + 
+func (u arcgisParcelMappingUniques) AsSlice() []constraint { + return []constraint{} +} + +type arcgisParcelMappingChecks struct{} + +func (c arcgisParcelMappingChecks) AsSlice() []check { + return []check{} +} diff --git a/db/dbinfo/parcel.bob.go b/db/dbinfo/parcel.bob.go new file mode 100644 index 00000000..845bece8 --- /dev/null +++ b/db/dbinfo/parcel.bob.go @@ -0,0 +1,122 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package dbinfo + +import "github.com/aarondl/opt/null" + +var Parcels = Table[ + parcelColumns, + parcelIndexes, + parcelForeignKeys, + parcelUniques, + parcelChecks, +]{ + Schema: "", + Name: "parcel", + Columns: parcelColumns{ + Apn: column{ + Name: "apn", + DBType: "text", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + Description: column{ + Name: "description", + DBType: "text", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + ID: column{ + Name: "id", + DBType: "integer", + Default: "nextval('parcel_id_seq'::regclass)", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + Geometry: column{ + Name: "geometry", + DBType: "geometry", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + }, + Indexes: parcelIndexes{ + ParcelPkey: index{ + Type: "btree", + Name: "parcel_pkey", + Columns: []indexColumn{ + { + Name: "id", + Desc: null.FromCond(false, true), + IsExpression: false, + }, + }, + Unique: true, + Comment: "", + NullsFirst: []bool{false}, + NullsDistinct: false, + Where: "", + Include: []string{}, + }, + }, + PrimaryKey: &constraint{ + Name: "parcel_pkey", + Columns: []string{"id"}, + Comment: "", + }, + + Comment: "", +} + +type parcelColumns struct { + Apn column + Description column + ID column + Geometry column +} + +func (c parcelColumns) AsSlice() []column { + return []column{ + 
c.Apn, c.Description, c.ID, c.Geometry, + } +} + +type parcelIndexes struct { + ParcelPkey index +} + +func (i parcelIndexes) AsSlice() []index { + return []index{ + i.ParcelPkey, + } +} + +type parcelForeignKeys struct{} + +func (f parcelForeignKeys) AsSlice() []foreignKey { + return []foreignKey{} +} + +type parcelUniques struct{} + +func (u parcelUniques) AsSlice() []constraint { + return []constraint{} +} + +type parcelChecks struct{} + +func (c parcelChecks) AsSlice() []check { + return []check{} +} diff --git a/db/dbinfo/pool.bob.go b/db/dbinfo/pool.bob.go new file mode 100644 index 00000000..149b10d2 --- /dev/null +++ b/db/dbinfo/pool.bob.go @@ -0,0 +1,147 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package dbinfo + +import "github.com/aarondl/opt/null" + +var Pools = Table[ + poolColumns, + poolIndexes, + poolForeignKeys, + poolUniques, + poolChecks, +]{ + Schema: "", + Name: "pool", + Columns: poolColumns{ + Condition: column{ + Name: "condition", + DBType: "public.poolconditiontype", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + Created: column{ + Name: "created", + DBType: "timestamp without time zone", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + CreatorID: column{ + Name: "creator_id", + DBType: "integer", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + ID: column{ + Name: "id", + DBType: "integer", + Default: "nextval('pool_id_seq'::regclass)", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + SiteID: column{ + Name: "site_id", + DBType: "integer", + Default: "NULL", + Comment: "", + Nullable: true, + Generated: false, + AutoIncr: false, + }, + }, + Indexes: poolIndexes{ + PoolPkey: index{ + Type: "btree", + Name: "pool_pkey", + Columns: []indexColumn{ + { + Name: "id", + Desc: 
null.FromCond(false, true), + IsExpression: false, + }, + }, + Unique: true, + Comment: "", + NullsFirst: []bool{false}, + NullsDistinct: false, + Where: "", + Include: []string{}, + }, + }, + PrimaryKey: &constraint{ + Name: "pool_pkey", + Columns: []string{"id"}, + Comment: "", + }, + ForeignKeys: poolForeignKeys{ + PoolPoolCreatorIDFkey: foreignKey{ + constraint: constraint{ + Name: "pool.pool_creator_id_fkey", + Columns: []string{"creator_id"}, + Comment: "", + }, + ForeignTable: "user_", + ForeignColumns: []string{"id"}, + }, + }, + + Comment: "", +} + +type poolColumns struct { + Condition column + Created column + CreatorID column + ID column + SiteID column +} + +func (c poolColumns) AsSlice() []column { + return []column{ + c.Condition, c.Created, c.CreatorID, c.ID, c.SiteID, + } +} + +type poolIndexes struct { + PoolPkey index +} + +func (i poolIndexes) AsSlice() []index { + return []index{ + i.PoolPkey, + } +} + +type poolForeignKeys struct { + PoolPoolCreatorIDFkey foreignKey +} + +func (f poolForeignKeys) AsSlice() []foreignKey { + return []foreignKey{ + f.PoolPoolCreatorIDFkey, + } +} + +type poolUniques struct{} + +func (u poolUniques) AsSlice() []constraint { + return []constraint{} +} + +type poolChecks struct{} + +func (c poolChecks) AsSlice() []check { + return []check{} +} diff --git a/db/dbinfo/site.bob.go b/db/dbinfo/site.bob.go new file mode 100644 index 00000000..a0154cfb --- /dev/null +++ b/db/dbinfo/site.bob.go @@ -0,0 +1,281 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package dbinfo + +import "github.com/aarondl/opt/null" + +var Sites = Table[ + siteColumns, + siteIndexes, + siteForeignKeys, + siteUniques, + siteChecks, +]{ + Schema: "", + Name: "site", + Columns: siteColumns{ + AddressID: column{ + Name: "address_id", + DBType: "integer", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + Created: column{ + Name: "created", + DBType: "timestamp without time zone", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + CreatorID: column{ + Name: "creator_id", + DBType: "integer", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + FileID: column{ + Name: "file_id", + DBType: "integer", + Default: "NULL", + Comment: "", + Nullable: true, + Generated: false, + AutoIncr: false, + }, + ID: column{ + Name: "id", + DBType: "integer", + Default: "nextval('site_id_seq'::regclass)", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + Notes: column{ + Name: "notes", + DBType: "text", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + OrganizationID: column{ + Name: "organization_id", + DBType: "integer", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + OwnerName: column{ + Name: "owner_name", + DBType: "text", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + OwnerPhoneE164: column{ + Name: "owner_phone_e164", + DBType: "text", + Default: "NULL", + Comment: "", + Nullable: true, + Generated: false, + AutoIncr: false, + }, + ResidentOwned: column{ + Name: "resident_owned", + DBType: "boolean", + Default: "NULL", + Comment: "", + Nullable: true, + Generated: false, + AutoIncr: false, + }, + ResidentPhoneE164: column{ + Name: "resident_phone_e164", + DBType: "text", + Default: "NULL", + Comment: "", + Nullable: true, + Generated: false, + AutoIncr: 
false, + }, + Tags: column{ + Name: "tags", + DBType: "hstore", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + Version: column{ + Name: "version", + DBType: "integer", + Default: "", + Comment: "", + Nullable: false, + Generated: false, + AutoIncr: false, + }, + }, + Indexes: siteIndexes{ + SitePkey: index{ + Type: "btree", + Name: "site_pkey", + Columns: []indexColumn{ + { + Name: "id", + Desc: null.FromCond(false, true), + IsExpression: false, + }, + { + Name: "version", + Desc: null.FromCond(false, true), + IsExpression: false, + }, + }, + Unique: true, + Comment: "", + NullsFirst: []bool{false, false}, + NullsDistinct: false, + Where: "", + Include: []string{}, + }, + SiteAddressIDKey: index{ + Type: "btree", + Name: "site_address_id_key", + Columns: []indexColumn{ + { + Name: "address_id", + Desc: null.FromCond(false, true), + IsExpression: false, + }, + }, + Unique: true, + Comment: "", + NullsFirst: []bool{false}, + NullsDistinct: false, + Where: "", + Include: []string{}, + }, + }, + PrimaryKey: &constraint{ + Name: "site_pkey", + Columns: []string{"id", "version"}, + Comment: "", + }, + ForeignKeys: siteForeignKeys{ + SiteSiteAddressIDFkey: foreignKey{ + constraint: constraint{ + Name: "site.site_address_id_fkey", + Columns: []string{"address_id"}, + Comment: "", + }, + ForeignTable: "address", + ForeignColumns: []string{"id"}, + }, + SiteSiteCreatorIDFkey: foreignKey{ + constraint: constraint{ + Name: "site.site_creator_id_fkey", + Columns: []string{"creator_id"}, + Comment: "", + }, + ForeignTable: "user_", + ForeignColumns: []string{"id"}, + }, + SiteSiteFileIDFkey: foreignKey{ + constraint: constraint{ + Name: "site.site_file_id_fkey", + Columns: []string{"file_id"}, + Comment: "", + }, + ForeignTable: "fileupload.file", + ForeignColumns: []string{"id"}, + }, + }, + Uniques: siteUniques{ + SiteAddressIDKey: constraint{ + Name: "site_address_id_key", + Columns: []string{"address_id"}, + Comment: "", + }, + 
}, + + Comment: "", +} + +type siteColumns struct { + AddressID column + Created column + CreatorID column + FileID column + ID column + Notes column + OrganizationID column + OwnerName column + OwnerPhoneE164 column + ResidentOwned column + ResidentPhoneE164 column + Tags column + Version column +} + +func (c siteColumns) AsSlice() []column { + return []column{ + c.AddressID, c.Created, c.CreatorID, c.FileID, c.ID, c.Notes, c.OrganizationID, c.OwnerName, c.OwnerPhoneE164, c.ResidentOwned, c.ResidentPhoneE164, c.Tags, c.Version, + } +} + +type siteIndexes struct { + SitePkey index + SiteAddressIDKey index +} + +func (i siteIndexes) AsSlice() []index { + return []index{ + i.SitePkey, i.SiteAddressIDKey, + } +} + +type siteForeignKeys struct { + SiteSiteAddressIDFkey foreignKey + SiteSiteCreatorIDFkey foreignKey + SiteSiteFileIDFkey foreignKey +} + +func (f siteForeignKeys) AsSlice() []foreignKey { + return []foreignKey{ + f.SiteSiteAddressIDFkey, f.SiteSiteCreatorIDFkey, f.SiteSiteFileIDFkey, + } +} + +type siteUniques struct { + SiteAddressIDKey constraint +} + +func (u siteUniques) AsSlice() []constraint { + return []constraint{ + u.SiteAddressIDKey, + } +} + +type siteChecks struct{} + +func (c siteChecks) AsSlice() []check { + return []check{} +} diff --git a/db/enums/enums.bob.go b/db/enums/enums.bob.go index ef4a8d20..ecb6cad5 100644 --- a/db/enums/enums.bob.go +++ b/db/enums/enums.bob.go @@ -8,6 +8,270 @@ import ( "fmt" ) +// Enum values for ArcgisFieldtype +const ( + ArcgisFieldtypeEsrifieldtypesmallinteger ArcgisFieldtype = "esriFieldTypeSmallInteger" + ArcgisFieldtypeEsrifieldtypeinteger ArcgisFieldtype = "esriFieldTypeInteger" + ArcgisFieldtypeEsrifieldtypesingle ArcgisFieldtype = "esriFieldTypeSingle" + ArcgisFieldtypeEsrifieldtypedouble ArcgisFieldtype = "esriFieldTypeDouble" + ArcgisFieldtypeEsrifieldtypestring ArcgisFieldtype = "esriFieldTypeString" + ArcgisFieldtypeEsrifieldtypedate ArcgisFieldtype = "esriFieldTypeDate" + 
ArcgisFieldtypeEsrifieldtypeoid ArcgisFieldtype = "esriFieldTypeOID" + ArcgisFieldtypeEsrifieldtypegeometry ArcgisFieldtype = "esriFieldTypeGeometry" + ArcgisFieldtypeEsrifieldtypeblob ArcgisFieldtype = "esriFieldTypeBlob" + ArcgisFieldtypeEsrifieldtyperaster ArcgisFieldtype = "esriFieldTypeRaster" + ArcgisFieldtypeEsrifieldtypeguid ArcgisFieldtype = "esriFieldTypeGUID" + ArcgisFieldtypeEsrifieldtypeglobalid ArcgisFieldtype = "esriFieldTypeGlobalID" + ArcgisFieldtypeEsrifieldtypexml ArcgisFieldtype = "esriFieldTypeXML" + ArcgisFieldtypeEsrifieldtypebiginteger ArcgisFieldtype = "esriFieldTypeBigInteger" +) + +func AllArcgisFieldtype() []ArcgisFieldtype { + return []ArcgisFieldtype{ + ArcgisFieldtypeEsrifieldtypesmallinteger, + ArcgisFieldtypeEsrifieldtypeinteger, + ArcgisFieldtypeEsrifieldtypesingle, + ArcgisFieldtypeEsrifieldtypedouble, + ArcgisFieldtypeEsrifieldtypestring, + ArcgisFieldtypeEsrifieldtypedate, + ArcgisFieldtypeEsrifieldtypeoid, + ArcgisFieldtypeEsrifieldtypegeometry, + ArcgisFieldtypeEsrifieldtypeblob, + ArcgisFieldtypeEsrifieldtyperaster, + ArcgisFieldtypeEsrifieldtypeguid, + ArcgisFieldtypeEsrifieldtypeglobalid, + ArcgisFieldtypeEsrifieldtypexml, + ArcgisFieldtypeEsrifieldtypebiginteger, + } +} + +type ArcgisFieldtype string + +func (e ArcgisFieldtype) String() string { + return string(e) +} + +func (e ArcgisFieldtype) Valid() bool { + switch e { + case ArcgisFieldtypeEsrifieldtypesmallinteger, + ArcgisFieldtypeEsrifieldtypeinteger, + ArcgisFieldtypeEsrifieldtypesingle, + ArcgisFieldtypeEsrifieldtypedouble, + ArcgisFieldtypeEsrifieldtypestring, + ArcgisFieldtypeEsrifieldtypedate, + ArcgisFieldtypeEsrifieldtypeoid, + ArcgisFieldtypeEsrifieldtypegeometry, + ArcgisFieldtypeEsrifieldtypeblob, + ArcgisFieldtypeEsrifieldtyperaster, + ArcgisFieldtypeEsrifieldtypeguid, + ArcgisFieldtypeEsrifieldtypeglobalid, + ArcgisFieldtypeEsrifieldtypexml, + ArcgisFieldtypeEsrifieldtypebiginteger: + return true + default: + return false + } +} + +// useful when testing 
in other packages +func (e ArcgisFieldtype) All() []ArcgisFieldtype { + return AllArcgisFieldtype() +} + +func (e ArcgisFieldtype) MarshalText() ([]byte, error) { + return []byte(e), nil +} + +func (e *ArcgisFieldtype) UnmarshalText(text []byte) error { + return e.Scan(text) +} + +func (e ArcgisFieldtype) MarshalBinary() ([]byte, error) { + return []byte(e), nil +} + +func (e *ArcgisFieldtype) UnmarshalBinary(data []byte) error { + return e.Scan(data) +} + +func (e ArcgisFieldtype) Value() (driver.Value, error) { + return string(e), nil +} + +func (e *ArcgisFieldtype) Scan(value any) error { + switch x := value.(type) { + case string: + *e = ArcgisFieldtype(x) + case []byte: + *e = ArcgisFieldtype(x) + case nil: + return fmt.Errorf("cannot nil into ArcgisFieldtype") + default: + return fmt.Errorf("cannot scan type %T: %v", value, value) + } + + if !e.Valid() { + return fmt.Errorf("invalid ArcgisFieldtype value: %s", *e) + } + + return nil +} + +// Enum values for ArcgisMappingdestinationaddress +const ( + ArcgisMappingdestinationaddressCountry ArcgisMappingdestinationaddress = "country" + ArcgisMappingdestinationaddressLocality ArcgisMappingdestinationaddress = "locality" + ArcgisMappingdestinationaddressPostalCode ArcgisMappingdestinationaddress = "postal_code" + ArcgisMappingdestinationaddressStreet ArcgisMappingdestinationaddress = "street" + ArcgisMappingdestinationaddressUnit ArcgisMappingdestinationaddress = "unit" +) + +func AllArcgisMappingdestinationaddress() []ArcgisMappingdestinationaddress { + return []ArcgisMappingdestinationaddress{ + ArcgisMappingdestinationaddressCountry, + ArcgisMappingdestinationaddressLocality, + ArcgisMappingdestinationaddressPostalCode, + ArcgisMappingdestinationaddressStreet, + ArcgisMappingdestinationaddressUnit, + } +} + +type ArcgisMappingdestinationaddress string + +func (e ArcgisMappingdestinationaddress) String() string { + return string(e) +} + +func (e ArcgisMappingdestinationaddress) Valid() bool { + switch e { + case 
ArcgisMappingdestinationaddressCountry, + ArcgisMappingdestinationaddressLocality, + ArcgisMappingdestinationaddressPostalCode, + ArcgisMappingdestinationaddressStreet, + ArcgisMappingdestinationaddressUnit: + return true + default: + return false + } +} + +// useful when testing in other packages +func (e ArcgisMappingdestinationaddress) All() []ArcgisMappingdestinationaddress { + return AllArcgisMappingdestinationaddress() +} + +func (e ArcgisMappingdestinationaddress) MarshalText() ([]byte, error) { + return []byte(e), nil +} + +func (e *ArcgisMappingdestinationaddress) UnmarshalText(text []byte) error { + return e.Scan(text) +} + +func (e ArcgisMappingdestinationaddress) MarshalBinary() ([]byte, error) { + return []byte(e), nil +} + +func (e *ArcgisMappingdestinationaddress) UnmarshalBinary(data []byte) error { + return e.Scan(data) +} + +func (e ArcgisMappingdestinationaddress) Value() (driver.Value, error) { + return string(e), nil +} + +func (e *ArcgisMappingdestinationaddress) Scan(value any) error { + switch x := value.(type) { + case string: + *e = ArcgisMappingdestinationaddress(x) + case []byte: + *e = ArcgisMappingdestinationaddress(x) + case nil: + return fmt.Errorf("cannot nil into ArcgisMappingdestinationaddress") + default: + return fmt.Errorf("cannot scan type %T: %v", value, value) + } + + if !e.Valid() { + return fmt.Errorf("invalid ArcgisMappingdestinationaddress value: %s", *e) + } + + return nil +} + +// Enum values for ArcgisMappingdestinationparcel +const ( + ArcgisMappingdestinationparcelApn ArcgisMappingdestinationparcel = "apn" + ArcgisMappingdestinationparcelDescription ArcgisMappingdestinationparcel = "description" +) + +func AllArcgisMappingdestinationparcel() []ArcgisMappingdestinationparcel { + return []ArcgisMappingdestinationparcel{ + ArcgisMappingdestinationparcelApn, + ArcgisMappingdestinationparcelDescription, + } +} + +type ArcgisMappingdestinationparcel string + +func (e ArcgisMappingdestinationparcel) String() string { + 
return string(e) +} + +func (e ArcgisMappingdestinationparcel) Valid() bool { + switch e { + case ArcgisMappingdestinationparcelApn, + ArcgisMappingdestinationparcelDescription: + return true + default: + return false + } +} + +// useful when testing in other packages +func (e ArcgisMappingdestinationparcel) All() []ArcgisMappingdestinationparcel { + return AllArcgisMappingdestinationparcel() +} + +func (e ArcgisMappingdestinationparcel) MarshalText() ([]byte, error) { + return []byte(e), nil +} + +func (e *ArcgisMappingdestinationparcel) UnmarshalText(text []byte) error { + return e.Scan(text) +} + +func (e ArcgisMappingdestinationparcel) MarshalBinary() ([]byte, error) { + return []byte(e), nil +} + +func (e *ArcgisMappingdestinationparcel) UnmarshalBinary(data []byte) error { + return e.Scan(data) +} + +func (e ArcgisMappingdestinationparcel) Value() (driver.Value, error) { + return string(e), nil +} + +func (e *ArcgisMappingdestinationparcel) Scan(value any) error { + switch x := value.(type) { + case string: + *e = ArcgisMappingdestinationparcel(x) + case []byte: + *e = ArcgisMappingdestinationparcel(x) + case nil: + return fmt.Errorf("cannot nil into ArcgisMappingdestinationparcel") + default: + return fmt.Errorf("cannot scan type %T: %v", value, value) + } + + if !e.Valid() { + return fmt.Errorf("invalid ArcgisMappingdestinationparcel value: %s", *e) + } + + return nil +} + // Enum values for Arcgislicensetype const ( ArcgislicensetypeAdvancedut Arcgislicensetype = "advancedUT" @@ -579,6 +843,76 @@ func (e *CommsTextorigin) Scan(value any) error { return nil } +// Enum values for Countrytype +const ( + CountrytypeUsa Countrytype = "usa" +) + +func AllCountrytype() []Countrytype { + return []Countrytype{ + CountrytypeUsa, + } +} + +type Countrytype string + +func (e Countrytype) String() string { + return string(e) +} + +func (e Countrytype) Valid() bool { + switch e { + case CountrytypeUsa: + return true + default: + return false + } +} + +// useful when 
testing in other packages +func (e Countrytype) All() []Countrytype { + return AllCountrytype() +} + +func (e Countrytype) MarshalText() ([]byte, error) { + return []byte(e), nil +} + +func (e *Countrytype) UnmarshalText(text []byte) error { + return e.Scan(text) +} + +func (e Countrytype) MarshalBinary() ([]byte, error) { + return []byte(e), nil +} + +func (e *Countrytype) UnmarshalBinary(data []byte) error { + return e.Scan(data) +} + +func (e Countrytype) Value() (driver.Value, error) { + return string(e), nil +} + +func (e *Countrytype) Scan(value any) error { + switch x := value.(type) { + case string: + *e = Countrytype(x) + case []byte: + *e = Countrytype(x) + case nil: + return fmt.Errorf("cannot nil into Countrytype") + default: + return fmt.Errorf("cannot scan type %T: %v", value, value) + } + + if !e.Valid() { + return fmt.Errorf("invalid Countrytype value: %s", *e) + } + + return nil +} + // Enum values for FileuploadCsvtype const ( FileuploadCsvtypePoollist FileuploadCsvtype = "PoolList" @@ -1104,6 +1438,88 @@ func (e *Notificationtype) Scan(value any) error { return nil } +// Enum values for Poolconditiontype +const ( + PoolconditiontypeBlue Poolconditiontype = "blue" + PoolconditiontypeDry Poolconditiontype = "dry" + PoolconditiontypeFalsePool Poolconditiontype = "false pool" + PoolconditiontypeGreen Poolconditiontype = "green" + PoolconditiontypeMurky Poolconditiontype = "murky" +) + +func AllPoolconditiontype() []Poolconditiontype { + return []Poolconditiontype{ + PoolconditiontypeBlue, + PoolconditiontypeDry, + PoolconditiontypeFalsePool, + PoolconditiontypeGreen, + PoolconditiontypeMurky, + } +} + +type Poolconditiontype string + +func (e Poolconditiontype) String() string { + return string(e) +} + +func (e Poolconditiontype) Valid() bool { + switch e { + case PoolconditiontypeBlue, + PoolconditiontypeDry, + PoolconditiontypeFalsePool, + PoolconditiontypeGreen, + PoolconditiontypeMurky: + return true + default: + return false + } +} + +// useful 
when testing in other packages +func (e Poolconditiontype) All() []Poolconditiontype { + return AllPoolconditiontype() +} + +func (e Poolconditiontype) MarshalText() ([]byte, error) { + return []byte(e), nil +} + +func (e *Poolconditiontype) UnmarshalText(text []byte) error { + return e.Scan(text) +} + +func (e Poolconditiontype) MarshalBinary() ([]byte, error) { + return []byte(e), nil +} + +func (e *Poolconditiontype) UnmarshalBinary(data []byte) error { + return e.Scan(data) +} + +func (e Poolconditiontype) Value() (driver.Value, error) { + return string(e), nil +} + +func (e *Poolconditiontype) Scan(value any) error { + switch x := value.(type) { + case string: + *e = Poolconditiontype(x) + case []byte: + *e = Poolconditiontype(x) + case nil: + return fmt.Errorf("cannot nil into Poolconditiontype") + default: + return fmt.Errorf("cannot scan type %T: %v", value, value) + } + + if !e.Valid() { + return fmt.Errorf("invalid Poolconditiontype value: %s", *e) + } + + return nil +} + // Enum values for PublicreportAccuracytype const ( PublicreportAccuracytypeRooftop PublicreportAccuracytype = "rooftop" diff --git a/db/factory/address.bob.go b/db/factory/address.bob.go new file mode 100644 index 00000000..a12c7a49 --- /dev/null +++ b/db/factory/address.bob.go @@ -0,0 +1,719 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package factory + +import ( + "context" + "testing" + "time" + + "github.com/Gleipnir-Technology/bob" + enums "github.com/Gleipnir-Technology/nidus-sync/db/enums" + models "github.com/Gleipnir-Technology/nidus-sync/db/models" + "github.com/aarondl/opt/omit" + "github.com/jaswdr/faker/v2" +) + +type AddressMod interface { + Apply(context.Context, *AddressTemplate) +} + +type AddressModFunc func(context.Context, *AddressTemplate) + +func (f AddressModFunc) Apply(ctx context.Context, n *AddressTemplate) { + f(ctx, n) +} + +type AddressModSlice []AddressMod + +func (mods AddressModSlice) Apply(ctx context.Context, n *AddressTemplate) { + for _, f := range mods { + f.Apply(ctx, n) + } +} + +// AddressTemplate is an object representing the database table. +// all columns are optional and should be set by mods +type AddressTemplate struct { + Country func() enums.Countrytype + Created func() time.Time + Geom func() string + H3cell func() string + ID func() int32 + Locality func() string + Number func() int32 + PostalCode func() string + Street func() string + Unit func() string + + r addressR + f *Factory + + alreadyPersisted bool +} + +type addressR struct { + Site *addressRSiteR +} + +type addressRSiteR struct { + o *SiteTemplate +} + +// Apply mods to the AddressTemplate +func (o *AddressTemplate) Apply(ctx context.Context, mods ...AddressMod) { + for _, mod := range mods { + mod.Apply(ctx, o) + } +} + +// setModelRels creates and sets the relationships on *models.Address +// according to the relationships in the template. 
Nothing is inserted into the db +func (t AddressTemplate) setModelRels(o *models.Address) { + if t.r.Site != nil { + rel := t.r.Site.o.Build() + rel.R.Address = o + rel.AddressID = o.ID // h2 + o.R.Site = rel + } +} + +// BuildSetter returns an *models.AddressSetter +// this does nothing with the relationship templates +func (o AddressTemplate) BuildSetter() *models.AddressSetter { + m := &models.AddressSetter{} + + if o.Country != nil { + val := o.Country() + m.Country = omit.From(val) + } + if o.Created != nil { + val := o.Created() + m.Created = omit.From(val) + } + if o.Geom != nil { + val := o.Geom() + m.Geom = omit.From(val) + } + if o.H3cell != nil { + val := o.H3cell() + m.H3cell = omit.From(val) + } + if o.ID != nil { + val := o.ID() + m.ID = omit.From(val) + } + if o.Locality != nil { + val := o.Locality() + m.Locality = omit.From(val) + } + if o.Number != nil { + val := o.Number() + m.Number = omit.From(val) + } + if o.PostalCode != nil { + val := o.PostalCode() + m.PostalCode = omit.From(val) + } + if o.Street != nil { + val := o.Street() + m.Street = omit.From(val) + } + if o.Unit != nil { + val := o.Unit() + m.Unit = omit.From(val) + } + + return m +} + +// BuildManySetter returns an []*models.AddressSetter +// this does nothing with the relationship templates +func (o AddressTemplate) BuildManySetter(number int) []*models.AddressSetter { + m := make([]*models.AddressSetter, number) + + for i := range m { + m[i] = o.BuildSetter() + } + + return m +} + +// Build returns an *models.Address +// Related objects are also created and placed in the .R field +// NOTE: Objects are not inserted into the database. 
Use AddressTemplate.Create +func (o AddressTemplate) Build() *models.Address { + m := &models.Address{} + + if o.Country != nil { + m.Country = o.Country() + } + if o.Created != nil { + m.Created = o.Created() + } + if o.Geom != nil { + m.Geom = o.Geom() + } + if o.H3cell != nil { + m.H3cell = o.H3cell() + } + if o.ID != nil { + m.ID = o.ID() + } + if o.Locality != nil { + m.Locality = o.Locality() + } + if o.Number != nil { + m.Number = o.Number() + } + if o.PostalCode != nil { + m.PostalCode = o.PostalCode() + } + if o.Street != nil { + m.Street = o.Street() + } + if o.Unit != nil { + m.Unit = o.Unit() + } + + o.setModelRels(m) + + return m +} + +// BuildMany returns an models.AddressSlice +// Related objects are also created and placed in the .R field +// NOTE: Objects are not inserted into the database. Use AddressTemplate.CreateMany +func (o AddressTemplate) BuildMany(number int) models.AddressSlice { + m := make(models.AddressSlice, number) + + for i := range m { + m[i] = o.Build() + } + + return m +} + +func ensureCreatableAddress(m *models.AddressSetter) { + if !(m.Country.IsValue()) { + val := random_enums_Countrytype(nil) + m.Country = omit.From(val) + } + if !(m.Created.IsValue()) { + val := random_time_Time(nil) + m.Created = omit.From(val) + } + if !(m.Geom.IsValue()) { + val := random_string(nil) + m.Geom = omit.From(val) + } + if !(m.H3cell.IsValue()) { + val := random_string(nil) + m.H3cell = omit.From(val) + } + if !(m.Locality.IsValue()) { + val := random_string(nil) + m.Locality = omit.From(val) + } + if !(m.Number.IsValue()) { + val := random_int32(nil) + m.Number = omit.From(val) + } + if !(m.PostalCode.IsValue()) { + val := random_string(nil) + m.PostalCode = omit.From(val) + } + if !(m.Street.IsValue()) { + val := random_string(nil) + m.Street = omit.From(val) + } + if !(m.Unit.IsValue()) { + val := random_string(nil) + m.Unit = omit.From(val) + } +} + +// insertOptRels creates and inserts any optional the relationships on *models.Address +// 
according to the relationships in the template. +// any required relationship should have already exist on the model +func (o *AddressTemplate) insertOptRels(ctx context.Context, exec bob.Executor, m *models.Address) error { + var err error + + isSiteDone, _ := addressRelSiteCtx.Value(ctx) + if !isSiteDone && o.r.Site != nil { + ctx = addressRelSiteCtx.WithValue(ctx, true) + if o.r.Site.o.alreadyPersisted { + m.R.Site = o.r.Site.o.Build() + } else { + var rel0 *models.Site + rel0, err = o.r.Site.o.Create(ctx, exec) + if err != nil { + return err + } + err = m.AttachSite(ctx, exec, rel0) + if err != nil { + return err + } + } + + } + + return err +} + +// Create builds a address and inserts it into the database +// Relations objects are also inserted and placed in the .R field +func (o *AddressTemplate) Create(ctx context.Context, exec bob.Executor) (*models.Address, error) { + var err error + opt := o.BuildSetter() + ensureCreatableAddress(opt) + + m, err := models.Addresses.Insert(opt).One(ctx, exec) + if err != nil { + return nil, err + } + + if err := o.insertOptRels(ctx, exec, m); err != nil { + return nil, err + } + return m, err +} + +// MustCreate builds a address and inserts it into the database +// Relations objects are also inserted and placed in the .R field +// panics if an error occurs +func (o *AddressTemplate) MustCreate(ctx context.Context, exec bob.Executor) *models.Address { + m, err := o.Create(ctx, exec) + if err != nil { + panic(err) + } + return m +} + +// CreateOrFail builds a address and inserts it into the database +// Relations objects are also inserted and placed in the .R field +// It calls `tb.Fatal(err)` on the test/benchmark if an error occurs +func (o *AddressTemplate) CreateOrFail(ctx context.Context, tb testing.TB, exec bob.Executor) *models.Address { + tb.Helper() + m, err := o.Create(ctx, exec) + if err != nil { + tb.Fatal(err) + return nil + } + return m +} + +// CreateMany builds multiple addresses and inserts them into the 
database +// Relations objects are also inserted and placed in the .R field +func (o AddressTemplate) CreateMany(ctx context.Context, exec bob.Executor, number int) (models.AddressSlice, error) { + var err error + m := make(models.AddressSlice, number) + + for i := range m { + m[i], err = o.Create(ctx, exec) + if err != nil { + return nil, err + } + } + + return m, nil +} + +// MustCreateMany builds multiple addresses and inserts them into the database +// Relations objects are also inserted and placed in the .R field +// panics if an error occurs +func (o AddressTemplate) MustCreateMany(ctx context.Context, exec bob.Executor, number int) models.AddressSlice { + m, err := o.CreateMany(ctx, exec, number) + if err != nil { + panic(err) + } + return m +} + +// CreateManyOrFail builds multiple addresses and inserts them into the database +// Relations objects are also inserted and placed in the .R field +// It calls `tb.Fatal(err)` on the test/benchmark if an error occurs +func (o AddressTemplate) CreateManyOrFail(ctx context.Context, tb testing.TB, exec bob.Executor, number int) models.AddressSlice { + tb.Helper() + m, err := o.CreateMany(ctx, exec, number) + if err != nil { + tb.Fatal(err) + return nil + } + return m +} + +// Address has methods that act as mods for the AddressTemplate +var AddressMods addressMods + +type addressMods struct{} + +func (m addressMods) RandomizeAllColumns(f *faker.Faker) AddressMod { + return AddressModSlice{ + AddressMods.RandomCountry(f), + AddressMods.RandomCreated(f), + AddressMods.RandomGeom(f), + AddressMods.RandomH3cell(f), + AddressMods.RandomID(f), + AddressMods.RandomLocality(f), + AddressMods.RandomNumber(f), + AddressMods.RandomPostalCode(f), + AddressMods.RandomStreet(f), + AddressMods.RandomUnit(f), + } +} + +// Set the model columns to this value +func (m addressMods) Country(val enums.Countrytype) AddressMod { + return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.Country = func() enums.Countrytype { 
return val } + }) +} + +// Set the Column from the function +func (m addressMods) CountryFunc(f func() enums.Countrytype) AddressMod { + return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.Country = f + }) +} + +// Clear any values for the column +func (m addressMods) UnsetCountry() AddressMod { + return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.Country = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m addressMods) RandomCountry(f *faker.Faker) AddressMod { + return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.Country = func() enums.Countrytype { + return random_enums_Countrytype(f) + } + }) +} + +// Set the model columns to this value +func (m addressMods) Created(val time.Time) AddressMod { + return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.Created = func() time.Time { return val } + }) +} + +// Set the Column from the function +func (m addressMods) CreatedFunc(f func() time.Time) AddressMod { + return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.Created = f + }) +} + +// Clear any values for the column +func (m addressMods) UnsetCreated() AddressMod { + return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.Created = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m addressMods) RandomCreated(f *faker.Faker) AddressMod { + return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.Created = func() time.Time { + return random_time_Time(f) + } + }) +} + +// Set the model columns to this value +func (m addressMods) Geom(val string) AddressMod { + return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.Geom = func() string { return val } + }) +} + +// Set the Column from the function +func (m addressMods) GeomFunc(f func() string) AddressMod { + 
return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.Geom = f + }) +} + +// Clear any values for the column +func (m addressMods) UnsetGeom() AddressMod { + return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.Geom = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m addressMods) RandomGeom(f *faker.Faker) AddressMod { + return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.Geom = func() string { + return random_string(f) + } + }) +} + +// Set the model columns to this value +func (m addressMods) H3cell(val string) AddressMod { + return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.H3cell = func() string { return val } + }) +} + +// Set the Column from the function +func (m addressMods) H3cellFunc(f func() string) AddressMod { + return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.H3cell = f + }) +} + +// Clear any values for the column +func (m addressMods) UnsetH3cell() AddressMod { + return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.H3cell = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m addressMods) RandomH3cell(f *faker.Faker) AddressMod { + return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.H3cell = func() string { + return random_string(f) + } + }) +} + +// Set the model columns to this value +func (m addressMods) ID(val int32) AddressMod { + return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.ID = func() int32 { return val } + }) +} + +// Set the Column from the function +func (m addressMods) IDFunc(f func() int32) AddressMod { + return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.ID = f + }) +} + +// Clear any values for the column +func (m addressMods) UnsetID() AddressMod { + return AddressModFunc(func(_ 
context.Context, o *AddressTemplate) { + o.ID = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m addressMods) RandomID(f *faker.Faker) AddressMod { + return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.ID = func() int32 { + return random_int32(f) + } + }) +} + +// Set the model columns to this value +func (m addressMods) Locality(val string) AddressMod { + return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.Locality = func() string { return val } + }) +} + +// Set the Column from the function +func (m addressMods) LocalityFunc(f func() string) AddressMod { + return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.Locality = f + }) +} + +// Clear any values for the column +func (m addressMods) UnsetLocality() AddressMod { + return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.Locality = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m addressMods) RandomLocality(f *faker.Faker) AddressMod { + return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.Locality = func() string { + return random_string(f) + } + }) +} + +// Set the model columns to this value +func (m addressMods) Number(val int32) AddressMod { + return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.Number = func() int32 { return val } + }) +} + +// Set the Column from the function +func (m addressMods) NumberFunc(f func() int32) AddressMod { + return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.Number = f + }) +} + +// Clear any values for the column +func (m addressMods) UnsetNumber() AddressMod { + return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.Number = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m 
addressMods) RandomNumber(f *faker.Faker) AddressMod { + return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.Number = func() int32 { + return random_int32(f) + } + }) +} + +// Set the model columns to this value +func (m addressMods) PostalCode(val string) AddressMod { + return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.PostalCode = func() string { return val } + }) +} + +// Set the Column from the function +func (m addressMods) PostalCodeFunc(f func() string) AddressMod { + return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.PostalCode = f + }) +} + +// Clear any values for the column +func (m addressMods) UnsetPostalCode() AddressMod { + return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.PostalCode = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m addressMods) RandomPostalCode(f *faker.Faker) AddressMod { + return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.PostalCode = func() string { + return random_string(f) + } + }) +} + +// Set the model columns to this value +func (m addressMods) Street(val string) AddressMod { + return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.Street = func() string { return val } + }) +} + +// Set the Column from the function +func (m addressMods) StreetFunc(f func() string) AddressMod { + return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.Street = f + }) +} + +// Clear any values for the column +func (m addressMods) UnsetStreet() AddressMod { + return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.Street = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m addressMods) RandomStreet(f *faker.Faker) AddressMod { + return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.Street = func() string { + 
return random_string(f) + } + }) +} + +// Set the model columns to this value +func (m addressMods) Unit(val string) AddressMod { + return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.Unit = func() string { return val } + }) +} + +// Set the Column from the function +func (m addressMods) UnitFunc(f func() string) AddressMod { + return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.Unit = f + }) +} + +// Clear any values for the column +func (m addressMods) UnsetUnit() AddressMod { + return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.Unit = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m addressMods) RandomUnit(f *faker.Faker) AddressMod { + return AddressModFunc(func(_ context.Context, o *AddressTemplate) { + o.Unit = func() string { + return random_string(f) + } + }) +} + +func (m addressMods) WithParentsCascading() AddressMod { + return AddressModFunc(func(ctx context.Context, o *AddressTemplate) { + if isDone, _ := addressWithParentsCascadingCtx.Value(ctx); isDone { + return + } + ctx = addressWithParentsCascadingCtx.WithValue(ctx, true) + { + + related := o.f.NewSiteWithContext(ctx, SiteMods.WithParentsCascading()) + m.WithSite(related).Apply(ctx, o) + } + }) +} + +func (m addressMods) WithSite(rel *SiteTemplate) AddressMod { + return AddressModFunc(func(ctx context.Context, o *AddressTemplate) { + o.r.Site = &addressRSiteR{ + o: rel, + } + }) +} + +func (m addressMods) WithNewSite(mods ...SiteMod) AddressMod { + return AddressModFunc(func(ctx context.Context, o *AddressTemplate) { + related := o.f.NewSiteWithContext(ctx, mods...) 
+ + m.WithSite(related).Apply(ctx, o) + }) +} + +func (m addressMods) WithExistingSite(em *models.Site) AddressMod { + return AddressModFunc(func(ctx context.Context, o *AddressTemplate) { + o.r.Site = &addressRSiteR{ + o: o.f.FromExistingSite(em), + } + }) +} + +func (m addressMods) WithoutSite() AddressMod { + return AddressModFunc(func(ctx context.Context, o *AddressTemplate) { + o.r.Site = nil + }) +} diff --git a/db/factory/arcgis.address_mapping.bob.go b/db/factory/arcgis.address_mapping.bob.go new file mode 100644 index 00000000..3277c800 --- /dev/null +++ b/db/factory/arcgis.address_mapping.bob.go @@ -0,0 +1,570 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package factory + +import ( + "context" + "testing" + + "github.com/Gleipnir-Technology/bob" + enums "github.com/Gleipnir-Technology/nidus-sync/db/enums" + models "github.com/Gleipnir-Technology/nidus-sync/db/models" + "github.com/aarondl/opt/omit" + "github.com/jaswdr/faker/v2" +) + +type ArcgisAddressMappingMod interface { + Apply(context.Context, *ArcgisAddressMappingTemplate) +} + +type ArcgisAddressMappingModFunc func(context.Context, *ArcgisAddressMappingTemplate) + +func (f ArcgisAddressMappingModFunc) Apply(ctx context.Context, n *ArcgisAddressMappingTemplate) { + f(ctx, n) +} + +type ArcgisAddressMappingModSlice []ArcgisAddressMappingMod + +func (mods ArcgisAddressMappingModSlice) Apply(ctx context.Context, n *ArcgisAddressMappingTemplate) { + for _, f := range mods { + f.Apply(ctx, n) + } +} + +// ArcgisAddressMappingTemplate is an object representing the database table. 
+// all columns are optional and should be set by mods +type ArcgisAddressMappingTemplate struct { + Destination func() enums.ArcgisMappingdestinationaddress + LayerFeatureServiceItemID func() string + LayerIndex func() int32 + LayerFieldName func() string + OrganizationID func() int32 + + r arcgisAddressMappingR + f *Factory + + alreadyPersisted bool +} + +type arcgisAddressMappingR struct { + LayerField *arcgisAddressMappingRLayerFieldR + Organization *arcgisAddressMappingROrganizationR +} + +type arcgisAddressMappingRLayerFieldR struct { + o *ArcgisLayerFieldTemplate +} +type arcgisAddressMappingROrganizationR struct { + o *OrganizationTemplate +} + +// Apply mods to the ArcgisAddressMappingTemplate +func (o *ArcgisAddressMappingTemplate) Apply(ctx context.Context, mods ...ArcgisAddressMappingMod) { + for _, mod := range mods { + mod.Apply(ctx, o) + } +} + +// setModelRels creates and sets the relationships on *models.ArcgisAddressMapping +// according to the relationships in the template. 
Nothing is inserted into the db +func (t ArcgisAddressMappingTemplate) setModelRels(o *models.ArcgisAddressMapping) { + if t.r.LayerField != nil { + rel := t.r.LayerField.o.Build() + rel.R.AddressMappings = append(rel.R.AddressMappings, o) + o.LayerFeatureServiceItemID = rel.LayerFeatureServiceItemID // h2 + o.LayerIndex = rel.LayerIndex // h2 + o.LayerFieldName = rel.Name // h2 + o.R.LayerField = rel + } + + if t.r.Organization != nil { + rel := t.r.Organization.o.Build() + rel.R.AddressMappings = append(rel.R.AddressMappings, o) + o.OrganizationID = rel.ID // h2 + o.R.Organization = rel + } +} + +// BuildSetter returns an *models.ArcgisAddressMappingSetter +// this does nothing with the relationship templates +func (o ArcgisAddressMappingTemplate) BuildSetter() *models.ArcgisAddressMappingSetter { + m := &models.ArcgisAddressMappingSetter{} + + if o.Destination != nil { + val := o.Destination() + m.Destination = omit.From(val) + } + if o.LayerFeatureServiceItemID != nil { + val := o.LayerFeatureServiceItemID() + m.LayerFeatureServiceItemID = omit.From(val) + } + if o.LayerIndex != nil { + val := o.LayerIndex() + m.LayerIndex = omit.From(val) + } + if o.LayerFieldName != nil { + val := o.LayerFieldName() + m.LayerFieldName = omit.From(val) + } + if o.OrganizationID != nil { + val := o.OrganizationID() + m.OrganizationID = omit.From(val) + } + + return m +} + +// BuildManySetter returns an []*models.ArcgisAddressMappingSetter +// this does nothing with the relationship templates +func (o ArcgisAddressMappingTemplate) BuildManySetter(number int) []*models.ArcgisAddressMappingSetter { + m := make([]*models.ArcgisAddressMappingSetter, number) + + for i := range m { + m[i] = o.BuildSetter() + } + + return m +} + +// Build returns an *models.ArcgisAddressMapping +// Related objects are also created and placed in the .R field +// NOTE: Objects are not inserted into the database. 
Use ArcgisAddressMappingTemplate.Create +func (o ArcgisAddressMappingTemplate) Build() *models.ArcgisAddressMapping { + m := &models.ArcgisAddressMapping{} + + if o.Destination != nil { + m.Destination = o.Destination() + } + if o.LayerFeatureServiceItemID != nil { + m.LayerFeatureServiceItemID = o.LayerFeatureServiceItemID() + } + if o.LayerIndex != nil { + m.LayerIndex = o.LayerIndex() + } + if o.LayerFieldName != nil { + m.LayerFieldName = o.LayerFieldName() + } + if o.OrganizationID != nil { + m.OrganizationID = o.OrganizationID() + } + + o.setModelRels(m) + + return m +} + +// BuildMany returns an models.ArcgisAddressMappingSlice +// Related objects are also created and placed in the .R field +// NOTE: Objects are not inserted into the database. Use ArcgisAddressMappingTemplate.CreateMany +func (o ArcgisAddressMappingTemplate) BuildMany(number int) models.ArcgisAddressMappingSlice { + m := make(models.ArcgisAddressMappingSlice, number) + + for i := range m { + m[i] = o.Build() + } + + return m +} + +func ensureCreatableArcgisAddressMapping(m *models.ArcgisAddressMappingSetter) { + if !(m.Destination.IsValue()) { + val := random_enums_ArcgisMappingdestinationaddress(nil) + m.Destination = omit.From(val) + } + if !(m.LayerFeatureServiceItemID.IsValue()) { + val := random_string(nil) + m.LayerFeatureServiceItemID = omit.From(val) + } + if !(m.LayerIndex.IsValue()) { + val := random_int32(nil) + m.LayerIndex = omit.From(val) + } + if !(m.LayerFieldName.IsValue()) { + val := random_string(nil) + m.LayerFieldName = omit.From(val) + } + if !(m.OrganizationID.IsValue()) { + val := random_int32(nil) + m.OrganizationID = omit.From(val) + } +} + +// insertOptRels creates and inserts any optional the relationships on *models.ArcgisAddressMapping +// according to the relationships in the template. 
+// any required relationship should have already exist on the model +func (o *ArcgisAddressMappingTemplate) insertOptRels(ctx context.Context, exec bob.Executor, m *models.ArcgisAddressMapping) error { + var err error + + return err +} + +// Create builds a arcgisAddressMapping and inserts it into the database +// Relations objects are also inserted and placed in the .R field +func (o *ArcgisAddressMappingTemplate) Create(ctx context.Context, exec bob.Executor) (*models.ArcgisAddressMapping, error) { + var err error + opt := o.BuildSetter() + ensureCreatableArcgisAddressMapping(opt) + + if o.r.LayerField == nil { + ArcgisAddressMappingMods.WithNewLayerField().Apply(ctx, o) + } + + var rel0 *models.ArcgisLayerField + + if o.r.LayerField.o.alreadyPersisted { + rel0 = o.r.LayerField.o.Build() + } else { + rel0, err = o.r.LayerField.o.Create(ctx, exec) + if err != nil { + return nil, err + } + } + + opt.LayerFeatureServiceItemID = omit.From(rel0.LayerFeatureServiceItemID) + opt.LayerIndex = omit.From(rel0.LayerIndex) + opt.LayerFieldName = omit.From(rel0.Name) + + if o.r.Organization == nil { + ArcgisAddressMappingMods.WithNewOrganization().Apply(ctx, o) + } + + var rel1 *models.Organization + + if o.r.Organization.o.alreadyPersisted { + rel1 = o.r.Organization.o.Build() + } else { + rel1, err = o.r.Organization.o.Create(ctx, exec) + if err != nil { + return nil, err + } + } + + opt.OrganizationID = omit.From(rel1.ID) + + m, err := models.ArcgisAddressMappings.Insert(opt).One(ctx, exec) + if err != nil { + return nil, err + } + + m.R.LayerField = rel0 + m.R.Organization = rel1 + + if err := o.insertOptRels(ctx, exec, m); err != nil { + return nil, err + } + return m, err +} + +// MustCreate builds a arcgisAddressMapping and inserts it into the database +// Relations objects are also inserted and placed in the .R field +// panics if an error occurs +func (o *ArcgisAddressMappingTemplate) MustCreate(ctx context.Context, exec bob.Executor) *models.ArcgisAddressMapping { 
+ m, err := o.Create(ctx, exec) + if err != nil { + panic(err) + } + return m +} + +// CreateOrFail builds a arcgisAddressMapping and inserts it into the database +// Relations objects are also inserted and placed in the .R field +// It calls `tb.Fatal(err)` on the test/benchmark if an error occurs +func (o *ArcgisAddressMappingTemplate) CreateOrFail(ctx context.Context, tb testing.TB, exec bob.Executor) *models.ArcgisAddressMapping { + tb.Helper() + m, err := o.Create(ctx, exec) + if err != nil { + tb.Fatal(err) + return nil + } + return m +} + +// CreateMany builds multiple arcgisAddressMappings and inserts them into the database +// Relations objects are also inserted and placed in the .R field +func (o ArcgisAddressMappingTemplate) CreateMany(ctx context.Context, exec bob.Executor, number int) (models.ArcgisAddressMappingSlice, error) { + var err error + m := make(models.ArcgisAddressMappingSlice, number) + + for i := range m { + m[i], err = o.Create(ctx, exec) + if err != nil { + return nil, err + } + } + + return m, nil +} + +// MustCreateMany builds multiple arcgisAddressMappings and inserts them into the database +// Relations objects are also inserted and placed in the .R field +// panics if an error occurs +func (o ArcgisAddressMappingTemplate) MustCreateMany(ctx context.Context, exec bob.Executor, number int) models.ArcgisAddressMappingSlice { + m, err := o.CreateMany(ctx, exec, number) + if err != nil { + panic(err) + } + return m +} + +// CreateManyOrFail builds multiple arcgisAddressMappings and inserts them into the database +// Relations objects are also inserted and placed in the .R field +// It calls `tb.Fatal(err)` on the test/benchmark if an error occurs +func (o ArcgisAddressMappingTemplate) CreateManyOrFail(ctx context.Context, tb testing.TB, exec bob.Executor, number int) models.ArcgisAddressMappingSlice { + tb.Helper() + m, err := o.CreateMany(ctx, exec, number) + if err != nil { + tb.Fatal(err) + return nil + } + return m +} + +// 
ArcgisAddressMapping has methods that act as mods for the ArcgisAddressMappingTemplate +var ArcgisAddressMappingMods arcgisAddressMappingMods + +type arcgisAddressMappingMods struct{} + +func (m arcgisAddressMappingMods) RandomizeAllColumns(f *faker.Faker) ArcgisAddressMappingMod { + return ArcgisAddressMappingModSlice{ + ArcgisAddressMappingMods.RandomDestination(f), + ArcgisAddressMappingMods.RandomLayerFeatureServiceItemID(f), + ArcgisAddressMappingMods.RandomLayerIndex(f), + ArcgisAddressMappingMods.RandomLayerFieldName(f), + ArcgisAddressMappingMods.RandomOrganizationID(f), + } +} + +// Set the model columns to this value +func (m arcgisAddressMappingMods) Destination(val enums.ArcgisMappingdestinationaddress) ArcgisAddressMappingMod { + return ArcgisAddressMappingModFunc(func(_ context.Context, o *ArcgisAddressMappingTemplate) { + o.Destination = func() enums.ArcgisMappingdestinationaddress { return val } + }) +} + +// Set the Column from the function +func (m arcgisAddressMappingMods) DestinationFunc(f func() enums.ArcgisMappingdestinationaddress) ArcgisAddressMappingMod { + return ArcgisAddressMappingModFunc(func(_ context.Context, o *ArcgisAddressMappingTemplate) { + o.Destination = f + }) +} + +// Clear any values for the column +func (m arcgisAddressMappingMods) UnsetDestination() ArcgisAddressMappingMod { + return ArcgisAddressMappingModFunc(func(_ context.Context, o *ArcgisAddressMappingTemplate) { + o.Destination = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m arcgisAddressMappingMods) RandomDestination(f *faker.Faker) ArcgisAddressMappingMod { + return ArcgisAddressMappingModFunc(func(_ context.Context, o *ArcgisAddressMappingTemplate) { + o.Destination = func() enums.ArcgisMappingdestinationaddress { + return random_enums_ArcgisMappingdestinationaddress(f) + } + }) +} + +// Set the model columns to this value +func (m arcgisAddressMappingMods) 
LayerFeatureServiceItemID(val string) ArcgisAddressMappingMod { + return ArcgisAddressMappingModFunc(func(_ context.Context, o *ArcgisAddressMappingTemplate) { + o.LayerFeatureServiceItemID = func() string { return val } + }) +} + +// Set the Column from the function +func (m arcgisAddressMappingMods) LayerFeatureServiceItemIDFunc(f func() string) ArcgisAddressMappingMod { + return ArcgisAddressMappingModFunc(func(_ context.Context, o *ArcgisAddressMappingTemplate) { + o.LayerFeatureServiceItemID = f + }) +} + +// Clear any values for the column +func (m arcgisAddressMappingMods) UnsetLayerFeatureServiceItemID() ArcgisAddressMappingMod { + return ArcgisAddressMappingModFunc(func(_ context.Context, o *ArcgisAddressMappingTemplate) { + o.LayerFeatureServiceItemID = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m arcgisAddressMappingMods) RandomLayerFeatureServiceItemID(f *faker.Faker) ArcgisAddressMappingMod { + return ArcgisAddressMappingModFunc(func(_ context.Context, o *ArcgisAddressMappingTemplate) { + o.LayerFeatureServiceItemID = func() string { + return random_string(f) + } + }) +} + +// Set the model columns to this value +func (m arcgisAddressMappingMods) LayerIndex(val int32) ArcgisAddressMappingMod { + return ArcgisAddressMappingModFunc(func(_ context.Context, o *ArcgisAddressMappingTemplate) { + o.LayerIndex = func() int32 { return val } + }) +} + +// Set the Column from the function +func (m arcgisAddressMappingMods) LayerIndexFunc(f func() int32) ArcgisAddressMappingMod { + return ArcgisAddressMappingModFunc(func(_ context.Context, o *ArcgisAddressMappingTemplate) { + o.LayerIndex = f + }) +} + +// Clear any values for the column +func (m arcgisAddressMappingMods) UnsetLayerIndex() ArcgisAddressMappingMod { + return ArcgisAddressMappingModFunc(func(_ context.Context, o *ArcgisAddressMappingTemplate) { + o.LayerIndex = nil + }) +} + +// Generates a random value for the 
column using the given faker +// if faker is nil, a default faker is used +func (m arcgisAddressMappingMods) RandomLayerIndex(f *faker.Faker) ArcgisAddressMappingMod { + return ArcgisAddressMappingModFunc(func(_ context.Context, o *ArcgisAddressMappingTemplate) { + o.LayerIndex = func() int32 { + return random_int32(f) + } + }) +} + +// Set the model columns to this value +func (m arcgisAddressMappingMods) LayerFieldName(val string) ArcgisAddressMappingMod { + return ArcgisAddressMappingModFunc(func(_ context.Context, o *ArcgisAddressMappingTemplate) { + o.LayerFieldName = func() string { return val } + }) +} + +// Set the Column from the function +func (m arcgisAddressMappingMods) LayerFieldNameFunc(f func() string) ArcgisAddressMappingMod { + return ArcgisAddressMappingModFunc(func(_ context.Context, o *ArcgisAddressMappingTemplate) { + o.LayerFieldName = f + }) +} + +// Clear any values for the column +func (m arcgisAddressMappingMods) UnsetLayerFieldName() ArcgisAddressMappingMod { + return ArcgisAddressMappingModFunc(func(_ context.Context, o *ArcgisAddressMappingTemplate) { + o.LayerFieldName = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m arcgisAddressMappingMods) RandomLayerFieldName(f *faker.Faker) ArcgisAddressMappingMod { + return ArcgisAddressMappingModFunc(func(_ context.Context, o *ArcgisAddressMappingTemplate) { + o.LayerFieldName = func() string { + return random_string(f) + } + }) +} + +// Set the model columns to this value +func (m arcgisAddressMappingMods) OrganizationID(val int32) ArcgisAddressMappingMod { + return ArcgisAddressMappingModFunc(func(_ context.Context, o *ArcgisAddressMappingTemplate) { + o.OrganizationID = func() int32 { return val } + }) +} + +// Set the Column from the function +func (m arcgisAddressMappingMods) OrganizationIDFunc(f func() int32) ArcgisAddressMappingMod { + return ArcgisAddressMappingModFunc(func(_ context.Context, o 
*ArcgisAddressMappingTemplate) { + o.OrganizationID = f + }) +} + +// Clear any values for the column +func (m arcgisAddressMappingMods) UnsetOrganizationID() ArcgisAddressMappingMod { + return ArcgisAddressMappingModFunc(func(_ context.Context, o *ArcgisAddressMappingTemplate) { + o.OrganizationID = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m arcgisAddressMappingMods) RandomOrganizationID(f *faker.Faker) ArcgisAddressMappingMod { + return ArcgisAddressMappingModFunc(func(_ context.Context, o *ArcgisAddressMappingTemplate) { + o.OrganizationID = func() int32 { + return random_int32(f) + } + }) +} + +func (m arcgisAddressMappingMods) WithParentsCascading() ArcgisAddressMappingMod { + return ArcgisAddressMappingModFunc(func(ctx context.Context, o *ArcgisAddressMappingTemplate) { + if isDone, _ := arcgisAddressMappingWithParentsCascadingCtx.Value(ctx); isDone { + return + } + ctx = arcgisAddressMappingWithParentsCascadingCtx.WithValue(ctx, true) + { + + related := o.f.NewArcgisLayerFieldWithContext(ctx, ArcgisLayerFieldMods.WithParentsCascading()) + m.WithLayerField(related).Apply(ctx, o) + } + { + + related := o.f.NewOrganizationWithContext(ctx, OrganizationMods.WithParentsCascading()) + m.WithOrganization(related).Apply(ctx, o) + } + }) +} + +func (m arcgisAddressMappingMods) WithLayerField(rel *ArcgisLayerFieldTemplate) ArcgisAddressMappingMod { + return ArcgisAddressMappingModFunc(func(ctx context.Context, o *ArcgisAddressMappingTemplate) { + o.r.LayerField = &arcgisAddressMappingRLayerFieldR{ + o: rel, + } + }) +} + +func (m arcgisAddressMappingMods) WithNewLayerField(mods ...ArcgisLayerFieldMod) ArcgisAddressMappingMod { + return ArcgisAddressMappingModFunc(func(ctx context.Context, o *ArcgisAddressMappingTemplate) { + related := o.f.NewArcgisLayerFieldWithContext(ctx, mods...) 
+ + m.WithLayerField(related).Apply(ctx, o) + }) +} + +func (m arcgisAddressMappingMods) WithExistingLayerField(em *models.ArcgisLayerField) ArcgisAddressMappingMod { + return ArcgisAddressMappingModFunc(func(ctx context.Context, o *ArcgisAddressMappingTemplate) { + o.r.LayerField = &arcgisAddressMappingRLayerFieldR{ + o: o.f.FromExistingArcgisLayerField(em), + } + }) +} + +func (m arcgisAddressMappingMods) WithoutLayerField() ArcgisAddressMappingMod { + return ArcgisAddressMappingModFunc(func(ctx context.Context, o *ArcgisAddressMappingTemplate) { + o.r.LayerField = nil + }) +} + +func (m arcgisAddressMappingMods) WithOrganization(rel *OrganizationTemplate) ArcgisAddressMappingMod { + return ArcgisAddressMappingModFunc(func(ctx context.Context, o *ArcgisAddressMappingTemplate) { + o.r.Organization = &arcgisAddressMappingROrganizationR{ + o: rel, + } + }) +} + +func (m arcgisAddressMappingMods) WithNewOrganization(mods ...OrganizationMod) ArcgisAddressMappingMod { + return ArcgisAddressMappingModFunc(func(ctx context.Context, o *ArcgisAddressMappingTemplate) { + related := o.f.NewOrganizationWithContext(ctx, mods...) + + m.WithOrganization(related).Apply(ctx, o) + }) +} + +func (m arcgisAddressMappingMods) WithExistingOrganization(em *models.Organization) ArcgisAddressMappingMod { + return ArcgisAddressMappingModFunc(func(ctx context.Context, o *ArcgisAddressMappingTemplate) { + o.r.Organization = &arcgisAddressMappingROrganizationR{ + o: o.f.FromExistingOrganization(em), + } + }) +} + +func (m arcgisAddressMappingMods) WithoutOrganization() ArcgisAddressMappingMod { + return ArcgisAddressMappingModFunc(func(ctx context.Context, o *ArcgisAddressMappingTemplate) { + o.r.Organization = nil + }) +} diff --git a/db/factory/arcgis.feature_service.bob.go b/db/factory/arcgis.feature_service.bob.go new file mode 100644 index 00000000..a9ac9310 --- /dev/null +++ b/db/factory/arcgis.feature_service.bob.go @@ -0,0 +1,478 @@ +// Code generated by BobGen psql v0.42.5. 
DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package factory + +import ( + "context" + "testing" + + "github.com/Gleipnir-Technology/bob" + models "github.com/Gleipnir-Technology/nidus-sync/db/models" + "github.com/aarondl/opt/omit" + "github.com/jaswdr/faker/v2" +) + +type ArcgisFeatureServiceMod interface { + Apply(context.Context, *ArcgisFeatureServiceTemplate) +} + +type ArcgisFeatureServiceModFunc func(context.Context, *ArcgisFeatureServiceTemplate) + +func (f ArcgisFeatureServiceModFunc) Apply(ctx context.Context, n *ArcgisFeatureServiceTemplate) { + f(ctx, n) +} + +type ArcgisFeatureServiceModSlice []ArcgisFeatureServiceMod + +func (mods ArcgisFeatureServiceModSlice) Apply(ctx context.Context, n *ArcgisFeatureServiceTemplate) { + for _, f := range mods { + f.Apply(ctx, n) + } +} + +// ArcgisFeatureServiceTemplate is an object representing the database table. +// all columns are optional and should be set by mods +type ArcgisFeatureServiceTemplate struct { + Extent func() string + ItemID func() string + SpatialReference func() int32 + URL func() string + + r arcgisFeatureServiceR + f *Factory + + alreadyPersisted bool +} + +type arcgisFeatureServiceR struct { + FeatureServiceItemLayers []*arcgisFeatureServiceRFeatureServiceItemLayersR +} + +type arcgisFeatureServiceRFeatureServiceItemLayersR struct { + number int + o *ArcgisLayerTemplate +} + +// Apply mods to the ArcgisFeatureServiceTemplate +func (o *ArcgisFeatureServiceTemplate) Apply(ctx context.Context, mods ...ArcgisFeatureServiceMod) { + for _, mod := range mods { + mod.Apply(ctx, o) + } +} + +// setModelRels creates and sets the relationships on *models.ArcgisFeatureService +// according to the relationships in the template. 
Nothing is inserted into the db +func (t ArcgisFeatureServiceTemplate) setModelRels(o *models.ArcgisFeatureService) { + if t.r.FeatureServiceItemLayers != nil { + rel := models.ArcgisLayerSlice{} + for _, r := range t.r.FeatureServiceItemLayers { + related := r.o.BuildMany(r.number) + for _, rel := range related { + rel.FeatureServiceItemID = o.ItemID // h2 + rel.R.FeatureServiceItemFeatureService = o + } + rel = append(rel, related...) + } + o.R.FeatureServiceItemLayers = rel + } +} + +// BuildSetter returns an *models.ArcgisFeatureServiceSetter +// this does nothing with the relationship templates +func (o ArcgisFeatureServiceTemplate) BuildSetter() *models.ArcgisFeatureServiceSetter { + m := &models.ArcgisFeatureServiceSetter{} + + if o.Extent != nil { + val := o.Extent() + m.Extent = omit.From(val) + } + if o.ItemID != nil { + val := o.ItemID() + m.ItemID = omit.From(val) + } + if o.SpatialReference != nil { + val := o.SpatialReference() + m.SpatialReference = omit.From(val) + } + if o.URL != nil { + val := o.URL() + m.URL = omit.From(val) + } + + return m +} + +// BuildManySetter returns an []*models.ArcgisFeatureServiceSetter +// this does nothing with the relationship templates +func (o ArcgisFeatureServiceTemplate) BuildManySetter(number int) []*models.ArcgisFeatureServiceSetter { + m := make([]*models.ArcgisFeatureServiceSetter, number) + + for i := range m { + m[i] = o.BuildSetter() + } + + return m +} + +// Build returns an *models.ArcgisFeatureService +// Related objects are also created and placed in the .R field +// NOTE: Objects are not inserted into the database. 
Use ArcgisFeatureServiceTemplate.Create +func (o ArcgisFeatureServiceTemplate) Build() *models.ArcgisFeatureService { + m := &models.ArcgisFeatureService{} + + if o.Extent != nil { + m.Extent = o.Extent() + } + if o.ItemID != nil { + m.ItemID = o.ItemID() + } + if o.SpatialReference != nil { + m.SpatialReference = o.SpatialReference() + } + if o.URL != nil { + m.URL = o.URL() + } + + o.setModelRels(m) + + return m +} + +// BuildMany returns an models.ArcgisFeatureServiceSlice +// Related objects are also created and placed in the .R field +// NOTE: Objects are not inserted into the database. Use ArcgisFeatureServiceTemplate.CreateMany +func (o ArcgisFeatureServiceTemplate) BuildMany(number int) models.ArcgisFeatureServiceSlice { + m := make(models.ArcgisFeatureServiceSlice, number) + + for i := range m { + m[i] = o.Build() + } + + return m +} + +func ensureCreatableArcgisFeatureService(m *models.ArcgisFeatureServiceSetter) { + if !(m.Extent.IsValue()) { + val := random_string(nil) + m.Extent = omit.From(val) + } + if !(m.ItemID.IsValue()) { + val := random_string(nil) + m.ItemID = omit.From(val) + } + if !(m.SpatialReference.IsValue()) { + val := random_int32(nil) + m.SpatialReference = omit.From(val) + } + if !(m.URL.IsValue()) { + val := random_string(nil) + m.URL = omit.From(val) + } +} + +// insertOptRels creates and inserts any optional the relationships on *models.ArcgisFeatureService +// according to the relationships in the template. 
+// any required relationship should have already exist on the model +func (o *ArcgisFeatureServiceTemplate) insertOptRels(ctx context.Context, exec bob.Executor, m *models.ArcgisFeatureService) error { + var err error + + isFeatureServiceItemLayersDone, _ := arcgisFeatureServiceRelFeatureServiceItemLayersCtx.Value(ctx) + if !isFeatureServiceItemLayersDone && o.r.FeatureServiceItemLayers != nil { + ctx = arcgisFeatureServiceRelFeatureServiceItemLayersCtx.WithValue(ctx, true) + for _, r := range o.r.FeatureServiceItemLayers { + if r.o.alreadyPersisted { + m.R.FeatureServiceItemLayers = append(m.R.FeatureServiceItemLayers, r.o.Build()) + } else { + rel0, err := r.o.CreateMany(ctx, exec, r.number) + if err != nil { + return err + } + + err = m.AttachFeatureServiceItemLayers(ctx, exec, rel0...) + if err != nil { + return err + } + } + } + } + + return err +} + +// Create builds a arcgisFeatureService and inserts it into the database +// Relations objects are also inserted and placed in the .R field +func (o *ArcgisFeatureServiceTemplate) Create(ctx context.Context, exec bob.Executor) (*models.ArcgisFeatureService, error) { + var err error + opt := o.BuildSetter() + ensureCreatableArcgisFeatureService(opt) + + m, err := models.ArcgisFeatureServices.Insert(opt).One(ctx, exec) + if err != nil { + return nil, err + } + + if err := o.insertOptRels(ctx, exec, m); err != nil { + return nil, err + } + return m, err +} + +// MustCreate builds a arcgisFeatureService and inserts it into the database +// Relations objects are also inserted and placed in the .R field +// panics if an error occurs +func (o *ArcgisFeatureServiceTemplate) MustCreate(ctx context.Context, exec bob.Executor) *models.ArcgisFeatureService { + m, err := o.Create(ctx, exec) + if err != nil { + panic(err) + } + return m +} + +// CreateOrFail builds a arcgisFeatureService and inserts it into the database +// Relations objects are also inserted and placed in the .R field +// It calls `tb.Fatal(err)` on the 
test/benchmark if an error occurs +func (o *ArcgisFeatureServiceTemplate) CreateOrFail(ctx context.Context, tb testing.TB, exec bob.Executor) *models.ArcgisFeatureService { + tb.Helper() + m, err := o.Create(ctx, exec) + if err != nil { + tb.Fatal(err) + return nil + } + return m +} + +// CreateMany builds multiple arcgisFeatureServices and inserts them into the database +// Relations objects are also inserted and placed in the .R field +func (o ArcgisFeatureServiceTemplate) CreateMany(ctx context.Context, exec bob.Executor, number int) (models.ArcgisFeatureServiceSlice, error) { + var err error + m := make(models.ArcgisFeatureServiceSlice, number) + + for i := range m { + m[i], err = o.Create(ctx, exec) + if err != nil { + return nil, err + } + } + + return m, nil +} + +// MustCreateMany builds multiple arcgisFeatureServices and inserts them into the database +// Relations objects are also inserted and placed in the .R field +// panics if an error occurs +func (o ArcgisFeatureServiceTemplate) MustCreateMany(ctx context.Context, exec bob.Executor, number int) models.ArcgisFeatureServiceSlice { + m, err := o.CreateMany(ctx, exec, number) + if err != nil { + panic(err) + } + return m +} + +// CreateManyOrFail builds multiple arcgisFeatureServices and inserts them into the database +// Relations objects are also inserted and placed in the .R field +// It calls `tb.Fatal(err)` on the test/benchmark if an error occurs +func (o ArcgisFeatureServiceTemplate) CreateManyOrFail(ctx context.Context, tb testing.TB, exec bob.Executor, number int) models.ArcgisFeatureServiceSlice { + tb.Helper() + m, err := o.CreateMany(ctx, exec, number) + if err != nil { + tb.Fatal(err) + return nil + } + return m +} + +// ArcgisFeatureService has methods that act as mods for the ArcgisFeatureServiceTemplate +var ArcgisFeatureServiceMods arcgisFeatureServiceMods + +type arcgisFeatureServiceMods struct{} + +func (m arcgisFeatureServiceMods) RandomizeAllColumns(f *faker.Faker) 
ArcgisFeatureServiceMod { + return ArcgisFeatureServiceModSlice{ + ArcgisFeatureServiceMods.RandomExtent(f), + ArcgisFeatureServiceMods.RandomItemID(f), + ArcgisFeatureServiceMods.RandomSpatialReference(f), + ArcgisFeatureServiceMods.RandomURL(f), + } +} + +// Set the model columns to this value +func (m arcgisFeatureServiceMods) Extent(val string) ArcgisFeatureServiceMod { + return ArcgisFeatureServiceModFunc(func(_ context.Context, o *ArcgisFeatureServiceTemplate) { + o.Extent = func() string { return val } + }) +} + +// Set the Column from the function +func (m arcgisFeatureServiceMods) ExtentFunc(f func() string) ArcgisFeatureServiceMod { + return ArcgisFeatureServiceModFunc(func(_ context.Context, o *ArcgisFeatureServiceTemplate) { + o.Extent = f + }) +} + +// Clear any values for the column +func (m arcgisFeatureServiceMods) UnsetExtent() ArcgisFeatureServiceMod { + return ArcgisFeatureServiceModFunc(func(_ context.Context, o *ArcgisFeatureServiceTemplate) { + o.Extent = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m arcgisFeatureServiceMods) RandomExtent(f *faker.Faker) ArcgisFeatureServiceMod { + return ArcgisFeatureServiceModFunc(func(_ context.Context, o *ArcgisFeatureServiceTemplate) { + o.Extent = func() string { + return random_string(f) + } + }) +} + +// Set the model columns to this value +func (m arcgisFeatureServiceMods) ItemID(val string) ArcgisFeatureServiceMod { + return ArcgisFeatureServiceModFunc(func(_ context.Context, o *ArcgisFeatureServiceTemplate) { + o.ItemID = func() string { return val } + }) +} + +// Set the Column from the function +func (m arcgisFeatureServiceMods) ItemIDFunc(f func() string) ArcgisFeatureServiceMod { + return ArcgisFeatureServiceModFunc(func(_ context.Context, o *ArcgisFeatureServiceTemplate) { + o.ItemID = f + }) +} + +// Clear any values for the column +func (m arcgisFeatureServiceMods) UnsetItemID() ArcgisFeatureServiceMod { + 
return ArcgisFeatureServiceModFunc(func(_ context.Context, o *ArcgisFeatureServiceTemplate) { + o.ItemID = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m arcgisFeatureServiceMods) RandomItemID(f *faker.Faker) ArcgisFeatureServiceMod { + return ArcgisFeatureServiceModFunc(func(_ context.Context, o *ArcgisFeatureServiceTemplate) { + o.ItemID = func() string { + return random_string(f) + } + }) +} + +// Set the model columns to this value +func (m arcgisFeatureServiceMods) SpatialReference(val int32) ArcgisFeatureServiceMod { + return ArcgisFeatureServiceModFunc(func(_ context.Context, o *ArcgisFeatureServiceTemplate) { + o.SpatialReference = func() int32 { return val } + }) +} + +// Set the Column from the function +func (m arcgisFeatureServiceMods) SpatialReferenceFunc(f func() int32) ArcgisFeatureServiceMod { + return ArcgisFeatureServiceModFunc(func(_ context.Context, o *ArcgisFeatureServiceTemplate) { + o.SpatialReference = f + }) +} + +// Clear any values for the column +func (m arcgisFeatureServiceMods) UnsetSpatialReference() ArcgisFeatureServiceMod { + return ArcgisFeatureServiceModFunc(func(_ context.Context, o *ArcgisFeatureServiceTemplate) { + o.SpatialReference = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m arcgisFeatureServiceMods) RandomSpatialReference(f *faker.Faker) ArcgisFeatureServiceMod { + return ArcgisFeatureServiceModFunc(func(_ context.Context, o *ArcgisFeatureServiceTemplate) { + o.SpatialReference = func() int32 { + return random_int32(f) + } + }) +} + +// Set the model columns to this value +func (m arcgisFeatureServiceMods) URL(val string) ArcgisFeatureServiceMod { + return ArcgisFeatureServiceModFunc(func(_ context.Context, o *ArcgisFeatureServiceTemplate) { + o.URL = func() string { return val } + }) +} + +// Set the Column from the function +func (m 
arcgisFeatureServiceMods) URLFunc(f func() string) ArcgisFeatureServiceMod { + return ArcgisFeatureServiceModFunc(func(_ context.Context, o *ArcgisFeatureServiceTemplate) { + o.URL = f + }) +} + +// Clear any values for the column +func (m arcgisFeatureServiceMods) UnsetURL() ArcgisFeatureServiceMod { + return ArcgisFeatureServiceModFunc(func(_ context.Context, o *ArcgisFeatureServiceTemplate) { + o.URL = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m arcgisFeatureServiceMods) RandomURL(f *faker.Faker) ArcgisFeatureServiceMod { + return ArcgisFeatureServiceModFunc(func(_ context.Context, o *ArcgisFeatureServiceTemplate) { + o.URL = func() string { + return random_string(f) + } + }) +} + +func (m arcgisFeatureServiceMods) WithParentsCascading() ArcgisFeatureServiceMod { + return ArcgisFeatureServiceModFunc(func(ctx context.Context, o *ArcgisFeatureServiceTemplate) { + if isDone, _ := arcgisFeatureServiceWithParentsCascadingCtx.Value(ctx); isDone { + return + } + ctx = arcgisFeatureServiceWithParentsCascadingCtx.WithValue(ctx, true) + }) +} + +func (m arcgisFeatureServiceMods) WithFeatureServiceItemLayers(number int, related *ArcgisLayerTemplate) ArcgisFeatureServiceMod { + return ArcgisFeatureServiceModFunc(func(ctx context.Context, o *ArcgisFeatureServiceTemplate) { + o.r.FeatureServiceItemLayers = []*arcgisFeatureServiceRFeatureServiceItemLayersR{{ + number: number, + o: related, + }} + }) +} + +func (m arcgisFeatureServiceMods) WithNewFeatureServiceItemLayers(number int, mods ...ArcgisLayerMod) ArcgisFeatureServiceMod { + return ArcgisFeatureServiceModFunc(func(ctx context.Context, o *ArcgisFeatureServiceTemplate) { + related := o.f.NewArcgisLayerWithContext(ctx, mods...) 
+ m.WithFeatureServiceItemLayers(number, related).Apply(ctx, o) + }) +} + +func (m arcgisFeatureServiceMods) AddFeatureServiceItemLayers(number int, related *ArcgisLayerTemplate) ArcgisFeatureServiceMod { + return ArcgisFeatureServiceModFunc(func(ctx context.Context, o *ArcgisFeatureServiceTemplate) { + o.r.FeatureServiceItemLayers = append(o.r.FeatureServiceItemLayers, &arcgisFeatureServiceRFeatureServiceItemLayersR{ + number: number, + o: related, + }) + }) +} + +func (m arcgisFeatureServiceMods) AddNewFeatureServiceItemLayers(number int, mods ...ArcgisLayerMod) ArcgisFeatureServiceMod { + return ArcgisFeatureServiceModFunc(func(ctx context.Context, o *ArcgisFeatureServiceTemplate) { + related := o.f.NewArcgisLayerWithContext(ctx, mods...) + m.AddFeatureServiceItemLayers(number, related).Apply(ctx, o) + }) +} + +func (m arcgisFeatureServiceMods) AddExistingFeatureServiceItemLayers(existingModels ...*models.ArcgisLayer) ArcgisFeatureServiceMod { + return ArcgisFeatureServiceModFunc(func(ctx context.Context, o *ArcgisFeatureServiceTemplate) { + for _, em := range existingModels { + o.r.FeatureServiceItemLayers = append(o.r.FeatureServiceItemLayers, &arcgisFeatureServiceRFeatureServiceItemLayersR{ + o: o.f.FromExistingArcgisLayer(em), + }) + } + }) +} + +func (m arcgisFeatureServiceMods) WithoutFeatureServiceItemLayers() ArcgisFeatureServiceMod { + return ArcgisFeatureServiceModFunc(func(ctx context.Context, o *ArcgisFeatureServiceTemplate) { + o.r.FeatureServiceItemLayers = nil + }) +} diff --git a/db/factory/arcgis.layer.bob.go b/db/factory/arcgis.layer.bob.go new file mode 100644 index 00000000..33eb4d0f --- /dev/null +++ b/db/factory/arcgis.layer.bob.go @@ -0,0 +1,500 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package factory + +import ( + "context" + "testing" + + "github.com/Gleipnir-Technology/bob" + models "github.com/Gleipnir-Technology/nidus-sync/db/models" + "github.com/aarondl/opt/omit" + "github.com/jaswdr/faker/v2" +) + +type ArcgisLayerMod interface { + Apply(context.Context, *ArcgisLayerTemplate) +} + +type ArcgisLayerModFunc func(context.Context, *ArcgisLayerTemplate) + +func (f ArcgisLayerModFunc) Apply(ctx context.Context, n *ArcgisLayerTemplate) { + f(ctx, n) +} + +type ArcgisLayerModSlice []ArcgisLayerMod + +func (mods ArcgisLayerModSlice) Apply(ctx context.Context, n *ArcgisLayerTemplate) { + for _, f := range mods { + f.Apply(ctx, n) + } +} + +// ArcgisLayerTemplate is an object representing the database table. +// all columns are optional and should be set by mods +type ArcgisLayerTemplate struct { + Extent func() string + FeatureServiceItemID func() string + Index func() int32 + + r arcgisLayerR + f *Factory + + alreadyPersisted bool +} + +type arcgisLayerR struct { + FeatureServiceItemFeatureService *arcgisLayerRFeatureServiceItemFeatureServiceR + LayerFields []*arcgisLayerRLayerFieldsR +} + +type arcgisLayerRFeatureServiceItemFeatureServiceR struct { + o *ArcgisFeatureServiceTemplate +} +type arcgisLayerRLayerFieldsR struct { + number int + o *ArcgisLayerFieldTemplate +} + +// Apply mods to the ArcgisLayerTemplate +func (o *ArcgisLayerTemplate) Apply(ctx context.Context, mods ...ArcgisLayerMod) { + for _, mod := range mods { + mod.Apply(ctx, o) + } +} + +// setModelRels creates and sets the relationships on *models.ArcgisLayer +// according to the relationships in the template. 
Nothing is inserted into the db +func (t ArcgisLayerTemplate) setModelRels(o *models.ArcgisLayer) { + if t.r.FeatureServiceItemFeatureService != nil { + rel := t.r.FeatureServiceItemFeatureService.o.Build() + rel.R.FeatureServiceItemLayers = append(rel.R.FeatureServiceItemLayers, o) + o.FeatureServiceItemID = rel.ItemID // h2 + o.R.FeatureServiceItemFeatureService = rel + } + + if t.r.LayerFields != nil { + rel := models.ArcgisLayerFieldSlice{} + for _, r := range t.r.LayerFields { + related := r.o.BuildMany(r.number) + for _, rel := range related { + rel.LayerFeatureServiceItemID = o.FeatureServiceItemID // h2 + rel.LayerIndex = o.Index // h2 + rel.R.Layer = o + } + rel = append(rel, related...) + } + o.R.LayerFields = rel + } +} + +// BuildSetter returns an *models.ArcgisLayerSetter +// this does nothing with the relationship templates +func (o ArcgisLayerTemplate) BuildSetter() *models.ArcgisLayerSetter { + m := &models.ArcgisLayerSetter{} + + if o.Extent != nil { + val := o.Extent() + m.Extent = omit.From(val) + } + if o.FeatureServiceItemID != nil { + val := o.FeatureServiceItemID() + m.FeatureServiceItemID = omit.From(val) + } + if o.Index != nil { + val := o.Index() + m.Index = omit.From(val) + } + + return m +} + +// BuildManySetter returns an []*models.ArcgisLayerSetter +// this does nothing with the relationship templates +func (o ArcgisLayerTemplate) BuildManySetter(number int) []*models.ArcgisLayerSetter { + m := make([]*models.ArcgisLayerSetter, number) + + for i := range m { + m[i] = o.BuildSetter() + } + + return m +} + +// Build returns an *models.ArcgisLayer +// Related objects are also created and placed in the .R field +// NOTE: Objects are not inserted into the database. 
Use ArcgisLayerTemplate.Create +func (o ArcgisLayerTemplate) Build() *models.ArcgisLayer { + m := &models.ArcgisLayer{} + + if o.Extent != nil { + m.Extent = o.Extent() + } + if o.FeatureServiceItemID != nil { + m.FeatureServiceItemID = o.FeatureServiceItemID() + } + if o.Index != nil { + m.Index = o.Index() + } + + o.setModelRels(m) + + return m +} + +// BuildMany returns an models.ArcgisLayerSlice +// Related objects are also created and placed in the .R field +// NOTE: Objects are not inserted into the database. Use ArcgisLayerTemplate.CreateMany +func (o ArcgisLayerTemplate) BuildMany(number int) models.ArcgisLayerSlice { + m := make(models.ArcgisLayerSlice, number) + + for i := range m { + m[i] = o.Build() + } + + return m +} + +func ensureCreatableArcgisLayer(m *models.ArcgisLayerSetter) { + if !(m.Extent.IsValue()) { + val := random_string(nil) + m.Extent = omit.From(val) + } + if !(m.FeatureServiceItemID.IsValue()) { + val := random_string(nil) + m.FeatureServiceItemID = omit.From(val) + } + if !(m.Index.IsValue()) { + val := random_int32(nil) + m.Index = omit.From(val) + } +} + +// insertOptRels creates and inserts any optional the relationships on *models.ArcgisLayer +// according to the relationships in the template. +// any required relationship should have already exist on the model +func (o *ArcgisLayerTemplate) insertOptRels(ctx context.Context, exec bob.Executor, m *models.ArcgisLayer) error { + var err error + + isLayerFieldsDone, _ := arcgisLayerRelLayerFieldsCtx.Value(ctx) + if !isLayerFieldsDone && o.r.LayerFields != nil { + ctx = arcgisLayerRelLayerFieldsCtx.WithValue(ctx, true) + for _, r := range o.r.LayerFields { + if r.o.alreadyPersisted { + m.R.LayerFields = append(m.R.LayerFields, r.o.Build()) + } else { + rel1, err := r.o.CreateMany(ctx, exec, r.number) + if err != nil { + return err + } + + err = m.AttachLayerFields(ctx, exec, rel1...) 
+ if err != nil { + return err + } + } + } + } + + return err +} + +// Create builds a arcgisLayer and inserts it into the database +// Relations objects are also inserted and placed in the .R field +func (o *ArcgisLayerTemplate) Create(ctx context.Context, exec bob.Executor) (*models.ArcgisLayer, error) { + var err error + opt := o.BuildSetter() + ensureCreatableArcgisLayer(opt) + + if o.r.FeatureServiceItemFeatureService == nil { + ArcgisLayerMods.WithNewFeatureServiceItemFeatureService().Apply(ctx, o) + } + + var rel0 *models.ArcgisFeatureService + + if o.r.FeatureServiceItemFeatureService.o.alreadyPersisted { + rel0 = o.r.FeatureServiceItemFeatureService.o.Build() + } else { + rel0, err = o.r.FeatureServiceItemFeatureService.o.Create(ctx, exec) + if err != nil { + return nil, err + } + } + + opt.FeatureServiceItemID = omit.From(rel0.ItemID) + + m, err := models.ArcgisLayers.Insert(opt).One(ctx, exec) + if err != nil { + return nil, err + } + + m.R.FeatureServiceItemFeatureService = rel0 + + if err := o.insertOptRels(ctx, exec, m); err != nil { + return nil, err + } + return m, err +} + +// MustCreate builds a arcgisLayer and inserts it into the database +// Relations objects are also inserted and placed in the .R field +// panics if an error occurs +func (o *ArcgisLayerTemplate) MustCreate(ctx context.Context, exec bob.Executor) *models.ArcgisLayer { + m, err := o.Create(ctx, exec) + if err != nil { + panic(err) + } + return m +} + +// CreateOrFail builds a arcgisLayer and inserts it into the database +// Relations objects are also inserted and placed in the .R field +// It calls `tb.Fatal(err)` on the test/benchmark if an error occurs +func (o *ArcgisLayerTemplate) CreateOrFail(ctx context.Context, tb testing.TB, exec bob.Executor) *models.ArcgisLayer { + tb.Helper() + m, err := o.Create(ctx, exec) + if err != nil { + tb.Fatal(err) + return nil + } + return m +} + +// CreateMany builds multiple arcgisLayers and inserts them into the database +// Relations 
objects are also inserted and placed in the .R field +func (o ArcgisLayerTemplate) CreateMany(ctx context.Context, exec bob.Executor, number int) (models.ArcgisLayerSlice, error) { + var err error + m := make(models.ArcgisLayerSlice, number) + + for i := range m { + m[i], err = o.Create(ctx, exec) + if err != nil { + return nil, err + } + } + + return m, nil +} + +// MustCreateMany builds multiple arcgisLayers and inserts them into the database +// Relations objects are also inserted and placed in the .R field +// panics if an error occurs +func (o ArcgisLayerTemplate) MustCreateMany(ctx context.Context, exec bob.Executor, number int) models.ArcgisLayerSlice { + m, err := o.CreateMany(ctx, exec, number) + if err != nil { + panic(err) + } + return m +} + +// CreateManyOrFail builds multiple arcgisLayers and inserts them into the database +// Relations objects are also inserted and placed in the .R field +// It calls `tb.Fatal(err)` on the test/benchmark if an error occurs +func (o ArcgisLayerTemplate) CreateManyOrFail(ctx context.Context, tb testing.TB, exec bob.Executor, number int) models.ArcgisLayerSlice { + tb.Helper() + m, err := o.CreateMany(ctx, exec, number) + if err != nil { + tb.Fatal(err) + return nil + } + return m +} + +// ArcgisLayer has methods that act as mods for the ArcgisLayerTemplate +var ArcgisLayerMods arcgisLayerMods + +type arcgisLayerMods struct{} + +func (m arcgisLayerMods) RandomizeAllColumns(f *faker.Faker) ArcgisLayerMod { + return ArcgisLayerModSlice{ + ArcgisLayerMods.RandomExtent(f), + ArcgisLayerMods.RandomFeatureServiceItemID(f), + ArcgisLayerMods.RandomIndex(f), + } +} + +// Set the model columns to this value +func (m arcgisLayerMods) Extent(val string) ArcgisLayerMod { + return ArcgisLayerModFunc(func(_ context.Context, o *ArcgisLayerTemplate) { + o.Extent = func() string { return val } + }) +} + +// Set the Column from the function +func (m arcgisLayerMods) ExtentFunc(f func() string) ArcgisLayerMod { + return 
ArcgisLayerModFunc(func(_ context.Context, o *ArcgisLayerTemplate) { + o.Extent = f + }) +} + +// Clear any values for the column +func (m arcgisLayerMods) UnsetExtent() ArcgisLayerMod { + return ArcgisLayerModFunc(func(_ context.Context, o *ArcgisLayerTemplate) { + o.Extent = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m arcgisLayerMods) RandomExtent(f *faker.Faker) ArcgisLayerMod { + return ArcgisLayerModFunc(func(_ context.Context, o *ArcgisLayerTemplate) { + o.Extent = func() string { + return random_string(f) + } + }) +} + +// Set the model columns to this value +func (m arcgisLayerMods) FeatureServiceItemID(val string) ArcgisLayerMod { + return ArcgisLayerModFunc(func(_ context.Context, o *ArcgisLayerTemplate) { + o.FeatureServiceItemID = func() string { return val } + }) +} + +// Set the Column from the function +func (m arcgisLayerMods) FeatureServiceItemIDFunc(f func() string) ArcgisLayerMod { + return ArcgisLayerModFunc(func(_ context.Context, o *ArcgisLayerTemplate) { + o.FeatureServiceItemID = f + }) +} + +// Clear any values for the column +func (m arcgisLayerMods) UnsetFeatureServiceItemID() ArcgisLayerMod { + return ArcgisLayerModFunc(func(_ context.Context, o *ArcgisLayerTemplate) { + o.FeatureServiceItemID = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m arcgisLayerMods) RandomFeatureServiceItemID(f *faker.Faker) ArcgisLayerMod { + return ArcgisLayerModFunc(func(_ context.Context, o *ArcgisLayerTemplate) { + o.FeatureServiceItemID = func() string { + return random_string(f) + } + }) +} + +// Set the model columns to this value +func (m arcgisLayerMods) Index(val int32) ArcgisLayerMod { + return ArcgisLayerModFunc(func(_ context.Context, o *ArcgisLayerTemplate) { + o.Index = func() int32 { return val } + }) +} + +// Set the Column from the function +func (m arcgisLayerMods) 
IndexFunc(f func() int32) ArcgisLayerMod { + return ArcgisLayerModFunc(func(_ context.Context, o *ArcgisLayerTemplate) { + o.Index = f + }) +} + +// Clear any values for the column +func (m arcgisLayerMods) UnsetIndex() ArcgisLayerMod { + return ArcgisLayerModFunc(func(_ context.Context, o *ArcgisLayerTemplate) { + o.Index = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m arcgisLayerMods) RandomIndex(f *faker.Faker) ArcgisLayerMod { + return ArcgisLayerModFunc(func(_ context.Context, o *ArcgisLayerTemplate) { + o.Index = func() int32 { + return random_int32(f) + } + }) +} + +func (m arcgisLayerMods) WithParentsCascading() ArcgisLayerMod { + return ArcgisLayerModFunc(func(ctx context.Context, o *ArcgisLayerTemplate) { + if isDone, _ := arcgisLayerWithParentsCascadingCtx.Value(ctx); isDone { + return + } + ctx = arcgisLayerWithParentsCascadingCtx.WithValue(ctx, true) + { + + related := o.f.NewArcgisFeatureServiceWithContext(ctx, ArcgisFeatureServiceMods.WithParentsCascading()) + m.WithFeatureServiceItemFeatureService(related).Apply(ctx, o) + } + }) +} + +func (m arcgisLayerMods) WithFeatureServiceItemFeatureService(rel *ArcgisFeatureServiceTemplate) ArcgisLayerMod { + return ArcgisLayerModFunc(func(ctx context.Context, o *ArcgisLayerTemplate) { + o.r.FeatureServiceItemFeatureService = &arcgisLayerRFeatureServiceItemFeatureServiceR{ + o: rel, + } + }) +} + +func (m arcgisLayerMods) WithNewFeatureServiceItemFeatureService(mods ...ArcgisFeatureServiceMod) ArcgisLayerMod { + return ArcgisLayerModFunc(func(ctx context.Context, o *ArcgisLayerTemplate) { + related := o.f.NewArcgisFeatureServiceWithContext(ctx, mods...) 
+ + m.WithFeatureServiceItemFeatureService(related).Apply(ctx, o) + }) +} + +func (m arcgisLayerMods) WithExistingFeatureServiceItemFeatureService(em *models.ArcgisFeatureService) ArcgisLayerMod { + return ArcgisLayerModFunc(func(ctx context.Context, o *ArcgisLayerTemplate) { + o.r.FeatureServiceItemFeatureService = &arcgisLayerRFeatureServiceItemFeatureServiceR{ + o: o.f.FromExistingArcgisFeatureService(em), + } + }) +} + +func (m arcgisLayerMods) WithoutFeatureServiceItemFeatureService() ArcgisLayerMod { + return ArcgisLayerModFunc(func(ctx context.Context, o *ArcgisLayerTemplate) { + o.r.FeatureServiceItemFeatureService = nil + }) +} + +func (m arcgisLayerMods) WithLayerFields(number int, related *ArcgisLayerFieldTemplate) ArcgisLayerMod { + return ArcgisLayerModFunc(func(ctx context.Context, o *ArcgisLayerTemplate) { + o.r.LayerFields = []*arcgisLayerRLayerFieldsR{{ + number: number, + o: related, + }} + }) +} + +func (m arcgisLayerMods) WithNewLayerFields(number int, mods ...ArcgisLayerFieldMod) ArcgisLayerMod { + return ArcgisLayerModFunc(func(ctx context.Context, o *ArcgisLayerTemplate) { + related := o.f.NewArcgisLayerFieldWithContext(ctx, mods...) + m.WithLayerFields(number, related).Apply(ctx, o) + }) +} + +func (m arcgisLayerMods) AddLayerFields(number int, related *ArcgisLayerFieldTemplate) ArcgisLayerMod { + return ArcgisLayerModFunc(func(ctx context.Context, o *ArcgisLayerTemplate) { + o.r.LayerFields = append(o.r.LayerFields, &arcgisLayerRLayerFieldsR{ + number: number, + o: related, + }) + }) +} + +func (m arcgisLayerMods) AddNewLayerFields(number int, mods ...ArcgisLayerFieldMod) ArcgisLayerMod { + return ArcgisLayerModFunc(func(ctx context.Context, o *ArcgisLayerTemplate) { + related := o.f.NewArcgisLayerFieldWithContext(ctx, mods...) 
+ m.AddLayerFields(number, related).Apply(ctx, o) + }) +} + +func (m arcgisLayerMods) AddExistingLayerFields(existingModels ...*models.ArcgisLayerField) ArcgisLayerMod { + return ArcgisLayerModFunc(func(ctx context.Context, o *ArcgisLayerTemplate) { + for _, em := range existingModels { + o.r.LayerFields = append(o.r.LayerFields, &arcgisLayerRLayerFieldsR{ + o: o.f.FromExistingArcgisLayerField(em), + }) + } + }) +} + +func (m arcgisLayerMods) WithoutLayerFields() ArcgisLayerMod { + return ArcgisLayerModFunc(func(ctx context.Context, o *ArcgisLayerTemplate) { + o.r.LayerFields = nil + }) +} diff --git a/db/factory/arcgis.layer_field.bob.go b/db/factory/arcgis.layer_field.bob.go new file mode 100644 index 00000000..5faea2ce --- /dev/null +++ b/db/factory/arcgis.layer_field.bob.go @@ -0,0 +1,636 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package factory + +import ( + "context" + "testing" + + "github.com/Gleipnir-Technology/bob" + enums "github.com/Gleipnir-Technology/nidus-sync/db/enums" + models "github.com/Gleipnir-Technology/nidus-sync/db/models" + "github.com/aarondl/opt/omit" + "github.com/jaswdr/faker/v2" +) + +type ArcgisLayerFieldMod interface { + Apply(context.Context, *ArcgisLayerFieldTemplate) +} + +type ArcgisLayerFieldModFunc func(context.Context, *ArcgisLayerFieldTemplate) + +func (f ArcgisLayerFieldModFunc) Apply(ctx context.Context, n *ArcgisLayerFieldTemplate) { + f(ctx, n) +} + +type ArcgisLayerFieldModSlice []ArcgisLayerFieldMod + +func (mods ArcgisLayerFieldModSlice) Apply(ctx context.Context, n *ArcgisLayerFieldTemplate) { + for _, f := range mods { + f.Apply(ctx, n) + } +} + +// ArcgisLayerFieldTemplate is an object representing the database table. 
+// all columns are optional and should be set by mods +type ArcgisLayerFieldTemplate struct { + LayerFeatureServiceItemID func() string + LayerIndex func() int32 + Name func() string + Type func() enums.ArcgisFieldtype + + r arcgisLayerFieldR + f *Factory + + alreadyPersisted bool +} + +type arcgisLayerFieldR struct { + AddressMappings []*arcgisLayerFieldRAddressMappingsR + Layer *arcgisLayerFieldRLayerR + ParcelMappings []*arcgisLayerFieldRParcelMappingsR +} + +type arcgisLayerFieldRAddressMappingsR struct { + number int + o *ArcgisAddressMappingTemplate +} +type arcgisLayerFieldRLayerR struct { + o *ArcgisLayerTemplate +} +type arcgisLayerFieldRParcelMappingsR struct { + number int + o *ArcgisParcelMappingTemplate +} + +// Apply mods to the ArcgisLayerFieldTemplate +func (o *ArcgisLayerFieldTemplate) Apply(ctx context.Context, mods ...ArcgisLayerFieldMod) { + for _, mod := range mods { + mod.Apply(ctx, o) + } +} + +// setModelRels creates and sets the relationships on *models.ArcgisLayerField +// according to the relationships in the template. Nothing is inserted into the db +func (t ArcgisLayerFieldTemplate) setModelRels(o *models.ArcgisLayerField) { + if t.r.AddressMappings != nil { + rel := models.ArcgisAddressMappingSlice{} + for _, r := range t.r.AddressMappings { + related := r.o.BuildMany(r.number) + for _, rel := range related { + rel.LayerFeatureServiceItemID = o.LayerFeatureServiceItemID // h2 + rel.LayerIndex = o.LayerIndex // h2 + rel.LayerFieldName = o.Name // h2 + rel.R.LayerField = o + } + rel = append(rel, related...) 
+ } + o.R.AddressMappings = rel + } + + if t.r.Layer != nil { + rel := t.r.Layer.o.Build() + rel.R.LayerFields = append(rel.R.LayerFields, o) + o.LayerFeatureServiceItemID = rel.FeatureServiceItemID // h2 + o.LayerIndex = rel.Index // h2 + o.R.Layer = rel + } + + if t.r.ParcelMappings != nil { + rel := models.ArcgisParcelMappingSlice{} + for _, r := range t.r.ParcelMappings { + related := r.o.BuildMany(r.number) + for _, rel := range related { + rel.LayerFeatureServiceItemID = o.LayerFeatureServiceItemID // h2 + rel.LayerIndex = o.LayerIndex // h2 + rel.LayerFieldName = o.Name // h2 + rel.R.LayerField = o + } + rel = append(rel, related...) + } + o.R.ParcelMappings = rel + } +} + +// BuildSetter returns an *models.ArcgisLayerFieldSetter +// this does nothing with the relationship templates +func (o ArcgisLayerFieldTemplate) BuildSetter() *models.ArcgisLayerFieldSetter { + m := &models.ArcgisLayerFieldSetter{} + + if o.LayerFeatureServiceItemID != nil { + val := o.LayerFeatureServiceItemID() + m.LayerFeatureServiceItemID = omit.From(val) + } + if o.LayerIndex != nil { + val := o.LayerIndex() + m.LayerIndex = omit.From(val) + } + if o.Name != nil { + val := o.Name() + m.Name = omit.From(val) + } + if o.Type != nil { + val := o.Type() + m.Type = omit.From(val) + } + + return m +} + +// BuildManySetter returns an []*models.ArcgisLayerFieldSetter +// this does nothing with the relationship templates +func (o ArcgisLayerFieldTemplate) BuildManySetter(number int) []*models.ArcgisLayerFieldSetter { + m := make([]*models.ArcgisLayerFieldSetter, number) + + for i := range m { + m[i] = o.BuildSetter() + } + + return m +} + +// Build returns an *models.ArcgisLayerField +// Related objects are also created and placed in the .R field +// NOTE: Objects are not inserted into the database. 
Use ArcgisLayerFieldTemplate.Create +func (o ArcgisLayerFieldTemplate) Build() *models.ArcgisLayerField { + m := &models.ArcgisLayerField{} + + if o.LayerFeatureServiceItemID != nil { + m.LayerFeatureServiceItemID = o.LayerFeatureServiceItemID() + } + if o.LayerIndex != nil { + m.LayerIndex = o.LayerIndex() + } + if o.Name != nil { + m.Name = o.Name() + } + if o.Type != nil { + m.Type = o.Type() + } + + o.setModelRels(m) + + return m +} + +// BuildMany returns an models.ArcgisLayerFieldSlice +// Related objects are also created and placed in the .R field +// NOTE: Objects are not inserted into the database. Use ArcgisLayerFieldTemplate.CreateMany +func (o ArcgisLayerFieldTemplate) BuildMany(number int) models.ArcgisLayerFieldSlice { + m := make(models.ArcgisLayerFieldSlice, number) + + for i := range m { + m[i] = o.Build() + } + + return m +} + +func ensureCreatableArcgisLayerField(m *models.ArcgisLayerFieldSetter) { + if !(m.LayerFeatureServiceItemID.IsValue()) { + val := random_string(nil) + m.LayerFeatureServiceItemID = omit.From(val) + } + if !(m.LayerIndex.IsValue()) { + val := random_int32(nil) + m.LayerIndex = omit.From(val) + } + if !(m.Name.IsValue()) { + val := random_string(nil) + m.Name = omit.From(val) + } + if !(m.Type.IsValue()) { + val := random_enums_ArcgisFieldtype(nil) + m.Type = omit.From(val) + } +} + +// insertOptRels creates and inserts any optional the relationships on *models.ArcgisLayerField +// according to the relationships in the template. 
+// any required relationship should have already exist on the model +func (o *ArcgisLayerFieldTemplate) insertOptRels(ctx context.Context, exec bob.Executor, m *models.ArcgisLayerField) error { + var err error + + isAddressMappingsDone, _ := arcgisLayerFieldRelAddressMappingsCtx.Value(ctx) + if !isAddressMappingsDone && o.r.AddressMappings != nil { + ctx = arcgisLayerFieldRelAddressMappingsCtx.WithValue(ctx, true) + for _, r := range o.r.AddressMappings { + if r.o.alreadyPersisted { + m.R.AddressMappings = append(m.R.AddressMappings, r.o.Build()) + } else { + rel0, err := r.o.CreateMany(ctx, exec, r.number) + if err != nil { + return err + } + + err = m.AttachAddressMappings(ctx, exec, rel0...) + if err != nil { + return err + } + } + } + } + + isParcelMappingsDone, _ := arcgisLayerFieldRelParcelMappingsCtx.Value(ctx) + if !isParcelMappingsDone && o.r.ParcelMappings != nil { + ctx = arcgisLayerFieldRelParcelMappingsCtx.WithValue(ctx, true) + for _, r := range o.r.ParcelMappings { + if r.o.alreadyPersisted { + m.R.ParcelMappings = append(m.R.ParcelMappings, r.o.Build()) + } else { + rel2, err := r.o.CreateMany(ctx, exec, r.number) + if err != nil { + return err + } + + err = m.AttachParcelMappings(ctx, exec, rel2...) 
+ if err != nil { + return err + } + } + } + } + + return err +} + +// Create builds a arcgisLayerField and inserts it into the database +// Relations objects are also inserted and placed in the .R field +func (o *ArcgisLayerFieldTemplate) Create(ctx context.Context, exec bob.Executor) (*models.ArcgisLayerField, error) { + var err error + opt := o.BuildSetter() + ensureCreatableArcgisLayerField(opt) + + if o.r.Layer == nil { + ArcgisLayerFieldMods.WithNewLayer().Apply(ctx, o) + } + + var rel1 *models.ArcgisLayer + + if o.r.Layer.o.alreadyPersisted { + rel1 = o.r.Layer.o.Build() + } else { + rel1, err = o.r.Layer.o.Create(ctx, exec) + if err != nil { + return nil, err + } + } + + opt.LayerFeatureServiceItemID = omit.From(rel1.FeatureServiceItemID) + opt.LayerIndex = omit.From(rel1.Index) + + m, err := models.ArcgisLayerFields.Insert(opt).One(ctx, exec) + if err != nil { + return nil, err + } + + m.R.Layer = rel1 + + if err := o.insertOptRels(ctx, exec, m); err != nil { + return nil, err + } + return m, err +} + +// MustCreate builds a arcgisLayerField and inserts it into the database +// Relations objects are also inserted and placed in the .R field +// panics if an error occurs +func (o *ArcgisLayerFieldTemplate) MustCreate(ctx context.Context, exec bob.Executor) *models.ArcgisLayerField { + m, err := o.Create(ctx, exec) + if err != nil { + panic(err) + } + return m +} + +// CreateOrFail builds a arcgisLayerField and inserts it into the database +// Relations objects are also inserted and placed in the .R field +// It calls `tb.Fatal(err)` on the test/benchmark if an error occurs +func (o *ArcgisLayerFieldTemplate) CreateOrFail(ctx context.Context, tb testing.TB, exec bob.Executor) *models.ArcgisLayerField { + tb.Helper() + m, err := o.Create(ctx, exec) + if err != nil { + tb.Fatal(err) + return nil + } + return m +} + +// CreateMany builds multiple arcgisLayerFields and inserts them into the database +// Relations objects are also inserted and placed in the .R 
field +func (o ArcgisLayerFieldTemplate) CreateMany(ctx context.Context, exec bob.Executor, number int) (models.ArcgisLayerFieldSlice, error) { + var err error + m := make(models.ArcgisLayerFieldSlice, number) + + for i := range m { + m[i], err = o.Create(ctx, exec) + if err != nil { + return nil, err + } + } + + return m, nil +} + +// MustCreateMany builds multiple arcgisLayerFields and inserts them into the database +// Relations objects are also inserted and placed in the .R field +// panics if an error occurs +func (o ArcgisLayerFieldTemplate) MustCreateMany(ctx context.Context, exec bob.Executor, number int) models.ArcgisLayerFieldSlice { + m, err := o.CreateMany(ctx, exec, number) + if err != nil { + panic(err) + } + return m +} + +// CreateManyOrFail builds multiple arcgisLayerFields and inserts them into the database +// Relations objects are also inserted and placed in the .R field +// It calls `tb.Fatal(err)` on the test/benchmark if an error occurs +func (o ArcgisLayerFieldTemplate) CreateManyOrFail(ctx context.Context, tb testing.TB, exec bob.Executor, number int) models.ArcgisLayerFieldSlice { + tb.Helper() + m, err := o.CreateMany(ctx, exec, number) + if err != nil { + tb.Fatal(err) + return nil + } + return m +} + +// ArcgisLayerField has methods that act as mods for the ArcgisLayerFieldTemplate +var ArcgisLayerFieldMods arcgisLayerFieldMods + +type arcgisLayerFieldMods struct{} + +func (m arcgisLayerFieldMods) RandomizeAllColumns(f *faker.Faker) ArcgisLayerFieldMod { + return ArcgisLayerFieldModSlice{ + ArcgisLayerFieldMods.RandomLayerFeatureServiceItemID(f), + ArcgisLayerFieldMods.RandomLayerIndex(f), + ArcgisLayerFieldMods.RandomName(f), + ArcgisLayerFieldMods.RandomType(f), + } +} + +// Set the model columns to this value +func (m arcgisLayerFieldMods) LayerFeatureServiceItemID(val string) ArcgisLayerFieldMod { + return ArcgisLayerFieldModFunc(func(_ context.Context, o *ArcgisLayerFieldTemplate) { + o.LayerFeatureServiceItemID = func() string { 
return val } + }) +} + +// Set the Column from the function +func (m arcgisLayerFieldMods) LayerFeatureServiceItemIDFunc(f func() string) ArcgisLayerFieldMod { + return ArcgisLayerFieldModFunc(func(_ context.Context, o *ArcgisLayerFieldTemplate) { + o.LayerFeatureServiceItemID = f + }) +} + +// Clear any values for the column +func (m arcgisLayerFieldMods) UnsetLayerFeatureServiceItemID() ArcgisLayerFieldMod { + return ArcgisLayerFieldModFunc(func(_ context.Context, o *ArcgisLayerFieldTemplate) { + o.LayerFeatureServiceItemID = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m arcgisLayerFieldMods) RandomLayerFeatureServiceItemID(f *faker.Faker) ArcgisLayerFieldMod { + return ArcgisLayerFieldModFunc(func(_ context.Context, o *ArcgisLayerFieldTemplate) { + o.LayerFeatureServiceItemID = func() string { + return random_string(f) + } + }) +} + +// Set the model columns to this value +func (m arcgisLayerFieldMods) LayerIndex(val int32) ArcgisLayerFieldMod { + return ArcgisLayerFieldModFunc(func(_ context.Context, o *ArcgisLayerFieldTemplate) { + o.LayerIndex = func() int32 { return val } + }) +} + +// Set the Column from the function +func (m arcgisLayerFieldMods) LayerIndexFunc(f func() int32) ArcgisLayerFieldMod { + return ArcgisLayerFieldModFunc(func(_ context.Context, o *ArcgisLayerFieldTemplate) { + o.LayerIndex = f + }) +} + +// Clear any values for the column +func (m arcgisLayerFieldMods) UnsetLayerIndex() ArcgisLayerFieldMod { + return ArcgisLayerFieldModFunc(func(_ context.Context, o *ArcgisLayerFieldTemplate) { + o.LayerIndex = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m arcgisLayerFieldMods) RandomLayerIndex(f *faker.Faker) ArcgisLayerFieldMod { + return ArcgisLayerFieldModFunc(func(_ context.Context, o *ArcgisLayerFieldTemplate) { + o.LayerIndex = func() int32 { + return random_int32(f) 
+ } + }) +} + +// Set the model columns to this value +func (m arcgisLayerFieldMods) Name(val string) ArcgisLayerFieldMod { + return ArcgisLayerFieldModFunc(func(_ context.Context, o *ArcgisLayerFieldTemplate) { + o.Name = func() string { return val } + }) +} + +// Set the Column from the function +func (m arcgisLayerFieldMods) NameFunc(f func() string) ArcgisLayerFieldMod { + return ArcgisLayerFieldModFunc(func(_ context.Context, o *ArcgisLayerFieldTemplate) { + o.Name = f + }) +} + +// Clear any values for the column +func (m arcgisLayerFieldMods) UnsetName() ArcgisLayerFieldMod { + return ArcgisLayerFieldModFunc(func(_ context.Context, o *ArcgisLayerFieldTemplate) { + o.Name = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m arcgisLayerFieldMods) RandomName(f *faker.Faker) ArcgisLayerFieldMod { + return ArcgisLayerFieldModFunc(func(_ context.Context, o *ArcgisLayerFieldTemplate) { + o.Name = func() string { + return random_string(f) + } + }) +} + +// Set the model columns to this value +func (m arcgisLayerFieldMods) Type(val enums.ArcgisFieldtype) ArcgisLayerFieldMod { + return ArcgisLayerFieldModFunc(func(_ context.Context, o *ArcgisLayerFieldTemplate) { + o.Type = func() enums.ArcgisFieldtype { return val } + }) +} + +// Set the Column from the function +func (m arcgisLayerFieldMods) TypeFunc(f func() enums.ArcgisFieldtype) ArcgisLayerFieldMod { + return ArcgisLayerFieldModFunc(func(_ context.Context, o *ArcgisLayerFieldTemplate) { + o.Type = f + }) +} + +// Clear any values for the column +func (m arcgisLayerFieldMods) UnsetType() ArcgisLayerFieldMod { + return ArcgisLayerFieldModFunc(func(_ context.Context, o *ArcgisLayerFieldTemplate) { + o.Type = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m arcgisLayerFieldMods) RandomType(f *faker.Faker) ArcgisLayerFieldMod { + return 
ArcgisLayerFieldModFunc(func(_ context.Context, o *ArcgisLayerFieldTemplate) { + o.Type = func() enums.ArcgisFieldtype { + return random_enums_ArcgisFieldtype(f) + } + }) +} + +func (m arcgisLayerFieldMods) WithParentsCascading() ArcgisLayerFieldMod { + return ArcgisLayerFieldModFunc(func(ctx context.Context, o *ArcgisLayerFieldTemplate) { + if isDone, _ := arcgisLayerFieldWithParentsCascadingCtx.Value(ctx); isDone { + return + } + ctx = arcgisLayerFieldWithParentsCascadingCtx.WithValue(ctx, true) + { + + related := o.f.NewArcgisLayerWithContext(ctx, ArcgisLayerMods.WithParentsCascading()) + m.WithLayer(related).Apply(ctx, o) + } + }) +} + +func (m arcgisLayerFieldMods) WithLayer(rel *ArcgisLayerTemplate) ArcgisLayerFieldMod { + return ArcgisLayerFieldModFunc(func(ctx context.Context, o *ArcgisLayerFieldTemplate) { + o.r.Layer = &arcgisLayerFieldRLayerR{ + o: rel, + } + }) +} + +func (m arcgisLayerFieldMods) WithNewLayer(mods ...ArcgisLayerMod) ArcgisLayerFieldMod { + return ArcgisLayerFieldModFunc(func(ctx context.Context, o *ArcgisLayerFieldTemplate) { + related := o.f.NewArcgisLayerWithContext(ctx, mods...) 
+ + m.WithLayer(related).Apply(ctx, o) + }) +} + +func (m arcgisLayerFieldMods) WithExistingLayer(em *models.ArcgisLayer) ArcgisLayerFieldMod { + return ArcgisLayerFieldModFunc(func(ctx context.Context, o *ArcgisLayerFieldTemplate) { + o.r.Layer = &arcgisLayerFieldRLayerR{ + o: o.f.FromExistingArcgisLayer(em), + } + }) +} + +func (m arcgisLayerFieldMods) WithoutLayer() ArcgisLayerFieldMod { + return ArcgisLayerFieldModFunc(func(ctx context.Context, o *ArcgisLayerFieldTemplate) { + o.r.Layer = nil + }) +} + +func (m arcgisLayerFieldMods) WithAddressMappings(number int, related *ArcgisAddressMappingTemplate) ArcgisLayerFieldMod { + return ArcgisLayerFieldModFunc(func(ctx context.Context, o *ArcgisLayerFieldTemplate) { + o.r.AddressMappings = []*arcgisLayerFieldRAddressMappingsR{{ + number: number, + o: related, + }} + }) +} + +func (m arcgisLayerFieldMods) WithNewAddressMappings(number int, mods ...ArcgisAddressMappingMod) ArcgisLayerFieldMod { + return ArcgisLayerFieldModFunc(func(ctx context.Context, o *ArcgisLayerFieldTemplate) { + related := o.f.NewArcgisAddressMappingWithContext(ctx, mods...) + m.WithAddressMappings(number, related).Apply(ctx, o) + }) +} + +func (m arcgisLayerFieldMods) AddAddressMappings(number int, related *ArcgisAddressMappingTemplate) ArcgisLayerFieldMod { + return ArcgisLayerFieldModFunc(func(ctx context.Context, o *ArcgisLayerFieldTemplate) { + o.r.AddressMappings = append(o.r.AddressMappings, &arcgisLayerFieldRAddressMappingsR{ + number: number, + o: related, + }) + }) +} + +func (m arcgisLayerFieldMods) AddNewAddressMappings(number int, mods ...ArcgisAddressMappingMod) ArcgisLayerFieldMod { + return ArcgisLayerFieldModFunc(func(ctx context.Context, o *ArcgisLayerFieldTemplate) { + related := o.f.NewArcgisAddressMappingWithContext(ctx, mods...) 
+ m.AddAddressMappings(number, related).Apply(ctx, o) + }) +} + +func (m arcgisLayerFieldMods) AddExistingAddressMappings(existingModels ...*models.ArcgisAddressMapping) ArcgisLayerFieldMod { + return ArcgisLayerFieldModFunc(func(ctx context.Context, o *ArcgisLayerFieldTemplate) { + for _, em := range existingModels { + o.r.AddressMappings = append(o.r.AddressMappings, &arcgisLayerFieldRAddressMappingsR{ + o: o.f.FromExistingArcgisAddressMapping(em), + }) + } + }) +} + +func (m arcgisLayerFieldMods) WithoutAddressMappings() ArcgisLayerFieldMod { + return ArcgisLayerFieldModFunc(func(ctx context.Context, o *ArcgisLayerFieldTemplate) { + o.r.AddressMappings = nil + }) +} + +func (m arcgisLayerFieldMods) WithParcelMappings(number int, related *ArcgisParcelMappingTemplate) ArcgisLayerFieldMod { + return ArcgisLayerFieldModFunc(func(ctx context.Context, o *ArcgisLayerFieldTemplate) { + o.r.ParcelMappings = []*arcgisLayerFieldRParcelMappingsR{{ + number: number, + o: related, + }} + }) +} + +func (m arcgisLayerFieldMods) WithNewParcelMappings(number int, mods ...ArcgisParcelMappingMod) ArcgisLayerFieldMod { + return ArcgisLayerFieldModFunc(func(ctx context.Context, o *ArcgisLayerFieldTemplate) { + related := o.f.NewArcgisParcelMappingWithContext(ctx, mods...) + m.WithParcelMappings(number, related).Apply(ctx, o) + }) +} + +func (m arcgisLayerFieldMods) AddParcelMappings(number int, related *ArcgisParcelMappingTemplate) ArcgisLayerFieldMod { + return ArcgisLayerFieldModFunc(func(ctx context.Context, o *ArcgisLayerFieldTemplate) { + o.r.ParcelMappings = append(o.r.ParcelMappings, &arcgisLayerFieldRParcelMappingsR{ + number: number, + o: related, + }) + }) +} + +func (m arcgisLayerFieldMods) AddNewParcelMappings(number int, mods ...ArcgisParcelMappingMod) ArcgisLayerFieldMod { + return ArcgisLayerFieldModFunc(func(ctx context.Context, o *ArcgisLayerFieldTemplate) { + related := o.f.NewArcgisParcelMappingWithContext(ctx, mods...) 
+ m.AddParcelMappings(number, related).Apply(ctx, o) + }) +} + +func (m arcgisLayerFieldMods) AddExistingParcelMappings(existingModels ...*models.ArcgisParcelMapping) ArcgisLayerFieldMod { + return ArcgisLayerFieldModFunc(func(ctx context.Context, o *ArcgisLayerFieldTemplate) { + for _, em := range existingModels { + o.r.ParcelMappings = append(o.r.ParcelMappings, &arcgisLayerFieldRParcelMappingsR{ + o: o.f.FromExistingArcgisParcelMapping(em), + }) + } + }) +} + +func (m arcgisLayerFieldMods) WithoutParcelMappings() ArcgisLayerFieldMod { + return ArcgisLayerFieldModFunc(func(ctx context.Context, o *ArcgisLayerFieldTemplate) { + o.r.ParcelMappings = nil + }) +} diff --git a/db/factory/arcgis.parcel_mapping.bob.go b/db/factory/arcgis.parcel_mapping.bob.go new file mode 100644 index 00000000..42018da8 --- /dev/null +++ b/db/factory/arcgis.parcel_mapping.bob.go @@ -0,0 +1,570 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package factory + +import ( + "context" + "testing" + + "github.com/Gleipnir-Technology/bob" + enums "github.com/Gleipnir-Technology/nidus-sync/db/enums" + models "github.com/Gleipnir-Technology/nidus-sync/db/models" + "github.com/aarondl/opt/omit" + "github.com/jaswdr/faker/v2" +) + +type ArcgisParcelMappingMod interface { + Apply(context.Context, *ArcgisParcelMappingTemplate) +} + +type ArcgisParcelMappingModFunc func(context.Context, *ArcgisParcelMappingTemplate) + +func (f ArcgisParcelMappingModFunc) Apply(ctx context.Context, n *ArcgisParcelMappingTemplate) { + f(ctx, n) +} + +type ArcgisParcelMappingModSlice []ArcgisParcelMappingMod + +func (mods ArcgisParcelMappingModSlice) Apply(ctx context.Context, n *ArcgisParcelMappingTemplate) { + for _, f := range mods { + f.Apply(ctx, n) + } +} + +// ArcgisParcelMappingTemplate is an object representing the database table. 
+// all columns are optional and should be set by mods +type ArcgisParcelMappingTemplate struct { + Destination func() enums.ArcgisMappingdestinationparcel + LayerFeatureServiceItemID func() string + LayerIndex func() int32 + LayerFieldName func() string + OrganizationID func() int32 + + r arcgisParcelMappingR + f *Factory + + alreadyPersisted bool +} + +type arcgisParcelMappingR struct { + LayerField *arcgisParcelMappingRLayerFieldR + Organization *arcgisParcelMappingROrganizationR +} + +type arcgisParcelMappingRLayerFieldR struct { + o *ArcgisLayerFieldTemplate +} +type arcgisParcelMappingROrganizationR struct { + o *OrganizationTemplate +} + +// Apply mods to the ArcgisParcelMappingTemplate +func (o *ArcgisParcelMappingTemplate) Apply(ctx context.Context, mods ...ArcgisParcelMappingMod) { + for _, mod := range mods { + mod.Apply(ctx, o) + } +} + +// setModelRels creates and sets the relationships on *models.ArcgisParcelMapping +// according to the relationships in the template. Nothing is inserted into the db +func (t ArcgisParcelMappingTemplate) setModelRels(o *models.ArcgisParcelMapping) { + if t.r.LayerField != nil { + rel := t.r.LayerField.o.Build() + rel.R.ParcelMappings = append(rel.R.ParcelMappings, o) + o.LayerFeatureServiceItemID = rel.LayerFeatureServiceItemID // h2 + o.LayerIndex = rel.LayerIndex // h2 + o.LayerFieldName = rel.Name // h2 + o.R.LayerField = rel + } + + if t.r.Organization != nil { + rel := t.r.Organization.o.Build() + rel.R.ParcelMappings = append(rel.R.ParcelMappings, o) + o.OrganizationID = rel.ID // h2 + o.R.Organization = rel + } +} + +// BuildSetter returns an *models.ArcgisParcelMappingSetter +// this does nothing with the relationship templates +func (o ArcgisParcelMappingTemplate) BuildSetter() *models.ArcgisParcelMappingSetter { + m := &models.ArcgisParcelMappingSetter{} + + if o.Destination != nil { + val := o.Destination() + m.Destination = omit.From(val) + } + if o.LayerFeatureServiceItemID != nil { + val := 
o.LayerFeatureServiceItemID() + m.LayerFeatureServiceItemID = omit.From(val) + } + if o.LayerIndex != nil { + val := o.LayerIndex() + m.LayerIndex = omit.From(val) + } + if o.LayerFieldName != nil { + val := o.LayerFieldName() + m.LayerFieldName = omit.From(val) + } + if o.OrganizationID != nil { + val := o.OrganizationID() + m.OrganizationID = omit.From(val) + } + + return m +} + +// BuildManySetter returns an []*models.ArcgisParcelMappingSetter +// this does nothing with the relationship templates +func (o ArcgisParcelMappingTemplate) BuildManySetter(number int) []*models.ArcgisParcelMappingSetter { + m := make([]*models.ArcgisParcelMappingSetter, number) + + for i := range m { + m[i] = o.BuildSetter() + } + + return m +} + +// Build returns an *models.ArcgisParcelMapping +// Related objects are also created and placed in the .R field +// NOTE: Objects are not inserted into the database. Use ArcgisParcelMappingTemplate.Create +func (o ArcgisParcelMappingTemplate) Build() *models.ArcgisParcelMapping { + m := &models.ArcgisParcelMapping{} + + if o.Destination != nil { + m.Destination = o.Destination() + } + if o.LayerFeatureServiceItemID != nil { + m.LayerFeatureServiceItemID = o.LayerFeatureServiceItemID() + } + if o.LayerIndex != nil { + m.LayerIndex = o.LayerIndex() + } + if o.LayerFieldName != nil { + m.LayerFieldName = o.LayerFieldName() + } + if o.OrganizationID != nil { + m.OrganizationID = o.OrganizationID() + } + + o.setModelRels(m) + + return m +} + +// BuildMany returns an models.ArcgisParcelMappingSlice +// Related objects are also created and placed in the .R field +// NOTE: Objects are not inserted into the database. 
Use ArcgisParcelMappingTemplate.CreateMany +func (o ArcgisParcelMappingTemplate) BuildMany(number int) models.ArcgisParcelMappingSlice { + m := make(models.ArcgisParcelMappingSlice, number) + + for i := range m { + m[i] = o.Build() + } + + return m +} + +func ensureCreatableArcgisParcelMapping(m *models.ArcgisParcelMappingSetter) { + if !(m.Destination.IsValue()) { + val := random_enums_ArcgisMappingdestinationparcel(nil) + m.Destination = omit.From(val) + } + if !(m.LayerFeatureServiceItemID.IsValue()) { + val := random_string(nil) + m.LayerFeatureServiceItemID = omit.From(val) + } + if !(m.LayerIndex.IsValue()) { + val := random_int32(nil) + m.LayerIndex = omit.From(val) + } + if !(m.LayerFieldName.IsValue()) { + val := random_string(nil) + m.LayerFieldName = omit.From(val) + } + if !(m.OrganizationID.IsValue()) { + val := random_int32(nil) + m.OrganizationID = omit.From(val) + } +} + +// insertOptRels creates and inserts any optional the relationships on *models.ArcgisParcelMapping +// according to the relationships in the template. 
+// any required relationship should have already exist on the model +func (o *ArcgisParcelMappingTemplate) insertOptRels(ctx context.Context, exec bob.Executor, m *models.ArcgisParcelMapping) error { + var err error + + return err +} + +// Create builds a arcgisParcelMapping and inserts it into the database +// Relations objects are also inserted and placed in the .R field +func (o *ArcgisParcelMappingTemplate) Create(ctx context.Context, exec bob.Executor) (*models.ArcgisParcelMapping, error) { + var err error + opt := o.BuildSetter() + ensureCreatableArcgisParcelMapping(opt) + + if o.r.LayerField == nil { + ArcgisParcelMappingMods.WithNewLayerField().Apply(ctx, o) + } + + var rel0 *models.ArcgisLayerField + + if o.r.LayerField.o.alreadyPersisted { + rel0 = o.r.LayerField.o.Build() + } else { + rel0, err = o.r.LayerField.o.Create(ctx, exec) + if err != nil { + return nil, err + } + } + + opt.LayerFeatureServiceItemID = omit.From(rel0.LayerFeatureServiceItemID) + opt.LayerIndex = omit.From(rel0.LayerIndex) + opt.LayerFieldName = omit.From(rel0.Name) + + if o.r.Organization == nil { + ArcgisParcelMappingMods.WithNewOrganization().Apply(ctx, o) + } + + var rel1 *models.Organization + + if o.r.Organization.o.alreadyPersisted { + rel1 = o.r.Organization.o.Build() + } else { + rel1, err = o.r.Organization.o.Create(ctx, exec) + if err != nil { + return nil, err + } + } + + opt.OrganizationID = omit.From(rel1.ID) + + m, err := models.ArcgisParcelMappings.Insert(opt).One(ctx, exec) + if err != nil { + return nil, err + } + + m.R.LayerField = rel0 + m.R.Organization = rel1 + + if err := o.insertOptRels(ctx, exec, m); err != nil { + return nil, err + } + return m, err +} + +// MustCreate builds a arcgisParcelMapping and inserts it into the database +// Relations objects are also inserted and placed in the .R field +// panics if an error occurs +func (o *ArcgisParcelMappingTemplate) MustCreate(ctx context.Context, exec bob.Executor) *models.ArcgisParcelMapping { + m, err := 
o.Create(ctx, exec) + if err != nil { + panic(err) + } + return m +} + +// CreateOrFail builds a arcgisParcelMapping and inserts it into the database +// Relations objects are also inserted and placed in the .R field +// It calls `tb.Fatal(err)` on the test/benchmark if an error occurs +func (o *ArcgisParcelMappingTemplate) CreateOrFail(ctx context.Context, tb testing.TB, exec bob.Executor) *models.ArcgisParcelMapping { + tb.Helper() + m, err := o.Create(ctx, exec) + if err != nil { + tb.Fatal(err) + return nil + } + return m +} + +// CreateMany builds multiple arcgisParcelMappings and inserts them into the database +// Relations objects are also inserted and placed in the .R field +func (o ArcgisParcelMappingTemplate) CreateMany(ctx context.Context, exec bob.Executor, number int) (models.ArcgisParcelMappingSlice, error) { + var err error + m := make(models.ArcgisParcelMappingSlice, number) + + for i := range m { + m[i], err = o.Create(ctx, exec) + if err != nil { + return nil, err + } + } + + return m, nil +} + +// MustCreateMany builds multiple arcgisParcelMappings and inserts them into the database +// Relations objects are also inserted and placed in the .R field +// panics if an error occurs +func (o ArcgisParcelMappingTemplate) MustCreateMany(ctx context.Context, exec bob.Executor, number int) models.ArcgisParcelMappingSlice { + m, err := o.CreateMany(ctx, exec, number) + if err != nil { + panic(err) + } + return m +} + +// CreateManyOrFail builds multiple arcgisParcelMappings and inserts them into the database +// Relations objects are also inserted and placed in the .R field +// It calls `tb.Fatal(err)` on the test/benchmark if an error occurs +func (o ArcgisParcelMappingTemplate) CreateManyOrFail(ctx context.Context, tb testing.TB, exec bob.Executor, number int) models.ArcgisParcelMappingSlice { + tb.Helper() + m, err := o.CreateMany(ctx, exec, number) + if err != nil { + tb.Fatal(err) + return nil + } + return m +} + +// ArcgisParcelMapping has methods 
that act as mods for the ArcgisParcelMappingTemplate +var ArcgisParcelMappingMods arcgisParcelMappingMods + +type arcgisParcelMappingMods struct{} + +func (m arcgisParcelMappingMods) RandomizeAllColumns(f *faker.Faker) ArcgisParcelMappingMod { + return ArcgisParcelMappingModSlice{ + ArcgisParcelMappingMods.RandomDestination(f), + ArcgisParcelMappingMods.RandomLayerFeatureServiceItemID(f), + ArcgisParcelMappingMods.RandomLayerIndex(f), + ArcgisParcelMappingMods.RandomLayerFieldName(f), + ArcgisParcelMappingMods.RandomOrganizationID(f), + } +} + +// Set the model columns to this value +func (m arcgisParcelMappingMods) Destination(val enums.ArcgisMappingdestinationparcel) ArcgisParcelMappingMod { + return ArcgisParcelMappingModFunc(func(_ context.Context, o *ArcgisParcelMappingTemplate) { + o.Destination = func() enums.ArcgisMappingdestinationparcel { return val } + }) +} + +// Set the Column from the function +func (m arcgisParcelMappingMods) DestinationFunc(f func() enums.ArcgisMappingdestinationparcel) ArcgisParcelMappingMod { + return ArcgisParcelMappingModFunc(func(_ context.Context, o *ArcgisParcelMappingTemplate) { + o.Destination = f + }) +} + +// Clear any values for the column +func (m arcgisParcelMappingMods) UnsetDestination() ArcgisParcelMappingMod { + return ArcgisParcelMappingModFunc(func(_ context.Context, o *ArcgisParcelMappingTemplate) { + o.Destination = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m arcgisParcelMappingMods) RandomDestination(f *faker.Faker) ArcgisParcelMappingMod { + return ArcgisParcelMappingModFunc(func(_ context.Context, o *ArcgisParcelMappingTemplate) { + o.Destination = func() enums.ArcgisMappingdestinationparcel { + return random_enums_ArcgisMappingdestinationparcel(f) + } + }) +} + +// Set the model columns to this value +func (m arcgisParcelMappingMods) LayerFeatureServiceItemID(val string) ArcgisParcelMappingMod { + return 
ArcgisParcelMappingModFunc(func(_ context.Context, o *ArcgisParcelMappingTemplate) { + o.LayerFeatureServiceItemID = func() string { return val } + }) +} + +// Set the Column from the function +func (m arcgisParcelMappingMods) LayerFeatureServiceItemIDFunc(f func() string) ArcgisParcelMappingMod { + return ArcgisParcelMappingModFunc(func(_ context.Context, o *ArcgisParcelMappingTemplate) { + o.LayerFeatureServiceItemID = f + }) +} + +// Clear any values for the column +func (m arcgisParcelMappingMods) UnsetLayerFeatureServiceItemID() ArcgisParcelMappingMod { + return ArcgisParcelMappingModFunc(func(_ context.Context, o *ArcgisParcelMappingTemplate) { + o.LayerFeatureServiceItemID = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m arcgisParcelMappingMods) RandomLayerFeatureServiceItemID(f *faker.Faker) ArcgisParcelMappingMod { + return ArcgisParcelMappingModFunc(func(_ context.Context, o *ArcgisParcelMappingTemplate) { + o.LayerFeatureServiceItemID = func() string { + return random_string(f) + } + }) +} + +// Set the model columns to this value +func (m arcgisParcelMappingMods) LayerIndex(val int32) ArcgisParcelMappingMod { + return ArcgisParcelMappingModFunc(func(_ context.Context, o *ArcgisParcelMappingTemplate) { + o.LayerIndex = func() int32 { return val } + }) +} + +// Set the Column from the function +func (m arcgisParcelMappingMods) LayerIndexFunc(f func() int32) ArcgisParcelMappingMod { + return ArcgisParcelMappingModFunc(func(_ context.Context, o *ArcgisParcelMappingTemplate) { + o.LayerIndex = f + }) +} + +// Clear any values for the column +func (m arcgisParcelMappingMods) UnsetLayerIndex() ArcgisParcelMappingMod { + return ArcgisParcelMappingModFunc(func(_ context.Context, o *ArcgisParcelMappingTemplate) { + o.LayerIndex = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m 
arcgisParcelMappingMods) RandomLayerIndex(f *faker.Faker) ArcgisParcelMappingMod { + return ArcgisParcelMappingModFunc(func(_ context.Context, o *ArcgisParcelMappingTemplate) { + o.LayerIndex = func() int32 { + return random_int32(f) + } + }) +} + +// Set the model columns to this value +func (m arcgisParcelMappingMods) LayerFieldName(val string) ArcgisParcelMappingMod { + return ArcgisParcelMappingModFunc(func(_ context.Context, o *ArcgisParcelMappingTemplate) { + o.LayerFieldName = func() string { return val } + }) +} + +// Set the Column from the function +func (m arcgisParcelMappingMods) LayerFieldNameFunc(f func() string) ArcgisParcelMappingMod { + return ArcgisParcelMappingModFunc(func(_ context.Context, o *ArcgisParcelMappingTemplate) { + o.LayerFieldName = f + }) +} + +// Clear any values for the column +func (m arcgisParcelMappingMods) UnsetLayerFieldName() ArcgisParcelMappingMod { + return ArcgisParcelMappingModFunc(func(_ context.Context, o *ArcgisParcelMappingTemplate) { + o.LayerFieldName = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m arcgisParcelMappingMods) RandomLayerFieldName(f *faker.Faker) ArcgisParcelMappingMod { + return ArcgisParcelMappingModFunc(func(_ context.Context, o *ArcgisParcelMappingTemplate) { + o.LayerFieldName = func() string { + return random_string(f) + } + }) +} + +// Set the model columns to this value +func (m arcgisParcelMappingMods) OrganizationID(val int32) ArcgisParcelMappingMod { + return ArcgisParcelMappingModFunc(func(_ context.Context, o *ArcgisParcelMappingTemplate) { + o.OrganizationID = func() int32 { return val } + }) +} + +// Set the Column from the function +func (m arcgisParcelMappingMods) OrganizationIDFunc(f func() int32) ArcgisParcelMappingMod { + return ArcgisParcelMappingModFunc(func(_ context.Context, o *ArcgisParcelMappingTemplate) { + o.OrganizationID = f + }) +} + +// Clear any values for the column +func (m 
arcgisParcelMappingMods) UnsetOrganizationID() ArcgisParcelMappingMod { + return ArcgisParcelMappingModFunc(func(_ context.Context, o *ArcgisParcelMappingTemplate) { + o.OrganizationID = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m arcgisParcelMappingMods) RandomOrganizationID(f *faker.Faker) ArcgisParcelMappingMod { + return ArcgisParcelMappingModFunc(func(_ context.Context, o *ArcgisParcelMappingTemplate) { + o.OrganizationID = func() int32 { + return random_int32(f) + } + }) +} + +func (m arcgisParcelMappingMods) WithParentsCascading() ArcgisParcelMappingMod { + return ArcgisParcelMappingModFunc(func(ctx context.Context, o *ArcgisParcelMappingTemplate) { + if isDone, _ := arcgisParcelMappingWithParentsCascadingCtx.Value(ctx); isDone { + return + } + ctx = arcgisParcelMappingWithParentsCascadingCtx.WithValue(ctx, true) + { + + related := o.f.NewArcgisLayerFieldWithContext(ctx, ArcgisLayerFieldMods.WithParentsCascading()) + m.WithLayerField(related).Apply(ctx, o) + } + { + + related := o.f.NewOrganizationWithContext(ctx, OrganizationMods.WithParentsCascading()) + m.WithOrganization(related).Apply(ctx, o) + } + }) +} + +func (m arcgisParcelMappingMods) WithLayerField(rel *ArcgisLayerFieldTemplate) ArcgisParcelMappingMod { + return ArcgisParcelMappingModFunc(func(ctx context.Context, o *ArcgisParcelMappingTemplate) { + o.r.LayerField = &arcgisParcelMappingRLayerFieldR{ + o: rel, + } + }) +} + +func (m arcgisParcelMappingMods) WithNewLayerField(mods ...ArcgisLayerFieldMod) ArcgisParcelMappingMod { + return ArcgisParcelMappingModFunc(func(ctx context.Context, o *ArcgisParcelMappingTemplate) { + related := o.f.NewArcgisLayerFieldWithContext(ctx, mods...) 
+ + m.WithLayerField(related).Apply(ctx, o) + }) +} + +func (m arcgisParcelMappingMods) WithExistingLayerField(em *models.ArcgisLayerField) ArcgisParcelMappingMod { + return ArcgisParcelMappingModFunc(func(ctx context.Context, o *ArcgisParcelMappingTemplate) { + o.r.LayerField = &arcgisParcelMappingRLayerFieldR{ + o: o.f.FromExistingArcgisLayerField(em), + } + }) +} + +func (m arcgisParcelMappingMods) WithoutLayerField() ArcgisParcelMappingMod { + return ArcgisParcelMappingModFunc(func(ctx context.Context, o *ArcgisParcelMappingTemplate) { + o.r.LayerField = nil + }) +} + +func (m arcgisParcelMappingMods) WithOrganization(rel *OrganizationTemplate) ArcgisParcelMappingMod { + return ArcgisParcelMappingModFunc(func(ctx context.Context, o *ArcgisParcelMappingTemplate) { + o.r.Organization = &arcgisParcelMappingROrganizationR{ + o: rel, + } + }) +} + +func (m arcgisParcelMappingMods) WithNewOrganization(mods ...OrganizationMod) ArcgisParcelMappingMod { + return ArcgisParcelMappingModFunc(func(ctx context.Context, o *ArcgisParcelMappingTemplate) { + related := o.f.NewOrganizationWithContext(ctx, mods...) 
+ + m.WithOrganization(related).Apply(ctx, o) + }) +} + +func (m arcgisParcelMappingMods) WithExistingOrganization(em *models.Organization) ArcgisParcelMappingMod { + return ArcgisParcelMappingModFunc(func(ctx context.Context, o *ArcgisParcelMappingTemplate) { + o.r.Organization = &arcgisParcelMappingROrganizationR{ + o: o.f.FromExistingOrganization(em), + } + }) +} + +func (m arcgisParcelMappingMods) WithoutOrganization() ArcgisParcelMappingMod { + return ArcgisParcelMappingModFunc(func(ctx context.Context, o *ArcgisParcelMappingTemplate) { + o.r.Organization = nil + }) +} diff --git a/db/factory/bobfactory_context.bob.go b/db/factory/bobfactory_context.bob.go index 3c6836a5..4c3c2c7b 100644 --- a/db/factory/bobfactory_context.bob.go +++ b/db/factory/bobfactory_context.bob.go @@ -8,6 +8,35 @@ import "context" type contextKey string var ( + // Relationship Contexts for address + addressWithParentsCascadingCtx = newContextual[bool]("addressWithParentsCascading") + addressRelSiteCtx = newContextual[bool]("address.site.site.site_address_id_fkey") + + // Relationship Contexts for arcgis.address_mapping + arcgisAddressMappingWithParentsCascadingCtx = newContextual[bool]("arcgisAddressMappingWithParentsCascading") + arcgisAddressMappingRelLayerFieldCtx = newContextual[bool]("arcgis.address_mapping.arcgis.layer_field.arcgis.address_mapping.address_mapping_layer_feature_service_item_id_layer_index__fkey") + arcgisAddressMappingRelOrganizationCtx = newContextual[bool]("arcgis.address_mapping.organization.arcgis.address_mapping.address_mapping_organization_id_fkey") + + // Relationship Contexts for arcgis.feature_service + arcgisFeatureServiceWithParentsCascadingCtx = newContextual[bool]("arcgisFeatureServiceWithParentsCascading") + arcgisFeatureServiceRelFeatureServiceItemLayersCtx = newContextual[bool]("arcgis.feature_service.arcgis.layer.arcgis.layer.layer_feature_service_item_id_fkey") + + // Relationship Contexts for arcgis.layer + arcgisLayerWithParentsCascadingCtx = 
newContextual[bool]("arcgisLayerWithParentsCascading") + arcgisLayerRelFeatureServiceItemFeatureServiceCtx = newContextual[bool]("arcgis.feature_service.arcgis.layer.arcgis.layer.layer_feature_service_item_id_fkey") + arcgisLayerRelLayerFieldsCtx = newContextual[bool]("arcgis.layer.arcgis.layer_field.arcgis.layer_field.layer_field_layer_feature_service_item_id_layer_index_fkey") + + // Relationship Contexts for arcgis.layer_field + arcgisLayerFieldWithParentsCascadingCtx = newContextual[bool]("arcgisLayerFieldWithParentsCascading") + arcgisLayerFieldRelAddressMappingsCtx = newContextual[bool]("arcgis.address_mapping.arcgis.layer_field.arcgis.address_mapping.address_mapping_layer_feature_service_item_id_layer_index__fkey") + arcgisLayerFieldRelLayerCtx = newContextual[bool]("arcgis.layer.arcgis.layer_field.arcgis.layer_field.layer_field_layer_feature_service_item_id_layer_index_fkey") + arcgisLayerFieldRelParcelMappingsCtx = newContextual[bool]("arcgis.layer_field.arcgis.parcel_mapping.arcgis.parcel_mapping.parcel_mapping_layer_feature_service_item_id_layer_index_l_fkey") + + // Relationship Contexts for arcgis.parcel_mapping + arcgisParcelMappingWithParentsCascadingCtx = newContextual[bool]("arcgisParcelMappingWithParentsCascading") + arcgisParcelMappingRelLayerFieldCtx = newContextual[bool]("arcgis.layer_field.arcgis.parcel_mapping.arcgis.parcel_mapping.parcel_mapping_layer_feature_service_item_id_layer_index_l_fkey") + arcgisParcelMappingRelOrganizationCtx = newContextual[bool]("arcgis.parcel_mapping.organization.arcgis.parcel_mapping.parcel_mapping_organization_id_fkey") + // Relationship Contexts for arcgis.user_ arcgisuserWithParentsCascadingCtx = newContextual[bool]("arcgisuserWithParentsCascading") arcgisuserRelPublicUserUserCtx = newContextual[bool]("arcgis.user_.user_.arcgis.user_.user__public_user_id_fkey") @@ -197,6 +226,7 @@ var ( fileuploadFileRelErrorFilesCtx = 
newContextual[bool]("fileupload.error_file.fileupload.file.fileupload.error_file.error_file_file_id_fkey") fileuploadFileRelCreatorUserCtx = newContextual[bool]("fileupload.file.user_.fileupload.file.file_creator_id_fkey") fileuploadFileRelOrganizationCtx = newContextual[bool]("fileupload.file.organization.fileupload.file.file_organization_id_fkey") + fileuploadFileRelSitesCtx = newContextual[bool]("fileupload.file.site.site.site_file_id_fkey") // Relationship Contexts for fileupload.pool fileuploadPoolWithParentsCascadingCtx = newContextual[bool]("fileuploadPoolWithParentsCascading") @@ -261,6 +291,8 @@ var ( // Relationship Contexts for organization organizationWithParentsCascadingCtx = newContextual[bool]("organizationWithParentsCascading") + organizationRelAddressMappingsCtx = newContextual[bool]("arcgis.address_mapping.organization.arcgis.address_mapping.address_mapping_organization_id_fkey") + organizationRelParcelMappingsCtx = newContextual[bool]("arcgis.parcel_mapping.organization.arcgis.parcel_mapping.parcel_mapping_organization_id_fkey") organizationRelEmailContactsCtx = newContextual[bool]("comms.email_contact.organization.district_subscription_email.district_subscription_email_email_contact_address_fkeydistrict_subscription_email.district_subscription_email_organization_id_fkey") organizationRelPhonesCtx = newContextual[bool]("comms.phone.organization.district_subscription_phone.district_subscription_phone_organization_id_fkeydistrict_subscription_phone.district_subscription_phone_phone_e164_fkey") organizationRelContainerrelatesCtx = newContextual[bool]("fieldseeker.containerrelate.organization.fieldseeker.containerrelate.containerrelate_organization_id_fkey") @@ -301,6 +333,13 @@ var ( organizationRelQuicksCtx = newContextual[bool]("organization.publicreport.quick.publicreport.quick.quick_organization_id_fkey") organizationRelUserCtx = newContextual[bool]("organization.user_.user_.user__organization_id_fkey") + // Relationship Contexts for parcel + 
parcelWithParentsCascadingCtx = newContextual[bool]("parcelWithParentsCascading") + + // Relationship Contexts for pool + poolWithParentsCascadingCtx = newContextual[bool]("poolWithParentsCascading") + poolRelCreatorUserCtx = newContextual[bool]("pool.user_.pool.pool_creator_id_fkey") + // Relationship Contexts for publicreport.image publicreportImageWithParentsCascadingCtx = newContextual[bool]("publicreportImageWithParentsCascading") publicreportImageRelImageExifsCtx = newContextual[bool]("publicreport.image.publicreport.image_exif.publicreport.image_exif.image_exif_image_id_fkey") @@ -386,6 +425,12 @@ var ( // Relationship Contexts for sessions sessionWithParentsCascadingCtx = newContextual[bool]("sessionWithParentsCascading") + // Relationship Contexts for site + siteWithParentsCascadingCtx = newContextual[bool]("siteWithParentsCascading") + siteRelAddressCtx = newContextual[bool]("address.site.site.site_address_id_fkey") + siteRelCreatorUserCtx = newContextual[bool]("site.user_.site.site_creator_id_fkey") + siteRelFileCtx = newContextual[bool]("fileupload.file.site.site.site_file_id_fkey") + // Relationship Contexts for spatial_ref_sys spatialRefSyWithParentsCascadingCtx = newContextual[bool]("spatialRefSyWithParentsCascading") @@ -393,13 +438,15 @@ var ( userWithParentsCascadingCtx = newContextual[bool]("userWithParentsCascading") userRelPublicUserUserCtx = newContextual[bool]("arcgis.user_.user_.arcgis.user_.user__public_user_id_fkey") userRelCreatorFilesCtx = newContextual[bool]("fileupload.file.user_.fileupload.file.file_creator_id_fkey") - userRelCreatorPoolsCtx = newContextual[bool]("fileupload.pool.user_.fileupload.pool.pool_creator_id_fkey") + userRelFileuploadPoolCtx = newContextual[bool]("fileupload.pool.user_.fileupload.pool.pool_creator_id_fkey") userRelCreatorNoteAudiosCtx = newContextual[bool]("note_audio.user_.note_audio.note_audio_creator_id_fkey") userRelDeletorNoteAudiosCtx = 
newContextual[bool]("note_audio.user_.note_audio.note_audio_deletor_id_fkey") userRelCreatorNoteImagesCtx = newContextual[bool]("note_image.user_.note_image.note_image_creator_id_fkey") userRelDeletorNoteImagesCtx = newContextual[bool]("note_image.user_.note_image.note_image_deletor_id_fkey") userRelUserNotificationsCtx = newContextual[bool]("notification.user_.notification.notification_user_id_fkey") userRelUserOauthTokensCtx = newContextual[bool]("oauth_token.user_.oauth_token.oauth_token_user_id_fkey") + userRelCreatorPoolsCtx = newContextual[bool]("pool.user_.pool.pool_creator_id_fkey") + userRelCreatorSitesCtx = newContextual[bool]("site.user_.site.site_creator_id_fkey") userRelOrganizationCtx = newContextual[bool]("organization.user_.user_.user__organization_id_fkey") ) diff --git a/db/factory/bobfactory_main.bob.go b/db/factory/bobfactory_main.bob.go index 4c3eb2b5..95689998 100644 --- a/db/factory/bobfactory_main.bob.go +++ b/db/factory/bobfactory_main.bob.go @@ -19,6 +19,12 @@ import ( ) type Factory struct { + baseAddressMods AddressModSlice + baseArcgisAddressMappingMods ArcgisAddressMappingModSlice + baseArcgisFeatureServiceMods ArcgisFeatureServiceModSlice + baseArcgisLayerMods ArcgisLayerModSlice + baseArcgisLayerFieldMods ArcgisLayerFieldModSlice + baseArcgisParcelMappingMods ArcgisParcelMappingModSlice baseArcgisUserMods ArcgisUserModSlice baseArcgisUserPrivilegeMods ArcgisUserPrivilegeModSlice baseCommsEmailContactMods CommsEmailContactModSlice @@ -75,6 +81,8 @@ type Factory struct { baseNotificationMods NotificationModSlice baseOauthTokenMods OauthTokenModSlice baseOrganizationMods OrganizationModSlice + baseParcelMods ParcelModSlice + basePoolMods PoolModSlice basePublicreportImageMods PublicreportImageModSlice basePublicreportImageExifMods PublicreportImageExifModSlice basePublicreportNotifyEmailNuisanceMods PublicreportNotifyEmailNuisanceModSlice @@ -93,6 +101,7 @@ type Factory struct { baseRasterColumnMods RasterColumnModSlice 
baseRasterOverviewMods RasterOverviewModSlice baseSessionMods SessionModSlice + baseSiteMods SiteModSlice baseSpatialRefSyMods SpatialRefSyModSlice baseUserMods UserModSlice } @@ -101,6 +110,220 @@ func New() *Factory { return &Factory{} } +func (f *Factory) NewAddress(mods ...AddressMod) *AddressTemplate { + return f.NewAddressWithContext(context.Background(), mods...) +} + +func (f *Factory) NewAddressWithContext(ctx context.Context, mods ...AddressMod) *AddressTemplate { + o := &AddressTemplate{f: f} + + if f != nil { + f.baseAddressMods.Apply(ctx, o) + } + + AddressModSlice(mods).Apply(ctx, o) + + return o +} + +func (f *Factory) FromExistingAddress(m *models.Address) *AddressTemplate { + o := &AddressTemplate{f: f, alreadyPersisted: true} + + o.Country = func() enums.Countrytype { return m.Country } + o.Created = func() time.Time { return m.Created } + o.Geom = func() string { return m.Geom } + o.H3cell = func() string { return m.H3cell } + o.ID = func() int32 { return m.ID } + o.Locality = func() string { return m.Locality } + o.Number = func() int32 { return m.Number } + o.PostalCode = func() string { return m.PostalCode } + o.Street = func() string { return m.Street } + o.Unit = func() string { return m.Unit } + + ctx := context.Background() + if m.R.Site != nil { + AddressMods.WithExistingSite(m.R.Site).Apply(ctx, o) + } + + return o +} + +func (f *Factory) NewArcgisAddressMapping(mods ...ArcgisAddressMappingMod) *ArcgisAddressMappingTemplate { + return f.NewArcgisAddressMappingWithContext(context.Background(), mods...) 
+} + +func (f *Factory) NewArcgisAddressMappingWithContext(ctx context.Context, mods ...ArcgisAddressMappingMod) *ArcgisAddressMappingTemplate { + o := &ArcgisAddressMappingTemplate{f: f} + + if f != nil { + f.baseArcgisAddressMappingMods.Apply(ctx, o) + } + + ArcgisAddressMappingModSlice(mods).Apply(ctx, o) + + return o +} + +func (f *Factory) FromExistingArcgisAddressMapping(m *models.ArcgisAddressMapping) *ArcgisAddressMappingTemplate { + o := &ArcgisAddressMappingTemplate{f: f, alreadyPersisted: true} + + o.Destination = func() enums.ArcgisMappingdestinationaddress { return m.Destination } + o.LayerFeatureServiceItemID = func() string { return m.LayerFeatureServiceItemID } + o.LayerIndex = func() int32 { return m.LayerIndex } + o.LayerFieldName = func() string { return m.LayerFieldName } + o.OrganizationID = func() int32 { return m.OrganizationID } + + ctx := context.Background() + if m.R.LayerField != nil { + ArcgisAddressMappingMods.WithExistingLayerField(m.R.LayerField).Apply(ctx, o) + } + if m.R.Organization != nil { + ArcgisAddressMappingMods.WithExistingOrganization(m.R.Organization).Apply(ctx, o) + } + + return o +} + +func (f *Factory) NewArcgisFeatureService(mods ...ArcgisFeatureServiceMod) *ArcgisFeatureServiceTemplate { + return f.NewArcgisFeatureServiceWithContext(context.Background(), mods...) 
+} + +func (f *Factory) NewArcgisFeatureServiceWithContext(ctx context.Context, mods ...ArcgisFeatureServiceMod) *ArcgisFeatureServiceTemplate { + o := &ArcgisFeatureServiceTemplate{f: f} + + if f != nil { + f.baseArcgisFeatureServiceMods.Apply(ctx, o) + } + + ArcgisFeatureServiceModSlice(mods).Apply(ctx, o) + + return o +} + +func (f *Factory) FromExistingArcgisFeatureService(m *models.ArcgisFeatureService) *ArcgisFeatureServiceTemplate { + o := &ArcgisFeatureServiceTemplate{f: f, alreadyPersisted: true} + + o.Extent = func() string { return m.Extent } + o.ItemID = func() string { return m.ItemID } + o.SpatialReference = func() int32 { return m.SpatialReference } + o.URL = func() string { return m.URL } + + ctx := context.Background() + if len(m.R.FeatureServiceItemLayers) > 0 { + ArcgisFeatureServiceMods.AddExistingFeatureServiceItemLayers(m.R.FeatureServiceItemLayers...).Apply(ctx, o) + } + + return o +} + +func (f *Factory) NewArcgisLayer(mods ...ArcgisLayerMod) *ArcgisLayerTemplate { + return f.NewArcgisLayerWithContext(context.Background(), mods...) 
+} + +func (f *Factory) NewArcgisLayerWithContext(ctx context.Context, mods ...ArcgisLayerMod) *ArcgisLayerTemplate { + o := &ArcgisLayerTemplate{f: f} + + if f != nil { + f.baseArcgisLayerMods.Apply(ctx, o) + } + + ArcgisLayerModSlice(mods).Apply(ctx, o) + + return o +} + +func (f *Factory) FromExistingArcgisLayer(m *models.ArcgisLayer) *ArcgisLayerTemplate { + o := &ArcgisLayerTemplate{f: f, alreadyPersisted: true} + + o.Extent = func() string { return m.Extent } + o.FeatureServiceItemID = func() string { return m.FeatureServiceItemID } + o.Index = func() int32 { return m.Index } + + ctx := context.Background() + if m.R.FeatureServiceItemFeatureService != nil { + ArcgisLayerMods.WithExistingFeatureServiceItemFeatureService(m.R.FeatureServiceItemFeatureService).Apply(ctx, o) + } + if len(m.R.LayerFields) > 0 { + ArcgisLayerMods.AddExistingLayerFields(m.R.LayerFields...).Apply(ctx, o) + } + + return o +} + +func (f *Factory) NewArcgisLayerField(mods ...ArcgisLayerFieldMod) *ArcgisLayerFieldTemplate { + return f.NewArcgisLayerFieldWithContext(context.Background(), mods...) 
+} + +func (f *Factory) NewArcgisLayerFieldWithContext(ctx context.Context, mods ...ArcgisLayerFieldMod) *ArcgisLayerFieldTemplate { + o := &ArcgisLayerFieldTemplate{f: f} + + if f != nil { + f.baseArcgisLayerFieldMods.Apply(ctx, o) + } + + ArcgisLayerFieldModSlice(mods).Apply(ctx, o) + + return o +} + +func (f *Factory) FromExistingArcgisLayerField(m *models.ArcgisLayerField) *ArcgisLayerFieldTemplate { + o := &ArcgisLayerFieldTemplate{f: f, alreadyPersisted: true} + + o.LayerFeatureServiceItemID = func() string { return m.LayerFeatureServiceItemID } + o.LayerIndex = func() int32 { return m.LayerIndex } + o.Name = func() string { return m.Name } + o.Type = func() enums.ArcgisFieldtype { return m.Type } + + ctx := context.Background() + if len(m.R.AddressMappings) > 0 { + ArcgisLayerFieldMods.AddExistingAddressMappings(m.R.AddressMappings...).Apply(ctx, o) + } + if m.R.Layer != nil { + ArcgisLayerFieldMods.WithExistingLayer(m.R.Layer).Apply(ctx, o) + } + if len(m.R.ParcelMappings) > 0 { + ArcgisLayerFieldMods.AddExistingParcelMappings(m.R.ParcelMappings...).Apply(ctx, o) + } + + return o +} + +func (f *Factory) NewArcgisParcelMapping(mods ...ArcgisParcelMappingMod) *ArcgisParcelMappingTemplate { + return f.NewArcgisParcelMappingWithContext(context.Background(), mods...) 
+} + +func (f *Factory) NewArcgisParcelMappingWithContext(ctx context.Context, mods ...ArcgisParcelMappingMod) *ArcgisParcelMappingTemplate { + o := &ArcgisParcelMappingTemplate{f: f} + + if f != nil { + f.baseArcgisParcelMappingMods.Apply(ctx, o) + } + + ArcgisParcelMappingModSlice(mods).Apply(ctx, o) + + return o +} + +func (f *Factory) FromExistingArcgisParcelMapping(m *models.ArcgisParcelMapping) *ArcgisParcelMappingTemplate { + o := &ArcgisParcelMappingTemplate{f: f, alreadyPersisted: true} + + o.Destination = func() enums.ArcgisMappingdestinationparcel { return m.Destination } + o.LayerFeatureServiceItemID = func() string { return m.LayerFeatureServiceItemID } + o.LayerIndex = func() int32 { return m.LayerIndex } + o.LayerFieldName = func() string { return m.LayerFieldName } + o.OrganizationID = func() int32 { return m.OrganizationID } + + ctx := context.Background() + if m.R.LayerField != nil { + ArcgisParcelMappingMods.WithExistingLayerField(m.R.LayerField).Apply(ctx, o) + } + if m.R.Organization != nil { + ArcgisParcelMappingMods.WithExistingOrganization(m.R.Organization).Apply(ctx, o) + } + + return o +} + func (f *Factory) NewArcgisUser(mods ...ArcgisUserMod) *ArcgisUserTemplate { return f.NewArcgisUserWithContext(context.Background(), mods...) 
} @@ -2392,6 +2615,9 @@ func (f *Factory) FromExistingFileuploadFile(m *models.FileuploadFile) *Fileuplo if m.R.Organization != nil { FileuploadFileMods.WithExistingOrganization(m.R.Organization).Apply(ctx, o) } + if len(m.R.Sites) > 0 { + FileuploadFileMods.AddExistingSites(m.R.Sites...).Apply(ctx, o) + } return o } @@ -2933,6 +3159,12 @@ func (f *Factory) FromExistingOrganization(m *models.Organization) *Organization o.ServiceAreaCentroidY = func() null.Val[float64] { return m.ServiceAreaCentroidY } ctx := context.Background() + if len(m.R.AddressMappings) > 0 { + OrganizationMods.AddExistingAddressMappings(m.R.AddressMappings...).Apply(ctx, o) + } + if len(m.R.ParcelMappings) > 0 { + OrganizationMods.AddExistingParcelMappings(m.R.ParcelMappings...).Apply(ctx, o) + } if len(m.R.EmailContacts) > 0 { OrganizationMods.AddExistingEmailContacts(m.R.EmailContacts...).Apply(ctx, o) } @@ -3054,6 +3286,66 @@ func (f *Factory) FromExistingOrganization(m *models.Organization) *Organization return o } +func (f *Factory) NewParcel(mods ...ParcelMod) *ParcelTemplate { + return f.NewParcelWithContext(context.Background(), mods...) +} + +func (f *Factory) NewParcelWithContext(ctx context.Context, mods ...ParcelMod) *ParcelTemplate { + o := &ParcelTemplate{f: f} + + if f != nil { + f.baseParcelMods.Apply(ctx, o) + } + + ParcelModSlice(mods).Apply(ctx, o) + + return o +} + +func (f *Factory) FromExistingParcel(m *models.Parcel) *ParcelTemplate { + o := &ParcelTemplate{f: f, alreadyPersisted: true} + + o.Apn = func() string { return m.Apn } + o.Description = func() string { return m.Description } + o.ID = func() int32 { return m.ID } + o.Geometry = func() string { return m.Geometry } + + return o +} + +func (f *Factory) NewPool(mods ...PoolMod) *PoolTemplate { + return f.NewPoolWithContext(context.Background(), mods...) 
+} + +func (f *Factory) NewPoolWithContext(ctx context.Context, mods ...PoolMod) *PoolTemplate { + o := &PoolTemplate{f: f} + + if f != nil { + f.basePoolMods.Apply(ctx, o) + } + + PoolModSlice(mods).Apply(ctx, o) + + return o +} + +func (f *Factory) FromExistingPool(m *models.Pool) *PoolTemplate { + o := &PoolTemplate{f: f, alreadyPersisted: true} + + o.Condition = func() enums.Poolconditiontype { return m.Condition } + o.Created = func() time.Time { return m.Created } + o.CreatorID = func() int32 { return m.CreatorID } + o.ID = func() int32 { return m.ID } + o.SiteID = func() null.Val[int32] { return m.SiteID } + + ctx := context.Background() + if m.R.CreatorUser != nil { + PoolMods.WithExistingCreatorUser(m.R.CreatorUser).Apply(ctx, o) + } + + return o +} + func (f *Factory) NewPublicreportImage(mods ...PublicreportImageMod) *PublicreportImageTemplate { return f.NewPublicreportImageWithContext(context.Background(), mods...) } @@ -3747,6 +4039,53 @@ func (f *Factory) FromExistingSession(m *models.Session) *SessionTemplate { return o } +func (f *Factory) NewSite(mods ...SiteMod) *SiteTemplate { + return f.NewSiteWithContext(context.Background(), mods...) 
+} + +func (f *Factory) NewSiteWithContext(ctx context.Context, mods ...SiteMod) *SiteTemplate { + o := &SiteTemplate{f: f} + + if f != nil { + f.baseSiteMods.Apply(ctx, o) + } + + SiteModSlice(mods).Apply(ctx, o) + + return o +} + +func (f *Factory) FromExistingSite(m *models.Site) *SiteTemplate { + o := &SiteTemplate{f: f, alreadyPersisted: true} + + o.AddressID = func() int32 { return m.AddressID } + o.Created = func() time.Time { return m.Created } + o.CreatorID = func() int32 { return m.CreatorID } + o.FileID = func() null.Val[int32] { return m.FileID } + o.ID = func() int32 { return m.ID } + o.Notes = func() string { return m.Notes } + o.OrganizationID = func() int32 { return m.OrganizationID } + o.OwnerName = func() string { return m.OwnerName } + o.OwnerPhoneE164 = func() null.Val[string] { return m.OwnerPhoneE164 } + o.ResidentOwned = func() null.Val[bool] { return m.ResidentOwned } + o.ResidentPhoneE164 = func() null.Val[string] { return m.ResidentPhoneE164 } + o.Tags = func() pgtypes.HStore { return m.Tags } + o.Version = func() int32 { return m.Version } + + ctx := context.Background() + if m.R.Address != nil { + SiteMods.WithExistingAddress(m.R.Address).Apply(ctx, o) + } + if m.R.CreatorUser != nil { + SiteMods.WithExistingCreatorUser(m.R.CreatorUser).Apply(ctx, o) + } + if m.R.File != nil { + SiteMods.WithExistingFile(m.R.File).Apply(ctx, o) + } + + return o +} + func (f *Factory) NewSpatialRefSy(mods ...SpatialRefSyMod) *SpatialRefSyTemplate { return f.NewSpatialRefSyWithContext(context.Background(), mods...) 
} @@ -3815,8 +4154,8 @@ func (f *Factory) FromExistingUser(m *models.User) *UserTemplate { if len(m.R.CreatorFiles) > 0 { UserMods.AddExistingCreatorFiles(m.R.CreatorFiles...).Apply(ctx, o) } - if len(m.R.CreatorPools) > 0 { - UserMods.AddExistingCreatorPools(m.R.CreatorPools...).Apply(ctx, o) + if len(m.R.FileuploadPool) > 0 { + UserMods.AddExistingFileuploadPool(m.R.FileuploadPool...).Apply(ctx, o) } if len(m.R.CreatorNoteAudios) > 0 { UserMods.AddExistingCreatorNoteAudios(m.R.CreatorNoteAudios...).Apply(ctx, o) @@ -3836,6 +4175,12 @@ func (f *Factory) FromExistingUser(m *models.User) *UserTemplate { if len(m.R.UserOauthTokens) > 0 { UserMods.AddExistingUserOauthTokens(m.R.UserOauthTokens...).Apply(ctx, o) } + if len(m.R.CreatorPools) > 0 { + UserMods.AddExistingCreatorPools(m.R.CreatorPools...).Apply(ctx, o) + } + if len(m.R.CreatorSites) > 0 { + UserMods.AddExistingCreatorSites(m.R.CreatorSites...).Apply(ctx, o) + } if m.R.Organization != nil { UserMods.WithExistingOrganization(m.R.Organization).Apply(ctx, o) } @@ -3843,6 +4188,54 @@ func (f *Factory) FromExistingUser(m *models.User) *UserTemplate { return o } +func (f *Factory) ClearBaseAddressMods() { + f.baseAddressMods = nil +} + +func (f *Factory) AddBaseAddressMod(mods ...AddressMod) { + f.baseAddressMods = append(f.baseAddressMods, mods...) +} + +func (f *Factory) ClearBaseArcgisAddressMappingMods() { + f.baseArcgisAddressMappingMods = nil +} + +func (f *Factory) AddBaseArcgisAddressMappingMod(mods ...ArcgisAddressMappingMod) { + f.baseArcgisAddressMappingMods = append(f.baseArcgisAddressMappingMods, mods...) +} + +func (f *Factory) ClearBaseArcgisFeatureServiceMods() { + f.baseArcgisFeatureServiceMods = nil +} + +func (f *Factory) AddBaseArcgisFeatureServiceMod(mods ...ArcgisFeatureServiceMod) { + f.baseArcgisFeatureServiceMods = append(f.baseArcgisFeatureServiceMods, mods...) 
+} + +func (f *Factory) ClearBaseArcgisLayerMods() { + f.baseArcgisLayerMods = nil +} + +func (f *Factory) AddBaseArcgisLayerMod(mods ...ArcgisLayerMod) { + f.baseArcgisLayerMods = append(f.baseArcgisLayerMods, mods...) +} + +func (f *Factory) ClearBaseArcgisLayerFieldMods() { + f.baseArcgisLayerFieldMods = nil +} + +func (f *Factory) AddBaseArcgisLayerFieldMod(mods ...ArcgisLayerFieldMod) { + f.baseArcgisLayerFieldMods = append(f.baseArcgisLayerFieldMods, mods...) +} + +func (f *Factory) ClearBaseArcgisParcelMappingMods() { + f.baseArcgisParcelMappingMods = nil +} + +func (f *Factory) AddBaseArcgisParcelMappingMod(mods ...ArcgisParcelMappingMod) { + f.baseArcgisParcelMappingMods = append(f.baseArcgisParcelMappingMods, mods...) +} + func (f *Factory) ClearBaseArcgisUserMods() { f.baseArcgisUserMods = nil } @@ -4291,6 +4684,22 @@ func (f *Factory) AddBaseOrganizationMod(mods ...OrganizationMod) { f.baseOrganizationMods = append(f.baseOrganizationMods, mods...) } +func (f *Factory) ClearBaseParcelMods() { + f.baseParcelMods = nil +} + +func (f *Factory) AddBaseParcelMod(mods ...ParcelMod) { + f.baseParcelMods = append(f.baseParcelMods, mods...) +} + +func (f *Factory) ClearBasePoolMods() { + f.basePoolMods = nil +} + +func (f *Factory) AddBasePoolMod(mods ...PoolMod) { + f.basePoolMods = append(f.basePoolMods, mods...) +} + func (f *Factory) ClearBasePublicreportImageMods() { f.basePublicreportImageMods = nil } @@ -4435,6 +4844,14 @@ func (f *Factory) AddBaseSessionMod(mods ...SessionMod) { f.baseSessionMods = append(f.baseSessionMods, mods...) } +func (f *Factory) ClearBaseSiteMods() { + f.baseSiteMods = nil +} + +func (f *Factory) AddBaseSiteMod(mods ...SiteMod) { + f.baseSiteMods = append(f.baseSiteMods, mods...) 
+} + func (f *Factory) ClearBaseSpatialRefSyMods() { f.baseSpatialRefSyMods = nil } diff --git a/db/factory/bobfactory_random.bob.go b/db/factory/bobfactory_random.bob.go index 7fe8364a..427dbbe6 100644 --- a/db/factory/bobfactory_random.bob.go +++ b/db/factory/bobfactory_random.bob.go @@ -71,6 +71,36 @@ func random_decimal_Decimal(f *faker.Faker, limits ...string) decimal.Decimal { return val } +func random_enums_ArcgisFieldtype(f *faker.Faker, limits ...string) enums.ArcgisFieldtype { + if f == nil { + f = &defaultFaker + } + + var e enums.ArcgisFieldtype + all := e.All() + return all[f.IntBetween(0, len(all)-1)] +} + +func random_enums_ArcgisMappingdestinationaddress(f *faker.Faker, limits ...string) enums.ArcgisMappingdestinationaddress { + if f == nil { + f = &defaultFaker + } + + var e enums.ArcgisMappingdestinationaddress + all := e.All() + return all[f.IntBetween(0, len(all)-1)] +} + +func random_enums_ArcgisMappingdestinationparcel(f *faker.Faker, limits ...string) enums.ArcgisMappingdestinationparcel { + if f == nil { + f = &defaultFaker + } + + var e enums.ArcgisMappingdestinationparcel + all := e.All() + return all[f.IntBetween(0, len(all)-1)] +} + func random_enums_Arcgislicensetype(f *faker.Faker, limits ...string) enums.Arcgislicensetype { if f == nil { f = &defaultFaker @@ -141,6 +171,16 @@ func random_enums_CommsTextorigin(f *faker.Faker, limits ...string) enums.CommsT return all[f.IntBetween(0, len(all)-1)] } +func random_enums_Countrytype(f *faker.Faker, limits ...string) enums.Countrytype { + if f == nil { + f = &defaultFaker + } + + var e enums.Countrytype + all := e.All() + return all[f.IntBetween(0, len(all)-1)] +} + func random_enums_FileuploadCsvtype(f *faker.Faker, limits ...string) enums.FileuploadCsvtype { if f == nil { f = &defaultFaker @@ -201,6 +241,16 @@ func random_enums_Notificationtype(f *faker.Faker, limits ...string) enums.Notif return all[f.IntBetween(0, len(all)-1)] } +func random_enums_Poolconditiontype(f *faker.Faker, limits 
...string) enums.Poolconditiontype { + if f == nil { + f = &defaultFaker + } + + var e enums.Poolconditiontype + all := e.All() + return all[f.IntBetween(0, len(all)-1)] +} + func random_enums_PublicreportAccuracytype(f *faker.Faker, limits ...string) enums.PublicreportAccuracytype { if f == nil { f = &defaultFaker diff --git a/db/factory/fileupload.file.bob.go b/db/factory/fileupload.file.bob.go index 1c25151c..e0950498 100644 --- a/db/factory/fileupload.file.bob.go +++ b/db/factory/fileupload.file.bob.go @@ -61,6 +61,7 @@ type fileuploadFileR struct { ErrorFiles []*fileuploadFileRErrorFilesR CreatorUser *fileuploadFileRCreatorUserR Organization *fileuploadFileROrganizationR + Sites []*fileuploadFileRSitesR } type fileuploadFileRCSVR struct { @@ -76,6 +77,10 @@ type fileuploadFileRCreatorUserR struct { type fileuploadFileROrganizationR struct { o *OrganizationTemplate } +type fileuploadFileRSitesR struct { + number int + o *SiteTemplate +} // Apply mods to the FileuploadFileTemplate func (o *FileuploadFileTemplate) Apply(ctx context.Context, mods ...FileuploadFileMod) { @@ -120,6 +125,19 @@ func (t FileuploadFileTemplate) setModelRels(o *models.FileuploadFile) { o.OrganizationID = rel.ID // h2 o.R.Organization = rel } + + if t.r.Sites != nil { + rel := models.SiteSlice{} + for _, r := range t.r.Sites { + related := r.o.BuildMany(r.number) + for _, rel := range related { + rel.FileID = null.From(o.ID) // h2 + rel.R.File = o + } + rel = append(rel, related...) 
+ } + o.R.Sites = rel + } } // BuildSetter returns an *models.FileuploadFileSetter @@ -318,6 +336,26 @@ func (o *FileuploadFileTemplate) insertOptRels(ctx context.Context, exec bob.Exe } } + isSitesDone, _ := fileuploadFileRelSitesCtx.Value(ctx) + if !isSitesDone && o.r.Sites != nil { + ctx = fileuploadFileRelSitesCtx.WithValue(ctx, true) + for _, r := range o.r.Sites { + if r.o.alreadyPersisted { + m.R.Sites = append(m.R.Sites, r.o.Build()) + } else { + rel4, err := r.o.CreateMany(ctx, exec, r.number) + if err != nil { + return err + } + + err = m.AttachSites(ctx, exec, rel4...) + if err != nil { + return err + } + } + } + } + return err } @@ -953,3 +991,51 @@ func (m fileuploadFileMods) WithoutErrorFiles() FileuploadFileMod { o.r.ErrorFiles = nil }) } + +func (m fileuploadFileMods) WithSites(number int, related *SiteTemplate) FileuploadFileMod { + return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) { + o.r.Sites = []*fileuploadFileRSitesR{{ + number: number, + o: related, + }} + }) +} + +func (m fileuploadFileMods) WithNewSites(number int, mods ...SiteMod) FileuploadFileMod { + return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) { + related := o.f.NewSiteWithContext(ctx, mods...) + m.WithSites(number, related).Apply(ctx, o) + }) +} + +func (m fileuploadFileMods) AddSites(number int, related *SiteTemplate) FileuploadFileMod { + return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) { + o.r.Sites = append(o.r.Sites, &fileuploadFileRSitesR{ + number: number, + o: related, + }) + }) +} + +func (m fileuploadFileMods) AddNewSites(number int, mods ...SiteMod) FileuploadFileMod { + return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) { + related := o.f.NewSiteWithContext(ctx, mods...) 
+ m.AddSites(number, related).Apply(ctx, o) + }) +} + +func (m fileuploadFileMods) AddExistingSites(existingModels ...*models.Site) FileuploadFileMod { + return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) { + for _, em := range existingModels { + o.r.Sites = append(o.r.Sites, &fileuploadFileRSitesR{ + o: o.f.FromExistingSite(em), + }) + } + }) +} + +func (m fileuploadFileMods) WithoutSites() FileuploadFileMod { + return FileuploadFileModFunc(func(ctx context.Context, o *FileuploadFileTemplate) { + o.r.Sites = nil + }) +} diff --git a/db/factory/fileupload.pool.bob.go b/db/factory/fileupload.pool.bob.go index 021d4806..84aa53e6 100644 --- a/db/factory/fileupload.pool.bob.go +++ b/db/factory/fileupload.pool.bob.go @@ -104,7 +104,7 @@ func (o *FileuploadPoolTemplate) Apply(ctx context.Context, mods ...FileuploadPo func (t FileuploadPoolTemplate) setModelRels(o *models.FileuploadPool) { if t.r.CreatorUser != nil { rel := t.r.CreatorUser.o.Build() - rel.R.CreatorPools = append(rel.R.CreatorPools, o) + rel.R.FileuploadPool = append(rel.R.FileuploadPool, o) o.CreatorID = rel.ID // h2 o.R.CreatorUser = rel } diff --git a/db/factory/organization.bob.go b/db/factory/organization.bob.go index 865382c8..2e40bcda 100644 --- a/db/factory/organization.bob.go +++ b/db/factory/organization.bob.go @@ -75,6 +75,8 @@ type OrganizationTemplate struct { } type organizationR struct { + AddressMappings []*organizationRAddressMappingsR + ParcelMappings []*organizationRParcelMappingsR EmailContacts []*organizationREmailContactsR Phones []*organizationRPhonesR Containerrelates []*organizationRContainerrelatesR @@ -116,6 +118,14 @@ type organizationR struct { User []*organizationRUserR } +type organizationRAddressMappingsR struct { + number int + o *ArcgisAddressMappingTemplate +} +type organizationRParcelMappingsR struct { + number int + o *ArcgisParcelMappingTemplate +} type organizationREmailContactsR struct { number int o *CommsEmailContactTemplate @@ -283,6 
+293,32 @@ func (o *OrganizationTemplate) Apply(ctx context.Context, mods ...OrganizationMo // setModelRels creates and sets the relationships on *models.Organization // according to the relationships in the template. Nothing is inserted into the db func (t OrganizationTemplate) setModelRels(o *models.Organization) { + if t.r.AddressMappings != nil { + rel := models.ArcgisAddressMappingSlice{} + for _, r := range t.r.AddressMappings { + related := r.o.BuildMany(r.number) + for _, rel := range related { + rel.OrganizationID = o.ID // h2 + rel.R.Organization = o + } + rel = append(rel, related...) + } + o.R.AddressMappings = rel + } + + if t.r.ParcelMappings != nil { + rel := models.ArcgisParcelMappingSlice{} + for _, r := range t.r.ParcelMappings { + related := r.o.BuildMany(r.number) + for _, rel := range related { + rel.OrganizationID = o.ID // h2 + rel.R.Organization = o + } + rel = append(rel, related...) + } + o.R.ParcelMappings = rel + } + if t.r.EmailContacts != nil { rel := models.CommsEmailContactSlice{} for _, r := range t.r.EmailContacts { @@ -1011,6 +1047,46 @@ func ensureCreatableOrganization(m *models.OrganizationSetter) { func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Executor, m *models.Organization) error { var err error + isAddressMappingsDone, _ := organizationRelAddressMappingsCtx.Value(ctx) + if !isAddressMappingsDone && o.r.AddressMappings != nil { + ctx = organizationRelAddressMappingsCtx.WithValue(ctx, true) + for _, r := range o.r.AddressMappings { + if r.o.alreadyPersisted { + m.R.AddressMappings = append(m.R.AddressMappings, r.o.Build()) + } else { + rel0, err := r.o.CreateMany(ctx, exec, r.number) + if err != nil { + return err + } + + err = m.AttachAddressMappings(ctx, exec, rel0...) 
+ if err != nil { + return err + } + } + } + } + + isParcelMappingsDone, _ := organizationRelParcelMappingsCtx.Value(ctx) + if !isParcelMappingsDone && o.r.ParcelMappings != nil { + ctx = organizationRelParcelMappingsCtx.WithValue(ctx, true) + for _, r := range o.r.ParcelMappings { + if r.o.alreadyPersisted { + m.R.ParcelMappings = append(m.R.ParcelMappings, r.o.Build()) + } else { + rel1, err := r.o.CreateMany(ctx, exec, r.number) + if err != nil { + return err + } + + err = m.AttachParcelMappings(ctx, exec, rel1...) + if err != nil { + return err + } + } + } + } + isEmailContactsDone, _ := organizationRelEmailContactsCtx.Value(ctx) if !isEmailContactsDone && o.r.EmailContacts != nil { ctx = organizationRelEmailContactsCtx.WithValue(ctx, true) @@ -1018,12 +1094,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.EmailContacts = append(m.R.EmailContacts, r.o.Build()) } else { - rel0, err := r.o.CreateMany(ctx, exec, r.number) + rel2, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachEmailContacts(ctx, exec, rel0...) + err = m.AttachEmailContacts(ctx, exec, rel2...) if err != nil { return err } @@ -1038,12 +1114,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.Phones = append(m.R.Phones, r.o.Build()) } else { - rel1, err := r.o.CreateMany(ctx, exec, r.number) + rel3, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachPhones(ctx, exec, rel1...) + err = m.AttachPhones(ctx, exec, rel3...) 
if err != nil { return err } @@ -1058,12 +1134,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.Containerrelates = append(m.R.Containerrelates, r.o.Build()) } else { - rel2, err := r.o.CreateMany(ctx, exec, r.number) + rel4, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachContainerrelates(ctx, exec, rel2...) + err = m.AttachContainerrelates(ctx, exec, rel4...) if err != nil { return err } @@ -1078,12 +1154,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.Fieldscoutinglogs = append(m.R.Fieldscoutinglogs, r.o.Build()) } else { - rel3, err := r.o.CreateMany(ctx, exec, r.number) + rel5, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachFieldscoutinglogs(ctx, exec, rel3...) + err = m.AttachFieldscoutinglogs(ctx, exec, rel5...) if err != nil { return err } @@ -1098,12 +1174,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.Habitatrelates = append(m.R.Habitatrelates, r.o.Build()) } else { - rel4, err := r.o.CreateMany(ctx, exec, r.number) + rel6, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachHabitatrelates(ctx, exec, rel4...) + err = m.AttachHabitatrelates(ctx, exec, rel6...) if err != nil { return err } @@ -1118,12 +1194,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.Inspectionsamples = append(m.R.Inspectionsamples, r.o.Build()) } else { - rel5, err := r.o.CreateMany(ctx, exec, r.number) + rel7, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachInspectionsamples(ctx, exec, rel5...) + err = m.AttachInspectionsamples(ctx, exec, rel7...) 
if err != nil { return err } @@ -1138,12 +1214,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.Inspectionsampledetails = append(m.R.Inspectionsampledetails, r.o.Build()) } else { - rel6, err := r.o.CreateMany(ctx, exec, r.number) + rel8, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachInspectionsampledetails(ctx, exec, rel6...) + err = m.AttachInspectionsampledetails(ctx, exec, rel8...) if err != nil { return err } @@ -1158,12 +1234,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.Linelocations = append(m.R.Linelocations, r.o.Build()) } else { - rel7, err := r.o.CreateMany(ctx, exec, r.number) + rel9, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachLinelocations(ctx, exec, rel7...) + err = m.AttachLinelocations(ctx, exec, rel9...) if err != nil { return err } @@ -1178,12 +1254,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.Locationtrackings = append(m.R.Locationtrackings, r.o.Build()) } else { - rel8, err := r.o.CreateMany(ctx, exec, r.number) + rel10, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachLocationtrackings(ctx, exec, rel8...) + err = m.AttachLocationtrackings(ctx, exec, rel10...) if err != nil { return err } @@ -1198,12 +1274,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.Mosquitoinspections = append(m.R.Mosquitoinspections, r.o.Build()) } else { - rel9, err := r.o.CreateMany(ctx, exec, r.number) + rel11, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachMosquitoinspections(ctx, exec, rel9...) + err = m.AttachMosquitoinspections(ctx, exec, rel11...) 
if err != nil { return err } @@ -1218,12 +1294,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.Pointlocations = append(m.R.Pointlocations, r.o.Build()) } else { - rel10, err := r.o.CreateMany(ctx, exec, r.number) + rel12, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachPointlocations(ctx, exec, rel10...) + err = m.AttachPointlocations(ctx, exec, rel12...) if err != nil { return err } @@ -1238,12 +1314,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.Polygonlocations = append(m.R.Polygonlocations, r.o.Build()) } else { - rel11, err := r.o.CreateMany(ctx, exec, r.number) + rel13, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachPolygonlocations(ctx, exec, rel11...) + err = m.AttachPolygonlocations(ctx, exec, rel13...) if err != nil { return err } @@ -1258,12 +1334,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.FieldseekerPool = append(m.R.FieldseekerPool, r.o.Build()) } else { - rel12, err := r.o.CreateMany(ctx, exec, r.number) + rel14, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachFieldseekerPool(ctx, exec, rel12...) + err = m.AttachFieldseekerPool(ctx, exec, rel14...) if err != nil { return err } @@ -1278,12 +1354,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.Pooldetails = append(m.R.Pooldetails, r.o.Build()) } else { - rel13, err := r.o.CreateMany(ctx, exec, r.number) + rel15, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachPooldetails(ctx, exec, rel13...) + err = m.AttachPooldetails(ctx, exec, rel15...) 
if err != nil { return err } @@ -1298,12 +1374,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.Proposedtreatmentareas = append(m.R.Proposedtreatmentareas, r.o.Build()) } else { - rel14, err := r.o.CreateMany(ctx, exec, r.number) + rel16, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachProposedtreatmentareas(ctx, exec, rel14...) + err = m.AttachProposedtreatmentareas(ctx, exec, rel16...) if err != nil { return err } @@ -1318,12 +1394,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.Qamosquitoinspections = append(m.R.Qamosquitoinspections, r.o.Build()) } else { - rel15, err := r.o.CreateMany(ctx, exec, r.number) + rel17, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachQamosquitoinspections(ctx, exec, rel15...) + err = m.AttachQamosquitoinspections(ctx, exec, rel17...) if err != nil { return err } @@ -1338,12 +1414,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.Rodentlocations = append(m.R.Rodentlocations, r.o.Build()) } else { - rel16, err := r.o.CreateMany(ctx, exec, r.number) + rel18, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachRodentlocations(ctx, exec, rel16...) + err = m.AttachRodentlocations(ctx, exec, rel18...) if err != nil { return err } @@ -1358,12 +1434,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.Samplecollections = append(m.R.Samplecollections, r.o.Build()) } else { - rel17, err := r.o.CreateMany(ctx, exec, r.number) + rel19, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachSamplecollections(ctx, exec, rel17...) + err = m.AttachSamplecollections(ctx, exec, rel19...) 
if err != nil { return err } @@ -1378,12 +1454,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.Samplelocations = append(m.R.Samplelocations, r.o.Build()) } else { - rel18, err := r.o.CreateMany(ctx, exec, r.number) + rel20, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachSamplelocations(ctx, exec, rel18...) + err = m.AttachSamplelocations(ctx, exec, rel20...) if err != nil { return err } @@ -1398,12 +1474,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.Servicerequests = append(m.R.Servicerequests, r.o.Build()) } else { - rel19, err := r.o.CreateMany(ctx, exec, r.number) + rel21, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachServicerequests(ctx, exec, rel19...) + err = m.AttachServicerequests(ctx, exec, rel21...) if err != nil { return err } @@ -1418,12 +1494,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.Speciesabundances = append(m.R.Speciesabundances, r.o.Build()) } else { - rel20, err := r.o.CreateMany(ctx, exec, r.number) + rel22, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachSpeciesabundances(ctx, exec, rel20...) + err = m.AttachSpeciesabundances(ctx, exec, rel22...) if err != nil { return err } @@ -1438,12 +1514,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.Stormdrains = append(m.R.Stormdrains, r.o.Build()) } else { - rel21, err := r.o.CreateMany(ctx, exec, r.number) + rel23, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachStormdrains(ctx, exec, rel21...) + err = m.AttachStormdrains(ctx, exec, rel23...) 
if err != nil { return err } @@ -1458,12 +1534,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.Timecards = append(m.R.Timecards, r.o.Build()) } else { - rel22, err := r.o.CreateMany(ctx, exec, r.number) + rel24, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachTimecards(ctx, exec, rel22...) + err = m.AttachTimecards(ctx, exec, rel24...) if err != nil { return err } @@ -1478,12 +1554,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.Trapdata = append(m.R.Trapdata, r.o.Build()) } else { - rel23, err := r.o.CreateMany(ctx, exec, r.number) + rel25, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachTrapdata(ctx, exec, rel23...) + err = m.AttachTrapdata(ctx, exec, rel25...) if err != nil { return err } @@ -1498,12 +1574,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.Traplocations = append(m.R.Traplocations, r.o.Build()) } else { - rel24, err := r.o.CreateMany(ctx, exec, r.number) + rel26, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachTraplocations(ctx, exec, rel24...) + err = m.AttachTraplocations(ctx, exec, rel26...) if err != nil { return err } @@ -1518,12 +1594,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.Treatments = append(m.R.Treatments, r.o.Build()) } else { - rel25, err := r.o.CreateMany(ctx, exec, r.number) + rel27, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachTreatments(ctx, exec, rel25...) + err = m.AttachTreatments(ctx, exec, rel27...) 
if err != nil { return err } @@ -1538,12 +1614,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.Treatmentareas = append(m.R.Treatmentareas, r.o.Build()) } else { - rel26, err := r.o.CreateMany(ctx, exec, r.number) + rel28, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachTreatmentareas(ctx, exec, rel26...) + err = m.AttachTreatmentareas(ctx, exec, rel28...) if err != nil { return err } @@ -1558,12 +1634,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.Zones = append(m.R.Zones, r.o.Build()) } else { - rel27, err := r.o.CreateMany(ctx, exec, r.number) + rel29, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachZones(ctx, exec, rel27...) + err = m.AttachZones(ctx, exec, rel29...) if err != nil { return err } @@ -1578,12 +1654,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.Zones2s = append(m.R.Zones2s, r.o.Build()) } else { - rel28, err := r.o.CreateMany(ctx, exec, r.number) + rel30, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachZones2s(ctx, exec, rel28...) + err = m.AttachZones2s(ctx, exec, rel30...) if err != nil { return err } @@ -1598,12 +1674,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.FieldseekerSyncs = append(m.R.FieldseekerSyncs, r.o.Build()) } else { - rel29, err := r.o.CreateMany(ctx, exec, r.number) + rel31, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachFieldseekerSyncs(ctx, exec, rel29...) + err = m.AttachFieldseekerSyncs(ctx, exec, rel31...) 
if err != nil { return err } @@ -1618,12 +1694,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.Files = append(m.R.Files, r.o.Build()) } else { - rel30, err := r.o.CreateMany(ctx, exec, r.number) + rel32, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachFiles(ctx, exec, rel30...) + err = m.AttachFiles(ctx, exec, rel32...) if err != nil { return err } @@ -1638,12 +1714,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.Pools = append(m.R.Pools, r.o.Build()) } else { - rel31, err := r.o.CreateMany(ctx, exec, r.number) + rel33, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachPools(ctx, exec, rel31...) + err = m.AttachPools(ctx, exec, rel33...) if err != nil { return err } @@ -1658,12 +1734,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.H3Aggregations = append(m.R.H3Aggregations, r.o.Build()) } else { - rel32, err := r.o.CreateMany(ctx, exec, r.number) + rel34, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachH3Aggregations(ctx, exec, rel32...) + err = m.AttachH3Aggregations(ctx, exec, rel34...) if err != nil { return err } @@ -1678,12 +1754,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.NoteAudios = append(m.R.NoteAudios, r.o.Build()) } else { - rel33, err := r.o.CreateMany(ctx, exec, r.number) + rel35, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachNoteAudios(ctx, exec, rel33...) + err = m.AttachNoteAudios(ctx, exec, rel35...) 
if err != nil { return err } @@ -1698,12 +1774,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.NoteImages = append(m.R.NoteImages, r.o.Build()) } else { - rel34, err := r.o.CreateMany(ctx, exec, r.number) + rel36, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachNoteImages(ctx, exec, rel34...) + err = m.AttachNoteImages(ctx, exec, rel36...) if err != nil { return err } @@ -1718,12 +1794,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.Nuisances = append(m.R.Nuisances, r.o.Build()) } else { - rel35, err := r.o.CreateMany(ctx, exec, r.number) + rel37, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachNuisances(ctx, exec, rel35...) + err = m.AttachNuisances(ctx, exec, rel37...) if err != nil { return err } @@ -1738,12 +1814,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.PublicreportPool = append(m.R.PublicreportPool, r.o.Build()) } else { - rel36, err := r.o.CreateMany(ctx, exec, r.number) + rel38, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachPublicreportPool(ctx, exec, rel36...) + err = m.AttachPublicreportPool(ctx, exec, rel38...) if err != nil { return err } @@ -1758,12 +1834,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.Quicks = append(m.R.Quicks, r.o.Build()) } else { - rel37, err := r.o.CreateMany(ctx, exec, r.number) + rel39, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachQuicks(ctx, exec, rel37...) + err = m.AttachQuicks(ctx, exec, rel39...) 
if err != nil { return err } @@ -1778,12 +1854,12 @@ func (o *OrganizationTemplate) insertOptRels(ctx context.Context, exec bob.Execu if r.o.alreadyPersisted { m.R.User = append(m.R.User, r.o.Build()) } else { - rel38, err := r.o.CreateMany(ctx, exec, r.number) + rel40, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachUser(ctx, exec, rel38...) + err = m.AttachUser(ctx, exec, rel40...) if err != nil { return err } @@ -3417,6 +3493,102 @@ func (m organizationMods) WithParentsCascading() OrganizationMod { }) } +func (m organizationMods) WithAddressMappings(number int, related *ArcgisAddressMappingTemplate) OrganizationMod { + return OrganizationModFunc(func(ctx context.Context, o *OrganizationTemplate) { + o.r.AddressMappings = []*organizationRAddressMappingsR{{ + number: number, + o: related, + }} + }) +} + +func (m organizationMods) WithNewAddressMappings(number int, mods ...ArcgisAddressMappingMod) OrganizationMod { + return OrganizationModFunc(func(ctx context.Context, o *OrganizationTemplate) { + related := o.f.NewArcgisAddressMappingWithContext(ctx, mods...) + m.WithAddressMappings(number, related).Apply(ctx, o) + }) +} + +func (m organizationMods) AddAddressMappings(number int, related *ArcgisAddressMappingTemplate) OrganizationMod { + return OrganizationModFunc(func(ctx context.Context, o *OrganizationTemplate) { + o.r.AddressMappings = append(o.r.AddressMappings, &organizationRAddressMappingsR{ + number: number, + o: related, + }) + }) +} + +func (m organizationMods) AddNewAddressMappings(number int, mods ...ArcgisAddressMappingMod) OrganizationMod { + return OrganizationModFunc(func(ctx context.Context, o *OrganizationTemplate) { + related := o.f.NewArcgisAddressMappingWithContext(ctx, mods...) 
+ m.AddAddressMappings(number, related).Apply(ctx, o) + }) +} + +func (m organizationMods) AddExistingAddressMappings(existingModels ...*models.ArcgisAddressMapping) OrganizationMod { + return OrganizationModFunc(func(ctx context.Context, o *OrganizationTemplate) { + for _, em := range existingModels { + o.r.AddressMappings = append(o.r.AddressMappings, &organizationRAddressMappingsR{ + o: o.f.FromExistingArcgisAddressMapping(em), + }) + } + }) +} + +func (m organizationMods) WithoutAddressMappings() OrganizationMod { + return OrganizationModFunc(func(ctx context.Context, o *OrganizationTemplate) { + o.r.AddressMappings = nil + }) +} + +func (m organizationMods) WithParcelMappings(number int, related *ArcgisParcelMappingTemplate) OrganizationMod { + return OrganizationModFunc(func(ctx context.Context, o *OrganizationTemplate) { + o.r.ParcelMappings = []*organizationRParcelMappingsR{{ + number: number, + o: related, + }} + }) +} + +func (m organizationMods) WithNewParcelMappings(number int, mods ...ArcgisParcelMappingMod) OrganizationMod { + return OrganizationModFunc(func(ctx context.Context, o *OrganizationTemplate) { + related := o.f.NewArcgisParcelMappingWithContext(ctx, mods...) + m.WithParcelMappings(number, related).Apply(ctx, o) + }) +} + +func (m organizationMods) AddParcelMappings(number int, related *ArcgisParcelMappingTemplate) OrganizationMod { + return OrganizationModFunc(func(ctx context.Context, o *OrganizationTemplate) { + o.r.ParcelMappings = append(o.r.ParcelMappings, &organizationRParcelMappingsR{ + number: number, + o: related, + }) + }) +} + +func (m organizationMods) AddNewParcelMappings(number int, mods ...ArcgisParcelMappingMod) OrganizationMod { + return OrganizationModFunc(func(ctx context.Context, o *OrganizationTemplate) { + related := o.f.NewArcgisParcelMappingWithContext(ctx, mods...) 
+ m.AddParcelMappings(number, related).Apply(ctx, o) + }) +} + +func (m organizationMods) AddExistingParcelMappings(existingModels ...*models.ArcgisParcelMapping) OrganizationMod { + return OrganizationModFunc(func(ctx context.Context, o *OrganizationTemplate) { + for _, em := range existingModels { + o.r.ParcelMappings = append(o.r.ParcelMappings, &organizationRParcelMappingsR{ + o: o.f.FromExistingArcgisParcelMapping(em), + }) + } + }) +} + +func (m organizationMods) WithoutParcelMappings() OrganizationMod { + return OrganizationModFunc(func(ctx context.Context, o *OrganizationTemplate) { + o.r.ParcelMappings = nil + }) +} + func (m organizationMods) WithEmailContacts(number int, related *CommsEmailContactTemplate) OrganizationMod { return OrganizationModFunc(func(ctx context.Context, o *OrganizationTemplate) { o.r.EmailContacts = []*organizationREmailContactsR{{ diff --git a/db/factory/parcel.bob.go b/db/factory/parcel.bob.go new file mode 100644 index 00000000..92e0b050 --- /dev/null +++ b/db/factory/parcel.bob.go @@ -0,0 +1,383 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package factory + +import ( + "context" + "testing" + + "github.com/Gleipnir-Technology/bob" + models "github.com/Gleipnir-Technology/nidus-sync/db/models" + "github.com/aarondl/opt/omit" + "github.com/jaswdr/faker/v2" +) + +type ParcelMod interface { + Apply(context.Context, *ParcelTemplate) +} + +type ParcelModFunc func(context.Context, *ParcelTemplate) + +func (f ParcelModFunc) Apply(ctx context.Context, n *ParcelTemplate) { + f(ctx, n) +} + +type ParcelModSlice []ParcelMod + +func (mods ParcelModSlice) Apply(ctx context.Context, n *ParcelTemplate) { + for _, f := range mods { + f.Apply(ctx, n) + } +} + +// ParcelTemplate is an object representing the database table. 
+// all columns are optional and should be set by mods +type ParcelTemplate struct { + Apn func() string + Description func() string + ID func() int32 + Geometry func() string + + f *Factory + + alreadyPersisted bool +} + +// Apply mods to the ParcelTemplate +func (o *ParcelTemplate) Apply(ctx context.Context, mods ...ParcelMod) { + for _, mod := range mods { + mod.Apply(ctx, o) + } +} + +// setModelRels creates and sets the relationships on *models.Parcel +// according to the relationships in the template. Nothing is inserted into the db +func (t ParcelTemplate) setModelRels(o *models.Parcel) {} + +// BuildSetter returns an *models.ParcelSetter +// this does nothing with the relationship templates +func (o ParcelTemplate) BuildSetter() *models.ParcelSetter { + m := &models.ParcelSetter{} + + if o.Apn != nil { + val := o.Apn() + m.Apn = omit.From(val) + } + if o.Description != nil { + val := o.Description() + m.Description = omit.From(val) + } + if o.ID != nil { + val := o.ID() + m.ID = omit.From(val) + } + if o.Geometry != nil { + val := o.Geometry() + m.Geometry = omit.From(val) + } + + return m +} + +// BuildManySetter returns an []*models.ParcelSetter +// this does nothing with the relationship templates +func (o ParcelTemplate) BuildManySetter(number int) []*models.ParcelSetter { + m := make([]*models.ParcelSetter, number) + + for i := range m { + m[i] = o.BuildSetter() + } + + return m +} + +// Build returns an *models.Parcel +// Related objects are also created and placed in the .R field +// NOTE: Objects are not inserted into the database. 
Use ParcelTemplate.Create +func (o ParcelTemplate) Build() *models.Parcel { + m := &models.Parcel{} + + if o.Apn != nil { + m.Apn = o.Apn() + } + if o.Description != nil { + m.Description = o.Description() + } + if o.ID != nil { + m.ID = o.ID() + } + if o.Geometry != nil { + m.Geometry = o.Geometry() + } + + o.setModelRels(m) + + return m +} + +// BuildMany returns an models.ParcelSlice +// Related objects are also created and placed in the .R field +// NOTE: Objects are not inserted into the database. Use ParcelTemplate.CreateMany +func (o ParcelTemplate) BuildMany(number int) models.ParcelSlice { + m := make(models.ParcelSlice, number) + + for i := range m { + m[i] = o.Build() + } + + return m +} + +func ensureCreatableParcel(m *models.ParcelSetter) { + if !(m.Apn.IsValue()) { + val := random_string(nil) + m.Apn = omit.From(val) + } + if !(m.Description.IsValue()) { + val := random_string(nil) + m.Description = omit.From(val) + } + if !(m.Geometry.IsValue()) { + val := random_string(nil) + m.Geometry = omit.From(val) + } +} + +// insertOptRels creates and inserts any optional the relationships on *models.Parcel +// according to the relationships in the template. 
+// any required relationship should have already exist on the model +func (o *ParcelTemplate) insertOptRels(ctx context.Context, exec bob.Executor, m *models.Parcel) error { + var err error + + return err +} + +// Create builds a parcel and inserts it into the database +// Relations objects are also inserted and placed in the .R field +func (o *ParcelTemplate) Create(ctx context.Context, exec bob.Executor) (*models.Parcel, error) { + var err error + opt := o.BuildSetter() + ensureCreatableParcel(opt) + + m, err := models.Parcels.Insert(opt).One(ctx, exec) + if err != nil { + return nil, err + } + + if err := o.insertOptRels(ctx, exec, m); err != nil { + return nil, err + } + return m, err +} + +// MustCreate builds a parcel and inserts it into the database +// Relations objects are also inserted and placed in the .R field +// panics if an error occurs +func (o *ParcelTemplate) MustCreate(ctx context.Context, exec bob.Executor) *models.Parcel { + m, err := o.Create(ctx, exec) + if err != nil { + panic(err) + } + return m +} + +// CreateOrFail builds a parcel and inserts it into the database +// Relations objects are also inserted and placed in the .R field +// It calls `tb.Fatal(err)` on the test/benchmark if an error occurs +func (o *ParcelTemplate) CreateOrFail(ctx context.Context, tb testing.TB, exec bob.Executor) *models.Parcel { + tb.Helper() + m, err := o.Create(ctx, exec) + if err != nil { + tb.Fatal(err) + return nil + } + return m +} + +// CreateMany builds multiple parcels and inserts them into the database +// Relations objects are also inserted and placed in the .R field +func (o ParcelTemplate) CreateMany(ctx context.Context, exec bob.Executor, number int) (models.ParcelSlice, error) { + var err error + m := make(models.ParcelSlice, number) + + for i := range m { + m[i], err = o.Create(ctx, exec) + if err != nil { + return nil, err + } + } + + return m, nil +} + +// MustCreateMany builds multiple parcels and inserts them into the database +// Relations 
objects are also inserted and placed in the .R field +// panics if an error occurs +func (o ParcelTemplate) MustCreateMany(ctx context.Context, exec bob.Executor, number int) models.ParcelSlice { + m, err := o.CreateMany(ctx, exec, number) + if err != nil { + panic(err) + } + return m +} + +// CreateManyOrFail builds multiple parcels and inserts them into the database +// Relations objects are also inserted and placed in the .R field +// It calls `tb.Fatal(err)` on the test/benchmark if an error occurs +func (o ParcelTemplate) CreateManyOrFail(ctx context.Context, tb testing.TB, exec bob.Executor, number int) models.ParcelSlice { + tb.Helper() + m, err := o.CreateMany(ctx, exec, number) + if err != nil { + tb.Fatal(err) + return nil + } + return m +} + +// Parcel has methods that act as mods for the ParcelTemplate +var ParcelMods parcelMods + +type parcelMods struct{} + +func (m parcelMods) RandomizeAllColumns(f *faker.Faker) ParcelMod { + return ParcelModSlice{ + ParcelMods.RandomApn(f), + ParcelMods.RandomDescription(f), + ParcelMods.RandomID(f), + ParcelMods.RandomGeometry(f), + } +} + +// Set the model columns to this value +func (m parcelMods) Apn(val string) ParcelMod { + return ParcelModFunc(func(_ context.Context, o *ParcelTemplate) { + o.Apn = func() string { return val } + }) +} + +// Set the Column from the function +func (m parcelMods) ApnFunc(f func() string) ParcelMod { + return ParcelModFunc(func(_ context.Context, o *ParcelTemplate) { + o.Apn = f + }) +} + +// Clear any values for the column +func (m parcelMods) UnsetApn() ParcelMod { + return ParcelModFunc(func(_ context.Context, o *ParcelTemplate) { + o.Apn = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m parcelMods) RandomApn(f *faker.Faker) ParcelMod { + return ParcelModFunc(func(_ context.Context, o *ParcelTemplate) { + o.Apn = func() string { + return random_string(f) + } + }) +} + +// Set the model columns 
to this value +func (m parcelMods) Description(val string) ParcelMod { + return ParcelModFunc(func(_ context.Context, o *ParcelTemplate) { + o.Description = func() string { return val } + }) +} + +// Set the Column from the function +func (m parcelMods) DescriptionFunc(f func() string) ParcelMod { + return ParcelModFunc(func(_ context.Context, o *ParcelTemplate) { + o.Description = f + }) +} + +// Clear any values for the column +func (m parcelMods) UnsetDescription() ParcelMod { + return ParcelModFunc(func(_ context.Context, o *ParcelTemplate) { + o.Description = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m parcelMods) RandomDescription(f *faker.Faker) ParcelMod { + return ParcelModFunc(func(_ context.Context, o *ParcelTemplate) { + o.Description = func() string { + return random_string(f) + } + }) +} + +// Set the model columns to this value +func (m parcelMods) ID(val int32) ParcelMod { + return ParcelModFunc(func(_ context.Context, o *ParcelTemplate) { + o.ID = func() int32 { return val } + }) +} + +// Set the Column from the function +func (m parcelMods) IDFunc(f func() int32) ParcelMod { + return ParcelModFunc(func(_ context.Context, o *ParcelTemplate) { + o.ID = f + }) +} + +// Clear any values for the column +func (m parcelMods) UnsetID() ParcelMod { + return ParcelModFunc(func(_ context.Context, o *ParcelTemplate) { + o.ID = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m parcelMods) RandomID(f *faker.Faker) ParcelMod { + return ParcelModFunc(func(_ context.Context, o *ParcelTemplate) { + o.ID = func() int32 { + return random_int32(f) + } + }) +} + +// Set the model columns to this value +func (m parcelMods) Geometry(val string) ParcelMod { + return ParcelModFunc(func(_ context.Context, o *ParcelTemplate) { + o.Geometry = func() string { return val } + }) +} + +// Set the Column from 
the function +func (m parcelMods) GeometryFunc(f func() string) ParcelMod { + return ParcelModFunc(func(_ context.Context, o *ParcelTemplate) { + o.Geometry = f + }) +} + +// Clear any values for the column +func (m parcelMods) UnsetGeometry() ParcelMod { + return ParcelModFunc(func(_ context.Context, o *ParcelTemplate) { + o.Geometry = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m parcelMods) RandomGeometry(f *faker.Faker) ParcelMod { + return ParcelModFunc(func(_ context.Context, o *ParcelTemplate) { + o.Geometry = func() string { + return random_string(f) + } + }) +} + +func (m parcelMods) WithParentsCascading() ParcelMod { + return ParcelModFunc(func(ctx context.Context, o *ParcelTemplate) { + if isDone, _ := parcelWithParentsCascadingCtx.Value(ctx); isDone { + return + } + ctx = parcelWithParentsCascadingCtx.WithValue(ctx, true) + }) +} diff --git a/db/factory/pool.bob.go b/db/factory/pool.bob.go new file mode 100644 index 00000000..667dbb89 --- /dev/null +++ b/db/factory/pool.bob.go @@ -0,0 +1,519 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package factory + +import ( + "context" + "testing" + "time" + + "github.com/Gleipnir-Technology/bob" + enums "github.com/Gleipnir-Technology/nidus-sync/db/enums" + models "github.com/Gleipnir-Technology/nidus-sync/db/models" + "github.com/aarondl/opt/null" + "github.com/aarondl/opt/omit" + "github.com/aarondl/opt/omitnull" + "github.com/jaswdr/faker/v2" +) + +type PoolMod interface { + Apply(context.Context, *PoolTemplate) +} + +type PoolModFunc func(context.Context, *PoolTemplate) + +func (f PoolModFunc) Apply(ctx context.Context, n *PoolTemplate) { + f(ctx, n) +} + +type PoolModSlice []PoolMod + +func (mods PoolModSlice) Apply(ctx context.Context, n *PoolTemplate) { + for _, f := range mods { + f.Apply(ctx, n) + } +} + +// PoolTemplate is an object representing the database table. +// all columns are optional and should be set by mods +type PoolTemplate struct { + Condition func() enums.Poolconditiontype + Created func() time.Time + CreatorID func() int32 + ID func() int32 + SiteID func() null.Val[int32] + + r poolR + f *Factory + + alreadyPersisted bool +} + +type poolR struct { + CreatorUser *poolRCreatorUserR +} + +type poolRCreatorUserR struct { + o *UserTemplate +} + +// Apply mods to the PoolTemplate +func (o *PoolTemplate) Apply(ctx context.Context, mods ...PoolMod) { + for _, mod := range mods { + mod.Apply(ctx, o) + } +} + +// setModelRels creates and sets the relationships on *models.Pool +// according to the relationships in the template. 
Nothing is inserted into the db +func (t PoolTemplate) setModelRels(o *models.Pool) { + if t.r.CreatorUser != nil { + rel := t.r.CreatorUser.o.Build() + rel.R.CreatorPools = append(rel.R.CreatorPools, o) + o.CreatorID = rel.ID // h2 + o.R.CreatorUser = rel + } +} + +// BuildSetter returns an *models.PoolSetter +// this does nothing with the relationship templates +func (o PoolTemplate) BuildSetter() *models.PoolSetter { + m := &models.PoolSetter{} + + if o.Condition != nil { + val := o.Condition() + m.Condition = omit.From(val) + } + if o.Created != nil { + val := o.Created() + m.Created = omit.From(val) + } + if o.CreatorID != nil { + val := o.CreatorID() + m.CreatorID = omit.From(val) + } + if o.ID != nil { + val := o.ID() + m.ID = omit.From(val) + } + if o.SiteID != nil { + val := o.SiteID() + m.SiteID = omitnull.FromNull(val) + } + + return m +} + +// BuildManySetter returns an []*models.PoolSetter +// this does nothing with the relationship templates +func (o PoolTemplate) BuildManySetter(number int) []*models.PoolSetter { + m := make([]*models.PoolSetter, number) + + for i := range m { + m[i] = o.BuildSetter() + } + + return m +} + +// Build returns an *models.Pool +// Related objects are also created and placed in the .R field +// NOTE: Objects are not inserted into the database. Use PoolTemplate.Create +func (o PoolTemplate) Build() *models.Pool { + m := &models.Pool{} + + if o.Condition != nil { + m.Condition = o.Condition() + } + if o.Created != nil { + m.Created = o.Created() + } + if o.CreatorID != nil { + m.CreatorID = o.CreatorID() + } + if o.ID != nil { + m.ID = o.ID() + } + if o.SiteID != nil { + m.SiteID = o.SiteID() + } + + o.setModelRels(m) + + return m +} + +// BuildMany returns an models.PoolSlice +// Related objects are also created and placed in the .R field +// NOTE: Objects are not inserted into the database. 
Use PoolTemplate.CreateMany +func (o PoolTemplate) BuildMany(number int) models.PoolSlice { + m := make(models.PoolSlice, number) + + for i := range m { + m[i] = o.Build() + } + + return m +} + +func ensureCreatablePool(m *models.PoolSetter) { + if !(m.Condition.IsValue()) { + val := random_enums_Poolconditiontype(nil) + m.Condition = omit.From(val) + } + if !(m.Created.IsValue()) { + val := random_time_Time(nil) + m.Created = omit.From(val) + } + if !(m.CreatorID.IsValue()) { + val := random_int32(nil) + m.CreatorID = omit.From(val) + } +} + +// insertOptRels creates and inserts any optional the relationships on *models.Pool +// according to the relationships in the template. +// any required relationship should have already exist on the model +func (o *PoolTemplate) insertOptRels(ctx context.Context, exec bob.Executor, m *models.Pool) error { + var err error + + return err +} + +// Create builds a pool and inserts it into the database +// Relations objects are also inserted and placed in the .R field +func (o *PoolTemplate) Create(ctx context.Context, exec bob.Executor) (*models.Pool, error) { + var err error + opt := o.BuildSetter() + ensureCreatablePool(opt) + + if o.r.CreatorUser == nil { + PoolMods.WithNewCreatorUser().Apply(ctx, o) + } + + var rel0 *models.User + + if o.r.CreatorUser.o.alreadyPersisted { + rel0 = o.r.CreatorUser.o.Build() + } else { + rel0, err = o.r.CreatorUser.o.Create(ctx, exec) + if err != nil { + return nil, err + } + } + + opt.CreatorID = omit.From(rel0.ID) + + m, err := models.Pools.Insert(opt).One(ctx, exec) + if err != nil { + return nil, err + } + + m.R.CreatorUser = rel0 + + if err := o.insertOptRels(ctx, exec, m); err != nil { + return nil, err + } + return m, err +} + +// MustCreate builds a pool and inserts it into the database +// Relations objects are also inserted and placed in the .R field +// panics if an error occurs +func (o *PoolTemplate) MustCreate(ctx context.Context, exec bob.Executor) *models.Pool { + m, err := 
o.Create(ctx, exec) + if err != nil { + panic(err) + } + return m +} + +// CreateOrFail builds a pool and inserts it into the database +// Relations objects are also inserted and placed in the .R field +// It calls `tb.Fatal(err)` on the test/benchmark if an error occurs +func (o *PoolTemplate) CreateOrFail(ctx context.Context, tb testing.TB, exec bob.Executor) *models.Pool { + tb.Helper() + m, err := o.Create(ctx, exec) + if err != nil { + tb.Fatal(err) + return nil + } + return m +} + +// CreateMany builds multiple pools and inserts them into the database +// Relations objects are also inserted and placed in the .R field +func (o PoolTemplate) CreateMany(ctx context.Context, exec bob.Executor, number int) (models.PoolSlice, error) { + var err error + m := make(models.PoolSlice, number) + + for i := range m { + m[i], err = o.Create(ctx, exec) + if err != nil { + return nil, err + } + } + + return m, nil +} + +// MustCreateMany builds multiple pools and inserts them into the database +// Relations objects are also inserted and placed in the .R field +// panics if an error occurs +func (o PoolTemplate) MustCreateMany(ctx context.Context, exec bob.Executor, number int) models.PoolSlice { + m, err := o.CreateMany(ctx, exec, number) + if err != nil { + panic(err) + } + return m +} + +// CreateManyOrFail builds multiple pools and inserts them into the database +// Relations objects are also inserted and placed in the .R field +// It calls `tb.Fatal(err)` on the test/benchmark if an error occurs +func (o PoolTemplate) CreateManyOrFail(ctx context.Context, tb testing.TB, exec bob.Executor, number int) models.PoolSlice { + tb.Helper() + m, err := o.CreateMany(ctx, exec, number) + if err != nil { + tb.Fatal(err) + return nil + } + return m +} + +// Pool has methods that act as mods for the PoolTemplate +var PoolMods poolMods + +type poolMods struct{} + +func (m poolMods) RandomizeAllColumns(f *faker.Faker) PoolMod { + return PoolModSlice{ + PoolMods.RandomCondition(f), + 
PoolMods.RandomCreated(f), + PoolMods.RandomCreatorID(f), + PoolMods.RandomID(f), + PoolMods.RandomSiteID(f), + } +} + +// Set the model columns to this value +func (m poolMods) Condition(val enums.Poolconditiontype) PoolMod { + return PoolModFunc(func(_ context.Context, o *PoolTemplate) { + o.Condition = func() enums.Poolconditiontype { return val } + }) +} + +// Set the Column from the function +func (m poolMods) ConditionFunc(f func() enums.Poolconditiontype) PoolMod { + return PoolModFunc(func(_ context.Context, o *PoolTemplate) { + o.Condition = f + }) +} + +// Clear any values for the column +func (m poolMods) UnsetCondition() PoolMod { + return PoolModFunc(func(_ context.Context, o *PoolTemplate) { + o.Condition = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m poolMods) RandomCondition(f *faker.Faker) PoolMod { + return PoolModFunc(func(_ context.Context, o *PoolTemplate) { + o.Condition = func() enums.Poolconditiontype { + return random_enums_Poolconditiontype(f) + } + }) +} + +// Set the model columns to this value +func (m poolMods) Created(val time.Time) PoolMod { + return PoolModFunc(func(_ context.Context, o *PoolTemplate) { + o.Created = func() time.Time { return val } + }) +} + +// Set the Column from the function +func (m poolMods) CreatedFunc(f func() time.Time) PoolMod { + return PoolModFunc(func(_ context.Context, o *PoolTemplate) { + o.Created = f + }) +} + +// Clear any values for the column +func (m poolMods) UnsetCreated() PoolMod { + return PoolModFunc(func(_ context.Context, o *PoolTemplate) { + o.Created = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m poolMods) RandomCreated(f *faker.Faker) PoolMod { + return PoolModFunc(func(_ context.Context, o *PoolTemplate) { + o.Created = func() time.Time { + return random_time_Time(f) + } + }) +} + +// Set the model columns to 
this value +func (m poolMods) CreatorID(val int32) PoolMod { + return PoolModFunc(func(_ context.Context, o *PoolTemplate) { + o.CreatorID = func() int32 { return val } + }) +} + +// Set the Column from the function +func (m poolMods) CreatorIDFunc(f func() int32) PoolMod { + return PoolModFunc(func(_ context.Context, o *PoolTemplate) { + o.CreatorID = f + }) +} + +// Clear any values for the column +func (m poolMods) UnsetCreatorID() PoolMod { + return PoolModFunc(func(_ context.Context, o *PoolTemplate) { + o.CreatorID = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m poolMods) RandomCreatorID(f *faker.Faker) PoolMod { + return PoolModFunc(func(_ context.Context, o *PoolTemplate) { + o.CreatorID = func() int32 { + return random_int32(f) + } + }) +} + +// Set the model columns to this value +func (m poolMods) ID(val int32) PoolMod { + return PoolModFunc(func(_ context.Context, o *PoolTemplate) { + o.ID = func() int32 { return val } + }) +} + +// Set the Column from the function +func (m poolMods) IDFunc(f func() int32) PoolMod { + return PoolModFunc(func(_ context.Context, o *PoolTemplate) { + o.ID = f + }) +} + +// Clear any values for the column +func (m poolMods) UnsetID() PoolMod { + return PoolModFunc(func(_ context.Context, o *PoolTemplate) { + o.ID = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m poolMods) RandomID(f *faker.Faker) PoolMod { + return PoolModFunc(func(_ context.Context, o *PoolTemplate) { + o.ID = func() int32 { + return random_int32(f) + } + }) +} + +// Set the model columns to this value +func (m poolMods) SiteID(val null.Val[int32]) PoolMod { + return PoolModFunc(func(_ context.Context, o *PoolTemplate) { + o.SiteID = func() null.Val[int32] { return val } + }) +} + +// Set the Column from the function +func (m poolMods) SiteIDFunc(f func() null.Val[int32]) PoolMod { + 
return PoolModFunc(func(_ context.Context, o *PoolTemplate) { + o.SiteID = f + }) +} + +// Clear any values for the column +func (m poolMods) UnsetSiteID() PoolMod { + return PoolModFunc(func(_ context.Context, o *PoolTemplate) { + o.SiteID = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +// The generated value is sometimes null +func (m poolMods) RandomSiteID(f *faker.Faker) PoolMod { + return PoolModFunc(func(_ context.Context, o *PoolTemplate) { + o.SiteID = func() null.Val[int32] { + if f == nil { + f = &defaultFaker + } + + val := random_int32(f) + return null.From(val) + } + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +// The generated value is never null +func (m poolMods) RandomSiteIDNotNull(f *faker.Faker) PoolMod { + return PoolModFunc(func(_ context.Context, o *PoolTemplate) { + o.SiteID = func() null.Val[int32] { + if f == nil { + f = &defaultFaker + } + + val := random_int32(f) + return null.From(val) + } + }) +} + +func (m poolMods) WithParentsCascading() PoolMod { + return PoolModFunc(func(ctx context.Context, o *PoolTemplate) { + if isDone, _ := poolWithParentsCascadingCtx.Value(ctx); isDone { + return + } + ctx = poolWithParentsCascadingCtx.WithValue(ctx, true) + { + + related := o.f.NewUserWithContext(ctx, UserMods.WithParentsCascading()) + m.WithCreatorUser(related).Apply(ctx, o) + } + }) +} + +func (m poolMods) WithCreatorUser(rel *UserTemplate) PoolMod { + return PoolModFunc(func(ctx context.Context, o *PoolTemplate) { + o.r.CreatorUser = &poolRCreatorUserR{ + o: rel, + } + }) +} + +func (m poolMods) WithNewCreatorUser(mods ...UserMod) PoolMod { + return PoolModFunc(func(ctx context.Context, o *PoolTemplate) { + related := o.f.NewUserWithContext(ctx, mods...) 
+ + m.WithCreatorUser(related).Apply(ctx, o) + }) +} + +func (m poolMods) WithExistingCreatorUser(em *models.User) PoolMod { + return PoolModFunc(func(ctx context.Context, o *PoolTemplate) { + o.r.CreatorUser = &poolRCreatorUserR{ + o: o.f.FromExistingUser(em), + } + }) +} + +func (m poolMods) WithoutCreatorUser() PoolMod { + return PoolModFunc(func(ctx context.Context, o *PoolTemplate) { + o.r.CreatorUser = nil + }) +} diff --git a/db/factory/site.bob.go b/db/factory/site.bob.go new file mode 100644 index 00000000..c9733f9e --- /dev/null +++ b/db/factory/site.bob.go @@ -0,0 +1,1054 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package factory + +import ( + "context" + "testing" + "time" + + "github.com/Gleipnir-Technology/bob" + "github.com/Gleipnir-Technology/bob/types/pgtypes" + models "github.com/Gleipnir-Technology/nidus-sync/db/models" + "github.com/aarondl/opt/null" + "github.com/aarondl/opt/omit" + "github.com/aarondl/opt/omitnull" + "github.com/jaswdr/faker/v2" +) + +type SiteMod interface { + Apply(context.Context, *SiteTemplate) +} + +type SiteModFunc func(context.Context, *SiteTemplate) + +func (f SiteModFunc) Apply(ctx context.Context, n *SiteTemplate) { + f(ctx, n) +} + +type SiteModSlice []SiteMod + +func (mods SiteModSlice) Apply(ctx context.Context, n *SiteTemplate) { + for _, f := range mods { + f.Apply(ctx, n) + } +} + +// SiteTemplate is an object representing the database table. 
+// all columns are optional and should be set by mods +type SiteTemplate struct { + AddressID func() int32 + Created func() time.Time + CreatorID func() int32 + FileID func() null.Val[int32] + ID func() int32 + Notes func() string + OrganizationID func() int32 + OwnerName func() string + OwnerPhoneE164 func() null.Val[string] + ResidentOwned func() null.Val[bool] + ResidentPhoneE164 func() null.Val[string] + Tags func() pgtypes.HStore + Version func() int32 + + r siteR + f *Factory + + alreadyPersisted bool +} + +type siteR struct { + Address *siteRAddressR + CreatorUser *siteRCreatorUserR + File *siteRFileR +} + +type siteRAddressR struct { + o *AddressTemplate +} +type siteRCreatorUserR struct { + o *UserTemplate +} +type siteRFileR struct { + o *FileuploadFileTemplate +} + +// Apply mods to the SiteTemplate +func (o *SiteTemplate) Apply(ctx context.Context, mods ...SiteMod) { + for _, mod := range mods { + mod.Apply(ctx, o) + } +} + +// setModelRels creates and sets the relationships on *models.Site +// according to the relationships in the template. 
Nothing is inserted into the db +func (t SiteTemplate) setModelRels(o *models.Site) { + if t.r.Address != nil { + rel := t.r.Address.o.Build() + rel.R.Site = o + o.AddressID = rel.ID // h2 + o.R.Address = rel + } + + if t.r.CreatorUser != nil { + rel := t.r.CreatorUser.o.Build() + rel.R.CreatorSites = append(rel.R.CreatorSites, o) + o.CreatorID = rel.ID // h2 + o.R.CreatorUser = rel + } + + if t.r.File != nil { + rel := t.r.File.o.Build() + rel.R.Sites = append(rel.R.Sites, o) + o.FileID = null.From(rel.ID) // h2 + o.R.File = rel + } +} + +// BuildSetter returns an *models.SiteSetter +// this does nothing with the relationship templates +func (o SiteTemplate) BuildSetter() *models.SiteSetter { + m := &models.SiteSetter{} + + if o.AddressID != nil { + val := o.AddressID() + m.AddressID = omit.From(val) + } + if o.Created != nil { + val := o.Created() + m.Created = omit.From(val) + } + if o.CreatorID != nil { + val := o.CreatorID() + m.CreatorID = omit.From(val) + } + if o.FileID != nil { + val := o.FileID() + m.FileID = omitnull.FromNull(val) + } + if o.ID != nil { + val := o.ID() + m.ID = omit.From(val) + } + if o.Notes != nil { + val := o.Notes() + m.Notes = omit.From(val) + } + if o.OrganizationID != nil { + val := o.OrganizationID() + m.OrganizationID = omit.From(val) + } + if o.OwnerName != nil { + val := o.OwnerName() + m.OwnerName = omit.From(val) + } + if o.OwnerPhoneE164 != nil { + val := o.OwnerPhoneE164() + m.OwnerPhoneE164 = omitnull.FromNull(val) + } + if o.ResidentOwned != nil { + val := o.ResidentOwned() + m.ResidentOwned = omitnull.FromNull(val) + } + if o.ResidentPhoneE164 != nil { + val := o.ResidentPhoneE164() + m.ResidentPhoneE164 = omitnull.FromNull(val) + } + if o.Tags != nil { + val := o.Tags() + m.Tags = omit.From(val) + } + if o.Version != nil { + val := o.Version() + m.Version = omit.From(val) + } + + return m +} + +// BuildManySetter returns an []*models.SiteSetter +// this does nothing with the relationship templates +func (o 
SiteTemplate) BuildManySetter(number int) []*models.SiteSetter { + m := make([]*models.SiteSetter, number) + + for i := range m { + m[i] = o.BuildSetter() + } + + return m +} + +// Build returns an *models.Site +// Related objects are also created and placed in the .R field +// NOTE: Objects are not inserted into the database. Use SiteTemplate.Create +func (o SiteTemplate) Build() *models.Site { + m := &models.Site{} + + if o.AddressID != nil { + m.AddressID = o.AddressID() + } + if o.Created != nil { + m.Created = o.Created() + } + if o.CreatorID != nil { + m.CreatorID = o.CreatorID() + } + if o.FileID != nil { + m.FileID = o.FileID() + } + if o.ID != nil { + m.ID = o.ID() + } + if o.Notes != nil { + m.Notes = o.Notes() + } + if o.OrganizationID != nil { + m.OrganizationID = o.OrganizationID() + } + if o.OwnerName != nil { + m.OwnerName = o.OwnerName() + } + if o.OwnerPhoneE164 != nil { + m.OwnerPhoneE164 = o.OwnerPhoneE164() + } + if o.ResidentOwned != nil { + m.ResidentOwned = o.ResidentOwned() + } + if o.ResidentPhoneE164 != nil { + m.ResidentPhoneE164 = o.ResidentPhoneE164() + } + if o.Tags != nil { + m.Tags = o.Tags() + } + if o.Version != nil { + m.Version = o.Version() + } + + o.setModelRels(m) + + return m +} + +// BuildMany returns an models.SiteSlice +// Related objects are also created and placed in the .R field +// NOTE: Objects are not inserted into the database. 
Use SiteTemplate.CreateMany +func (o SiteTemplate) BuildMany(number int) models.SiteSlice { + m := make(models.SiteSlice, number) + + for i := range m { + m[i] = o.Build() + } + + return m +} + +func ensureCreatableSite(m *models.SiteSetter) { + if !(m.AddressID.IsValue()) { + val := random_int32(nil) + m.AddressID = omit.From(val) + } + if !(m.Created.IsValue()) { + val := random_time_Time(nil) + m.Created = omit.From(val) + } + if !(m.CreatorID.IsValue()) { + val := random_int32(nil) + m.CreatorID = omit.From(val) + } + if !(m.Notes.IsValue()) { + val := random_string(nil) + m.Notes = omit.From(val) + } + if !(m.OrganizationID.IsValue()) { + val := random_int32(nil) + m.OrganizationID = omit.From(val) + } + if !(m.OwnerName.IsValue()) { + val := random_string(nil) + m.OwnerName = omit.From(val) + } + if !(m.Tags.IsValue()) { + val := random_pgtypes_HStore(nil) + m.Tags = omit.From(val) + } + if !(m.Version.IsValue()) { + val := random_int32(nil) + m.Version = omit.From(val) + } +} + +// insertOptRels creates and inserts any optional the relationships on *models.Site +// according to the relationships in the template. 
+// any required relationship should have already exist on the model +func (o *SiteTemplate) insertOptRels(ctx context.Context, exec bob.Executor, m *models.Site) error { + var err error + + isFileDone, _ := siteRelFileCtx.Value(ctx) + if !isFileDone && o.r.File != nil { + ctx = siteRelFileCtx.WithValue(ctx, true) + if o.r.File.o.alreadyPersisted { + m.R.File = o.r.File.o.Build() + } else { + var rel2 *models.FileuploadFile + rel2, err = o.r.File.o.Create(ctx, exec) + if err != nil { + return err + } + err = m.AttachFile(ctx, exec, rel2) + if err != nil { + return err + } + } + + } + + return err +} + +// Create builds a site and inserts it into the database +// Relations objects are also inserted and placed in the .R field +func (o *SiteTemplate) Create(ctx context.Context, exec bob.Executor) (*models.Site, error) { + var err error + opt := o.BuildSetter() + ensureCreatableSite(opt) + + if o.r.Address == nil { + SiteMods.WithNewAddress().Apply(ctx, o) + } + + var rel0 *models.Address + + if o.r.Address.o.alreadyPersisted { + rel0 = o.r.Address.o.Build() + } else { + rel0, err = o.r.Address.o.Create(ctx, exec) + if err != nil { + return nil, err + } + } + + opt.AddressID = omit.From(rel0.ID) + + if o.r.CreatorUser == nil { + SiteMods.WithNewCreatorUser().Apply(ctx, o) + } + + var rel1 *models.User + + if o.r.CreatorUser.o.alreadyPersisted { + rel1 = o.r.CreatorUser.o.Build() + } else { + rel1, err = o.r.CreatorUser.o.Create(ctx, exec) + if err != nil { + return nil, err + } + } + + opt.CreatorID = omit.From(rel1.ID) + + m, err := models.Sites.Insert(opt).One(ctx, exec) + if err != nil { + return nil, err + } + + m.R.Address = rel0 + m.R.CreatorUser = rel1 + + if err := o.insertOptRels(ctx, exec, m); err != nil { + return nil, err + } + return m, err +} + +// MustCreate builds a site and inserts it into the database +// Relations objects are also inserted and placed in the .R field +// panics if an error occurs +func (o *SiteTemplate) MustCreate(ctx context.Context, 
exec bob.Executor) *models.Site { + m, err := o.Create(ctx, exec) + if err != nil { + panic(err) + } + return m +} + +// CreateOrFail builds a site and inserts it into the database +// Relations objects are also inserted and placed in the .R field +// It calls `tb.Fatal(err)` on the test/benchmark if an error occurs +func (o *SiteTemplate) CreateOrFail(ctx context.Context, tb testing.TB, exec bob.Executor) *models.Site { + tb.Helper() + m, err := o.Create(ctx, exec) + if err != nil { + tb.Fatal(err) + return nil + } + return m +} + +// CreateMany builds multiple sites and inserts them into the database +// Relations objects are also inserted and placed in the .R field +func (o SiteTemplate) CreateMany(ctx context.Context, exec bob.Executor, number int) (models.SiteSlice, error) { + var err error + m := make(models.SiteSlice, number) + + for i := range m { + m[i], err = o.Create(ctx, exec) + if err != nil { + return nil, err + } + } + + return m, nil +} + +// MustCreateMany builds multiple sites and inserts them into the database +// Relations objects are also inserted and placed in the .R field +// panics if an error occurs +func (o SiteTemplate) MustCreateMany(ctx context.Context, exec bob.Executor, number int) models.SiteSlice { + m, err := o.CreateMany(ctx, exec, number) + if err != nil { + panic(err) + } + return m +} + +// CreateManyOrFail builds multiple sites and inserts them into the database +// Relations objects are also inserted and placed in the .R field +// It calls `tb.Fatal(err)` on the test/benchmark if an error occurs +func (o SiteTemplate) CreateManyOrFail(ctx context.Context, tb testing.TB, exec bob.Executor, number int) models.SiteSlice { + tb.Helper() + m, err := o.CreateMany(ctx, exec, number) + if err != nil { + tb.Fatal(err) + return nil + } + return m +} + +// Site has methods that act as mods for the SiteTemplate +var SiteMods siteMods + +type siteMods struct{} + +func (m siteMods) RandomizeAllColumns(f *faker.Faker) SiteMod { + return 
SiteModSlice{ + SiteMods.RandomAddressID(f), + SiteMods.RandomCreated(f), + SiteMods.RandomCreatorID(f), + SiteMods.RandomFileID(f), + SiteMods.RandomID(f), + SiteMods.RandomNotes(f), + SiteMods.RandomOrganizationID(f), + SiteMods.RandomOwnerName(f), + SiteMods.RandomOwnerPhoneE164(f), + SiteMods.RandomResidentOwned(f), + SiteMods.RandomResidentPhoneE164(f), + SiteMods.RandomTags(f), + SiteMods.RandomVersion(f), + } +} + +// Set the model columns to this value +func (m siteMods) AddressID(val int32) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.AddressID = func() int32 { return val } + }) +} + +// Set the Column from the function +func (m siteMods) AddressIDFunc(f func() int32) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.AddressID = f + }) +} + +// Clear any values for the column +func (m siteMods) UnsetAddressID() SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.AddressID = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m siteMods) RandomAddressID(f *faker.Faker) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.AddressID = func() int32 { + return random_int32(f) + } + }) +} + +// Set the model columns to this value +func (m siteMods) Created(val time.Time) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.Created = func() time.Time { return val } + }) +} + +// Set the Column from the function +func (m siteMods) CreatedFunc(f func() time.Time) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.Created = f + }) +} + +// Clear any values for the column +func (m siteMods) UnsetCreated() SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.Created = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m 
siteMods) RandomCreated(f *faker.Faker) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.Created = func() time.Time { + return random_time_Time(f) + } + }) +} + +// Set the model columns to this value +func (m siteMods) CreatorID(val int32) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.CreatorID = func() int32 { return val } + }) +} + +// Set the Column from the function +func (m siteMods) CreatorIDFunc(f func() int32) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.CreatorID = f + }) +} + +// Clear any values for the column +func (m siteMods) UnsetCreatorID() SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.CreatorID = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m siteMods) RandomCreatorID(f *faker.Faker) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.CreatorID = func() int32 { + return random_int32(f) + } + }) +} + +// Set the model columns to this value +func (m siteMods) FileID(val null.Val[int32]) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.FileID = func() null.Val[int32] { return val } + }) +} + +// Set the Column from the function +func (m siteMods) FileIDFunc(f func() null.Val[int32]) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.FileID = f + }) +} + +// Clear any values for the column +func (m siteMods) UnsetFileID() SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.FileID = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +// The generated value is sometimes null +func (m siteMods) RandomFileID(f *faker.Faker) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.FileID = func() null.Val[int32] { + if f == nil { + f = 
&defaultFaker + } + + val := random_int32(f) + return null.From(val) + } + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +// The generated value is never null +func (m siteMods) RandomFileIDNotNull(f *faker.Faker) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.FileID = func() null.Val[int32] { + if f == nil { + f = &defaultFaker + } + + val := random_int32(f) + return null.From(val) + } + }) +} + +// Set the model columns to this value +func (m siteMods) ID(val int32) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.ID = func() int32 { return val } + }) +} + +// Set the Column from the function +func (m siteMods) IDFunc(f func() int32) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.ID = f + }) +} + +// Clear any values for the column +func (m siteMods) UnsetID() SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.ID = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m siteMods) RandomID(f *faker.Faker) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.ID = func() int32 { + return random_int32(f) + } + }) +} + +// Set the model columns to this value +func (m siteMods) Notes(val string) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.Notes = func() string { return val } + }) +} + +// Set the Column from the function +func (m siteMods) NotesFunc(f func() string) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.Notes = f + }) +} + +// Clear any values for the column +func (m siteMods) UnsetNotes() SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.Notes = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m 
siteMods) RandomNotes(f *faker.Faker) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.Notes = func() string { + return random_string(f) + } + }) +} + +// Set the model columns to this value +func (m siteMods) OrganizationID(val int32) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.OrganizationID = func() int32 { return val } + }) +} + +// Set the Column from the function +func (m siteMods) OrganizationIDFunc(f func() int32) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.OrganizationID = f + }) +} + +// Clear any values for the column +func (m siteMods) UnsetOrganizationID() SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.OrganizationID = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m siteMods) RandomOrganizationID(f *faker.Faker) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.OrganizationID = func() int32 { + return random_int32(f) + } + }) +} + +// Set the model columns to this value +func (m siteMods) OwnerName(val string) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.OwnerName = func() string { return val } + }) +} + +// Set the Column from the function +func (m siteMods) OwnerNameFunc(f func() string) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.OwnerName = f + }) +} + +// Clear any values for the column +func (m siteMods) UnsetOwnerName() SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.OwnerName = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m siteMods) RandomOwnerName(f *faker.Faker) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.OwnerName = func() string { + return random_string(f) + } + }) +} + +// Set the model 
columns to this value +func (m siteMods) OwnerPhoneE164(val null.Val[string]) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.OwnerPhoneE164 = func() null.Val[string] { return val } + }) +} + +// Set the Column from the function +func (m siteMods) OwnerPhoneE164Func(f func() null.Val[string]) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.OwnerPhoneE164 = f + }) +} + +// Clear any values for the column +func (m siteMods) UnsetOwnerPhoneE164() SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.OwnerPhoneE164 = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +// The generated value is sometimes null +func (m siteMods) RandomOwnerPhoneE164(f *faker.Faker) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.OwnerPhoneE164 = func() null.Val[string] { + if f == nil { + f = &defaultFaker + } + + val := random_string(f) + return null.From(val) + } + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +// The generated value is never null +func (m siteMods) RandomOwnerPhoneE164NotNull(f *faker.Faker) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.OwnerPhoneE164 = func() null.Val[string] { + if f == nil { + f = &defaultFaker + } + + val := random_string(f) + return null.From(val) + } + }) +} + +// Set the model columns to this value +func (m siteMods) ResidentOwned(val null.Val[bool]) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.ResidentOwned = func() null.Val[bool] { return val } + }) +} + +// Set the Column from the function +func (m siteMods) ResidentOwnedFunc(f func() null.Val[bool]) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.ResidentOwned = f + }) +} + +// Clear any values for the column +func (m siteMods) 
UnsetResidentOwned() SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.ResidentOwned = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +// The generated value is sometimes null +func (m siteMods) RandomResidentOwned(f *faker.Faker) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.ResidentOwned = func() null.Val[bool] { + if f == nil { + f = &defaultFaker + } + + val := random_bool(f) + return null.From(val) + } + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +// The generated value is never null +func (m siteMods) RandomResidentOwnedNotNull(f *faker.Faker) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.ResidentOwned = func() null.Val[bool] { + if f == nil { + f = &defaultFaker + } + + val := random_bool(f) + return null.From(val) + } + }) +} + +// Set the model columns to this value +func (m siteMods) ResidentPhoneE164(val null.Val[string]) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.ResidentPhoneE164 = func() null.Val[string] { return val } + }) +} + +// Set the Column from the function +func (m siteMods) ResidentPhoneE164Func(f func() null.Val[string]) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.ResidentPhoneE164 = f + }) +} + +// Clear any values for the column +func (m siteMods) UnsetResidentPhoneE164() SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.ResidentPhoneE164 = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +// The generated value is sometimes null +func (m siteMods) RandomResidentPhoneE164(f *faker.Faker) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.ResidentPhoneE164 = func() null.Val[string] { + if f == nil { + 
f = &defaultFaker + } + + val := random_string(f) + return null.From(val) + } + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +// The generated value is never null +func (m siteMods) RandomResidentPhoneE164NotNull(f *faker.Faker) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.ResidentPhoneE164 = func() null.Val[string] { + if f == nil { + f = &defaultFaker + } + + val := random_string(f) + return null.From(val) + } + }) +} + +// Set the model columns to this value +func (m siteMods) Tags(val pgtypes.HStore) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.Tags = func() pgtypes.HStore { return val } + }) +} + +// Set the Column from the function +func (m siteMods) TagsFunc(f func() pgtypes.HStore) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.Tags = f + }) +} + +// Clear any values for the column +func (m siteMods) UnsetTags() SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.Tags = nil + }) +} + +// Generates a random value for the column using the given faker +// if faker is nil, a default faker is used +func (m siteMods) RandomTags(f *faker.Faker) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.Tags = func() pgtypes.HStore { + return random_pgtypes_HStore(f) + } + }) +} + +// Set the model columns to this value +func (m siteMods) Version(val int32) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.Version = func() int32 { return val } + }) +} + +// Set the Column from the function +func (m siteMods) VersionFunc(f func() int32) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.Version = f + }) +} + +// Clear any values for the column +func (m siteMods) UnsetVersion() SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.Version = nil + }) +} + +// Generates a random 
value for the column using the given faker +// if faker is nil, a default faker is used +func (m siteMods) RandomVersion(f *faker.Faker) SiteMod { + return SiteModFunc(func(_ context.Context, o *SiteTemplate) { + o.Version = func() int32 { + return random_int32(f) + } + }) +} + +func (m siteMods) WithParentsCascading() SiteMod { + return SiteModFunc(func(ctx context.Context, o *SiteTemplate) { + if isDone, _ := siteWithParentsCascadingCtx.Value(ctx); isDone { + return + } + ctx = siteWithParentsCascadingCtx.WithValue(ctx, true) + { + + related := o.f.NewAddressWithContext(ctx, AddressMods.WithParentsCascading()) + m.WithAddress(related).Apply(ctx, o) + } + { + + related := o.f.NewUserWithContext(ctx, UserMods.WithParentsCascading()) + m.WithCreatorUser(related).Apply(ctx, o) + } + { + + related := o.f.NewFileuploadFileWithContext(ctx, FileuploadFileMods.WithParentsCascading()) + m.WithFile(related).Apply(ctx, o) + } + }) +} + +func (m siteMods) WithAddress(rel *AddressTemplate) SiteMod { + return SiteModFunc(func(ctx context.Context, o *SiteTemplate) { + o.r.Address = &siteRAddressR{ + o: rel, + } + }) +} + +func (m siteMods) WithNewAddress(mods ...AddressMod) SiteMod { + return SiteModFunc(func(ctx context.Context, o *SiteTemplate) { + related := o.f.NewAddressWithContext(ctx, mods...) 
+ + m.WithAddress(related).Apply(ctx, o) + }) +} + +func (m siteMods) WithExistingAddress(em *models.Address) SiteMod { + return SiteModFunc(func(ctx context.Context, o *SiteTemplate) { + o.r.Address = &siteRAddressR{ + o: o.f.FromExistingAddress(em), + } + }) +} + +func (m siteMods) WithoutAddress() SiteMod { + return SiteModFunc(func(ctx context.Context, o *SiteTemplate) { + o.r.Address = nil + }) +} + +func (m siteMods) WithCreatorUser(rel *UserTemplate) SiteMod { + return SiteModFunc(func(ctx context.Context, o *SiteTemplate) { + o.r.CreatorUser = &siteRCreatorUserR{ + o: rel, + } + }) +} + +func (m siteMods) WithNewCreatorUser(mods ...UserMod) SiteMod { + return SiteModFunc(func(ctx context.Context, o *SiteTemplate) { + related := o.f.NewUserWithContext(ctx, mods...) + + m.WithCreatorUser(related).Apply(ctx, o) + }) +} + +func (m siteMods) WithExistingCreatorUser(em *models.User) SiteMod { + return SiteModFunc(func(ctx context.Context, o *SiteTemplate) { + o.r.CreatorUser = &siteRCreatorUserR{ + o: o.f.FromExistingUser(em), + } + }) +} + +func (m siteMods) WithoutCreatorUser() SiteMod { + return SiteModFunc(func(ctx context.Context, o *SiteTemplate) { + o.r.CreatorUser = nil + }) +} + +func (m siteMods) WithFile(rel *FileuploadFileTemplate) SiteMod { + return SiteModFunc(func(ctx context.Context, o *SiteTemplate) { + o.r.File = &siteRFileR{ + o: rel, + } + }) +} + +func (m siteMods) WithNewFile(mods ...FileuploadFileMod) SiteMod { + return SiteModFunc(func(ctx context.Context, o *SiteTemplate) { + related := o.f.NewFileuploadFileWithContext(ctx, mods...) 
+ + m.WithFile(related).Apply(ctx, o) + }) +} + +func (m siteMods) WithExistingFile(em *models.FileuploadFile) SiteMod { + return SiteModFunc(func(ctx context.Context, o *SiteTemplate) { + o.r.File = &siteRFileR{ + o: o.f.FromExistingFileuploadFile(em), + } + }) +} + +func (m siteMods) WithoutFile() SiteMod { + return SiteModFunc(func(ctx context.Context, o *SiteTemplate) { + o.r.File = nil + }) +} diff --git a/db/factory/user_.bob.go b/db/factory/user_.bob.go index 38937f1e..87bcc9cf 100644 --- a/db/factory/user_.bob.go +++ b/db/factory/user_.bob.go @@ -61,13 +61,15 @@ type UserTemplate struct { type userR struct { PublicUserUser []*userRPublicUserUserR CreatorFiles []*userRCreatorFilesR - CreatorPools []*userRCreatorPoolsR + FileuploadPool []*userRFileuploadPoolR CreatorNoteAudios []*userRCreatorNoteAudiosR DeletorNoteAudios []*userRDeletorNoteAudiosR CreatorNoteImages []*userRCreatorNoteImagesR DeletorNoteImages []*userRDeletorNoteImagesR UserNotifications []*userRUserNotificationsR UserOauthTokens []*userRUserOauthTokensR + CreatorPools []*userRCreatorPoolsR + CreatorSites []*userRCreatorSitesR Organization *userROrganizationR } @@ -79,7 +81,7 @@ type userRCreatorFilesR struct { number int o *FileuploadFileTemplate } -type userRCreatorPoolsR struct { +type userRFileuploadPoolR struct { number int o *FileuploadPoolTemplate } @@ -107,6 +109,14 @@ type userRUserOauthTokensR struct { number int o *OauthTokenTemplate } +type userRCreatorPoolsR struct { + number int + o *PoolTemplate +} +type userRCreatorSitesR struct { + number int + o *SiteTemplate +} type userROrganizationR struct { o *OrganizationTemplate } @@ -147,9 +157,9 @@ func (t UserTemplate) setModelRels(o *models.User) { o.R.CreatorFiles = rel } - if t.r.CreatorPools != nil { + if t.r.FileuploadPool != nil { rel := models.FileuploadPoolSlice{} - for _, r := range t.r.CreatorPools { + for _, r := range t.r.FileuploadPool { related := r.o.BuildMany(r.number) for _, rel := range related { rel.CreatorID = 
o.ID // h2 @@ -157,7 +167,7 @@ func (t UserTemplate) setModelRels(o *models.User) { } rel = append(rel, related...) } - o.R.CreatorPools = rel + o.R.FileuploadPool = rel } if t.r.CreatorNoteAudios != nil { @@ -238,6 +248,32 @@ func (t UserTemplate) setModelRels(o *models.User) { o.R.UserOauthTokens = rel } + if t.r.CreatorPools != nil { + rel := models.PoolSlice{} + for _, r := range t.r.CreatorPools { + related := r.o.BuildMany(r.number) + for _, rel := range related { + rel.CreatorID = o.ID // h2 + rel.R.CreatorUser = o + } + rel = append(rel, related...) + } + o.R.CreatorPools = rel + } + + if t.r.CreatorSites != nil { + rel := models.SiteSlice{} + for _, r := range t.r.CreatorSites { + related := r.o.BuildMany(r.number) + for _, rel := range related { + rel.CreatorID = o.ID // h2 + rel.R.CreatorUser = o + } + rel = append(rel, related...) + } + o.R.CreatorSites = rel + } + if t.r.Organization != nil { rel := t.r.Organization.o.Build() rel.R.User = append(rel.R.User, o) @@ -456,19 +492,19 @@ func (o *UserTemplate) insertOptRels(ctx context.Context, exec bob.Executor, m * } } - isCreatorPoolsDone, _ := userRelCreatorPoolsCtx.Value(ctx) - if !isCreatorPoolsDone && o.r.CreatorPools != nil { - ctx = userRelCreatorPoolsCtx.WithValue(ctx, true) - for _, r := range o.r.CreatorPools { + isFileuploadPoolDone, _ := userRelFileuploadPoolCtx.Value(ctx) + if !isFileuploadPoolDone && o.r.FileuploadPool != nil { + ctx = userRelFileuploadPoolCtx.WithValue(ctx, true) + for _, r := range o.r.FileuploadPool { if r.o.alreadyPersisted { - m.R.CreatorPools = append(m.R.CreatorPools, r.o.Build()) + m.R.FileuploadPool = append(m.R.FileuploadPool, r.o.Build()) } else { rel2, err := r.o.CreateMany(ctx, exec, r.number) if err != nil { return err } - err = m.AttachCreatorPools(ctx, exec, rel2...) + err = m.AttachFileuploadPool(ctx, exec, rel2...) 
if err != nil { return err } @@ -596,6 +632,46 @@ func (o *UserTemplate) insertOptRels(ctx context.Context, exec bob.Executor, m * } } + isCreatorPoolsDone, _ := userRelCreatorPoolsCtx.Value(ctx) + if !isCreatorPoolsDone && o.r.CreatorPools != nil { + ctx = userRelCreatorPoolsCtx.WithValue(ctx, true) + for _, r := range o.r.CreatorPools { + if r.o.alreadyPersisted { + m.R.CreatorPools = append(m.R.CreatorPools, r.o.Build()) + } else { + rel9, err := r.o.CreateMany(ctx, exec, r.number) + if err != nil { + return err + } + + err = m.AttachCreatorPools(ctx, exec, rel9...) + if err != nil { + return err + } + } + } + } + + isCreatorSitesDone, _ := userRelCreatorSitesCtx.Value(ctx) + if !isCreatorSitesDone && o.r.CreatorSites != nil { + ctx = userRelCreatorSitesCtx.WithValue(ctx, true) + for _, r := range o.r.CreatorSites { + if r.o.alreadyPersisted { + m.R.CreatorSites = append(m.R.CreatorSites, r.o.Build()) + } else { + rel10, err := r.o.CreateMany(ctx, exec, r.number) + if err != nil { + return err + } + + err = m.AttachCreatorSites(ctx, exec, rel10...) 
+ if err != nil { + return err + } + } + } + } + return err } @@ -610,25 +686,25 @@ func (o *UserTemplate) Create(ctx context.Context, exec bob.Executor) (*models.U UserMods.WithNewOrganization().Apply(ctx, o) } - var rel9 *models.Organization + var rel11 *models.Organization if o.r.Organization.o.alreadyPersisted { - rel9 = o.r.Organization.o.Build() + rel11 = o.r.Organization.o.Build() } else { - rel9, err = o.r.Organization.o.Create(ctx, exec) + rel11, err = o.r.Organization.o.Create(ctx, exec) if err != nil { return nil, err } } - opt.OrganizationID = omit.From(rel9.ID) + opt.OrganizationID = omit.From(rel11.ID) m, err := models.Users.Insert(opt).One(ctx, exec) if err != nil { return nil, err } - m.R.Organization = rel9 + m.R.Organization = rel11 if err := o.insertOptRels(ctx, exec, m); err != nil { return nil, err @@ -1398,51 +1474,51 @@ func (m userMods) WithoutCreatorFiles() UserMod { }) } -func (m userMods) WithCreatorPools(number int, related *FileuploadPoolTemplate) UserMod { +func (m userMods) WithFileuploadPool(number int, related *FileuploadPoolTemplate) UserMod { return UserModFunc(func(ctx context.Context, o *UserTemplate) { - o.r.CreatorPools = []*userRCreatorPoolsR{{ + o.r.FileuploadPool = []*userRFileuploadPoolR{{ number: number, o: related, }} }) } -func (m userMods) WithNewCreatorPools(number int, mods ...FileuploadPoolMod) UserMod { +func (m userMods) WithNewFileuploadPool(number int, mods ...FileuploadPoolMod) UserMod { return UserModFunc(func(ctx context.Context, o *UserTemplate) { related := o.f.NewFileuploadPoolWithContext(ctx, mods...) 
- m.WithCreatorPools(number, related).Apply(ctx, o) + m.WithFileuploadPool(number, related).Apply(ctx, o) }) } -func (m userMods) AddCreatorPools(number int, related *FileuploadPoolTemplate) UserMod { +func (m userMods) AddFileuploadPool(number int, related *FileuploadPoolTemplate) UserMod { return UserModFunc(func(ctx context.Context, o *UserTemplate) { - o.r.CreatorPools = append(o.r.CreatorPools, &userRCreatorPoolsR{ + o.r.FileuploadPool = append(o.r.FileuploadPool, &userRFileuploadPoolR{ number: number, o: related, }) }) } -func (m userMods) AddNewCreatorPools(number int, mods ...FileuploadPoolMod) UserMod { +func (m userMods) AddNewFileuploadPool(number int, mods ...FileuploadPoolMod) UserMod { return UserModFunc(func(ctx context.Context, o *UserTemplate) { related := o.f.NewFileuploadPoolWithContext(ctx, mods...) - m.AddCreatorPools(number, related).Apply(ctx, o) + m.AddFileuploadPool(number, related).Apply(ctx, o) }) } -func (m userMods) AddExistingCreatorPools(existingModels ...*models.FileuploadPool) UserMod { +func (m userMods) AddExistingFileuploadPool(existingModels ...*models.FileuploadPool) UserMod { return UserModFunc(func(ctx context.Context, o *UserTemplate) { for _, em := range existingModels { - o.r.CreatorPools = append(o.r.CreatorPools, &userRCreatorPoolsR{ + o.r.FileuploadPool = append(o.r.FileuploadPool, &userRFileuploadPoolR{ o: o.f.FromExistingFileuploadPool(em), }) } }) } -func (m userMods) WithoutCreatorPools() UserMod { +func (m userMods) WithoutFileuploadPool() UserMod { return UserModFunc(func(ctx context.Context, o *UserTemplate) { - o.r.CreatorPools = nil + o.r.FileuploadPool = nil }) } @@ -1733,3 +1809,99 @@ func (m userMods) WithoutUserOauthTokens() UserMod { o.r.UserOauthTokens = nil }) } + +func (m userMods) WithCreatorPools(number int, related *PoolTemplate) UserMod { + return UserModFunc(func(ctx context.Context, o *UserTemplate) { + o.r.CreatorPools = []*userRCreatorPoolsR{{ + number: number, + o: related, + }} + }) +} + +func 
(m userMods) WithNewCreatorPools(number int, mods ...PoolMod) UserMod { + return UserModFunc(func(ctx context.Context, o *UserTemplate) { + related := o.f.NewPoolWithContext(ctx, mods...) + m.WithCreatorPools(number, related).Apply(ctx, o) + }) +} + +func (m userMods) AddCreatorPools(number int, related *PoolTemplate) UserMod { + return UserModFunc(func(ctx context.Context, o *UserTemplate) { + o.r.CreatorPools = append(o.r.CreatorPools, &userRCreatorPoolsR{ + number: number, + o: related, + }) + }) +} + +func (m userMods) AddNewCreatorPools(number int, mods ...PoolMod) UserMod { + return UserModFunc(func(ctx context.Context, o *UserTemplate) { + related := o.f.NewPoolWithContext(ctx, mods...) + m.AddCreatorPools(number, related).Apply(ctx, o) + }) +} + +func (m userMods) AddExistingCreatorPools(existingModels ...*models.Pool) UserMod { + return UserModFunc(func(ctx context.Context, o *UserTemplate) { + for _, em := range existingModels { + o.r.CreatorPools = append(o.r.CreatorPools, &userRCreatorPoolsR{ + o: o.f.FromExistingPool(em), + }) + } + }) +} + +func (m userMods) WithoutCreatorPools() UserMod { + return UserModFunc(func(ctx context.Context, o *UserTemplate) { + o.r.CreatorPools = nil + }) +} + +func (m userMods) WithCreatorSites(number int, related *SiteTemplate) UserMod { + return UserModFunc(func(ctx context.Context, o *UserTemplate) { + o.r.CreatorSites = []*userRCreatorSitesR{{ + number: number, + o: related, + }} + }) +} + +func (m userMods) WithNewCreatorSites(number int, mods ...SiteMod) UserMod { + return UserModFunc(func(ctx context.Context, o *UserTemplate) { + related := o.f.NewSiteWithContext(ctx, mods...) 
+ m.WithCreatorSites(number, related).Apply(ctx, o) + }) +} + +func (m userMods) AddCreatorSites(number int, related *SiteTemplate) UserMod { + return UserModFunc(func(ctx context.Context, o *UserTemplate) { + o.r.CreatorSites = append(o.r.CreatorSites, &userRCreatorSitesR{ + number: number, + o: related, + }) + }) +} + +func (m userMods) AddNewCreatorSites(number int, mods ...SiteMod) UserMod { + return UserModFunc(func(ctx context.Context, o *UserTemplate) { + related := o.f.NewSiteWithContext(ctx, mods...) + m.AddCreatorSites(number, related).Apply(ctx, o) + }) +} + +func (m userMods) AddExistingCreatorSites(existingModels ...*models.Site) UserMod { + return UserModFunc(func(ctx context.Context, o *UserTemplate) { + for _, em := range existingModels { + o.r.CreatorSites = append(o.r.CreatorSites, &userRCreatorSitesR{ + o: o.f.FromExistingSite(em), + }) + } + }) +} + +func (m userMods) WithoutCreatorSites() UserMod { + return UserModFunc(func(ctx context.Context, o *UserTemplate) { + o.r.CreatorSites = nil + }) +} diff --git a/db/migrations/00068_site.sql b/db/migrations/00068_site.sql new file mode 100644 index 00000000..0a1c1574 --- /dev/null +++ b/db/migrations/00068_site.sql @@ -0,0 +1,52 @@ +-- +goose Up +CREATE TYPE CountryType AS ENUM ( + 'usa' +); +CREATE TABLE address ( + country CountryType NOT NULL, + created TIMESTAMP WITHOUT TIME ZONE NOT NULL, + geom geometry(Point,4326) NOT NULL, + h3cell h3index NOT NULL, + id SERIAL NOT NULL, + locality TEXT NOT NULL, + number_ INTEGER NOT NULL, + postal_code TEXT NOT NULL, + street TEXT NOT NULL, + unit TEXT NOT NULL, + PRIMARY KEY(id), + UNIQUE(country, locality, number_, street) +); + +CREATE TABLE site ( + address_id INTEGER REFERENCES address(id) NOT NULL, + created TIMESTAMP WITHOUT TIME ZONE NOT NULL, + creator_id INTEGER REFERENCES user_(id) NOT NULL, + file_id INTEGER REFERENCES fileupload.file(id), + id SERIAL NOT NULL, + notes text NOT NULL, + organization_id INTEGER NOT NULL, + owner_name TEXT NOT 
NULL, + owner_phone_e164 TEXT, + resident_owned BOOLEAN, + resident_phone_e164 TEXT, + tags HSTORE NOT NULL, + version INTEGER NOT NULL, + PRIMARY KEY(id, version), + UNIQUE(address_id) +); + +CREATE TYPE PoolConditionType AS ENUM ( + 'blue', + 'dry', + 'false pool', + 'green', + 'murky' +); +CREATE TABLE pool ( + condition PoolConditionType NOT NULL, + created TIMESTAMP WITHOUT TIME ZONE NOT NULL, + creator_id INTEGER REFERENCES user_(id) NOT NULL, + id SERIAL NOT NULL, + site_id INTEGER, + PRIMARY KEY(id) +); diff --git a/db/migrations/00069_arcgis_layer.sql b/db/migrations/00069_arcgis_layer.sql new file mode 100644 index 00000000..e223dc3e --- /dev/null +++ b/db/migrations/00069_arcgis_layer.sql @@ -0,0 +1,75 @@ +-- +goose Up +CREATE TABLE arcgis.feature_service ( + extent box2d NOT NULL, + item_id TEXT NOT NULL, + spatial_reference INTEGER NOT NULL, + url TEXT NOT NULL, + PRIMARY KEY(item_id) +); +CREATE TABLE arcgis.layer ( + extent box2d NOT NULL, + feature_service_item_id TEXT NOT NULL REFERENCES arcgis.feature_service(item_id), + index_ INTEGER NOT NULL, + PRIMARY KEY(feature_service_item_id, index_) +); +CREATE TYPE arcgis.FieldType AS ENUM ( + 'esriFieldTypeSmallInteger', -- 16-bit Integer. + 'esriFieldTypeInteger', -- 32-bit Integer. + 'esriFieldTypeSingle', -- Single-precision floating-point number. + 'esriFieldTypeDouble', -- Double-precision floating-point number. + 'esriFieldTypeString', -- Character string. + 'esriFieldTypeDate', -- Date. + 'esriFieldTypeOID', -- Integer representing an object identifier. 32-bit OID has a length of 4 bytes, and 64-bit OID has a length of 8 bytes. + 'esriFieldTypeGeometry', -- Geometry. + 'esriFieldTypeBlob', -- Binary Large Object. + 'esriFieldTypeRaster', -- Raster. + 'esriFieldTypeGUID', -- Globally Unique Identifier. + 'esriFieldTypeGlobalID', -- Esri Global ID. + 'esriFieldTypeXML', -- XML Document. + 'esriFieldTypeBigInteger' -- 64-bit Integer. 
+); +CREATE TABLE arcgis.layer_field ( + layer_feature_service_item_id TEXT NOT NULL, + layer_index INTEGER NOT NULL, + name TEXT NOT NULL, + type_ arcgis.FieldType NOT NULL, + FOREIGN KEY(layer_feature_service_item_id, layer_index) REFERENCES arcgis.layer(feature_service_item_id, index_), + PRIMARY KEY(layer_feature_service_item_id, layer_index, name) +); +CREATE TYPE arcgis.MappingDestinationParcel AS ENUM ( + 'apn', + 'description' +); +CREATE TABLE arcgis.parcel_mapping ( + destination arcgis.MappingDestinationParcel NOT NULL, + layer_feature_service_item_id TEXT NOT NULL, + layer_index INTEGER NOT NULL, + layer_field_name TEXT NOT NULL, + organization_id INTEGER NOT NULL REFERENCES organization(id), + FOREIGN KEY(layer_feature_service_item_id, layer_index, layer_field_name) REFERENCES arcgis.layer_field(layer_feature_service_item_id, layer_index, name), + PRIMARY KEY(organization_id, destination) +); + +CREATE TYPE arcgis.MappingDestinationAddress AS ENUM ( + 'country', + 'locality', + 'postal_code', + 'street', + 'unit' +); +CREATE TABLE arcgis.address_mapping ( + destination arcgis.MappingDestinationAddress NOT NULL, + layer_feature_service_item_id TEXT NOT NULL, + layer_index INTEGER NOT NULL, + layer_field_name TEXT NOT NULL, + organization_id INTEGER NOT NULL REFERENCES organization(id), + FOREIGN KEY(layer_feature_service_item_id, layer_index, layer_field_name) REFERENCES arcgis.layer_field(layer_feature_service_item_id, layer_index, name), + PRIMARY KEY(organization_id, destination) +); +CREATE TABLE parcel ( + apn TEXT NOT NULL, + description TEXT NOT NULL, + id SERIAL NOT NULL, + geometry geometry(Polygon, 4326) NOT NULL, + PRIMARY KEY(id) +); diff --git a/db/models/address.bob.go b/db/models/address.bob.go new file mode 100644 index 00000000..c4413e7c --- /dev/null +++ b/db/models/address.bob.go @@ -0,0 +1,810 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package models + +import ( + "context" + "fmt" + "io" + "time" + + "github.com/Gleipnir-Technology/bob" + "github.com/Gleipnir-Technology/bob/dialect/psql" + "github.com/Gleipnir-Technology/bob/dialect/psql/dialect" + "github.com/Gleipnir-Technology/bob/dialect/psql/dm" + "github.com/Gleipnir-Technology/bob/dialect/psql/sm" + "github.com/Gleipnir-Technology/bob/dialect/psql/um" + "github.com/Gleipnir-Technology/bob/expr" + "github.com/Gleipnir-Technology/bob/mods" + "github.com/Gleipnir-Technology/bob/orm" + "github.com/Gleipnir-Technology/bob/types/pgtypes" + enums "github.com/Gleipnir-Technology/nidus-sync/db/enums" + "github.com/aarondl/opt/omit" +) + +// Address is an object representing the database table. +type Address struct { + Country enums.Countrytype `db:"country" ` + Created time.Time `db:"created" ` + Geom string `db:"geom" ` + H3cell string `db:"h3cell" ` + ID int32 `db:"id,pk" ` + Locality string `db:"locality" ` + Number int32 `db:"number_" ` + PostalCode string `db:"postal_code" ` + Street string `db:"street" ` + Unit string `db:"unit" ` + + R addressR `db:"-" ` +} + +// AddressSlice is an alias for a slice of pointers to Address. +// This should almost always be used instead of []*Address. +type AddressSlice []*Address + +// Addresses contains methods to work with the address table +var Addresses = psql.NewTablex[*Address, AddressSlice, *AddressSetter]("", "address", buildAddressColumns("address")) + +// AddressesQuery is a query on the address table +type AddressesQuery = *psql.ViewQuery[*Address, AddressSlice] + +// addressR is where relationships are stored. 
+type addressR struct { + Site *Site // site.site_address_id_fkey +} + +func buildAddressColumns(alias string) addressColumns { + return addressColumns{ + ColumnsExpr: expr.NewColumnsExpr( + "country", "created", "geom", "h3cell", "id", "locality", "number_", "postal_code", "street", "unit", + ).WithParent("address"), + tableAlias: alias, + Country: psql.Quote(alias, "country"), + Created: psql.Quote(alias, "created"), + Geom: psql.Quote(alias, "geom"), + H3cell: psql.Quote(alias, "h3cell"), + ID: psql.Quote(alias, "id"), + Locality: psql.Quote(alias, "locality"), + Number: psql.Quote(alias, "number_"), + PostalCode: psql.Quote(alias, "postal_code"), + Street: psql.Quote(alias, "street"), + Unit: psql.Quote(alias, "unit"), + } +} + +type addressColumns struct { + expr.ColumnsExpr + tableAlias string + Country psql.Expression + Created psql.Expression + Geom psql.Expression + H3cell psql.Expression + ID psql.Expression + Locality psql.Expression + Number psql.Expression + PostalCode psql.Expression + Street psql.Expression + Unit psql.Expression +} + +func (c addressColumns) Alias() string { + return c.tableAlias +} + +func (addressColumns) AliasedAs(alias string) addressColumns { + return buildAddressColumns(alias) +} + +// AddressSetter is used for insert/upsert/update operations +// All values are optional, and do not have to be set +// Generated columns are not included +type AddressSetter struct { + Country omit.Val[enums.Countrytype] `db:"country" ` + Created omit.Val[time.Time] `db:"created" ` + Geom omit.Val[string] `db:"geom" ` + H3cell omit.Val[string] `db:"h3cell" ` + ID omit.Val[int32] `db:"id,pk" ` + Locality omit.Val[string] `db:"locality" ` + Number omit.Val[int32] `db:"number_" ` + PostalCode omit.Val[string] `db:"postal_code" ` + Street omit.Val[string] `db:"street" ` + Unit omit.Val[string] `db:"unit" ` +} + +func (s AddressSetter) SetColumns() []string { + vals := make([]string, 0, 10) + if s.Country.IsValue() { + vals = append(vals, "country") + 
} + if s.Created.IsValue() { + vals = append(vals, "created") + } + if s.Geom.IsValue() { + vals = append(vals, "geom") + } + if s.H3cell.IsValue() { + vals = append(vals, "h3cell") + } + if s.ID.IsValue() { + vals = append(vals, "id") + } + if s.Locality.IsValue() { + vals = append(vals, "locality") + } + if s.Number.IsValue() { + vals = append(vals, "number_") + } + if s.PostalCode.IsValue() { + vals = append(vals, "postal_code") + } + if s.Street.IsValue() { + vals = append(vals, "street") + } + if s.Unit.IsValue() { + vals = append(vals, "unit") + } + return vals +} + +func (s AddressSetter) Overwrite(t *Address) { + if s.Country.IsValue() { + t.Country = s.Country.MustGet() + } + if s.Created.IsValue() { + t.Created = s.Created.MustGet() + } + if s.Geom.IsValue() { + t.Geom = s.Geom.MustGet() + } + if s.H3cell.IsValue() { + t.H3cell = s.H3cell.MustGet() + } + if s.ID.IsValue() { + t.ID = s.ID.MustGet() + } + if s.Locality.IsValue() { + t.Locality = s.Locality.MustGet() + } + if s.Number.IsValue() { + t.Number = s.Number.MustGet() + } + if s.PostalCode.IsValue() { + t.PostalCode = s.PostalCode.MustGet() + } + if s.Street.IsValue() { + t.Street = s.Street.MustGet() + } + if s.Unit.IsValue() { + t.Unit = s.Unit.MustGet() + } +} + +func (s *AddressSetter) Apply(q *dialect.InsertQuery) { + q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) { + return Addresses.BeforeInsertHooks.RunHooks(ctx, exec, s) + }) + + q.AppendValues(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) { + vals := make([]bob.Expression, 10) + if s.Country.IsValue() { + vals[0] = psql.Arg(s.Country.MustGet()) + } else { + vals[0] = psql.Raw("DEFAULT") + } + + if s.Created.IsValue() { + vals[1] = psql.Arg(s.Created.MustGet()) + } else { + vals[1] = psql.Raw("DEFAULT") + } + + if s.Geom.IsValue() { + vals[2] = psql.Arg(s.Geom.MustGet()) + } else { + vals[2] = psql.Raw("DEFAULT") + } + + if s.H3cell.IsValue() { 
+ vals[3] = psql.Arg(s.H3cell.MustGet()) + } else { + vals[3] = psql.Raw("DEFAULT") + } + + if s.ID.IsValue() { + vals[4] = psql.Arg(s.ID.MustGet()) + } else { + vals[4] = psql.Raw("DEFAULT") + } + + if s.Locality.IsValue() { + vals[5] = psql.Arg(s.Locality.MustGet()) + } else { + vals[5] = psql.Raw("DEFAULT") + } + + if s.Number.IsValue() { + vals[6] = psql.Arg(s.Number.MustGet()) + } else { + vals[6] = psql.Raw("DEFAULT") + } + + if s.PostalCode.IsValue() { + vals[7] = psql.Arg(s.PostalCode.MustGet()) + } else { + vals[7] = psql.Raw("DEFAULT") + } + + if s.Street.IsValue() { + vals[8] = psql.Arg(s.Street.MustGet()) + } else { + vals[8] = psql.Raw("DEFAULT") + } + + if s.Unit.IsValue() { + vals[9] = psql.Arg(s.Unit.MustGet()) + } else { + vals[9] = psql.Raw("DEFAULT") + } + + return bob.ExpressSlice(ctx, w, d, start, vals, "", ", ", "") + })) +} + +func (s AddressSetter) UpdateMod() bob.Mod[*dialect.UpdateQuery] { + return um.Set(s.Expressions()...) +} + +func (s AddressSetter) Expressions(prefix ...string) []bob.Expression { + exprs := make([]bob.Expression, 0, 10) + + if s.Country.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "country")...), + psql.Arg(s.Country), + }}) + } + + if s.Created.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "created")...), + psql.Arg(s.Created), + }}) + } + + if s.Geom.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "geom")...), + psql.Arg(s.Geom), + }}) + } + + if s.H3cell.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "h3cell")...), + psql.Arg(s.H3cell), + }}) + } + + if s.ID.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "id")...), + psql.Arg(s.ID), + }}) + } + + if s.Locality.IsValue() { + exprs = append(exprs, 
expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "locality")...), + psql.Arg(s.Locality), + }}) + } + + if s.Number.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "number_")...), + psql.Arg(s.Number), + }}) + } + + if s.PostalCode.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "postal_code")...), + psql.Arg(s.PostalCode), + }}) + } + + if s.Street.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "street")...), + psql.Arg(s.Street), + }}) + } + + if s.Unit.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "unit")...), + psql.Arg(s.Unit), + }}) + } + + return exprs +} + +// FindAddress retrieves a single record by primary key +// If cols is empty Find will return all columns. +func FindAddress(ctx context.Context, exec bob.Executor, IDPK int32, cols ...string) (*Address, error) { + if len(cols) == 0 { + return Addresses.Query( + sm.Where(Addresses.Columns.ID.EQ(psql.Arg(IDPK))), + ).One(ctx, exec) + } + + return Addresses.Query( + sm.Where(Addresses.Columns.ID.EQ(psql.Arg(IDPK))), + sm.Columns(Addresses.Columns.Only(cols...)), + ).One(ctx, exec) +} + +// AddressExists checks the presence of a single record by primary key +func AddressExists(ctx context.Context, exec bob.Executor, IDPK int32) (bool, error) { + return Addresses.Query( + sm.Where(Addresses.Columns.ID.EQ(psql.Arg(IDPK))), + ).Exists(ctx, exec) +} + +// AfterQueryHook is called after Address is retrieved from the database +func (o *Address) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error { + var err error + + switch queryType { + case bob.QueryTypeSelect: + ctx, err = Addresses.AfterSelectHooks.RunHooks(ctx, exec, AddressSlice{o}) + case bob.QueryTypeInsert: + ctx, err = 
Addresses.AfterInsertHooks.RunHooks(ctx, exec, AddressSlice{o}) + case bob.QueryTypeUpdate: + ctx, err = Addresses.AfterUpdateHooks.RunHooks(ctx, exec, AddressSlice{o}) + case bob.QueryTypeDelete: + ctx, err = Addresses.AfterDeleteHooks.RunHooks(ctx, exec, AddressSlice{o}) + } + + return err +} + +// primaryKeyVals returns the primary key values of the Address +func (o *Address) primaryKeyVals() bob.Expression { + return psql.Arg(o.ID) +} + +func (o *Address) pkEQ() dialect.Expression { + return psql.Quote("address", "id").EQ(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) { + return o.primaryKeyVals().WriteSQL(ctx, w, d, start) + })) +} + +// Update uses an executor to update the Address +func (o *Address) Update(ctx context.Context, exec bob.Executor, s *AddressSetter) error { + v, err := Addresses.Update(s.UpdateMod(), um.Where(o.pkEQ())).One(ctx, exec) + if err != nil { + return err + } + + o.R = v.R + *o = *v + + return nil +} + +// Delete deletes a single Address record with an executor +func (o *Address) Delete(ctx context.Context, exec bob.Executor) error { + _, err := Addresses.Delete(dm.Where(o.pkEQ())).Exec(ctx, exec) + return err +} + +// Reload refreshes the Address using the executor +func (o *Address) Reload(ctx context.Context, exec bob.Executor) error { + o2, err := Addresses.Query( + sm.Where(Addresses.Columns.ID.EQ(psql.Arg(o.ID))), + ).One(ctx, exec) + if err != nil { + return err + } + o2.R = o.R + *o = *o2 + + return nil +} + +// AfterQueryHook is called after AddressSlice is retrieved from the database +func (o AddressSlice) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error { + var err error + + switch queryType { + case bob.QueryTypeSelect: + ctx, err = Addresses.AfterSelectHooks.RunHooks(ctx, exec, o) + case bob.QueryTypeInsert: + ctx, err = Addresses.AfterInsertHooks.RunHooks(ctx, exec, o) + case bob.QueryTypeUpdate: + ctx, err = 
Addresses.AfterUpdateHooks.RunHooks(ctx, exec, o) + case bob.QueryTypeDelete: + ctx, err = Addresses.AfterDeleteHooks.RunHooks(ctx, exec, o) + } + + return err +} + +func (o AddressSlice) pkIN() dialect.Expression { + if len(o) == 0 { + return psql.Raw("NULL") + } + + return psql.Quote("address", "id").In(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) { + pkPairs := make([]bob.Expression, len(o)) + for i, row := range o { + pkPairs[i] = row.primaryKeyVals() + } + return bob.ExpressSlice(ctx, w, d, start, pkPairs, "", ", ", "") + })) +} + +// copyMatchingRows finds models in the given slice that have the same primary key +// then it first copies the existing relationships from the old model to the new model +// and then replaces the old model in the slice with the new model +func (o AddressSlice) copyMatchingRows(from ...*Address) { + for i, old := range o { + for _, new := range from { + if new.ID != old.ID { + continue + } + new.R = old.R + o[i] = new + break + } + } +} + +// UpdateMod modifies an update query with "WHERE primary_key IN (o...)" +func (o AddressSlice) UpdateMod() bob.Mod[*dialect.UpdateQuery] { + return bob.ModFunc[*dialect.UpdateQuery](func(q *dialect.UpdateQuery) { + q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) { + return Addresses.BeforeUpdateHooks.RunHooks(ctx, exec, o) + }) + + q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error { + var err error + switch retrieved := retrieved.(type) { + case *Address: + o.copyMatchingRows(retrieved) + case []*Address: + o.copyMatchingRows(retrieved...) + case AddressSlice: + o.copyMatchingRows(retrieved...) 
+ default: + // If the retrieved value is not a Address or a slice of Address + // then run the AfterUpdateHooks on the slice + _, err = Addresses.AfterUpdateHooks.RunHooks(ctx, exec, o) + } + + return err + })) + + q.AppendWhere(o.pkIN()) + }) +} + +// DeleteMod modifies an delete query with "WHERE primary_key IN (o...)" +func (o AddressSlice) DeleteMod() bob.Mod[*dialect.DeleteQuery] { + return bob.ModFunc[*dialect.DeleteQuery](func(q *dialect.DeleteQuery) { + q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) { + return Addresses.BeforeDeleteHooks.RunHooks(ctx, exec, o) + }) + + q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error { + var err error + switch retrieved := retrieved.(type) { + case *Address: + o.copyMatchingRows(retrieved) + case []*Address: + o.copyMatchingRows(retrieved...) + case AddressSlice: + o.copyMatchingRows(retrieved...) + default: + // If the retrieved value is not a Address or a slice of Address + // then run the AfterDeleteHooks on the slice + _, err = Addresses.AfterDeleteHooks.RunHooks(ctx, exec, o) + } + + return err + })) + + q.AppendWhere(o.pkIN()) + }) +} + +func (o AddressSlice) UpdateAll(ctx context.Context, exec bob.Executor, vals AddressSetter) error { + if len(o) == 0 { + return nil + } + + _, err := Addresses.Update(vals.UpdateMod(), o.UpdateMod()).All(ctx, exec) + return err +} + +func (o AddressSlice) DeleteAll(ctx context.Context, exec bob.Executor) error { + if len(o) == 0 { + return nil + } + + _, err := Addresses.Delete(o.DeleteMod()).Exec(ctx, exec) + return err +} + +func (o AddressSlice) ReloadAll(ctx context.Context, exec bob.Executor) error { + if len(o) == 0 { + return nil + } + + o2, err := Addresses.Query(sm.Where(o.pkIN())).All(ctx, exec) + if err != nil { + return err + } + + o.copyMatchingRows(o2...) 
+ + return nil +} + +// Site starts a query for related objects on site +func (o *Address) Site(mods ...bob.Mod[*dialect.SelectQuery]) SitesQuery { + return Sites.Query(append(mods, + sm.Where(Sites.Columns.AddressID.EQ(psql.Arg(o.ID))), + )...) +} + +func (os AddressSlice) Site(mods ...bob.Mod[*dialect.SelectQuery]) SitesQuery { + pkID := make(pgtypes.Array[int32], 0, len(os)) + for _, o := range os { + if o == nil { + continue + } + pkID = append(pkID, o.ID) + } + PKArgExpr := psql.Select(sm.Columns( + psql.F("unnest", psql.Cast(psql.Arg(pkID), "integer[]")), + )) + + return Sites.Query(append(mods, + sm.Where(psql.Group(Sites.Columns.AddressID).OP("IN", PKArgExpr)), + )...) +} + +func insertAddressSite0(ctx context.Context, exec bob.Executor, site1 *SiteSetter, address0 *Address) (*Site, error) { + site1.AddressID = omit.From(address0.ID) + + ret, err := Sites.Insert(site1).One(ctx, exec) + if err != nil { + return ret, fmt.Errorf("insertAddressSite0: %w", err) + } + + return ret, nil +} + +func attachAddressSite0(ctx context.Context, exec bob.Executor, count int, site1 *Site, address0 *Address) (*Site, error) { + setter := &SiteSetter{ + AddressID: omit.From(address0.ID), + } + + err := site1.Update(ctx, exec, setter) + if err != nil { + return nil, fmt.Errorf("attachAddressSite0: %w", err) + } + + return site1, nil +} + +func (address0 *Address) InsertSite(ctx context.Context, exec bob.Executor, related *SiteSetter) error { + var err error + + site1, err := insertAddressSite0(ctx, exec, related, address0) + if err != nil { + return err + } + + address0.R.Site = site1 + + site1.R.Address = address0 + + return nil +} + +func (address0 *Address) AttachSite(ctx context.Context, exec bob.Executor, site1 *Site) error { + var err error + + _, err = attachAddressSite0(ctx, exec, 1, site1, address0) + if err != nil { + return err + } + + address0.R.Site = site1 + + site1.R.Address = address0 + + return nil +} + +type addressWhere[Q psql.Filterable] struct { + Country 
psql.WhereMod[Q, enums.Countrytype] + Created psql.WhereMod[Q, time.Time] + Geom psql.WhereMod[Q, string] + H3cell psql.WhereMod[Q, string] + ID psql.WhereMod[Q, int32] + Locality psql.WhereMod[Q, string] + Number psql.WhereMod[Q, int32] + PostalCode psql.WhereMod[Q, string] + Street psql.WhereMod[Q, string] + Unit psql.WhereMod[Q, string] +} + +func (addressWhere[Q]) AliasedAs(alias string) addressWhere[Q] { + return buildAddressWhere[Q](buildAddressColumns(alias)) +} + +func buildAddressWhere[Q psql.Filterable](cols addressColumns) addressWhere[Q] { + return addressWhere[Q]{ + Country: psql.Where[Q, enums.Countrytype](cols.Country), + Created: psql.Where[Q, time.Time](cols.Created), + Geom: psql.Where[Q, string](cols.Geom), + H3cell: psql.Where[Q, string](cols.H3cell), + ID: psql.Where[Q, int32](cols.ID), + Locality: psql.Where[Q, string](cols.Locality), + Number: psql.Where[Q, int32](cols.Number), + PostalCode: psql.Where[Q, string](cols.PostalCode), + Street: psql.Where[Q, string](cols.Street), + Unit: psql.Where[Q, string](cols.Unit), + } +} + +func (o *Address) Preload(name string, retrieved any) error { + if o == nil { + return nil + } + + switch name { + case "Site": + rel, ok := retrieved.(*Site) + if !ok { + return fmt.Errorf("address cannot load %T as %q", retrieved, name) + } + + o.R.Site = rel + + if rel != nil { + rel.R.Address = o + } + return nil + default: + return fmt.Errorf("address has no relationship %q", name) + } +} + +type addressPreloader struct { + Site func(...psql.PreloadOption) psql.Preloader +} + +func buildAddressPreloader() addressPreloader { + return addressPreloader{ + Site: func(opts ...psql.PreloadOption) psql.Preloader { + return psql.Preload[*Site, SiteSlice](psql.PreloadRel{ + Name: "Site", + Sides: []psql.PreloadSide{ + { + From: Addresses, + To: Sites, + FromColumns: []string{"id"}, + ToColumns: []string{"address_id"}, + }, + }, + }, Sites.Columns.Names(), opts...) 
+ }, + } +} + +type addressThenLoader[Q orm.Loadable] struct { + Site func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] +} + +func buildAddressThenLoader[Q orm.Loadable]() addressThenLoader[Q] { + type SiteLoadInterface interface { + LoadSite(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error + } + + return addressThenLoader[Q]{ + Site: thenLoadBuilder[Q]( + "Site", + func(ctx context.Context, exec bob.Executor, retrieved SiteLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { + return retrieved.LoadSite(ctx, exec, mods...) + }, + ), + } +} + +// LoadSite loads the address's Site into the .R struct +func (o *Address) LoadSite(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if o == nil { + return nil + } + + // Reset the relationship + o.R.Site = nil + + related, err := o.Site(mods...).One(ctx, exec) + if err != nil { + return err + } + + related.R.Address = o + + o.R.Site = related + return nil +} + +// LoadSite loads the address's Site into the .R struct +func (os AddressSlice) LoadSite(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if len(os) == 0 { + return nil + } + + sites, err := os.Site(mods...).All(ctx, exec) + if err != nil { + return err + } + + for _, o := range os { + if o == nil { + continue + } + + for _, rel := range sites { + + if !(o.ID == rel.AddressID) { + continue + } + + rel.R.Address = o + + o.R.Site = rel + break + } + } + + return nil +} + +type addressJoins[Q dialect.Joinable] struct { + typ string + Site modAs[Q, siteColumns] +} + +func (j addressJoins[Q]) aliasedAs(alias string) addressJoins[Q] { + return buildAddressJoins[Q](buildAddressColumns(alias), j.typ) +} + +func buildAddressJoins[Q dialect.Joinable](cols addressColumns, typ string) addressJoins[Q] { + return addressJoins[Q]{ + typ: typ, + Site: modAs[Q, siteColumns]{ + c: Sites.Columns, + f: func(to siteColumns) bob.Mod[Q] { + mods := make(mods.QueryMods[Q], 0, 1) + 
+ { + mods = append(mods, dialect.Join[Q](typ, Sites.Name().As(to.Alias())).On( + to.AddressID.EQ(cols.ID), + )) + } + + return mods + }, + }, + } +} diff --git a/db/models/arcgis.address_mapping.bob.go b/db/models/arcgis.address_mapping.bob.go new file mode 100644 index 00000000..2f47f37c --- /dev/null +++ b/db/models/arcgis.address_mapping.bob.go @@ -0,0 +1,882 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "fmt" + "io" + + "github.com/Gleipnir-Technology/bob" + "github.com/Gleipnir-Technology/bob/dialect/psql" + "github.com/Gleipnir-Technology/bob/dialect/psql/dialect" + "github.com/Gleipnir-Technology/bob/dialect/psql/dm" + "github.com/Gleipnir-Technology/bob/dialect/psql/sm" + "github.com/Gleipnir-Technology/bob/dialect/psql/um" + "github.com/Gleipnir-Technology/bob/expr" + "github.com/Gleipnir-Technology/bob/mods" + "github.com/Gleipnir-Technology/bob/orm" + "github.com/Gleipnir-Technology/bob/types/pgtypes" + enums "github.com/Gleipnir-Technology/nidus-sync/db/enums" + "github.com/aarondl/opt/omit" +) + +// ArcgisAddressMapping is an object representing the database table. +type ArcgisAddressMapping struct { + Destination enums.ArcgisMappingdestinationaddress `db:"destination,pk" ` + LayerFeatureServiceItemID string `db:"layer_feature_service_item_id" ` + LayerIndex int32 `db:"layer_index" ` + LayerFieldName string `db:"layer_field_name" ` + OrganizationID int32 `db:"organization_id,pk" ` + + R arcgisAddressMappingR `db:"-" ` +} + +// ArcgisAddressMappingSlice is an alias for a slice of pointers to ArcgisAddressMapping. +// This should almost always be used instead of []*ArcgisAddressMapping. 
+type ArcgisAddressMappingSlice []*ArcgisAddressMapping + +// ArcgisAddressMappings contains methods to work with the address_mapping table +var ArcgisAddressMappings = psql.NewTablex[*ArcgisAddressMapping, ArcgisAddressMappingSlice, *ArcgisAddressMappingSetter]("arcgis", "address_mapping", buildArcgisAddressMappingColumns("arcgis.address_mapping")) + +// ArcgisAddressMappingsQuery is a query on the address_mapping table +type ArcgisAddressMappingsQuery = *psql.ViewQuery[*ArcgisAddressMapping, ArcgisAddressMappingSlice] + +// arcgisAddressMappingR is where relationships are stored. +type arcgisAddressMappingR struct { + LayerField *ArcgisLayerField // arcgis.address_mapping.address_mapping_layer_feature_service_item_id_layer_index__fkey + Organization *Organization // arcgis.address_mapping.address_mapping_organization_id_fkey +} + +func buildArcgisAddressMappingColumns(alias string) arcgisAddressMappingColumns { + return arcgisAddressMappingColumns{ + ColumnsExpr: expr.NewColumnsExpr( + "destination", "layer_feature_service_item_id", "layer_index", "layer_field_name", "organization_id", + ).WithParent("arcgis.address_mapping"), + tableAlias: alias, + Destination: psql.Quote(alias, "destination"), + LayerFeatureServiceItemID: psql.Quote(alias, "layer_feature_service_item_id"), + LayerIndex: psql.Quote(alias, "layer_index"), + LayerFieldName: psql.Quote(alias, "layer_field_name"), + OrganizationID: psql.Quote(alias, "organization_id"), + } +} + +type arcgisAddressMappingColumns struct { + expr.ColumnsExpr + tableAlias string + Destination psql.Expression + LayerFeatureServiceItemID psql.Expression + LayerIndex psql.Expression + LayerFieldName psql.Expression + OrganizationID psql.Expression +} + +func (c arcgisAddressMappingColumns) Alias() string { + return c.tableAlias +} + +func (arcgisAddressMappingColumns) AliasedAs(alias string) arcgisAddressMappingColumns { + return buildArcgisAddressMappingColumns(alias) +} + +// ArcgisAddressMappingSetter is used for 
insert/upsert/update operations +// All values are optional, and do not have to be set +// Generated columns are not included +type ArcgisAddressMappingSetter struct { + Destination omit.Val[enums.ArcgisMappingdestinationaddress] `db:"destination,pk" ` + LayerFeatureServiceItemID omit.Val[string] `db:"layer_feature_service_item_id" ` + LayerIndex omit.Val[int32] `db:"layer_index" ` + LayerFieldName omit.Val[string] `db:"layer_field_name" ` + OrganizationID omit.Val[int32] `db:"organization_id,pk" ` +} + +func (s ArcgisAddressMappingSetter) SetColumns() []string { + vals := make([]string, 0, 5) + if s.Destination.IsValue() { + vals = append(vals, "destination") + } + if s.LayerFeatureServiceItemID.IsValue() { + vals = append(vals, "layer_feature_service_item_id") + } + if s.LayerIndex.IsValue() { + vals = append(vals, "layer_index") + } + if s.LayerFieldName.IsValue() { + vals = append(vals, "layer_field_name") + } + if s.OrganizationID.IsValue() { + vals = append(vals, "organization_id") + } + return vals +} + +func (s ArcgisAddressMappingSetter) Overwrite(t *ArcgisAddressMapping) { + if s.Destination.IsValue() { + t.Destination = s.Destination.MustGet() + } + if s.LayerFeatureServiceItemID.IsValue() { + t.LayerFeatureServiceItemID = s.LayerFeatureServiceItemID.MustGet() + } + if s.LayerIndex.IsValue() { + t.LayerIndex = s.LayerIndex.MustGet() + } + if s.LayerFieldName.IsValue() { + t.LayerFieldName = s.LayerFieldName.MustGet() + } + if s.OrganizationID.IsValue() { + t.OrganizationID = s.OrganizationID.MustGet() + } +} + +func (s *ArcgisAddressMappingSetter) Apply(q *dialect.InsertQuery) { + q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) { + return ArcgisAddressMappings.BeforeInsertHooks.RunHooks(ctx, exec, s) + }) + + q.AppendValues(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) { + vals := make([]bob.Expression, 5) + if s.Destination.IsValue() { + vals[0] = 
psql.Arg(s.Destination.MustGet()) + } else { + vals[0] = psql.Raw("DEFAULT") + } + + if s.LayerFeatureServiceItemID.IsValue() { + vals[1] = psql.Arg(s.LayerFeatureServiceItemID.MustGet()) + } else { + vals[1] = psql.Raw("DEFAULT") + } + + if s.LayerIndex.IsValue() { + vals[2] = psql.Arg(s.LayerIndex.MustGet()) + } else { + vals[2] = psql.Raw("DEFAULT") + } + + if s.LayerFieldName.IsValue() { + vals[3] = psql.Arg(s.LayerFieldName.MustGet()) + } else { + vals[3] = psql.Raw("DEFAULT") + } + + if s.OrganizationID.IsValue() { + vals[4] = psql.Arg(s.OrganizationID.MustGet()) + } else { + vals[4] = psql.Raw("DEFAULT") + } + + return bob.ExpressSlice(ctx, w, d, start, vals, "", ", ", "") + })) +} + +func (s ArcgisAddressMappingSetter) UpdateMod() bob.Mod[*dialect.UpdateQuery] { + return um.Set(s.Expressions()...) +} + +func (s ArcgisAddressMappingSetter) Expressions(prefix ...string) []bob.Expression { + exprs := make([]bob.Expression, 0, 5) + + if s.Destination.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "destination")...), + psql.Arg(s.Destination), + }}) + } + + if s.LayerFeatureServiceItemID.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "layer_feature_service_item_id")...), + psql.Arg(s.LayerFeatureServiceItemID), + }}) + } + + if s.LayerIndex.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "layer_index")...), + psql.Arg(s.LayerIndex), + }}) + } + + if s.LayerFieldName.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "layer_field_name")...), + psql.Arg(s.LayerFieldName), + }}) + } + + if s.OrganizationID.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "organization_id")...), + psql.Arg(s.OrganizationID), + }}) + } + + return exprs +} + +// 
FindArcgisAddressMapping retrieves a single record by primary key +// If cols is empty Find will return all columns. +func FindArcgisAddressMapping(ctx context.Context, exec bob.Executor, OrganizationIDPK int32, DestinationPK enums.ArcgisMappingdestinationaddress, cols ...string) (*ArcgisAddressMapping, error) { + if len(cols) == 0 { + return ArcgisAddressMappings.Query( + sm.Where(ArcgisAddressMappings.Columns.OrganizationID.EQ(psql.Arg(OrganizationIDPK))), + sm.Where(ArcgisAddressMappings.Columns.Destination.EQ(psql.Arg(DestinationPK))), + ).One(ctx, exec) + } + + return ArcgisAddressMappings.Query( + sm.Where(ArcgisAddressMappings.Columns.OrganizationID.EQ(psql.Arg(OrganizationIDPK))), + sm.Where(ArcgisAddressMappings.Columns.Destination.EQ(psql.Arg(DestinationPK))), + sm.Columns(ArcgisAddressMappings.Columns.Only(cols...)), + ).One(ctx, exec) +} + +// ArcgisAddressMappingExists checks the presence of a single record by primary key +func ArcgisAddressMappingExists(ctx context.Context, exec bob.Executor, OrganizationIDPK int32, DestinationPK enums.ArcgisMappingdestinationaddress) (bool, error) { + return ArcgisAddressMappings.Query( + sm.Where(ArcgisAddressMappings.Columns.OrganizationID.EQ(psql.Arg(OrganizationIDPK))), + sm.Where(ArcgisAddressMappings.Columns.Destination.EQ(psql.Arg(DestinationPK))), + ).Exists(ctx, exec) +} + +// AfterQueryHook is called after ArcgisAddressMapping is retrieved from the database +func (o *ArcgisAddressMapping) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error { + var err error + + switch queryType { + case bob.QueryTypeSelect: + ctx, err = ArcgisAddressMappings.AfterSelectHooks.RunHooks(ctx, exec, ArcgisAddressMappingSlice{o}) + case bob.QueryTypeInsert: + ctx, err = ArcgisAddressMappings.AfterInsertHooks.RunHooks(ctx, exec, ArcgisAddressMappingSlice{o}) + case bob.QueryTypeUpdate: + ctx, err = ArcgisAddressMappings.AfterUpdateHooks.RunHooks(ctx, exec, ArcgisAddressMappingSlice{o}) + case 
bob.QueryTypeDelete: + ctx, err = ArcgisAddressMappings.AfterDeleteHooks.RunHooks(ctx, exec, ArcgisAddressMappingSlice{o}) + } + + return err +} + +// primaryKeyVals returns the primary key values of the ArcgisAddressMapping +func (o *ArcgisAddressMapping) primaryKeyVals() bob.Expression { + return psql.ArgGroup( + o.OrganizationID, + o.Destination, + ) +} + +func (o *ArcgisAddressMapping) pkEQ() dialect.Expression { + return psql.Group(psql.Quote("arcgis.address_mapping", "organization_id"), psql.Quote("arcgis.address_mapping", "destination")).EQ(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) { + return o.primaryKeyVals().WriteSQL(ctx, w, d, start) + })) +} + +// Update uses an executor to update the ArcgisAddressMapping +func (o *ArcgisAddressMapping) Update(ctx context.Context, exec bob.Executor, s *ArcgisAddressMappingSetter) error { + v, err := ArcgisAddressMappings.Update(s.UpdateMod(), um.Where(o.pkEQ())).One(ctx, exec) + if err != nil { + return err + } + + o.R = v.R + *o = *v + + return nil +} + +// Delete deletes a single ArcgisAddressMapping record with an executor +func (o *ArcgisAddressMapping) Delete(ctx context.Context, exec bob.Executor) error { + _, err := ArcgisAddressMappings.Delete(dm.Where(o.pkEQ())).Exec(ctx, exec) + return err +} + +// Reload refreshes the ArcgisAddressMapping using the executor +func (o *ArcgisAddressMapping) Reload(ctx context.Context, exec bob.Executor) error { + o2, err := ArcgisAddressMappings.Query( + sm.Where(ArcgisAddressMappings.Columns.OrganizationID.EQ(psql.Arg(o.OrganizationID))), + sm.Where(ArcgisAddressMappings.Columns.Destination.EQ(psql.Arg(o.Destination))), + ).One(ctx, exec) + if err != nil { + return err + } + o2.R = o.R + *o = *o2 + + return nil +} + +// AfterQueryHook is called after ArcgisAddressMappingSlice is retrieved from the database +func (o ArcgisAddressMappingSlice) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType 
bob.QueryType) error { + var err error + + switch queryType { + case bob.QueryTypeSelect: + ctx, err = ArcgisAddressMappings.AfterSelectHooks.RunHooks(ctx, exec, o) + case bob.QueryTypeInsert: + ctx, err = ArcgisAddressMappings.AfterInsertHooks.RunHooks(ctx, exec, o) + case bob.QueryTypeUpdate: + ctx, err = ArcgisAddressMappings.AfterUpdateHooks.RunHooks(ctx, exec, o) + case bob.QueryTypeDelete: + ctx, err = ArcgisAddressMappings.AfterDeleteHooks.RunHooks(ctx, exec, o) + } + + return err +} + +func (o ArcgisAddressMappingSlice) pkIN() dialect.Expression { + if len(o) == 0 { + return psql.Raw("NULL") + } + + return psql.Group(psql.Quote("arcgis.address_mapping", "organization_id"), psql.Quote("arcgis.address_mapping", "destination")).In(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) { + pkPairs := make([]bob.Expression, len(o)) + for i, row := range o { + pkPairs[i] = row.primaryKeyVals() + } + return bob.ExpressSlice(ctx, w, d, start, pkPairs, "", ", ", "") + })) +} + +// copyMatchingRows finds models in the given slice that have the same primary key +// then it first copies the existing relationships from the old model to the new model +// and then replaces the old model in the slice with the new model +func (o ArcgisAddressMappingSlice) copyMatchingRows(from ...*ArcgisAddressMapping) { + for i, old := range o { + for _, new := range from { + if new.OrganizationID != old.OrganizationID { + continue + } + if new.Destination != old.Destination { + continue + } + new.R = old.R + o[i] = new + break + } + } +} + +// UpdateMod modifies an update query with "WHERE primary_key IN (o...)" +func (o ArcgisAddressMappingSlice) UpdateMod() bob.Mod[*dialect.UpdateQuery] { + return bob.ModFunc[*dialect.UpdateQuery](func(q *dialect.UpdateQuery) { + q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) { + return ArcgisAddressMappings.BeforeUpdateHooks.RunHooks(ctx, exec, o) + }) + + 
q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error { + var err error + switch retrieved := retrieved.(type) { + case *ArcgisAddressMapping: + o.copyMatchingRows(retrieved) + case []*ArcgisAddressMapping: + o.copyMatchingRows(retrieved...) + case ArcgisAddressMappingSlice: + o.copyMatchingRows(retrieved...) + default: + // If the retrieved value is not a ArcgisAddressMapping or a slice of ArcgisAddressMapping + // then run the AfterUpdateHooks on the slice + _, err = ArcgisAddressMappings.AfterUpdateHooks.RunHooks(ctx, exec, o) + } + + return err + })) + + q.AppendWhere(o.pkIN()) + }) +} + +// DeleteMod modifies an delete query with "WHERE primary_key IN (o...)" +func (o ArcgisAddressMappingSlice) DeleteMod() bob.Mod[*dialect.DeleteQuery] { + return bob.ModFunc[*dialect.DeleteQuery](func(q *dialect.DeleteQuery) { + q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) { + return ArcgisAddressMappings.BeforeDeleteHooks.RunHooks(ctx, exec, o) + }) + + q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error { + var err error + switch retrieved := retrieved.(type) { + case *ArcgisAddressMapping: + o.copyMatchingRows(retrieved) + case []*ArcgisAddressMapping: + o.copyMatchingRows(retrieved...) + case ArcgisAddressMappingSlice: + o.copyMatchingRows(retrieved...) 
+ default: + // If the retrieved value is not a ArcgisAddressMapping or a slice of ArcgisAddressMapping + // then run the AfterDeleteHooks on the slice + _, err = ArcgisAddressMappings.AfterDeleteHooks.RunHooks(ctx, exec, o) + } + + return err + })) + + q.AppendWhere(o.pkIN()) + }) +} + +func (o ArcgisAddressMappingSlice) UpdateAll(ctx context.Context, exec bob.Executor, vals ArcgisAddressMappingSetter) error { + if len(o) == 0 { + return nil + } + + _, err := ArcgisAddressMappings.Update(vals.UpdateMod(), o.UpdateMod()).All(ctx, exec) + return err +} + +func (o ArcgisAddressMappingSlice) DeleteAll(ctx context.Context, exec bob.Executor) error { + if len(o) == 0 { + return nil + } + + _, err := ArcgisAddressMappings.Delete(o.DeleteMod()).Exec(ctx, exec) + return err +} + +func (o ArcgisAddressMappingSlice) ReloadAll(ctx context.Context, exec bob.Executor) error { + if len(o) == 0 { + return nil + } + + o2, err := ArcgisAddressMappings.Query(sm.Where(o.pkIN())).All(ctx, exec) + if err != nil { + return err + } + + o.copyMatchingRows(o2...) + + return nil +} + +// LayerField starts a query for related objects on arcgis.layer_field +func (o *ArcgisAddressMapping) LayerField(mods ...bob.Mod[*dialect.SelectQuery]) ArcgisLayerFieldsQuery { + return ArcgisLayerFields.Query(append(mods, + sm.Where(ArcgisLayerFields.Columns.LayerFeatureServiceItemID.EQ(psql.Arg(o.LayerFeatureServiceItemID))), sm.Where(ArcgisLayerFields.Columns.LayerIndex.EQ(psql.Arg(o.LayerIndex))), sm.Where(ArcgisLayerFields.Columns.Name.EQ(psql.Arg(o.LayerFieldName))), + )...) 
+} + +func (os ArcgisAddressMappingSlice) LayerField(mods ...bob.Mod[*dialect.SelectQuery]) ArcgisLayerFieldsQuery { + pkLayerFeatureServiceItemID := make(pgtypes.Array[string], 0, len(os)) + + pkLayerIndex := make(pgtypes.Array[int32], 0, len(os)) + + pkLayerFieldName := make(pgtypes.Array[string], 0, len(os)) + for _, o := range os { + if o == nil { + continue + } + pkLayerFeatureServiceItemID = append(pkLayerFeatureServiceItemID, o.LayerFeatureServiceItemID) + pkLayerIndex = append(pkLayerIndex, o.LayerIndex) + pkLayerFieldName = append(pkLayerFieldName, o.LayerFieldName) + } + PKArgExpr := psql.Select(sm.Columns( + psql.F("unnest", psql.Cast(psql.Arg(pkLayerFeatureServiceItemID), "text[]")), + psql.F("unnest", psql.Cast(psql.Arg(pkLayerIndex), "integer[]")), + psql.F("unnest", psql.Cast(psql.Arg(pkLayerFieldName), "text[]")), + )) + + return ArcgisLayerFields.Query(append(mods, + sm.Where(psql.Group(ArcgisLayerFields.Columns.LayerFeatureServiceItemID, ArcgisLayerFields.Columns.LayerIndex, ArcgisLayerFields.Columns.Name).OP("IN", PKArgExpr)), + )...) +} + +// Organization starts a query for related objects on organization +func (o *ArcgisAddressMapping) Organization(mods ...bob.Mod[*dialect.SelectQuery]) OrganizationsQuery { + return Organizations.Query(append(mods, + sm.Where(Organizations.Columns.ID.EQ(psql.Arg(o.OrganizationID))), + )...) +} + +func (os ArcgisAddressMappingSlice) Organization(mods ...bob.Mod[*dialect.SelectQuery]) OrganizationsQuery { + pkOrganizationID := make(pgtypes.Array[int32], 0, len(os)) + for _, o := range os { + if o == nil { + continue + } + pkOrganizationID = append(pkOrganizationID, o.OrganizationID) + } + PKArgExpr := psql.Select(sm.Columns( + psql.F("unnest", psql.Cast(psql.Arg(pkOrganizationID), "integer[]")), + )) + + return Organizations.Query(append(mods, + sm.Where(psql.Group(Organizations.Columns.ID).OP("IN", PKArgExpr)), + )...) 
+} + +func attachArcgisAddressMappingLayerField0(ctx context.Context, exec bob.Executor, count int, arcgisAddressMapping0 *ArcgisAddressMapping, arcgisLayerField1 *ArcgisLayerField) (*ArcgisAddressMapping, error) { + setter := &ArcgisAddressMappingSetter{ + LayerFeatureServiceItemID: omit.From(arcgisLayerField1.LayerFeatureServiceItemID), + LayerIndex: omit.From(arcgisLayerField1.LayerIndex), + LayerFieldName: omit.From(arcgisLayerField1.Name), + } + + err := arcgisAddressMapping0.Update(ctx, exec, setter) + if err != nil { + return nil, fmt.Errorf("attachArcgisAddressMappingLayerField0: %w", err) + } + + return arcgisAddressMapping0, nil +} + +func (arcgisAddressMapping0 *ArcgisAddressMapping) InsertLayerField(ctx context.Context, exec bob.Executor, related *ArcgisLayerFieldSetter) error { + var err error + + arcgisLayerField1, err := ArcgisLayerFields.Insert(related).One(ctx, exec) + if err != nil { + return fmt.Errorf("inserting related objects: %w", err) + } + + _, err = attachArcgisAddressMappingLayerField0(ctx, exec, 1, arcgisAddressMapping0, arcgisLayerField1) + if err != nil { + return err + } + + arcgisAddressMapping0.R.LayerField = arcgisLayerField1 + + arcgisLayerField1.R.AddressMappings = append(arcgisLayerField1.R.AddressMappings, arcgisAddressMapping0) + + return nil +} + +func (arcgisAddressMapping0 *ArcgisAddressMapping) AttachLayerField(ctx context.Context, exec bob.Executor, arcgisLayerField1 *ArcgisLayerField) error { + var err error + + _, err = attachArcgisAddressMappingLayerField0(ctx, exec, 1, arcgisAddressMapping0, arcgisLayerField1) + if err != nil { + return err + } + + arcgisAddressMapping0.R.LayerField = arcgisLayerField1 + + arcgisLayerField1.R.AddressMappings = append(arcgisLayerField1.R.AddressMappings, arcgisAddressMapping0) + + return nil +} + +func attachArcgisAddressMappingOrganization0(ctx context.Context, exec bob.Executor, count int, arcgisAddressMapping0 *ArcgisAddressMapping, organization1 *Organization) 
(*ArcgisAddressMapping, error) { + setter := &ArcgisAddressMappingSetter{ + OrganizationID: omit.From(organization1.ID), + } + + err := arcgisAddressMapping0.Update(ctx, exec, setter) + if err != nil { + return nil, fmt.Errorf("attachArcgisAddressMappingOrganization0: %w", err) + } + + return arcgisAddressMapping0, nil +} + +func (arcgisAddressMapping0 *ArcgisAddressMapping) InsertOrganization(ctx context.Context, exec bob.Executor, related *OrganizationSetter) error { + var err error + + organization1, err := Organizations.Insert(related).One(ctx, exec) + if err != nil { + return fmt.Errorf("inserting related objects: %w", err) + } + + _, err = attachArcgisAddressMappingOrganization0(ctx, exec, 1, arcgisAddressMapping0, organization1) + if err != nil { + return err + } + + arcgisAddressMapping0.R.Organization = organization1 + + organization1.R.AddressMappings = append(organization1.R.AddressMappings, arcgisAddressMapping0) + + return nil +} + +func (arcgisAddressMapping0 *ArcgisAddressMapping) AttachOrganization(ctx context.Context, exec bob.Executor, organization1 *Organization) error { + var err error + + _, err = attachArcgisAddressMappingOrganization0(ctx, exec, 1, arcgisAddressMapping0, organization1) + if err != nil { + return err + } + + arcgisAddressMapping0.R.Organization = organization1 + + organization1.R.AddressMappings = append(organization1.R.AddressMappings, arcgisAddressMapping0) + + return nil +} + +type arcgisAddressMappingWhere[Q psql.Filterable] struct { + Destination psql.WhereMod[Q, enums.ArcgisMappingdestinationaddress] + LayerFeatureServiceItemID psql.WhereMod[Q, string] + LayerIndex psql.WhereMod[Q, int32] + LayerFieldName psql.WhereMod[Q, string] + OrganizationID psql.WhereMod[Q, int32] +} + +func (arcgisAddressMappingWhere[Q]) AliasedAs(alias string) arcgisAddressMappingWhere[Q] { + return buildArcgisAddressMappingWhere[Q](buildArcgisAddressMappingColumns(alias)) +} + +func buildArcgisAddressMappingWhere[Q psql.Filterable](cols 
arcgisAddressMappingColumns) arcgisAddressMappingWhere[Q] { + return arcgisAddressMappingWhere[Q]{ + Destination: psql.Where[Q, enums.ArcgisMappingdestinationaddress](cols.Destination), + LayerFeatureServiceItemID: psql.Where[Q, string](cols.LayerFeatureServiceItemID), + LayerIndex: psql.Where[Q, int32](cols.LayerIndex), + LayerFieldName: psql.Where[Q, string](cols.LayerFieldName), + OrganizationID: psql.Where[Q, int32](cols.OrganizationID), + } +} + +func (o *ArcgisAddressMapping) Preload(name string, retrieved any) error { + if o == nil { + return nil + } + + switch name { + case "LayerField": + rel, ok := retrieved.(*ArcgisLayerField) + if !ok { + return fmt.Errorf("arcgisAddressMapping cannot load %T as %q", retrieved, name) + } + + o.R.LayerField = rel + + if rel != nil { + rel.R.AddressMappings = ArcgisAddressMappingSlice{o} + } + return nil + case "Organization": + rel, ok := retrieved.(*Organization) + if !ok { + return fmt.Errorf("arcgisAddressMapping cannot load %T as %q", retrieved, name) + } + + o.R.Organization = rel + + if rel != nil { + rel.R.AddressMappings = ArcgisAddressMappingSlice{o} + } + return nil + default: + return fmt.Errorf("arcgisAddressMapping has no relationship %q", name) + } +} + +type arcgisAddressMappingPreloader struct { + LayerField func(...psql.PreloadOption) psql.Preloader + Organization func(...psql.PreloadOption) psql.Preloader +} + +func buildArcgisAddressMappingPreloader() arcgisAddressMappingPreloader { + return arcgisAddressMappingPreloader{ + LayerField: func(opts ...psql.PreloadOption) psql.Preloader { + return psql.Preload[*ArcgisLayerField, ArcgisLayerFieldSlice](psql.PreloadRel{ + Name: "LayerField", + Sides: []psql.PreloadSide{ + { + From: ArcgisAddressMappings, + To: ArcgisLayerFields, + FromColumns: []string{"layer_feature_service_item_id", "layer_index", "layer_field_name"}, + ToColumns: []string{"layer_feature_service_item_id", "layer_index", "name"}, + }, + }, + }, ArcgisLayerFields.Columns.Names(), opts...) 
+ }, + Organization: func(opts ...psql.PreloadOption) psql.Preloader { + return psql.Preload[*Organization, OrganizationSlice](psql.PreloadRel{ + Name: "Organization", + Sides: []psql.PreloadSide{ + { + From: ArcgisAddressMappings, + To: Organizations, + FromColumns: []string{"organization_id"}, + ToColumns: []string{"id"}, + }, + }, + }, Organizations.Columns.Names(), opts...) + }, + } +} + +type arcgisAddressMappingThenLoader[Q orm.Loadable] struct { + LayerField func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] + Organization func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] +} + +func buildArcgisAddressMappingThenLoader[Q orm.Loadable]() arcgisAddressMappingThenLoader[Q] { + type LayerFieldLoadInterface interface { + LoadLayerField(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error + } + type OrganizationLoadInterface interface { + LoadOrganization(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error + } + + return arcgisAddressMappingThenLoader[Q]{ + LayerField: thenLoadBuilder[Q]( + "LayerField", + func(ctx context.Context, exec bob.Executor, retrieved LayerFieldLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { + return retrieved.LoadLayerField(ctx, exec, mods...) + }, + ), + Organization: thenLoadBuilder[Q]( + "Organization", + func(ctx context.Context, exec bob.Executor, retrieved OrganizationLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { + return retrieved.LoadOrganization(ctx, exec, mods...) 
+ }, + ), + } +} + +// LoadLayerField loads the arcgisAddressMapping's LayerField into the .R struct +func (o *ArcgisAddressMapping) LoadLayerField(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if o == nil { + return nil + } + + // Reset the relationship + o.R.LayerField = nil + + related, err := o.LayerField(mods...).One(ctx, exec) + if err != nil { + return err + } + + related.R.AddressMappings = ArcgisAddressMappingSlice{o} + + o.R.LayerField = related + return nil +} + +// LoadLayerField loads the arcgisAddressMapping's LayerField into the .R struct +func (os ArcgisAddressMappingSlice) LoadLayerField(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if len(os) == 0 { + return nil + } + + arcgisLayerFields, err := os.LayerField(mods...).All(ctx, exec) + if err != nil { + return err + } + + for _, o := range os { + if o == nil { + continue + } + + for _, rel := range arcgisLayerFields { + + if !(o.LayerFeatureServiceItemID == rel.LayerFeatureServiceItemID) { + continue + } + + if !(o.LayerIndex == rel.LayerIndex) { + continue + } + + if !(o.LayerFieldName == rel.Name) { + continue + } + + rel.R.AddressMappings = append(rel.R.AddressMappings, o) + + o.R.LayerField = rel + break + } + } + + return nil +} + +// LoadOrganization loads the arcgisAddressMapping's Organization into the .R struct +func (o *ArcgisAddressMapping) LoadOrganization(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if o == nil { + return nil + } + + // Reset the relationship + o.R.Organization = nil + + related, err := o.Organization(mods...).One(ctx, exec) + if err != nil { + return err + } + + related.R.AddressMappings = ArcgisAddressMappingSlice{o} + + o.R.Organization = related + return nil +} + +// LoadOrganization loads the arcgisAddressMapping's Organization into the .R struct +func (os ArcgisAddressMappingSlice) LoadOrganization(ctx context.Context, exec 
bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if len(os) == 0 { + return nil + } + + organizations, err := os.Organization(mods...).All(ctx, exec) + if err != nil { + return err + } + + for _, o := range os { + if o == nil { + continue + } + + for _, rel := range organizations { + + if !(o.OrganizationID == rel.ID) { + continue + } + + rel.R.AddressMappings = append(rel.R.AddressMappings, o) + + o.R.Organization = rel + break + } + } + + return nil +} + +type arcgisAddressMappingJoins[Q dialect.Joinable] struct { + typ string + LayerField modAs[Q, arcgisLayerFieldColumns] + Organization modAs[Q, organizationColumns] +} + +func (j arcgisAddressMappingJoins[Q]) aliasedAs(alias string) arcgisAddressMappingJoins[Q] { + return buildArcgisAddressMappingJoins[Q](buildArcgisAddressMappingColumns(alias), j.typ) +} + +func buildArcgisAddressMappingJoins[Q dialect.Joinable](cols arcgisAddressMappingColumns, typ string) arcgisAddressMappingJoins[Q] { + return arcgisAddressMappingJoins[Q]{ + typ: typ, + LayerField: modAs[Q, arcgisLayerFieldColumns]{ + c: ArcgisLayerFields.Columns, + f: func(to arcgisLayerFieldColumns) bob.Mod[Q] { + mods := make(mods.QueryMods[Q], 0, 1) + + { + mods = append(mods, dialect.Join[Q](typ, ArcgisLayerFields.Name().As(to.Alias())).On( + to.LayerFeatureServiceItemID.EQ(cols.LayerFeatureServiceItemID), to.LayerIndex.EQ(cols.LayerIndex), to.Name.EQ(cols.LayerFieldName), + )) + } + + return mods + }, + }, + Organization: modAs[Q, organizationColumns]{ + c: Organizations.Columns, + f: func(to organizationColumns) bob.Mod[Q] { + mods := make(mods.QueryMods[Q], 0, 1) + + { + mods = append(mods, dialect.Join[Q](typ, Organizations.Name().As(to.Alias())).On( + to.ID.EQ(cols.OrganizationID), + )) + } + + return mods + }, + }, + } +} diff --git a/db/models/arcgis.feature_service.bob.go b/db/models/arcgis.feature_service.bob.go new file mode 100644 index 00000000..f7f1acdc --- /dev/null +++ b/db/models/arcgis.feature_service.bob.go @@ -0,0 +1,762 
@@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "fmt" + "io" + + "github.com/Gleipnir-Technology/bob" + "github.com/Gleipnir-Technology/bob/dialect/psql" + "github.com/Gleipnir-Technology/bob/dialect/psql/dialect" + "github.com/Gleipnir-Technology/bob/dialect/psql/dm" + "github.com/Gleipnir-Technology/bob/dialect/psql/sm" + "github.com/Gleipnir-Technology/bob/dialect/psql/um" + "github.com/Gleipnir-Technology/bob/expr" + "github.com/Gleipnir-Technology/bob/mods" + "github.com/Gleipnir-Technology/bob/orm" + "github.com/Gleipnir-Technology/bob/types/pgtypes" + "github.com/aarondl/opt/omit" +) + +// ArcgisFeatureService is an object representing the database table. +type ArcgisFeatureService struct { + Extent string `db:"extent" ` + ItemID string `db:"item_id,pk" ` + SpatialReference int32 `db:"spatial_reference" ` + URL string `db:"url" ` + + R arcgisFeatureServiceR `db:"-" ` + + C arcgisFeatureServiceC `db:"-" ` +} + +// ArcgisFeatureServiceSlice is an alias for a slice of pointers to ArcgisFeatureService. +// This should almost always be used instead of []*ArcgisFeatureService. +type ArcgisFeatureServiceSlice []*ArcgisFeatureService + +// ArcgisFeatureServices contains methods to work with the feature_service table +var ArcgisFeatureServices = psql.NewTablex[*ArcgisFeatureService, ArcgisFeatureServiceSlice, *ArcgisFeatureServiceSetter]("arcgis", "feature_service", buildArcgisFeatureServiceColumns("arcgis.feature_service")) + +// ArcgisFeatureServicesQuery is a query on the feature_service table +type ArcgisFeatureServicesQuery = *psql.ViewQuery[*ArcgisFeatureService, ArcgisFeatureServiceSlice] + +// arcgisFeatureServiceR is where relationships are stored. 
+type arcgisFeatureServiceR struct { + FeatureServiceItemLayers ArcgisLayerSlice // arcgis.layer.layer_feature_service_item_id_fkey +} + +func buildArcgisFeatureServiceColumns(alias string) arcgisFeatureServiceColumns { + return arcgisFeatureServiceColumns{ + ColumnsExpr: expr.NewColumnsExpr( + "extent", "item_id", "spatial_reference", "url", + ).WithParent("arcgis.feature_service"), + tableAlias: alias, + Extent: psql.Quote(alias, "extent"), + ItemID: psql.Quote(alias, "item_id"), + SpatialReference: psql.Quote(alias, "spatial_reference"), + URL: psql.Quote(alias, "url"), + } +} + +type arcgisFeatureServiceColumns struct { + expr.ColumnsExpr + tableAlias string + Extent psql.Expression + ItemID psql.Expression + SpatialReference psql.Expression + URL psql.Expression +} + +func (c arcgisFeatureServiceColumns) Alias() string { + return c.tableAlias +} + +func (arcgisFeatureServiceColumns) AliasedAs(alias string) arcgisFeatureServiceColumns { + return buildArcgisFeatureServiceColumns(alias) +} + +// ArcgisFeatureServiceSetter is used for insert/upsert/update operations +// All values are optional, and do not have to be set +// Generated columns are not included +type ArcgisFeatureServiceSetter struct { + Extent omit.Val[string] `db:"extent" ` + ItemID omit.Val[string] `db:"item_id,pk" ` + SpatialReference omit.Val[int32] `db:"spatial_reference" ` + URL omit.Val[string] `db:"url" ` +} + +func (s ArcgisFeatureServiceSetter) SetColumns() []string { + vals := make([]string, 0, 4) + if s.Extent.IsValue() { + vals = append(vals, "extent") + } + if s.ItemID.IsValue() { + vals = append(vals, "item_id") + } + if s.SpatialReference.IsValue() { + vals = append(vals, "spatial_reference") + } + if s.URL.IsValue() { + vals = append(vals, "url") + } + return vals +} + +func (s ArcgisFeatureServiceSetter) Overwrite(t *ArcgisFeatureService) { + if s.Extent.IsValue() { + t.Extent = s.Extent.MustGet() + } + if s.ItemID.IsValue() { + t.ItemID = s.ItemID.MustGet() + } + if 
s.SpatialReference.IsValue() { + t.SpatialReference = s.SpatialReference.MustGet() + } + if s.URL.IsValue() { + t.URL = s.URL.MustGet() + } +} + +func (s *ArcgisFeatureServiceSetter) Apply(q *dialect.InsertQuery) { + q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) { + return ArcgisFeatureServices.BeforeInsertHooks.RunHooks(ctx, exec, s) + }) + + q.AppendValues(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) { + vals := make([]bob.Expression, 4) + if s.Extent.IsValue() { + vals[0] = psql.Arg(s.Extent.MustGet()) + } else { + vals[0] = psql.Raw("DEFAULT") + } + + if s.ItemID.IsValue() { + vals[1] = psql.Arg(s.ItemID.MustGet()) + } else { + vals[1] = psql.Raw("DEFAULT") + } + + if s.SpatialReference.IsValue() { + vals[2] = psql.Arg(s.SpatialReference.MustGet()) + } else { + vals[2] = psql.Raw("DEFAULT") + } + + if s.URL.IsValue() { + vals[3] = psql.Arg(s.URL.MustGet()) + } else { + vals[3] = psql.Raw("DEFAULT") + } + + return bob.ExpressSlice(ctx, w, d, start, vals, "", ", ", "") + })) +} + +func (s ArcgisFeatureServiceSetter) UpdateMod() bob.Mod[*dialect.UpdateQuery] { + return um.Set(s.Expressions()...) 
+} + +func (s ArcgisFeatureServiceSetter) Expressions(prefix ...string) []bob.Expression { + exprs := make([]bob.Expression, 0, 4) + + if s.Extent.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "extent")...), + psql.Arg(s.Extent), + }}) + } + + if s.ItemID.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "item_id")...), + psql.Arg(s.ItemID), + }}) + } + + if s.SpatialReference.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "spatial_reference")...), + psql.Arg(s.SpatialReference), + }}) + } + + if s.URL.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "url")...), + psql.Arg(s.URL), + }}) + } + + return exprs +} + +// FindArcgisFeatureService retrieves a single record by primary key +// If cols is empty Find will return all columns. +func FindArcgisFeatureService(ctx context.Context, exec bob.Executor, ItemIDPK string, cols ...string) (*ArcgisFeatureService, error) { + if len(cols) == 0 { + return ArcgisFeatureServices.Query( + sm.Where(ArcgisFeatureServices.Columns.ItemID.EQ(psql.Arg(ItemIDPK))), + ).One(ctx, exec) + } + + return ArcgisFeatureServices.Query( + sm.Where(ArcgisFeatureServices.Columns.ItemID.EQ(psql.Arg(ItemIDPK))), + sm.Columns(ArcgisFeatureServices.Columns.Only(cols...)), + ).One(ctx, exec) +} + +// ArcgisFeatureServiceExists checks the presence of a single record by primary key +func ArcgisFeatureServiceExists(ctx context.Context, exec bob.Executor, ItemIDPK string) (bool, error) { + return ArcgisFeatureServices.Query( + sm.Where(ArcgisFeatureServices.Columns.ItemID.EQ(psql.Arg(ItemIDPK))), + ).Exists(ctx, exec) +} + +// AfterQueryHook is called after ArcgisFeatureService is retrieved from the database +func (o *ArcgisFeatureService) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType 
bob.QueryType) error { + var err error + + switch queryType { + case bob.QueryTypeSelect: + ctx, err = ArcgisFeatureServices.AfterSelectHooks.RunHooks(ctx, exec, ArcgisFeatureServiceSlice{o}) + case bob.QueryTypeInsert: + ctx, err = ArcgisFeatureServices.AfterInsertHooks.RunHooks(ctx, exec, ArcgisFeatureServiceSlice{o}) + case bob.QueryTypeUpdate: + ctx, err = ArcgisFeatureServices.AfterUpdateHooks.RunHooks(ctx, exec, ArcgisFeatureServiceSlice{o}) + case bob.QueryTypeDelete: + ctx, err = ArcgisFeatureServices.AfterDeleteHooks.RunHooks(ctx, exec, ArcgisFeatureServiceSlice{o}) + } + + return err +} + +// primaryKeyVals returns the primary key values of the ArcgisFeatureService +func (o *ArcgisFeatureService) primaryKeyVals() bob.Expression { + return psql.Arg(o.ItemID) +} + +func (o *ArcgisFeatureService) pkEQ() dialect.Expression { + return psql.Quote("arcgis.feature_service", "item_id").EQ(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) { + return o.primaryKeyVals().WriteSQL(ctx, w, d, start) + })) +} + +// Update uses an executor to update the ArcgisFeatureService +func (o *ArcgisFeatureService) Update(ctx context.Context, exec bob.Executor, s *ArcgisFeatureServiceSetter) error { + v, err := ArcgisFeatureServices.Update(s.UpdateMod(), um.Where(o.pkEQ())).One(ctx, exec) + if err != nil { + return err + } + + o.R = v.R + *o = *v + + return nil +} + +// Delete deletes a single ArcgisFeatureService record with an executor +func (o *ArcgisFeatureService) Delete(ctx context.Context, exec bob.Executor) error { + _, err := ArcgisFeatureServices.Delete(dm.Where(o.pkEQ())).Exec(ctx, exec) + return err +} + +// Reload refreshes the ArcgisFeatureService using the executor +func (o *ArcgisFeatureService) Reload(ctx context.Context, exec bob.Executor) error { + o2, err := ArcgisFeatureServices.Query( + sm.Where(ArcgisFeatureServices.Columns.ItemID.EQ(psql.Arg(o.ItemID))), + ).One(ctx, exec) + if err != nil { + return err 
+ } + o2.R = o.R + *o = *o2 + + return nil +} + +// AfterQueryHook is called after ArcgisFeatureServiceSlice is retrieved from the database +func (o ArcgisFeatureServiceSlice) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error { + var err error + + switch queryType { + case bob.QueryTypeSelect: + ctx, err = ArcgisFeatureServices.AfterSelectHooks.RunHooks(ctx, exec, o) + case bob.QueryTypeInsert: + ctx, err = ArcgisFeatureServices.AfterInsertHooks.RunHooks(ctx, exec, o) + case bob.QueryTypeUpdate: + ctx, err = ArcgisFeatureServices.AfterUpdateHooks.RunHooks(ctx, exec, o) + case bob.QueryTypeDelete: + ctx, err = ArcgisFeatureServices.AfterDeleteHooks.RunHooks(ctx, exec, o) + } + + return err +} + +func (o ArcgisFeatureServiceSlice) pkIN() dialect.Expression { + if len(o) == 0 { + return psql.Raw("NULL") + } + + return psql.Quote("arcgis.feature_service", "item_id").In(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) { + pkPairs := make([]bob.Expression, len(o)) + for i, row := range o { + pkPairs[i] = row.primaryKeyVals() + } + return bob.ExpressSlice(ctx, w, d, start, pkPairs, "", ", ", "") + })) +} + +// copyMatchingRows finds models in the given slice that have the same primary key +// then it first copies the existing relationships from the old model to the new model +// and then replaces the old model in the slice with the new model +func (o ArcgisFeatureServiceSlice) copyMatchingRows(from ...*ArcgisFeatureService) { + for i, old := range o { + for _, new := range from { + if new.ItemID != old.ItemID { + continue + } + new.R = old.R + o[i] = new + break + } + } +} + +// UpdateMod modifies an update query with "WHERE primary_key IN (o...)" +func (o ArcgisFeatureServiceSlice) UpdateMod() bob.Mod[*dialect.UpdateQuery] { + return bob.ModFunc[*dialect.UpdateQuery](func(q *dialect.UpdateQuery) { + q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, 
error) { + return ArcgisFeatureServices.BeforeUpdateHooks.RunHooks(ctx, exec, o) + }) + + q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error { + var err error + switch retrieved := retrieved.(type) { + case *ArcgisFeatureService: + o.copyMatchingRows(retrieved) + case []*ArcgisFeatureService: + o.copyMatchingRows(retrieved...) + case ArcgisFeatureServiceSlice: + o.copyMatchingRows(retrieved...) + default: + // If the retrieved value is not a ArcgisFeatureService or a slice of ArcgisFeatureService + // then run the AfterUpdateHooks on the slice + _, err = ArcgisFeatureServices.AfterUpdateHooks.RunHooks(ctx, exec, o) + } + + return err + })) + + q.AppendWhere(o.pkIN()) + }) +} + +// DeleteMod modifies an delete query with "WHERE primary_key IN (o...)" +func (o ArcgisFeatureServiceSlice) DeleteMod() bob.Mod[*dialect.DeleteQuery] { + return bob.ModFunc[*dialect.DeleteQuery](func(q *dialect.DeleteQuery) { + q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) { + return ArcgisFeatureServices.BeforeDeleteHooks.RunHooks(ctx, exec, o) + }) + + q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error { + var err error + switch retrieved := retrieved.(type) { + case *ArcgisFeatureService: + o.copyMatchingRows(retrieved) + case []*ArcgisFeatureService: + o.copyMatchingRows(retrieved...) + case ArcgisFeatureServiceSlice: + o.copyMatchingRows(retrieved...) 
+ default: + // If the retrieved value is not a ArcgisFeatureService or a slice of ArcgisFeatureService + // then run the AfterDeleteHooks on the slice + _, err = ArcgisFeatureServices.AfterDeleteHooks.RunHooks(ctx, exec, o) + } + + return err + })) + + q.AppendWhere(o.pkIN()) + }) +} + +func (o ArcgisFeatureServiceSlice) UpdateAll(ctx context.Context, exec bob.Executor, vals ArcgisFeatureServiceSetter) error { + if len(o) == 0 { + return nil + } + + _, err := ArcgisFeatureServices.Update(vals.UpdateMod(), o.UpdateMod()).All(ctx, exec) + return err +} + +func (o ArcgisFeatureServiceSlice) DeleteAll(ctx context.Context, exec bob.Executor) error { + if len(o) == 0 { + return nil + } + + _, err := ArcgisFeatureServices.Delete(o.DeleteMod()).Exec(ctx, exec) + return err +} + +func (o ArcgisFeatureServiceSlice) ReloadAll(ctx context.Context, exec bob.Executor) error { + if len(o) == 0 { + return nil + } + + o2, err := ArcgisFeatureServices.Query(sm.Where(o.pkIN())).All(ctx, exec) + if err != nil { + return err + } + + o.copyMatchingRows(o2...) + + return nil +} + +// FeatureServiceItemLayers starts a query for related objects on arcgis.layer +func (o *ArcgisFeatureService) FeatureServiceItemLayers(mods ...bob.Mod[*dialect.SelectQuery]) ArcgisLayersQuery { + return ArcgisLayers.Query(append(mods, + sm.Where(ArcgisLayers.Columns.FeatureServiceItemID.EQ(psql.Arg(o.ItemID))), + )...) +} + +func (os ArcgisFeatureServiceSlice) FeatureServiceItemLayers(mods ...bob.Mod[*dialect.SelectQuery]) ArcgisLayersQuery { + pkItemID := make(pgtypes.Array[string], 0, len(os)) + for _, o := range os { + if o == nil { + continue + } + pkItemID = append(pkItemID, o.ItemID) + } + PKArgExpr := psql.Select(sm.Columns( + psql.F("unnest", psql.Cast(psql.Arg(pkItemID), "text[]")), + )) + + return ArcgisLayers.Query(append(mods, + sm.Where(psql.Group(ArcgisLayers.Columns.FeatureServiceItemID).OP("IN", PKArgExpr)), + )...) 
+} + +func insertArcgisFeatureServiceFeatureServiceItemLayers0(ctx context.Context, exec bob.Executor, arcgisLayers1 []*ArcgisLayerSetter, arcgisFeatureService0 *ArcgisFeatureService) (ArcgisLayerSlice, error) { + for i := range arcgisLayers1 { + arcgisLayers1[i].FeatureServiceItemID = omit.From(arcgisFeatureService0.ItemID) + } + + ret, err := ArcgisLayers.Insert(bob.ToMods(arcgisLayers1...)).All(ctx, exec) + if err != nil { + return ret, fmt.Errorf("insertArcgisFeatureServiceFeatureServiceItemLayers0: %w", err) + } + + return ret, nil +} + +func attachArcgisFeatureServiceFeatureServiceItemLayers0(ctx context.Context, exec bob.Executor, count int, arcgisLayers1 ArcgisLayerSlice, arcgisFeatureService0 *ArcgisFeatureService) (ArcgisLayerSlice, error) { + setter := &ArcgisLayerSetter{ + FeatureServiceItemID: omit.From(arcgisFeatureService0.ItemID), + } + + err := arcgisLayers1.UpdateAll(ctx, exec, *setter) + if err != nil { + return nil, fmt.Errorf("attachArcgisFeatureServiceFeatureServiceItemLayers0: %w", err) + } + + return arcgisLayers1, nil +} + +func (arcgisFeatureService0 *ArcgisFeatureService) InsertFeatureServiceItemLayers(ctx context.Context, exec bob.Executor, related ...*ArcgisLayerSetter) error { + if len(related) == 0 { + return nil + } + + var err error + + arcgisLayers1, err := insertArcgisFeatureServiceFeatureServiceItemLayers0(ctx, exec, related, arcgisFeatureService0) + if err != nil { + return err + } + + arcgisFeatureService0.R.FeatureServiceItemLayers = append(arcgisFeatureService0.R.FeatureServiceItemLayers, arcgisLayers1...) 
+ + for _, rel := range arcgisLayers1 { + rel.R.FeatureServiceItemFeatureService = arcgisFeatureService0 + } + return nil +} + +func (arcgisFeatureService0 *ArcgisFeatureService) AttachFeatureServiceItemLayers(ctx context.Context, exec bob.Executor, related ...*ArcgisLayer) error { + if len(related) == 0 { + return nil + } + + var err error + arcgisLayers1 := ArcgisLayerSlice(related) + + _, err = attachArcgisFeatureServiceFeatureServiceItemLayers0(ctx, exec, len(related), arcgisLayers1, arcgisFeatureService0) + if err != nil { + return err + } + + arcgisFeatureService0.R.FeatureServiceItemLayers = append(arcgisFeatureService0.R.FeatureServiceItemLayers, arcgisLayers1...) + + for _, rel := range related { + rel.R.FeatureServiceItemFeatureService = arcgisFeatureService0 + } + + return nil +} + +type arcgisFeatureServiceWhere[Q psql.Filterable] struct { + Extent psql.WhereMod[Q, string] + ItemID psql.WhereMod[Q, string] + SpatialReference psql.WhereMod[Q, int32] + URL psql.WhereMod[Q, string] +} + +func (arcgisFeatureServiceWhere[Q]) AliasedAs(alias string) arcgisFeatureServiceWhere[Q] { + return buildArcgisFeatureServiceWhere[Q](buildArcgisFeatureServiceColumns(alias)) +} + +func buildArcgisFeatureServiceWhere[Q psql.Filterable](cols arcgisFeatureServiceColumns) arcgisFeatureServiceWhere[Q] { + return arcgisFeatureServiceWhere[Q]{ + Extent: psql.Where[Q, string](cols.Extent), + ItemID: psql.Where[Q, string](cols.ItemID), + SpatialReference: psql.Where[Q, int32](cols.SpatialReference), + URL: psql.Where[Q, string](cols.URL), + } +} + +func (o *ArcgisFeatureService) Preload(name string, retrieved any) error { + if o == nil { + return nil + } + + switch name { + case "FeatureServiceItemLayers": + rels, ok := retrieved.(ArcgisLayerSlice) + if !ok { + return fmt.Errorf("arcgisFeatureService cannot load %T as %q", retrieved, name) + } + + o.R.FeatureServiceItemLayers = rels + + for _, rel := range rels { + if rel != nil { + rel.R.FeatureServiceItemFeatureService = o + } + 
} + return nil + default: + return fmt.Errorf("arcgisFeatureService has no relationship %q", name) + } +} + +type arcgisFeatureServicePreloader struct{} + +func buildArcgisFeatureServicePreloader() arcgisFeatureServicePreloader { + return arcgisFeatureServicePreloader{} +} + +type arcgisFeatureServiceThenLoader[Q orm.Loadable] struct { + FeatureServiceItemLayers func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] +} + +func buildArcgisFeatureServiceThenLoader[Q orm.Loadable]() arcgisFeatureServiceThenLoader[Q] { + type FeatureServiceItemLayersLoadInterface interface { + LoadFeatureServiceItemLayers(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error + } + + return arcgisFeatureServiceThenLoader[Q]{ + FeatureServiceItemLayers: thenLoadBuilder[Q]( + "FeatureServiceItemLayers", + func(ctx context.Context, exec bob.Executor, retrieved FeatureServiceItemLayersLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { + return retrieved.LoadFeatureServiceItemLayers(ctx, exec, mods...) 
+ }, + ), + } +} + +// LoadFeatureServiceItemLayers loads the arcgisFeatureService's FeatureServiceItemLayers into the .R struct +func (o *ArcgisFeatureService) LoadFeatureServiceItemLayers(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if o == nil { + return nil + } + + // Reset the relationship + o.R.FeatureServiceItemLayers = nil + + related, err := o.FeatureServiceItemLayers(mods...).All(ctx, exec) + if err != nil { + return err + } + + for _, rel := range related { + rel.R.FeatureServiceItemFeatureService = o + } + + o.R.FeatureServiceItemLayers = related + return nil +} + +// LoadFeatureServiceItemLayers loads the arcgisFeatureService's FeatureServiceItemLayers into the .R struct +func (os ArcgisFeatureServiceSlice) LoadFeatureServiceItemLayers(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if len(os) == 0 { + return nil + } + + arcgisLayers, err := os.FeatureServiceItemLayers(mods...).All(ctx, exec) + if err != nil { + return err + } + + for _, o := range os { + if o == nil { + continue + } + + o.R.FeatureServiceItemLayers = nil + } + + for _, o := range os { + if o == nil { + continue + } + + for _, rel := range arcgisLayers { + + if !(o.ItemID == rel.FeatureServiceItemID) { + continue + } + + rel.R.FeatureServiceItemFeatureService = o + + o.R.FeatureServiceItemLayers = append(o.R.FeatureServiceItemLayers, rel) + } + } + + return nil +} + +// arcgisFeatureServiceC is where relationship counts are stored. 
+type arcgisFeatureServiceC struct { + FeatureServiceItemLayers *int64 +} + +// PreloadCount sets a count in the C struct by name +func (o *ArcgisFeatureService) PreloadCount(name string, count int64) error { + if o == nil { + return nil + } + + switch name { + case "FeatureServiceItemLayers": + o.C.FeatureServiceItemLayers = &count + } + return nil +} + +type arcgisFeatureServiceCountPreloader struct { + FeatureServiceItemLayers func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader +} + +func buildArcgisFeatureServiceCountPreloader() arcgisFeatureServiceCountPreloader { + return arcgisFeatureServiceCountPreloader{ + FeatureServiceItemLayers: func(mods ...bob.Mod[*dialect.SelectQuery]) psql.Preloader { + return countPreloader[*ArcgisFeatureService]("FeatureServiceItemLayers", func(parent string) bob.Expression { + // Build a correlated subquery: (SELECT COUNT(*) FROM related WHERE fk = parent.pk) + if parent == "" { + parent = ArcgisFeatureServices.Alias() + } + + subqueryMods := []bob.Mod[*dialect.SelectQuery]{ + sm.Columns(psql.Raw("count(*)")), + + sm.From(ArcgisLayers.Name()), + sm.Where(psql.Quote(ArcgisLayers.Alias(), "feature_service_item_id").EQ(psql.Quote(parent, "item_id"))), + } + subqueryMods = append(subqueryMods, mods...) 
+ return psql.Group(psql.Select(subqueryMods...).Expression) + }) + }, + } +} + +type arcgisFeatureServiceCountThenLoader[Q orm.Loadable] struct { + FeatureServiceItemLayers func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] +} + +func buildArcgisFeatureServiceCountThenLoader[Q orm.Loadable]() arcgisFeatureServiceCountThenLoader[Q] { + type FeatureServiceItemLayersCountInterface interface { + LoadCountFeatureServiceItemLayers(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error + } + + return arcgisFeatureServiceCountThenLoader[Q]{ + FeatureServiceItemLayers: countThenLoadBuilder[Q]( + "FeatureServiceItemLayers", + func(ctx context.Context, exec bob.Executor, retrieved FeatureServiceItemLayersCountInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { + return retrieved.LoadCountFeatureServiceItemLayers(ctx, exec, mods...) + }, + ), + } +} + +// LoadCountFeatureServiceItemLayers loads the count of FeatureServiceItemLayers into the C struct +func (o *ArcgisFeatureService) LoadCountFeatureServiceItemLayers(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if o == nil { + return nil + } + + count, err := o.FeatureServiceItemLayers(mods...).Count(ctx, exec) + if err != nil { + return err + } + + o.C.FeatureServiceItemLayers = &count + return nil +} + +// LoadCountFeatureServiceItemLayers loads the count of FeatureServiceItemLayers for a slice +func (os ArcgisFeatureServiceSlice) LoadCountFeatureServiceItemLayers(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if len(os) == 0 { + return nil + } + + for _, o := range os { + if err := o.LoadCountFeatureServiceItemLayers(ctx, exec, mods...); err != nil { + return err + } + } + + return nil +} + +type arcgisFeatureServiceJoins[Q dialect.Joinable] struct { + typ string + FeatureServiceItemLayers modAs[Q, arcgisLayerColumns] +} + +func (j arcgisFeatureServiceJoins[Q]) aliasedAs(alias string) arcgisFeatureServiceJoins[Q] { + 
return buildArcgisFeatureServiceJoins[Q](buildArcgisFeatureServiceColumns(alias), j.typ) +} + +func buildArcgisFeatureServiceJoins[Q dialect.Joinable](cols arcgisFeatureServiceColumns, typ string) arcgisFeatureServiceJoins[Q] { + return arcgisFeatureServiceJoins[Q]{ + typ: typ, + FeatureServiceItemLayers: modAs[Q, arcgisLayerColumns]{ + c: ArcgisLayers.Columns, + f: func(to arcgisLayerColumns) bob.Mod[Q] { + mods := make(mods.QueryMods[Q], 0, 1) + + { + mods = append(mods, dialect.Join[Q](typ, ArcgisLayers.Name().As(to.Alias())).On( + to.FeatureServiceItemID.EQ(cols.ItemID), + )) + } + + return mods + }, + }, + } +} diff --git a/db/models/arcgis.layer.bob.go b/db/models/arcgis.layer.bob.go new file mode 100644 index 00000000..aec268f1 --- /dev/null +++ b/db/models/arcgis.layer.bob.go @@ -0,0 +1,936 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "fmt" + "io" + + "github.com/Gleipnir-Technology/bob" + "github.com/Gleipnir-Technology/bob/dialect/psql" + "github.com/Gleipnir-Technology/bob/dialect/psql/dialect" + "github.com/Gleipnir-Technology/bob/dialect/psql/dm" + "github.com/Gleipnir-Technology/bob/dialect/psql/sm" + "github.com/Gleipnir-Technology/bob/dialect/psql/um" + "github.com/Gleipnir-Technology/bob/expr" + "github.com/Gleipnir-Technology/bob/mods" + "github.com/Gleipnir-Technology/bob/orm" + "github.com/Gleipnir-Technology/bob/types/pgtypes" + "github.com/aarondl/opt/omit" +) + +// ArcgisLayer is an object representing the database table. +type ArcgisLayer struct { + Extent string `db:"extent" ` + FeatureServiceItemID string `db:"feature_service_item_id,pk" ` + Index int32 `db:"index_,pk" ` + + R arcgisLayerR `db:"-" ` + + C arcgisLayerC `db:"-" ` +} + +// ArcgisLayerSlice is an alias for a slice of pointers to ArcgisLayer. +// This should almost always be used instead of []*ArcgisLayer. 
+type ArcgisLayerSlice []*ArcgisLayer + +// ArcgisLayers contains methods to work with the layer table +var ArcgisLayers = psql.NewTablex[*ArcgisLayer, ArcgisLayerSlice, *ArcgisLayerSetter]("arcgis", "layer", buildArcgisLayerColumns("arcgis.layer")) + +// ArcgisLayersQuery is a query on the layer table +type ArcgisLayersQuery = *psql.ViewQuery[*ArcgisLayer, ArcgisLayerSlice] + +// arcgisLayerR is where relationships are stored. +type arcgisLayerR struct { + FeatureServiceItemFeatureService *ArcgisFeatureService // arcgis.layer.layer_feature_service_item_id_fkey + LayerFields ArcgisLayerFieldSlice // arcgis.layer_field.layer_field_layer_feature_service_item_id_layer_index_fkey +} + +func buildArcgisLayerColumns(alias string) arcgisLayerColumns { + return arcgisLayerColumns{ + ColumnsExpr: expr.NewColumnsExpr( + "extent", "feature_service_item_id", "index_", + ).WithParent("arcgis.layer"), + tableAlias: alias, + Extent: psql.Quote(alias, "extent"), + FeatureServiceItemID: psql.Quote(alias, "feature_service_item_id"), + Index: psql.Quote(alias, "index_"), + } +} + +type arcgisLayerColumns struct { + expr.ColumnsExpr + tableAlias string + Extent psql.Expression + FeatureServiceItemID psql.Expression + Index psql.Expression +} + +func (c arcgisLayerColumns) Alias() string { + return c.tableAlias +} + +func (arcgisLayerColumns) AliasedAs(alias string) arcgisLayerColumns { + return buildArcgisLayerColumns(alias) +} + +// ArcgisLayerSetter is used for insert/upsert/update operations +// All values are optional, and do not have to be set +// Generated columns are not included +type ArcgisLayerSetter struct { + Extent omit.Val[string] `db:"extent" ` + FeatureServiceItemID omit.Val[string] `db:"feature_service_item_id,pk" ` + Index omit.Val[int32] `db:"index_,pk" ` +} + +func (s ArcgisLayerSetter) SetColumns() []string { + vals := make([]string, 0, 3) + if s.Extent.IsValue() { + vals = append(vals, "extent") + } + if s.FeatureServiceItemID.IsValue() { + vals = append(vals, 
"feature_service_item_id") + } + if s.Index.IsValue() { + vals = append(vals, "index_") + } + return vals +} + +func (s ArcgisLayerSetter) Overwrite(t *ArcgisLayer) { + if s.Extent.IsValue() { + t.Extent = s.Extent.MustGet() + } + if s.FeatureServiceItemID.IsValue() { + t.FeatureServiceItemID = s.FeatureServiceItemID.MustGet() + } + if s.Index.IsValue() { + t.Index = s.Index.MustGet() + } +} + +func (s *ArcgisLayerSetter) Apply(q *dialect.InsertQuery) { + q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) { + return ArcgisLayers.BeforeInsertHooks.RunHooks(ctx, exec, s) + }) + + q.AppendValues(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) { + vals := make([]bob.Expression, 3) + if s.Extent.IsValue() { + vals[0] = psql.Arg(s.Extent.MustGet()) + } else { + vals[0] = psql.Raw("DEFAULT") + } + + if s.FeatureServiceItemID.IsValue() { + vals[1] = psql.Arg(s.FeatureServiceItemID.MustGet()) + } else { + vals[1] = psql.Raw("DEFAULT") + } + + if s.Index.IsValue() { + vals[2] = psql.Arg(s.Index.MustGet()) + } else { + vals[2] = psql.Raw("DEFAULT") + } + + return bob.ExpressSlice(ctx, w, d, start, vals, "", ", ", "") + })) +} + +func (s ArcgisLayerSetter) UpdateMod() bob.Mod[*dialect.UpdateQuery] { + return um.Set(s.Expressions()...) 
+} + +func (s ArcgisLayerSetter) Expressions(prefix ...string) []bob.Expression { + exprs := make([]bob.Expression, 0, 3) + + if s.Extent.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "extent")...), + psql.Arg(s.Extent), + }}) + } + + if s.FeatureServiceItemID.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "feature_service_item_id")...), + psql.Arg(s.FeatureServiceItemID), + }}) + } + + if s.Index.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "index_")...), + psql.Arg(s.Index), + }}) + } + + return exprs +} + +// FindArcgisLayer retrieves a single record by primary key +// If cols is empty Find will return all columns. +func FindArcgisLayer(ctx context.Context, exec bob.Executor, FeatureServiceItemIDPK string, IndexPK int32, cols ...string) (*ArcgisLayer, error) { + if len(cols) == 0 { + return ArcgisLayers.Query( + sm.Where(ArcgisLayers.Columns.FeatureServiceItemID.EQ(psql.Arg(FeatureServiceItemIDPK))), + sm.Where(ArcgisLayers.Columns.Index.EQ(psql.Arg(IndexPK))), + ).One(ctx, exec) + } + + return ArcgisLayers.Query( + sm.Where(ArcgisLayers.Columns.FeatureServiceItemID.EQ(psql.Arg(FeatureServiceItemIDPK))), + sm.Where(ArcgisLayers.Columns.Index.EQ(psql.Arg(IndexPK))), + sm.Columns(ArcgisLayers.Columns.Only(cols...)), + ).One(ctx, exec) +} + +// ArcgisLayerExists checks the presence of a single record by primary key +func ArcgisLayerExists(ctx context.Context, exec bob.Executor, FeatureServiceItemIDPK string, IndexPK int32) (bool, error) { + return ArcgisLayers.Query( + sm.Where(ArcgisLayers.Columns.FeatureServiceItemID.EQ(psql.Arg(FeatureServiceItemIDPK))), + sm.Where(ArcgisLayers.Columns.Index.EQ(psql.Arg(IndexPK))), + ).Exists(ctx, exec) +} + +// AfterQueryHook is called after ArcgisLayer is retrieved from the database +func (o *ArcgisLayer) AfterQueryHook(ctx 
context.Context, exec bob.Executor, queryType bob.QueryType) error { + var err error + + switch queryType { + case bob.QueryTypeSelect: + ctx, err = ArcgisLayers.AfterSelectHooks.RunHooks(ctx, exec, ArcgisLayerSlice{o}) + case bob.QueryTypeInsert: + ctx, err = ArcgisLayers.AfterInsertHooks.RunHooks(ctx, exec, ArcgisLayerSlice{o}) + case bob.QueryTypeUpdate: + ctx, err = ArcgisLayers.AfterUpdateHooks.RunHooks(ctx, exec, ArcgisLayerSlice{o}) + case bob.QueryTypeDelete: + ctx, err = ArcgisLayers.AfterDeleteHooks.RunHooks(ctx, exec, ArcgisLayerSlice{o}) + } + + return err +} + +// primaryKeyVals returns the primary key values of the ArcgisLayer +func (o *ArcgisLayer) primaryKeyVals() bob.Expression { + return psql.ArgGroup( + o.FeatureServiceItemID, + o.Index, + ) +} + +func (o *ArcgisLayer) pkEQ() dialect.Expression { + return psql.Group(psql.Quote("arcgis.layer", "feature_service_item_id"), psql.Quote("arcgis.layer", "index_")).EQ(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) { + return o.primaryKeyVals().WriteSQL(ctx, w, d, start) + })) +} + +// Update uses an executor to update the ArcgisLayer +func (o *ArcgisLayer) Update(ctx context.Context, exec bob.Executor, s *ArcgisLayerSetter) error { + v, err := ArcgisLayers.Update(s.UpdateMod(), um.Where(o.pkEQ())).One(ctx, exec) + if err != nil { + return err + } + + o.R = v.R + *o = *v + + return nil +} + +// Delete deletes a single ArcgisLayer record with an executor +func (o *ArcgisLayer) Delete(ctx context.Context, exec bob.Executor) error { + _, err := ArcgisLayers.Delete(dm.Where(o.pkEQ())).Exec(ctx, exec) + return err +} + +// Reload refreshes the ArcgisLayer using the executor +func (o *ArcgisLayer) Reload(ctx context.Context, exec bob.Executor) error { + o2, err := ArcgisLayers.Query( + sm.Where(ArcgisLayers.Columns.FeatureServiceItemID.EQ(psql.Arg(o.FeatureServiceItemID))), + sm.Where(ArcgisLayers.Columns.Index.EQ(psql.Arg(o.Index))), + ).One(ctx, exec) 
+ if err != nil { + return err + } + o2.R = o.R + *o = *o2 + + return nil +} + +// AfterQueryHook is called after ArcgisLayerSlice is retrieved from the database +func (o ArcgisLayerSlice) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error { + var err error + + switch queryType { + case bob.QueryTypeSelect: + ctx, err = ArcgisLayers.AfterSelectHooks.RunHooks(ctx, exec, o) + case bob.QueryTypeInsert: + ctx, err = ArcgisLayers.AfterInsertHooks.RunHooks(ctx, exec, o) + case bob.QueryTypeUpdate: + ctx, err = ArcgisLayers.AfterUpdateHooks.RunHooks(ctx, exec, o) + case bob.QueryTypeDelete: + ctx, err = ArcgisLayers.AfterDeleteHooks.RunHooks(ctx, exec, o) + } + + return err +} + +func (o ArcgisLayerSlice) pkIN() dialect.Expression { + if len(o) == 0 { + return psql.Raw("NULL") + } + + return psql.Group(psql.Quote("arcgis.layer", "feature_service_item_id"), psql.Quote("arcgis.layer", "index_")).In(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) { + pkPairs := make([]bob.Expression, len(o)) + for i, row := range o { + pkPairs[i] = row.primaryKeyVals() + } + return bob.ExpressSlice(ctx, w, d, start, pkPairs, "", ", ", "") + })) +} + +// copyMatchingRows finds models in the given slice that have the same primary key +// then it first copies the existing relationships from the old model to the new model +// and then replaces the old model in the slice with the new model +func (o ArcgisLayerSlice) copyMatchingRows(from ...*ArcgisLayer) { + for i, old := range o { + for _, new := range from { + if new.FeatureServiceItemID != old.FeatureServiceItemID { + continue + } + if new.Index != old.Index { + continue + } + new.R = old.R + o[i] = new + break + } + } +} + +// UpdateMod modifies an update query with "WHERE primary_key IN (o...)" +func (o ArcgisLayerSlice) UpdateMod() bob.Mod[*dialect.UpdateQuery] { + return bob.ModFunc[*dialect.UpdateQuery](func(q *dialect.UpdateQuery) { + 
q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) { + return ArcgisLayers.BeforeUpdateHooks.RunHooks(ctx, exec, o) + }) + + q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error { + var err error + switch retrieved := retrieved.(type) { + case *ArcgisLayer: + o.copyMatchingRows(retrieved) + case []*ArcgisLayer: + o.copyMatchingRows(retrieved...) + case ArcgisLayerSlice: + o.copyMatchingRows(retrieved...) + default: + // If the retrieved value is not a ArcgisLayer or a slice of ArcgisLayer + // then run the AfterUpdateHooks on the slice + _, err = ArcgisLayers.AfterUpdateHooks.RunHooks(ctx, exec, o) + } + + return err + })) + + q.AppendWhere(o.pkIN()) + }) +} + +// DeleteMod modifies an delete query with "WHERE primary_key IN (o...)" +func (o ArcgisLayerSlice) DeleteMod() bob.Mod[*dialect.DeleteQuery] { + return bob.ModFunc[*dialect.DeleteQuery](func(q *dialect.DeleteQuery) { + q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) { + return ArcgisLayers.BeforeDeleteHooks.RunHooks(ctx, exec, o) + }) + + q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error { + var err error + switch retrieved := retrieved.(type) { + case *ArcgisLayer: + o.copyMatchingRows(retrieved) + case []*ArcgisLayer: + o.copyMatchingRows(retrieved...) + case ArcgisLayerSlice: + o.copyMatchingRows(retrieved...) 
+ default: + // If the retrieved value is not a ArcgisLayer or a slice of ArcgisLayer + // then run the AfterDeleteHooks on the slice + _, err = ArcgisLayers.AfterDeleteHooks.RunHooks(ctx, exec, o) + } + + return err + })) + + q.AppendWhere(o.pkIN()) + }) +} + +func (o ArcgisLayerSlice) UpdateAll(ctx context.Context, exec bob.Executor, vals ArcgisLayerSetter) error { + if len(o) == 0 { + return nil + } + + _, err := ArcgisLayers.Update(vals.UpdateMod(), o.UpdateMod()).All(ctx, exec) + return err +} + +func (o ArcgisLayerSlice) DeleteAll(ctx context.Context, exec bob.Executor) error { + if len(o) == 0 { + return nil + } + + _, err := ArcgisLayers.Delete(o.DeleteMod()).Exec(ctx, exec) + return err +} + +func (o ArcgisLayerSlice) ReloadAll(ctx context.Context, exec bob.Executor) error { + if len(o) == 0 { + return nil + } + + o2, err := ArcgisLayers.Query(sm.Where(o.pkIN())).All(ctx, exec) + if err != nil { + return err + } + + o.copyMatchingRows(o2...) + + return nil +} + +// FeatureServiceItemFeatureService starts a query for related objects on arcgis.feature_service +func (o *ArcgisLayer) FeatureServiceItemFeatureService(mods ...bob.Mod[*dialect.SelectQuery]) ArcgisFeatureServicesQuery { + return ArcgisFeatureServices.Query(append(mods, + sm.Where(ArcgisFeatureServices.Columns.ItemID.EQ(psql.Arg(o.FeatureServiceItemID))), + )...) +} + +func (os ArcgisLayerSlice) FeatureServiceItemFeatureService(mods ...bob.Mod[*dialect.SelectQuery]) ArcgisFeatureServicesQuery { + pkFeatureServiceItemID := make(pgtypes.Array[string], 0, len(os)) + for _, o := range os { + if o == nil { + continue + } + pkFeatureServiceItemID = append(pkFeatureServiceItemID, o.FeatureServiceItemID) + } + PKArgExpr := psql.Select(sm.Columns( + psql.F("unnest", psql.Cast(psql.Arg(pkFeatureServiceItemID), "text[]")), + )) + + return ArcgisFeatureServices.Query(append(mods, + sm.Where(psql.Group(ArcgisFeatureServices.Columns.ItemID).OP("IN", PKArgExpr)), + )...) 
+} + +// LayerFields starts a query for related objects on arcgis.layer_field +func (o *ArcgisLayer) LayerFields(mods ...bob.Mod[*dialect.SelectQuery]) ArcgisLayerFieldsQuery { + return ArcgisLayerFields.Query(append(mods, + sm.Where(ArcgisLayerFields.Columns.LayerFeatureServiceItemID.EQ(psql.Arg(o.FeatureServiceItemID))), sm.Where(ArcgisLayerFields.Columns.LayerIndex.EQ(psql.Arg(o.Index))), + )...) +} + +func (os ArcgisLayerSlice) LayerFields(mods ...bob.Mod[*dialect.SelectQuery]) ArcgisLayerFieldsQuery { + pkFeatureServiceItemID := make(pgtypes.Array[string], 0, len(os)) + + pkIndex := make(pgtypes.Array[int32], 0, len(os)) + for _, o := range os { + if o == nil { + continue + } + pkFeatureServiceItemID = append(pkFeatureServiceItemID, o.FeatureServiceItemID) + pkIndex = append(pkIndex, o.Index) + } + PKArgExpr := psql.Select(sm.Columns( + psql.F("unnest", psql.Cast(psql.Arg(pkFeatureServiceItemID), "text[]")), + psql.F("unnest", psql.Cast(psql.Arg(pkIndex), "integer[]")), + )) + + return ArcgisLayerFields.Query(append(mods, + sm.Where(psql.Group(ArcgisLayerFields.Columns.LayerFeatureServiceItemID, ArcgisLayerFields.Columns.LayerIndex).OP("IN", PKArgExpr)), + )...) 
+} + +func attachArcgisLayerFeatureServiceItemFeatureService0(ctx context.Context, exec bob.Executor, count int, arcgisLayer0 *ArcgisLayer, arcgisFeatureService1 *ArcgisFeatureService) (*ArcgisLayer, error) { + setter := &ArcgisLayerSetter{ + FeatureServiceItemID: omit.From(arcgisFeatureService1.ItemID), + } + + err := arcgisLayer0.Update(ctx, exec, setter) + if err != nil { + return nil, fmt.Errorf("attachArcgisLayerFeatureServiceItemFeatureService0: %w", err) + } + + return arcgisLayer0, nil +} + +func (arcgisLayer0 *ArcgisLayer) InsertFeatureServiceItemFeatureService(ctx context.Context, exec bob.Executor, related *ArcgisFeatureServiceSetter) error { + var err error + + arcgisFeatureService1, err := ArcgisFeatureServices.Insert(related).One(ctx, exec) + if err != nil { + return fmt.Errorf("inserting related objects: %w", err) + } + + _, err = attachArcgisLayerFeatureServiceItemFeatureService0(ctx, exec, 1, arcgisLayer0, arcgisFeatureService1) + if err != nil { + return err + } + + arcgisLayer0.R.FeatureServiceItemFeatureService = arcgisFeatureService1 + + arcgisFeatureService1.R.FeatureServiceItemLayers = append(arcgisFeatureService1.R.FeatureServiceItemLayers, arcgisLayer0) + + return nil +} + +func (arcgisLayer0 *ArcgisLayer) AttachFeatureServiceItemFeatureService(ctx context.Context, exec bob.Executor, arcgisFeatureService1 *ArcgisFeatureService) error { + var err error + + _, err = attachArcgisLayerFeatureServiceItemFeatureService0(ctx, exec, 1, arcgisLayer0, arcgisFeatureService1) + if err != nil { + return err + } + + arcgisLayer0.R.FeatureServiceItemFeatureService = arcgisFeatureService1 + + arcgisFeatureService1.R.FeatureServiceItemLayers = append(arcgisFeatureService1.R.FeatureServiceItemLayers, arcgisLayer0) + + return nil +} + +func insertArcgisLayerLayerFields0(ctx context.Context, exec bob.Executor, arcgisLayerFields1 []*ArcgisLayerFieldSetter, arcgisLayer0 *ArcgisLayer) (ArcgisLayerFieldSlice, error) { + for i := range arcgisLayerFields1 { + 
arcgisLayerFields1[i].LayerFeatureServiceItemID = omit.From(arcgisLayer0.FeatureServiceItemID) + arcgisLayerFields1[i].LayerIndex = omit.From(arcgisLayer0.Index) + } + + ret, err := ArcgisLayerFields.Insert(bob.ToMods(arcgisLayerFields1...)).All(ctx, exec) + if err != nil { + return ret, fmt.Errorf("insertArcgisLayerLayerFields0: %w", err) + } + + return ret, nil +} + +func attachArcgisLayerLayerFields0(ctx context.Context, exec bob.Executor, count int, arcgisLayerFields1 ArcgisLayerFieldSlice, arcgisLayer0 *ArcgisLayer) (ArcgisLayerFieldSlice, error) { + setter := &ArcgisLayerFieldSetter{ + LayerFeatureServiceItemID: omit.From(arcgisLayer0.FeatureServiceItemID), + LayerIndex: omit.From(arcgisLayer0.Index), + } + + err := arcgisLayerFields1.UpdateAll(ctx, exec, *setter) + if err != nil { + return nil, fmt.Errorf("attachArcgisLayerLayerFields0: %w", err) + } + + return arcgisLayerFields1, nil +} + +func (arcgisLayer0 *ArcgisLayer) InsertLayerFields(ctx context.Context, exec bob.Executor, related ...*ArcgisLayerFieldSetter) error { + if len(related) == 0 { + return nil + } + + var err error + + arcgisLayerFields1, err := insertArcgisLayerLayerFields0(ctx, exec, related, arcgisLayer0) + if err != nil { + return err + } + + arcgisLayer0.R.LayerFields = append(arcgisLayer0.R.LayerFields, arcgisLayerFields1...) + + for _, rel := range arcgisLayerFields1 { + rel.R.Layer = arcgisLayer0 + } + return nil +} + +func (arcgisLayer0 *ArcgisLayer) AttachLayerFields(ctx context.Context, exec bob.Executor, related ...*ArcgisLayerField) error { + if len(related) == 0 { + return nil + } + + var err error + arcgisLayerFields1 := ArcgisLayerFieldSlice(related) + + _, err = attachArcgisLayerLayerFields0(ctx, exec, len(related), arcgisLayerFields1, arcgisLayer0) + if err != nil { + return err + } + + arcgisLayer0.R.LayerFields = append(arcgisLayer0.R.LayerFields, arcgisLayerFields1...) 
+ + for _, rel := range related { + rel.R.Layer = arcgisLayer0 + } + + return nil +} + +type arcgisLayerWhere[Q psql.Filterable] struct { + Extent psql.WhereMod[Q, string] + FeatureServiceItemID psql.WhereMod[Q, string] + Index psql.WhereMod[Q, int32] +} + +func (arcgisLayerWhere[Q]) AliasedAs(alias string) arcgisLayerWhere[Q] { + return buildArcgisLayerWhere[Q](buildArcgisLayerColumns(alias)) +} + +func buildArcgisLayerWhere[Q psql.Filterable](cols arcgisLayerColumns) arcgisLayerWhere[Q] { + return arcgisLayerWhere[Q]{ + Extent: psql.Where[Q, string](cols.Extent), + FeatureServiceItemID: psql.Where[Q, string](cols.FeatureServiceItemID), + Index: psql.Where[Q, int32](cols.Index), + } +} + +func (o *ArcgisLayer) Preload(name string, retrieved any) error { + if o == nil { + return nil + } + + switch name { + case "FeatureServiceItemFeatureService": + rel, ok := retrieved.(*ArcgisFeatureService) + if !ok { + return fmt.Errorf("arcgisLayer cannot load %T as %q", retrieved, name) + } + + o.R.FeatureServiceItemFeatureService = rel + + if rel != nil { + rel.R.FeatureServiceItemLayers = ArcgisLayerSlice{o} + } + return nil + case "LayerFields": + rels, ok := retrieved.(ArcgisLayerFieldSlice) + if !ok { + return fmt.Errorf("arcgisLayer cannot load %T as %q", retrieved, name) + } + + o.R.LayerFields = rels + + for _, rel := range rels { + if rel != nil { + rel.R.Layer = o + } + } + return nil + default: + return fmt.Errorf("arcgisLayer has no relationship %q", name) + } +} + +type arcgisLayerPreloader struct { + FeatureServiceItemFeatureService func(...psql.PreloadOption) psql.Preloader +} + +func buildArcgisLayerPreloader() arcgisLayerPreloader { + return arcgisLayerPreloader{ + FeatureServiceItemFeatureService: func(opts ...psql.PreloadOption) psql.Preloader { + return psql.Preload[*ArcgisFeatureService, ArcgisFeatureServiceSlice](psql.PreloadRel{ + Name: "FeatureServiceItemFeatureService", + Sides: []psql.PreloadSide{ + { + From: ArcgisLayers, + To: ArcgisFeatureServices, 
+ FromColumns: []string{"feature_service_item_id"}, + ToColumns: []string{"item_id"}, + }, + }, + }, ArcgisFeatureServices.Columns.Names(), opts...) + }, + } +} + +type arcgisLayerThenLoader[Q orm.Loadable] struct { + FeatureServiceItemFeatureService func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] + LayerFields func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] +} + +func buildArcgisLayerThenLoader[Q orm.Loadable]() arcgisLayerThenLoader[Q] { + type FeatureServiceItemFeatureServiceLoadInterface interface { + LoadFeatureServiceItemFeatureService(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error + } + type LayerFieldsLoadInterface interface { + LoadLayerFields(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error + } + + return arcgisLayerThenLoader[Q]{ + FeatureServiceItemFeatureService: thenLoadBuilder[Q]( + "FeatureServiceItemFeatureService", + func(ctx context.Context, exec bob.Executor, retrieved FeatureServiceItemFeatureServiceLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { + return retrieved.LoadFeatureServiceItemFeatureService(ctx, exec, mods...) + }, + ), + LayerFields: thenLoadBuilder[Q]( + "LayerFields", + func(ctx context.Context, exec bob.Executor, retrieved LayerFieldsLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { + return retrieved.LoadLayerFields(ctx, exec, mods...) 
+ }, + ), + } +} + +// LoadFeatureServiceItemFeatureService loads the arcgisLayer's FeatureServiceItemFeatureService into the .R struct +func (o *ArcgisLayer) LoadFeatureServiceItemFeatureService(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if o == nil { + return nil + } + + // Reset the relationship + o.R.FeatureServiceItemFeatureService = nil + + related, err := o.FeatureServiceItemFeatureService(mods...).One(ctx, exec) + if err != nil { + return err + } + + related.R.FeatureServiceItemLayers = ArcgisLayerSlice{o} + + o.R.FeatureServiceItemFeatureService = related + return nil +} + +// LoadFeatureServiceItemFeatureService loads the arcgisLayer's FeatureServiceItemFeatureService into the .R struct +func (os ArcgisLayerSlice) LoadFeatureServiceItemFeatureService(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if len(os) == 0 { + return nil + } + + arcgisFeatureServices, err := os.FeatureServiceItemFeatureService(mods...).All(ctx, exec) + if err != nil { + return err + } + + for _, o := range os { + if o == nil { + continue + } + + for _, rel := range arcgisFeatureServices { + + if !(o.FeatureServiceItemID == rel.ItemID) { + continue + } + + rel.R.FeatureServiceItemLayers = append(rel.R.FeatureServiceItemLayers, o) + + o.R.FeatureServiceItemFeatureService = rel + break + } + } + + return nil +} + +// LoadLayerFields loads the arcgisLayer's LayerFields into the .R struct +func (o *ArcgisLayer) LoadLayerFields(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if o == nil { + return nil + } + + // Reset the relationship + o.R.LayerFields = nil + + related, err := o.LayerFields(mods...).All(ctx, exec) + if err != nil { + return err + } + + for _, rel := range related { + rel.R.Layer = o + } + + o.R.LayerFields = related + return nil +} + +// LoadLayerFields loads the arcgisLayer's LayerFields into the .R struct +func (os ArcgisLayerSlice) 
LoadLayerFields(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if len(os) == 0 { + return nil + } + + arcgisLayerFields, err := os.LayerFields(mods...).All(ctx, exec) + if err != nil { + return err + } + + for _, o := range os { + if o == nil { + continue + } + + o.R.LayerFields = nil + } + + for _, o := range os { + if o == nil { + continue + } + + for _, rel := range arcgisLayerFields { + + if !(o.FeatureServiceItemID == rel.LayerFeatureServiceItemID) { + continue + } + + if !(o.Index == rel.LayerIndex) { + continue + } + + rel.R.Layer = o + + o.R.LayerFields = append(o.R.LayerFields, rel) + } + } + + return nil +} + +// arcgisLayerC is where relationship counts are stored. +type arcgisLayerC struct { + LayerFields *int64 +} + +// PreloadCount sets a count in the C struct by name +func (o *ArcgisLayer) PreloadCount(name string, count int64) error { + if o == nil { + return nil + } + + switch name { + case "LayerFields": + o.C.LayerFields = &count + } + return nil +} + +type arcgisLayerCountPreloader struct { + LayerFields func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader +} + +func buildArcgisLayerCountPreloader() arcgisLayerCountPreloader { + return arcgisLayerCountPreloader{ + LayerFields: func(mods ...bob.Mod[*dialect.SelectQuery]) psql.Preloader { + return countPreloader[*ArcgisLayer]("LayerFields", func(parent string) bob.Expression { + // Build a correlated subquery: (SELECT COUNT(*) FROM related WHERE fk = parent.pk) + if parent == "" { + parent = ArcgisLayers.Alias() + } + + subqueryMods := []bob.Mod[*dialect.SelectQuery]{ + sm.Columns(psql.Raw("count(*)")), + + sm.From(ArcgisLayerFields.Name()), + sm.Where(psql.Quote(ArcgisLayerFields.Alias(), "layer_feature_service_item_id").EQ(psql.Quote(parent, "feature_service_item_id"))), + sm.Where(psql.Quote(ArcgisLayerFields.Alias(), "layer_index").EQ(psql.Quote(parent, "index_"))), + } + subqueryMods = append(subqueryMods, mods...) 
+ return psql.Group(psql.Select(subqueryMods...).Expression) + }) + }, + } +} + +type arcgisLayerCountThenLoader[Q orm.Loadable] struct { + LayerFields func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] +} + +func buildArcgisLayerCountThenLoader[Q orm.Loadable]() arcgisLayerCountThenLoader[Q] { + type LayerFieldsCountInterface interface { + LoadCountLayerFields(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error + } + + return arcgisLayerCountThenLoader[Q]{ + LayerFields: countThenLoadBuilder[Q]( + "LayerFields", + func(ctx context.Context, exec bob.Executor, retrieved LayerFieldsCountInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { + return retrieved.LoadCountLayerFields(ctx, exec, mods...) + }, + ), + } +} + +// LoadCountLayerFields loads the count of LayerFields into the C struct +func (o *ArcgisLayer) LoadCountLayerFields(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if o == nil { + return nil + } + + count, err := o.LayerFields(mods...).Count(ctx, exec) + if err != nil { + return err + } + + o.C.LayerFields = &count + return nil +} + +// LoadCountLayerFields loads the count of LayerFields for a slice +func (os ArcgisLayerSlice) LoadCountLayerFields(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if len(os) == 0 { + return nil + } + + for _, o := range os { + if err := o.LoadCountLayerFields(ctx, exec, mods...); err != nil { + return err + } + } + + return nil +} + +type arcgisLayerJoins[Q dialect.Joinable] struct { + typ string + FeatureServiceItemFeatureService modAs[Q, arcgisFeatureServiceColumns] + LayerFields modAs[Q, arcgisLayerFieldColumns] +} + +func (j arcgisLayerJoins[Q]) aliasedAs(alias string) arcgisLayerJoins[Q] { + return buildArcgisLayerJoins[Q](buildArcgisLayerColumns(alias), j.typ) +} + +func buildArcgisLayerJoins[Q dialect.Joinable](cols arcgisLayerColumns, typ string) arcgisLayerJoins[Q] { + return arcgisLayerJoins[Q]{ + typ: typ, 
+ FeatureServiceItemFeatureService: modAs[Q, arcgisFeatureServiceColumns]{ + c: ArcgisFeatureServices.Columns, + f: func(to arcgisFeatureServiceColumns) bob.Mod[Q] { + mods := make(mods.QueryMods[Q], 0, 1) + + { + mods = append(mods, dialect.Join[Q](typ, ArcgisFeatureServices.Name().As(to.Alias())).On( + to.ItemID.EQ(cols.FeatureServiceItemID), + )) + } + + return mods + }, + }, + LayerFields: modAs[Q, arcgisLayerFieldColumns]{ + c: ArcgisLayerFields.Columns, + f: func(to arcgisLayerFieldColumns) bob.Mod[Q] { + mods := make(mods.QueryMods[Q], 0, 1) + + { + mods = append(mods, dialect.Join[Q](typ, ArcgisLayerFields.Name().As(to.Alias())).On( + to.LayerFeatureServiceItemID.EQ(cols.FeatureServiceItemID), to.LayerIndex.EQ(cols.Index), + )) + } + + return mods + }, + }, + } +} diff --git a/db/models/arcgis.layer_field.bob.go b/db/models/arcgis.layer_field.bob.go new file mode 100644 index 00000000..8fe37f55 --- /dev/null +++ b/db/models/arcgis.layer_field.bob.go @@ -0,0 +1,1266 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "fmt" + "io" + + "github.com/Gleipnir-Technology/bob" + "github.com/Gleipnir-Technology/bob/dialect/psql" + "github.com/Gleipnir-Technology/bob/dialect/psql/dialect" + "github.com/Gleipnir-Technology/bob/dialect/psql/dm" + "github.com/Gleipnir-Technology/bob/dialect/psql/sm" + "github.com/Gleipnir-Technology/bob/dialect/psql/um" + "github.com/Gleipnir-Technology/bob/expr" + "github.com/Gleipnir-Technology/bob/mods" + "github.com/Gleipnir-Technology/bob/orm" + "github.com/Gleipnir-Technology/bob/types/pgtypes" + enums "github.com/Gleipnir-Technology/nidus-sync/db/enums" + "github.com/aarondl/opt/omit" +) + +// ArcgisLayerField is an object representing the database table. 
type ArcgisLayerField struct {
	LayerFeatureServiceItemID string                `db:"layer_feature_service_item_id,pk" `
	LayerIndex                int32                 `db:"layer_index,pk" `
	Name                      string                `db:"name,pk" `
	// Type maps to the "type_" column (trailing underscore in the DB name).
	Type enums.ArcgisFieldtype `db:"type_" `

	// R holds eagerly-loaded relationships (see arcgisLayerFieldR).
	R arcgisLayerFieldR `db:"-" `

	// C holds preloaded relationship counts.
	C arcgisLayerFieldC `db:"-" `
}

// ArcgisLayerFieldSlice is an alias for a slice of pointers to ArcgisLayerField.
// This should almost always be used instead of []*ArcgisLayerField.
type ArcgisLayerFieldSlice []*ArcgisLayerField

// ArcgisLayerFields contains methods to work with the layer_field table
var ArcgisLayerFields = psql.NewTablex[*ArcgisLayerField, ArcgisLayerFieldSlice, *ArcgisLayerFieldSetter]("arcgis", "layer_field", buildArcgisLayerFieldColumns("arcgis.layer_field"))

// ArcgisLayerFieldsQuery is a query on the layer_field table
type ArcgisLayerFieldsQuery = *psql.ViewQuery[*ArcgisLayerField, ArcgisLayerFieldSlice]

// arcgisLayerFieldR is where relationships are stored.
type arcgisLayerFieldR struct {
	AddressMappings ArcgisAddressMappingSlice // arcgis.address_mapping.address_mapping_layer_feature_service_item_id_layer_index__fkey
	Layer           *ArcgisLayer              // arcgis.layer_field.layer_field_layer_feature_service_item_id_layer_index_fkey
	ParcelMappings  ArcgisParcelMappingSlice  // arcgis.parcel_mapping.parcel_mapping_layer_feature_service_item_id_layer_index_l_fkey
}

// buildArcgisLayerFieldColumns constructs the column expressions for the
// given table alias.
func buildArcgisLayerFieldColumns(alias string) arcgisLayerFieldColumns {
	return arcgisLayerFieldColumns{
		ColumnsExpr: expr.NewColumnsExpr(
			"layer_feature_service_item_id", "layer_index", "name", "type_",
		).WithParent("arcgis.layer_field"),
		tableAlias:                alias,
		LayerFeatureServiceItemID: psql.Quote(alias, "layer_feature_service_item_id"),
		LayerIndex:                psql.Quote(alias, "layer_index"),
		Name:                      psql.Quote(alias, "name"),
		Type:                      psql.Quote(alias, "type_"),
	}
}

// arcgisLayerFieldColumns exposes each column as a quoted expression bound
// to a table alias.
type arcgisLayerFieldColumns struct {
	expr.ColumnsExpr
	tableAlias                string
	LayerFeatureServiceItemID psql.Expression
	LayerIndex                psql.Expression
	Name                      psql.Expression
	Type                      psql.Expression
}

// Alias returns the table alias these columns are bound to.
func (c arcgisLayerFieldColumns) Alias() string {
	return c.tableAlias
}

// AliasedAs returns a copy of the columns bound to a new table alias.
func (arcgisLayerFieldColumns) AliasedAs(alias string) arcgisLayerFieldColumns {
	return buildArcgisLayerFieldColumns(alias)
}

// ArcgisLayerFieldSetter is used for insert/upsert/update operations
// All values are optional, and do not have to be set
// Generated columns are not included
type ArcgisLayerFieldSetter struct {
	LayerFeatureServiceItemID omit.Val[string]                `db:"layer_feature_service_item_id,pk" `
	LayerIndex                omit.Val[int32]                 `db:"layer_index,pk" `
	Name                      omit.Val[string]                `db:"name,pk" `
	Type                      omit.Val[enums.ArcgisFieldtype] `db:"type_" `
}

// SetColumns reports the DB names of the columns that have a value set.
func (s ArcgisLayerFieldSetter) SetColumns() []string {
	vals := make([]string, 0, 4)
	if s.LayerFeatureServiceItemID.IsValue() {
		vals = append(vals, "layer_feature_service_item_id")
	}
	if s.LayerIndex.IsValue() {
		vals = append(vals, "layer_index")
	}
	if s.Name.IsValue() {
		vals = append(vals, "name")
	}
	if s.Type.IsValue() {
		vals = append(vals, "type_")
	}
	return vals
}

// Overwrite copies every set value in the setter onto t; unset values leave
// the corresponding field untouched.
func (s ArcgisLayerFieldSetter) Overwrite(t *ArcgisLayerField) {
	if s.LayerFeatureServiceItemID.IsValue() {
		t.LayerFeatureServiceItemID = s.LayerFeatureServiceItemID.MustGet()
	}
	if s.LayerIndex.IsValue() {
		t.LayerIndex = s.LayerIndex.MustGet()
	}
	if s.Name.IsValue() {
		t.Name = s.Name.MustGet()
	}
	if s.Type.IsValue() {
		t.Type = s.Type.MustGet()
	}
}

// Apply appends the setter's values to an insert query — emitting DEFAULT for
// unset columns, in declared column order — and registers the table's
// before-insert hooks on the query.
func (s *ArcgisLayerFieldSetter) Apply(q *dialect.InsertQuery) {
	q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
		return ArcgisLayerFields.BeforeInsertHooks.RunHooks(ctx, exec, s)
	})

	q.AppendValues(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
		vals := make([]bob.Expression, 4)
		if s.LayerFeatureServiceItemID.IsValue() {
			vals[0] = psql.Arg(s.LayerFeatureServiceItemID.MustGet())
		} else {
			vals[0] = psql.Raw("DEFAULT")
		}

		if s.LayerIndex.IsValue() {
			vals[1] = psql.Arg(s.LayerIndex.MustGet())
		} else {
			vals[1] = psql.Raw("DEFAULT")
		}

		if s.Name.IsValue() {
			vals[2] = psql.Arg(s.Name.MustGet())
		} else {
			vals[2] = psql.Raw("DEFAULT")
		}

		if s.Type.IsValue() {
			vals[3] = psql.Arg(s.Type.MustGet())
		} else {
			vals[3] = psql.Raw("DEFAULT")
		}

		return bob.ExpressSlice(ctx, w, d, start, vals, "", ", ", "")
	}))
}

// UpdateMod returns a SET clause covering the values set in the setter.
func (s ArcgisLayerFieldSetter) UpdateMod() bob.Mod[*dialect.UpdateQuery] {
	return um.Set(s.Expressions()...)
}

// Expressions returns "col = value" pairs for each set column; prefix (e.g. a
// table alias) is prepended to the quoted column name when given.
func (s ArcgisLayerFieldSetter) Expressions(prefix ...string) []bob.Expression {
	exprs := make([]bob.Expression, 0, 4)

	if s.LayerFeatureServiceItemID.IsValue() {
		exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
			psql.Quote(append(prefix, "layer_feature_service_item_id")...),
			psql.Arg(s.LayerFeatureServiceItemID),
		}})
	}

	if s.LayerIndex.IsValue() {
		exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
			psql.Quote(append(prefix, "layer_index")...),
			psql.Arg(s.LayerIndex),
		}})
	}

	if s.Name.IsValue() {
		exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
			psql.Quote(append(prefix, "name")...),
			psql.Arg(s.Name),
		}})
	}

	if s.Type.IsValue() {
		exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
			psql.Quote(append(prefix, "type_")...),
			psql.Arg(s.Type),
		}})
	}

	return exprs
}

// FindArcgisLayerField retrieves a single record by primary key
// If cols is empty Find will return all columns.
func FindArcgisLayerField(ctx context.Context, exec bob.Executor, LayerFeatureServiceItemIDPK string, LayerIndexPK int32, NamePK string, cols ...string) (*ArcgisLayerField, error) {
	if len(cols) == 0 {
		return ArcgisLayerFields.Query(
			sm.Where(ArcgisLayerFields.Columns.LayerFeatureServiceItemID.EQ(psql.Arg(LayerFeatureServiceItemIDPK))),
			sm.Where(ArcgisLayerFields.Columns.LayerIndex.EQ(psql.Arg(LayerIndexPK))),
			sm.Where(ArcgisLayerFields.Columns.Name.EQ(psql.Arg(NamePK))),
		).One(ctx, exec)
	}

	return ArcgisLayerFields.Query(
		sm.Where(ArcgisLayerFields.Columns.LayerFeatureServiceItemID.EQ(psql.Arg(LayerFeatureServiceItemIDPK))),
		sm.Where(ArcgisLayerFields.Columns.LayerIndex.EQ(psql.Arg(LayerIndexPK))),
		sm.Where(ArcgisLayerFields.Columns.Name.EQ(psql.Arg(NamePK))),
		sm.Columns(ArcgisLayerFields.Columns.Only(cols...)),
	).One(ctx, exec)
}

// ArcgisLayerFieldExists checks the presence of a single record by primary key
func ArcgisLayerFieldExists(ctx context.Context, exec bob.Executor, LayerFeatureServiceItemIDPK string, LayerIndexPK int32, NamePK string) (bool, error) {
	return ArcgisLayerFields.Query(
		sm.Where(ArcgisLayerFields.Columns.LayerFeatureServiceItemID.EQ(psql.Arg(LayerFeatureServiceItemIDPK))),
		sm.Where(ArcgisLayerFields.Columns.LayerIndex.EQ(psql.Arg(LayerIndexPK))),
		sm.Where(ArcgisLayerFields.Columns.Name.EQ(psql.Arg(NamePK))),
	).Exists(ctx, exec)
}

// AfterQueryHook is called after ArcgisLayerField is retrieved from the database
func (o *ArcgisLayerField) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error {
	var err error

	switch queryType {
	case bob.QueryTypeSelect:
		ctx, err = ArcgisLayerFields.AfterSelectHooks.RunHooks(ctx, exec, ArcgisLayerFieldSlice{o})
	case bob.QueryTypeInsert:
		ctx, err = ArcgisLayerFields.AfterInsertHooks.RunHooks(ctx, exec, ArcgisLayerFieldSlice{o})
	case bob.QueryTypeUpdate:
		ctx, err = ArcgisLayerFields.AfterUpdateHooks.RunHooks(ctx, exec, ArcgisLayerFieldSlice{o})
	case bob.QueryTypeDelete:
		ctx, err = ArcgisLayerFields.AfterDeleteHooks.RunHooks(ctx, exec, ArcgisLayerFieldSlice{o})
	}

	return err
}

// primaryKeyVals returns the primary key values of the ArcgisLayerField
func (o *ArcgisLayerField) primaryKeyVals() bob.Expression {
	return psql.ArgGroup(
		o.LayerFeatureServiceItemID,
		o.LayerIndex,
		o.Name,
	)
}

// pkEQ builds an expression matching this row's composite primary key.
func (o *ArcgisLayerField) pkEQ() dialect.Expression {
	return psql.Group(psql.Quote("arcgis.layer_field", "layer_feature_service_item_id"), psql.Quote("arcgis.layer_field", "layer_index"), psql.Quote("arcgis.layer_field", "name")).EQ(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
		return o.primaryKeyVals().WriteSQL(ctx, w, d, start)
	}))
}

// Update uses an executor to update the ArcgisLayerField
func (o *ArcgisLayerField) Update(ctx context.Context, exec bob.Executor, s *ArcgisLayerFieldSetter) error {
	v, err := ArcgisLayerFields.Update(s.UpdateMod(), um.Where(o.pkEQ())).One(ctx, exec)
	if err != nil {
		return err
	}

	// Keep the already-loaded relationships when replacing o with the
	// freshly returned row.
	o.R = v.R
	*o = *v

	return nil
}

// Delete deletes a single ArcgisLayerField record with an executor
func (o *ArcgisLayerField) Delete(ctx context.Context, exec bob.Executor) error {
	_, err := ArcgisLayerFields.Delete(dm.Where(o.pkEQ())).Exec(ctx, exec)
	return err
}

// Reload refreshes the ArcgisLayerField using the executor
func (o *ArcgisLayerField) Reload(ctx context.Context, exec bob.Executor) error {
	o2, err := ArcgisLayerFields.Query(
		sm.Where(ArcgisLayerFields.Columns.LayerFeatureServiceItemID.EQ(psql.Arg(o.LayerFeatureServiceItemID))),
		sm.Where(ArcgisLayerFields.Columns.LayerIndex.EQ(psql.Arg(o.LayerIndex))),
		sm.Where(ArcgisLayerFields.Columns.Name.EQ(psql.Arg(o.Name))),
	).One(ctx, exec)
	if err != nil {
		return err
	}
	o2.R = o.R
	*o = *o2

	return nil
}

// AfterQueryHook is called after ArcgisLayerFieldSlice is retrieved from the database
func (o ArcgisLayerFieldSlice) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error {
	var err error

	switch queryType {
	case bob.QueryTypeSelect:
		ctx, err = ArcgisLayerFields.AfterSelectHooks.RunHooks(ctx, exec, o)
	case bob.QueryTypeInsert:
		ctx, err = ArcgisLayerFields.AfterInsertHooks.RunHooks(ctx, exec, o)
	case bob.QueryTypeUpdate:
		ctx, err = ArcgisLayerFields.AfterUpdateHooks.RunHooks(ctx, exec, o)
	case bob.QueryTypeDelete:
		ctx, err = ArcgisLayerFields.AfterDeleteHooks.RunHooks(ctx, exec, o)
	}

	return err
}

// pkIN builds an expression matching any of the slice's composite primary
// keys; an empty slice yields NULL, which matches no rows.
func (o ArcgisLayerFieldSlice) pkIN() dialect.Expression {
	if len(o) == 0 {
		return psql.Raw("NULL")
	}

	return psql.Group(psql.Quote("arcgis.layer_field", "layer_feature_service_item_id"), psql.Quote("arcgis.layer_field", "layer_index"), psql.Quote("arcgis.layer_field", "name")).In(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
		pkPairs := make([]bob.Expression, len(o))
		for i, row := range o {
			pkPairs[i] = row.primaryKeyVals()
		}
		return bob.ExpressSlice(ctx, w, d, start, pkPairs, "", ", ", "")
	}))
}

// copyMatchingRows finds models in the given slice that have the same primary key
// then it first copies the existing relationships from the old model to the new model
// and then replaces the old model in the slice with the new model
func (o ArcgisLayerFieldSlice) copyMatchingRows(from ...*ArcgisLayerField) {
	for i, old := range o {
		// NOTE(review): generated code uses `new` as a variable name,
		// shadowing the builtin; harmless within this loop body.
		for _, new := range from {
			if new.LayerFeatureServiceItemID != old.LayerFeatureServiceItemID {
				continue
			}
			if new.LayerIndex != old.LayerIndex {
				continue
			}
			if new.Name != old.Name {
				continue
			}
			new.R = old.R
			o[i] = new
			break
		}
	}
}

// UpdateMod modifies an update query with "WHERE primary_key IN (o...)"
func (o ArcgisLayerFieldSlice) UpdateMod() bob.Mod[*dialect.UpdateQuery] {
	return bob.ModFunc[*dialect.UpdateQuery](func(q *dialect.UpdateQuery) {
		q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
			return ArcgisLayerFields.BeforeUpdateHooks.RunHooks(ctx, exec, o)
		})

		q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error {
			var err error
			switch retrieved := retrieved.(type) {
			case *ArcgisLayerField:
				o.copyMatchingRows(retrieved)
			case []*ArcgisLayerField:
				o.copyMatchingRows(retrieved...)
			case ArcgisLayerFieldSlice:
				o.copyMatchingRows(retrieved...)
			default:
				// If the retrieved value is not an ArcgisLayerField or a slice of ArcgisLayerField
				// then run the AfterUpdateHooks on the slice
				_, err = ArcgisLayerFields.AfterUpdateHooks.RunHooks(ctx, exec, o)
			}

			return err
		}))

		q.AppendWhere(o.pkIN())
	})
}

// DeleteMod modifies a delete query with "WHERE primary_key IN (o...)"
func (o ArcgisLayerFieldSlice) DeleteMod() bob.Mod[*dialect.DeleteQuery] {
	return bob.ModFunc[*dialect.DeleteQuery](func(q *dialect.DeleteQuery) {
		q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
			return ArcgisLayerFields.BeforeDeleteHooks.RunHooks(ctx, exec, o)
		})

		q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error {
			var err error
			switch retrieved := retrieved.(type) {
			case *ArcgisLayerField:
				o.copyMatchingRows(retrieved)
			case []*ArcgisLayerField:
				o.copyMatchingRows(retrieved...)
			case ArcgisLayerFieldSlice:
				o.copyMatchingRows(retrieved...)
			default:
				// If the retrieved value is not an ArcgisLayerField or a slice of ArcgisLayerField
				// then run the AfterDeleteHooks on the slice
				_, err = ArcgisLayerFields.AfterDeleteHooks.RunHooks(ctx, exec, o)
			}

			return err
		}))

		q.AppendWhere(o.pkIN())
	})
}

// UpdateAll updates every row in the slice with the given setter; no-op for
// an empty slice.
func (o ArcgisLayerFieldSlice) UpdateAll(ctx context.Context, exec bob.Executor, vals ArcgisLayerFieldSetter) error {
	if len(o) == 0 {
		return nil
	}

	_, err := ArcgisLayerFields.Update(vals.UpdateMod(), o.UpdateMod()).All(ctx, exec)
	return err
}

// DeleteAll deletes every row in the slice; no-op for an empty slice.
func (o ArcgisLayerFieldSlice) DeleteAll(ctx context.Context, exec bob.Executor) error {
	if len(o) == 0 {
		return nil
	}

	_, err := ArcgisLayerFields.Delete(o.DeleteMod()).Exec(ctx, exec)
	return err
}

// ReloadAll refreshes every row in the slice from the database, preserving
// loaded relationships; no-op for an empty slice.
func (o ArcgisLayerFieldSlice) ReloadAll(ctx context.Context, exec bob.Executor) error {
	if len(o) == 0 {
		return nil
	}

	o2, err := ArcgisLayerFields.Query(sm.Where(o.pkIN())).All(ctx, exec)
	if err != nil {
		return err
	}

	o.copyMatchingRows(o2...)

	return nil
}

// AddressMappings starts a query for related objects on arcgis.address_mapping
func (o *ArcgisLayerField) AddressMappings(mods ...bob.Mod[*dialect.SelectQuery]) ArcgisAddressMappingsQuery {
	return ArcgisAddressMappings.Query(append(mods,
		sm.Where(ArcgisAddressMappings.Columns.LayerFeatureServiceItemID.EQ(psql.Arg(o.LayerFeatureServiceItemID))), sm.Where(ArcgisAddressMappings.Columns.LayerIndex.EQ(psql.Arg(o.LayerIndex))), sm.Where(ArcgisAddressMappings.Columns.LayerFieldName.EQ(psql.Arg(o.Name))),
	)...)
}

// AddressMappings starts a query for address mappings related to any element
// of the slice, matching the composite key via unnest-ed Postgres arrays.
func (os ArcgisLayerFieldSlice) AddressMappings(mods ...bob.Mod[*dialect.SelectQuery]) ArcgisAddressMappingsQuery {
	pkLayerFeatureServiceItemID := make(pgtypes.Array[string], 0, len(os))

	pkLayerIndex := make(pgtypes.Array[int32], 0, len(os))

	pkName := make(pgtypes.Array[string], 0, len(os))
	for _, o := range os {
		if o == nil {
			continue
		}
		pkLayerFeatureServiceItemID = append(pkLayerFeatureServiceItemID, o.LayerFeatureServiceItemID)
		pkLayerIndex = append(pkLayerIndex, o.LayerIndex)
		pkName = append(pkName, o.Name)
	}
	PKArgExpr := psql.Select(sm.Columns(
		psql.F("unnest", psql.Cast(psql.Arg(pkLayerFeatureServiceItemID), "text[]")),
		psql.F("unnest", psql.Cast(psql.Arg(pkLayerIndex), "integer[]")),
		psql.F("unnest", psql.Cast(psql.Arg(pkName), "text[]")),
	))

	return ArcgisAddressMappings.Query(append(mods,
		sm.Where(psql.Group(ArcgisAddressMappings.Columns.LayerFeatureServiceItemID, ArcgisAddressMappings.Columns.LayerIndex, ArcgisAddressMappings.Columns.LayerFieldName).OP("IN", PKArgExpr)),
	)...)
}

// Layer starts a query for related objects on arcgis.layer
func (o *ArcgisLayerField) Layer(mods ...bob.Mod[*dialect.SelectQuery]) ArcgisLayersQuery {
	return ArcgisLayers.Query(append(mods,
		sm.Where(ArcgisLayers.Columns.FeatureServiceItemID.EQ(psql.Arg(o.LayerFeatureServiceItemID))), sm.Where(ArcgisLayers.Columns.Index.EQ(psql.Arg(o.LayerIndex))),
	)...)
}

// Layer starts a query for the parent layers of every element of the slice,
// matching the composite key via unnest-ed Postgres arrays.
func (os ArcgisLayerFieldSlice) Layer(mods ...bob.Mod[*dialect.SelectQuery]) ArcgisLayersQuery {
	pkLayerFeatureServiceItemID := make(pgtypes.Array[string], 0, len(os))

	pkLayerIndex := make(pgtypes.Array[int32], 0, len(os))
	for _, o := range os {
		if o == nil {
			continue
		}
		pkLayerFeatureServiceItemID = append(pkLayerFeatureServiceItemID, o.LayerFeatureServiceItemID)
		pkLayerIndex = append(pkLayerIndex, o.LayerIndex)
	}
	PKArgExpr := psql.Select(sm.Columns(
		psql.F("unnest", psql.Cast(psql.Arg(pkLayerFeatureServiceItemID), "text[]")),
		psql.F("unnest", psql.Cast(psql.Arg(pkLayerIndex), "integer[]")),
	))

	return ArcgisLayers.Query(append(mods,
		sm.Where(psql.Group(ArcgisLayers.Columns.FeatureServiceItemID, ArcgisLayers.Columns.Index).OP("IN", PKArgExpr)),
	)...)
}

// ParcelMappings starts a query for related objects on arcgis.parcel_mapping
func (o *ArcgisLayerField) ParcelMappings(mods ...bob.Mod[*dialect.SelectQuery]) ArcgisParcelMappingsQuery {
	return ArcgisParcelMappings.Query(append(mods,
		sm.Where(ArcgisParcelMappings.Columns.LayerFeatureServiceItemID.EQ(psql.Arg(o.LayerFeatureServiceItemID))), sm.Where(ArcgisParcelMappings.Columns.LayerIndex.EQ(psql.Arg(o.LayerIndex))), sm.Where(ArcgisParcelMappings.Columns.LayerFieldName.EQ(psql.Arg(o.Name))),
	)...)
}

// ParcelMappings starts a query for parcel mappings related to any element
// of the slice, matching the composite key via unnest-ed Postgres arrays.
func (os ArcgisLayerFieldSlice) ParcelMappings(mods ...bob.Mod[*dialect.SelectQuery]) ArcgisParcelMappingsQuery {
	pkLayerFeatureServiceItemID := make(pgtypes.Array[string], 0, len(os))

	pkLayerIndex := make(pgtypes.Array[int32], 0, len(os))

	pkName := make(pgtypes.Array[string], 0, len(os))
	for _, o := range os {
		if o == nil {
			continue
		}
		pkLayerFeatureServiceItemID = append(pkLayerFeatureServiceItemID, o.LayerFeatureServiceItemID)
		pkLayerIndex = append(pkLayerIndex, o.LayerIndex)
		pkName = append(pkName, o.Name)
	}
	PKArgExpr := psql.Select(sm.Columns(
		psql.F("unnest", psql.Cast(psql.Arg(pkLayerFeatureServiceItemID), "text[]")),
		psql.F("unnest", psql.Cast(psql.Arg(pkLayerIndex), "integer[]")),
		psql.F("unnest", psql.Cast(psql.Arg(pkName), "text[]")),
	))

	return ArcgisParcelMappings.Query(append(mods,
		sm.Where(psql.Group(ArcgisParcelMappings.Columns.LayerFeatureServiceItemID, ArcgisParcelMappings.Columns.LayerIndex, ArcgisParcelMappings.Columns.LayerFieldName).OP("IN", PKArgExpr)),
	)...)
}

// insertArcgisLayerFieldAddressMappings0 sets the FK columns on each setter
// to this row's primary key and inserts the related address mappings.
func insertArcgisLayerFieldAddressMappings0(ctx context.Context, exec bob.Executor, arcgisAddressMappings1 []*ArcgisAddressMappingSetter, arcgisLayerField0 *ArcgisLayerField) (ArcgisAddressMappingSlice, error) {
	for i := range arcgisAddressMappings1 {
		arcgisAddressMappings1[i].LayerFeatureServiceItemID = omit.From(arcgisLayerField0.LayerFeatureServiceItemID)
		arcgisAddressMappings1[i].LayerIndex = omit.From(arcgisLayerField0.LayerIndex)
		arcgisAddressMappings1[i].LayerFieldName = omit.From(arcgisLayerField0.Name)
	}

	ret, err := ArcgisAddressMappings.Insert(bob.ToMods(arcgisAddressMappings1...)).All(ctx, exec)
	if err != nil {
		return ret, fmt.Errorf("insertArcgisLayerFieldAddressMappings0: %w", err)
	}

	return ret, nil
}

// attachArcgisLayerFieldAddressMappings0 re-points existing address mappings
// at this row by updating their FK columns.
func attachArcgisLayerFieldAddressMappings0(ctx context.Context, exec bob.Executor, count int, arcgisAddressMappings1 ArcgisAddressMappingSlice, arcgisLayerField0 *ArcgisLayerField) (ArcgisAddressMappingSlice, error) {
	setter := &ArcgisAddressMappingSetter{
		LayerFeatureServiceItemID: omit.From(arcgisLayerField0.LayerFeatureServiceItemID),
		LayerIndex:                omit.From(arcgisLayerField0.LayerIndex),
		LayerFieldName:            omit.From(arcgisLayerField0.Name),
	}

	err := arcgisAddressMappings1.UpdateAll(ctx, exec, *setter)
	if err != nil {
		return nil, fmt.Errorf("attachArcgisLayerFieldAddressMappings0: %w", err)
	}

	return arcgisAddressMappings1, nil
}

// InsertAddressMappings inserts the given setters as address mappings related
// to this row and appends them to .R.AddressMappings.
func (arcgisLayerField0 *ArcgisLayerField) InsertAddressMappings(ctx context.Context, exec bob.Executor, related ...*ArcgisAddressMappingSetter) error {
	if len(related) == 0 {
		return nil
	}

	var err error

	arcgisAddressMappings1, err := insertArcgisLayerFieldAddressMappings0(ctx, exec, related, arcgisLayerField0)
	if err != nil {
		return err
	}

	arcgisLayerField0.R.AddressMappings = append(arcgisLayerField0.R.AddressMappings, arcgisAddressMappings1...)

	for _, rel := range arcgisAddressMappings1 {
		rel.R.LayerField = arcgisLayerField0
	}
	return nil
}

// AttachAddressMappings re-points the given existing address mappings at
// this row and appends them to .R.AddressMappings.
func (arcgisLayerField0 *ArcgisLayerField) AttachAddressMappings(ctx context.Context, exec bob.Executor, related ...*ArcgisAddressMapping) error {
	if len(related) == 0 {
		return nil
	}

	var err error
	arcgisAddressMappings1 := ArcgisAddressMappingSlice(related)

	_, err = attachArcgisLayerFieldAddressMappings0(ctx, exec, len(related), arcgisAddressMappings1, arcgisLayerField0)
	if err != nil {
		return err
	}

	arcgisLayerField0.R.AddressMappings = append(arcgisLayerField0.R.AddressMappings, arcgisAddressMappings1...)

	for _, rel := range related {
		rel.R.LayerField = arcgisLayerField0
	}

	return nil
}

// attachArcgisLayerFieldLayer0 re-points this row at the given parent layer
// by updating its FK (also PK) columns.
func attachArcgisLayerFieldLayer0(ctx context.Context, exec bob.Executor, count int, arcgisLayerField0 *ArcgisLayerField, arcgisLayer1 *ArcgisLayer) (*ArcgisLayerField, error) {
	setter := &ArcgisLayerFieldSetter{
		LayerFeatureServiceItemID: omit.From(arcgisLayer1.FeatureServiceItemID),
		LayerIndex:                omit.From(arcgisLayer1.Index),
	}

	err := arcgisLayerField0.Update(ctx, exec, setter)
	if err != nil {
		return nil, fmt.Errorf("attachArcgisLayerFieldLayer0: %w", err)
	}

	return arcgisLayerField0, nil
}

// InsertLayer inserts a new parent layer and attaches this row to it.
func (arcgisLayerField0 *ArcgisLayerField) InsertLayer(ctx context.Context, exec bob.Executor, related *ArcgisLayerSetter) error {
	var err error

	arcgisLayer1, err := ArcgisLayers.Insert(related).One(ctx, exec)
	if err != nil {
		return fmt.Errorf("inserting related objects: %w", err)
	}

	_, err = attachArcgisLayerFieldLayer0(ctx, exec, 1, arcgisLayerField0, arcgisLayer1)
	if err != nil {
		return err
	}

	arcgisLayerField0.R.Layer = arcgisLayer1

	arcgisLayer1.R.LayerFields = append(arcgisLayer1.R.LayerFields, arcgisLayerField0)

	return nil
}

// AttachLayer attaches this row to an existing parent layer.
func (arcgisLayerField0 *ArcgisLayerField) AttachLayer(ctx context.Context, exec bob.Executor, arcgisLayer1 *ArcgisLayer) error {
	var err error

	_, err = attachArcgisLayerFieldLayer0(ctx, exec, 1, arcgisLayerField0, arcgisLayer1)
	if err != nil {
		return err
	}

	arcgisLayerField0.R.Layer = arcgisLayer1

	arcgisLayer1.R.LayerFields = append(arcgisLayer1.R.LayerFields, arcgisLayerField0)

	return nil
}

// insertArcgisLayerFieldParcelMappings0 sets the FK columns on each setter
// to this row's primary key and inserts the related parcel mappings.
func insertArcgisLayerFieldParcelMappings0(ctx context.Context, exec bob.Executor, arcgisParcelMappings1 []*ArcgisParcelMappingSetter, arcgisLayerField0 *ArcgisLayerField) (ArcgisParcelMappingSlice, error) {
	for i := range arcgisParcelMappings1 {
		arcgisParcelMappings1[i].LayerFeatureServiceItemID = omit.From(arcgisLayerField0.LayerFeatureServiceItemID)
		arcgisParcelMappings1[i].LayerIndex = omit.From(arcgisLayerField0.LayerIndex)
		arcgisParcelMappings1[i].LayerFieldName = omit.From(arcgisLayerField0.Name)
	}

	ret, err := ArcgisParcelMappings.Insert(bob.ToMods(arcgisParcelMappings1...)).All(ctx, exec)
	if err != nil {
		return ret, fmt.Errorf("insertArcgisLayerFieldParcelMappings0: %w", err)
	}

	return ret, nil
}

// attachArcgisLayerFieldParcelMappings0 re-points existing parcel mappings
// at this row by updating their FK columns.
func attachArcgisLayerFieldParcelMappings0(ctx context.Context, exec bob.Executor, count int, arcgisParcelMappings1 ArcgisParcelMappingSlice, arcgisLayerField0 *ArcgisLayerField) (ArcgisParcelMappingSlice, error) {
	setter := &ArcgisParcelMappingSetter{
		LayerFeatureServiceItemID: omit.From(arcgisLayerField0.LayerFeatureServiceItemID),
		LayerIndex:                omit.From(arcgisLayerField0.LayerIndex),
		LayerFieldName:            omit.From(arcgisLayerField0.Name),
	}

	err := arcgisParcelMappings1.UpdateAll(ctx, exec, *setter)
	if err != nil {
		return nil, fmt.Errorf("attachArcgisLayerFieldParcelMappings0: %w", err)
	}

	return arcgisParcelMappings1, nil
}

// InsertParcelMappings inserts the given setters as parcel mappings related
// to this row and appends them to .R.ParcelMappings.
func (arcgisLayerField0 *ArcgisLayerField) InsertParcelMappings(ctx context.Context, exec bob.Executor, related ...*ArcgisParcelMappingSetter) error {
	if len(related) == 0 {
		return nil
	}

	var err error

	arcgisParcelMappings1, err := insertArcgisLayerFieldParcelMappings0(ctx, exec, related, arcgisLayerField0)
	if err != nil {
		return err
	}

	arcgisLayerField0.R.ParcelMappings = append(arcgisLayerField0.R.ParcelMappings, arcgisParcelMappings1...)

	for _, rel := range arcgisParcelMappings1 {
		rel.R.LayerField = arcgisLayerField0
	}
	return nil
}

// AttachParcelMappings re-points the given existing parcel mappings at this
// row and appends them to .R.ParcelMappings.
func (arcgisLayerField0 *ArcgisLayerField) AttachParcelMappings(ctx context.Context, exec bob.Executor, related ...*ArcgisParcelMapping) error {
	if len(related) == 0 {
		return nil
	}

	var err error
	arcgisParcelMappings1 := ArcgisParcelMappingSlice(related)

	_, err = attachArcgisLayerFieldParcelMappings0(ctx, exec, len(related), arcgisParcelMappings1, arcgisLayerField0)
	if err != nil {
		return err
	}

	arcgisLayerField0.R.ParcelMappings = append(arcgisLayerField0.R.ParcelMappings, arcgisParcelMappings1...)

	for _, rel := range related {
		rel.R.LayerField = arcgisLayerField0
	}

	return nil
}

// arcgisLayerFieldWhere exposes typed WHERE helpers for each column.
type arcgisLayerFieldWhere[Q psql.Filterable] struct {
	LayerFeatureServiceItemID psql.WhereMod[Q, string]
	LayerIndex                psql.WhereMod[Q, int32]
	Name                      psql.WhereMod[Q, string]
	Type                      psql.WhereMod[Q, enums.ArcgisFieldtype]
}

// AliasedAs returns WHERE helpers bound to a new table alias.
func (arcgisLayerFieldWhere[Q]) AliasedAs(alias string) arcgisLayerFieldWhere[Q] {
	return buildArcgisLayerFieldWhere[Q](buildArcgisLayerFieldColumns(alias))
}

// buildArcgisLayerFieldWhere constructs WHERE helpers over the given columns.
func buildArcgisLayerFieldWhere[Q psql.Filterable](cols arcgisLayerFieldColumns) arcgisLayerFieldWhere[Q] {
	return arcgisLayerFieldWhere[Q]{
		LayerFeatureServiceItemID: psql.Where[Q, string](cols.LayerFeatureServiceItemID),
		LayerIndex:                psql.Where[Q, int32](cols.LayerIndex),
		Name:                      psql.Where[Q, string](cols.Name),
		Type:                      psql.Where[Q, enums.ArcgisFieldtype](cols.Type),
	}
}

// Preload stores an eagerly-loaded relationship by name into .R, and links
// the related objects back to this row.
func (o *ArcgisLayerField) Preload(name string, retrieved any) error {
	if o == nil {
		return nil
	}

	switch name {
	case "AddressMappings":
		rels, ok := retrieved.(ArcgisAddressMappingSlice)
		if !ok {
			return fmt.Errorf("arcgisLayerField cannot load %T as %q", retrieved, name)
		}

		o.R.AddressMappings = rels

		for _, rel := range rels {
			if rel != nil {
				rel.R.LayerField = o
			}
		}
		return nil
	case "Layer":
		rel, ok := retrieved.(*ArcgisLayer)
		if !ok {
			return fmt.Errorf("arcgisLayerField cannot load %T as %q", retrieved, name)
		}

		o.R.Layer = rel

		if rel != nil {
			rel.R.LayerFields = ArcgisLayerFieldSlice{o}
		}
		return nil
	case "ParcelMappings":
		rels, ok := retrieved.(ArcgisParcelMappingSlice)
		if !ok {
			return fmt.Errorf("arcgisLayerField cannot load %T as %q", retrieved, name)
		}

		o.R.ParcelMappings = rels

		for _, rel := range rels {
			if rel != nil {
				rel.R.LayerField = o
			}
		}
		return nil
	default:
		return fmt.Errorf("arcgisLayerField has no relationship %q", name)
	}
}

// arcgisLayerFieldPreloader exposes join-based eager loaders (to-one only).
type arcgisLayerFieldPreloader struct {
	Layer func(...psql.PreloadOption) psql.Preloader
}

// buildArcgisLayerFieldPreloader constructs the preloader for this table.
func buildArcgisLayerFieldPreloader() arcgisLayerFieldPreloader {
	return arcgisLayerFieldPreloader{
		Layer: func(opts ...psql.PreloadOption) psql.Preloader {
			return psql.Preload[*ArcgisLayer, ArcgisLayerSlice](psql.PreloadRel{
				Name: "Layer",
				Sides: []psql.PreloadSide{
					{
						From:        ArcgisLayerFields,
						To:          ArcgisLayers,
						FromColumns: []string{"layer_feature_service_item_id", "layer_index"},
						ToColumns:   []string{"feature_service_item_id", "index_"},
					},
				},
			}, ArcgisLayers.Columns.Names(), opts...)
		},
	}
}

// arcgisLayerFieldThenLoader exposes follow-up query loaders for each
// relationship.
type arcgisLayerFieldThenLoader[Q orm.Loadable] struct {
	AddressMappings func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
	Layer           func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
	ParcelMappings  func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
}

// buildArcgisLayerFieldThenLoader constructs the then-loaders for this table.
func buildArcgisLayerFieldThenLoader[Q orm.Loadable]() arcgisLayerFieldThenLoader[Q] {
	type AddressMappingsLoadInterface interface {
		LoadAddressMappings(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
	}
	type LayerLoadInterface interface {
		LoadLayer(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
	}
	type ParcelMappingsLoadInterface interface {
		LoadParcelMappings(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
	}

	return arcgisLayerFieldThenLoader[Q]{
		AddressMappings: thenLoadBuilder[Q](
			"AddressMappings",
			func(ctx context.Context, exec bob.Executor, retrieved AddressMappingsLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
				return retrieved.LoadAddressMappings(ctx, exec, mods...)
			},
		),
		Layer: thenLoadBuilder[Q](
			"Layer",
			func(ctx context.Context, exec bob.Executor, retrieved LayerLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
				return retrieved.LoadLayer(ctx, exec, mods...)
			},
		),
		ParcelMappings: thenLoadBuilder[Q](
			"ParcelMappings",
			func(ctx context.Context, exec bob.Executor, retrieved ParcelMappingsLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
				return retrieved.LoadParcelMappings(ctx, exec, mods...)
			},
		),
	}
}

// LoadAddressMappings loads the arcgisLayerField's AddressMappings into the .R struct
func (o *ArcgisLayerField) LoadAddressMappings(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
	if o == nil {
		return nil
	}

	// Reset the relationship
	o.R.AddressMappings = nil

	related, err := o.AddressMappings(mods...).All(ctx, exec)
	if err != nil {
		return err
	}

	for _, rel := range related {
		rel.R.LayerField = o
	}

	o.R.AddressMappings = related
	return nil
}

// LoadAddressMappings loads the related AddressMappings for every element of
// the slice into each element's .R struct.
func (os ArcgisLayerFieldSlice) LoadAddressMappings(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
	if len(os) == 0 {
		return nil
	}

	arcgisAddressMappings, err := os.AddressMappings(mods...).All(ctx, exec)
	if err != nil {
		return err
	}

	for _, o := range os {
		if o == nil {
			continue
		}

		o.R.AddressMappings = nil
	}

	for _, o := range os {
		if o == nil {
			continue
		}

		for _, rel := range arcgisAddressMappings {

			if !(o.LayerFeatureServiceItemID == rel.LayerFeatureServiceItemID) {
				continue
			}

			if !(o.LayerIndex == rel.LayerIndex) {
				continue
			}

			if !(o.Name == rel.LayerFieldName) {
				continue
			}

			rel.R.LayerField = o

			o.R.AddressMappings = append(o.R.AddressMappings, rel)
		}
	}

	return nil
}

// LoadLayer loads the arcgisLayerField's Layer into the .R struct
func (o *ArcgisLayerField) LoadLayer(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
	if o == nil {
		return nil
	}

	// Reset the relationship
	o.R.Layer = nil

	related, err := o.Layer(mods...).One(ctx, exec)
	if err != nil {
		return err
	}

	related.R.LayerFields = ArcgisLayerFieldSlice{o}

	o.R.Layer = related
	return nil
}

// LoadLayer loads the parent Layer for every element of the slice into each
// element's .R struct.
func (os ArcgisLayerFieldSlice) LoadLayer(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
	if len(os) == 0 {
		return nil
	}

	arcgisLayers, err := os.Layer(mods...).All(ctx, exec)
	if err != nil {
		return err
	}

	for _, o := range os {
		if o == nil {
			continue
		}

		for _, rel := range arcgisLayers {

			if !(o.LayerFeatureServiceItemID == rel.FeatureServiceItemID) {
				continue
			}

			if !(o.LayerIndex == rel.Index) {
				continue
			}

			rel.R.LayerFields = append(rel.R.LayerFields, o)

			o.R.Layer = rel
			break
		}
	}

	return nil
}

// LoadParcelMappings loads the arcgisLayerField's ParcelMappings into the .R struct
func (o *ArcgisLayerField) LoadParcelMappings(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
	if o == nil {
		return nil
	}

	// Reset the relationship
	o.R.ParcelMappings = nil

	related, err := o.ParcelMappings(mods...).All(ctx, exec)
	if err != nil {
		return err
	}

	for _, rel := range related {
		rel.R.LayerField = o
	}

	o.R.ParcelMappings = related
	return nil
}

// LoadParcelMappings loads the related ParcelMappings for every element of
// the slice into each element's .R struct.
func (os ArcgisLayerFieldSlice) LoadParcelMappings(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
	if len(os) == 0 {
		return nil
	}

	arcgisParcelMappings, err := os.ParcelMappings(mods...).All(ctx, exec)
	if err != nil {
		return err
	}

	for _, o := range os {
		if o == nil {
			continue
		}

		o.R.ParcelMappings = nil
	}

	for _, o := range os {
		if o == nil {
			continue
		}

		for _, rel := range arcgisParcelMappings {

			if !(o.LayerFeatureServiceItemID == rel.LayerFeatureServiceItemID) {
				continue
			}

			if !(o.LayerIndex == rel.LayerIndex) {
				continue
			}

			if !(o.Name == rel.LayerFieldName) {
				continue
			}

			rel.R.LayerField = o

			o.R.ParcelMappings = append(o.R.ParcelMappings, rel)
		}
	}

	return nil
}

// arcgisLayerFieldC is where
relationship counts are stored. +type arcgisLayerFieldC struct { + AddressMappings *int64 + ParcelMappings *int64 +} + +// PreloadCount sets a count in the C struct by name +func (o *ArcgisLayerField) PreloadCount(name string, count int64) error { + if o == nil { + return nil + } + + switch name { + case "AddressMappings": + o.C.AddressMappings = &count + case "ParcelMappings": + o.C.ParcelMappings = &count + } + return nil +} + +type arcgisLayerFieldCountPreloader struct { + AddressMappings func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader + ParcelMappings func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader +} + +func buildArcgisLayerFieldCountPreloader() arcgisLayerFieldCountPreloader { + return arcgisLayerFieldCountPreloader{ + AddressMappings: func(mods ...bob.Mod[*dialect.SelectQuery]) psql.Preloader { + return countPreloader[*ArcgisLayerField]("AddressMappings", func(parent string) bob.Expression { + // Build a correlated subquery: (SELECT COUNT(*) FROM related WHERE fk = parent.pk) + if parent == "" { + parent = ArcgisLayerFields.Alias() + } + + subqueryMods := []bob.Mod[*dialect.SelectQuery]{ + sm.Columns(psql.Raw("count(*)")), + + sm.From(ArcgisAddressMappings.Name()), + sm.Where(psql.Quote(ArcgisAddressMappings.Alias(), "layer_feature_service_item_id").EQ(psql.Quote(parent, "layer_feature_service_item_id"))), + sm.Where(psql.Quote(ArcgisAddressMappings.Alias(), "layer_index").EQ(psql.Quote(parent, "layer_index"))), + sm.Where(psql.Quote(ArcgisAddressMappings.Alias(), "layer_field_name").EQ(psql.Quote(parent, "name"))), + } + subqueryMods = append(subqueryMods, mods...) 
+ return psql.Group(psql.Select(subqueryMods...).Expression) + }) + }, + ParcelMappings: func(mods ...bob.Mod[*dialect.SelectQuery]) psql.Preloader { + return countPreloader[*ArcgisLayerField]("ParcelMappings", func(parent string) bob.Expression { + // Build a correlated subquery: (SELECT COUNT(*) FROM related WHERE fk = parent.pk) + if parent == "" { + parent = ArcgisLayerFields.Alias() + } + + subqueryMods := []bob.Mod[*dialect.SelectQuery]{ + sm.Columns(psql.Raw("count(*)")), + + sm.From(ArcgisParcelMappings.Name()), + sm.Where(psql.Quote(ArcgisParcelMappings.Alias(), "layer_feature_service_item_id").EQ(psql.Quote(parent, "layer_feature_service_item_id"))), + sm.Where(psql.Quote(ArcgisParcelMappings.Alias(), "layer_index").EQ(psql.Quote(parent, "layer_index"))), + sm.Where(psql.Quote(ArcgisParcelMappings.Alias(), "layer_field_name").EQ(psql.Quote(parent, "name"))), + } + subqueryMods = append(subqueryMods, mods...) + return psql.Group(psql.Select(subqueryMods...).Expression) + }) + }, + } +} + +type arcgisLayerFieldCountThenLoader[Q orm.Loadable] struct { + AddressMappings func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] + ParcelMappings func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] +} + +func buildArcgisLayerFieldCountThenLoader[Q orm.Loadable]() arcgisLayerFieldCountThenLoader[Q] { + type AddressMappingsCountInterface interface { + LoadCountAddressMappings(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error + } + type ParcelMappingsCountInterface interface { + LoadCountParcelMappings(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error + } + + return arcgisLayerFieldCountThenLoader[Q]{ + AddressMappings: countThenLoadBuilder[Q]( + "AddressMappings", + func(ctx context.Context, exec bob.Executor, retrieved AddressMappingsCountInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { + return retrieved.LoadCountAddressMappings(ctx, exec, mods...) 
+ }, + ), + ParcelMappings: countThenLoadBuilder[Q]( + "ParcelMappings", + func(ctx context.Context, exec bob.Executor, retrieved ParcelMappingsCountInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { + return retrieved.LoadCountParcelMappings(ctx, exec, mods...) + }, + ), + } +} + +// LoadCountAddressMappings loads the count of AddressMappings into the C struct +func (o *ArcgisLayerField) LoadCountAddressMappings(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if o == nil { + return nil + } + + count, err := o.AddressMappings(mods...).Count(ctx, exec) + if err != nil { + return err + } + + o.C.AddressMappings = &count + return nil +} + +// LoadCountAddressMappings loads the count of AddressMappings for a slice +func (os ArcgisLayerFieldSlice) LoadCountAddressMappings(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if len(os) == 0 { + return nil + } + + for _, o := range os { + if err := o.LoadCountAddressMappings(ctx, exec, mods...); err != nil { + return err + } + } + + return nil +} + +// LoadCountParcelMappings loads the count of ParcelMappings into the C struct +func (o *ArcgisLayerField) LoadCountParcelMappings(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if o == nil { + return nil + } + + count, err := o.ParcelMappings(mods...).Count(ctx, exec) + if err != nil { + return err + } + + o.C.ParcelMappings = &count + return nil +} + +// LoadCountParcelMappings loads the count of ParcelMappings for a slice +func (os ArcgisLayerFieldSlice) LoadCountParcelMappings(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if len(os) == 0 { + return nil + } + + for _, o := range os { + if err := o.LoadCountParcelMappings(ctx, exec, mods...); err != nil { + return err + } + } + + return nil +} + +type arcgisLayerFieldJoins[Q dialect.Joinable] struct { + typ string + AddressMappings modAs[Q, 
arcgisAddressMappingColumns] + Layer modAs[Q, arcgisLayerColumns] + ParcelMappings modAs[Q, arcgisParcelMappingColumns] +} + +func (j arcgisLayerFieldJoins[Q]) aliasedAs(alias string) arcgisLayerFieldJoins[Q] { + return buildArcgisLayerFieldJoins[Q](buildArcgisLayerFieldColumns(alias), j.typ) +} + +func buildArcgisLayerFieldJoins[Q dialect.Joinable](cols arcgisLayerFieldColumns, typ string) arcgisLayerFieldJoins[Q] { + return arcgisLayerFieldJoins[Q]{ + typ: typ, + AddressMappings: modAs[Q, arcgisAddressMappingColumns]{ + c: ArcgisAddressMappings.Columns, + f: func(to arcgisAddressMappingColumns) bob.Mod[Q] { + mods := make(mods.QueryMods[Q], 0, 1) + + { + mods = append(mods, dialect.Join[Q](typ, ArcgisAddressMappings.Name().As(to.Alias())).On( + to.LayerFeatureServiceItemID.EQ(cols.LayerFeatureServiceItemID), to.LayerIndex.EQ(cols.LayerIndex), to.LayerFieldName.EQ(cols.Name), + )) + } + + return mods + }, + }, + Layer: modAs[Q, arcgisLayerColumns]{ + c: ArcgisLayers.Columns, + f: func(to arcgisLayerColumns) bob.Mod[Q] { + mods := make(mods.QueryMods[Q], 0, 1) + + { + mods = append(mods, dialect.Join[Q](typ, ArcgisLayers.Name().As(to.Alias())).On( + to.FeatureServiceItemID.EQ(cols.LayerFeatureServiceItemID), to.Index.EQ(cols.LayerIndex), + )) + } + + return mods + }, + }, + ParcelMappings: modAs[Q, arcgisParcelMappingColumns]{ + c: ArcgisParcelMappings.Columns, + f: func(to arcgisParcelMappingColumns) bob.Mod[Q] { + mods := make(mods.QueryMods[Q], 0, 1) + + { + mods = append(mods, dialect.Join[Q](typ, ArcgisParcelMappings.Name().As(to.Alias())).On( + to.LayerFeatureServiceItemID.EQ(cols.LayerFeatureServiceItemID), to.LayerIndex.EQ(cols.LayerIndex), to.LayerFieldName.EQ(cols.Name), + )) + } + + return mods + }, + }, + } +} diff --git a/db/models/arcgis.parcel_mapping.bob.go b/db/models/arcgis.parcel_mapping.bob.go new file mode 100644 index 00000000..251a5ee7 --- /dev/null +++ b/db/models/arcgis.parcel_mapping.bob.go @@ -0,0 +1,882 @@ +// Code generated by BobGen 
psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "fmt" + "io" + + "github.com/Gleipnir-Technology/bob" + "github.com/Gleipnir-Technology/bob/dialect/psql" + "github.com/Gleipnir-Technology/bob/dialect/psql/dialect" + "github.com/Gleipnir-Technology/bob/dialect/psql/dm" + "github.com/Gleipnir-Technology/bob/dialect/psql/sm" + "github.com/Gleipnir-Technology/bob/dialect/psql/um" + "github.com/Gleipnir-Technology/bob/expr" + "github.com/Gleipnir-Technology/bob/mods" + "github.com/Gleipnir-Technology/bob/orm" + "github.com/Gleipnir-Technology/bob/types/pgtypes" + enums "github.com/Gleipnir-Technology/nidus-sync/db/enums" + "github.com/aarondl/opt/omit" +) + +// ArcgisParcelMapping is an object representing the database table. +type ArcgisParcelMapping struct { + Destination enums.ArcgisMappingdestinationparcel `db:"destination,pk" ` + LayerFeatureServiceItemID string `db:"layer_feature_service_item_id" ` + LayerIndex int32 `db:"layer_index" ` + LayerFieldName string `db:"layer_field_name" ` + OrganizationID int32 `db:"organization_id,pk" ` + + R arcgisParcelMappingR `db:"-" ` +} + +// ArcgisParcelMappingSlice is an alias for a slice of pointers to ArcgisParcelMapping. +// This should almost always be used instead of []*ArcgisParcelMapping. +type ArcgisParcelMappingSlice []*ArcgisParcelMapping + +// ArcgisParcelMappings contains methods to work with the parcel_mapping table +var ArcgisParcelMappings = psql.NewTablex[*ArcgisParcelMapping, ArcgisParcelMappingSlice, *ArcgisParcelMappingSetter]("arcgis", "parcel_mapping", buildArcgisParcelMappingColumns("arcgis.parcel_mapping")) + +// ArcgisParcelMappingsQuery is a query on the parcel_mapping table +type ArcgisParcelMappingsQuery = *psql.ViewQuery[*ArcgisParcelMapping, ArcgisParcelMappingSlice] + +// arcgisParcelMappingR is where relationships are stored. 
+type arcgisParcelMappingR struct { + LayerField *ArcgisLayerField // arcgis.parcel_mapping.parcel_mapping_layer_feature_service_item_id_layer_index_l_fkey + Organization *Organization // arcgis.parcel_mapping.parcel_mapping_organization_id_fkey +} + +func buildArcgisParcelMappingColumns(alias string) arcgisParcelMappingColumns { + return arcgisParcelMappingColumns{ + ColumnsExpr: expr.NewColumnsExpr( + "destination", "layer_feature_service_item_id", "layer_index", "layer_field_name", "organization_id", + ).WithParent("arcgis.parcel_mapping"), + tableAlias: alias, + Destination: psql.Quote(alias, "destination"), + LayerFeatureServiceItemID: psql.Quote(alias, "layer_feature_service_item_id"), + LayerIndex: psql.Quote(alias, "layer_index"), + LayerFieldName: psql.Quote(alias, "layer_field_name"), + OrganizationID: psql.Quote(alias, "organization_id"), + } +} + +type arcgisParcelMappingColumns struct { + expr.ColumnsExpr + tableAlias string + Destination psql.Expression + LayerFeatureServiceItemID psql.Expression + LayerIndex psql.Expression + LayerFieldName psql.Expression + OrganizationID psql.Expression +} + +func (c arcgisParcelMappingColumns) Alias() string { + return c.tableAlias +} + +func (arcgisParcelMappingColumns) AliasedAs(alias string) arcgisParcelMappingColumns { + return buildArcgisParcelMappingColumns(alias) +} + +// ArcgisParcelMappingSetter is used for insert/upsert/update operations +// All values are optional, and do not have to be set +// Generated columns are not included +type ArcgisParcelMappingSetter struct { + Destination omit.Val[enums.ArcgisMappingdestinationparcel] `db:"destination,pk" ` + LayerFeatureServiceItemID omit.Val[string] `db:"layer_feature_service_item_id" ` + LayerIndex omit.Val[int32] `db:"layer_index" ` + LayerFieldName omit.Val[string] `db:"layer_field_name" ` + OrganizationID omit.Val[int32] `db:"organization_id,pk" ` +} + +func (s ArcgisParcelMappingSetter) SetColumns() []string { + vals := make([]string, 0, 5) + if 
s.Destination.IsValue() { + vals = append(vals, "destination") + } + if s.LayerFeatureServiceItemID.IsValue() { + vals = append(vals, "layer_feature_service_item_id") + } + if s.LayerIndex.IsValue() { + vals = append(vals, "layer_index") + } + if s.LayerFieldName.IsValue() { + vals = append(vals, "layer_field_name") + } + if s.OrganizationID.IsValue() { + vals = append(vals, "organization_id") + } + return vals +} + +func (s ArcgisParcelMappingSetter) Overwrite(t *ArcgisParcelMapping) { + if s.Destination.IsValue() { + t.Destination = s.Destination.MustGet() + } + if s.LayerFeatureServiceItemID.IsValue() { + t.LayerFeatureServiceItemID = s.LayerFeatureServiceItemID.MustGet() + } + if s.LayerIndex.IsValue() { + t.LayerIndex = s.LayerIndex.MustGet() + } + if s.LayerFieldName.IsValue() { + t.LayerFieldName = s.LayerFieldName.MustGet() + } + if s.OrganizationID.IsValue() { + t.OrganizationID = s.OrganizationID.MustGet() + } +} + +func (s *ArcgisParcelMappingSetter) Apply(q *dialect.InsertQuery) { + q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) { + return ArcgisParcelMappings.BeforeInsertHooks.RunHooks(ctx, exec, s) + }) + + q.AppendValues(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) { + vals := make([]bob.Expression, 5) + if s.Destination.IsValue() { + vals[0] = psql.Arg(s.Destination.MustGet()) + } else { + vals[0] = psql.Raw("DEFAULT") + } + + if s.LayerFeatureServiceItemID.IsValue() { + vals[1] = psql.Arg(s.LayerFeatureServiceItemID.MustGet()) + } else { + vals[1] = psql.Raw("DEFAULT") + } + + if s.LayerIndex.IsValue() { + vals[2] = psql.Arg(s.LayerIndex.MustGet()) + } else { + vals[2] = psql.Raw("DEFAULT") + } + + if s.LayerFieldName.IsValue() { + vals[3] = psql.Arg(s.LayerFieldName.MustGet()) + } else { + vals[3] = psql.Raw("DEFAULT") + } + + if s.OrganizationID.IsValue() { + vals[4] = psql.Arg(s.OrganizationID.MustGet()) + } else { + vals[4] = psql.Raw("DEFAULT") + } 
+ + return bob.ExpressSlice(ctx, w, d, start, vals, "", ", ", "") + })) +} + +func (s ArcgisParcelMappingSetter) UpdateMod() bob.Mod[*dialect.UpdateQuery] { + return um.Set(s.Expressions()...) +} + +func (s ArcgisParcelMappingSetter) Expressions(prefix ...string) []bob.Expression { + exprs := make([]bob.Expression, 0, 5) + + if s.Destination.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "destination")...), + psql.Arg(s.Destination), + }}) + } + + if s.LayerFeatureServiceItemID.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "layer_feature_service_item_id")...), + psql.Arg(s.LayerFeatureServiceItemID), + }}) + } + + if s.LayerIndex.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "layer_index")...), + psql.Arg(s.LayerIndex), + }}) + } + + if s.LayerFieldName.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "layer_field_name")...), + psql.Arg(s.LayerFieldName), + }}) + } + + if s.OrganizationID.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "organization_id")...), + psql.Arg(s.OrganizationID), + }}) + } + + return exprs +} + +// FindArcgisParcelMapping retrieves a single record by primary key +// If cols is empty Find will return all columns. 
+func FindArcgisParcelMapping(ctx context.Context, exec bob.Executor, OrganizationIDPK int32, DestinationPK enums.ArcgisMappingdestinationparcel, cols ...string) (*ArcgisParcelMapping, error) { + if len(cols) == 0 { + return ArcgisParcelMappings.Query( + sm.Where(ArcgisParcelMappings.Columns.OrganizationID.EQ(psql.Arg(OrganizationIDPK))), + sm.Where(ArcgisParcelMappings.Columns.Destination.EQ(psql.Arg(DestinationPK))), + ).One(ctx, exec) + } + + return ArcgisParcelMappings.Query( + sm.Where(ArcgisParcelMappings.Columns.OrganizationID.EQ(psql.Arg(OrganizationIDPK))), + sm.Where(ArcgisParcelMappings.Columns.Destination.EQ(psql.Arg(DestinationPK))), + sm.Columns(ArcgisParcelMappings.Columns.Only(cols...)), + ).One(ctx, exec) +} + +// ArcgisParcelMappingExists checks the presence of a single record by primary key +func ArcgisParcelMappingExists(ctx context.Context, exec bob.Executor, OrganizationIDPK int32, DestinationPK enums.ArcgisMappingdestinationparcel) (bool, error) { + return ArcgisParcelMappings.Query( + sm.Where(ArcgisParcelMappings.Columns.OrganizationID.EQ(psql.Arg(OrganizationIDPK))), + sm.Where(ArcgisParcelMappings.Columns.Destination.EQ(psql.Arg(DestinationPK))), + ).Exists(ctx, exec) +} + +// AfterQueryHook is called after ArcgisParcelMapping is retrieved from the database +func (o *ArcgisParcelMapping) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error { + var err error + + switch queryType { + case bob.QueryTypeSelect: + ctx, err = ArcgisParcelMappings.AfterSelectHooks.RunHooks(ctx, exec, ArcgisParcelMappingSlice{o}) + case bob.QueryTypeInsert: + ctx, err = ArcgisParcelMappings.AfterInsertHooks.RunHooks(ctx, exec, ArcgisParcelMappingSlice{o}) + case bob.QueryTypeUpdate: + ctx, err = ArcgisParcelMappings.AfterUpdateHooks.RunHooks(ctx, exec, ArcgisParcelMappingSlice{o}) + case bob.QueryTypeDelete: + ctx, err = ArcgisParcelMappings.AfterDeleteHooks.RunHooks(ctx, exec, ArcgisParcelMappingSlice{o}) + } + + return err +} + 
+// primaryKeyVals returns the primary key values of the ArcgisParcelMapping +func (o *ArcgisParcelMapping) primaryKeyVals() bob.Expression { + return psql.ArgGroup( + o.OrganizationID, + o.Destination, + ) +} + +func (o *ArcgisParcelMapping) pkEQ() dialect.Expression { + return psql.Group(psql.Quote("arcgis.parcel_mapping", "organization_id"), psql.Quote("arcgis.parcel_mapping", "destination")).EQ(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) { + return o.primaryKeyVals().WriteSQL(ctx, w, d, start) + })) +} + +// Update uses an executor to update the ArcgisParcelMapping +func (o *ArcgisParcelMapping) Update(ctx context.Context, exec bob.Executor, s *ArcgisParcelMappingSetter) error { + v, err := ArcgisParcelMappings.Update(s.UpdateMod(), um.Where(o.pkEQ())).One(ctx, exec) + if err != nil { + return err + } + + o.R = v.R + *o = *v + + return nil +} + +// Delete deletes a single ArcgisParcelMapping record with an executor +func (o *ArcgisParcelMapping) Delete(ctx context.Context, exec bob.Executor) error { + _, err := ArcgisParcelMappings.Delete(dm.Where(o.pkEQ())).Exec(ctx, exec) + return err +} + +// Reload refreshes the ArcgisParcelMapping using the executor +func (o *ArcgisParcelMapping) Reload(ctx context.Context, exec bob.Executor) error { + o2, err := ArcgisParcelMappings.Query( + sm.Where(ArcgisParcelMappings.Columns.OrganizationID.EQ(psql.Arg(o.OrganizationID))), + sm.Where(ArcgisParcelMappings.Columns.Destination.EQ(psql.Arg(o.Destination))), + ).One(ctx, exec) + if err != nil { + return err + } + o2.R = o.R + *o = *o2 + + return nil +} + +// AfterQueryHook is called after ArcgisParcelMappingSlice is retrieved from the database +func (o ArcgisParcelMappingSlice) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error { + var err error + + switch queryType { + case bob.QueryTypeSelect: + ctx, err = ArcgisParcelMappings.AfterSelectHooks.RunHooks(ctx, exec, o) + case 
bob.QueryTypeInsert: + ctx, err = ArcgisParcelMappings.AfterInsertHooks.RunHooks(ctx, exec, o) + case bob.QueryTypeUpdate: + ctx, err = ArcgisParcelMappings.AfterUpdateHooks.RunHooks(ctx, exec, o) + case bob.QueryTypeDelete: + ctx, err = ArcgisParcelMappings.AfterDeleteHooks.RunHooks(ctx, exec, o) + } + + return err +} + +func (o ArcgisParcelMappingSlice) pkIN() dialect.Expression { + if len(o) == 0 { + return psql.Raw("NULL") + } + + return psql.Group(psql.Quote("arcgis.parcel_mapping", "organization_id"), psql.Quote("arcgis.parcel_mapping", "destination")).In(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) { + pkPairs := make([]bob.Expression, len(o)) + for i, row := range o { + pkPairs[i] = row.primaryKeyVals() + } + return bob.ExpressSlice(ctx, w, d, start, pkPairs, "", ", ", "") + })) +} + +// copyMatchingRows finds models in the given slice that have the same primary key +// then it first copies the existing relationships from the old model to the new model +// and then replaces the old model in the slice with the new model +func (o ArcgisParcelMappingSlice) copyMatchingRows(from ...*ArcgisParcelMapping) { + for i, old := range o { + for _, new := range from { + if new.OrganizationID != old.OrganizationID { + continue + } + if new.Destination != old.Destination { + continue + } + new.R = old.R + o[i] = new + break + } + } +} + +// UpdateMod modifies an update query with "WHERE primary_key IN (o...)" +func (o ArcgisParcelMappingSlice) UpdateMod() bob.Mod[*dialect.UpdateQuery] { + return bob.ModFunc[*dialect.UpdateQuery](func(q *dialect.UpdateQuery) { + q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) { + return ArcgisParcelMappings.BeforeUpdateHooks.RunHooks(ctx, exec, o) + }) + + q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error { + var err error + switch retrieved := retrieved.(type) { + case *ArcgisParcelMapping: + 
o.copyMatchingRows(retrieved) + case []*ArcgisParcelMapping: + o.copyMatchingRows(retrieved...) + case ArcgisParcelMappingSlice: + o.copyMatchingRows(retrieved...) + default: + // If the retrieved value is not a ArcgisParcelMapping or a slice of ArcgisParcelMapping + // then run the AfterUpdateHooks on the slice + _, err = ArcgisParcelMappings.AfterUpdateHooks.RunHooks(ctx, exec, o) + } + + return err + })) + + q.AppendWhere(o.pkIN()) + }) +} + +// DeleteMod modifies an delete query with "WHERE primary_key IN (o...)" +func (o ArcgisParcelMappingSlice) DeleteMod() bob.Mod[*dialect.DeleteQuery] { + return bob.ModFunc[*dialect.DeleteQuery](func(q *dialect.DeleteQuery) { + q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) { + return ArcgisParcelMappings.BeforeDeleteHooks.RunHooks(ctx, exec, o) + }) + + q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error { + var err error + switch retrieved := retrieved.(type) { + case *ArcgisParcelMapping: + o.copyMatchingRows(retrieved) + case []*ArcgisParcelMapping: + o.copyMatchingRows(retrieved...) + case ArcgisParcelMappingSlice: + o.copyMatchingRows(retrieved...) 
+ default: + // If the retrieved value is not a ArcgisParcelMapping or a slice of ArcgisParcelMapping + // then run the AfterDeleteHooks on the slice + _, err = ArcgisParcelMappings.AfterDeleteHooks.RunHooks(ctx, exec, o) + } + + return err + })) + + q.AppendWhere(o.pkIN()) + }) +} + +func (o ArcgisParcelMappingSlice) UpdateAll(ctx context.Context, exec bob.Executor, vals ArcgisParcelMappingSetter) error { + if len(o) == 0 { + return nil + } + + _, err := ArcgisParcelMappings.Update(vals.UpdateMod(), o.UpdateMod()).All(ctx, exec) + return err +} + +func (o ArcgisParcelMappingSlice) DeleteAll(ctx context.Context, exec bob.Executor) error { + if len(o) == 0 { + return nil + } + + _, err := ArcgisParcelMappings.Delete(o.DeleteMod()).Exec(ctx, exec) + return err +} + +func (o ArcgisParcelMappingSlice) ReloadAll(ctx context.Context, exec bob.Executor) error { + if len(o) == 0 { + return nil + } + + o2, err := ArcgisParcelMappings.Query(sm.Where(o.pkIN())).All(ctx, exec) + if err != nil { + return err + } + + o.copyMatchingRows(o2...) + + return nil +} + +// LayerField starts a query for related objects on arcgis.layer_field +func (o *ArcgisParcelMapping) LayerField(mods ...bob.Mod[*dialect.SelectQuery]) ArcgisLayerFieldsQuery { + return ArcgisLayerFields.Query(append(mods, + sm.Where(ArcgisLayerFields.Columns.LayerFeatureServiceItemID.EQ(psql.Arg(o.LayerFeatureServiceItemID))), sm.Where(ArcgisLayerFields.Columns.LayerIndex.EQ(psql.Arg(o.LayerIndex))), sm.Where(ArcgisLayerFields.Columns.Name.EQ(psql.Arg(o.LayerFieldName))), + )...) 
+} + +func (os ArcgisParcelMappingSlice) LayerField(mods ...bob.Mod[*dialect.SelectQuery]) ArcgisLayerFieldsQuery { + pkLayerFeatureServiceItemID := make(pgtypes.Array[string], 0, len(os)) + + pkLayerIndex := make(pgtypes.Array[int32], 0, len(os)) + + pkLayerFieldName := make(pgtypes.Array[string], 0, len(os)) + for _, o := range os { + if o == nil { + continue + } + pkLayerFeatureServiceItemID = append(pkLayerFeatureServiceItemID, o.LayerFeatureServiceItemID) + pkLayerIndex = append(pkLayerIndex, o.LayerIndex) + pkLayerFieldName = append(pkLayerFieldName, o.LayerFieldName) + } + PKArgExpr := psql.Select(sm.Columns( + psql.F("unnest", psql.Cast(psql.Arg(pkLayerFeatureServiceItemID), "text[]")), + psql.F("unnest", psql.Cast(psql.Arg(pkLayerIndex), "integer[]")), + psql.F("unnest", psql.Cast(psql.Arg(pkLayerFieldName), "text[]")), + )) + + return ArcgisLayerFields.Query(append(mods, + sm.Where(psql.Group(ArcgisLayerFields.Columns.LayerFeatureServiceItemID, ArcgisLayerFields.Columns.LayerIndex, ArcgisLayerFields.Columns.Name).OP("IN", PKArgExpr)), + )...) +} + +// Organization starts a query for related objects on organization +func (o *ArcgisParcelMapping) Organization(mods ...bob.Mod[*dialect.SelectQuery]) OrganizationsQuery { + return Organizations.Query(append(mods, + sm.Where(Organizations.Columns.ID.EQ(psql.Arg(o.OrganizationID))), + )...) +} + +func (os ArcgisParcelMappingSlice) Organization(mods ...bob.Mod[*dialect.SelectQuery]) OrganizationsQuery { + pkOrganizationID := make(pgtypes.Array[int32], 0, len(os)) + for _, o := range os { + if o == nil { + continue + } + pkOrganizationID = append(pkOrganizationID, o.OrganizationID) + } + PKArgExpr := psql.Select(sm.Columns( + psql.F("unnest", psql.Cast(psql.Arg(pkOrganizationID), "integer[]")), + )) + + return Organizations.Query(append(mods, + sm.Where(psql.Group(Organizations.Columns.ID).OP("IN", PKArgExpr)), + )...) 
+} + +func attachArcgisParcelMappingLayerField0(ctx context.Context, exec bob.Executor, count int, arcgisParcelMapping0 *ArcgisParcelMapping, arcgisLayerField1 *ArcgisLayerField) (*ArcgisParcelMapping, error) { + setter := &ArcgisParcelMappingSetter{ + LayerFeatureServiceItemID: omit.From(arcgisLayerField1.LayerFeatureServiceItemID), + LayerIndex: omit.From(arcgisLayerField1.LayerIndex), + LayerFieldName: omit.From(arcgisLayerField1.Name), + } + + err := arcgisParcelMapping0.Update(ctx, exec, setter) + if err != nil { + return nil, fmt.Errorf("attachArcgisParcelMappingLayerField0: %w", err) + } + + return arcgisParcelMapping0, nil +} + +func (arcgisParcelMapping0 *ArcgisParcelMapping) InsertLayerField(ctx context.Context, exec bob.Executor, related *ArcgisLayerFieldSetter) error { + var err error + + arcgisLayerField1, err := ArcgisLayerFields.Insert(related).One(ctx, exec) + if err != nil { + return fmt.Errorf("inserting related objects: %w", err) + } + + _, err = attachArcgisParcelMappingLayerField0(ctx, exec, 1, arcgisParcelMapping0, arcgisLayerField1) + if err != nil { + return err + } + + arcgisParcelMapping0.R.LayerField = arcgisLayerField1 + + arcgisLayerField1.R.ParcelMappings = append(arcgisLayerField1.R.ParcelMappings, arcgisParcelMapping0) + + return nil +} + +func (arcgisParcelMapping0 *ArcgisParcelMapping) AttachLayerField(ctx context.Context, exec bob.Executor, arcgisLayerField1 *ArcgisLayerField) error { + var err error + + _, err = attachArcgisParcelMappingLayerField0(ctx, exec, 1, arcgisParcelMapping0, arcgisLayerField1) + if err != nil { + return err + } + + arcgisParcelMapping0.R.LayerField = arcgisLayerField1 + + arcgisLayerField1.R.ParcelMappings = append(arcgisLayerField1.R.ParcelMappings, arcgisParcelMapping0) + + return nil +} + +func attachArcgisParcelMappingOrganization0(ctx context.Context, exec bob.Executor, count int, arcgisParcelMapping0 *ArcgisParcelMapping, organization1 *Organization) (*ArcgisParcelMapping, error) { + setter := 
&ArcgisParcelMappingSetter{ + OrganizationID: omit.From(organization1.ID), + } + + err := arcgisParcelMapping0.Update(ctx, exec, setter) + if err != nil { + return nil, fmt.Errorf("attachArcgisParcelMappingOrganization0: %w", err) + } + + return arcgisParcelMapping0, nil +} + +func (arcgisParcelMapping0 *ArcgisParcelMapping) InsertOrganization(ctx context.Context, exec bob.Executor, related *OrganizationSetter) error { + var err error + + organization1, err := Organizations.Insert(related).One(ctx, exec) + if err != nil { + return fmt.Errorf("inserting related objects: %w", err) + } + + _, err = attachArcgisParcelMappingOrganization0(ctx, exec, 1, arcgisParcelMapping0, organization1) + if err != nil { + return err + } + + arcgisParcelMapping0.R.Organization = organization1 + + organization1.R.ParcelMappings = append(organization1.R.ParcelMappings, arcgisParcelMapping0) + + return nil +} + +func (arcgisParcelMapping0 *ArcgisParcelMapping) AttachOrganization(ctx context.Context, exec bob.Executor, organization1 *Organization) error { + var err error + + _, err = attachArcgisParcelMappingOrganization0(ctx, exec, 1, arcgisParcelMapping0, organization1) + if err != nil { + return err + } + + arcgisParcelMapping0.R.Organization = organization1 + + organization1.R.ParcelMappings = append(organization1.R.ParcelMappings, arcgisParcelMapping0) + + return nil +} + +type arcgisParcelMappingWhere[Q psql.Filterable] struct { + Destination psql.WhereMod[Q, enums.ArcgisMappingdestinationparcel] + LayerFeatureServiceItemID psql.WhereMod[Q, string] + LayerIndex psql.WhereMod[Q, int32] + LayerFieldName psql.WhereMod[Q, string] + OrganizationID psql.WhereMod[Q, int32] +} + +func (arcgisParcelMappingWhere[Q]) AliasedAs(alias string) arcgisParcelMappingWhere[Q] { + return buildArcgisParcelMappingWhere[Q](buildArcgisParcelMappingColumns(alias)) +} + +func buildArcgisParcelMappingWhere[Q psql.Filterable](cols arcgisParcelMappingColumns) arcgisParcelMappingWhere[Q] { + return 
arcgisParcelMappingWhere[Q]{ + Destination: psql.Where[Q, enums.ArcgisMappingdestinationparcel](cols.Destination), + LayerFeatureServiceItemID: psql.Where[Q, string](cols.LayerFeatureServiceItemID), + LayerIndex: psql.Where[Q, int32](cols.LayerIndex), + LayerFieldName: psql.Where[Q, string](cols.LayerFieldName), + OrganizationID: psql.Where[Q, int32](cols.OrganizationID), + } +} + +func (o *ArcgisParcelMapping) Preload(name string, retrieved any) error { + if o == nil { + return nil + } + + switch name { + case "LayerField": + rel, ok := retrieved.(*ArcgisLayerField) + if !ok { + return fmt.Errorf("arcgisParcelMapping cannot load %T as %q", retrieved, name) + } + + o.R.LayerField = rel + + if rel != nil { + rel.R.ParcelMappings = ArcgisParcelMappingSlice{o} + } + return nil + case "Organization": + rel, ok := retrieved.(*Organization) + if !ok { + return fmt.Errorf("arcgisParcelMapping cannot load %T as %q", retrieved, name) + } + + o.R.Organization = rel + + if rel != nil { + rel.R.ParcelMappings = ArcgisParcelMappingSlice{o} + } + return nil + default: + return fmt.Errorf("arcgisParcelMapping has no relationship %q", name) + } +} + +type arcgisParcelMappingPreloader struct { + LayerField func(...psql.PreloadOption) psql.Preloader + Organization func(...psql.PreloadOption) psql.Preloader +} + +func buildArcgisParcelMappingPreloader() arcgisParcelMappingPreloader { + return arcgisParcelMappingPreloader{ + LayerField: func(opts ...psql.PreloadOption) psql.Preloader { + return psql.Preload[*ArcgisLayerField, ArcgisLayerFieldSlice](psql.PreloadRel{ + Name: "LayerField", + Sides: []psql.PreloadSide{ + { + From: ArcgisParcelMappings, + To: ArcgisLayerFields, + FromColumns: []string{"layer_feature_service_item_id", "layer_index", "layer_field_name"}, + ToColumns: []string{"layer_feature_service_item_id", "layer_index", "name"}, + }, + }, + }, ArcgisLayerFields.Columns.Names(), opts...) 
+ }, + Organization: func(opts ...psql.PreloadOption) psql.Preloader { + return psql.Preload[*Organization, OrganizationSlice](psql.PreloadRel{ + Name: "Organization", + Sides: []psql.PreloadSide{ + { + From: ArcgisParcelMappings, + To: Organizations, + FromColumns: []string{"organization_id"}, + ToColumns: []string{"id"}, + }, + }, + }, Organizations.Columns.Names(), opts...) + }, + } +} + +type arcgisParcelMappingThenLoader[Q orm.Loadable] struct { + LayerField func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] + Organization func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] +} + +func buildArcgisParcelMappingThenLoader[Q orm.Loadable]() arcgisParcelMappingThenLoader[Q] { + type LayerFieldLoadInterface interface { + LoadLayerField(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error + } + type OrganizationLoadInterface interface { + LoadOrganization(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error + } + + return arcgisParcelMappingThenLoader[Q]{ + LayerField: thenLoadBuilder[Q]( + "LayerField", + func(ctx context.Context, exec bob.Executor, retrieved LayerFieldLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { + return retrieved.LoadLayerField(ctx, exec, mods...) + }, + ), + Organization: thenLoadBuilder[Q]( + "Organization", + func(ctx context.Context, exec bob.Executor, retrieved OrganizationLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { + return retrieved.LoadOrganization(ctx, exec, mods...) 
+ }, + ), + } +} + +// LoadLayerField loads the arcgisParcelMapping's LayerField into the .R struct +func (o *ArcgisParcelMapping) LoadLayerField(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if o == nil { + return nil + } + + // Reset the relationship + o.R.LayerField = nil + + related, err := o.LayerField(mods...).One(ctx, exec) + if err != nil { + return err + } + + related.R.ParcelMappings = ArcgisParcelMappingSlice{o} + + o.R.LayerField = related + return nil +} + +// LoadLayerField loads the arcgisParcelMapping's LayerField into the .R struct +func (os ArcgisParcelMappingSlice) LoadLayerField(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if len(os) == 0 { + return nil + } + + arcgisLayerFields, err := os.LayerField(mods...).All(ctx, exec) + if err != nil { + return err + } + + for _, o := range os { + if o == nil { + continue + } + + for _, rel := range arcgisLayerFields { + + if !(o.LayerFeatureServiceItemID == rel.LayerFeatureServiceItemID) { + continue + } + + if !(o.LayerIndex == rel.LayerIndex) { + continue + } + + if !(o.LayerFieldName == rel.Name) { + continue + } + + rel.R.ParcelMappings = append(rel.R.ParcelMappings, o) + + o.R.LayerField = rel + break + } + } + + return nil +} + +// LoadOrganization loads the arcgisParcelMapping's Organization into the .R struct +func (o *ArcgisParcelMapping) LoadOrganization(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if o == nil { + return nil + } + + // Reset the relationship + o.R.Organization = nil + + related, err := o.Organization(mods...).One(ctx, exec) + if err != nil { + return err + } + + related.R.ParcelMappings = ArcgisParcelMappingSlice{o} + + o.R.Organization = related + return nil +} + +// LoadOrganization loads the arcgisParcelMapping's Organization into the .R struct +func (os ArcgisParcelMappingSlice) LoadOrganization(ctx context.Context, exec bob.Executor, mods 
...bob.Mod[*dialect.SelectQuery]) error { + if len(os) == 0 { + return nil + } + + organizations, err := os.Organization(mods...).All(ctx, exec) + if err != nil { + return err + } + + for _, o := range os { + if o == nil { + continue + } + + for _, rel := range organizations { + + if !(o.OrganizationID == rel.ID) { + continue + } + + rel.R.ParcelMappings = append(rel.R.ParcelMappings, o) + + o.R.Organization = rel + break + } + } + + return nil +} + +type arcgisParcelMappingJoins[Q dialect.Joinable] struct { + typ string + LayerField modAs[Q, arcgisLayerFieldColumns] + Organization modAs[Q, organizationColumns] +} + +func (j arcgisParcelMappingJoins[Q]) aliasedAs(alias string) arcgisParcelMappingJoins[Q] { + return buildArcgisParcelMappingJoins[Q](buildArcgisParcelMappingColumns(alias), j.typ) +} + +func buildArcgisParcelMappingJoins[Q dialect.Joinable](cols arcgisParcelMappingColumns, typ string) arcgisParcelMappingJoins[Q] { + return arcgisParcelMappingJoins[Q]{ + typ: typ, + LayerField: modAs[Q, arcgisLayerFieldColumns]{ + c: ArcgisLayerFields.Columns, + f: func(to arcgisLayerFieldColumns) bob.Mod[Q] { + mods := make(mods.QueryMods[Q], 0, 1) + + { + mods = append(mods, dialect.Join[Q](typ, ArcgisLayerFields.Name().As(to.Alias())).On( + to.LayerFeatureServiceItemID.EQ(cols.LayerFeatureServiceItemID), to.LayerIndex.EQ(cols.LayerIndex), to.Name.EQ(cols.LayerFieldName), + )) + } + + return mods + }, + }, + Organization: modAs[Q, organizationColumns]{ + c: Organizations.Columns, + f: func(to organizationColumns) bob.Mod[Q] { + mods := make(mods.QueryMods[Q], 0, 1) + + { + mods = append(mods, dialect.Join[Q](typ, Organizations.Name().As(to.Alias())).On( + to.ID.EQ(cols.OrganizationID), + )) + } + + return mods + }, + }, + } +} diff --git a/db/models/bob_counts.bob.go b/db/models/bob_counts.bob.go index 8071c91d..5f015649 100644 --- a/db/models/bob_counts.bob.go +++ b/db/models/bob_counts.bob.go @@ -21,6 +21,9 @@ var ( ) type preloadCounts struct { + 
ArcgisFeatureService arcgisFeatureServiceCountPreloader + ArcgisLayer arcgisLayerCountPreloader + ArcgisLayerField arcgisLayerFieldCountPreloader ArcgisUser arcgisuserCountPreloader CommsEmailContact commsEmailContactCountPreloader CommsEmailTemplate commsEmailTemplateCountPreloader @@ -39,6 +42,9 @@ type preloadCounts struct { func getPreloadCount() preloadCounts { return preloadCounts{ + ArcgisFeatureService: buildArcgisFeatureServiceCountPreloader(), + ArcgisLayer: buildArcgisLayerCountPreloader(), + ArcgisLayerField: buildArcgisLayerFieldCountPreloader(), ArcgisUser: buildArcgisUserCountPreloader(), CommsEmailContact: buildCommsEmailContactCountPreloader(), CommsEmailTemplate: buildCommsEmailTemplateCountPreloader(), @@ -57,6 +63,9 @@ func getPreloadCount() preloadCounts { } type thenLoadCounts[Q orm.Loadable] struct { + ArcgisFeatureService arcgisFeatureServiceCountThenLoader[Q] + ArcgisLayer arcgisLayerCountThenLoader[Q] + ArcgisLayerField arcgisLayerFieldCountThenLoader[Q] ArcgisUser arcgisuserCountThenLoader[Q] CommsEmailContact commsEmailContactCountThenLoader[Q] CommsEmailTemplate commsEmailTemplateCountThenLoader[Q] @@ -75,6 +84,9 @@ type thenLoadCounts[Q orm.Loadable] struct { func getThenLoadCount[Q orm.Loadable]() thenLoadCounts[Q] { return thenLoadCounts[Q]{ + ArcgisFeatureService: buildArcgisFeatureServiceCountThenLoader[Q](), + ArcgisLayer: buildArcgisLayerCountThenLoader[Q](), + ArcgisLayerField: buildArcgisLayerFieldCountThenLoader[Q](), ArcgisUser: buildArcgisUserCountThenLoader[Q](), CommsEmailContact: buildCommsEmailContactCountThenLoader[Q](), CommsEmailTemplate: buildCommsEmailTemplateCountThenLoader[Q](), diff --git a/db/models/bob_joins.bob.go b/db/models/bob_joins.bob.go index 26ad0ce8..3ae21741 100644 --- a/db/models/bob_joins.bob.go +++ b/db/models/bob_joins.bob.go @@ -32,6 +32,12 @@ func (j joinSet[Q]) AliasedAs(alias string) joinSet[Q] { } type joins[Q dialect.Joinable] struct { + Addresses joinSet[addressJoins[Q]] + 
ArcgisAddressMappings joinSet[arcgisAddressMappingJoins[Q]] + ArcgisFeatureServices joinSet[arcgisFeatureServiceJoins[Q]] + ArcgisLayers joinSet[arcgisLayerJoins[Q]] + ArcgisLayerFields joinSet[arcgisLayerFieldJoins[Q]] + ArcgisParcelMappings joinSet[arcgisParcelMappingJoins[Q]] ArcgisUsers joinSet[arcgisuserJoins[Q]] ArcgisUserPrivileges joinSet[arcgisUserPrivilegeJoins[Q]] CommsEmailContacts joinSet[commsEmailContactJoins[Q]] @@ -85,6 +91,7 @@ type joins[Q dialect.Joinable] struct { Notifications joinSet[notificationJoins[Q]] OauthTokens joinSet[oauthTokenJoins[Q]] Organizations joinSet[organizationJoins[Q]] + Pools joinSet[poolJoins[Q]] PublicreportImages joinSet[publicreportImageJoins[Q]] PublicreportImageExifs joinSet[publicreportImageExifJoins[Q]] PublicreportNotifyEmailNuisances joinSet[publicreportNotifyEmailNuisanceJoins[Q]] @@ -99,6 +106,7 @@ type joins[Q dialect.Joinable] struct { PublicreportQuickImages joinSet[publicreportQuickImageJoins[Q]] PublicreportSubscribeEmails joinSet[publicreportSubscribeEmailJoins[Q]] PublicreportSubscribePhones joinSet[publicreportSubscribePhoneJoins[Q]] + Sites joinSet[siteJoins[Q]] Users joinSet[userJoins[Q]] } @@ -112,6 +120,12 @@ func buildJoinSet[Q interface{ aliasedAs(string) Q }, C any, F func(C, string) Q func getJoins[Q dialect.Joinable]() joins[Q] { return joins[Q]{ + Addresses: buildJoinSet[addressJoins[Q]](Addresses.Columns, buildAddressJoins), + ArcgisAddressMappings: buildJoinSet[arcgisAddressMappingJoins[Q]](ArcgisAddressMappings.Columns, buildArcgisAddressMappingJoins), + ArcgisFeatureServices: buildJoinSet[arcgisFeatureServiceJoins[Q]](ArcgisFeatureServices.Columns, buildArcgisFeatureServiceJoins), + ArcgisLayers: buildJoinSet[arcgisLayerJoins[Q]](ArcgisLayers.Columns, buildArcgisLayerJoins), + ArcgisLayerFields: buildJoinSet[arcgisLayerFieldJoins[Q]](ArcgisLayerFields.Columns, buildArcgisLayerFieldJoins), + ArcgisParcelMappings: buildJoinSet[arcgisParcelMappingJoins[Q]](ArcgisParcelMappings.Columns, 
buildArcgisParcelMappingJoins), ArcgisUsers: buildJoinSet[arcgisuserJoins[Q]](ArcgisUsers.Columns, buildArcgisUserJoins), ArcgisUserPrivileges: buildJoinSet[arcgisUserPrivilegeJoins[Q]](ArcgisUserPrivileges.Columns, buildArcgisUserPrivilegeJoins), CommsEmailContacts: buildJoinSet[commsEmailContactJoins[Q]](CommsEmailContacts.Columns, buildCommsEmailContactJoins), @@ -165,6 +179,7 @@ func getJoins[Q dialect.Joinable]() joins[Q] { Notifications: buildJoinSet[notificationJoins[Q]](Notifications.Columns, buildNotificationJoins), OauthTokens: buildJoinSet[oauthTokenJoins[Q]](OauthTokens.Columns, buildOauthTokenJoins), Organizations: buildJoinSet[organizationJoins[Q]](Organizations.Columns, buildOrganizationJoins), + Pools: buildJoinSet[poolJoins[Q]](Pools.Columns, buildPoolJoins), PublicreportImages: buildJoinSet[publicreportImageJoins[Q]](PublicreportImages.Columns, buildPublicreportImageJoins), PublicreportImageExifs: buildJoinSet[publicreportImageExifJoins[Q]](PublicreportImageExifs.Columns, buildPublicreportImageExifJoins), PublicreportNotifyEmailNuisances: buildJoinSet[publicreportNotifyEmailNuisanceJoins[Q]](PublicreportNotifyEmailNuisances.Columns, buildPublicreportNotifyEmailNuisanceJoins), @@ -179,6 +194,7 @@ func getJoins[Q dialect.Joinable]() joins[Q] { PublicreportQuickImages: buildJoinSet[publicreportQuickImageJoins[Q]](PublicreportQuickImages.Columns, buildPublicreportQuickImageJoins), PublicreportSubscribeEmails: buildJoinSet[publicreportSubscribeEmailJoins[Q]](PublicreportSubscribeEmails.Columns, buildPublicreportSubscribeEmailJoins), PublicreportSubscribePhones: buildJoinSet[publicreportSubscribePhoneJoins[Q]](PublicreportSubscribePhones.Columns, buildPublicreportSubscribePhoneJoins), + Sites: buildJoinSet[siteJoins[Q]](Sites.Columns, buildSiteJoins), Users: buildJoinSet[userJoins[Q]](Users.Columns, buildUserJoins), } } diff --git a/db/models/bob_loaders.bob.go b/db/models/bob_loaders.bob.go index 11a9ad25..ab8cae6f 100644 --- 
a/db/models/bob_loaders.bob.go +++ b/db/models/bob_loaders.bob.go @@ -17,6 +17,12 @@ import ( var Preload = getPreloaders() type preloaders struct { + Address addressPreloader + ArcgisAddressMapping arcgisAddressMappingPreloader + ArcgisFeatureService arcgisFeatureServicePreloader + ArcgisLayer arcgisLayerPreloader + ArcgisLayerField arcgisLayerFieldPreloader + ArcgisParcelMapping arcgisParcelMappingPreloader ArcgisUser arcgisuserPreloader ArcgisUserPrivilege arcgisUserPrivilegePreloader CommsEmailContact commsEmailContactPreloader @@ -70,6 +76,7 @@ type preloaders struct { Notification notificationPreloader OauthToken oauthTokenPreloader Organization organizationPreloader + Pool poolPreloader PublicreportImage publicreportImagePreloader PublicreportImageExif publicreportImageExifPreloader PublicreportNotifyEmailNuisance publicreportNotifyEmailNuisancePreloader @@ -84,11 +91,18 @@ type preloaders struct { PublicreportQuickImage publicreportQuickImagePreloader PublicreportSubscribeEmail publicreportSubscribeEmailPreloader PublicreportSubscribePhone publicreportSubscribePhonePreloader + Site sitePreloader User userPreloader } func getPreloaders() preloaders { return preloaders{ + Address: buildAddressPreloader(), + ArcgisAddressMapping: buildArcgisAddressMappingPreloader(), + ArcgisFeatureService: buildArcgisFeatureServicePreloader(), + ArcgisLayer: buildArcgisLayerPreloader(), + ArcgisLayerField: buildArcgisLayerFieldPreloader(), + ArcgisParcelMapping: buildArcgisParcelMappingPreloader(), ArcgisUser: buildArcgisUserPreloader(), ArcgisUserPrivilege: buildArcgisUserPrivilegePreloader(), CommsEmailContact: buildCommsEmailContactPreloader(), @@ -142,6 +156,7 @@ func getPreloaders() preloaders { Notification: buildNotificationPreloader(), OauthToken: buildOauthTokenPreloader(), Organization: buildOrganizationPreloader(), + Pool: buildPoolPreloader(), PublicreportImage: buildPublicreportImagePreloader(), PublicreportImageExif: buildPublicreportImageExifPreloader(), 
PublicreportNotifyEmailNuisance: buildPublicreportNotifyEmailNuisancePreloader(), @@ -156,6 +171,7 @@ func getPreloaders() preloaders { PublicreportQuickImage: buildPublicreportQuickImagePreloader(), PublicreportSubscribeEmail: buildPublicreportSubscribeEmailPreloader(), PublicreportSubscribePhone: buildPublicreportSubscribePhonePreloader(), + Site: buildSitePreloader(), User: buildUserPreloader(), } } @@ -167,6 +183,12 @@ var ( ) type thenLoaders[Q orm.Loadable] struct { + Address addressThenLoader[Q] + ArcgisAddressMapping arcgisAddressMappingThenLoader[Q] + ArcgisFeatureService arcgisFeatureServiceThenLoader[Q] + ArcgisLayer arcgisLayerThenLoader[Q] + ArcgisLayerField arcgisLayerFieldThenLoader[Q] + ArcgisParcelMapping arcgisParcelMappingThenLoader[Q] ArcgisUser arcgisuserThenLoader[Q] ArcgisUserPrivilege arcgisUserPrivilegeThenLoader[Q] CommsEmailContact commsEmailContactThenLoader[Q] @@ -220,6 +242,7 @@ type thenLoaders[Q orm.Loadable] struct { Notification notificationThenLoader[Q] OauthToken oauthTokenThenLoader[Q] Organization organizationThenLoader[Q] + Pool poolThenLoader[Q] PublicreportImage publicreportImageThenLoader[Q] PublicreportImageExif publicreportImageExifThenLoader[Q] PublicreportNotifyEmailNuisance publicreportNotifyEmailNuisanceThenLoader[Q] @@ -234,11 +257,18 @@ type thenLoaders[Q orm.Loadable] struct { PublicreportQuickImage publicreportQuickImageThenLoader[Q] PublicreportSubscribeEmail publicreportSubscribeEmailThenLoader[Q] PublicreportSubscribePhone publicreportSubscribePhoneThenLoader[Q] + Site siteThenLoader[Q] User userThenLoader[Q] } func getThenLoaders[Q orm.Loadable]() thenLoaders[Q] { return thenLoaders[Q]{ + Address: buildAddressThenLoader[Q](), + ArcgisAddressMapping: buildArcgisAddressMappingThenLoader[Q](), + ArcgisFeatureService: buildArcgisFeatureServiceThenLoader[Q](), + ArcgisLayer: buildArcgisLayerThenLoader[Q](), + ArcgisLayerField: buildArcgisLayerFieldThenLoader[Q](), + ArcgisParcelMapping: 
buildArcgisParcelMappingThenLoader[Q](), ArcgisUser: buildArcgisUserThenLoader[Q](), ArcgisUserPrivilege: buildArcgisUserPrivilegeThenLoader[Q](), CommsEmailContact: buildCommsEmailContactThenLoader[Q](), @@ -292,6 +322,7 @@ func getThenLoaders[Q orm.Loadable]() thenLoaders[Q] { Notification: buildNotificationThenLoader[Q](), OauthToken: buildOauthTokenThenLoader[Q](), Organization: buildOrganizationThenLoader[Q](), + Pool: buildPoolThenLoader[Q](), PublicreportImage: buildPublicreportImageThenLoader[Q](), PublicreportImageExif: buildPublicreportImageExifThenLoader[Q](), PublicreportNotifyEmailNuisance: buildPublicreportNotifyEmailNuisanceThenLoader[Q](), @@ -306,6 +337,7 @@ func getThenLoaders[Q orm.Loadable]() thenLoaders[Q] { PublicreportQuickImage: buildPublicreportQuickImageThenLoader[Q](), PublicreportSubscribeEmail: buildPublicreportSubscribeEmailThenLoader[Q](), PublicreportSubscribePhone: buildPublicreportSubscribePhoneThenLoader[Q](), + Site: buildSiteThenLoader[Q](), User: buildUserThenLoader[Q](), } } diff --git a/db/models/bob_where.bob.go b/db/models/bob_where.bob.go index b9582fe8..1c80ddd9 100644 --- a/db/models/bob_where.bob.go +++ b/db/models/bob_where.bob.go @@ -17,6 +17,12 @@ var ( ) func Where[Q psql.Filterable]() struct { + Addresses addressWhere[Q] + ArcgisAddressMappings arcgisAddressMappingWhere[Q] + ArcgisFeatureServices arcgisFeatureServiceWhere[Q] + ArcgisLayers arcgisLayerWhere[Q] + ArcgisLayerFields arcgisLayerFieldWhere[Q] + ArcgisParcelMappings arcgisParcelMappingWhere[Q] ArcgisUsers arcgisuserWhere[Q] ArcgisUserPrivileges arcgisUserPrivilegeWhere[Q] CommsEmailContacts commsEmailContactWhere[Q] @@ -73,6 +79,8 @@ func Where[Q psql.Filterable]() struct { Notifications notificationWhere[Q] OauthTokens oauthTokenWhere[Q] Organizations organizationWhere[Q] + Parcels parcelWhere[Q] + Pools poolWhere[Q] PublicreportImages publicreportImageWhere[Q] PublicreportImageExifs publicreportImageExifWhere[Q] PublicreportNotifyEmailNuisances 
publicreportNotifyEmailNuisanceWhere[Q] @@ -91,10 +99,17 @@ func Where[Q psql.Filterable]() struct { RasterColumns rasterColumnWhere[Q] RasterOverviews rasterOverviewWhere[Q] Sessions sessionWhere[Q] + Sites siteWhere[Q] SpatialRefSys spatialRefSyWhere[Q] Users userWhere[Q] } { return struct { + Addresses addressWhere[Q] + ArcgisAddressMappings arcgisAddressMappingWhere[Q] + ArcgisFeatureServices arcgisFeatureServiceWhere[Q] + ArcgisLayers arcgisLayerWhere[Q] + ArcgisLayerFields arcgisLayerFieldWhere[Q] + ArcgisParcelMappings arcgisParcelMappingWhere[Q] ArcgisUsers arcgisuserWhere[Q] ArcgisUserPrivileges arcgisUserPrivilegeWhere[Q] CommsEmailContacts commsEmailContactWhere[Q] @@ -151,6 +166,8 @@ func Where[Q psql.Filterable]() struct { Notifications notificationWhere[Q] OauthTokens oauthTokenWhere[Q] Organizations organizationWhere[Q] + Parcels parcelWhere[Q] + Pools poolWhere[Q] PublicreportImages publicreportImageWhere[Q] PublicreportImageExifs publicreportImageExifWhere[Q] PublicreportNotifyEmailNuisances publicreportNotifyEmailNuisanceWhere[Q] @@ -169,9 +186,16 @@ func Where[Q psql.Filterable]() struct { RasterColumns rasterColumnWhere[Q] RasterOverviews rasterOverviewWhere[Q] Sessions sessionWhere[Q] + Sites siteWhere[Q] SpatialRefSys spatialRefSyWhere[Q] Users userWhere[Q] }{ + Addresses: buildAddressWhere[Q](Addresses.Columns), + ArcgisAddressMappings: buildArcgisAddressMappingWhere[Q](ArcgisAddressMappings.Columns), + ArcgisFeatureServices: buildArcgisFeatureServiceWhere[Q](ArcgisFeatureServices.Columns), + ArcgisLayers: buildArcgisLayerWhere[Q](ArcgisLayers.Columns), + ArcgisLayerFields: buildArcgisLayerFieldWhere[Q](ArcgisLayerFields.Columns), + ArcgisParcelMappings: buildArcgisParcelMappingWhere[Q](ArcgisParcelMappings.Columns), ArcgisUsers: buildArcgisUserWhere[Q](ArcgisUsers.Columns), ArcgisUserPrivileges: buildArcgisUserPrivilegeWhere[Q](ArcgisUserPrivileges.Columns), CommsEmailContacts: buildCommsEmailContactWhere[Q](CommsEmailContacts.Columns), @@ 
-228,6 +252,8 @@ func Where[Q psql.Filterable]() struct { Notifications: buildNotificationWhere[Q](Notifications.Columns), OauthTokens: buildOauthTokenWhere[Q](OauthTokens.Columns), Organizations: buildOrganizationWhere[Q](Organizations.Columns), + Parcels: buildParcelWhere[Q](Parcels.Columns), + Pools: buildPoolWhere[Q](Pools.Columns), PublicreportImages: buildPublicreportImageWhere[Q](PublicreportImages.Columns), PublicreportImageExifs: buildPublicreportImageExifWhere[Q](PublicreportImageExifs.Columns), PublicreportNotifyEmailNuisances: buildPublicreportNotifyEmailNuisanceWhere[Q](PublicreportNotifyEmailNuisances.Columns), @@ -246,6 +272,7 @@ func Where[Q psql.Filterable]() struct { RasterColumns: buildRasterColumnWhere[Q](RasterColumns.Columns), RasterOverviews: buildRasterOverviewWhere[Q](RasterOverviews.Columns), Sessions: buildSessionWhere[Q](Sessions.Columns), + Sites: buildSiteWhere[Q](Sites.Columns), SpatialRefSys: buildSpatialRefSyWhere[Q](SpatialRefSys.Columns), Users: buildUserWhere[Q](Users.Columns), } diff --git a/db/models/fileupload.file.bob.go b/db/models/fileupload.file.bob.go index c95a797b..91b9b39f 100644 --- a/db/models/fileupload.file.bob.go +++ b/db/models/fileupload.file.bob.go @@ -60,6 +60,7 @@ type fileuploadFileR struct { ErrorFiles FileuploadErrorFileSlice // fileupload.error_file.error_file_file_id_fkey CreatorUser *User // fileupload.file.file_creator_id_fkey Organization *Organization // fileupload.file.file_organization_id_fkey + Sites SiteSlice // site.site_file_id_fkey } func buildFileuploadFileColumns(alias string) fileuploadFileColumns { @@ -658,6 +659,30 @@ func (os FileuploadFileSlice) Organization(mods ...bob.Mod[*dialect.SelectQuery] )...) } +// Sites starts a query for related objects on site +func (o *FileuploadFile) Sites(mods ...bob.Mod[*dialect.SelectQuery]) SitesQuery { + return Sites.Query(append(mods, + sm.Where(Sites.Columns.FileID.EQ(psql.Arg(o.ID))), + )...) 
+} + +func (os FileuploadFileSlice) Sites(mods ...bob.Mod[*dialect.SelectQuery]) SitesQuery { + pkID := make(pgtypes.Array[int32], 0, len(os)) + for _, o := range os { + if o == nil { + continue + } + pkID = append(pkID, o.ID) + } + PKArgExpr := psql.Select(sm.Columns( + psql.F("unnest", psql.Cast(psql.Arg(pkID), "integer[]")), + )) + + return Sites.Query(append(mods, + sm.Where(psql.Group(Sites.Columns.FileID).OP("IN", PKArgExpr)), + )...) +} + func insertFileuploadFileCSV0(ctx context.Context, exec bob.Executor, fileuploadCSV1 *FileuploadCSVSetter, fileuploadFile0 *FileuploadFile) (*FileuploadCSV, error) { fileuploadCSV1.FileID = omit.From(fileuploadFile0.ID) @@ -876,6 +901,74 @@ func (fileuploadFile0 *FileuploadFile) AttachOrganization(ctx context.Context, e return nil } +func insertFileuploadFileSites0(ctx context.Context, exec bob.Executor, sites1 []*SiteSetter, fileuploadFile0 *FileuploadFile) (SiteSlice, error) { + for i := range sites1 { + sites1[i].FileID = omitnull.From(fileuploadFile0.ID) + } + + ret, err := Sites.Insert(bob.ToMods(sites1...)).All(ctx, exec) + if err != nil { + return ret, fmt.Errorf("insertFileuploadFileSites0: %w", err) + } + + return ret, nil +} + +func attachFileuploadFileSites0(ctx context.Context, exec bob.Executor, count int, sites1 SiteSlice, fileuploadFile0 *FileuploadFile) (SiteSlice, error) { + setter := &SiteSetter{ + FileID: omitnull.From(fileuploadFile0.ID), + } + + err := sites1.UpdateAll(ctx, exec, *setter) + if err != nil { + return nil, fmt.Errorf("attachFileuploadFileSites0: %w", err) + } + + return sites1, nil +} + +func (fileuploadFile0 *FileuploadFile) InsertSites(ctx context.Context, exec bob.Executor, related ...*SiteSetter) error { + if len(related) == 0 { + return nil + } + + var err error + + sites1, err := insertFileuploadFileSites0(ctx, exec, related, fileuploadFile0) + if err != nil { + return err + } + + fileuploadFile0.R.Sites = append(fileuploadFile0.R.Sites, sites1...) 
+ + for _, rel := range sites1 { + rel.R.File = fileuploadFile0 + } + return nil +} + +func (fileuploadFile0 *FileuploadFile) AttachSites(ctx context.Context, exec bob.Executor, related ...*Site) error { + if len(related) == 0 { + return nil + } + + var err error + sites1 := SiteSlice(related) + + _, err = attachFileuploadFileSites0(ctx, exec, len(related), sites1, fileuploadFile0) + if err != nil { + return err + } + + fileuploadFile0.R.Sites = append(fileuploadFile0.R.Sites, sites1...) + + for _, rel := range related { + rel.R.File = fileuploadFile0 + } + + return nil +} + type fileuploadFileWhere[Q psql.Filterable] struct { ID psql.WhereMod[Q, int32] ContentType psql.WhereMod[Q, string] @@ -964,6 +1057,20 @@ func (o *FileuploadFile) Preload(name string, retrieved any) error { rel.R.Files = FileuploadFileSlice{o} } return nil + case "Sites": + rels, ok := retrieved.(SiteSlice) + if !ok { + return fmt.Errorf("fileuploadFile cannot load %T as %q", retrieved, name) + } + + o.R.Sites = rels + + for _, rel := range rels { + if rel != nil { + rel.R.File = o + } + } + return nil default: return fmt.Errorf("fileuploadFile has no relationship %q", name) } @@ -1024,6 +1131,7 @@ type fileuploadFileThenLoader[Q orm.Loadable] struct { ErrorFiles func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] CreatorUser func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] Organization func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] + Sites func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] } func buildFileuploadFileThenLoader[Q orm.Loadable]() fileuploadFileThenLoader[Q] { @@ -1039,6 +1147,9 @@ func buildFileuploadFileThenLoader[Q orm.Loadable]() fileuploadFileThenLoader[Q] type OrganizationLoadInterface interface { LoadOrganization(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error } + type SitesLoadInterface interface { + LoadSites(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error + } return fileuploadFileThenLoader[Q]{ CSV: 
thenLoadBuilder[Q]( @@ -1065,6 +1176,12 @@ func buildFileuploadFileThenLoader[Q orm.Loadable]() fileuploadFileThenLoader[Q] return retrieved.LoadOrganization(ctx, exec, mods...) }, ), + Sites: thenLoadBuilder[Q]( + "Sites", + func(ctx context.Context, exec bob.Executor, retrieved SitesLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { + return retrieved.LoadSites(ctx, exec, mods...) + }, + ), } } @@ -1285,9 +1402,74 @@ func (os FileuploadFileSlice) LoadOrganization(ctx context.Context, exec bob.Exe return nil } +// LoadSites loads the fileuploadFile's Sites into the .R struct +func (o *FileuploadFile) LoadSites(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if o == nil { + return nil + } + + // Reset the relationship + o.R.Sites = nil + + related, err := o.Sites(mods...).All(ctx, exec) + if err != nil { + return err + } + + for _, rel := range related { + rel.R.File = o + } + + o.R.Sites = related + return nil +} + +// LoadSites loads the fileuploadFile's Sites into the .R struct +func (os FileuploadFileSlice) LoadSites(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if len(os) == 0 { + return nil + } + + sites, err := os.Sites(mods...).All(ctx, exec) + if err != nil { + return err + } + + for _, o := range os { + if o == nil { + continue + } + + o.R.Sites = nil + } + + for _, o := range os { + if o == nil { + continue + } + + for _, rel := range sites { + + if !rel.FileID.IsValue() { + continue + } + if !(rel.FileID.IsValue() && o.ID == rel.FileID.MustGet()) { + continue + } + + rel.R.File = o + + o.R.Sites = append(o.R.Sites, rel) + } + } + + return nil +} + // fileuploadFileC is where relationship counts are stored. 
type fileuploadFileC struct { ErrorFiles *int64 + Sites *int64 } // PreloadCount sets a count in the C struct by name @@ -1299,12 +1481,15 @@ func (o *FileuploadFile) PreloadCount(name string, count int64) error { switch name { case "ErrorFiles": o.C.ErrorFiles = &count + case "Sites": + o.C.Sites = &count } return nil } type fileuploadFileCountPreloader struct { ErrorFiles func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader + Sites func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader } func buildFileuploadFileCountPreloader() fileuploadFileCountPreloader { @@ -1326,17 +1511,38 @@ func buildFileuploadFileCountPreloader() fileuploadFileCountPreloader { return psql.Group(psql.Select(subqueryMods...).Expression) }) }, + Sites: func(mods ...bob.Mod[*dialect.SelectQuery]) psql.Preloader { + return countPreloader[*FileuploadFile]("Sites", func(parent string) bob.Expression { + // Build a correlated subquery: (SELECT COUNT(*) FROM related WHERE fk = parent.pk) + if parent == "" { + parent = FileuploadFiles.Alias() + } + + subqueryMods := []bob.Mod[*dialect.SelectQuery]{ + sm.Columns(psql.Raw("count(*)")), + + sm.From(Sites.Name()), + sm.Where(psql.Quote(Sites.Alias(), "file_id").EQ(psql.Quote(parent, "id"))), + } + subqueryMods = append(subqueryMods, mods...) 
+ return psql.Group(psql.Select(subqueryMods...).Expression) + }) + }, } } type fileuploadFileCountThenLoader[Q orm.Loadable] struct { ErrorFiles func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] + Sites func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] } func buildFileuploadFileCountThenLoader[Q orm.Loadable]() fileuploadFileCountThenLoader[Q] { type ErrorFilesCountInterface interface { LoadCountErrorFiles(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error } + type SitesCountInterface interface { + LoadCountSites(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error + } return fileuploadFileCountThenLoader[Q]{ ErrorFiles: countThenLoadBuilder[Q]( @@ -1345,6 +1551,12 @@ func buildFileuploadFileCountThenLoader[Q orm.Loadable]() fileuploadFileCountThe return retrieved.LoadCountErrorFiles(ctx, exec, mods...) }, ), + Sites: countThenLoadBuilder[Q]( + "Sites", + func(ctx context.Context, exec bob.Executor, retrieved SitesCountInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { + return retrieved.LoadCountSites(ctx, exec, mods...) + }, + ), } } @@ -1378,12 +1590,43 @@ func (os FileuploadFileSlice) LoadCountErrorFiles(ctx context.Context, exec bob. 
return nil } +// LoadCountSites loads the count of Sites into the C struct +func (o *FileuploadFile) LoadCountSites(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if o == nil { + return nil + } + + count, err := o.Sites(mods...).Count(ctx, exec) + if err != nil { + return err + } + + o.C.Sites = &count + return nil +} + +// LoadCountSites loads the count of Sites for a slice +func (os FileuploadFileSlice) LoadCountSites(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if len(os) == 0 { + return nil + } + + for _, o := range os { + if err := o.LoadCountSites(ctx, exec, mods...); err != nil { + return err + } + } + + return nil +} + type fileuploadFileJoins[Q dialect.Joinable] struct { typ string CSV modAs[Q, fileuploadCSVColumns] ErrorFiles modAs[Q, fileuploadErrorFileColumns] CreatorUser modAs[Q, userColumns] Organization modAs[Q, organizationColumns] + Sites modAs[Q, siteColumns] } func (j fileuploadFileJoins[Q]) aliasedAs(alias string) fileuploadFileJoins[Q] { @@ -1446,6 +1689,20 @@ func buildFileuploadFileJoins[Q dialect.Joinable](cols fileuploadFileColumns, ty )) } + return mods + }, + }, + Sites: modAs[Q, siteColumns]{ + c: Sites.Columns, + f: func(to siteColumns) bob.Mod[Q] { + mods := make(mods.QueryMods[Q], 0, 1) + + { + mods = append(mods, dialect.Join[Q](typ, Sites.Name().As(to.Alias())).On( + to.FileID.EQ(cols.ID), + )) + } + return mods }, }, diff --git a/db/models/fileupload.pool.bob.go b/db/models/fileupload.pool.bob.go index 3a207249..aca3a1b6 100644 --- a/db/models/fileupload.pool.bob.go +++ b/db/models/fileupload.pool.bob.go @@ -984,7 +984,7 @@ func (fileuploadPool0 *FileuploadPool) InsertCreatorUser(ctx context.Context, ex fileuploadPool0.R.CreatorUser = user1 - user1.R.CreatorPools = append(user1.R.CreatorPools, fileuploadPool0) + user1.R.FileuploadPool = append(user1.R.FileuploadPool, fileuploadPool0) return nil } @@ -999,7 +999,7 @@ func (fileuploadPool0 
*FileuploadPool) AttachCreatorUser(ctx context.Context, ex fileuploadPool0.R.CreatorUser = user1 - user1.R.CreatorPools = append(user1.R.CreatorPools, fileuploadPool0) + user1.R.FileuploadPool = append(user1.R.FileuploadPool, fileuploadPool0) return nil } @@ -1267,7 +1267,7 @@ func (o *FileuploadPool) Preload(name string, retrieved any) error { o.R.CreatorUser = rel if rel != nil { - rel.R.CreatorPools = FileuploadPoolSlice{o} + rel.R.FileuploadPool = FileuploadPoolSlice{o} } return nil case "CSVFileCSV": @@ -1474,7 +1474,7 @@ func (o *FileuploadPool) LoadCreatorUser(ctx context.Context, exec bob.Executor, return err } - related.R.CreatorPools = FileuploadPoolSlice{o} + related.R.FileuploadPool = FileuploadPoolSlice{o} o.R.CreatorUser = related return nil @@ -1502,7 +1502,7 @@ func (os FileuploadPoolSlice) LoadCreatorUser(ctx context.Context, exec bob.Exec continue } - rel.R.CreatorPools = append(rel.R.CreatorPools, o) + rel.R.FileuploadPool = append(rel.R.FileuploadPool, o) o.R.CreatorUser = rel break diff --git a/db/models/organization.bob.go b/db/models/organization.bob.go index 203fd879..2243b8ff 100644 --- a/db/models/organization.bob.go +++ b/db/models/organization.bob.go @@ -76,6 +76,8 @@ type OrganizationsQuery = *psql.ViewQuery[*Organization, OrganizationSlice] // organizationR is where relationships are stored. 
type organizationR struct { + AddressMappings ArcgisAddressMappingSlice // arcgis.address_mapping.address_mapping_organization_id_fkey + ParcelMappings ArcgisParcelMappingSlice // arcgis.parcel_mapping.parcel_mapping_organization_id_fkey EmailContacts CommsEmailContactSlice // district_subscription_email.district_subscription_email_email_contact_address_fkeydistrict_subscription_email.district_subscription_email_organization_id_fkey Phones CommsPhoneSlice // district_subscription_phone.district_subscription_phone_organization_id_fkeydistrict_subscription_phone.district_subscription_phone_phone_e164_fkey Containerrelates FieldseekerContainerrelateSlice // fieldseeker.containerrelate.containerrelate_organization_id_fkey @@ -835,6 +837,54 @@ func (o OrganizationSlice) ReloadAll(ctx context.Context, exec bob.Executor) err return nil } +// AddressMappings starts a query for related objects on arcgis.address_mapping +func (o *Organization) AddressMappings(mods ...bob.Mod[*dialect.SelectQuery]) ArcgisAddressMappingsQuery { + return ArcgisAddressMappings.Query(append(mods, + sm.Where(ArcgisAddressMappings.Columns.OrganizationID.EQ(psql.Arg(o.ID))), + )...) +} + +func (os OrganizationSlice) AddressMappings(mods ...bob.Mod[*dialect.SelectQuery]) ArcgisAddressMappingsQuery { + pkID := make(pgtypes.Array[int32], 0, len(os)) + for _, o := range os { + if o == nil { + continue + } + pkID = append(pkID, o.ID) + } + PKArgExpr := psql.Select(sm.Columns( + psql.F("unnest", psql.Cast(psql.Arg(pkID), "integer[]")), + )) + + return ArcgisAddressMappings.Query(append(mods, + sm.Where(psql.Group(ArcgisAddressMappings.Columns.OrganizationID).OP("IN", PKArgExpr)), + )...) +} + +// ParcelMappings starts a query for related objects on arcgis.parcel_mapping +func (o *Organization) ParcelMappings(mods ...bob.Mod[*dialect.SelectQuery]) ArcgisParcelMappingsQuery { + return ArcgisParcelMappings.Query(append(mods, + sm.Where(ArcgisParcelMappings.Columns.OrganizationID.EQ(psql.Arg(o.ID))), + )...) 
+} + +func (os OrganizationSlice) ParcelMappings(mods ...bob.Mod[*dialect.SelectQuery]) ArcgisParcelMappingsQuery { + pkID := make(pgtypes.Array[int32], 0, len(os)) + for _, o := range os { + if o == nil { + continue + } + pkID = append(pkID, o.ID) + } + PKArgExpr := psql.Select(sm.Columns( + psql.F("unnest", psql.Cast(psql.Arg(pkID), "integer[]")), + )) + + return ArcgisParcelMappings.Query(append(mods, + sm.Where(psql.Group(ArcgisParcelMappings.Columns.OrganizationID).OP("IN", PKArgExpr)), + )...) +} + // EmailContacts starts a query for related objects on comms.email_contact func (o *Organization) EmailContacts(mods ...bob.Mod[*dialect.SelectQuery]) CommsEmailContactsQuery { return CommsEmailContacts.Query(append(mods, @@ -1781,6 +1831,142 @@ func (os OrganizationSlice) User(mods ...bob.Mod[*dialect.SelectQuery]) UsersQue )...) } +func insertOrganizationAddressMappings0(ctx context.Context, exec bob.Executor, arcgisAddressMappings1 []*ArcgisAddressMappingSetter, organization0 *Organization) (ArcgisAddressMappingSlice, error) { + for i := range arcgisAddressMappings1 { + arcgisAddressMappings1[i].OrganizationID = omit.From(organization0.ID) + } + + ret, err := ArcgisAddressMappings.Insert(bob.ToMods(arcgisAddressMappings1...)).All(ctx, exec) + if err != nil { + return ret, fmt.Errorf("insertOrganizationAddressMappings0: %w", err) + } + + return ret, nil +} + +func attachOrganizationAddressMappings0(ctx context.Context, exec bob.Executor, count int, arcgisAddressMappings1 ArcgisAddressMappingSlice, organization0 *Organization) (ArcgisAddressMappingSlice, error) { + setter := &ArcgisAddressMappingSetter{ + OrganizationID: omit.From(organization0.ID), + } + + err := arcgisAddressMappings1.UpdateAll(ctx, exec, *setter) + if err != nil { + return nil, fmt.Errorf("attachOrganizationAddressMappings0: %w", err) + } + + return arcgisAddressMappings1, nil +} + +func (organization0 *Organization) InsertAddressMappings(ctx context.Context, exec bob.Executor, related 
...*ArcgisAddressMappingSetter) error { + if len(related) == 0 { + return nil + } + + var err error + + arcgisAddressMappings1, err := insertOrganizationAddressMappings0(ctx, exec, related, organization0) + if err != nil { + return err + } + + organization0.R.AddressMappings = append(organization0.R.AddressMappings, arcgisAddressMappings1...) + + for _, rel := range arcgisAddressMappings1 { + rel.R.Organization = organization0 + } + return nil +} + +func (organization0 *Organization) AttachAddressMappings(ctx context.Context, exec bob.Executor, related ...*ArcgisAddressMapping) error { + if len(related) == 0 { + return nil + } + + var err error + arcgisAddressMappings1 := ArcgisAddressMappingSlice(related) + + _, err = attachOrganizationAddressMappings0(ctx, exec, len(related), arcgisAddressMappings1, organization0) + if err != nil { + return err + } + + organization0.R.AddressMappings = append(organization0.R.AddressMappings, arcgisAddressMappings1...) + + for _, rel := range related { + rel.R.Organization = organization0 + } + + return nil +} + +func insertOrganizationParcelMappings0(ctx context.Context, exec bob.Executor, arcgisParcelMappings1 []*ArcgisParcelMappingSetter, organization0 *Organization) (ArcgisParcelMappingSlice, error) { + for i := range arcgisParcelMappings1 { + arcgisParcelMappings1[i].OrganizationID = omit.From(organization0.ID) + } + + ret, err := ArcgisParcelMappings.Insert(bob.ToMods(arcgisParcelMappings1...)).All(ctx, exec) + if err != nil { + return ret, fmt.Errorf("insertOrganizationParcelMappings0: %w", err) + } + + return ret, nil +} + +func attachOrganizationParcelMappings0(ctx context.Context, exec bob.Executor, count int, arcgisParcelMappings1 ArcgisParcelMappingSlice, organization0 *Organization) (ArcgisParcelMappingSlice, error) { + setter := &ArcgisParcelMappingSetter{ + OrganizationID: omit.From(organization0.ID), + } + + err := arcgisParcelMappings1.UpdateAll(ctx, exec, *setter) + if err != nil { + return nil, 
fmt.Errorf("attachOrganizationParcelMappings0: %w", err) + } + + return arcgisParcelMappings1, nil +} + +func (organization0 *Organization) InsertParcelMappings(ctx context.Context, exec bob.Executor, related ...*ArcgisParcelMappingSetter) error { + if len(related) == 0 { + return nil + } + + var err error + + arcgisParcelMappings1, err := insertOrganizationParcelMappings0(ctx, exec, related, organization0) + if err != nil { + return err + } + + organization0.R.ParcelMappings = append(organization0.R.ParcelMappings, arcgisParcelMappings1...) + + for _, rel := range arcgisParcelMappings1 { + rel.R.Organization = organization0 + } + return nil +} + +func (organization0 *Organization) AttachParcelMappings(ctx context.Context, exec bob.Executor, related ...*ArcgisParcelMapping) error { + if len(related) == 0 { + return nil + } + + var err error + arcgisParcelMappings1 := ArcgisParcelMappingSlice(related) + + _, err = attachOrganizationParcelMappings0(ctx, exec, len(related), arcgisParcelMappings1, organization0) + if err != nil { + return err + } + + organization0.R.ParcelMappings = append(organization0.R.ParcelMappings, arcgisParcelMappings1...) 
+ + for _, rel := range related { + rel.R.Organization = organization0 + } + + return nil +} + func attachOrganizationEmailContacts0(ctx context.Context, exec bob.Executor, count int, organization0 *Organization, commsEmailContacts2 CommsEmailContactSlice) (DistrictSubscriptionEmailSlice, error) { setters := make([]*DistrictSubscriptionEmailSetter, count) for i := range count { @@ -4503,6 +4689,34 @@ func (o *Organization) Preload(name string, retrieved any) error { } switch name { + case "AddressMappings": + rels, ok := retrieved.(ArcgisAddressMappingSlice) + if !ok { + return fmt.Errorf("organization cannot load %T as %q", retrieved, name) + } + + o.R.AddressMappings = rels + + for _, rel := range rels { + if rel != nil { + rel.R.Organization = o + } + } + return nil + case "ParcelMappings": + rels, ok := retrieved.(ArcgisParcelMappingSlice) + if !ok { + return fmt.Errorf("organization cannot load %T as %q", retrieved, name) + } + + o.R.ParcelMappings = rels + + for _, rel := range rels { + if rel != nil { + rel.R.Organization = o + } + } + return nil case "EmailContacts": rels, ok := retrieved.(CommsEmailContactSlice) if !ok { @@ -5061,6 +5275,8 @@ func buildOrganizationPreloader() organizationPreloader { } type organizationThenLoader[Q orm.Loadable] struct { + AddressMappings func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] + ParcelMappings func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] EmailContacts func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] Phones func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] Containerrelates func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] @@ -5103,6 +5319,12 @@ type organizationThenLoader[Q orm.Loadable] struct { } func buildOrganizationThenLoader[Q orm.Loadable]() organizationThenLoader[Q] { + type AddressMappingsLoadInterface interface { + LoadAddressMappings(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error + } + type ParcelMappingsLoadInterface interface { + 
LoadParcelMappings(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error + } type EmailContactsLoadInterface interface { LoadEmailContacts(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error } @@ -5222,6 +5444,18 @@ func buildOrganizationThenLoader[Q orm.Loadable]() organizationThenLoader[Q] { } return organizationThenLoader[Q]{ + AddressMappings: thenLoadBuilder[Q]( + "AddressMappings", + func(ctx context.Context, exec bob.Executor, retrieved AddressMappingsLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { + return retrieved.LoadAddressMappings(ctx, exec, mods...) + }, + ), + ParcelMappings: thenLoadBuilder[Q]( + "ParcelMappings", + func(ctx context.Context, exec bob.Executor, retrieved ParcelMappingsLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { + return retrieved.LoadParcelMappings(ctx, exec, mods...) + }, + ), EmailContacts: thenLoadBuilder[Q]( "EmailContacts", func(ctx context.Context, exec bob.Executor, retrieved EmailContactsLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { @@ -5459,6 +5693,128 @@ func buildOrganizationThenLoader[Q orm.Loadable]() organizationThenLoader[Q] { } } +// LoadAddressMappings loads the organization's AddressMappings into the .R struct +func (o *Organization) LoadAddressMappings(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if o == nil { + return nil + } + + // Reset the relationship + o.R.AddressMappings = nil + + related, err := o.AddressMappings(mods...).All(ctx, exec) + if err != nil { + return err + } + + for _, rel := range related { + rel.R.Organization = o + } + + o.R.AddressMappings = related + return nil +} + +// LoadAddressMappings loads the organization's AddressMappings into the .R struct +func (os OrganizationSlice) LoadAddressMappings(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if len(os) == 0 { + return nil + } + + arcgisAddressMappings, err := 
os.AddressMappings(mods...).All(ctx, exec) + if err != nil { + return err + } + + for _, o := range os { + if o == nil { + continue + } + + o.R.AddressMappings = nil + } + + for _, o := range os { + if o == nil { + continue + } + + for _, rel := range arcgisAddressMappings { + + if !(o.ID == rel.OrganizationID) { + continue + } + + rel.R.Organization = o + + o.R.AddressMappings = append(o.R.AddressMappings, rel) + } + } + + return nil +} + +// LoadParcelMappings loads the organization's ParcelMappings into the .R struct +func (o *Organization) LoadParcelMappings(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if o == nil { + return nil + } + + // Reset the relationship + o.R.ParcelMappings = nil + + related, err := o.ParcelMappings(mods...).All(ctx, exec) + if err != nil { + return err + } + + for _, rel := range related { + rel.R.Organization = o + } + + o.R.ParcelMappings = related + return nil +} + +// LoadParcelMappings loads the organization's ParcelMappings into the .R struct +func (os OrganizationSlice) LoadParcelMappings(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if len(os) == 0 { + return nil + } + + arcgisParcelMappings, err := os.ParcelMappings(mods...).All(ctx, exec) + if err != nil { + return err + } + + for _, o := range os { + if o == nil { + continue + } + + o.R.ParcelMappings = nil + } + + for _, o := range os { + if o == nil { + continue + } + + for _, rel := range arcgisParcelMappings { + + if !(o.ID == rel.OrganizationID) { + continue + } + + rel.R.Organization = o + + o.R.ParcelMappings = append(o.R.ParcelMappings, rel) + } + } + + return nil +} + // LoadEmailContacts loads the organization's EmailContacts into the .R struct func (o *Organization) LoadEmailContacts(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { if o == nil { @@ -7889,6 +8245,8 @@ func (os OrganizationSlice) LoadUser(ctx context.Context, exec 
bob.Executor, mod // organizationC is where relationship counts are stored. type organizationC struct { + AddressMappings *int64 + ParcelMappings *int64 EmailContacts *int64 Phones *int64 Containerrelates *int64 @@ -7937,6 +8295,10 @@ func (o *Organization) PreloadCount(name string, count int64) error { } switch name { + case "AddressMappings": + o.C.AddressMappings = &count + case "ParcelMappings": + o.C.ParcelMappings = &count case "EmailContacts": o.C.EmailContacts = &count case "Phones": @@ -8020,6 +8382,8 @@ func (o *Organization) PreloadCount(name string, count int64) error { } type organizationCountPreloader struct { + AddressMappings func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader + ParcelMappings func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader EmailContacts func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader Phones func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader Containerrelates func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader @@ -8063,6 +8427,40 @@ type organizationCountPreloader struct { func buildOrganizationCountPreloader() organizationCountPreloader { return organizationCountPreloader{ + AddressMappings: func(mods ...bob.Mod[*dialect.SelectQuery]) psql.Preloader { + return countPreloader[*Organization]("AddressMappings", func(parent string) bob.Expression { + // Build a correlated subquery: (SELECT COUNT(*) FROM related WHERE fk = parent.pk) + if parent == "" { + parent = Organizations.Alias() + } + + subqueryMods := []bob.Mod[*dialect.SelectQuery]{ + sm.Columns(psql.Raw("count(*)")), + + sm.From(ArcgisAddressMappings.Name()), + sm.Where(psql.Quote(ArcgisAddressMappings.Alias(), "organization_id").EQ(psql.Quote(parent, "id"))), + } + subqueryMods = append(subqueryMods, mods...) 
+ return psql.Group(psql.Select(subqueryMods...).Expression) + }) + }, + ParcelMappings: func(mods ...bob.Mod[*dialect.SelectQuery]) psql.Preloader { + return countPreloader[*Organization]("ParcelMappings", func(parent string) bob.Expression { + // Build a correlated subquery: (SELECT COUNT(*) FROM related WHERE fk = parent.pk) + if parent == "" { + parent = Organizations.Alias() + } + + subqueryMods := []bob.Mod[*dialect.SelectQuery]{ + sm.Columns(psql.Raw("count(*)")), + + sm.From(ArcgisParcelMappings.Name()), + sm.Where(psql.Quote(ArcgisParcelMappings.Alias(), "organization_id").EQ(psql.Quote(parent, "id"))), + } + subqueryMods = append(subqueryMods, mods...) + return psql.Group(psql.Select(subqueryMods...).Expression) + }) + }, EmailContacts: func(mods ...bob.Mod[*dialect.SelectQuery]) psql.Preloader { return countPreloader[*Organization]("EmailContacts", func(parent string) bob.Expression { // Build a correlated subquery: (SELECT COUNT(*) FROM related WHERE fk = parent.pk) @@ -8736,6 +9134,8 @@ func buildOrganizationCountPreloader() organizationCountPreloader { } type organizationCountThenLoader[Q orm.Loadable] struct { + AddressMappings func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] + ParcelMappings func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] EmailContacts func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] Phones func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] Containerrelates func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] @@ -8778,6 +9178,12 @@ type organizationCountThenLoader[Q orm.Loadable] struct { } func buildOrganizationCountThenLoader[Q orm.Loadable]() organizationCountThenLoader[Q] { + type AddressMappingsCountInterface interface { + LoadCountAddressMappings(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error + } + type ParcelMappingsCountInterface interface { + LoadCountParcelMappings(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error + } type EmailContactsCountInterface interface { 
LoadCountEmailContacts(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error } @@ -8897,6 +9303,18 @@ func buildOrganizationCountThenLoader[Q orm.Loadable]() organizationCountThenLoa } return organizationCountThenLoader[Q]{ + AddressMappings: countThenLoadBuilder[Q]( + "AddressMappings", + func(ctx context.Context, exec bob.Executor, retrieved AddressMappingsCountInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { + return retrieved.LoadCountAddressMappings(ctx, exec, mods...) + }, + ), + ParcelMappings: countThenLoadBuilder[Q]( + "ParcelMappings", + func(ctx context.Context, exec bob.Executor, retrieved ParcelMappingsCountInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { + return retrieved.LoadCountParcelMappings(ctx, exec, mods...) + }, + ), EmailContacts: countThenLoadBuilder[Q]( "EmailContacts", func(ctx context.Context, exec bob.Executor, retrieved EmailContactsCountInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { @@ -9134,6 +9552,66 @@ func buildOrganizationCountThenLoader[Q orm.Loadable]() organizationCountThenLoa } } +// LoadCountAddressMappings loads the count of AddressMappings into the C struct +func (o *Organization) LoadCountAddressMappings(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if o == nil { + return nil + } + + count, err := o.AddressMappings(mods...).Count(ctx, exec) + if err != nil { + return err + } + + o.C.AddressMappings = &count + return nil +} + +// LoadCountAddressMappings loads the count of AddressMappings for a slice +func (os OrganizationSlice) LoadCountAddressMappings(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if len(os) == 0 { + return nil + } + + for _, o := range os { + if err := o.LoadCountAddressMappings(ctx, exec, mods...); err != nil { + return err + } + } + + return nil +} + +// LoadCountParcelMappings loads the count of ParcelMappings into the C struct +func (o *Organization) 
LoadCountParcelMappings(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if o == nil { + return nil + } + + count, err := o.ParcelMappings(mods...).Count(ctx, exec) + if err != nil { + return err + } + + o.C.ParcelMappings = &count + return nil +} + +// LoadCountParcelMappings loads the count of ParcelMappings for a slice +func (os OrganizationSlice) LoadCountParcelMappings(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if len(os) == 0 { + return nil + } + + for _, o := range os { + if err := o.LoadCountParcelMappings(ctx, exec, mods...); err != nil { + return err + } + } + + return nil +} + // LoadCountEmailContacts loads the count of EmailContacts into the C struct func (o *Organization) LoadCountEmailContacts(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { if o == nil { @@ -10306,6 +10784,8 @@ func (os OrganizationSlice) LoadCountUser(ctx context.Context, exec bob.Executor type organizationJoins[Q dialect.Joinable] struct { typ string + AddressMappings modAs[Q, arcgisAddressMappingColumns] + ParcelMappings modAs[Q, arcgisParcelMappingColumns] EmailContacts modAs[Q, commsEmailContactColumns] Phones modAs[Q, commsPhoneColumns] Containerrelates modAs[Q, fieldseekerContainerrelateColumns] @@ -10354,6 +10834,34 @@ func (j organizationJoins[Q]) aliasedAs(alias string) organizationJoins[Q] { func buildOrganizationJoins[Q dialect.Joinable](cols organizationColumns, typ string) organizationJoins[Q] { return organizationJoins[Q]{ typ: typ, + AddressMappings: modAs[Q, arcgisAddressMappingColumns]{ + c: ArcgisAddressMappings.Columns, + f: func(to arcgisAddressMappingColumns) bob.Mod[Q] { + mods := make(mods.QueryMods[Q], 0, 1) + + { + mods = append(mods, dialect.Join[Q](typ, ArcgisAddressMappings.Name().As(to.Alias())).On( + to.OrganizationID.EQ(cols.ID), + )) + } + + return mods + }, + }, + ParcelMappings: modAs[Q, arcgisParcelMappingColumns]{ + c: 
ArcgisParcelMappings.Columns, + f: func(to arcgisParcelMappingColumns) bob.Mod[Q] { + mods := make(mods.QueryMods[Q], 0, 1) + + { + mods = append(mods, dialect.Join[Q](typ, ArcgisParcelMappings.Name().As(to.Alias())).On( + to.OrganizationID.EQ(cols.ID), + )) + } + + return mods + }, + }, EmailContacts: modAs[Q, commsEmailContactColumns]{ c: CommsEmailContacts.Columns, f: func(to commsEmailContactColumns) bob.Mod[Q] { diff --git a/db/models/parcel.bob.go b/db/models/parcel.bob.go new file mode 100644 index 00000000..41f92cc1 --- /dev/null +++ b/db/models/parcel.bob.go @@ -0,0 +1,423 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package models + +import ( + "context" + "io" + + "github.com/Gleipnir-Technology/bob" + "github.com/Gleipnir-Technology/bob/dialect/psql" + "github.com/Gleipnir-Technology/bob/dialect/psql/dialect" + "github.com/Gleipnir-Technology/bob/dialect/psql/dm" + "github.com/Gleipnir-Technology/bob/dialect/psql/sm" + "github.com/Gleipnir-Technology/bob/dialect/psql/um" + "github.com/Gleipnir-Technology/bob/expr" + "github.com/aarondl/opt/omit" +) + +// Parcel is an object representing the database table. +type Parcel struct { + Apn string `db:"apn" ` + Description string `db:"description" ` + ID int32 `db:"id,pk" ` + Geometry string `db:"geometry" ` +} + +// ParcelSlice is an alias for a slice of pointers to Parcel. +// This should almost always be used instead of []*Parcel. 
+type ParcelSlice []*Parcel + +// Parcels contains methods to work with the parcel table +var Parcels = psql.NewTablex[*Parcel, ParcelSlice, *ParcelSetter]("", "parcel", buildParcelColumns("parcel")) + +// ParcelsQuery is a query on the parcel table +type ParcelsQuery = *psql.ViewQuery[*Parcel, ParcelSlice] + +func buildParcelColumns(alias string) parcelColumns { + return parcelColumns{ + ColumnsExpr: expr.NewColumnsExpr( + "apn", "description", "id", "geometry", + ).WithParent("parcel"), + tableAlias: alias, + Apn: psql.Quote(alias, "apn"), + Description: psql.Quote(alias, "description"), + ID: psql.Quote(alias, "id"), + Geometry: psql.Quote(alias, "geometry"), + } +} + +type parcelColumns struct { + expr.ColumnsExpr + tableAlias string + Apn psql.Expression + Description psql.Expression + ID psql.Expression + Geometry psql.Expression +} + +func (c parcelColumns) Alias() string { + return c.tableAlias +} + +func (parcelColumns) AliasedAs(alias string) parcelColumns { + return buildParcelColumns(alias) +} + +// ParcelSetter is used for insert/upsert/update operations +// All values are optional, and do not have to be set +// Generated columns are not included +type ParcelSetter struct { + Apn omit.Val[string] `db:"apn" ` + Description omit.Val[string] `db:"description" ` + ID omit.Val[int32] `db:"id,pk" ` + Geometry omit.Val[string] `db:"geometry" ` +} + +func (s ParcelSetter) SetColumns() []string { + vals := make([]string, 0, 4) + if s.Apn.IsValue() { + vals = append(vals, "apn") + } + if s.Description.IsValue() { + vals = append(vals, "description") + } + if s.ID.IsValue() { + vals = append(vals, "id") + } + if s.Geometry.IsValue() { + vals = append(vals, "geometry") + } + return vals +} + +func (s ParcelSetter) Overwrite(t *Parcel) { + if s.Apn.IsValue() { + t.Apn = s.Apn.MustGet() + } + if s.Description.IsValue() { + t.Description = s.Description.MustGet() + } + if s.ID.IsValue() { + t.ID = s.ID.MustGet() + } + if s.Geometry.IsValue() { + t.Geometry = 
s.Geometry.MustGet() + } +} + +func (s *ParcelSetter) Apply(q *dialect.InsertQuery) { + q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) { + return Parcels.BeforeInsertHooks.RunHooks(ctx, exec, s) + }) + + q.AppendValues(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) { + vals := make([]bob.Expression, 4) + if s.Apn.IsValue() { + vals[0] = psql.Arg(s.Apn.MustGet()) + } else { + vals[0] = psql.Raw("DEFAULT") + } + + if s.Description.IsValue() { + vals[1] = psql.Arg(s.Description.MustGet()) + } else { + vals[1] = psql.Raw("DEFAULT") + } + + if s.ID.IsValue() { + vals[2] = psql.Arg(s.ID.MustGet()) + } else { + vals[2] = psql.Raw("DEFAULT") + } + + if s.Geometry.IsValue() { + vals[3] = psql.Arg(s.Geometry.MustGet()) + } else { + vals[3] = psql.Raw("DEFAULT") + } + + return bob.ExpressSlice(ctx, w, d, start, vals, "", ", ", "") + })) +} + +func (s ParcelSetter) UpdateMod() bob.Mod[*dialect.UpdateQuery] { + return um.Set(s.Expressions()...) +} + +func (s ParcelSetter) Expressions(prefix ...string) []bob.Expression { + exprs := make([]bob.Expression, 0, 4) + + if s.Apn.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "apn")...), + psql.Arg(s.Apn), + }}) + } + + if s.Description.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "description")...), + psql.Arg(s.Description), + }}) + } + + if s.ID.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "id")...), + psql.Arg(s.ID), + }}) + } + + if s.Geometry.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "geometry")...), + psql.Arg(s.Geometry), + }}) + } + + return exprs +} + +// FindParcel retrieves a single record by primary key +// If cols is empty Find will return all columns. 
+func FindParcel(ctx context.Context, exec bob.Executor, IDPK int32, cols ...string) (*Parcel, error) { + if len(cols) == 0 { + return Parcels.Query( + sm.Where(Parcels.Columns.ID.EQ(psql.Arg(IDPK))), + ).One(ctx, exec) + } + + return Parcels.Query( + sm.Where(Parcels.Columns.ID.EQ(psql.Arg(IDPK))), + sm.Columns(Parcels.Columns.Only(cols...)), + ).One(ctx, exec) +} + +// ParcelExists checks the presence of a single record by primary key +func ParcelExists(ctx context.Context, exec bob.Executor, IDPK int32) (bool, error) { + return Parcels.Query( + sm.Where(Parcels.Columns.ID.EQ(psql.Arg(IDPK))), + ).Exists(ctx, exec) +} + +// AfterQueryHook is called after Parcel is retrieved from the database +func (o *Parcel) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error { + var err error + + switch queryType { + case bob.QueryTypeSelect: + ctx, err = Parcels.AfterSelectHooks.RunHooks(ctx, exec, ParcelSlice{o}) + case bob.QueryTypeInsert: + ctx, err = Parcels.AfterInsertHooks.RunHooks(ctx, exec, ParcelSlice{o}) + case bob.QueryTypeUpdate: + ctx, err = Parcels.AfterUpdateHooks.RunHooks(ctx, exec, ParcelSlice{o}) + case bob.QueryTypeDelete: + ctx, err = Parcels.AfterDeleteHooks.RunHooks(ctx, exec, ParcelSlice{o}) + } + + return err +} + +// primaryKeyVals returns the primary key values of the Parcel +func (o *Parcel) primaryKeyVals() bob.Expression { + return psql.Arg(o.ID) +} + +func (o *Parcel) pkEQ() dialect.Expression { + return psql.Quote("parcel", "id").EQ(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) { + return o.primaryKeyVals().WriteSQL(ctx, w, d, start) + })) +} + +// Update uses an executor to update the Parcel +func (o *Parcel) Update(ctx context.Context, exec bob.Executor, s *ParcelSetter) error { + v, err := Parcels.Update(s.UpdateMod(), um.Where(o.pkEQ())).One(ctx, exec) + if err != nil { + return err + } + + *o = *v + + return nil +} + +// Delete deletes a single 
Parcel record with an executor +func (o *Parcel) Delete(ctx context.Context, exec bob.Executor) error { + _, err := Parcels.Delete(dm.Where(o.pkEQ())).Exec(ctx, exec) + return err +} + +// Reload refreshes the Parcel using the executor +func (o *Parcel) Reload(ctx context.Context, exec bob.Executor) error { + o2, err := Parcels.Query( + sm.Where(Parcels.Columns.ID.EQ(psql.Arg(o.ID))), + ).One(ctx, exec) + if err != nil { + return err + } + + *o = *o2 + + return nil +} + +// AfterQueryHook is called after ParcelSlice is retrieved from the database +func (o ParcelSlice) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error { + var err error + + switch queryType { + case bob.QueryTypeSelect: + ctx, err = Parcels.AfterSelectHooks.RunHooks(ctx, exec, o) + case bob.QueryTypeInsert: + ctx, err = Parcels.AfterInsertHooks.RunHooks(ctx, exec, o) + case bob.QueryTypeUpdate: + ctx, err = Parcels.AfterUpdateHooks.RunHooks(ctx, exec, o) + case bob.QueryTypeDelete: + ctx, err = Parcels.AfterDeleteHooks.RunHooks(ctx, exec, o) + } + + return err +} + +func (o ParcelSlice) pkIN() dialect.Expression { + if len(o) == 0 { + return psql.Raw("NULL") + } + + return psql.Quote("parcel", "id").In(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) { + pkPairs := make([]bob.Expression, len(o)) + for i, row := range o { + pkPairs[i] = row.primaryKeyVals() + } + return bob.ExpressSlice(ctx, w, d, start, pkPairs, "", ", ", "") + })) +} + +// copyMatchingRows finds models in the given slice that have the same primary key +// then it first copies the existing relationships from the old model to the new model +// and then replaces the old model in the slice with the new model +func (o ParcelSlice) copyMatchingRows(from ...*Parcel) { + for i, old := range o { + for _, new := range from { + if new.ID != old.ID { + continue + } + + o[i] = new + break + } + } +} + +// UpdateMod modifies an update query with "WHERE 
primary_key IN (o...)" +func (o ParcelSlice) UpdateMod() bob.Mod[*dialect.UpdateQuery] { + return bob.ModFunc[*dialect.UpdateQuery](func(q *dialect.UpdateQuery) { + q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) { + return Parcels.BeforeUpdateHooks.RunHooks(ctx, exec, o) + }) + + q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error { + var err error + switch retrieved := retrieved.(type) { + case *Parcel: + o.copyMatchingRows(retrieved) + case []*Parcel: + o.copyMatchingRows(retrieved...) + case ParcelSlice: + o.copyMatchingRows(retrieved...) + default: + // If the retrieved value is not a Parcel or a slice of Parcel + // then run the AfterUpdateHooks on the slice + _, err = Parcels.AfterUpdateHooks.RunHooks(ctx, exec, o) + } + + return err + })) + + q.AppendWhere(o.pkIN()) + }) +} + +// DeleteMod modifies an delete query with "WHERE primary_key IN (o...)" +func (o ParcelSlice) DeleteMod() bob.Mod[*dialect.DeleteQuery] { + return bob.ModFunc[*dialect.DeleteQuery](func(q *dialect.DeleteQuery) { + q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) { + return Parcels.BeforeDeleteHooks.RunHooks(ctx, exec, o) + }) + + q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error { + var err error + switch retrieved := retrieved.(type) { + case *Parcel: + o.copyMatchingRows(retrieved) + case []*Parcel: + o.copyMatchingRows(retrieved...) + case ParcelSlice: + o.copyMatchingRows(retrieved...) 
+ default: + // If the retrieved value is not a Parcel or a slice of Parcel + // then run the AfterDeleteHooks on the slice + _, err = Parcels.AfterDeleteHooks.RunHooks(ctx, exec, o) + } + + return err + })) + + q.AppendWhere(o.pkIN()) + }) +} + +func (o ParcelSlice) UpdateAll(ctx context.Context, exec bob.Executor, vals ParcelSetter) error { + if len(o) == 0 { + return nil + } + + _, err := Parcels.Update(vals.UpdateMod(), o.UpdateMod()).All(ctx, exec) + return err +} + +func (o ParcelSlice) DeleteAll(ctx context.Context, exec bob.Executor) error { + if len(o) == 0 { + return nil + } + + _, err := Parcels.Delete(o.DeleteMod()).Exec(ctx, exec) + return err +} + +func (o ParcelSlice) ReloadAll(ctx context.Context, exec bob.Executor) error { + if len(o) == 0 { + return nil + } + + o2, err := Parcels.Query(sm.Where(o.pkIN())).All(ctx, exec) + if err != nil { + return err + } + + o.copyMatchingRows(o2...) + + return nil +} + +type parcelWhere[Q psql.Filterable] struct { + Apn psql.WhereMod[Q, string] + Description psql.WhereMod[Q, string] + ID psql.WhereMod[Q, int32] + Geometry psql.WhereMod[Q, string] +} + +func (parcelWhere[Q]) AliasedAs(alias string) parcelWhere[Q] { + return buildParcelWhere[Q](buildParcelColumns(alias)) +} + +func buildParcelWhere[Q psql.Filterable](cols parcelColumns) parcelWhere[Q] { + return parcelWhere[Q]{ + Apn: psql.Where[Q, string](cols.Apn), + Description: psql.Where[Q, string](cols.Description), + ID: psql.Where[Q, int32](cols.ID), + Geometry: psql.Where[Q, string](cols.Geometry), + } +} diff --git a/db/models/pool.bob.go b/db/models/pool.bob.go new file mode 100644 index 00000000..105aad34 --- /dev/null +++ b/db/models/pool.bob.go @@ -0,0 +1,681 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package models + +import ( + "context" + "fmt" + "io" + "time" + + "github.com/Gleipnir-Technology/bob" + "github.com/Gleipnir-Technology/bob/dialect/psql" + "github.com/Gleipnir-Technology/bob/dialect/psql/dialect" + "github.com/Gleipnir-Technology/bob/dialect/psql/dm" + "github.com/Gleipnir-Technology/bob/dialect/psql/sm" + "github.com/Gleipnir-Technology/bob/dialect/psql/um" + "github.com/Gleipnir-Technology/bob/expr" + "github.com/Gleipnir-Technology/bob/mods" + "github.com/Gleipnir-Technology/bob/orm" + "github.com/Gleipnir-Technology/bob/types/pgtypes" + enums "github.com/Gleipnir-Technology/nidus-sync/db/enums" + "github.com/aarondl/opt/null" + "github.com/aarondl/opt/omit" + "github.com/aarondl/opt/omitnull" +) + +// Pool is an object representing the database table. +type Pool struct { + Condition enums.Poolconditiontype `db:"condition" ` + Created time.Time `db:"created" ` + CreatorID int32 `db:"creator_id" ` + ID int32 `db:"id,pk" ` + SiteID null.Val[int32] `db:"site_id" ` + + R poolR `db:"-" ` +} + +// PoolSlice is an alias for a slice of pointers to Pool. +// This should almost always be used instead of []*Pool. +type PoolSlice []*Pool + +// Pools contains methods to work with the pool table +var Pools = psql.NewTablex[*Pool, PoolSlice, *PoolSetter]("", "pool", buildPoolColumns("pool")) + +// PoolsQuery is a query on the pool table +type PoolsQuery = *psql.ViewQuery[*Pool, PoolSlice] + +// poolR is where relationships are stored. 
+type poolR struct { + CreatorUser *User // pool.pool_creator_id_fkey +} + +func buildPoolColumns(alias string) poolColumns { + return poolColumns{ + ColumnsExpr: expr.NewColumnsExpr( + "condition", "created", "creator_id", "id", "site_id", + ).WithParent("pool"), + tableAlias: alias, + Condition: psql.Quote(alias, "condition"), + Created: psql.Quote(alias, "created"), + CreatorID: psql.Quote(alias, "creator_id"), + ID: psql.Quote(alias, "id"), + SiteID: psql.Quote(alias, "site_id"), + } +} + +type poolColumns struct { + expr.ColumnsExpr + tableAlias string + Condition psql.Expression + Created psql.Expression + CreatorID psql.Expression + ID psql.Expression + SiteID psql.Expression +} + +func (c poolColumns) Alias() string { + return c.tableAlias +} + +func (poolColumns) AliasedAs(alias string) poolColumns { + return buildPoolColumns(alias) +} + +// PoolSetter is used for insert/upsert/update operations +// All values are optional, and do not have to be set +// Generated columns are not included +type PoolSetter struct { + Condition omit.Val[enums.Poolconditiontype] `db:"condition" ` + Created omit.Val[time.Time] `db:"created" ` + CreatorID omit.Val[int32] `db:"creator_id" ` + ID omit.Val[int32] `db:"id,pk" ` + SiteID omitnull.Val[int32] `db:"site_id" ` +} + +func (s PoolSetter) SetColumns() []string { + vals := make([]string, 0, 5) + if s.Condition.IsValue() { + vals = append(vals, "condition") + } + if s.Created.IsValue() { + vals = append(vals, "created") + } + if s.CreatorID.IsValue() { + vals = append(vals, "creator_id") + } + if s.ID.IsValue() { + vals = append(vals, "id") + } + if !s.SiteID.IsUnset() { + vals = append(vals, "site_id") + } + return vals +} + +func (s PoolSetter) Overwrite(t *Pool) { + if s.Condition.IsValue() { + t.Condition = s.Condition.MustGet() + } + if s.Created.IsValue() { + t.Created = s.Created.MustGet() + } + if s.CreatorID.IsValue() { + t.CreatorID = s.CreatorID.MustGet() + } + if s.ID.IsValue() { + t.ID = s.ID.MustGet() + } + if 
!s.SiteID.IsUnset() { + t.SiteID = s.SiteID.MustGetNull() + } +} + +func (s *PoolSetter) Apply(q *dialect.InsertQuery) { + q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) { + return Pools.BeforeInsertHooks.RunHooks(ctx, exec, s) + }) + + q.AppendValues(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) { + vals := make([]bob.Expression, 5) + if s.Condition.IsValue() { + vals[0] = psql.Arg(s.Condition.MustGet()) + } else { + vals[0] = psql.Raw("DEFAULT") + } + + if s.Created.IsValue() { + vals[1] = psql.Arg(s.Created.MustGet()) + } else { + vals[1] = psql.Raw("DEFAULT") + } + + if s.CreatorID.IsValue() { + vals[2] = psql.Arg(s.CreatorID.MustGet()) + } else { + vals[2] = psql.Raw("DEFAULT") + } + + if s.ID.IsValue() { + vals[3] = psql.Arg(s.ID.MustGet()) + } else { + vals[3] = psql.Raw("DEFAULT") + } + + if !s.SiteID.IsUnset() { + vals[4] = psql.Arg(s.SiteID.MustGetNull()) + } else { + vals[4] = psql.Raw("DEFAULT") + } + + return bob.ExpressSlice(ctx, w, d, start, vals, "", ", ", "") + })) +} + +func (s PoolSetter) UpdateMod() bob.Mod[*dialect.UpdateQuery] { + return um.Set(s.Expressions()...) 
+} + +func (s PoolSetter) Expressions(prefix ...string) []bob.Expression { + exprs := make([]bob.Expression, 0, 5) + + if s.Condition.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "condition")...), + psql.Arg(s.Condition), + }}) + } + + if s.Created.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "created")...), + psql.Arg(s.Created), + }}) + } + + if s.CreatorID.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "creator_id")...), + psql.Arg(s.CreatorID), + }}) + } + + if s.ID.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "id")...), + psql.Arg(s.ID), + }}) + } + + if !s.SiteID.IsUnset() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "site_id")...), + psql.Arg(s.SiteID), + }}) + } + + return exprs +} + +// FindPool retrieves a single record by primary key +// If cols is empty Find will return all columns. 
+func FindPool(ctx context.Context, exec bob.Executor, IDPK int32, cols ...string) (*Pool, error) { + if len(cols) == 0 { + return Pools.Query( + sm.Where(Pools.Columns.ID.EQ(psql.Arg(IDPK))), + ).One(ctx, exec) + } + + return Pools.Query( + sm.Where(Pools.Columns.ID.EQ(psql.Arg(IDPK))), + sm.Columns(Pools.Columns.Only(cols...)), + ).One(ctx, exec) +} + +// PoolExists checks the presence of a single record by primary key +func PoolExists(ctx context.Context, exec bob.Executor, IDPK int32) (bool, error) { + return Pools.Query( + sm.Where(Pools.Columns.ID.EQ(psql.Arg(IDPK))), + ).Exists(ctx, exec) +} + +// AfterQueryHook is called after Pool is retrieved from the database +func (o *Pool) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error { + var err error + + switch queryType { + case bob.QueryTypeSelect: + ctx, err = Pools.AfterSelectHooks.RunHooks(ctx, exec, PoolSlice{o}) + case bob.QueryTypeInsert: + ctx, err = Pools.AfterInsertHooks.RunHooks(ctx, exec, PoolSlice{o}) + case bob.QueryTypeUpdate: + ctx, err = Pools.AfterUpdateHooks.RunHooks(ctx, exec, PoolSlice{o}) + case bob.QueryTypeDelete: + ctx, err = Pools.AfterDeleteHooks.RunHooks(ctx, exec, PoolSlice{o}) + } + + return err +} + +// primaryKeyVals returns the primary key values of the Pool +func (o *Pool) primaryKeyVals() bob.Expression { + return psql.Arg(o.ID) +} + +func (o *Pool) pkEQ() dialect.Expression { + return psql.Quote("pool", "id").EQ(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) { + return o.primaryKeyVals().WriteSQL(ctx, w, d, start) + })) +} + +// Update uses an executor to update the Pool +func (o *Pool) Update(ctx context.Context, exec bob.Executor, s *PoolSetter) error { + v, err := Pools.Update(s.UpdateMod(), um.Where(o.pkEQ())).One(ctx, exec) + if err != nil { + return err + } + + o.R = v.R + *o = *v + + return nil +} + +// Delete deletes a single Pool record with an executor +func (o *Pool) 
Delete(ctx context.Context, exec bob.Executor) error { + _, err := Pools.Delete(dm.Where(o.pkEQ())).Exec(ctx, exec) + return err +} + +// Reload refreshes the Pool using the executor +func (o *Pool) Reload(ctx context.Context, exec bob.Executor) error { + o2, err := Pools.Query( + sm.Where(Pools.Columns.ID.EQ(psql.Arg(o.ID))), + ).One(ctx, exec) + if err != nil { + return err + } + o2.R = o.R + *o = *o2 + + return nil +} + +// AfterQueryHook is called after PoolSlice is retrieved from the database +func (o PoolSlice) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error { + var err error + + switch queryType { + case bob.QueryTypeSelect: + ctx, err = Pools.AfterSelectHooks.RunHooks(ctx, exec, o) + case bob.QueryTypeInsert: + ctx, err = Pools.AfterInsertHooks.RunHooks(ctx, exec, o) + case bob.QueryTypeUpdate: + ctx, err = Pools.AfterUpdateHooks.RunHooks(ctx, exec, o) + case bob.QueryTypeDelete: + ctx, err = Pools.AfterDeleteHooks.RunHooks(ctx, exec, o) + } + + return err +} + +func (o PoolSlice) pkIN() dialect.Expression { + if len(o) == 0 { + return psql.Raw("NULL") + } + + return psql.Quote("pool", "id").In(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) { + pkPairs := make([]bob.Expression, len(o)) + for i, row := range o { + pkPairs[i] = row.primaryKeyVals() + } + return bob.ExpressSlice(ctx, w, d, start, pkPairs, "", ", ", "") + })) +} + +// copyMatchingRows finds models in the given slice that have the same primary key +// then it first copies the existing relationships from the old model to the new model +// and then replaces the old model in the slice with the new model +func (o PoolSlice) copyMatchingRows(from ...*Pool) { + for i, old := range o { + for _, new := range from { + if new.ID != old.ID { + continue + } + new.R = old.R + o[i] = new + break + } + } +} + +// UpdateMod modifies an update query with "WHERE primary_key IN (o...)" +func (o PoolSlice) UpdateMod() 
bob.Mod[*dialect.UpdateQuery] { + return bob.ModFunc[*dialect.UpdateQuery](func(q *dialect.UpdateQuery) { + q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) { + return Pools.BeforeUpdateHooks.RunHooks(ctx, exec, o) + }) + + q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error { + var err error + switch retrieved := retrieved.(type) { + case *Pool: + o.copyMatchingRows(retrieved) + case []*Pool: + o.copyMatchingRows(retrieved...) + case PoolSlice: + o.copyMatchingRows(retrieved...) + default: + // If the retrieved value is not a Pool or a slice of Pool + // then run the AfterUpdateHooks on the slice + _, err = Pools.AfterUpdateHooks.RunHooks(ctx, exec, o) + } + + return err + })) + + q.AppendWhere(o.pkIN()) + }) +} + +// DeleteMod modifies an delete query with "WHERE primary_key IN (o...)" +func (o PoolSlice) DeleteMod() bob.Mod[*dialect.DeleteQuery] { + return bob.ModFunc[*dialect.DeleteQuery](func(q *dialect.DeleteQuery) { + q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) { + return Pools.BeforeDeleteHooks.RunHooks(ctx, exec, o) + }) + + q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error { + var err error + switch retrieved := retrieved.(type) { + case *Pool: + o.copyMatchingRows(retrieved) + case []*Pool: + o.copyMatchingRows(retrieved...) + case PoolSlice: + o.copyMatchingRows(retrieved...) 
+ default: + // If the retrieved value is not a Pool or a slice of Pool + // then run the AfterDeleteHooks on the slice + _, err = Pools.AfterDeleteHooks.RunHooks(ctx, exec, o) + } + + return err + })) + + q.AppendWhere(o.pkIN()) + }) +} + +func (o PoolSlice) UpdateAll(ctx context.Context, exec bob.Executor, vals PoolSetter) error { + if len(o) == 0 { + return nil + } + + _, err := Pools.Update(vals.UpdateMod(), o.UpdateMod()).All(ctx, exec) + return err +} + +func (o PoolSlice) DeleteAll(ctx context.Context, exec bob.Executor) error { + if len(o) == 0 { + return nil + } + + _, err := Pools.Delete(o.DeleteMod()).Exec(ctx, exec) + return err +} + +func (o PoolSlice) ReloadAll(ctx context.Context, exec bob.Executor) error { + if len(o) == 0 { + return nil + } + + o2, err := Pools.Query(sm.Where(o.pkIN())).All(ctx, exec) + if err != nil { + return err + } + + o.copyMatchingRows(o2...) + + return nil +} + +// CreatorUser starts a query for related objects on user_ +func (o *Pool) CreatorUser(mods ...bob.Mod[*dialect.SelectQuery]) UsersQuery { + return Users.Query(append(mods, + sm.Where(Users.Columns.ID.EQ(psql.Arg(o.CreatorID))), + )...) +} + +func (os PoolSlice) CreatorUser(mods ...bob.Mod[*dialect.SelectQuery]) UsersQuery { + pkCreatorID := make(pgtypes.Array[int32], 0, len(os)) + for _, o := range os { + if o == nil { + continue + } + pkCreatorID = append(pkCreatorID, o.CreatorID) + } + PKArgExpr := psql.Select(sm.Columns( + psql.F("unnest", psql.Cast(psql.Arg(pkCreatorID), "integer[]")), + )) + + return Users.Query(append(mods, + sm.Where(psql.Group(Users.Columns.ID).OP("IN", PKArgExpr)), + )...) 
+} + +func attachPoolCreatorUser0(ctx context.Context, exec bob.Executor, count int, pool0 *Pool, user1 *User) (*Pool, error) { + setter := &PoolSetter{ + CreatorID: omit.From(user1.ID), + } + + err := pool0.Update(ctx, exec, setter) + if err != nil { + return nil, fmt.Errorf("attachPoolCreatorUser0: %w", err) + } + + return pool0, nil +} + +func (pool0 *Pool) InsertCreatorUser(ctx context.Context, exec bob.Executor, related *UserSetter) error { + var err error + + user1, err := Users.Insert(related).One(ctx, exec) + if err != nil { + return fmt.Errorf("inserting related objects: %w", err) + } + + _, err = attachPoolCreatorUser0(ctx, exec, 1, pool0, user1) + if err != nil { + return err + } + + pool0.R.CreatorUser = user1 + + user1.R.CreatorPools = append(user1.R.CreatorPools, pool0) + + return nil +} + +func (pool0 *Pool) AttachCreatorUser(ctx context.Context, exec bob.Executor, user1 *User) error { + var err error + + _, err = attachPoolCreatorUser0(ctx, exec, 1, pool0, user1) + if err != nil { + return err + } + + pool0.R.CreatorUser = user1 + + user1.R.CreatorPools = append(user1.R.CreatorPools, pool0) + + return nil +} + +type poolWhere[Q psql.Filterable] struct { + Condition psql.WhereMod[Q, enums.Poolconditiontype] + Created psql.WhereMod[Q, time.Time] + CreatorID psql.WhereMod[Q, int32] + ID psql.WhereMod[Q, int32] + SiteID psql.WhereNullMod[Q, int32] +} + +func (poolWhere[Q]) AliasedAs(alias string) poolWhere[Q] { + return buildPoolWhere[Q](buildPoolColumns(alias)) +} + +func buildPoolWhere[Q psql.Filterable](cols poolColumns) poolWhere[Q] { + return poolWhere[Q]{ + Condition: psql.Where[Q, enums.Poolconditiontype](cols.Condition), + Created: psql.Where[Q, time.Time](cols.Created), + CreatorID: psql.Where[Q, int32](cols.CreatorID), + ID: psql.Where[Q, int32](cols.ID), + SiteID: psql.WhereNull[Q, int32](cols.SiteID), + } +} + +func (o *Pool) Preload(name string, retrieved any) error { + if o == nil { + return nil + } + + switch name { + case "CreatorUser": 
+ rel, ok := retrieved.(*User) + if !ok { + return fmt.Errorf("pool cannot load %T as %q", retrieved, name) + } + + o.R.CreatorUser = rel + + if rel != nil { + rel.R.CreatorPools = PoolSlice{o} + } + return nil + default: + return fmt.Errorf("pool has no relationship %q", name) + } +} + +type poolPreloader struct { + CreatorUser func(...psql.PreloadOption) psql.Preloader +} + +func buildPoolPreloader() poolPreloader { + return poolPreloader{ + CreatorUser: func(opts ...psql.PreloadOption) psql.Preloader { + return psql.Preload[*User, UserSlice](psql.PreloadRel{ + Name: "CreatorUser", + Sides: []psql.PreloadSide{ + { + From: Pools, + To: Users, + FromColumns: []string{"creator_id"}, + ToColumns: []string{"id"}, + }, + }, + }, Users.Columns.Names(), opts...) + }, + } +} + +type poolThenLoader[Q orm.Loadable] struct { + CreatorUser func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] +} + +func buildPoolThenLoader[Q orm.Loadable]() poolThenLoader[Q] { + type CreatorUserLoadInterface interface { + LoadCreatorUser(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error + } + + return poolThenLoader[Q]{ + CreatorUser: thenLoadBuilder[Q]( + "CreatorUser", + func(ctx context.Context, exec bob.Executor, retrieved CreatorUserLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { + return retrieved.LoadCreatorUser(ctx, exec, mods...) 
+ }, + ), + } +} + +// LoadCreatorUser loads the pool's CreatorUser into the .R struct +func (o *Pool) LoadCreatorUser(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if o == nil { + return nil + } + + // Reset the relationship + o.R.CreatorUser = nil + + related, err := o.CreatorUser(mods...).One(ctx, exec) + if err != nil { + return err + } + + related.R.CreatorPools = PoolSlice{o} + + o.R.CreatorUser = related + return nil +} + +// LoadCreatorUser loads the pool's CreatorUser into the .R struct +func (os PoolSlice) LoadCreatorUser(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if len(os) == 0 { + return nil + } + + users, err := os.CreatorUser(mods...).All(ctx, exec) + if err != nil { + return err + } + + for _, o := range os { + if o == nil { + continue + } + + for _, rel := range users { + + if !(o.CreatorID == rel.ID) { + continue + } + + rel.R.CreatorPools = append(rel.R.CreatorPools, o) + + o.R.CreatorUser = rel + break + } + } + + return nil +} + +type poolJoins[Q dialect.Joinable] struct { + typ string + CreatorUser modAs[Q, userColumns] +} + +func (j poolJoins[Q]) aliasedAs(alias string) poolJoins[Q] { + return buildPoolJoins[Q](buildPoolColumns(alias), j.typ) +} + +func buildPoolJoins[Q dialect.Joinable](cols poolColumns, typ string) poolJoins[Q] { + return poolJoins[Q]{ + typ: typ, + CreatorUser: modAs[Q, userColumns]{ + c: Users.Columns, + f: func(to userColumns) bob.Mod[Q] { + mods := make(mods.QueryMods[Q], 0, 1) + + { + mods = append(mods, dialect.Join[Q](typ, Users.Name().As(to.Alias())).On( + to.ID.EQ(cols.CreatorID), + )) + } + + return mods + }, + }, + } +} diff --git a/db/models/site.bob.go b/db/models/site.bob.go new file mode 100644 index 00000000..5651ec7b --- /dev/null +++ b/db/models/site.bob.go @@ -0,0 +1,1245 @@ +// Code generated by BobGen psql v0.42.5. DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package models + +import ( + "context" + "fmt" + "io" + "time" + + "github.com/Gleipnir-Technology/bob" + "github.com/Gleipnir-Technology/bob/dialect/psql" + "github.com/Gleipnir-Technology/bob/dialect/psql/dialect" + "github.com/Gleipnir-Technology/bob/dialect/psql/dm" + "github.com/Gleipnir-Technology/bob/dialect/psql/sm" + "github.com/Gleipnir-Technology/bob/dialect/psql/um" + "github.com/Gleipnir-Technology/bob/expr" + "github.com/Gleipnir-Technology/bob/mods" + "github.com/Gleipnir-Technology/bob/orm" + "github.com/Gleipnir-Technology/bob/types/pgtypes" + "github.com/aarondl/opt/null" + "github.com/aarondl/opt/omit" + "github.com/aarondl/opt/omitnull" +) + +// Site is an object representing the database table. +type Site struct { + AddressID int32 `db:"address_id" ` + Created time.Time `db:"created" ` + CreatorID int32 `db:"creator_id" ` + FileID null.Val[int32] `db:"file_id" ` + ID int32 `db:"id,pk" ` + Notes string `db:"notes" ` + OrganizationID int32 `db:"organization_id" ` + OwnerName string `db:"owner_name" ` + OwnerPhoneE164 null.Val[string] `db:"owner_phone_e164" ` + ResidentOwned null.Val[bool] `db:"resident_owned" ` + ResidentPhoneE164 null.Val[string] `db:"resident_phone_e164" ` + Tags pgtypes.HStore `db:"tags" ` + Version int32 `db:"version,pk" ` + + R siteR `db:"-" ` +} + +// SiteSlice is an alias for a slice of pointers to Site. +// This should almost always be used instead of []*Site. +type SiteSlice []*Site + +// Sites contains methods to work with the site table +var Sites = psql.NewTablex[*Site, SiteSlice, *SiteSetter]("", "site", buildSiteColumns("site")) + +// SitesQuery is a query on the site table +type SitesQuery = *psql.ViewQuery[*Site, SiteSlice] + +// siteR is where relationships are stored. 
+type siteR struct { + Address *Address // site.site_address_id_fkey + CreatorUser *User // site.site_creator_id_fkey + File *FileuploadFile // site.site_file_id_fkey +} + +func buildSiteColumns(alias string) siteColumns { + return siteColumns{ + ColumnsExpr: expr.NewColumnsExpr( + "address_id", "created", "creator_id", "file_id", "id", "notes", "organization_id", "owner_name", "owner_phone_e164", "resident_owned", "resident_phone_e164", "tags", "version", + ).WithParent("site"), + tableAlias: alias, + AddressID: psql.Quote(alias, "address_id"), + Created: psql.Quote(alias, "created"), + CreatorID: psql.Quote(alias, "creator_id"), + FileID: psql.Quote(alias, "file_id"), + ID: psql.Quote(alias, "id"), + Notes: psql.Quote(alias, "notes"), + OrganizationID: psql.Quote(alias, "organization_id"), + OwnerName: psql.Quote(alias, "owner_name"), + OwnerPhoneE164: psql.Quote(alias, "owner_phone_e164"), + ResidentOwned: psql.Quote(alias, "resident_owned"), + ResidentPhoneE164: psql.Quote(alias, "resident_phone_e164"), + Tags: psql.Quote(alias, "tags"), + Version: psql.Quote(alias, "version"), + } +} + +type siteColumns struct { + expr.ColumnsExpr + tableAlias string + AddressID psql.Expression + Created psql.Expression + CreatorID psql.Expression + FileID psql.Expression + ID psql.Expression + Notes psql.Expression + OrganizationID psql.Expression + OwnerName psql.Expression + OwnerPhoneE164 psql.Expression + ResidentOwned psql.Expression + ResidentPhoneE164 psql.Expression + Tags psql.Expression + Version psql.Expression +} + +func (c siteColumns) Alias() string { + return c.tableAlias +} + +func (siteColumns) AliasedAs(alias string) siteColumns { + return buildSiteColumns(alias) +} + +// SiteSetter is used for insert/upsert/update operations +// All values are optional, and do not have to be set +// Generated columns are not included +type SiteSetter struct { + AddressID omit.Val[int32] `db:"address_id" ` + Created omit.Val[time.Time] `db:"created" ` + CreatorID 
omit.Val[int32] `db:"creator_id" ` + FileID omitnull.Val[int32] `db:"file_id" ` + ID omit.Val[int32] `db:"id,pk" ` + Notes omit.Val[string] `db:"notes" ` + OrganizationID omit.Val[int32] `db:"organization_id" ` + OwnerName omit.Val[string] `db:"owner_name" ` + OwnerPhoneE164 omitnull.Val[string] `db:"owner_phone_e164" ` + ResidentOwned omitnull.Val[bool] `db:"resident_owned" ` + ResidentPhoneE164 omitnull.Val[string] `db:"resident_phone_e164" ` + Tags omit.Val[pgtypes.HStore] `db:"tags" ` + Version omit.Val[int32] `db:"version,pk" ` +} + +func (s SiteSetter) SetColumns() []string { + vals := make([]string, 0, 13) + if s.AddressID.IsValue() { + vals = append(vals, "address_id") + } + if s.Created.IsValue() { + vals = append(vals, "created") + } + if s.CreatorID.IsValue() { + vals = append(vals, "creator_id") + } + if !s.FileID.IsUnset() { + vals = append(vals, "file_id") + } + if s.ID.IsValue() { + vals = append(vals, "id") + } + if s.Notes.IsValue() { + vals = append(vals, "notes") + } + if s.OrganizationID.IsValue() { + vals = append(vals, "organization_id") + } + if s.OwnerName.IsValue() { + vals = append(vals, "owner_name") + } + if !s.OwnerPhoneE164.IsUnset() { + vals = append(vals, "owner_phone_e164") + } + if !s.ResidentOwned.IsUnset() { + vals = append(vals, "resident_owned") + } + if !s.ResidentPhoneE164.IsUnset() { + vals = append(vals, "resident_phone_e164") + } + if s.Tags.IsValue() { + vals = append(vals, "tags") + } + if s.Version.IsValue() { + vals = append(vals, "version") + } + return vals +} + +func (s SiteSetter) Overwrite(t *Site) { + if s.AddressID.IsValue() { + t.AddressID = s.AddressID.MustGet() + } + if s.Created.IsValue() { + t.Created = s.Created.MustGet() + } + if s.CreatorID.IsValue() { + t.CreatorID = s.CreatorID.MustGet() + } + if !s.FileID.IsUnset() { + t.FileID = s.FileID.MustGetNull() + } + if s.ID.IsValue() { + t.ID = s.ID.MustGet() + } + if s.Notes.IsValue() { + t.Notes = s.Notes.MustGet() + } + if s.OrganizationID.IsValue() { + 
t.OrganizationID = s.OrganizationID.MustGet() + } + if s.OwnerName.IsValue() { + t.OwnerName = s.OwnerName.MustGet() + } + if !s.OwnerPhoneE164.IsUnset() { + t.OwnerPhoneE164 = s.OwnerPhoneE164.MustGetNull() + } + if !s.ResidentOwned.IsUnset() { + t.ResidentOwned = s.ResidentOwned.MustGetNull() + } + if !s.ResidentPhoneE164.IsUnset() { + t.ResidentPhoneE164 = s.ResidentPhoneE164.MustGetNull() + } + if s.Tags.IsValue() { + t.Tags = s.Tags.MustGet() + } + if s.Version.IsValue() { + t.Version = s.Version.MustGet() + } +} + +func (s *SiteSetter) Apply(q *dialect.InsertQuery) { + q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) { + return Sites.BeforeInsertHooks.RunHooks(ctx, exec, s) + }) + + q.AppendValues(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) { + vals := make([]bob.Expression, 13) + if s.AddressID.IsValue() { + vals[0] = psql.Arg(s.AddressID.MustGet()) + } else { + vals[0] = psql.Raw("DEFAULT") + } + + if s.Created.IsValue() { + vals[1] = psql.Arg(s.Created.MustGet()) + } else { + vals[1] = psql.Raw("DEFAULT") + } + + if s.CreatorID.IsValue() { + vals[2] = psql.Arg(s.CreatorID.MustGet()) + } else { + vals[2] = psql.Raw("DEFAULT") + } + + if !s.FileID.IsUnset() { + vals[3] = psql.Arg(s.FileID.MustGetNull()) + } else { + vals[3] = psql.Raw("DEFAULT") + } + + if s.ID.IsValue() { + vals[4] = psql.Arg(s.ID.MustGet()) + } else { + vals[4] = psql.Raw("DEFAULT") + } + + if s.Notes.IsValue() { + vals[5] = psql.Arg(s.Notes.MustGet()) + } else { + vals[5] = psql.Raw("DEFAULT") + } + + if s.OrganizationID.IsValue() { + vals[6] = psql.Arg(s.OrganizationID.MustGet()) + } else { + vals[6] = psql.Raw("DEFAULT") + } + + if s.OwnerName.IsValue() { + vals[7] = psql.Arg(s.OwnerName.MustGet()) + } else { + vals[7] = psql.Raw("DEFAULT") + } + + if !s.OwnerPhoneE164.IsUnset() { + vals[8] = psql.Arg(s.OwnerPhoneE164.MustGetNull()) + } else { + vals[8] = psql.Raw("DEFAULT") + } + + if 
!s.ResidentOwned.IsUnset() { + vals[9] = psql.Arg(s.ResidentOwned.MustGetNull()) + } else { + vals[9] = psql.Raw("DEFAULT") + } + + if !s.ResidentPhoneE164.IsUnset() { + vals[10] = psql.Arg(s.ResidentPhoneE164.MustGetNull()) + } else { + vals[10] = psql.Raw("DEFAULT") + } + + if s.Tags.IsValue() { + vals[11] = psql.Arg(s.Tags.MustGet()) + } else { + vals[11] = psql.Raw("DEFAULT") + } + + if s.Version.IsValue() { + vals[12] = psql.Arg(s.Version.MustGet()) + } else { + vals[12] = psql.Raw("DEFAULT") + } + + return bob.ExpressSlice(ctx, w, d, start, vals, "", ", ", "") + })) +} + +func (s SiteSetter) UpdateMod() bob.Mod[*dialect.UpdateQuery] { + return um.Set(s.Expressions()...) +} + +func (s SiteSetter) Expressions(prefix ...string) []bob.Expression { + exprs := make([]bob.Expression, 0, 13) + + if s.AddressID.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "address_id")...), + psql.Arg(s.AddressID), + }}) + } + + if s.Created.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "created")...), + psql.Arg(s.Created), + }}) + } + + if s.CreatorID.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "creator_id")...), + psql.Arg(s.CreatorID), + }}) + } + + if !s.FileID.IsUnset() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "file_id")...), + psql.Arg(s.FileID), + }}) + } + + if s.ID.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "id")...), + psql.Arg(s.ID), + }}) + } + + if s.Notes.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "notes")...), + psql.Arg(s.Notes), + }}) + } + + if s.OrganizationID.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, 
"organization_id")...), + psql.Arg(s.OrganizationID), + }}) + } + + if s.OwnerName.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "owner_name")...), + psql.Arg(s.OwnerName), + }}) + } + + if !s.OwnerPhoneE164.IsUnset() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "owner_phone_e164")...), + psql.Arg(s.OwnerPhoneE164), + }}) + } + + if !s.ResidentOwned.IsUnset() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "resident_owned")...), + psql.Arg(s.ResidentOwned), + }}) + } + + if !s.ResidentPhoneE164.IsUnset() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "resident_phone_e164")...), + psql.Arg(s.ResidentPhoneE164), + }}) + } + + if s.Tags.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "tags")...), + psql.Arg(s.Tags), + }}) + } + + if s.Version.IsValue() { + exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{ + psql.Quote(append(prefix, "version")...), + psql.Arg(s.Version), + }}) + } + + return exprs +} + +// FindSite retrieves a single record by primary key +// If cols is empty Find will return all columns. 
+func FindSite(ctx context.Context, exec bob.Executor, IDPK int32, VersionPK int32, cols ...string) (*Site, error) { + if len(cols) == 0 { + return Sites.Query( + sm.Where(Sites.Columns.ID.EQ(psql.Arg(IDPK))), + sm.Where(Sites.Columns.Version.EQ(psql.Arg(VersionPK))), + ).One(ctx, exec) + } + + return Sites.Query( + sm.Where(Sites.Columns.ID.EQ(psql.Arg(IDPK))), + sm.Where(Sites.Columns.Version.EQ(psql.Arg(VersionPK))), + sm.Columns(Sites.Columns.Only(cols...)), + ).One(ctx, exec) +} + +// SiteExists checks the presence of a single record by primary key +func SiteExists(ctx context.Context, exec bob.Executor, IDPK int32, VersionPK int32) (bool, error) { + return Sites.Query( + sm.Where(Sites.Columns.ID.EQ(psql.Arg(IDPK))), + sm.Where(Sites.Columns.Version.EQ(psql.Arg(VersionPK))), + ).Exists(ctx, exec) +} + +// AfterQueryHook is called after Site is retrieved from the database +func (o *Site) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error { + var err error + + switch queryType { + case bob.QueryTypeSelect: + ctx, err = Sites.AfterSelectHooks.RunHooks(ctx, exec, SiteSlice{o}) + case bob.QueryTypeInsert: + ctx, err = Sites.AfterInsertHooks.RunHooks(ctx, exec, SiteSlice{o}) + case bob.QueryTypeUpdate: + ctx, err = Sites.AfterUpdateHooks.RunHooks(ctx, exec, SiteSlice{o}) + case bob.QueryTypeDelete: + ctx, err = Sites.AfterDeleteHooks.RunHooks(ctx, exec, SiteSlice{o}) + } + + return err +} + +// primaryKeyVals returns the primary key values of the Site +func (o *Site) primaryKeyVals() bob.Expression { + return psql.ArgGroup( + o.ID, + o.Version, + ) +} + +func (o *Site) pkEQ() dialect.Expression { + return psql.Group(psql.Quote("site", "id"), psql.Quote("site", "version")).EQ(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) { + return o.primaryKeyVals().WriteSQL(ctx, w, d, start) + })) +} + +// Update uses an executor to update the Site +func (o *Site) Update(ctx 
context.Context, exec bob.Executor, s *SiteSetter) error { + v, err := Sites.Update(s.UpdateMod(), um.Where(o.pkEQ())).One(ctx, exec) + if err != nil { + return err + } + + o.R = v.R + *o = *v + + return nil +} + +// Delete deletes a single Site record with an executor +func (o *Site) Delete(ctx context.Context, exec bob.Executor) error { + _, err := Sites.Delete(dm.Where(o.pkEQ())).Exec(ctx, exec) + return err +} + +// Reload refreshes the Site using the executor +func (o *Site) Reload(ctx context.Context, exec bob.Executor) error { + o2, err := Sites.Query( + sm.Where(Sites.Columns.ID.EQ(psql.Arg(o.ID))), + sm.Where(Sites.Columns.Version.EQ(psql.Arg(o.Version))), + ).One(ctx, exec) + if err != nil { + return err + } + o2.R = o.R + *o = *o2 + + return nil +} + +// AfterQueryHook is called after SiteSlice is retrieved from the database +func (o SiteSlice) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error { + var err error + + switch queryType { + case bob.QueryTypeSelect: + ctx, err = Sites.AfterSelectHooks.RunHooks(ctx, exec, o) + case bob.QueryTypeInsert: + ctx, err = Sites.AfterInsertHooks.RunHooks(ctx, exec, o) + case bob.QueryTypeUpdate: + ctx, err = Sites.AfterUpdateHooks.RunHooks(ctx, exec, o) + case bob.QueryTypeDelete: + ctx, err = Sites.AfterDeleteHooks.RunHooks(ctx, exec, o) + } + + return err +} + +func (o SiteSlice) pkIN() dialect.Expression { + if len(o) == 0 { + return psql.Raw("NULL") + } + + return psql.Group(psql.Quote("site", "id"), psql.Quote("site", "version")).In(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) { + pkPairs := make([]bob.Expression, len(o)) + for i, row := range o { + pkPairs[i] = row.primaryKeyVals() + } + return bob.ExpressSlice(ctx, w, d, start, pkPairs, "", ", ", "") + })) +} + +// copyMatchingRows finds models in the given slice that have the same primary key +// then it first copies the existing relationships from the old model to 
the new model +// and then replaces the old model in the slice with the new model +func (o SiteSlice) copyMatchingRows(from ...*Site) { + for i, old := range o { + for _, new := range from { + if new.ID != old.ID { + continue + } + if new.Version != old.Version { + continue + } + new.R = old.R + o[i] = new + break + } + } +} + +// UpdateMod modifies an update query with "WHERE primary_key IN (o...)" +func (o SiteSlice) UpdateMod() bob.Mod[*dialect.UpdateQuery] { + return bob.ModFunc[*dialect.UpdateQuery](func(q *dialect.UpdateQuery) { + q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) { + return Sites.BeforeUpdateHooks.RunHooks(ctx, exec, o) + }) + + q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error { + var err error + switch retrieved := retrieved.(type) { + case *Site: + o.copyMatchingRows(retrieved) + case []*Site: + o.copyMatchingRows(retrieved...) + case SiteSlice: + o.copyMatchingRows(retrieved...) + default: + // If the retrieved value is not a Site or a slice of Site + // then run the AfterUpdateHooks on the slice + _, err = Sites.AfterUpdateHooks.RunHooks(ctx, exec, o) + } + + return err + })) + + q.AppendWhere(o.pkIN()) + }) +} + +// DeleteMod modifies an delete query with "WHERE primary_key IN (o...)" +func (o SiteSlice) DeleteMod() bob.Mod[*dialect.DeleteQuery] { + return bob.ModFunc[*dialect.DeleteQuery](func(q *dialect.DeleteQuery) { + q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) { + return Sites.BeforeDeleteHooks.RunHooks(ctx, exec, o) + }) + + q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error { + var err error + switch retrieved := retrieved.(type) { + case *Site: + o.copyMatchingRows(retrieved) + case []*Site: + o.copyMatchingRows(retrieved...) + case SiteSlice: + o.copyMatchingRows(retrieved...) 
+ default: + // If the retrieved value is not a Site or a slice of Site + // then run the AfterDeleteHooks on the slice + _, err = Sites.AfterDeleteHooks.RunHooks(ctx, exec, o) + } + + return err + })) + + q.AppendWhere(o.pkIN()) + }) +} + +func (o SiteSlice) UpdateAll(ctx context.Context, exec bob.Executor, vals SiteSetter) error { + if len(o) == 0 { + return nil + } + + _, err := Sites.Update(vals.UpdateMod(), o.UpdateMod()).All(ctx, exec) + return err +} + +func (o SiteSlice) DeleteAll(ctx context.Context, exec bob.Executor) error { + if len(o) == 0 { + return nil + } + + _, err := Sites.Delete(o.DeleteMod()).Exec(ctx, exec) + return err +} + +func (o SiteSlice) ReloadAll(ctx context.Context, exec bob.Executor) error { + if len(o) == 0 { + return nil + } + + o2, err := Sites.Query(sm.Where(o.pkIN())).All(ctx, exec) + if err != nil { + return err + } + + o.copyMatchingRows(o2...) + + return nil +} + +// Address starts a query for related objects on address +func (o *Site) Address(mods ...bob.Mod[*dialect.SelectQuery]) AddressesQuery { + return Addresses.Query(append(mods, + sm.Where(Addresses.Columns.ID.EQ(psql.Arg(o.AddressID))), + )...) +} + +func (os SiteSlice) Address(mods ...bob.Mod[*dialect.SelectQuery]) AddressesQuery { + pkAddressID := make(pgtypes.Array[int32], 0, len(os)) + for _, o := range os { + if o == nil { + continue + } + pkAddressID = append(pkAddressID, o.AddressID) + } + PKArgExpr := psql.Select(sm.Columns( + psql.F("unnest", psql.Cast(psql.Arg(pkAddressID), "integer[]")), + )) + + return Addresses.Query(append(mods, + sm.Where(psql.Group(Addresses.Columns.ID).OP("IN", PKArgExpr)), + )...) +} + +// CreatorUser starts a query for related objects on user_ +func (o *Site) CreatorUser(mods ...bob.Mod[*dialect.SelectQuery]) UsersQuery { + return Users.Query(append(mods, + sm.Where(Users.Columns.ID.EQ(psql.Arg(o.CreatorID))), + )...) 
+} + +func (os SiteSlice) CreatorUser(mods ...bob.Mod[*dialect.SelectQuery]) UsersQuery { + pkCreatorID := make(pgtypes.Array[int32], 0, len(os)) + for _, o := range os { + if o == nil { + continue + } + pkCreatorID = append(pkCreatorID, o.CreatorID) + } + PKArgExpr := psql.Select(sm.Columns( + psql.F("unnest", psql.Cast(psql.Arg(pkCreatorID), "integer[]")), + )) + + return Users.Query(append(mods, + sm.Where(psql.Group(Users.Columns.ID).OP("IN", PKArgExpr)), + )...) +} + +// File starts a query for related objects on fileupload.file +func (o *Site) File(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadFilesQuery { + return FileuploadFiles.Query(append(mods, + sm.Where(FileuploadFiles.Columns.ID.EQ(psql.Arg(o.FileID))), + )...) +} + +func (os SiteSlice) File(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadFilesQuery { + pkFileID := make(pgtypes.Array[null.Val[int32]], 0, len(os)) + for _, o := range os { + if o == nil { + continue + } + pkFileID = append(pkFileID, o.FileID) + } + PKArgExpr := psql.Select(sm.Columns( + psql.F("unnest", psql.Cast(psql.Arg(pkFileID), "integer[]")), + )) + + return FileuploadFiles.Query(append(mods, + sm.Where(psql.Group(FileuploadFiles.Columns.ID).OP("IN", PKArgExpr)), + )...) 
+} + +func attachSiteAddress0(ctx context.Context, exec bob.Executor, count int, site0 *Site, address1 *Address) (*Site, error) { + setter := &SiteSetter{ + AddressID: omit.From(address1.ID), + } + + err := site0.Update(ctx, exec, setter) + if err != nil { + return nil, fmt.Errorf("attachSiteAddress0: %w", err) + } + + return site0, nil +} + +func (site0 *Site) InsertAddress(ctx context.Context, exec bob.Executor, related *AddressSetter) error { + var err error + + address1, err := Addresses.Insert(related).One(ctx, exec) + if err != nil { + return fmt.Errorf("inserting related objects: %w", err) + } + + _, err = attachSiteAddress0(ctx, exec, 1, site0, address1) + if err != nil { + return err + } + + site0.R.Address = address1 + + address1.R.Site = site0 + + return nil +} + +func (site0 *Site) AttachAddress(ctx context.Context, exec bob.Executor, address1 *Address) error { + var err error + + _, err = attachSiteAddress0(ctx, exec, 1, site0, address1) + if err != nil { + return err + } + + site0.R.Address = address1 + + address1.R.Site = site0 + + return nil +} + +func attachSiteCreatorUser0(ctx context.Context, exec bob.Executor, count int, site0 *Site, user1 *User) (*Site, error) { + setter := &SiteSetter{ + CreatorID: omit.From(user1.ID), + } + + err := site0.Update(ctx, exec, setter) + if err != nil { + return nil, fmt.Errorf("attachSiteCreatorUser0: %w", err) + } + + return site0, nil +} + +func (site0 *Site) InsertCreatorUser(ctx context.Context, exec bob.Executor, related *UserSetter) error { + var err error + + user1, err := Users.Insert(related).One(ctx, exec) + if err != nil { + return fmt.Errorf("inserting related objects: %w", err) + } + + _, err = attachSiteCreatorUser0(ctx, exec, 1, site0, user1) + if err != nil { + return err + } + + site0.R.CreatorUser = user1 + + user1.R.CreatorSites = append(user1.R.CreatorSites, site0) + + return nil +} + +func (site0 *Site) AttachCreatorUser(ctx context.Context, exec bob.Executor, user1 *User) error { + var err 
error + + _, err = attachSiteCreatorUser0(ctx, exec, 1, site0, user1) + if err != nil { + return err + } + + site0.R.CreatorUser = user1 + + user1.R.CreatorSites = append(user1.R.CreatorSites, site0) + + return nil +} + +func attachSiteFile0(ctx context.Context, exec bob.Executor, count int, site0 *Site, fileuploadFile1 *FileuploadFile) (*Site, error) { + setter := &SiteSetter{ + FileID: omitnull.From(fileuploadFile1.ID), + } + + err := site0.Update(ctx, exec, setter) + if err != nil { + return nil, fmt.Errorf("attachSiteFile0: %w", err) + } + + return site0, nil +} + +func (site0 *Site) InsertFile(ctx context.Context, exec bob.Executor, related *FileuploadFileSetter) error { + var err error + + fileuploadFile1, err := FileuploadFiles.Insert(related).One(ctx, exec) + if err != nil { + return fmt.Errorf("inserting related objects: %w", err) + } + + _, err = attachSiteFile0(ctx, exec, 1, site0, fileuploadFile1) + if err != nil { + return err + } + + site0.R.File = fileuploadFile1 + + fileuploadFile1.R.Sites = append(fileuploadFile1.R.Sites, site0) + + return nil +} + +func (site0 *Site) AttachFile(ctx context.Context, exec bob.Executor, fileuploadFile1 *FileuploadFile) error { + var err error + + _, err = attachSiteFile0(ctx, exec, 1, site0, fileuploadFile1) + if err != nil { + return err + } + + site0.R.File = fileuploadFile1 + + fileuploadFile1.R.Sites = append(fileuploadFile1.R.Sites, site0) + + return nil +} + +type siteWhere[Q psql.Filterable] struct { + AddressID psql.WhereMod[Q, int32] + Created psql.WhereMod[Q, time.Time] + CreatorID psql.WhereMod[Q, int32] + FileID psql.WhereNullMod[Q, int32] + ID psql.WhereMod[Q, int32] + Notes psql.WhereMod[Q, string] + OrganizationID psql.WhereMod[Q, int32] + OwnerName psql.WhereMod[Q, string] + OwnerPhoneE164 psql.WhereNullMod[Q, string] + ResidentOwned psql.WhereNullMod[Q, bool] + ResidentPhoneE164 psql.WhereNullMod[Q, string] + Tags psql.WhereMod[Q, pgtypes.HStore] + Version psql.WhereMod[Q, int32] +} + +func 
(siteWhere[Q]) AliasedAs(alias string) siteWhere[Q] { + return buildSiteWhere[Q](buildSiteColumns(alias)) +} + +func buildSiteWhere[Q psql.Filterable](cols siteColumns) siteWhere[Q] { + return siteWhere[Q]{ + AddressID: psql.Where[Q, int32](cols.AddressID), + Created: psql.Where[Q, time.Time](cols.Created), + CreatorID: psql.Where[Q, int32](cols.CreatorID), + FileID: psql.WhereNull[Q, int32](cols.FileID), + ID: psql.Where[Q, int32](cols.ID), + Notes: psql.Where[Q, string](cols.Notes), + OrganizationID: psql.Where[Q, int32](cols.OrganizationID), + OwnerName: psql.Where[Q, string](cols.OwnerName), + OwnerPhoneE164: psql.WhereNull[Q, string](cols.OwnerPhoneE164), + ResidentOwned: psql.WhereNull[Q, bool](cols.ResidentOwned), + ResidentPhoneE164: psql.WhereNull[Q, string](cols.ResidentPhoneE164), + Tags: psql.Where[Q, pgtypes.HStore](cols.Tags), + Version: psql.Where[Q, int32](cols.Version), + } +} + +func (o *Site) Preload(name string, retrieved any) error { + if o == nil { + return nil + } + + switch name { + case "Address": + rel, ok := retrieved.(*Address) + if !ok { + return fmt.Errorf("site cannot load %T as %q", retrieved, name) + } + + o.R.Address = rel + + if rel != nil { + rel.R.Site = o + } + return nil + case "CreatorUser": + rel, ok := retrieved.(*User) + if !ok { + return fmt.Errorf("site cannot load %T as %q", retrieved, name) + } + + o.R.CreatorUser = rel + + if rel != nil { + rel.R.CreatorSites = SiteSlice{o} + } + return nil + case "File": + rel, ok := retrieved.(*FileuploadFile) + if !ok { + return fmt.Errorf("site cannot load %T as %q", retrieved, name) + } + + o.R.File = rel + + if rel != nil { + rel.R.Sites = SiteSlice{o} + } + return nil + default: + return fmt.Errorf("site has no relationship %q", name) + } +} + +type sitePreloader struct { + Address func(...psql.PreloadOption) psql.Preloader + CreatorUser func(...psql.PreloadOption) psql.Preloader + File func(...psql.PreloadOption) psql.Preloader +} + +func buildSitePreloader() sitePreloader { + 
return sitePreloader{ + Address: func(opts ...psql.PreloadOption) psql.Preloader { + return psql.Preload[*Address, AddressSlice](psql.PreloadRel{ + Name: "Address", + Sides: []psql.PreloadSide{ + { + From: Sites, + To: Addresses, + FromColumns: []string{"address_id"}, + ToColumns: []string{"id"}, + }, + }, + }, Addresses.Columns.Names(), opts...) + }, + CreatorUser: func(opts ...psql.PreloadOption) psql.Preloader { + return psql.Preload[*User, UserSlice](psql.PreloadRel{ + Name: "CreatorUser", + Sides: []psql.PreloadSide{ + { + From: Sites, + To: Users, + FromColumns: []string{"creator_id"}, + ToColumns: []string{"id"}, + }, + }, + }, Users.Columns.Names(), opts...) + }, + File: func(opts ...psql.PreloadOption) psql.Preloader { + return psql.Preload[*FileuploadFile, FileuploadFileSlice](psql.PreloadRel{ + Name: "File", + Sides: []psql.PreloadSide{ + { + From: Sites, + To: FileuploadFiles, + FromColumns: []string{"file_id"}, + ToColumns: []string{"id"}, + }, + }, + }, FileuploadFiles.Columns.Names(), opts...) + }, + } +} + +type siteThenLoader[Q orm.Loadable] struct { + Address func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] + CreatorUser func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] + File func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] +} + +func buildSiteThenLoader[Q orm.Loadable]() siteThenLoader[Q] { + type AddressLoadInterface interface { + LoadAddress(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error + } + type CreatorUserLoadInterface interface { + LoadCreatorUser(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error + } + type FileLoadInterface interface { + LoadFile(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error + } + + return siteThenLoader[Q]{ + Address: thenLoadBuilder[Q]( + "Address", + func(ctx context.Context, exec bob.Executor, retrieved AddressLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { + return retrieved.LoadAddress(ctx, exec, mods...) 
+ }, + ), + CreatorUser: thenLoadBuilder[Q]( + "CreatorUser", + func(ctx context.Context, exec bob.Executor, retrieved CreatorUserLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { + return retrieved.LoadCreatorUser(ctx, exec, mods...) + }, + ), + File: thenLoadBuilder[Q]( + "File", + func(ctx context.Context, exec bob.Executor, retrieved FileLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { + return retrieved.LoadFile(ctx, exec, mods...) + }, + ), + } +} + +// LoadAddress loads the site's Address into the .R struct +func (o *Site) LoadAddress(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if o == nil { + return nil + } + + // Reset the relationship + o.R.Address = nil + + related, err := o.Address(mods...).One(ctx, exec) + if err != nil { + return err + } + + related.R.Site = o + + o.R.Address = related + return nil +} + +// LoadAddress loads the site's Address into the .R struct +func (os SiteSlice) LoadAddress(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if len(os) == 0 { + return nil + } + + addresses, err := os.Address(mods...).All(ctx, exec) + if err != nil { + return err + } + + for _, o := range os { + if o == nil { + continue + } + + for _, rel := range addresses { + + if !(o.AddressID == rel.ID) { + continue + } + + rel.R.Site = o + + o.R.Address = rel + break + } + } + + return nil +} + +// LoadCreatorUser loads the site's CreatorUser into the .R struct +func (o *Site) LoadCreatorUser(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if o == nil { + return nil + } + + // Reset the relationship + o.R.CreatorUser = nil + + related, err := o.CreatorUser(mods...).One(ctx, exec) + if err != nil { + return err + } + + related.R.CreatorSites = SiteSlice{o} + + o.R.CreatorUser = related + return nil +} + +// LoadCreatorUser loads the site's CreatorUser into the .R struct +func (os SiteSlice) LoadCreatorUser(ctx 
context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if len(os) == 0 { + return nil + } + + users, err := os.CreatorUser(mods...).All(ctx, exec) + if err != nil { + return err + } + + for _, o := range os { + if o == nil { + continue + } + + for _, rel := range users { + + if !(o.CreatorID == rel.ID) { + continue + } + + rel.R.CreatorSites = append(rel.R.CreatorSites, o) + + o.R.CreatorUser = rel + break + } + } + + return nil +} + +// LoadFile loads the site's File into the .R struct +func (o *Site) LoadFile(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if o == nil { + return nil + } + + // Reset the relationship + o.R.File = nil + + related, err := o.File(mods...).One(ctx, exec) + if err != nil { + return err + } + + related.R.Sites = SiteSlice{o} + + o.R.File = related + return nil +} + +// LoadFile loads the site's File into the .R struct +func (os SiteSlice) LoadFile(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if len(os) == 0 { + return nil + } + + fileuploadFiles, err := os.File(mods...).All(ctx, exec) + if err != nil { + return err + } + + for _, o := range os { + if o == nil { + continue + } + + for _, rel := range fileuploadFiles { + if !o.FileID.IsValue() { + continue + } + + if !(o.FileID.IsValue() && o.FileID.MustGet() == rel.ID) { + continue + } + + rel.R.Sites = append(rel.R.Sites, o) + + o.R.File = rel + break + } + } + + return nil +} + +type siteJoins[Q dialect.Joinable] struct { + typ string + Address modAs[Q, addressColumns] + CreatorUser modAs[Q, userColumns] + File modAs[Q, fileuploadFileColumns] +} + +func (j siteJoins[Q]) aliasedAs(alias string) siteJoins[Q] { + return buildSiteJoins[Q](buildSiteColumns(alias), j.typ) +} + +func buildSiteJoins[Q dialect.Joinable](cols siteColumns, typ string) siteJoins[Q] { + return siteJoins[Q]{ + typ: typ, + Address: modAs[Q, addressColumns]{ + c: Addresses.Columns, + f: func(to 
addressColumns) bob.Mod[Q] { + mods := make(mods.QueryMods[Q], 0, 1) + + { + mods = append(mods, dialect.Join[Q](typ, Addresses.Name().As(to.Alias())).On( + to.ID.EQ(cols.AddressID), + )) + } + + return mods + }, + }, + CreatorUser: modAs[Q, userColumns]{ + c: Users.Columns, + f: func(to userColumns) bob.Mod[Q] { + mods := make(mods.QueryMods[Q], 0, 1) + + { + mods = append(mods, dialect.Join[Q](typ, Users.Name().As(to.Alias())).On( + to.ID.EQ(cols.CreatorID), + )) + } + + return mods + }, + }, + File: modAs[Q, fileuploadFileColumns]{ + c: FileuploadFiles.Columns, + f: func(to fileuploadFileColumns) bob.Mod[Q] { + mods := make(mods.QueryMods[Q], 0, 1) + + { + mods = append(mods, dialect.Join[Q](typ, FileuploadFiles.Name().As(to.Alias())).On( + to.ID.EQ(cols.FileID), + )) + } + + return mods + }, + }, + } +} diff --git a/db/models/user_.bob.go b/db/models/user_.bob.go index ba453061..a9452cb2 100644 --- a/db/models/user_.bob.go +++ b/db/models/user_.bob.go @@ -60,13 +60,15 @@ type UsersQuery = *psql.ViewQuery[*User, UserSlice] type userR struct { PublicUserUser ArcgisUserSlice // arcgis.user_.user__public_user_id_fkey CreatorFiles FileuploadFileSlice // fileupload.file.file_creator_id_fkey - CreatorPools FileuploadPoolSlice // fileupload.pool.pool_creator_id_fkey + FileuploadPool FileuploadPoolSlice // fileupload.pool.pool_creator_id_fkey CreatorNoteAudios NoteAudioSlice // note_audio.note_audio_creator_id_fkey DeletorNoteAudios NoteAudioSlice // note_audio.note_audio_deletor_id_fkey CreatorNoteImages NoteImageSlice // note_image.note_image_creator_id_fkey DeletorNoteImages NoteImageSlice // note_image.note_image_deletor_id_fkey UserNotifications NotificationSlice // notification.notification_user_id_fkey UserOauthTokens OauthTokenSlice // oauth_token.oauth_token_user_id_fkey + CreatorPools PoolSlice // pool.pool_creator_id_fkey + CreatorSites SiteSlice // site.site_creator_id_fkey Organization *Organization // user_.user__organization_id_fkey } @@ -684,14 +686,14 
@@ func (os UserSlice) CreatorFiles(mods ...bob.Mod[*dialect.SelectQuery]) Fileuplo )...) } -// CreatorPools starts a query for related objects on fileupload.pool -func (o *User) CreatorPools(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadPoolsQuery { +// FileuploadPool starts a query for related objects on fileupload.pool +func (o *User) FileuploadPool(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadPoolsQuery { return FileuploadPools.Query(append(mods, sm.Where(FileuploadPools.Columns.CreatorID.EQ(psql.Arg(o.ID))), )...) } -func (os UserSlice) CreatorPools(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadPoolsQuery { +func (os UserSlice) FileuploadPool(mods ...bob.Mod[*dialect.SelectQuery]) FileuploadPoolsQuery { pkID := make(pgtypes.Array[int32], 0, len(os)) for _, o := range os { if o == nil { @@ -852,6 +854,54 @@ func (os UserSlice) UserOauthTokens(mods ...bob.Mod[*dialect.SelectQuery]) Oauth )...) } +// CreatorPools starts a query for related objects on pool +func (o *User) CreatorPools(mods ...bob.Mod[*dialect.SelectQuery]) PoolsQuery { + return Pools.Query(append(mods, + sm.Where(Pools.Columns.CreatorID.EQ(psql.Arg(o.ID))), + )...) +} + +func (os UserSlice) CreatorPools(mods ...bob.Mod[*dialect.SelectQuery]) PoolsQuery { + pkID := make(pgtypes.Array[int32], 0, len(os)) + for _, o := range os { + if o == nil { + continue + } + pkID = append(pkID, o.ID) + } + PKArgExpr := psql.Select(sm.Columns( + psql.F("unnest", psql.Cast(psql.Arg(pkID), "integer[]")), + )) + + return Pools.Query(append(mods, + sm.Where(psql.Group(Pools.Columns.CreatorID).OP("IN", PKArgExpr)), + )...) +} + +// CreatorSites starts a query for related objects on site +func (o *User) CreatorSites(mods ...bob.Mod[*dialect.SelectQuery]) SitesQuery { + return Sites.Query(append(mods, + sm.Where(Sites.Columns.CreatorID.EQ(psql.Arg(o.ID))), + )...) 
+} + +func (os UserSlice) CreatorSites(mods ...bob.Mod[*dialect.SelectQuery]) SitesQuery { + pkID := make(pgtypes.Array[int32], 0, len(os)) + for _, o := range os { + if o == nil { + continue + } + pkID = append(pkID, o.ID) + } + PKArgExpr := psql.Select(sm.Columns( + psql.F("unnest", psql.Cast(psql.Arg(pkID), "integer[]")), + )) + + return Sites.Query(append(mods, + sm.Where(psql.Group(Sites.Columns.CreatorID).OP("IN", PKArgExpr)), + )...) +} + // Organization starts a query for related objects on organization func (o *User) Organization(mods ...bob.Mod[*dialect.SelectQuery]) OrganizationsQuery { return Organizations.Query(append(mods, @@ -1012,45 +1062,45 @@ func (user0 *User) AttachCreatorFiles(ctx context.Context, exec bob.Executor, re return nil } -func insertUserCreatorPools0(ctx context.Context, exec bob.Executor, fileuploadPools1 []*FileuploadPoolSetter, user0 *User) (FileuploadPoolSlice, error) { +func insertUserFileuploadPool0(ctx context.Context, exec bob.Executor, fileuploadPools1 []*FileuploadPoolSetter, user0 *User) (FileuploadPoolSlice, error) { for i := range fileuploadPools1 { fileuploadPools1[i].CreatorID = omit.From(user0.ID) } ret, err := FileuploadPools.Insert(bob.ToMods(fileuploadPools1...)).All(ctx, exec) if err != nil { - return ret, fmt.Errorf("insertUserCreatorPools0: %w", err) + return ret, fmt.Errorf("insertUserFileuploadPool0: %w", err) } return ret, nil } -func attachUserCreatorPools0(ctx context.Context, exec bob.Executor, count int, fileuploadPools1 FileuploadPoolSlice, user0 *User) (FileuploadPoolSlice, error) { +func attachUserFileuploadPool0(ctx context.Context, exec bob.Executor, count int, fileuploadPools1 FileuploadPoolSlice, user0 *User) (FileuploadPoolSlice, error) { setter := &FileuploadPoolSetter{ CreatorID: omit.From(user0.ID), } err := fileuploadPools1.UpdateAll(ctx, exec, *setter) if err != nil { - return nil, fmt.Errorf("attachUserCreatorPools0: %w", err) + return nil, fmt.Errorf("attachUserFileuploadPool0: %w", err) } 
return fileuploadPools1, nil } -func (user0 *User) InsertCreatorPools(ctx context.Context, exec bob.Executor, related ...*FileuploadPoolSetter) error { +func (user0 *User) InsertFileuploadPool(ctx context.Context, exec bob.Executor, related ...*FileuploadPoolSetter) error { if len(related) == 0 { return nil } var err error - fileuploadPools1, err := insertUserCreatorPools0(ctx, exec, related, user0) + fileuploadPools1, err := insertUserFileuploadPool0(ctx, exec, related, user0) if err != nil { return err } - user0.R.CreatorPools = append(user0.R.CreatorPools, fileuploadPools1...) + user0.R.FileuploadPool = append(user0.R.FileuploadPool, fileuploadPools1...) for _, rel := range fileuploadPools1 { rel.R.CreatorUser = user0 @@ -1058,7 +1108,7 @@ func (user0 *User) InsertCreatorPools(ctx context.Context, exec bob.Executor, re return nil } -func (user0 *User) AttachCreatorPools(ctx context.Context, exec bob.Executor, related ...*FileuploadPool) error { +func (user0 *User) AttachFileuploadPool(ctx context.Context, exec bob.Executor, related ...*FileuploadPool) error { if len(related) == 0 { return nil } @@ -1066,12 +1116,12 @@ func (user0 *User) AttachCreatorPools(ctx context.Context, exec bob.Executor, re var err error fileuploadPools1 := FileuploadPoolSlice(related) - _, err = attachUserCreatorPools0(ctx, exec, len(related), fileuploadPools1, user0) + _, err = attachUserFileuploadPool0(ctx, exec, len(related), fileuploadPools1, user0) if err != nil { return err } - user0.R.CreatorPools = append(user0.R.CreatorPools, fileuploadPools1...) + user0.R.FileuploadPool = append(user0.R.FileuploadPool, fileuploadPools1...) 
for _, rel := range related { rel.R.CreatorUser = user0 @@ -1488,6 +1538,142 @@ func (user0 *User) AttachUserOauthTokens(ctx context.Context, exec bob.Executor, return nil } +func insertUserCreatorPools0(ctx context.Context, exec bob.Executor, pools1 []*PoolSetter, user0 *User) (PoolSlice, error) { + for i := range pools1 { + pools1[i].CreatorID = omit.From(user0.ID) + } + + ret, err := Pools.Insert(bob.ToMods(pools1...)).All(ctx, exec) + if err != nil { + return ret, fmt.Errorf("insertUserCreatorPools0: %w", err) + } + + return ret, nil +} + +func attachUserCreatorPools0(ctx context.Context, exec bob.Executor, count int, pools1 PoolSlice, user0 *User) (PoolSlice, error) { + setter := &PoolSetter{ + CreatorID: omit.From(user0.ID), + } + + err := pools1.UpdateAll(ctx, exec, *setter) + if err != nil { + return nil, fmt.Errorf("attachUserCreatorPools0: %w", err) + } + + return pools1, nil +} + +func (user0 *User) InsertCreatorPools(ctx context.Context, exec bob.Executor, related ...*PoolSetter) error { + if len(related) == 0 { + return nil + } + + var err error + + pools1, err := insertUserCreatorPools0(ctx, exec, related, user0) + if err != nil { + return err + } + + user0.R.CreatorPools = append(user0.R.CreatorPools, pools1...) + + for _, rel := range pools1 { + rel.R.CreatorUser = user0 + } + return nil +} + +func (user0 *User) AttachCreatorPools(ctx context.Context, exec bob.Executor, related ...*Pool) error { + if len(related) == 0 { + return nil + } + + var err error + pools1 := PoolSlice(related) + + _, err = attachUserCreatorPools0(ctx, exec, len(related), pools1, user0) + if err != nil { + return err + } + + user0.R.CreatorPools = append(user0.R.CreatorPools, pools1...) 
+ + for _, rel := range related { + rel.R.CreatorUser = user0 + } + + return nil +} + +func insertUserCreatorSites0(ctx context.Context, exec bob.Executor, sites1 []*SiteSetter, user0 *User) (SiteSlice, error) { + for i := range sites1 { + sites1[i].CreatorID = omit.From(user0.ID) + } + + ret, err := Sites.Insert(bob.ToMods(sites1...)).All(ctx, exec) + if err != nil { + return ret, fmt.Errorf("insertUserCreatorSites0: %w", err) + } + + return ret, nil +} + +func attachUserCreatorSites0(ctx context.Context, exec bob.Executor, count int, sites1 SiteSlice, user0 *User) (SiteSlice, error) { + setter := &SiteSetter{ + CreatorID: omit.From(user0.ID), + } + + err := sites1.UpdateAll(ctx, exec, *setter) + if err != nil { + return nil, fmt.Errorf("attachUserCreatorSites0: %w", err) + } + + return sites1, nil +} + +func (user0 *User) InsertCreatorSites(ctx context.Context, exec bob.Executor, related ...*SiteSetter) error { + if len(related) == 0 { + return nil + } + + var err error + + sites1, err := insertUserCreatorSites0(ctx, exec, related, user0) + if err != nil { + return err + } + + user0.R.CreatorSites = append(user0.R.CreatorSites, sites1...) + + for _, rel := range sites1 { + rel.R.CreatorUser = user0 + } + return nil +} + +func (user0 *User) AttachCreatorSites(ctx context.Context, exec bob.Executor, related ...*Site) error { + if len(related) == 0 { + return nil + } + + var err error + sites1 := SiteSlice(related) + + _, err = attachUserCreatorSites0(ctx, exec, len(related), sites1, user0) + if err != nil { + return err + } + + user0.R.CreatorSites = append(user0.R.CreatorSites, sites1...) 
+ + for _, rel := range related { + rel.R.CreatorUser = user0 + } + + return nil +} + func attachUserOrganization0(ctx context.Context, exec bob.Executor, count int, user0 *User, organization1 *Organization) (*User, error) { setter := &UserSetter{ OrganizationID: omit.From(organization1.ID), @@ -1608,13 +1794,13 @@ func (o *User) Preload(name string, retrieved any) error { } } return nil - case "CreatorPools": + case "FileuploadPool": rels, ok := retrieved.(FileuploadPoolSlice) if !ok { return fmt.Errorf("user cannot load %T as %q", retrieved, name) } - o.R.CreatorPools = rels + o.R.FileuploadPool = rels for _, rel := range rels { if rel != nil { @@ -1706,6 +1892,34 @@ func (o *User) Preload(name string, retrieved any) error { } } return nil + case "CreatorPools": + rels, ok := retrieved.(PoolSlice) + if !ok { + return fmt.Errorf("user cannot load %T as %q", retrieved, name) + } + + o.R.CreatorPools = rels + + for _, rel := range rels { + if rel != nil { + rel.R.CreatorUser = o + } + } + return nil + case "CreatorSites": + rels, ok := retrieved.(SiteSlice) + if !ok { + return fmt.Errorf("user cannot load %T as %q", retrieved, name) + } + + o.R.CreatorSites = rels + + for _, rel := range rels { + if rel != nil { + rel.R.CreatorUser = o + } + } + return nil case "Organization": rel, ok := retrieved.(*Organization) if !ok { @@ -1748,13 +1962,15 @@ func buildUserPreloader() userPreloader { type userThenLoader[Q orm.Loadable] struct { PublicUserUser func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] CreatorFiles func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] - CreatorPools func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] + FileuploadPool func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] CreatorNoteAudios func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] DeletorNoteAudios func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] CreatorNoteImages func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] DeletorNoteImages func(...bob.Mod[*dialect.SelectQuery]) 
orm.Loader[Q] UserNotifications func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] UserOauthTokens func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] + CreatorPools func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] + CreatorSites func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] Organization func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] } @@ -1765,8 +1981,8 @@ func buildUserThenLoader[Q orm.Loadable]() userThenLoader[Q] { type CreatorFilesLoadInterface interface { LoadCreatorFiles(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error } - type CreatorPoolsLoadInterface interface { - LoadCreatorPools(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error + type FileuploadPoolLoadInterface interface { + LoadFileuploadPool(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error } type CreatorNoteAudiosLoadInterface interface { LoadCreatorNoteAudios(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error @@ -1786,6 +2002,12 @@ func buildUserThenLoader[Q orm.Loadable]() userThenLoader[Q] { type UserOauthTokensLoadInterface interface { LoadUserOauthTokens(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error } + type CreatorPoolsLoadInterface interface { + LoadCreatorPools(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error + } + type CreatorSitesLoadInterface interface { + LoadCreatorSites(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error + } type OrganizationLoadInterface interface { LoadOrganization(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error } @@ -1803,10 +2025,10 @@ func buildUserThenLoader[Q orm.Loadable]() userThenLoader[Q] { return retrieved.LoadCreatorFiles(ctx, exec, mods...) 
}, ), - CreatorPools: thenLoadBuilder[Q]( - "CreatorPools", - func(ctx context.Context, exec bob.Executor, retrieved CreatorPoolsLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { - return retrieved.LoadCreatorPools(ctx, exec, mods...) + FileuploadPool: thenLoadBuilder[Q]( + "FileuploadPool", + func(ctx context.Context, exec bob.Executor, retrieved FileuploadPoolLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { + return retrieved.LoadFileuploadPool(ctx, exec, mods...) }, ), CreatorNoteAudios: thenLoadBuilder[Q]( @@ -1845,6 +2067,18 @@ func buildUserThenLoader[Q orm.Loadable]() userThenLoader[Q] { return retrieved.LoadUserOauthTokens(ctx, exec, mods...) }, ), + CreatorPools: thenLoadBuilder[Q]( + "CreatorPools", + func(ctx context.Context, exec bob.Executor, retrieved CreatorPoolsLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { + return retrieved.LoadCreatorPools(ctx, exec, mods...) + }, + ), + CreatorSites: thenLoadBuilder[Q]( + "CreatorSites", + func(ctx context.Context, exec bob.Executor, retrieved CreatorSitesLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { + return retrieved.LoadCreatorSites(ctx, exec, mods...) 
+ }, + ), Organization: thenLoadBuilder[Q]( "Organization", func(ctx context.Context, exec bob.Executor, retrieved OrganizationLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { @@ -1976,16 +2210,16 @@ func (os UserSlice) LoadCreatorFiles(ctx context.Context, exec bob.Executor, mod return nil } -// LoadCreatorPools loads the user's CreatorPools into the .R struct -func (o *User) LoadCreatorPools(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { +// LoadFileuploadPool loads the user's FileuploadPool into the .R struct +func (o *User) LoadFileuploadPool(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { if o == nil { return nil } // Reset the relationship - o.R.CreatorPools = nil + o.R.FileuploadPool = nil - related, err := o.CreatorPools(mods...).All(ctx, exec) + related, err := o.FileuploadPool(mods...).All(ctx, exec) if err != nil { return err } @@ -1994,17 +2228,17 @@ func (o *User) LoadCreatorPools(ctx context.Context, exec bob.Executor, mods ... 
rel.R.CreatorUser = o } - o.R.CreatorPools = related + o.R.FileuploadPool = related return nil } -// LoadCreatorPools loads the user's CreatorPools into the .R struct -func (os UserSlice) LoadCreatorPools(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { +// LoadFileuploadPool loads the user's FileuploadPool into the .R struct +func (os UserSlice) LoadFileuploadPool(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { if len(os) == 0 { return nil } - fileuploadPools, err := os.CreatorPools(mods...).All(ctx, exec) + fileuploadPools, err := os.FileuploadPool(mods...).All(ctx, exec) if err != nil { return err } @@ -2014,7 +2248,7 @@ func (os UserSlice) LoadCreatorPools(ctx context.Context, exec bob.Executor, mod continue } - o.R.CreatorPools = nil + o.R.FileuploadPool = nil } for _, o := range os { @@ -2030,7 +2264,7 @@ func (os UserSlice) LoadCreatorPools(ctx context.Context, exec bob.Executor, mod rel.R.CreatorUser = o - o.R.CreatorPools = append(o.R.CreatorPools, rel) + o.R.FileuploadPool = append(o.R.FileuploadPool, rel) } } @@ -2409,6 +2643,128 @@ func (os UserSlice) LoadUserOauthTokens(ctx context.Context, exec bob.Executor, return nil } +// LoadCreatorPools loads the user's CreatorPools into the .R struct +func (o *User) LoadCreatorPools(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if o == nil { + return nil + } + + // Reset the relationship + o.R.CreatorPools = nil + + related, err := o.CreatorPools(mods...).All(ctx, exec) + if err != nil { + return err + } + + for _, rel := range related { + rel.R.CreatorUser = o + } + + o.R.CreatorPools = related + return nil +} + +// LoadCreatorPools loads the user's CreatorPools into the .R struct +func (os UserSlice) LoadCreatorPools(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if len(os) == 0 { + return nil + } + + pools, err := os.CreatorPools(mods...).All(ctx, 
exec) + if err != nil { + return err + } + + for _, o := range os { + if o == nil { + continue + } + + o.R.CreatorPools = nil + } + + for _, o := range os { + if o == nil { + continue + } + + for _, rel := range pools { + + if !(o.ID == rel.CreatorID) { + continue + } + + rel.R.CreatorUser = o + + o.R.CreatorPools = append(o.R.CreatorPools, rel) + } + } + + return nil +} + +// LoadCreatorSites loads the user's CreatorSites into the .R struct +func (o *User) LoadCreatorSites(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if o == nil { + return nil + } + + // Reset the relationship + o.R.CreatorSites = nil + + related, err := o.CreatorSites(mods...).All(ctx, exec) + if err != nil { + return err + } + + for _, rel := range related { + rel.R.CreatorUser = o + } + + o.R.CreatorSites = related + return nil +} + +// LoadCreatorSites loads the user's CreatorSites into the .R struct +func (os UserSlice) LoadCreatorSites(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if len(os) == 0 { + return nil + } + + sites, err := os.CreatorSites(mods...).All(ctx, exec) + if err != nil { + return err + } + + for _, o := range os { + if o == nil { + continue + } + + o.R.CreatorSites = nil + } + + for _, o := range os { + if o == nil { + continue + } + + for _, rel := range sites { + + if !(o.ID == rel.CreatorID) { + continue + } + + rel.R.CreatorUser = o + + o.R.CreatorSites = append(o.R.CreatorSites, rel) + } + } + + return nil +} + // LoadOrganization loads the user's Organization into the .R struct func (o *User) LoadOrganization(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { if o == nil { @@ -2465,13 +2821,15 @@ func (os UserSlice) LoadOrganization(ctx context.Context, exec bob.Executor, mod type userC struct { PublicUserUser *int64 CreatorFiles *int64 - CreatorPools *int64 + FileuploadPool *int64 CreatorNoteAudios *int64 DeletorNoteAudios *int64 
CreatorNoteImages *int64 DeletorNoteImages *int64 UserNotifications *int64 UserOauthTokens *int64 + CreatorPools *int64 + CreatorSites *int64 } // PreloadCount sets a count in the C struct by name @@ -2485,8 +2843,8 @@ func (o *User) PreloadCount(name string, count int64) error { o.C.PublicUserUser = &count case "CreatorFiles": o.C.CreatorFiles = &count - case "CreatorPools": - o.C.CreatorPools = &count + case "FileuploadPool": + o.C.FileuploadPool = &count case "CreatorNoteAudios": o.C.CreatorNoteAudios = &count case "DeletorNoteAudios": @@ -2499,6 +2857,10 @@ func (o *User) PreloadCount(name string, count int64) error { o.C.UserNotifications = &count case "UserOauthTokens": o.C.UserOauthTokens = &count + case "CreatorPools": + o.C.CreatorPools = &count + case "CreatorSites": + o.C.CreatorSites = &count } return nil } @@ -2506,13 +2868,15 @@ func (o *User) PreloadCount(name string, count int64) error { type userCountPreloader struct { PublicUserUser func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader CreatorFiles func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader - CreatorPools func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader + FileuploadPool func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader CreatorNoteAudios func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader DeletorNoteAudios func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader CreatorNoteImages func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader DeletorNoteImages func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader UserNotifications func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader UserOauthTokens func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader + CreatorPools func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader + CreatorSites func(...bob.Mod[*dialect.SelectQuery]) psql.Preloader } func buildUserCountPreloader() userCountPreloader { @@ -2551,8 +2915,8 @@ func buildUserCountPreloader() userCountPreloader { return psql.Group(psql.Select(subqueryMods...).Expression) }) }, - 
CreatorPools: func(mods ...bob.Mod[*dialect.SelectQuery]) psql.Preloader { - return countPreloader[*User]("CreatorPools", func(parent string) bob.Expression { + FileuploadPool: func(mods ...bob.Mod[*dialect.SelectQuery]) psql.Preloader { + return countPreloader[*User]("FileuploadPool", func(parent string) bob.Expression { // Build a correlated subquery: (SELECT COUNT(*) FROM related WHERE fk = parent.pk) if parent == "" { parent = Users.Alias() @@ -2670,19 +3034,55 @@ func buildUserCountPreloader() userCountPreloader { return psql.Group(psql.Select(subqueryMods...).Expression) }) }, + CreatorPools: func(mods ...bob.Mod[*dialect.SelectQuery]) psql.Preloader { + return countPreloader[*User]("CreatorPools", func(parent string) bob.Expression { + // Build a correlated subquery: (SELECT COUNT(*) FROM related WHERE fk = parent.pk) + if parent == "" { + parent = Users.Alias() + } + + subqueryMods := []bob.Mod[*dialect.SelectQuery]{ + sm.Columns(psql.Raw("count(*)")), + + sm.From(Pools.Name()), + sm.Where(psql.Quote(Pools.Alias(), "creator_id").EQ(psql.Quote(parent, "id"))), + } + subqueryMods = append(subqueryMods, mods...) + return psql.Group(psql.Select(subqueryMods...).Expression) + }) + }, + CreatorSites: func(mods ...bob.Mod[*dialect.SelectQuery]) psql.Preloader { + return countPreloader[*User]("CreatorSites", func(parent string) bob.Expression { + // Build a correlated subquery: (SELECT COUNT(*) FROM related WHERE fk = parent.pk) + if parent == "" { + parent = Users.Alias() + } + + subqueryMods := []bob.Mod[*dialect.SelectQuery]{ + sm.Columns(psql.Raw("count(*)")), + + sm.From(Sites.Name()), + sm.Where(psql.Quote(Sites.Alias(), "creator_id").EQ(psql.Quote(parent, "id"))), + } + subqueryMods = append(subqueryMods, mods...) 
+ return psql.Group(psql.Select(subqueryMods...).Expression) + }) + }, } } type userCountThenLoader[Q orm.Loadable] struct { PublicUserUser func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] CreatorFiles func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] - CreatorPools func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] + FileuploadPool func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] CreatorNoteAudios func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] DeletorNoteAudios func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] CreatorNoteImages func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] DeletorNoteImages func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] UserNotifications func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] UserOauthTokens func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] + CreatorPools func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] + CreatorSites func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q] } func buildUserCountThenLoader[Q orm.Loadable]() userCountThenLoader[Q] { @@ -2692,8 +3092,8 @@ func buildUserCountThenLoader[Q orm.Loadable]() userCountThenLoader[Q] { type CreatorFilesCountInterface interface { LoadCountCreatorFiles(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error } - type CreatorPoolsCountInterface interface { - LoadCountCreatorPools(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error + type FileuploadPoolCountInterface interface { + LoadCountFileuploadPool(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error } type CreatorNoteAudiosCountInterface interface { LoadCountCreatorNoteAudios(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error @@ -2713,6 +3113,12 @@ func buildUserCountThenLoader[Q orm.Loadable]() userCountThenLoader[Q] { type UserOauthTokensCountInterface interface { LoadCountUserOauthTokens(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error } + type CreatorPoolsCountInterface interface { + 
LoadCountCreatorPools(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error + } + type CreatorSitesCountInterface interface { + LoadCountCreatorSites(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error + } return userCountThenLoader[Q]{ PublicUserUser: countThenLoadBuilder[Q]( @@ -2727,10 +3133,10 @@ func buildUserCountThenLoader[Q orm.Loadable]() userCountThenLoader[Q] { return retrieved.LoadCountCreatorFiles(ctx, exec, mods...) }, ), - CreatorPools: countThenLoadBuilder[Q]( - "CreatorPools", - func(ctx context.Context, exec bob.Executor, retrieved CreatorPoolsCountInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { - return retrieved.LoadCountCreatorPools(ctx, exec, mods...) + FileuploadPool: countThenLoadBuilder[Q]( + "FileuploadPool", + func(ctx context.Context, exec bob.Executor, retrieved FileuploadPoolCountInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { + return retrieved.LoadCountFileuploadPool(ctx, exec, mods...) }, ), CreatorNoteAudios: countThenLoadBuilder[Q]( @@ -2769,6 +3175,18 @@ func buildUserCountThenLoader[Q orm.Loadable]() userCountThenLoader[Q] { return retrieved.LoadCountUserOauthTokens(ctx, exec, mods...) }, ), + CreatorPools: countThenLoadBuilder[Q]( + "CreatorPools", + func(ctx context.Context, exec bob.Executor, retrieved CreatorPoolsCountInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { + return retrieved.LoadCountCreatorPools(ctx, exec, mods...) + }, + ), + CreatorSites: countThenLoadBuilder[Q]( + "CreatorSites", + func(ctx context.Context, exec bob.Executor, retrieved CreatorSitesCountInterface, mods ...bob.Mod[*dialect.SelectQuery]) error { + return retrieved.LoadCountCreatorSites(ctx, exec, mods...) 
+ }, + ), } } @@ -2832,29 +3250,29 @@ func (os UserSlice) LoadCountCreatorFiles(ctx context.Context, exec bob.Executor return nil } -// LoadCountCreatorPools loads the count of CreatorPools into the C struct -func (o *User) LoadCountCreatorPools(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { +// LoadCountFileuploadPool loads the count of FileuploadPool into the C struct +func (o *User) LoadCountFileuploadPool(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { if o == nil { return nil } - count, err := o.CreatorPools(mods...).Count(ctx, exec) + count, err := o.FileuploadPool(mods...).Count(ctx, exec) if err != nil { return err } - o.C.CreatorPools = &count + o.C.FileuploadPool = &count return nil } -// LoadCountCreatorPools loads the count of CreatorPools for a slice -func (os UserSlice) LoadCountCreatorPools(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { +// LoadCountFileuploadPool loads the count of FileuploadPool for a slice +func (os UserSlice) LoadCountFileuploadPool(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { if len(os) == 0 { return nil } for _, o := range os { - if err := o.LoadCountCreatorPools(ctx, exec, mods...); err != nil { + if err := o.LoadCountFileuploadPool(ctx, exec, mods...); err != nil { return err } } @@ -3042,17 +3460,79 @@ func (os UserSlice) LoadCountUserOauthTokens(ctx context.Context, exec bob.Execu return nil } +// LoadCountCreatorPools loads the count of CreatorPools into the C struct +func (o *User) LoadCountCreatorPools(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if o == nil { + return nil + } + + count, err := o.CreatorPools(mods...).Count(ctx, exec) + if err != nil { + return err + } + + o.C.CreatorPools = &count + return nil +} + +// LoadCountCreatorPools loads the count of CreatorPools for a slice +func (os UserSlice) 
LoadCountCreatorPools(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if len(os) == 0 { + return nil + } + + for _, o := range os { + if err := o.LoadCountCreatorPools(ctx, exec, mods...); err != nil { + return err + } + } + + return nil +} + +// LoadCountCreatorSites loads the count of CreatorSites into the C struct +func (o *User) LoadCountCreatorSites(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if o == nil { + return nil + } + + count, err := o.CreatorSites(mods...).Count(ctx, exec) + if err != nil { + return err + } + + o.C.CreatorSites = &count + return nil +} + +// LoadCountCreatorSites loads the count of CreatorSites for a slice +func (os UserSlice) LoadCountCreatorSites(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error { + if len(os) == 0 { + return nil + } + + for _, o := range os { + if err := o.LoadCountCreatorSites(ctx, exec, mods...); err != nil { + return err + } + } + + return nil +} + type userJoins[Q dialect.Joinable] struct { typ string PublicUserUser modAs[Q, arcgisuserColumns] CreatorFiles modAs[Q, fileuploadFileColumns] - CreatorPools modAs[Q, fileuploadPoolColumns] + FileuploadPool modAs[Q, fileuploadPoolColumns] CreatorNoteAudios modAs[Q, noteAudioColumns] DeletorNoteAudios modAs[Q, noteAudioColumns] CreatorNoteImages modAs[Q, noteImageColumns] DeletorNoteImages modAs[Q, noteImageColumns] UserNotifications modAs[Q, notificationColumns] UserOauthTokens modAs[Q, oauthTokenColumns] + CreatorPools modAs[Q, poolColumns] + CreatorSites modAs[Q, siteColumns] Organization modAs[Q, organizationColumns] } @@ -3091,7 +3571,7 @@ func buildUserJoins[Q dialect.Joinable](cols userColumns, typ string) userJoins[ return mods }, }, - CreatorPools: modAs[Q, fileuploadPoolColumns]{ + FileuploadPool: modAs[Q, fileuploadPoolColumns]{ c: FileuploadPools.Columns, f: func(to fileuploadPoolColumns) bob.Mod[Q] { mods := make(mods.QueryMods[Q], 0, 1) 
@@ -3189,6 +3669,34 @@ func buildUserJoins[Q dialect.Joinable](cols userColumns, typ string) userJoins[ return mods }, }, + CreatorPools: modAs[Q, poolColumns]{ + c: Pools.Columns, + f: func(to poolColumns) bob.Mod[Q] { + mods := make(mods.QueryMods[Q], 0, 1) + + { + mods = append(mods, dialect.Join[Q](typ, Pools.Name().As(to.Alias())).On( + to.CreatorID.EQ(cols.ID), + )) + } + + return mods + }, + }, + CreatorSites: modAs[Q, siteColumns]{ + c: Sites.Columns, + f: func(to siteColumns) bob.Mod[Q] { + mods := make(mods.QueryMods[Q], 0, 1) + + { + mods = append(mods, dialect.Join[Q](typ, Sites.Name().As(to.Alias())).On( + to.CreatorID.EQ(cols.ID), + )) + } + + return mods + }, + }, Organization: modAs[Q, organizationColumns]{ c: Organizations.Columns, f: func(to organizationColumns) bob.Mod[Q] {