Port all of the arcgis schema to using jet
Have not tested anything at this point, it just compiles.
This commit is contained in:
parent
89ed2003fa
commit
bab3200b6c
52 changed files with 479 additions and 13585 deletions
|
|
@ -18,6 +18,7 @@ import (
|
|||
"github.com/jackc/pgx/v5/stdlib"
|
||||
"github.com/pressly/goose/v3"
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/stephenafamo/scan"
|
||||
)
|
||||
|
||||
//go:embed migrations/*.sql
|
||||
|
|
@ -33,13 +34,41 @@ var (
|
|||
pgOnce sync.Once
|
||||
)
|
||||
|
||||
func Execute[T any](ctx context.Context, stmt postgres.Statement) (*T, error) {
|
||||
func ExecuteNone(ctx context.Context, stmt postgres.Statement) error {
|
||||
query, args := stmt.Sql()
|
||||
|
||||
row, _ := PGInstance.PGXPool.Query(ctx, query, args...)
|
||||
data, err := pgx.CollectOneRow(row, pgx.RowToAddrOfStructByPos[T])
|
||||
_, err := PGInstance.PGXPool.Query(ctx, query, args...)
|
||||
return err
|
||||
}
|
||||
func ExecuteNoneTx(ctx context.Context, txn bob.Tx, stmt postgres.Statement) error {
|
||||
query, args := stmt.Sql()
|
||||
|
||||
return data, err
|
||||
_, err := txn.QueryContext(ctx, query, args...)
|
||||
return err
|
||||
}
|
||||
func ExecuteOne[T any](ctx context.Context, stmt postgres.Statement) (*T, error) {
|
||||
query, args := stmt.Sql()
|
||||
|
||||
row, err := PGInstance.PGXPool.Query(ctx, query, args...)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("execute query: %w", err)
|
||||
}
|
||||
return pgx.CollectOneRow(row, pgx.RowToAddrOfStructByPos[T])
|
||||
}
|
||||
func ExecuteOneTx[T any](ctx context.Context, txn bob.Tx, stmt postgres.Statement) (*T, error) {
|
||||
query, args := stmt.Sql()
|
||||
|
||||
result, err := scan.One(ctx, txn, scan.StructMapper[T](), query, args...)
|
||||
return &result, err
|
||||
}
|
||||
func ExecuteMany[T any](ctx context.Context, stmt postgres.Statement) ([]*T, error) {
|
||||
query, args := stmt.Sql()
|
||||
|
||||
rows, err := PGInstance.PGXPool.Query(ctx, query, args...)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("execute query: %w", err)
|
||||
}
|
||||
return pgx.CollectRows(rows, pgx.RowToAddrOfStructByPos[T])
|
||||
}
|
||||
func doMigrations(connection_string string) error {
|
||||
log.Debug().Str("dsn", connection_string).Msg("Connecting to database")
|
||||
|
|
|
|||
|
|
@ -1,17 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var ArcgisAccountErrors = &arcgisAccountErrors{
|
||||
ErrUniqueAccountPkey: &UniqueConstraintError{
|
||||
schema: "arcgis",
|
||||
table: "account",
|
||||
columns: []string{"id"},
|
||||
s: "account_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type arcgisAccountErrors struct {
|
||||
ErrUniqueAccountPkey *UniqueConstraintError
|
||||
}
|
||||
|
|
@ -1,17 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var ArcgisAddressMappingErrors = &arcgisAddressMappingErrors{
|
||||
ErrUniqueAddressMappingPkey: &UniqueConstraintError{
|
||||
schema: "arcgis",
|
||||
table: "address_mapping",
|
||||
columns: []string{"organization_id", "destination"},
|
||||
s: "address_mapping_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type arcgisAddressMappingErrors struct {
|
||||
ErrUniqueAddressMappingPkey *UniqueConstraintError
|
||||
}
|
||||
|
|
@ -1,17 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var ArcgisLayerErrors = &arcgisLayerErrors{
|
||||
ErrUniqueLayerPkey: &UniqueConstraintError{
|
||||
schema: "arcgis",
|
||||
table: "layer",
|
||||
columns: []string{"feature_service_item_id", "index_"},
|
||||
s: "layer_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type arcgisLayerErrors struct {
|
||||
ErrUniqueLayerPkey *UniqueConstraintError
|
||||
}
|
||||
|
|
@ -1,17 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var ArcgisLayerFieldErrors = &arcgisLayerFieldErrors{
|
||||
ErrUniqueLayerFieldPkey: &UniqueConstraintError{
|
||||
schema: "arcgis",
|
||||
table: "layer_field",
|
||||
columns: []string{"layer_feature_service_item_id", "layer_index", "name"},
|
||||
s: "layer_field_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type arcgisLayerFieldErrors struct {
|
||||
ErrUniqueLayerFieldPkey *UniqueConstraintError
|
||||
}
|
||||
|
|
@ -1,17 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var ArcgisOauthTokenErrors = &arcgisOauthTokenErrors{
|
||||
ErrUniqueOauthTokenPkey: &UniqueConstraintError{
|
||||
schema: "arcgis",
|
||||
table: "oauth_token",
|
||||
columns: []string{"id"},
|
||||
s: "oauth_token_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type arcgisOauthTokenErrors struct {
|
||||
ErrUniqueOauthTokenPkey *UniqueConstraintError
|
||||
}
|
||||
|
|
@ -1,17 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var ArcgisParcelMappingErrors = &arcgisParcelMappingErrors{
|
||||
ErrUniqueParcelMappingPkey: &UniqueConstraintError{
|
||||
schema: "arcgis",
|
||||
table: "parcel_mapping",
|
||||
columns: []string{"organization_id", "destination"},
|
||||
s: "parcel_mapping_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type arcgisParcelMappingErrors struct {
|
||||
ErrUniqueParcelMappingPkey *UniqueConstraintError
|
||||
}
|
||||
|
|
@ -1,17 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var ArcgisServiceFeatureErrors = &arcgisServiceFeatureErrors{
|
||||
ErrUniqueFeatureServicePkey: &UniqueConstraintError{
|
||||
schema: "arcgis",
|
||||
table: "service_feature",
|
||||
columns: []string{"item_id"},
|
||||
s: "feature_service_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type arcgisServiceFeatureErrors struct {
|
||||
ErrUniqueFeatureServicePkey *UniqueConstraintError
|
||||
}
|
||||
|
|
@ -1,17 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var ArcgisServiceMapErrors = &arcgisServiceMapErrors{
|
||||
ErrUniqueServiceMapPkey: &UniqueConstraintError{
|
||||
schema: "arcgis",
|
||||
table: "service_map",
|
||||
columns: []string{"arcgis_id"},
|
||||
s: "service_map_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type arcgisServiceMapErrors struct {
|
||||
ErrUniqueServiceMapPkey *UniqueConstraintError
|
||||
}
|
||||
|
|
@ -1,17 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var ArcgisUserErrors = &arcgisuserErrors{
|
||||
ErrUniqueUser_Pkey: &UniqueConstraintError{
|
||||
schema: "arcgis",
|
||||
table: "user_",
|
||||
columns: []string{"id"},
|
||||
s: "user__pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type arcgisuserErrors struct {
|
||||
ErrUniqueUser_Pkey *UniqueConstraintError
|
||||
}
|
||||
|
|
@ -1,17 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var ArcgisUserPrivilegeErrors = &arcgisUserPrivilegeErrors{
|
||||
ErrUniqueUserPrivilegePkey: &UniqueConstraintError{
|
||||
schema: "arcgis",
|
||||
table: "user_privilege",
|
||||
columns: []string{"user_id", "privilege"},
|
||||
s: "user_privilege_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type arcgisUserPrivilegeErrors struct {
|
||||
ErrUniqueUserPrivilegePkey *UniqueConstraintError
|
||||
}
|
||||
|
|
@ -1,177 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dbinfo
|
||||
|
||||
import "github.com/aarondl/opt/null"
|
||||
|
||||
var ArcgisAccounts = Table[
|
||||
arcgisAccountColumns,
|
||||
arcgisAccountIndexes,
|
||||
arcgisAccountForeignKeys,
|
||||
arcgisAccountUniques,
|
||||
arcgisAccountChecks,
|
||||
]{
|
||||
Schema: "arcgis",
|
||||
Name: "account",
|
||||
Columns: arcgisAccountColumns{
|
||||
ID: column{
|
||||
Name: "id",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Name: column{
|
||||
Name: "name",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
OrganizationID: column{
|
||||
Name: "organization_id",
|
||||
DBType: "integer",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
URLFeatures: column{
|
||||
Name: "url_features",
|
||||
DBType: "text",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
URLInsights: column{
|
||||
Name: "url_insights",
|
||||
DBType: "text",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
URLGeometry: column{
|
||||
Name: "url_geometry",
|
||||
DBType: "text",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
URLNotebooks: column{
|
||||
Name: "url_notebooks",
|
||||
DBType: "text",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
URLTiles: column{
|
||||
Name: "url_tiles",
|
||||
DBType: "text",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: arcgisAccountIndexes{
|
||||
AccountPkey: index{
|
||||
Type: "btree",
|
||||
Name: "account_pkey",
|
||||
Columns: []indexColumn{
|
||||
{
|
||||
Name: "id",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
},
|
||||
Unique: true,
|
||||
Comment: "",
|
||||
NullsFirst: []bool{false},
|
||||
NullsDistinct: false,
|
||||
Where: "",
|
||||
Include: []string{},
|
||||
},
|
||||
},
|
||||
PrimaryKey: &constraint{
|
||||
Name: "account_pkey",
|
||||
Columns: []string{"id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignKeys: arcgisAccountForeignKeys{
|
||||
ArcgisAccountAccountOrganizationIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "arcgis.account.account_organization_id_fkey",
|
||||
Columns: []string{"organization_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "organization",
|
||||
ForeignColumns: []string{"id"},
|
||||
},
|
||||
},
|
||||
|
||||
Comment: "",
|
||||
}
|
||||
|
||||
type arcgisAccountColumns struct {
|
||||
ID column
|
||||
Name column
|
||||
OrganizationID column
|
||||
URLFeatures column
|
||||
URLInsights column
|
||||
URLGeometry column
|
||||
URLNotebooks column
|
||||
URLTiles column
|
||||
}
|
||||
|
||||
func (c arcgisAccountColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.ID, c.Name, c.OrganizationID, c.URLFeatures, c.URLInsights, c.URLGeometry, c.URLNotebooks, c.URLTiles,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisAccountIndexes struct {
|
||||
AccountPkey index
|
||||
}
|
||||
|
||||
func (i arcgisAccountIndexes) AsSlice() []index {
|
||||
return []index{
|
||||
i.AccountPkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisAccountForeignKeys struct {
|
||||
ArcgisAccountAccountOrganizationIDFkey foreignKey
|
||||
}
|
||||
|
||||
func (f arcgisAccountForeignKeys) AsSlice() []foreignKey {
|
||||
return []foreignKey{
|
||||
f.ArcgisAccountAccountOrganizationIDFkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisAccountUniques struct{}
|
||||
|
||||
func (u arcgisAccountUniques) AsSlice() []constraint {
|
||||
return []constraint{}
|
||||
}
|
||||
|
||||
type arcgisAccountChecks struct{}
|
||||
|
||||
func (c arcgisAccountChecks) AsSlice() []check {
|
||||
return []check{}
|
||||
}
|
||||
|
|
@ -1,162 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dbinfo
|
||||
|
||||
import "github.com/aarondl/opt/null"
|
||||
|
||||
var ArcgisAddressMappings = Table[
|
||||
arcgisAddressMappingColumns,
|
||||
arcgisAddressMappingIndexes,
|
||||
arcgisAddressMappingForeignKeys,
|
||||
arcgisAddressMappingUniques,
|
||||
arcgisAddressMappingChecks,
|
||||
]{
|
||||
Schema: "arcgis",
|
||||
Name: "address_mapping",
|
||||
Columns: arcgisAddressMappingColumns{
|
||||
Destination: column{
|
||||
Name: "destination",
|
||||
DBType: "arcgis.mappingdestinationaddress",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
LayerFeatureServiceItemID: column{
|
||||
Name: "layer_feature_service_item_id",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
LayerIndex: column{
|
||||
Name: "layer_index",
|
||||
DBType: "integer",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
LayerFieldName: column{
|
||||
Name: "layer_field_name",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
OrganizationID: column{
|
||||
Name: "organization_id",
|
||||
DBType: "integer",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: arcgisAddressMappingIndexes{
|
||||
AddressMappingPkey: index{
|
||||
Type: "btree",
|
||||
Name: "address_mapping_pkey",
|
||||
Columns: []indexColumn{
|
||||
{
|
||||
Name: "organization_id",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
{
|
||||
Name: "destination",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
},
|
||||
Unique: true,
|
||||
Comment: "",
|
||||
NullsFirst: []bool{false, false},
|
||||
NullsDistinct: false,
|
||||
Where: "",
|
||||
Include: []string{},
|
||||
},
|
||||
},
|
||||
PrimaryKey: &constraint{
|
||||
Name: "address_mapping_pkey",
|
||||
Columns: []string{"organization_id", "destination"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignKeys: arcgisAddressMappingForeignKeys{
|
||||
ArcgisAddressMappingAddressMappingLayerFeatureServiceItemIDLayerIndexFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "arcgis.address_mapping.address_mapping_layer_feature_service_item_id_layer_index__fkey",
|
||||
Columns: []string{"layer_feature_service_item_id", "layer_index", "layer_field_name"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "arcgis.layer_field",
|
||||
ForeignColumns: []string{"layer_feature_service_item_id", "layer_index", "name"},
|
||||
},
|
||||
ArcgisAddressMappingAddressMappingOrganizationIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "arcgis.address_mapping.address_mapping_organization_id_fkey",
|
||||
Columns: []string{"organization_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "organization",
|
||||
ForeignColumns: []string{"id"},
|
||||
},
|
||||
},
|
||||
|
||||
Comment: "",
|
||||
}
|
||||
|
||||
type arcgisAddressMappingColumns struct {
|
||||
Destination column
|
||||
LayerFeatureServiceItemID column
|
||||
LayerIndex column
|
||||
LayerFieldName column
|
||||
OrganizationID column
|
||||
}
|
||||
|
||||
func (c arcgisAddressMappingColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.Destination, c.LayerFeatureServiceItemID, c.LayerIndex, c.LayerFieldName, c.OrganizationID,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisAddressMappingIndexes struct {
|
||||
AddressMappingPkey index
|
||||
}
|
||||
|
||||
func (i arcgisAddressMappingIndexes) AsSlice() []index {
|
||||
return []index{
|
||||
i.AddressMappingPkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisAddressMappingForeignKeys struct {
|
||||
ArcgisAddressMappingAddressMappingLayerFeatureServiceItemIDLayerIndexFkey foreignKey
|
||||
ArcgisAddressMappingAddressMappingOrganizationIDFkey foreignKey
|
||||
}
|
||||
|
||||
func (f arcgisAddressMappingForeignKeys) AsSlice() []foreignKey {
|
||||
return []foreignKey{
|
||||
f.ArcgisAddressMappingAddressMappingLayerFeatureServiceItemIDLayerIndexFkey, f.ArcgisAddressMappingAddressMappingOrganizationIDFkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisAddressMappingUniques struct{}
|
||||
|
||||
func (u arcgisAddressMappingUniques) AsSlice() []constraint {
|
||||
return []constraint{}
|
||||
}
|
||||
|
||||
type arcgisAddressMappingChecks struct{}
|
||||
|
||||
func (c arcgisAddressMappingChecks) AsSlice() []check {
|
||||
return []check{}
|
||||
}
|
||||
|
|
@ -1,132 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dbinfo
|
||||
|
||||
import "github.com/aarondl/opt/null"
|
||||
|
||||
var ArcgisLayers = Table[
|
||||
arcgisLayerColumns,
|
||||
arcgisLayerIndexes,
|
||||
arcgisLayerForeignKeys,
|
||||
arcgisLayerUniques,
|
||||
arcgisLayerChecks,
|
||||
]{
|
||||
Schema: "arcgis",
|
||||
Name: "layer",
|
||||
Columns: arcgisLayerColumns{
|
||||
Extent: column{
|
||||
Name: "extent",
|
||||
DBType: "box2d",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
FeatureServiceItemID: column{
|
||||
Name: "feature_service_item_id",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Index: column{
|
||||
Name: "index_",
|
||||
DBType: "integer",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: arcgisLayerIndexes{
|
||||
LayerPkey: index{
|
||||
Type: "btree",
|
||||
Name: "layer_pkey",
|
||||
Columns: []indexColumn{
|
||||
{
|
||||
Name: "feature_service_item_id",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
{
|
||||
Name: "index_",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
},
|
||||
Unique: true,
|
||||
Comment: "",
|
||||
NullsFirst: []bool{false, false},
|
||||
NullsDistinct: false,
|
||||
Where: "",
|
||||
Include: []string{},
|
||||
},
|
||||
},
|
||||
PrimaryKey: &constraint{
|
||||
Name: "layer_pkey",
|
||||
Columns: []string{"feature_service_item_id", "index_"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignKeys: arcgisLayerForeignKeys{
|
||||
ArcgisLayerLayerFeatureServiceItemIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "arcgis.layer.layer_feature_service_item_id_fkey",
|
||||
Columns: []string{"feature_service_item_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "arcgis.service_feature",
|
||||
ForeignColumns: []string{"item_id"},
|
||||
},
|
||||
},
|
||||
|
||||
Comment: "",
|
||||
}
|
||||
|
||||
type arcgisLayerColumns struct {
|
||||
Extent column
|
||||
FeatureServiceItemID column
|
||||
Index column
|
||||
}
|
||||
|
||||
func (c arcgisLayerColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.Extent, c.FeatureServiceItemID, c.Index,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisLayerIndexes struct {
|
||||
LayerPkey index
|
||||
}
|
||||
|
||||
func (i arcgisLayerIndexes) AsSlice() []index {
|
||||
return []index{
|
||||
i.LayerPkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisLayerForeignKeys struct {
|
||||
ArcgisLayerLayerFeatureServiceItemIDFkey foreignKey
|
||||
}
|
||||
|
||||
func (f arcgisLayerForeignKeys) AsSlice() []foreignKey {
|
||||
return []foreignKey{
|
||||
f.ArcgisLayerLayerFeatureServiceItemIDFkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisLayerUniques struct{}
|
||||
|
||||
func (u arcgisLayerUniques) AsSlice() []constraint {
|
||||
return []constraint{}
|
||||
}
|
||||
|
||||
type arcgisLayerChecks struct{}
|
||||
|
||||
func (c arcgisLayerChecks) AsSlice() []check {
|
||||
return []check{}
|
||||
}
|
||||
|
|
@ -1,147 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dbinfo
|
||||
|
||||
import "github.com/aarondl/opt/null"
|
||||
|
||||
var ArcgisLayerFields = Table[
|
||||
arcgisLayerFieldColumns,
|
||||
arcgisLayerFieldIndexes,
|
||||
arcgisLayerFieldForeignKeys,
|
||||
arcgisLayerFieldUniques,
|
||||
arcgisLayerFieldChecks,
|
||||
]{
|
||||
Schema: "arcgis",
|
||||
Name: "layer_field",
|
||||
Columns: arcgisLayerFieldColumns{
|
||||
LayerFeatureServiceItemID: column{
|
||||
Name: "layer_feature_service_item_id",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
LayerIndex: column{
|
||||
Name: "layer_index",
|
||||
DBType: "integer",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Name: column{
|
||||
Name: "name",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Type: column{
|
||||
Name: "type_",
|
||||
DBType: "arcgis.fieldtype",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: arcgisLayerFieldIndexes{
|
||||
LayerFieldPkey: index{
|
||||
Type: "btree",
|
||||
Name: "layer_field_pkey",
|
||||
Columns: []indexColumn{
|
||||
{
|
||||
Name: "layer_feature_service_item_id",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
{
|
||||
Name: "layer_index",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
{
|
||||
Name: "name",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
},
|
||||
Unique: true,
|
||||
Comment: "",
|
||||
NullsFirst: []bool{false, false, false},
|
||||
NullsDistinct: false,
|
||||
Where: "",
|
||||
Include: []string{},
|
||||
},
|
||||
},
|
||||
PrimaryKey: &constraint{
|
||||
Name: "layer_field_pkey",
|
||||
Columns: []string{"layer_feature_service_item_id", "layer_index", "name"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignKeys: arcgisLayerFieldForeignKeys{
|
||||
ArcgisLayerFieldLayerFieldLayerFeatureServiceItemIDLayerIndexFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "arcgis.layer_field.layer_field_layer_feature_service_item_id_layer_index_fkey",
|
||||
Columns: []string{"layer_feature_service_item_id", "layer_index"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "arcgis.layer",
|
||||
ForeignColumns: []string{"feature_service_item_id", "index_"},
|
||||
},
|
||||
},
|
||||
|
||||
Comment: "",
|
||||
}
|
||||
|
||||
type arcgisLayerFieldColumns struct {
|
||||
LayerFeatureServiceItemID column
|
||||
LayerIndex column
|
||||
Name column
|
||||
Type column
|
||||
}
|
||||
|
||||
func (c arcgisLayerFieldColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.LayerFeatureServiceItemID, c.LayerIndex, c.Name, c.Type,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisLayerFieldIndexes struct {
|
||||
LayerFieldPkey index
|
||||
}
|
||||
|
||||
func (i arcgisLayerFieldIndexes) AsSlice() []index {
|
||||
return []index{
|
||||
i.LayerFieldPkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisLayerFieldForeignKeys struct {
|
||||
ArcgisLayerFieldLayerFieldLayerFeatureServiceItemIDLayerIndexFkey foreignKey
|
||||
}
|
||||
|
||||
func (f arcgisLayerFieldForeignKeys) AsSlice() []foreignKey {
|
||||
return []foreignKey{
|
||||
f.ArcgisLayerFieldLayerFieldLayerFeatureServiceItemIDLayerIndexFkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisLayerFieldUniques struct{}
|
||||
|
||||
func (u arcgisLayerFieldUniques) AsSlice() []constraint {
|
||||
return []constraint{}
|
||||
}
|
||||
|
||||
type arcgisLayerFieldChecks struct{}
|
||||
|
||||
func (c arcgisLayerFieldChecks) AsSlice() []check {
|
||||
return []check{}
|
||||
}
|
||||
|
|
@ -1,227 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dbinfo
|
||||
|
||||
import "github.com/aarondl/opt/null"
|
||||
|
||||
var ArcgisOauthTokens = Table[
|
||||
arcgisOauthTokenColumns,
|
||||
arcgisOauthTokenIndexes,
|
||||
arcgisOauthTokenForeignKeys,
|
||||
arcgisOauthTokenUniques,
|
||||
arcgisOauthTokenChecks,
|
||||
]{
|
||||
Schema: "arcgis",
|
||||
Name: "oauth_token",
|
||||
Columns: arcgisOauthTokenColumns{
|
||||
AccessToken: column{
|
||||
Name: "access_token",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
AccessTokenExpires: column{
|
||||
Name: "access_token_expires",
|
||||
DBType: "timestamp without time zone",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
ArcgisAccountID: column{
|
||||
Name: "arcgis_account_id",
|
||||
DBType: "text",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
ArcgisID: column{
|
||||
Name: "arcgis_id",
|
||||
DBType: "text",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
ArcgisLicenseTypeID: column{
|
||||
Name: "arcgis_license_type_id",
|
||||
DBType: "text",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Created: column{
|
||||
Name: "created",
|
||||
DBType: "timestamp without time zone",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
ID: column{
|
||||
Name: "id",
|
||||
DBType: "integer",
|
||||
Default: "nextval('arcgis.oauth_token_id_seq'::regclass)",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
InvalidatedAt: column{
|
||||
Name: "invalidated_at",
|
||||
DBType: "timestamp without time zone",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
RefreshToken: column{
|
||||
Name: "refresh_token",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
RefreshTokenExpires: column{
|
||||
Name: "refresh_token_expires",
|
||||
DBType: "timestamp without time zone",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
UserID: column{
|
||||
Name: "user_id",
|
||||
DBType: "integer",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Username: column{
|
||||
Name: "username",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: arcgisOauthTokenIndexes{
|
||||
OauthTokenPkey: index{
|
||||
Type: "btree",
|
||||
Name: "oauth_token_pkey",
|
||||
Columns: []indexColumn{
|
||||
{
|
||||
Name: "id",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
},
|
||||
Unique: true,
|
||||
Comment: "",
|
||||
NullsFirst: []bool{false},
|
||||
NullsDistinct: false,
|
||||
Where: "",
|
||||
Include: []string{},
|
||||
},
|
||||
},
|
||||
PrimaryKey: &constraint{
|
||||
Name: "oauth_token_pkey",
|
||||
Columns: []string{"id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignKeys: arcgisOauthTokenForeignKeys{
|
||||
ArcgisOauthTokenOauthTokenArcgisAccountIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "arcgis.oauth_token.oauth_token_arcgis_account_id_fkey",
|
||||
Columns: []string{"arcgis_account_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "arcgis.account",
|
||||
ForeignColumns: []string{"id"},
|
||||
},
|
||||
ArcgisOauthTokenOauthTokenUserIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "arcgis.oauth_token.oauth_token_user_id_fkey",
|
||||
Columns: []string{"user_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "user_",
|
||||
ForeignColumns: []string{"id"},
|
||||
},
|
||||
},
|
||||
|
||||
Comment: "",
|
||||
}
|
||||
|
||||
type arcgisOauthTokenColumns struct {
|
||||
AccessToken column
|
||||
AccessTokenExpires column
|
||||
ArcgisAccountID column
|
||||
ArcgisID column
|
||||
ArcgisLicenseTypeID column
|
||||
Created column
|
||||
ID column
|
||||
InvalidatedAt column
|
||||
RefreshToken column
|
||||
RefreshTokenExpires column
|
||||
UserID column
|
||||
Username column
|
||||
}
|
||||
|
||||
func (c arcgisOauthTokenColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.AccessToken, c.AccessTokenExpires, c.ArcgisAccountID, c.ArcgisID, c.ArcgisLicenseTypeID, c.Created, c.ID, c.InvalidatedAt, c.RefreshToken, c.RefreshTokenExpires, c.UserID, c.Username,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisOauthTokenIndexes struct {
|
||||
OauthTokenPkey index
|
||||
}
|
||||
|
||||
func (i arcgisOauthTokenIndexes) AsSlice() []index {
|
||||
return []index{
|
||||
i.OauthTokenPkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisOauthTokenForeignKeys struct {
|
||||
ArcgisOauthTokenOauthTokenArcgisAccountIDFkey foreignKey
|
||||
ArcgisOauthTokenOauthTokenUserIDFkey foreignKey
|
||||
}
|
||||
|
||||
func (f arcgisOauthTokenForeignKeys) AsSlice() []foreignKey {
|
||||
return []foreignKey{
|
||||
f.ArcgisOauthTokenOauthTokenArcgisAccountIDFkey, f.ArcgisOauthTokenOauthTokenUserIDFkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisOauthTokenUniques struct{}
|
||||
|
||||
func (u arcgisOauthTokenUniques) AsSlice() []constraint {
|
||||
return []constraint{}
|
||||
}
|
||||
|
||||
type arcgisOauthTokenChecks struct{}
|
||||
|
||||
func (c arcgisOauthTokenChecks) AsSlice() []check {
|
||||
return []check{}
|
||||
}
|
||||
|
|
@ -1,162 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dbinfo
|
||||
|
||||
import "github.com/aarondl/opt/null"
|
||||
|
||||
var ArcgisParcelMappings = Table[
|
||||
arcgisParcelMappingColumns,
|
||||
arcgisParcelMappingIndexes,
|
||||
arcgisParcelMappingForeignKeys,
|
||||
arcgisParcelMappingUniques,
|
||||
arcgisParcelMappingChecks,
|
||||
]{
|
||||
Schema: "arcgis",
|
||||
Name: "parcel_mapping",
|
||||
Columns: arcgisParcelMappingColumns{
|
||||
Destination: column{
|
||||
Name: "destination",
|
||||
DBType: "arcgis.mappingdestinationparcel",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
LayerFeatureServiceItemID: column{
|
||||
Name: "layer_feature_service_item_id",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
LayerIndex: column{
|
||||
Name: "layer_index",
|
||||
DBType: "integer",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
LayerFieldName: column{
|
||||
Name: "layer_field_name",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
OrganizationID: column{
|
||||
Name: "organization_id",
|
||||
DBType: "integer",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: arcgisParcelMappingIndexes{
|
||||
ParcelMappingPkey: index{
|
||||
Type: "btree",
|
||||
Name: "parcel_mapping_pkey",
|
||||
Columns: []indexColumn{
|
||||
{
|
||||
Name: "organization_id",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
{
|
||||
Name: "destination",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
},
|
||||
Unique: true,
|
||||
Comment: "",
|
||||
NullsFirst: []bool{false, false},
|
||||
NullsDistinct: false,
|
||||
Where: "",
|
||||
Include: []string{},
|
||||
},
|
||||
},
|
||||
PrimaryKey: &constraint{
|
||||
Name: "parcel_mapping_pkey",
|
||||
Columns: []string{"organization_id", "destination"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignKeys: arcgisParcelMappingForeignKeys{
|
||||
ArcgisParcelMappingParcelMappingLayerFeatureServiceItemIDLayerIndexLFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "arcgis.parcel_mapping.parcel_mapping_layer_feature_service_item_id_layer_index_l_fkey",
|
||||
Columns: []string{"layer_feature_service_item_id", "layer_index", "layer_field_name"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "arcgis.layer_field",
|
||||
ForeignColumns: []string{"layer_feature_service_item_id", "layer_index", "name"},
|
||||
},
|
||||
ArcgisParcelMappingParcelMappingOrganizationIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "arcgis.parcel_mapping.parcel_mapping_organization_id_fkey",
|
||||
Columns: []string{"organization_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "organization",
|
||||
ForeignColumns: []string{"id"},
|
||||
},
|
||||
},
|
||||
|
||||
Comment: "",
|
||||
}
|
||||
|
||||
type arcgisParcelMappingColumns struct {
|
||||
Destination column
|
||||
LayerFeatureServiceItemID column
|
||||
LayerIndex column
|
||||
LayerFieldName column
|
||||
OrganizationID column
|
||||
}
|
||||
|
||||
func (c arcgisParcelMappingColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.Destination, c.LayerFeatureServiceItemID, c.LayerIndex, c.LayerFieldName, c.OrganizationID,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisParcelMappingIndexes struct {
|
||||
ParcelMappingPkey index
|
||||
}
|
||||
|
||||
func (i arcgisParcelMappingIndexes) AsSlice() []index {
|
||||
return []index{
|
||||
i.ParcelMappingPkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisParcelMappingForeignKeys struct {
|
||||
ArcgisParcelMappingParcelMappingLayerFeatureServiceItemIDLayerIndexLFkey foreignKey
|
||||
ArcgisParcelMappingParcelMappingOrganizationIDFkey foreignKey
|
||||
}
|
||||
|
||||
func (f arcgisParcelMappingForeignKeys) AsSlice() []foreignKey {
|
||||
return []foreignKey{
|
||||
f.ArcgisParcelMappingParcelMappingLayerFeatureServiceItemIDLayerIndexLFkey, f.ArcgisParcelMappingParcelMappingOrganizationIDFkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisParcelMappingUniques struct{}
|
||||
|
||||
func (u arcgisParcelMappingUniques) AsSlice() []constraint {
|
||||
return []constraint{}
|
||||
}
|
||||
|
||||
type arcgisParcelMappingChecks struct{}
|
||||
|
||||
func (c arcgisParcelMappingChecks) AsSlice() []check {
|
||||
return []check{}
|
||||
}
|
||||
|
|
@ -1,147 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dbinfo
|
||||
|
||||
import "github.com/aarondl/opt/null"
|
||||
|
||||
var ArcgisServiceFeatures = Table[
|
||||
arcgisServiceFeatureColumns,
|
||||
arcgisServiceFeatureIndexes,
|
||||
arcgisServiceFeatureForeignKeys,
|
||||
arcgisServiceFeatureUniques,
|
||||
arcgisServiceFeatureChecks,
|
||||
]{
|
||||
Schema: "arcgis",
|
||||
Name: "service_feature",
|
||||
Columns: arcgisServiceFeatureColumns{
|
||||
Extent: column{
|
||||
Name: "extent",
|
||||
DBType: "box2d",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
ItemID: column{
|
||||
Name: "item_id",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
SpatialReference: column{
|
||||
Name: "spatial_reference",
|
||||
DBType: "integer",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
URL: column{
|
||||
Name: "url",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
AccountID: column{
|
||||
Name: "account_id",
|
||||
DBType: "text",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: arcgisServiceFeatureIndexes{
|
||||
FeatureServicePkey: index{
|
||||
Type: "btree",
|
||||
Name: "feature_service_pkey",
|
||||
Columns: []indexColumn{
|
||||
{
|
||||
Name: "item_id",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
},
|
||||
Unique: true,
|
||||
Comment: "",
|
||||
NullsFirst: []bool{false},
|
||||
NullsDistinct: false,
|
||||
Where: "",
|
||||
Include: []string{},
|
||||
},
|
||||
},
|
||||
PrimaryKey: &constraint{
|
||||
Name: "feature_service_pkey",
|
||||
Columns: []string{"item_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignKeys: arcgisServiceFeatureForeignKeys{
|
||||
ArcgisServiceFeatureServiceFeatureAccountIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "arcgis.service_feature.service_feature_account_id_fkey",
|
||||
Columns: []string{"account_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "arcgis.account",
|
||||
ForeignColumns: []string{"id"},
|
||||
},
|
||||
},
|
||||
|
||||
Comment: "",
|
||||
}
|
||||
|
||||
type arcgisServiceFeatureColumns struct {
|
||||
Extent column
|
||||
ItemID column
|
||||
SpatialReference column
|
||||
URL column
|
||||
AccountID column
|
||||
}
|
||||
|
||||
func (c arcgisServiceFeatureColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.Extent, c.ItemID, c.SpatialReference, c.URL, c.AccountID,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisServiceFeatureIndexes struct {
|
||||
FeatureServicePkey index
|
||||
}
|
||||
|
||||
func (i arcgisServiceFeatureIndexes) AsSlice() []index {
|
||||
return []index{
|
||||
i.FeatureServicePkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisServiceFeatureForeignKeys struct {
|
||||
ArcgisServiceFeatureServiceFeatureAccountIDFkey foreignKey
|
||||
}
|
||||
|
||||
func (f arcgisServiceFeatureForeignKeys) AsSlice() []foreignKey {
|
||||
return []foreignKey{
|
||||
f.ArcgisServiceFeatureServiceFeatureAccountIDFkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisServiceFeatureUniques struct{}
|
||||
|
||||
func (u arcgisServiceFeatureUniques) AsSlice() []constraint {
|
||||
return []constraint{}
|
||||
}
|
||||
|
||||
type arcgisServiceFeatureChecks struct{}
|
||||
|
||||
func (c arcgisServiceFeatureChecks) AsSlice() []check {
|
||||
return []check{}
|
||||
}
|
||||
|
|
@ -1,147 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dbinfo
|
||||
|
||||
import "github.com/aarondl/opt/null"
|
||||
|
||||
var ArcgisServiceMaps = Table[
|
||||
arcgisServiceMapColumns,
|
||||
arcgisServiceMapIndexes,
|
||||
arcgisServiceMapForeignKeys,
|
||||
arcgisServiceMapUniques,
|
||||
arcgisServiceMapChecks,
|
||||
]{
|
||||
Schema: "arcgis",
|
||||
Name: "service_map",
|
||||
Columns: arcgisServiceMapColumns{
|
||||
AccountID: column{
|
||||
Name: "account_id",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
ArcgisID: column{
|
||||
Name: "arcgis_id",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Name: column{
|
||||
Name: "name",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Title: column{
|
||||
Name: "title",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
URL: column{
|
||||
Name: "url",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: arcgisServiceMapIndexes{
|
||||
ServiceMapPkey: index{
|
||||
Type: "btree",
|
||||
Name: "service_map_pkey",
|
||||
Columns: []indexColumn{
|
||||
{
|
||||
Name: "arcgis_id",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
},
|
||||
Unique: true,
|
||||
Comment: "",
|
||||
NullsFirst: []bool{false},
|
||||
NullsDistinct: false,
|
||||
Where: "",
|
||||
Include: []string{},
|
||||
},
|
||||
},
|
||||
PrimaryKey: &constraint{
|
||||
Name: "service_map_pkey",
|
||||
Columns: []string{"arcgis_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignKeys: arcgisServiceMapForeignKeys{
|
||||
ArcgisServiceMapServiceMapAccountIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "arcgis.service_map.service_map_account_id_fkey",
|
||||
Columns: []string{"account_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "arcgis.account",
|
||||
ForeignColumns: []string{"id"},
|
||||
},
|
||||
},
|
||||
|
||||
Comment: "",
|
||||
}
|
||||
|
||||
type arcgisServiceMapColumns struct {
|
||||
AccountID column
|
||||
ArcgisID column
|
||||
Name column
|
||||
Title column
|
||||
URL column
|
||||
}
|
||||
|
||||
func (c arcgisServiceMapColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.AccountID, c.ArcgisID, c.Name, c.Title, c.URL,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisServiceMapIndexes struct {
|
||||
ServiceMapPkey index
|
||||
}
|
||||
|
||||
func (i arcgisServiceMapIndexes) AsSlice() []index {
|
||||
return []index{
|
||||
i.ServiceMapPkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisServiceMapForeignKeys struct {
|
||||
ArcgisServiceMapServiceMapAccountIDFkey foreignKey
|
||||
}
|
||||
|
||||
func (f arcgisServiceMapForeignKeys) AsSlice() []foreignKey {
|
||||
return []foreignKey{
|
||||
f.ArcgisServiceMapServiceMapAccountIDFkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisServiceMapUniques struct{}
|
||||
|
||||
func (u arcgisServiceMapUniques) AsSlice() []constraint {
|
||||
return []constraint{}
|
||||
}
|
||||
|
||||
type arcgisServiceMapChecks struct{}
|
||||
|
||||
func (c arcgisServiceMapChecks) AsSlice() []check {
|
||||
return []check{}
|
||||
}
|
||||
|
|
@ -1,237 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dbinfo
|
||||
|
||||
import "github.com/aarondl/opt/null"
|
||||
|
||||
var ArcgisUsers = Table[
|
||||
arcgisuserColumns,
|
||||
arcgisuserIndexes,
|
||||
arcgisuserForeignKeys,
|
||||
arcgisuserUniques,
|
||||
arcgisuserChecks,
|
||||
]{
|
||||
Schema: "arcgis",
|
||||
Name: "user_",
|
||||
Columns: arcgisuserColumns{
|
||||
Access: column{
|
||||
Name: "access",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Created: column{
|
||||
Name: "created",
|
||||
DBType: "timestamp without time zone",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Email: column{
|
||||
Name: "email",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
FullName: column{
|
||||
Name: "full_name",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
ID: column{
|
||||
Name: "id",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Level: column{
|
||||
Name: "level",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
OrgID: column{
|
||||
Name: "org_id",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
PublicUserID: column{
|
||||
Name: "public_user_id",
|
||||
DBType: "integer",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Region: column{
|
||||
Name: "region",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Role: column{
|
||||
Name: "role",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
RoleID: column{
|
||||
Name: "role_id",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Username: column{
|
||||
Name: "username",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
UserLicenseTypeID: column{
|
||||
Name: "user_license_type_id",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
UserType: column{
|
||||
Name: "user_type",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: arcgisuserIndexes{
|
||||
UserPkey: index{
|
||||
Type: "btree",
|
||||
Name: "user__pkey",
|
||||
Columns: []indexColumn{
|
||||
{
|
||||
Name: "id",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
},
|
||||
Unique: true,
|
||||
Comment: "",
|
||||
NullsFirst: []bool{false},
|
||||
NullsDistinct: false,
|
||||
Where: "",
|
||||
Include: []string{},
|
||||
},
|
||||
},
|
||||
PrimaryKey: &constraint{
|
||||
Name: "user__pkey",
|
||||
Columns: []string{"id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignKeys: arcgisuserForeignKeys{
|
||||
ArcgisUserUserPublicUserIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "arcgis.user_.user__public_user_id_fkey",
|
||||
Columns: []string{"public_user_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "user_",
|
||||
ForeignColumns: []string{"id"},
|
||||
},
|
||||
},
|
||||
|
||||
Comment: "",
|
||||
}
|
||||
|
||||
type arcgisuserColumns struct {
|
||||
Access column
|
||||
Created column
|
||||
Email column
|
||||
FullName column
|
||||
ID column
|
||||
Level column
|
||||
OrgID column
|
||||
PublicUserID column
|
||||
Region column
|
||||
Role column
|
||||
RoleID column
|
||||
Username column
|
||||
UserLicenseTypeID column
|
||||
UserType column
|
||||
}
|
||||
|
||||
func (c arcgisuserColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.Access, c.Created, c.Email, c.FullName, c.ID, c.Level, c.OrgID, c.PublicUserID, c.Region, c.Role, c.RoleID, c.Username, c.UserLicenseTypeID, c.UserType,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisuserIndexes struct {
|
||||
UserPkey index
|
||||
}
|
||||
|
||||
func (i arcgisuserIndexes) AsSlice() []index {
|
||||
return []index{
|
||||
i.UserPkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisuserForeignKeys struct {
|
||||
ArcgisUserUserPublicUserIDFkey foreignKey
|
||||
}
|
||||
|
||||
func (f arcgisuserForeignKeys) AsSlice() []foreignKey {
|
||||
return []foreignKey{
|
||||
f.ArcgisUserUserPublicUserIDFkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisuserUniques struct{}
|
||||
|
||||
func (u arcgisuserUniques) AsSlice() []constraint {
|
||||
return []constraint{}
|
||||
}
|
||||
|
||||
type arcgisuserChecks struct{}
|
||||
|
||||
func (c arcgisuserChecks) AsSlice() []check {
|
||||
return []check{}
|
||||
}
|
||||
|
|
@ -1,122 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dbinfo
|
||||
|
||||
import "github.com/aarondl/opt/null"
|
||||
|
||||
var ArcgisUserPrivileges = Table[
|
||||
arcgisUserPrivilegeColumns,
|
||||
arcgisUserPrivilegeIndexes,
|
||||
arcgisUserPrivilegeForeignKeys,
|
||||
arcgisUserPrivilegeUniques,
|
||||
arcgisUserPrivilegeChecks,
|
||||
]{
|
||||
Schema: "arcgis",
|
||||
Name: "user_privilege",
|
||||
Columns: arcgisUserPrivilegeColumns{
|
||||
UserID: column{
|
||||
Name: "user_id",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Privilege: column{
|
||||
Name: "privilege",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: arcgisUserPrivilegeIndexes{
|
||||
UserPrivilegePkey: index{
|
||||
Type: "btree",
|
||||
Name: "user_privilege_pkey",
|
||||
Columns: []indexColumn{
|
||||
{
|
||||
Name: "user_id",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
{
|
||||
Name: "privilege",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
},
|
||||
Unique: true,
|
||||
Comment: "",
|
||||
NullsFirst: []bool{false, false},
|
||||
NullsDistinct: false,
|
||||
Where: "",
|
||||
Include: []string{},
|
||||
},
|
||||
},
|
||||
PrimaryKey: &constraint{
|
||||
Name: "user_privilege_pkey",
|
||||
Columns: []string{"user_id", "privilege"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignKeys: arcgisUserPrivilegeForeignKeys{
|
||||
ArcgisUserPrivilegeUserPrivilegeUserIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "arcgis.user_privilege.user_privilege_user_id_fkey",
|
||||
Columns: []string{"user_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "arcgis.user_",
|
||||
ForeignColumns: []string{"id"},
|
||||
},
|
||||
},
|
||||
|
||||
Comment: "",
|
||||
}
|
||||
|
||||
type arcgisUserPrivilegeColumns struct {
|
||||
UserID column
|
||||
Privilege column
|
||||
}
|
||||
|
||||
func (c arcgisUserPrivilegeColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.UserID, c.Privilege,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisUserPrivilegeIndexes struct {
|
||||
UserPrivilegePkey index
|
||||
}
|
||||
|
||||
func (i arcgisUserPrivilegeIndexes) AsSlice() []index {
|
||||
return []index{
|
||||
i.UserPrivilegePkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisUserPrivilegeForeignKeys struct {
|
||||
ArcgisUserPrivilegeUserPrivilegeUserIDFkey foreignKey
|
||||
}
|
||||
|
||||
func (f arcgisUserPrivilegeForeignKeys) AsSlice() []foreignKey {
|
||||
return []foreignKey{
|
||||
f.ArcgisUserPrivilegeUserPrivilegeUserIDFkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisUserPrivilegeUniques struct{}
|
||||
|
||||
func (u arcgisUserPrivilegeUniques) AsSlice() []constraint {
|
||||
return []constraint{}
|
||||
}
|
||||
|
||||
type arcgisUserPrivilegeChecks struct{}
|
||||
|
||||
func (c arcgisUserPrivilegeChecks) AsSlice() []check {
|
||||
return []check{}
|
||||
}
|
||||
|
|
@ -8,270 +8,6 @@ import (
|
|||
"fmt"
|
||||
)
|
||||
|
||||
// Enum values for ArcgisFieldtype
|
||||
const (
|
||||
ArcgisFieldtypeEsrifieldtypesmallinteger ArcgisFieldtype = "esriFieldTypeSmallInteger"
|
||||
ArcgisFieldtypeEsrifieldtypeinteger ArcgisFieldtype = "esriFieldTypeInteger"
|
||||
ArcgisFieldtypeEsrifieldtypesingle ArcgisFieldtype = "esriFieldTypeSingle"
|
||||
ArcgisFieldtypeEsrifieldtypedouble ArcgisFieldtype = "esriFieldTypeDouble"
|
||||
ArcgisFieldtypeEsrifieldtypestring ArcgisFieldtype = "esriFieldTypeString"
|
||||
ArcgisFieldtypeEsrifieldtypedate ArcgisFieldtype = "esriFieldTypeDate"
|
||||
ArcgisFieldtypeEsrifieldtypeoid ArcgisFieldtype = "esriFieldTypeOID"
|
||||
ArcgisFieldtypeEsrifieldtypegeometry ArcgisFieldtype = "esriFieldTypeGeometry"
|
||||
ArcgisFieldtypeEsrifieldtypeblob ArcgisFieldtype = "esriFieldTypeBlob"
|
||||
ArcgisFieldtypeEsrifieldtyperaster ArcgisFieldtype = "esriFieldTypeRaster"
|
||||
ArcgisFieldtypeEsrifieldtypeguid ArcgisFieldtype = "esriFieldTypeGUID"
|
||||
ArcgisFieldtypeEsrifieldtypeglobalid ArcgisFieldtype = "esriFieldTypeGlobalID"
|
||||
ArcgisFieldtypeEsrifieldtypexml ArcgisFieldtype = "esriFieldTypeXML"
|
||||
ArcgisFieldtypeEsrifieldtypebiginteger ArcgisFieldtype = "esriFieldTypeBigInteger"
|
||||
)
|
||||
|
||||
func AllArcgisFieldtype() []ArcgisFieldtype {
|
||||
return []ArcgisFieldtype{
|
||||
ArcgisFieldtypeEsrifieldtypesmallinteger,
|
||||
ArcgisFieldtypeEsrifieldtypeinteger,
|
||||
ArcgisFieldtypeEsrifieldtypesingle,
|
||||
ArcgisFieldtypeEsrifieldtypedouble,
|
||||
ArcgisFieldtypeEsrifieldtypestring,
|
||||
ArcgisFieldtypeEsrifieldtypedate,
|
||||
ArcgisFieldtypeEsrifieldtypeoid,
|
||||
ArcgisFieldtypeEsrifieldtypegeometry,
|
||||
ArcgisFieldtypeEsrifieldtypeblob,
|
||||
ArcgisFieldtypeEsrifieldtyperaster,
|
||||
ArcgisFieldtypeEsrifieldtypeguid,
|
||||
ArcgisFieldtypeEsrifieldtypeglobalid,
|
||||
ArcgisFieldtypeEsrifieldtypexml,
|
||||
ArcgisFieldtypeEsrifieldtypebiginteger,
|
||||
}
|
||||
}
|
||||
|
||||
type ArcgisFieldtype string
|
||||
|
||||
func (e ArcgisFieldtype) String() string {
|
||||
return string(e)
|
||||
}
|
||||
|
||||
func (e ArcgisFieldtype) Valid() bool {
|
||||
switch e {
|
||||
case ArcgisFieldtypeEsrifieldtypesmallinteger,
|
||||
ArcgisFieldtypeEsrifieldtypeinteger,
|
||||
ArcgisFieldtypeEsrifieldtypesingle,
|
||||
ArcgisFieldtypeEsrifieldtypedouble,
|
||||
ArcgisFieldtypeEsrifieldtypestring,
|
||||
ArcgisFieldtypeEsrifieldtypedate,
|
||||
ArcgisFieldtypeEsrifieldtypeoid,
|
||||
ArcgisFieldtypeEsrifieldtypegeometry,
|
||||
ArcgisFieldtypeEsrifieldtypeblob,
|
||||
ArcgisFieldtypeEsrifieldtyperaster,
|
||||
ArcgisFieldtypeEsrifieldtypeguid,
|
||||
ArcgisFieldtypeEsrifieldtypeglobalid,
|
||||
ArcgisFieldtypeEsrifieldtypexml,
|
||||
ArcgisFieldtypeEsrifieldtypebiginteger:
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// useful when testing in other packages
|
||||
func (e ArcgisFieldtype) All() []ArcgisFieldtype {
|
||||
return AllArcgisFieldtype()
|
||||
}
|
||||
|
||||
func (e ArcgisFieldtype) MarshalText() ([]byte, error) {
|
||||
return []byte(e), nil
|
||||
}
|
||||
|
||||
func (e *ArcgisFieldtype) UnmarshalText(text []byte) error {
|
||||
return e.Scan(text)
|
||||
}
|
||||
|
||||
func (e ArcgisFieldtype) MarshalBinary() ([]byte, error) {
|
||||
return []byte(e), nil
|
||||
}
|
||||
|
||||
func (e *ArcgisFieldtype) UnmarshalBinary(data []byte) error {
|
||||
return e.Scan(data)
|
||||
}
|
||||
|
||||
func (e ArcgisFieldtype) Value() (driver.Value, error) {
|
||||
return string(e), nil
|
||||
}
|
||||
|
||||
func (e *ArcgisFieldtype) Scan(value any) error {
|
||||
switch x := value.(type) {
|
||||
case string:
|
||||
*e = ArcgisFieldtype(x)
|
||||
case []byte:
|
||||
*e = ArcgisFieldtype(x)
|
||||
case nil:
|
||||
return fmt.Errorf("cannot nil into ArcgisFieldtype")
|
||||
default:
|
||||
return fmt.Errorf("cannot scan type %T: %v", value, value)
|
||||
}
|
||||
|
||||
if !e.Valid() {
|
||||
return fmt.Errorf("invalid ArcgisFieldtype value: %s", *e)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Enum values for ArcgisMappingdestinationaddress
|
||||
const (
|
||||
ArcgisMappingdestinationaddressCountry ArcgisMappingdestinationaddress = "country"
|
||||
ArcgisMappingdestinationaddressLocality ArcgisMappingdestinationaddress = "locality"
|
||||
ArcgisMappingdestinationaddressPostalCode ArcgisMappingdestinationaddress = "postal_code"
|
||||
ArcgisMappingdestinationaddressStreet ArcgisMappingdestinationaddress = "street"
|
||||
ArcgisMappingdestinationaddressUnit ArcgisMappingdestinationaddress = "unit"
|
||||
)
|
||||
|
||||
func AllArcgisMappingdestinationaddress() []ArcgisMappingdestinationaddress {
|
||||
return []ArcgisMappingdestinationaddress{
|
||||
ArcgisMappingdestinationaddressCountry,
|
||||
ArcgisMappingdestinationaddressLocality,
|
||||
ArcgisMappingdestinationaddressPostalCode,
|
||||
ArcgisMappingdestinationaddressStreet,
|
||||
ArcgisMappingdestinationaddressUnit,
|
||||
}
|
||||
}
|
||||
|
||||
type ArcgisMappingdestinationaddress string
|
||||
|
||||
func (e ArcgisMappingdestinationaddress) String() string {
|
||||
return string(e)
|
||||
}
|
||||
|
||||
func (e ArcgisMappingdestinationaddress) Valid() bool {
|
||||
switch e {
|
||||
case ArcgisMappingdestinationaddressCountry,
|
||||
ArcgisMappingdestinationaddressLocality,
|
||||
ArcgisMappingdestinationaddressPostalCode,
|
||||
ArcgisMappingdestinationaddressStreet,
|
||||
ArcgisMappingdestinationaddressUnit:
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// useful when testing in other packages
|
||||
func (e ArcgisMappingdestinationaddress) All() []ArcgisMappingdestinationaddress {
|
||||
return AllArcgisMappingdestinationaddress()
|
||||
}
|
||||
|
||||
func (e ArcgisMappingdestinationaddress) MarshalText() ([]byte, error) {
|
||||
return []byte(e), nil
|
||||
}
|
||||
|
||||
func (e *ArcgisMappingdestinationaddress) UnmarshalText(text []byte) error {
|
||||
return e.Scan(text)
|
||||
}
|
||||
|
||||
func (e ArcgisMappingdestinationaddress) MarshalBinary() ([]byte, error) {
|
||||
return []byte(e), nil
|
||||
}
|
||||
|
||||
func (e *ArcgisMappingdestinationaddress) UnmarshalBinary(data []byte) error {
|
||||
return e.Scan(data)
|
||||
}
|
||||
|
||||
func (e ArcgisMappingdestinationaddress) Value() (driver.Value, error) {
|
||||
return string(e), nil
|
||||
}
|
||||
|
||||
func (e *ArcgisMappingdestinationaddress) Scan(value any) error {
|
||||
switch x := value.(type) {
|
||||
case string:
|
||||
*e = ArcgisMappingdestinationaddress(x)
|
||||
case []byte:
|
||||
*e = ArcgisMappingdestinationaddress(x)
|
||||
case nil:
|
||||
return fmt.Errorf("cannot nil into ArcgisMappingdestinationaddress")
|
||||
default:
|
||||
return fmt.Errorf("cannot scan type %T: %v", value, value)
|
||||
}
|
||||
|
||||
if !e.Valid() {
|
||||
return fmt.Errorf("invalid ArcgisMappingdestinationaddress value: %s", *e)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Enum values for ArcgisMappingdestinationparcel
|
||||
const (
|
||||
ArcgisMappingdestinationparcelApn ArcgisMappingdestinationparcel = "apn"
|
||||
ArcgisMappingdestinationparcelDescription ArcgisMappingdestinationparcel = "description"
|
||||
)
|
||||
|
||||
func AllArcgisMappingdestinationparcel() []ArcgisMappingdestinationparcel {
|
||||
return []ArcgisMappingdestinationparcel{
|
||||
ArcgisMappingdestinationparcelApn,
|
||||
ArcgisMappingdestinationparcelDescription,
|
||||
}
|
||||
}
|
||||
|
||||
type ArcgisMappingdestinationparcel string
|
||||
|
||||
func (e ArcgisMappingdestinationparcel) String() string {
|
||||
return string(e)
|
||||
}
|
||||
|
||||
func (e ArcgisMappingdestinationparcel) Valid() bool {
|
||||
switch e {
|
||||
case ArcgisMappingdestinationparcelApn,
|
||||
ArcgisMappingdestinationparcelDescription:
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// useful when testing in other packages
|
||||
func (e ArcgisMappingdestinationparcel) All() []ArcgisMappingdestinationparcel {
|
||||
return AllArcgisMappingdestinationparcel()
|
||||
}
|
||||
|
||||
func (e ArcgisMappingdestinationparcel) MarshalText() ([]byte, error) {
|
||||
return []byte(e), nil
|
||||
}
|
||||
|
||||
func (e *ArcgisMappingdestinationparcel) UnmarshalText(text []byte) error {
|
||||
return e.Scan(text)
|
||||
}
|
||||
|
||||
func (e ArcgisMappingdestinationparcel) MarshalBinary() ([]byte, error) {
|
||||
return []byte(e), nil
|
||||
}
|
||||
|
||||
func (e *ArcgisMappingdestinationparcel) UnmarshalBinary(data []byte) error {
|
||||
return e.Scan(data)
|
||||
}
|
||||
|
||||
func (e ArcgisMappingdestinationparcel) Value() (driver.Value, error) {
|
||||
return string(e), nil
|
||||
}
|
||||
|
||||
func (e *ArcgisMappingdestinationparcel) Scan(value any) error {
|
||||
switch x := value.(type) {
|
||||
case string:
|
||||
*e = ArcgisMappingdestinationparcel(x)
|
||||
case []byte:
|
||||
*e = ArcgisMappingdestinationparcel(x)
|
||||
case nil:
|
||||
return fmt.Errorf("cannot nil into ArcgisMappingdestinationparcel")
|
||||
default:
|
||||
return fmt.Errorf("cannot scan type %T: %v", value, value)
|
||||
}
|
||||
|
||||
if !e.Valid() {
|
||||
return fmt.Errorf("invalid ArcgisMappingdestinationparcel value: %s", *e)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Enum values for Arcgislicensetype
|
||||
const (
|
||||
ArcgislicensetypeAdvancedut Arcgislicensetype = "advancedUT"
|
||||
|
|
|
|||
|
|
@ -5,6 +5,7 @@ import (
|
|||
"log"
|
||||
"os"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/types"
|
||||
"github.com/go-jet/jet/v2/generator/metadata"
|
||||
genpostgres "github.com/go-jet/jet/v2/generator/postgres"
|
||||
"github.com/go-jet/jet/v2/generator/template"
|
||||
|
|
@ -12,11 +13,6 @@ import (
|
|||
_ "github.com/lib/pq"
|
||||
)
|
||||
|
||||
type Box2D struct {
|
||||
X float64
|
||||
Y float64
|
||||
}
|
||||
|
||||
var schemas []string = []string{
|
||||
"arcgis",
|
||||
"stadia",
|
||||
|
|
@ -29,7 +25,7 @@ func customTemplate() template.Template {
|
|||
defaultTableModelField := template.DefaultTableModelField(column)
|
||||
//log.Printf("'%s' '%s' '%s'", table.Name, column.Name, column.DataType.Name)
|
||||
if column.Name == "extent" && column.DataType.Name == "box2d" {
|
||||
defaultTableModelField.Type = template.NewType(Box2D{})
|
||||
defaultTableModelField.Type = template.NewType(types.Box2D{})
|
||||
}
|
||||
return defaultTableModelField
|
||||
})
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load diff
|
|
@ -1,837 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package models
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
|
||||
"github.com/Gleipnir-Technology/bob"
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql"
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql/dialect"
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql/dm"
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql/sm"
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql/um"
|
||||
"github.com/Gleipnir-Technology/bob/expr"
|
||||
"github.com/Gleipnir-Technology/bob/orm"
|
||||
"github.com/Gleipnir-Technology/bob/types/pgtypes"
|
||||
enums "github.com/Gleipnir-Technology/nidus-sync/db/enums"
|
||||
"github.com/aarondl/opt/omit"
|
||||
)
|
||||
|
||||
// ArcgisAddressMapping is an object representing the database table.
// Primary key: (organization_id, destination), per the ",pk" tags.
type ArcgisAddressMapping struct {
	Destination               enums.ArcgisMappingdestinationaddress `db:"destination,pk" `
	LayerFeatureServiceItemID string                                `db:"layer_feature_service_item_id" `
	LayerIndex                int32                                 `db:"layer_index" `
	LayerFieldName            string                                `db:"layer_field_name" `
	OrganizationID            int32                                 `db:"organization_id,pk" `

	// R holds eagerly-loaded relationships; db:"-" excludes it from scanning.
	R arcgisAddressMappingR `db:"-" `
}

// ArcgisAddressMappingSlice is an alias for a slice of pointers to ArcgisAddressMapping.
// This should almost always be used instead of []*ArcgisAddressMapping.
type ArcgisAddressMappingSlice []*ArcgisAddressMapping

// ArcgisAddressMappings contains methods to work with the address_mapping table
var ArcgisAddressMappings = psql.NewTablex[*ArcgisAddressMapping, ArcgisAddressMappingSlice, *ArcgisAddressMappingSetter]("arcgis", "address_mapping", buildArcgisAddressMappingColumns("arcgis.address_mapping"))

// ArcgisAddressMappingsQuery is a query on the address_mapping table
type ArcgisAddressMappingsQuery = *psql.ViewQuery[*ArcgisAddressMapping, ArcgisAddressMappingSlice]

// arcgisAddressMappingR is where relationships are stored.
type arcgisAddressMappingR struct {
	LayerField   *ArcgisLayerField // arcgis.address_mapping.address_mapping_layer_feature_service_item_id_layer_index__fkey
	Organization *Organization     // arcgis.address_mapping.address_mapping_organization_id_fkey
}
|
||||
|
||||
// buildArcgisAddressMappingColumns constructs the quoted column expressions
// for the address_mapping table, qualified with the given table alias.
func buildArcgisAddressMappingColumns(alias string) arcgisAddressMappingColumns {
	return arcgisAddressMappingColumns{
		ColumnsExpr: expr.NewColumnsExpr(
			"destination", "layer_feature_service_item_id", "layer_index", "layer_field_name", "organization_id",
		).WithParent("arcgis.address_mapping"),
		tableAlias:                alias,
		Destination:               psql.Quote(alias, "destination"),
		LayerFeatureServiceItemID: psql.Quote(alias, "layer_feature_service_item_id"),
		LayerIndex:                psql.Quote(alias, "layer_index"),
		LayerFieldName:            psql.Quote(alias, "layer_field_name"),
		OrganizationID:            psql.Quote(alias, "organization_id"),
	}
}

// arcgisAddressMappingColumns exposes one alias-qualified expression per
// table column, plus the combined ColumnsExpr for SELECT lists.
type arcgisAddressMappingColumns struct {
	expr.ColumnsExpr
	tableAlias                string
	Destination               psql.Expression
	LayerFeatureServiceItemID psql.Expression
	LayerIndex                psql.Expression
	LayerFieldName            psql.Expression
	OrganizationID            psql.Expression
}

// Alias returns the table alias these column expressions are qualified with.
func (c arcgisAddressMappingColumns) Alias() string {
	return c.tableAlias
}

// AliasedAs returns a fresh set of column expressions qualified with alias.
func (arcgisAddressMappingColumns) AliasedAs(alias string) arcgisAddressMappingColumns {
	return buildArcgisAddressMappingColumns(alias)
}
|
||||
|
||||
// ArcgisAddressMappingSetter is used for insert/upsert/update operations
// All values are optional, and do not have to be set
// Generated columns are not included
type ArcgisAddressMappingSetter struct {
	Destination               omit.Val[enums.ArcgisMappingdestinationaddress] `db:"destination,pk" `
	LayerFeatureServiceItemID omit.Val[string]                                `db:"layer_feature_service_item_id" `
	LayerIndex                omit.Val[int32]                                 `db:"layer_index" `
	LayerFieldName            omit.Val[string]                                `db:"layer_field_name" `
	OrganizationID            omit.Val[int32]                                 `db:"organization_id,pk" `
}

// SetColumns returns the names of the columns that currently hold a value,
// in fixed table-column order.
func (s ArcgisAddressMappingSetter) SetColumns() []string {
	vals := make([]string, 0, 5)
	if s.Destination.IsValue() {
		vals = append(vals, "destination")
	}
	if s.LayerFeatureServiceItemID.IsValue() {
		vals = append(vals, "layer_feature_service_item_id")
	}
	if s.LayerIndex.IsValue() {
		vals = append(vals, "layer_index")
	}
	if s.LayerFieldName.IsValue() {
		vals = append(vals, "layer_field_name")
	}
	if s.OrganizationID.IsValue() {
		vals = append(vals, "organization_id")
	}
	return vals
}

// Overwrite copies each value that is set on s onto the corresponding field
// of t; unset fields leave t untouched.
func (s ArcgisAddressMappingSetter) Overwrite(t *ArcgisAddressMapping) {
	if s.Destination.IsValue() {
		t.Destination = s.Destination.MustGet()
	}
	if s.LayerFeatureServiceItemID.IsValue() {
		t.LayerFeatureServiceItemID = s.LayerFeatureServiceItemID.MustGet()
	}
	if s.LayerIndex.IsValue() {
		t.LayerIndex = s.LayerIndex.MustGet()
	}
	if s.LayerFieldName.IsValue() {
		t.LayerFieldName = s.LayerFieldName.MustGet()
	}
	if s.OrganizationID.IsValue() {
		t.OrganizationID = s.OrganizationID.MustGet()
	}
}
|
||||
|
||||
// Apply wires this setter into an INSERT query: it registers the table's
// before-insert hooks and appends one value row, emitting a bound argument
// for each set column and the DEFAULT keyword for each unset one (positions
// follow the fixed column order of the table).
func (s *ArcgisAddressMappingSetter) Apply(q *dialect.InsertQuery) {
	q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
		return ArcgisAddressMappings.BeforeInsertHooks.RunHooks(ctx, exec, s)
	})

	q.AppendValues(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
		vals := make([]bob.Expression, 5)
		if s.Destination.IsValue() {
			vals[0] = psql.Arg(s.Destination.MustGet())
		} else {
			vals[0] = psql.Raw("DEFAULT")
		}

		if s.LayerFeatureServiceItemID.IsValue() {
			vals[1] = psql.Arg(s.LayerFeatureServiceItemID.MustGet())
		} else {
			vals[1] = psql.Raw("DEFAULT")
		}

		if s.LayerIndex.IsValue() {
			vals[2] = psql.Arg(s.LayerIndex.MustGet())
		} else {
			vals[2] = psql.Raw("DEFAULT")
		}

		if s.LayerFieldName.IsValue() {
			vals[3] = psql.Arg(s.LayerFieldName.MustGet())
		} else {
			vals[3] = psql.Raw("DEFAULT")
		}

		if s.OrganizationID.IsValue() {
			vals[4] = psql.Arg(s.OrganizationID.MustGet())
		} else {
			vals[4] = psql.Raw("DEFAULT")
		}

		return bob.ExpressSlice(ctx, w, d, start, vals, "", ", ", "")
	}))
}
|
||||
|
||||
// UpdateMod returns an UPDATE-query modifier that SETs every value currently
// held by this setter.
func (s ArcgisAddressMappingSetter) UpdateMod() bob.Mod[*dialect.UpdateQuery] {
	return um.Set(s.Expressions()...)
}

// Expressions builds one `"column" = $arg` assignment expression per set
// field. An optional prefix qualifies the quoted column names.
func (s ArcgisAddressMappingSetter) Expressions(prefix ...string) []bob.Expression {
	exprs := make([]bob.Expression, 0, 5)

	if s.Destination.IsValue() {
		exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
			psql.Quote(append(prefix, "destination")...),
			psql.Arg(s.Destination),
		}})
	}

	if s.LayerFeatureServiceItemID.IsValue() {
		exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
			psql.Quote(append(prefix, "layer_feature_service_item_id")...),
			psql.Arg(s.LayerFeatureServiceItemID),
		}})
	}

	if s.LayerIndex.IsValue() {
		exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
			psql.Quote(append(prefix, "layer_index")...),
			psql.Arg(s.LayerIndex),
		}})
	}

	if s.LayerFieldName.IsValue() {
		exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
			psql.Quote(append(prefix, "layer_field_name")...),
			psql.Arg(s.LayerFieldName),
		}})
	}

	if s.OrganizationID.IsValue() {
		exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
			psql.Quote(append(prefix, "organization_id")...),
			psql.Arg(s.OrganizationID),
		}})
	}

	return exprs
}
|
||||
|
||||
// FindArcgisAddressMapping retrieves a single record by primary key
// If cols is empty Find will return all columns.
func FindArcgisAddressMapping(ctx context.Context, exec bob.Executor, OrganizationIDPK int32, DestinationPK enums.ArcgisMappingdestinationaddress, cols ...string) (*ArcgisAddressMapping, error) {
	if len(cols) == 0 {
		return ArcgisAddressMappings.Query(
			sm.Where(ArcgisAddressMappings.Columns.OrganizationID.EQ(psql.Arg(OrganizationIDPK))),
			sm.Where(ArcgisAddressMappings.Columns.Destination.EQ(psql.Arg(DestinationPK))),
		).One(ctx, exec)
	}

	// Restrict the SELECT list to the requested subset of columns.
	return ArcgisAddressMappings.Query(
		sm.Where(ArcgisAddressMappings.Columns.OrganizationID.EQ(psql.Arg(OrganizationIDPK))),
		sm.Where(ArcgisAddressMappings.Columns.Destination.EQ(psql.Arg(DestinationPK))),
		sm.Columns(ArcgisAddressMappings.Columns.Only(cols...)),
	).One(ctx, exec)
}

// ArcgisAddressMappingExists checks the presence of a single record by primary key
func ArcgisAddressMappingExists(ctx context.Context, exec bob.Executor, OrganizationIDPK int32, DestinationPK enums.ArcgisMappingdestinationaddress) (bool, error) {
	return ArcgisAddressMappings.Query(
		sm.Where(ArcgisAddressMappings.Columns.OrganizationID.EQ(psql.Arg(OrganizationIDPK))),
		sm.Where(ArcgisAddressMappings.Columns.Destination.EQ(psql.Arg(DestinationPK))),
	).Exists(ctx, exec)
}
|
||||
|
||||
// AfterQueryHook is called after ArcgisAddressMapping is retrieved from the database
// It dispatches to the table's after-hooks matching the query type, wrapping
// the single row in a one-element slice.
func (o *ArcgisAddressMapping) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error {
	var err error

	switch queryType {
	case bob.QueryTypeSelect:
		ctx, err = ArcgisAddressMappings.AfterSelectHooks.RunHooks(ctx, exec, ArcgisAddressMappingSlice{o})
	case bob.QueryTypeInsert:
		ctx, err = ArcgisAddressMappings.AfterInsertHooks.RunHooks(ctx, exec, ArcgisAddressMappingSlice{o})
	case bob.QueryTypeUpdate:
		ctx, err = ArcgisAddressMappings.AfterUpdateHooks.RunHooks(ctx, exec, ArcgisAddressMappingSlice{o})
	case bob.QueryTypeDelete:
		ctx, err = ArcgisAddressMappings.AfterDeleteHooks.RunHooks(ctx, exec, ArcgisAddressMappingSlice{o})
	}

	return err
}

// primaryKeyVals returns the primary key values of the ArcgisAddressMapping
func (o *ArcgisAddressMapping) primaryKeyVals() bob.Expression {
	return psql.ArgGroup(
		o.OrganizationID,
		o.Destination,
	)
}

// pkEQ builds a "(organization_id, destination) = (args)" expression that
// matches exactly this row.
func (o *ArcgisAddressMapping) pkEQ() dialect.Expression {
	return psql.Group(psql.Quote("arcgis.address_mapping", "organization_id"), psql.Quote("arcgis.address_mapping", "destination")).EQ(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
		return o.primaryKeyVals().WriteSQL(ctx, w, d, start)
	}))
}
|
||||
|
||||
// Update uses an executor to update the ArcgisAddressMapping
// On success o is replaced wholesale with the row returned by the UPDATE.
func (o *ArcgisAddressMapping) Update(ctx context.Context, exec bob.Executor, s *ArcgisAddressMappingSetter) error {
	v, err := ArcgisAddressMappings.Update(s.UpdateMod(), um.Where(o.pkEQ())).One(ctx, exec)
	if err != nil {
		return err
	}

	o.R = v.R
	*o = *v

	return nil
}

// Delete deletes a single ArcgisAddressMapping record with an executor
func (o *ArcgisAddressMapping) Delete(ctx context.Context, exec bob.Executor) error {
	_, err := ArcgisAddressMappings.Delete(dm.Where(o.pkEQ())).Exec(ctx, exec)
	return err
}

// Reload refreshes the ArcgisAddressMapping using the executor
// Already-loaded relationships in o.R are preserved across the refresh.
func (o *ArcgisAddressMapping) Reload(ctx context.Context, exec bob.Executor) error {
	o2, err := ArcgisAddressMappings.Query(
		sm.Where(ArcgisAddressMappings.Columns.OrganizationID.EQ(psql.Arg(o.OrganizationID))),
		sm.Where(ArcgisAddressMappings.Columns.Destination.EQ(psql.Arg(o.Destination))),
	).One(ctx, exec)
	if err != nil {
		return err
	}
	o2.R = o.R
	*o = *o2

	return nil
}
|
||||
|
||||
// AfterQueryHook is called after ArcgisAddressMappingSlice is retrieved from the database
func (o ArcgisAddressMappingSlice) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error {
	var err error

	switch queryType {
	case bob.QueryTypeSelect:
		ctx, err = ArcgisAddressMappings.AfterSelectHooks.RunHooks(ctx, exec, o)
	case bob.QueryTypeInsert:
		ctx, err = ArcgisAddressMappings.AfterInsertHooks.RunHooks(ctx, exec, o)
	case bob.QueryTypeUpdate:
		ctx, err = ArcgisAddressMappings.AfterUpdateHooks.RunHooks(ctx, exec, o)
	case bob.QueryTypeDelete:
		ctx, err = ArcgisAddressMappings.AfterDeleteHooks.RunHooks(ctx, exec, o)
	}

	return err
}

// pkIN builds "(organization_id, destination) IN (pk pairs)" covering every
// row in the slice; for an empty slice it emits NULL, which matches nothing.
func (o ArcgisAddressMappingSlice) pkIN() dialect.Expression {
	if len(o) == 0 {
		return psql.Raw("NULL")
	}

	return psql.Group(psql.Quote("arcgis.address_mapping", "organization_id"), psql.Quote("arcgis.address_mapping", "destination")).In(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
		pkPairs := make([]bob.Expression, len(o))
		for i, row := range o {
			pkPairs[i] = row.primaryKeyVals()
		}
		return bob.ExpressSlice(ctx, w, d, start, pkPairs, "", ", ", "")
	}))
}
|
||||
|
||||
// copyMatchingRows finds models in the given slice that have the same primary key
|
||||
// then it first copies the existing relationships from the old model to the new model
|
||||
// and then replaces the old model in the slice with the new model
|
||||
func (o ArcgisAddressMappingSlice) copyMatchingRows(from ...*ArcgisAddressMapping) {
|
||||
for i, old := range o {
|
||||
for _, new := range from {
|
||||
if new.OrganizationID != old.OrganizationID {
|
||||
continue
|
||||
}
|
||||
if new.Destination != old.Destination {
|
||||
continue
|
||||
}
|
||||
new.R = old.R
|
||||
o[i] = new
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// UpdateMod modifies an update query with "WHERE primary_key IN (o...)"
// It also registers before-update hooks and a loader that merges the rows
// returned by the query back into this slice.
func (o ArcgisAddressMappingSlice) UpdateMod() bob.Mod[*dialect.UpdateQuery] {
	return bob.ModFunc[*dialect.UpdateQuery](func(q *dialect.UpdateQuery) {
		q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
			return ArcgisAddressMappings.BeforeUpdateHooks.RunHooks(ctx, exec, o)
		})

		q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error {
			var err error
			switch retrieved := retrieved.(type) {
			case *ArcgisAddressMapping:
				o.copyMatchingRows(retrieved)
			case []*ArcgisAddressMapping:
				o.copyMatchingRows(retrieved...)
			case ArcgisAddressMappingSlice:
				o.copyMatchingRows(retrieved...)
			default:
				// If the retrieved value is not a ArcgisAddressMapping or a slice of ArcgisAddressMapping
				// then run the AfterUpdateHooks on the slice
				_, err = ArcgisAddressMappings.AfterUpdateHooks.RunHooks(ctx, exec, o)
			}

			return err
		}))

		q.AppendWhere(o.pkIN())
	})
}

// DeleteMod modifies an delete query with "WHERE primary_key IN (o...)"
// Mirrors UpdateMod but registers the delete-side hooks instead.
func (o ArcgisAddressMappingSlice) DeleteMod() bob.Mod[*dialect.DeleteQuery] {
	return bob.ModFunc[*dialect.DeleteQuery](func(q *dialect.DeleteQuery) {
		q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
			return ArcgisAddressMappings.BeforeDeleteHooks.RunHooks(ctx, exec, o)
		})

		q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error {
			var err error
			switch retrieved := retrieved.(type) {
			case *ArcgisAddressMapping:
				o.copyMatchingRows(retrieved)
			case []*ArcgisAddressMapping:
				o.copyMatchingRows(retrieved...)
			case ArcgisAddressMappingSlice:
				o.copyMatchingRows(retrieved...)
			default:
				// If the retrieved value is not a ArcgisAddressMapping or a slice of ArcgisAddressMapping
				// then run the AfterDeleteHooks on the slice
				_, err = ArcgisAddressMappings.AfterDeleteHooks.RunHooks(ctx, exec, o)
			}

			return err
		}))

		q.AppendWhere(o.pkIN())
	})
}
|
||||
|
||||
// UpdateAll applies the setter's values to every row in the slice with one
// UPDATE statement; an empty slice is a no-op.
func (o ArcgisAddressMappingSlice) UpdateAll(ctx context.Context, exec bob.Executor, vals ArcgisAddressMappingSetter) error {
	if len(o) == 0 {
		return nil
	}

	_, err := ArcgisAddressMappings.Update(vals.UpdateMod(), o.UpdateMod()).All(ctx, exec)
	return err
}

// DeleteAll removes every row in the slice with one DELETE statement; an
// empty slice is a no-op.
func (o ArcgisAddressMappingSlice) DeleteAll(ctx context.Context, exec bob.Executor) error {
	if len(o) == 0 {
		return nil
	}

	_, err := ArcgisAddressMappings.Delete(o.DeleteMod()).Exec(ctx, exec)
	return err
}

// ReloadAll refreshes every row in the slice from the database in a single
// query, preserving already-loaded relationships via copyMatchingRows.
func (o ArcgisAddressMappingSlice) ReloadAll(ctx context.Context, exec bob.Executor) error {
	if len(o) == 0 {
		return nil
	}

	o2, err := ArcgisAddressMappings.Query(sm.Where(o.pkIN())).All(ctx, exec)
	if err != nil {
		return err
	}

	o.copyMatchingRows(o2...)

	return nil
}
|
||||
|
||||
// LayerField starts a query for related objects on arcgis.layer_field
func (o *ArcgisAddressMapping) LayerField(mods ...bob.Mod[*dialect.SelectQuery]) ArcgisLayerFieldsQuery {
	return ArcgisLayerFields.Query(append(mods,
		sm.Where(ArcgisLayerFields.Columns.LayerFeatureServiceItemID.EQ(psql.Arg(o.LayerFeatureServiceItemID))), sm.Where(ArcgisLayerFields.Columns.LayerIndex.EQ(psql.Arg(o.LayerIndex))), sm.Where(ArcgisLayerFields.Columns.Name.EQ(psql.Arg(o.LayerFieldName))),
	)...)
}

// LayerField starts one query covering the related layer_field rows of every
// mapping in the slice, filtering with a composite-key IN over unnested
// postgres array arguments.
func (os ArcgisAddressMappingSlice) LayerField(mods ...bob.Mod[*dialect.SelectQuery]) ArcgisLayerFieldsQuery {
	pkLayerFeatureServiceItemID := make(pgtypes.Array[string], 0, len(os))

	pkLayerIndex := make(pgtypes.Array[int32], 0, len(os))

	pkLayerFieldName := make(pgtypes.Array[string], 0, len(os))
	for _, o := range os {
		if o == nil {
			continue
		}
		pkLayerFeatureServiceItemID = append(pkLayerFeatureServiceItemID, o.LayerFeatureServiceItemID)
		pkLayerIndex = append(pkLayerIndex, o.LayerIndex)
		pkLayerFieldName = append(pkLayerFieldName, o.LayerFieldName)
	}
	PKArgExpr := psql.Select(sm.Columns(
		psql.F("unnest", psql.Cast(psql.Arg(pkLayerFeatureServiceItemID), "text[]")),
		psql.F("unnest", psql.Cast(psql.Arg(pkLayerIndex), "integer[]")),
		psql.F("unnest", psql.Cast(psql.Arg(pkLayerFieldName), "text[]")),
	))

	return ArcgisLayerFields.Query(append(mods,
		sm.Where(psql.Group(ArcgisLayerFields.Columns.LayerFeatureServiceItemID, ArcgisLayerFields.Columns.LayerIndex, ArcgisLayerFields.Columns.Name).OP("IN", PKArgExpr)),
	)...)
}

// Organization starts a query for related objects on organization
func (o *ArcgisAddressMapping) Organization(mods ...bob.Mod[*dialect.SelectQuery]) OrganizationsQuery {
	return Organizations.Query(append(mods,
		sm.Where(Organizations.Columns.ID.EQ(psql.Arg(o.OrganizationID))),
	)...)
}

// Organization starts one query covering the related organization rows of
// every mapping in the slice, using the same unnest-IN technique as above.
func (os ArcgisAddressMappingSlice) Organization(mods ...bob.Mod[*dialect.SelectQuery]) OrganizationsQuery {
	pkOrganizationID := make(pgtypes.Array[int32], 0, len(os))
	for _, o := range os {
		if o == nil {
			continue
		}
		pkOrganizationID = append(pkOrganizationID, o.OrganizationID)
	}
	PKArgExpr := psql.Select(sm.Columns(
		psql.F("unnest", psql.Cast(psql.Arg(pkOrganizationID), "integer[]")),
	))

	return Organizations.Query(append(mods,
		sm.Where(psql.Group(Organizations.Columns.ID).OP("IN", PKArgExpr)),
	)...)
}
|
||||
|
||||
// attachArcgisAddressMappingLayerField0 points the mapping's layer-field
// foreign-key columns at arcgisLayerField1 by issuing an UPDATE.
func attachArcgisAddressMappingLayerField0(ctx context.Context, exec bob.Executor, count int, arcgisAddressMapping0 *ArcgisAddressMapping, arcgisLayerField1 *ArcgisLayerField) (*ArcgisAddressMapping, error) {
	setter := &ArcgisAddressMappingSetter{
		LayerFeatureServiceItemID: omit.From(arcgisLayerField1.LayerFeatureServiceItemID),
		LayerIndex:                omit.From(arcgisLayerField1.LayerIndex),
		LayerFieldName:            omit.From(arcgisLayerField1.Name),
	}

	err := arcgisAddressMapping0.Update(ctx, exec, setter)
	if err != nil {
		return nil, fmt.Errorf("attachArcgisAddressMappingLayerField0: %w", err)
	}

	return arcgisAddressMapping0, nil
}

// InsertLayerField inserts a new layer_field row from the setter, attaches it
// to this mapping, and links both sides of the relationship in memory.
func (arcgisAddressMapping0 *ArcgisAddressMapping) InsertLayerField(ctx context.Context, exec bob.Executor, related *ArcgisLayerFieldSetter) error {
	var err error

	arcgisLayerField1, err := ArcgisLayerFields.Insert(related).One(ctx, exec)
	if err != nil {
		return fmt.Errorf("inserting related objects: %w", err)
	}

	_, err = attachArcgisAddressMappingLayerField0(ctx, exec, 1, arcgisAddressMapping0, arcgisLayerField1)
	if err != nil {
		return err
	}

	arcgisAddressMapping0.R.LayerField = arcgisLayerField1

	arcgisLayerField1.R.AddressMappings = append(arcgisLayerField1.R.AddressMappings, arcgisAddressMapping0)

	return nil
}

// AttachLayerField attaches an existing layer_field row to this mapping and
// links both sides of the relationship in memory.
func (arcgisAddressMapping0 *ArcgisAddressMapping) AttachLayerField(ctx context.Context, exec bob.Executor, arcgisLayerField1 *ArcgisLayerField) error {
	var err error

	_, err = attachArcgisAddressMappingLayerField0(ctx, exec, 1, arcgisAddressMapping0, arcgisLayerField1)
	if err != nil {
		return err
	}

	arcgisAddressMapping0.R.LayerField = arcgisLayerField1

	arcgisLayerField1.R.AddressMappings = append(arcgisLayerField1.R.AddressMappings, arcgisAddressMapping0)

	return nil
}

// attachArcgisAddressMappingOrganization0 points the mapping's
// organization_id column at organization1 by issuing an UPDATE.
func attachArcgisAddressMappingOrganization0(ctx context.Context, exec bob.Executor, count int, arcgisAddressMapping0 *ArcgisAddressMapping, organization1 *Organization) (*ArcgisAddressMapping, error) {
	setter := &ArcgisAddressMappingSetter{
		OrganizationID: omit.From(organization1.ID),
	}

	err := arcgisAddressMapping0.Update(ctx, exec, setter)
	if err != nil {
		return nil, fmt.Errorf("attachArcgisAddressMappingOrganization0: %w", err)
	}

	return arcgisAddressMapping0, nil
}

// InsertOrganization inserts a new organization row from the setter, attaches
// it to this mapping, and links both sides of the relationship in memory.
func (arcgisAddressMapping0 *ArcgisAddressMapping) InsertOrganization(ctx context.Context, exec bob.Executor, related *OrganizationSetter) error {
	var err error

	organization1, err := Organizations.Insert(related).One(ctx, exec)
	if err != nil {
		return fmt.Errorf("inserting related objects: %w", err)
	}

	_, err = attachArcgisAddressMappingOrganization0(ctx, exec, 1, arcgisAddressMapping0, organization1)
	if err != nil {
		return err
	}

	arcgisAddressMapping0.R.Organization = organization1

	organization1.R.AddressMappings = append(organization1.R.AddressMappings, arcgisAddressMapping0)

	return nil
}

// AttachOrganization attaches an existing organization row to this mapping
// and links both sides of the relationship in memory.
func (arcgisAddressMapping0 *ArcgisAddressMapping) AttachOrganization(ctx context.Context, exec bob.Executor, organization1 *Organization) error {
	var err error

	_, err = attachArcgisAddressMappingOrganization0(ctx, exec, 1, arcgisAddressMapping0, organization1)
	if err != nil {
		return err
	}

	arcgisAddressMapping0.R.Organization = organization1

	organization1.R.AddressMappings = append(organization1.R.AddressMappings, arcgisAddressMapping0)

	return nil
}
|
||||
|
||||
// arcgisAddressMappingWhere provides typed WHERE-clause helpers for each
// column of the address_mapping table.
type arcgisAddressMappingWhere[Q psql.Filterable] struct {
	Destination               psql.WhereMod[Q, enums.ArcgisMappingdestinationaddress]
	LayerFeatureServiceItemID psql.WhereMod[Q, string]
	LayerIndex                psql.WhereMod[Q, int32]
	LayerFieldName            psql.WhereMod[Q, string]
	OrganizationID            psql.WhereMod[Q, int32]
}

// AliasedAs rebuilds the WHERE helpers against a different table alias.
func (arcgisAddressMappingWhere[Q]) AliasedAs(alias string) arcgisAddressMappingWhere[Q] {
	return buildArcgisAddressMappingWhere[Q](buildArcgisAddressMappingColumns(alias))
}

// buildArcgisAddressMappingWhere constructs the typed WHERE helpers from a
// set of column expressions.
func buildArcgisAddressMappingWhere[Q psql.Filterable](cols arcgisAddressMappingColumns) arcgisAddressMappingWhere[Q] {
	return arcgisAddressMappingWhere[Q]{
		Destination:               psql.Where[Q, enums.ArcgisMappingdestinationaddress](cols.Destination),
		LayerFeatureServiceItemID: psql.Where[Q, string](cols.LayerFeatureServiceItemID),
		LayerIndex:                psql.Where[Q, int32](cols.LayerIndex),
		LayerFieldName:            psql.Where[Q, string](cols.LayerFieldName),
		OrganizationID:            psql.Where[Q, int32](cols.OrganizationID),
	}
}
|
||||
|
||||
// Preload stores a pre-fetched related object under the named relationship
// ("LayerField" or "Organization") and back-links it to this mapping.
// It returns an error for a type mismatch or an unknown relationship name.
func (o *ArcgisAddressMapping) Preload(name string, retrieved any) error {
	if o == nil {
		return nil
	}

	switch name {
	case "LayerField":
		rel, ok := retrieved.(*ArcgisLayerField)
		if !ok {
			return fmt.Errorf("arcgisAddressMapping cannot load %T as %q", retrieved, name)
		}

		o.R.LayerField = rel

		if rel != nil {
			rel.R.AddressMappings = ArcgisAddressMappingSlice{o}
		}
		return nil
	case "Organization":
		rel, ok := retrieved.(*Organization)
		if !ok {
			return fmt.Errorf("arcgisAddressMapping cannot load %T as %q", retrieved, name)
		}

		o.R.Organization = rel

		if rel != nil {
			rel.R.AddressMappings = ArcgisAddressMappingSlice{o}
		}
		return nil
	default:
		return fmt.Errorf("arcgisAddressMapping has no relationship %q", name)
	}
}
|
||||
|
||||
// arcgisAddressMappingPreloader exposes factory functions that build JOIN-based
// preloaders for each relationship of the address_mapping table.
type arcgisAddressMappingPreloader struct {
	LayerField   func(...psql.PreloadOption) psql.Preloader
	Organization func(...psql.PreloadOption) psql.Preloader
}

// buildArcgisAddressMappingPreloader wires each relationship's join columns
// into a psql.Preloader factory.
func buildArcgisAddressMappingPreloader() arcgisAddressMappingPreloader {
	return arcgisAddressMappingPreloader{
		LayerField: func(opts ...psql.PreloadOption) psql.Preloader {
			return psql.Preload[*ArcgisLayerField, ArcgisLayerFieldSlice](psql.PreloadRel{
				Name: "LayerField",
				Sides: []psql.PreloadSide{
					{
						From:        ArcgisAddressMappings,
						To:          ArcgisLayerFields,
						FromColumns: []string{"layer_feature_service_item_id", "layer_index", "layer_field_name"},
						ToColumns:   []string{"layer_feature_service_item_id", "layer_index", "name"},
					},
				},
			}, ArcgisLayerFields.Columns.Names(), opts...)
		},
		Organization: func(opts ...psql.PreloadOption) psql.Preloader {
			return psql.Preload[*Organization, OrganizationSlice](psql.PreloadRel{
				Name: "Organization",
				Sides: []psql.PreloadSide{
					{
						From:        ArcgisAddressMappings,
						To:          Organizations,
						FromColumns: []string{"organization_id"},
						ToColumns:   []string{"id"},
					},
				},
			}, Organizations.Columns.Names(), opts...)
		},
	}
}

// arcgisAddressMappingThenLoader exposes factory functions that build
// post-query ("then") loaders for each relationship.
type arcgisAddressMappingThenLoader[Q orm.Loadable] struct {
	LayerField   func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
	Organization func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
}

// buildArcgisAddressMappingThenLoader adapts the Load* methods into generic
// orm.Loader factories via small local interfaces.
func buildArcgisAddressMappingThenLoader[Q orm.Loadable]() arcgisAddressMappingThenLoader[Q] {
	type LayerFieldLoadInterface interface {
		LoadLayerField(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
	}
	type OrganizationLoadInterface interface {
		LoadOrganization(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
	}

	return arcgisAddressMappingThenLoader[Q]{
		LayerField: thenLoadBuilder[Q](
			"LayerField",
			func(ctx context.Context, exec bob.Executor, retrieved LayerFieldLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
				return retrieved.LoadLayerField(ctx, exec, mods...)
			},
		),
		Organization: thenLoadBuilder[Q](
			"Organization",
			func(ctx context.Context, exec bob.Executor, retrieved OrganizationLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
				return retrieved.LoadOrganization(ctx, exec, mods...)
			},
		),
	}
}
|
||||
|
||||
// LoadLayerField loads the arcgisAddressMapping's LayerField into the .R struct
func (o *ArcgisAddressMapping) LoadLayerField(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
	if o == nil {
		return nil
	}

	// Reset the relationship
	o.R.LayerField = nil

	related, err := o.LayerField(mods...).One(ctx, exec)
	if err != nil {
		return err
	}

	related.R.AddressMappings = ArcgisAddressMappingSlice{o}

	o.R.LayerField = related
	return nil
}

// LoadLayerField loads the arcgisAddressMapping's LayerField into the .R struct
// for every row in the slice, fetching all related rows in one query and
// matching them in memory by the composite foreign key.
func (os ArcgisAddressMappingSlice) LoadLayerField(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
	if len(os) == 0 {
		return nil
	}

	arcgisLayerFields, err := os.LayerField(mods...).All(ctx, exec)
	if err != nil {
		return err
	}

	for _, o := range os {
		if o == nil {
			continue
		}

		for _, rel := range arcgisLayerFields {

			if !(o.LayerFeatureServiceItemID == rel.LayerFeatureServiceItemID) {
				continue
			}

			if !(o.LayerIndex == rel.LayerIndex) {
				continue
			}

			if !(o.LayerFieldName == rel.Name) {
				continue
			}

			rel.R.AddressMappings = append(rel.R.AddressMappings, o)

			o.R.LayerField = rel
			break
		}
	}

	return nil
}

// LoadOrganization loads the arcgisAddressMapping's Organization into the .R struct
func (o *ArcgisAddressMapping) LoadOrganization(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
	if o == nil {
		return nil
	}

	// Reset the relationship
	o.R.Organization = nil

	related, err := o.Organization(mods...).One(ctx, exec)
	if err != nil {
		return err
	}

	related.R.AddressMappings = ArcgisAddressMappingSlice{o}

	o.R.Organization = related
	return nil
}

// LoadOrganization loads the arcgisAddressMapping's Organization into the .R struct
// for every row in the slice, fetching all related rows in one query and
// matching them in memory by organization id.
func (os ArcgisAddressMappingSlice) LoadOrganization(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
	if len(os) == 0 {
		return nil
	}

	organizations, err := os.Organization(mods...).All(ctx, exec)
	if err != nil {
		return err
	}

	for _, o := range os {
		if o == nil {
			continue
		}

		for _, rel := range organizations {

			if !(o.OrganizationID == rel.ID) {
				continue
			}

			rel.R.AddressMappings = append(rel.R.AddressMappings, o)

			o.R.Organization = rel
			break
		}
	}

	return nil
}
|
||||
|
|
@ -1,795 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package models
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
|
||||
"github.com/Gleipnir-Technology/bob"
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql"
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql/dialect"
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql/dm"
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql/sm"
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql/um"
|
||||
"github.com/Gleipnir-Technology/bob/expr"
|
||||
"github.com/Gleipnir-Technology/bob/orm"
|
||||
"github.com/Gleipnir-Technology/bob/types/pgtypes"
|
||||
"github.com/aarondl/opt/omit"
|
||||
)
|
||||
|
||||
// ArcgisLayer is an object representing the database table.
|
||||
type ArcgisLayer struct {
|
||||
Extent string `db:"extent" `
|
||||
FeatureServiceItemID string `db:"feature_service_item_id,pk" `
|
||||
Index int32 `db:"index_,pk" `
|
||||
|
||||
R arcgisLayerR `db:"-" `
|
||||
}
|
||||
|
||||
// ArcgisLayerSlice is an alias for a slice of pointers to ArcgisLayer.
|
||||
// This should almost always be used instead of []*ArcgisLayer.
|
||||
type ArcgisLayerSlice []*ArcgisLayer
|
||||
|
||||
// ArcgisLayers contains methods to work with the layer table
|
||||
var ArcgisLayers = psql.NewTablex[*ArcgisLayer, ArcgisLayerSlice, *ArcgisLayerSetter]("arcgis", "layer", buildArcgisLayerColumns("arcgis.layer"))
|
||||
|
||||
// ArcgisLayersQuery is a query on the layer table
|
||||
type ArcgisLayersQuery = *psql.ViewQuery[*ArcgisLayer, ArcgisLayerSlice]
|
||||
|
||||
// arcgisLayerR is where relationships are stored.
|
||||
type arcgisLayerR struct {
|
||||
FeatureServiceItemServiceFeature *ArcgisServiceFeature // arcgis.layer.layer_feature_service_item_id_fkey
|
||||
LayerFields ArcgisLayerFieldSlice // arcgis.layer_field.layer_field_layer_feature_service_item_id_layer_index_fkey
|
||||
}
|
||||
|
||||
func buildArcgisLayerColumns(alias string) arcgisLayerColumns {
|
||||
return arcgisLayerColumns{
|
||||
ColumnsExpr: expr.NewColumnsExpr(
|
||||
"extent", "feature_service_item_id", "index_",
|
||||
).WithParent("arcgis.layer"),
|
||||
tableAlias: alias,
|
||||
Extent: psql.Quote(alias, "extent"),
|
||||
FeatureServiceItemID: psql.Quote(alias, "feature_service_item_id"),
|
||||
Index: psql.Quote(alias, "index_"),
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisLayerColumns struct {
|
||||
expr.ColumnsExpr
|
||||
tableAlias string
|
||||
Extent psql.Expression
|
||||
FeatureServiceItemID psql.Expression
|
||||
Index psql.Expression
|
||||
}
|
||||
|
||||
func (c arcgisLayerColumns) Alias() string {
|
||||
return c.tableAlias
|
||||
}
|
||||
|
||||
func (arcgisLayerColumns) AliasedAs(alias string) arcgisLayerColumns {
|
||||
return buildArcgisLayerColumns(alias)
|
||||
}
|
||||
|
||||
// ArcgisLayerSetter is used for insert/upsert/update operations
|
||||
// All values are optional, and do not have to be set
|
||||
// Generated columns are not included
|
||||
type ArcgisLayerSetter struct {
|
||||
Extent omit.Val[string] `db:"extent" `
|
||||
FeatureServiceItemID omit.Val[string] `db:"feature_service_item_id,pk" `
|
||||
Index omit.Val[int32] `db:"index_,pk" `
|
||||
}
|
||||
|
||||
func (s ArcgisLayerSetter) SetColumns() []string {
|
||||
vals := make([]string, 0, 3)
|
||||
if s.Extent.IsValue() {
|
||||
vals = append(vals, "extent")
|
||||
}
|
||||
if s.FeatureServiceItemID.IsValue() {
|
||||
vals = append(vals, "feature_service_item_id")
|
||||
}
|
||||
if s.Index.IsValue() {
|
||||
vals = append(vals, "index_")
|
||||
}
|
||||
return vals
|
||||
}
|
||||
|
||||
func (s ArcgisLayerSetter) Overwrite(t *ArcgisLayer) {
|
||||
if s.Extent.IsValue() {
|
||||
t.Extent = s.Extent.MustGet()
|
||||
}
|
||||
if s.FeatureServiceItemID.IsValue() {
|
||||
t.FeatureServiceItemID = s.FeatureServiceItemID.MustGet()
|
||||
}
|
||||
if s.Index.IsValue() {
|
||||
t.Index = s.Index.MustGet()
|
||||
}
|
||||
}
|
||||
|
||||
func (s *ArcgisLayerSetter) Apply(q *dialect.InsertQuery) {
|
||||
q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
|
||||
return ArcgisLayers.BeforeInsertHooks.RunHooks(ctx, exec, s)
|
||||
})
|
||||
|
||||
q.AppendValues(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
|
||||
vals := make([]bob.Expression, 3)
|
||||
if s.Extent.IsValue() {
|
||||
vals[0] = psql.Arg(s.Extent.MustGet())
|
||||
} else {
|
||||
vals[0] = psql.Raw("DEFAULT")
|
||||
}
|
||||
|
||||
if s.FeatureServiceItemID.IsValue() {
|
||||
vals[1] = psql.Arg(s.FeatureServiceItemID.MustGet())
|
||||
} else {
|
||||
vals[1] = psql.Raw("DEFAULT")
|
||||
}
|
||||
|
||||
if s.Index.IsValue() {
|
||||
vals[2] = psql.Arg(s.Index.MustGet())
|
||||
} else {
|
||||
vals[2] = psql.Raw("DEFAULT")
|
||||
}
|
||||
|
||||
return bob.ExpressSlice(ctx, w, d, start, vals, "", ", ", "")
|
||||
}))
|
||||
}
|
||||
|
||||
func (s ArcgisLayerSetter) UpdateMod() bob.Mod[*dialect.UpdateQuery] {
|
||||
return um.Set(s.Expressions()...)
|
||||
}
|
||||
|
||||
func (s ArcgisLayerSetter) Expressions(prefix ...string) []bob.Expression {
|
||||
exprs := make([]bob.Expression, 0, 3)
|
||||
|
||||
if s.Extent.IsValue() {
|
||||
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
|
||||
psql.Quote(append(prefix, "extent")...),
|
||||
psql.Arg(s.Extent),
|
||||
}})
|
||||
}
|
||||
|
||||
if s.FeatureServiceItemID.IsValue() {
|
||||
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
|
||||
psql.Quote(append(prefix, "feature_service_item_id")...),
|
||||
psql.Arg(s.FeatureServiceItemID),
|
||||
}})
|
||||
}
|
||||
|
||||
if s.Index.IsValue() {
|
||||
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
|
||||
psql.Quote(append(prefix, "index_")...),
|
||||
psql.Arg(s.Index),
|
||||
}})
|
||||
}
|
||||
|
||||
return exprs
|
||||
}
|
||||
|
||||
// FindArcgisLayer retrieves a single record by primary key
|
||||
// If cols is empty Find will return all columns.
|
||||
func FindArcgisLayer(ctx context.Context, exec bob.Executor, FeatureServiceItemIDPK string, IndexPK int32, cols ...string) (*ArcgisLayer, error) {
|
||||
if len(cols) == 0 {
|
||||
return ArcgisLayers.Query(
|
||||
sm.Where(ArcgisLayers.Columns.FeatureServiceItemID.EQ(psql.Arg(FeatureServiceItemIDPK))),
|
||||
sm.Where(ArcgisLayers.Columns.Index.EQ(psql.Arg(IndexPK))),
|
||||
).One(ctx, exec)
|
||||
}
|
||||
|
||||
return ArcgisLayers.Query(
|
||||
sm.Where(ArcgisLayers.Columns.FeatureServiceItemID.EQ(psql.Arg(FeatureServiceItemIDPK))),
|
||||
sm.Where(ArcgisLayers.Columns.Index.EQ(psql.Arg(IndexPK))),
|
||||
sm.Columns(ArcgisLayers.Columns.Only(cols...)),
|
||||
).One(ctx, exec)
|
||||
}
|
||||
|
||||
// ArcgisLayerExists checks the presence of a single record by primary key
|
||||
func ArcgisLayerExists(ctx context.Context, exec bob.Executor, FeatureServiceItemIDPK string, IndexPK int32) (bool, error) {
|
||||
return ArcgisLayers.Query(
|
||||
sm.Where(ArcgisLayers.Columns.FeatureServiceItemID.EQ(psql.Arg(FeatureServiceItemIDPK))),
|
||||
sm.Where(ArcgisLayers.Columns.Index.EQ(psql.Arg(IndexPK))),
|
||||
).Exists(ctx, exec)
|
||||
}
|
||||
|
||||
// AfterQueryHook is called after ArcgisLayer is retrieved from the database
|
||||
func (o *ArcgisLayer) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error {
|
||||
var err error
|
||||
|
||||
switch queryType {
|
||||
case bob.QueryTypeSelect:
|
||||
ctx, err = ArcgisLayers.AfterSelectHooks.RunHooks(ctx, exec, ArcgisLayerSlice{o})
|
||||
case bob.QueryTypeInsert:
|
||||
ctx, err = ArcgisLayers.AfterInsertHooks.RunHooks(ctx, exec, ArcgisLayerSlice{o})
|
||||
case bob.QueryTypeUpdate:
|
||||
ctx, err = ArcgisLayers.AfterUpdateHooks.RunHooks(ctx, exec, ArcgisLayerSlice{o})
|
||||
case bob.QueryTypeDelete:
|
||||
ctx, err = ArcgisLayers.AfterDeleteHooks.RunHooks(ctx, exec, ArcgisLayerSlice{o})
|
||||
}
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
// primaryKeyVals returns the primary key values of the ArcgisLayer
|
||||
func (o *ArcgisLayer) primaryKeyVals() bob.Expression {
|
||||
return psql.ArgGroup(
|
||||
o.FeatureServiceItemID,
|
||||
o.Index,
|
||||
)
|
||||
}
|
||||
|
||||
func (o *ArcgisLayer) pkEQ() dialect.Expression {
|
||||
return psql.Group(psql.Quote("arcgis.layer", "feature_service_item_id"), psql.Quote("arcgis.layer", "index_")).EQ(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
|
||||
return o.primaryKeyVals().WriteSQL(ctx, w, d, start)
|
||||
}))
|
||||
}
|
||||
|
||||
// Update uses an executor to update the ArcgisLayer
|
||||
func (o *ArcgisLayer) Update(ctx context.Context, exec bob.Executor, s *ArcgisLayerSetter) error {
|
||||
v, err := ArcgisLayers.Update(s.UpdateMod(), um.Where(o.pkEQ())).One(ctx, exec)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
o.R = v.R
|
||||
*o = *v
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Delete deletes a single ArcgisLayer record with an executor
|
||||
func (o *ArcgisLayer) Delete(ctx context.Context, exec bob.Executor) error {
|
||||
_, err := ArcgisLayers.Delete(dm.Where(o.pkEQ())).Exec(ctx, exec)
|
||||
return err
|
||||
}
|
||||
|
||||
// Reload refreshes the ArcgisLayer using the executor
|
||||
func (o *ArcgisLayer) Reload(ctx context.Context, exec bob.Executor) error {
|
||||
o2, err := ArcgisLayers.Query(
|
||||
sm.Where(ArcgisLayers.Columns.FeatureServiceItemID.EQ(psql.Arg(o.FeatureServiceItemID))),
|
||||
sm.Where(ArcgisLayers.Columns.Index.EQ(psql.Arg(o.Index))),
|
||||
).One(ctx, exec)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
o2.R = o.R
|
||||
*o = *o2
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// AfterQueryHook is called after ArcgisLayerSlice is retrieved from the database
|
||||
func (o ArcgisLayerSlice) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error {
|
||||
var err error
|
||||
|
||||
switch queryType {
|
||||
case bob.QueryTypeSelect:
|
||||
ctx, err = ArcgisLayers.AfterSelectHooks.RunHooks(ctx, exec, o)
|
||||
case bob.QueryTypeInsert:
|
||||
ctx, err = ArcgisLayers.AfterInsertHooks.RunHooks(ctx, exec, o)
|
||||
case bob.QueryTypeUpdate:
|
||||
ctx, err = ArcgisLayers.AfterUpdateHooks.RunHooks(ctx, exec, o)
|
||||
case bob.QueryTypeDelete:
|
||||
ctx, err = ArcgisLayers.AfterDeleteHooks.RunHooks(ctx, exec, o)
|
||||
}
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func (o ArcgisLayerSlice) pkIN() dialect.Expression {
|
||||
if len(o) == 0 {
|
||||
return psql.Raw("NULL")
|
||||
}
|
||||
|
||||
return psql.Group(psql.Quote("arcgis.layer", "feature_service_item_id"), psql.Quote("arcgis.layer", "index_")).In(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
|
||||
pkPairs := make([]bob.Expression, len(o))
|
||||
for i, row := range o {
|
||||
pkPairs[i] = row.primaryKeyVals()
|
||||
}
|
||||
return bob.ExpressSlice(ctx, w, d, start, pkPairs, "", ", ", "")
|
||||
}))
|
||||
}
|
||||
|
||||
// copyMatchingRows finds models in the given slice that have the same primary key
|
||||
// then it first copies the existing relationships from the old model to the new model
|
||||
// and then replaces the old model in the slice with the new model
|
||||
func (o ArcgisLayerSlice) copyMatchingRows(from ...*ArcgisLayer) {
|
||||
for i, old := range o {
|
||||
for _, new := range from {
|
||||
if new.FeatureServiceItemID != old.FeatureServiceItemID {
|
||||
continue
|
||||
}
|
||||
if new.Index != old.Index {
|
||||
continue
|
||||
}
|
||||
new.R = old.R
|
||||
o[i] = new
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// UpdateMod modifies an update query with "WHERE primary_key IN (o...)"
|
||||
func (o ArcgisLayerSlice) UpdateMod() bob.Mod[*dialect.UpdateQuery] {
|
||||
return bob.ModFunc[*dialect.UpdateQuery](func(q *dialect.UpdateQuery) {
|
||||
q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
|
||||
return ArcgisLayers.BeforeUpdateHooks.RunHooks(ctx, exec, o)
|
||||
})
|
||||
|
||||
q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error {
|
||||
var err error
|
||||
switch retrieved := retrieved.(type) {
|
||||
case *ArcgisLayer:
|
||||
o.copyMatchingRows(retrieved)
|
||||
case []*ArcgisLayer:
|
||||
o.copyMatchingRows(retrieved...)
|
||||
case ArcgisLayerSlice:
|
||||
o.copyMatchingRows(retrieved...)
|
||||
default:
|
||||
// If the retrieved value is not a ArcgisLayer or a slice of ArcgisLayer
|
||||
// then run the AfterUpdateHooks on the slice
|
||||
_, err = ArcgisLayers.AfterUpdateHooks.RunHooks(ctx, exec, o)
|
||||
}
|
||||
|
||||
return err
|
||||
}))
|
||||
|
||||
q.AppendWhere(o.pkIN())
|
||||
})
|
||||
}
|
||||
|
||||
// DeleteMod modifies an delete query with "WHERE primary_key IN (o...)"
|
||||
func (o ArcgisLayerSlice) DeleteMod() bob.Mod[*dialect.DeleteQuery] {
|
||||
return bob.ModFunc[*dialect.DeleteQuery](func(q *dialect.DeleteQuery) {
|
||||
q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
|
||||
return ArcgisLayers.BeforeDeleteHooks.RunHooks(ctx, exec, o)
|
||||
})
|
||||
|
||||
q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error {
|
||||
var err error
|
||||
switch retrieved := retrieved.(type) {
|
||||
case *ArcgisLayer:
|
||||
o.copyMatchingRows(retrieved)
|
||||
case []*ArcgisLayer:
|
||||
o.copyMatchingRows(retrieved...)
|
||||
case ArcgisLayerSlice:
|
||||
o.copyMatchingRows(retrieved...)
|
||||
default:
|
||||
// If the retrieved value is not a ArcgisLayer or a slice of ArcgisLayer
|
||||
// then run the AfterDeleteHooks on the slice
|
||||
_, err = ArcgisLayers.AfterDeleteHooks.RunHooks(ctx, exec, o)
|
||||
}
|
||||
|
||||
return err
|
||||
}))
|
||||
|
||||
q.AppendWhere(o.pkIN())
|
||||
})
|
||||
}
|
||||
|
||||
func (o ArcgisLayerSlice) UpdateAll(ctx context.Context, exec bob.Executor, vals ArcgisLayerSetter) error {
|
||||
if len(o) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
_, err := ArcgisLayers.Update(vals.UpdateMod(), o.UpdateMod()).All(ctx, exec)
|
||||
return err
|
||||
}
|
||||
|
||||
func (o ArcgisLayerSlice) DeleteAll(ctx context.Context, exec bob.Executor) error {
|
||||
if len(o) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
_, err := ArcgisLayers.Delete(o.DeleteMod()).Exec(ctx, exec)
|
||||
return err
|
||||
}
|
||||
|
||||
func (o ArcgisLayerSlice) ReloadAll(ctx context.Context, exec bob.Executor) error {
|
||||
if len(o) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
o2, err := ArcgisLayers.Query(sm.Where(o.pkIN())).All(ctx, exec)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
o.copyMatchingRows(o2...)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// FeatureServiceItemServiceFeature starts a query for related objects on arcgis.service_feature
|
||||
func (o *ArcgisLayer) FeatureServiceItemServiceFeature(mods ...bob.Mod[*dialect.SelectQuery]) ArcgisServiceFeaturesQuery {
|
||||
return ArcgisServiceFeatures.Query(append(mods,
|
||||
sm.Where(ArcgisServiceFeatures.Columns.ItemID.EQ(psql.Arg(o.FeatureServiceItemID))),
|
||||
)...)
|
||||
}
|
||||
|
||||
func (os ArcgisLayerSlice) FeatureServiceItemServiceFeature(mods ...bob.Mod[*dialect.SelectQuery]) ArcgisServiceFeaturesQuery {
|
||||
pkFeatureServiceItemID := make(pgtypes.Array[string], 0, len(os))
|
||||
for _, o := range os {
|
||||
if o == nil {
|
||||
continue
|
||||
}
|
||||
pkFeatureServiceItemID = append(pkFeatureServiceItemID, o.FeatureServiceItemID)
|
||||
}
|
||||
PKArgExpr := psql.Select(sm.Columns(
|
||||
psql.F("unnest", psql.Cast(psql.Arg(pkFeatureServiceItemID), "text[]")),
|
||||
))
|
||||
|
||||
return ArcgisServiceFeatures.Query(append(mods,
|
||||
sm.Where(psql.Group(ArcgisServiceFeatures.Columns.ItemID).OP("IN", PKArgExpr)),
|
||||
)...)
|
||||
}
|
||||
|
||||
// LayerFields starts a query for related objects on arcgis.layer_field
|
||||
func (o *ArcgisLayer) LayerFields(mods ...bob.Mod[*dialect.SelectQuery]) ArcgisLayerFieldsQuery {
|
||||
return ArcgisLayerFields.Query(append(mods,
|
||||
sm.Where(ArcgisLayerFields.Columns.LayerFeatureServiceItemID.EQ(psql.Arg(o.FeatureServiceItemID))), sm.Where(ArcgisLayerFields.Columns.LayerIndex.EQ(psql.Arg(o.Index))),
|
||||
)...)
|
||||
}
|
||||
|
||||
func (os ArcgisLayerSlice) LayerFields(mods ...bob.Mod[*dialect.SelectQuery]) ArcgisLayerFieldsQuery {
|
||||
pkFeatureServiceItemID := make(pgtypes.Array[string], 0, len(os))
|
||||
|
||||
pkIndex := make(pgtypes.Array[int32], 0, len(os))
|
||||
for _, o := range os {
|
||||
if o == nil {
|
||||
continue
|
||||
}
|
||||
pkFeatureServiceItemID = append(pkFeatureServiceItemID, o.FeatureServiceItemID)
|
||||
pkIndex = append(pkIndex, o.Index)
|
||||
}
|
||||
PKArgExpr := psql.Select(sm.Columns(
|
||||
psql.F("unnest", psql.Cast(psql.Arg(pkFeatureServiceItemID), "text[]")),
|
||||
psql.F("unnest", psql.Cast(psql.Arg(pkIndex), "integer[]")),
|
||||
))
|
||||
|
||||
return ArcgisLayerFields.Query(append(mods,
|
||||
sm.Where(psql.Group(ArcgisLayerFields.Columns.LayerFeatureServiceItemID, ArcgisLayerFields.Columns.LayerIndex).OP("IN", PKArgExpr)),
|
||||
)...)
|
||||
}
|
||||
|
||||
func attachArcgisLayerFeatureServiceItemServiceFeature0(ctx context.Context, exec bob.Executor, count int, arcgisLayer0 *ArcgisLayer, arcgisServiceFeature1 *ArcgisServiceFeature) (*ArcgisLayer, error) {
|
||||
setter := &ArcgisLayerSetter{
|
||||
FeatureServiceItemID: omit.From(arcgisServiceFeature1.ItemID),
|
||||
}
|
||||
|
||||
err := arcgisLayer0.Update(ctx, exec, setter)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("attachArcgisLayerFeatureServiceItemServiceFeature0: %w", err)
|
||||
}
|
||||
|
||||
return arcgisLayer0, nil
|
||||
}
|
||||
|
||||
func (arcgisLayer0 *ArcgisLayer) InsertFeatureServiceItemServiceFeature(ctx context.Context, exec bob.Executor, related *ArcgisServiceFeatureSetter) error {
|
||||
var err error
|
||||
|
||||
arcgisServiceFeature1, err := ArcgisServiceFeatures.Insert(related).One(ctx, exec)
|
||||
if err != nil {
|
||||
return fmt.Errorf("inserting related objects: %w", err)
|
||||
}
|
||||
|
||||
_, err = attachArcgisLayerFeatureServiceItemServiceFeature0(ctx, exec, 1, arcgisLayer0, arcgisServiceFeature1)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
arcgisLayer0.R.FeatureServiceItemServiceFeature = arcgisServiceFeature1
|
||||
|
||||
arcgisServiceFeature1.R.FeatureServiceItemLayers = append(arcgisServiceFeature1.R.FeatureServiceItemLayers, arcgisLayer0)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (arcgisLayer0 *ArcgisLayer) AttachFeatureServiceItemServiceFeature(ctx context.Context, exec bob.Executor, arcgisServiceFeature1 *ArcgisServiceFeature) error {
|
||||
var err error
|
||||
|
||||
_, err = attachArcgisLayerFeatureServiceItemServiceFeature0(ctx, exec, 1, arcgisLayer0, arcgisServiceFeature1)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
arcgisLayer0.R.FeatureServiceItemServiceFeature = arcgisServiceFeature1
|
||||
|
||||
arcgisServiceFeature1.R.FeatureServiceItemLayers = append(arcgisServiceFeature1.R.FeatureServiceItemLayers, arcgisLayer0)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func insertArcgisLayerLayerFields0(ctx context.Context, exec bob.Executor, arcgisLayerFields1 []*ArcgisLayerFieldSetter, arcgisLayer0 *ArcgisLayer) (ArcgisLayerFieldSlice, error) {
|
||||
for i := range arcgisLayerFields1 {
|
||||
arcgisLayerFields1[i].LayerFeatureServiceItemID = omit.From(arcgisLayer0.FeatureServiceItemID)
|
||||
arcgisLayerFields1[i].LayerIndex = omit.From(arcgisLayer0.Index)
|
||||
}
|
||||
|
||||
ret, err := ArcgisLayerFields.Insert(bob.ToMods(arcgisLayerFields1...)).All(ctx, exec)
|
||||
if err != nil {
|
||||
return ret, fmt.Errorf("insertArcgisLayerLayerFields0: %w", err)
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func attachArcgisLayerLayerFields0(ctx context.Context, exec bob.Executor, count int, arcgisLayerFields1 ArcgisLayerFieldSlice, arcgisLayer0 *ArcgisLayer) (ArcgisLayerFieldSlice, error) {
|
||||
setter := &ArcgisLayerFieldSetter{
|
||||
LayerFeatureServiceItemID: omit.From(arcgisLayer0.FeatureServiceItemID),
|
||||
LayerIndex: omit.From(arcgisLayer0.Index),
|
||||
}
|
||||
|
||||
err := arcgisLayerFields1.UpdateAll(ctx, exec, *setter)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("attachArcgisLayerLayerFields0: %w", err)
|
||||
}
|
||||
|
||||
return arcgisLayerFields1, nil
|
||||
}
|
||||
|
||||
func (arcgisLayer0 *ArcgisLayer) InsertLayerFields(ctx context.Context, exec bob.Executor, related ...*ArcgisLayerFieldSetter) error {
|
||||
if len(related) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
var err error
|
||||
|
||||
arcgisLayerFields1, err := insertArcgisLayerLayerFields0(ctx, exec, related, arcgisLayer0)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
arcgisLayer0.R.LayerFields = append(arcgisLayer0.R.LayerFields, arcgisLayerFields1...)
|
||||
|
||||
for _, rel := range arcgisLayerFields1 {
|
||||
rel.R.Layer = arcgisLayer0
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (arcgisLayer0 *ArcgisLayer) AttachLayerFields(ctx context.Context, exec bob.Executor, related ...*ArcgisLayerField) error {
|
||||
if len(related) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
var err error
|
||||
arcgisLayerFields1 := ArcgisLayerFieldSlice(related)
|
||||
|
||||
_, err = attachArcgisLayerLayerFields0(ctx, exec, len(related), arcgisLayerFields1, arcgisLayer0)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
arcgisLayer0.R.LayerFields = append(arcgisLayer0.R.LayerFields, arcgisLayerFields1...)
|
||||
|
||||
for _, rel := range related {
|
||||
rel.R.Layer = arcgisLayer0
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
type arcgisLayerWhere[Q psql.Filterable] struct {
|
||||
Extent psql.WhereMod[Q, string]
|
||||
FeatureServiceItemID psql.WhereMod[Q, string]
|
||||
Index psql.WhereMod[Q, int32]
|
||||
}
|
||||
|
||||
func (arcgisLayerWhere[Q]) AliasedAs(alias string) arcgisLayerWhere[Q] {
|
||||
return buildArcgisLayerWhere[Q](buildArcgisLayerColumns(alias))
|
||||
}
|
||||
|
||||
func buildArcgisLayerWhere[Q psql.Filterable](cols arcgisLayerColumns) arcgisLayerWhere[Q] {
|
||||
return arcgisLayerWhere[Q]{
|
||||
Extent: psql.Where[Q, string](cols.Extent),
|
||||
FeatureServiceItemID: psql.Where[Q, string](cols.FeatureServiceItemID),
|
||||
Index: psql.Where[Q, int32](cols.Index),
|
||||
}
|
||||
}
|
||||
|
||||
func (o *ArcgisLayer) Preload(name string, retrieved any) error {
|
||||
if o == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
switch name {
|
||||
case "FeatureServiceItemServiceFeature":
|
||||
rel, ok := retrieved.(*ArcgisServiceFeature)
|
||||
if !ok {
|
||||
return fmt.Errorf("arcgisLayer cannot load %T as %q", retrieved, name)
|
||||
}
|
||||
|
||||
o.R.FeatureServiceItemServiceFeature = rel
|
||||
|
||||
if rel != nil {
|
||||
rel.R.FeatureServiceItemLayers = ArcgisLayerSlice{o}
|
||||
}
|
||||
return nil
|
||||
case "LayerFields":
|
||||
rels, ok := retrieved.(ArcgisLayerFieldSlice)
|
||||
if !ok {
|
||||
return fmt.Errorf("arcgisLayer cannot load %T as %q", retrieved, name)
|
||||
}
|
||||
|
||||
o.R.LayerFields = rels
|
||||
|
||||
for _, rel := range rels {
|
||||
if rel != nil {
|
||||
rel.R.Layer = o
|
||||
}
|
||||
}
|
||||
return nil
|
||||
default:
|
||||
return fmt.Errorf("arcgisLayer has no relationship %q", name)
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisLayerPreloader struct {
|
||||
FeatureServiceItemServiceFeature func(...psql.PreloadOption) psql.Preloader
|
||||
}
|
||||
|
||||
func buildArcgisLayerPreloader() arcgisLayerPreloader {
|
||||
return arcgisLayerPreloader{
|
||||
FeatureServiceItemServiceFeature: func(opts ...psql.PreloadOption) psql.Preloader {
|
||||
return psql.Preload[*ArcgisServiceFeature, ArcgisServiceFeatureSlice](psql.PreloadRel{
|
||||
Name: "FeatureServiceItemServiceFeature",
|
||||
Sides: []psql.PreloadSide{
|
||||
{
|
||||
From: ArcgisLayers,
|
||||
To: ArcgisServiceFeatures,
|
||||
FromColumns: []string{"feature_service_item_id"},
|
||||
ToColumns: []string{"item_id"},
|
||||
},
|
||||
},
|
||||
}, ArcgisServiceFeatures.Columns.Names(), opts...)
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisLayerThenLoader[Q orm.Loadable] struct {
|
||||
FeatureServiceItemServiceFeature func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
|
||||
LayerFields func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
|
||||
}
|
||||
|
||||
func buildArcgisLayerThenLoader[Q orm.Loadable]() arcgisLayerThenLoader[Q] {
|
||||
type FeatureServiceItemServiceFeatureLoadInterface interface {
|
||||
LoadFeatureServiceItemServiceFeature(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
|
||||
}
|
||||
type LayerFieldsLoadInterface interface {
|
||||
LoadLayerFields(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
|
||||
}
|
||||
|
||||
return arcgisLayerThenLoader[Q]{
|
||||
FeatureServiceItemServiceFeature: thenLoadBuilder[Q](
|
||||
"FeatureServiceItemServiceFeature",
|
||||
func(ctx context.Context, exec bob.Executor, retrieved FeatureServiceItemServiceFeatureLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
||||
return retrieved.LoadFeatureServiceItemServiceFeature(ctx, exec, mods...)
|
||||
},
|
||||
),
|
||||
LayerFields: thenLoadBuilder[Q](
|
||||
"LayerFields",
|
||||
func(ctx context.Context, exec bob.Executor, retrieved LayerFieldsLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
||||
return retrieved.LoadLayerFields(ctx, exec, mods...)
|
||||
},
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
// LoadFeatureServiceItemServiceFeature loads the arcgisLayer's FeatureServiceItemServiceFeature into the .R struct
|
||||
func (o *ArcgisLayer) LoadFeatureServiceItemServiceFeature(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
||||
if o == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Reset the relationship
|
||||
o.R.FeatureServiceItemServiceFeature = nil
|
||||
|
||||
related, err := o.FeatureServiceItemServiceFeature(mods...).One(ctx, exec)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
related.R.FeatureServiceItemLayers = ArcgisLayerSlice{o}
|
||||
|
||||
o.R.FeatureServiceItemServiceFeature = related
|
||||
return nil
|
||||
}
|
||||
|
||||
// LoadFeatureServiceItemServiceFeature loads the arcgisLayer's FeatureServiceItemServiceFeature into the .R struct
|
||||
func (os ArcgisLayerSlice) LoadFeatureServiceItemServiceFeature(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
||||
if len(os) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
arcgisServiceFeatures, err := os.FeatureServiceItemServiceFeature(mods...).All(ctx, exec)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, o := range os {
|
||||
if o == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
for _, rel := range arcgisServiceFeatures {
|
||||
|
||||
if !(o.FeatureServiceItemID == rel.ItemID) {
|
||||
continue
|
||||
}
|
||||
|
||||
rel.R.FeatureServiceItemLayers = append(rel.R.FeatureServiceItemLayers, o)
|
||||
|
||||
o.R.FeatureServiceItemServiceFeature = rel
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// LoadLayerFields loads the arcgisLayer's LayerFields into the .R struct
|
||||
func (o *ArcgisLayer) LoadLayerFields(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
||||
if o == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Reset the relationship
|
||||
o.R.LayerFields = nil
|
||||
|
||||
related, err := o.LayerFields(mods...).All(ctx, exec)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, rel := range related {
|
||||
rel.R.Layer = o
|
||||
}
|
||||
|
||||
o.R.LayerFields = related
|
||||
return nil
|
||||
}
|
||||
|
||||
// LoadLayerFields loads the arcgisLayer's LayerFields into the .R struct
|
||||
func (os ArcgisLayerSlice) LoadLayerFields(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
||||
if len(os) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
arcgisLayerFields, err := os.LayerFields(mods...).All(ctx, exec)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, o := range os {
|
||||
if o == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
o.R.LayerFields = nil
|
||||
}
|
||||
|
||||
for _, o := range os {
|
||||
if o == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
for _, rel := range arcgisLayerFields {
|
||||
|
||||
if !(o.FeatureServiceItemID == rel.LayerFeatureServiceItemID) {
|
||||
continue
|
||||
}
|
||||
|
||||
if !(o.Index == rel.LayerIndex) {
|
||||
continue
|
||||
}
|
||||
|
||||
rel.R.Layer = o
|
||||
|
||||
o.R.LayerFields = append(o.R.LayerFields, rel)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
File diff suppressed because it is too large
Load diff
|
|
@ -1,989 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package models
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
"time"
|
||||
|
||||
"github.com/Gleipnir-Technology/bob"
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql"
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql/dialect"
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql/dm"
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql/sm"
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql/um"
|
||||
"github.com/Gleipnir-Technology/bob/expr"
|
||||
"github.com/Gleipnir-Technology/bob/orm"
|
||||
"github.com/Gleipnir-Technology/bob/types/pgtypes"
|
||||
"github.com/aarondl/opt/null"
|
||||
"github.com/aarondl/opt/omit"
|
||||
"github.com/aarondl/opt/omitnull"
|
||||
)
|
||||
|
||||
// ArcgisOauthToken is an object representing the database table.
|
||||
type ArcgisOauthToken struct {
|
||||
AccessToken string `db:"access_token" `
|
||||
AccessTokenExpires time.Time `db:"access_token_expires" `
|
||||
ArcgisAccountID null.Val[string] `db:"arcgis_account_id" `
|
||||
ArcgisID null.Val[string] `db:"arcgis_id" `
|
||||
ArcgisLicenseTypeID null.Val[string] `db:"arcgis_license_type_id" `
|
||||
Created time.Time `db:"created" `
|
||||
ID int32 `db:"id,pk" `
|
||||
InvalidatedAt null.Val[time.Time] `db:"invalidated_at" `
|
||||
RefreshToken string `db:"refresh_token" `
|
||||
RefreshTokenExpires time.Time `db:"refresh_token_expires" `
|
||||
UserID int32 `db:"user_id" `
|
||||
Username string `db:"username" `
|
||||
|
||||
R arcgisOauthTokenR `db:"-" `
|
||||
}
|
||||
|
||||
// ArcgisOauthTokenSlice is an alias for a slice of pointers to ArcgisOauthToken.
|
||||
// This should almost always be used instead of []*ArcgisOauthToken.
|
||||
type ArcgisOauthTokenSlice []*ArcgisOauthToken
|
||||
|
||||
// ArcgisOauthTokens contains methods to work with the oauth_token table
|
||||
var ArcgisOauthTokens = psql.NewTablex[*ArcgisOauthToken, ArcgisOauthTokenSlice, *ArcgisOauthTokenSetter]("arcgis", "oauth_token", buildArcgisOauthTokenColumns("arcgis.oauth_token"))
|
||||
|
||||
// ArcgisOauthTokensQuery is a query on the oauth_token table
|
||||
type ArcgisOauthTokensQuery = *psql.ViewQuery[*ArcgisOauthToken, ArcgisOauthTokenSlice]
|
||||
|
||||
// arcgisOauthTokenR is where relationships are stored.
|
||||
type arcgisOauthTokenR struct {
|
||||
ArcgisAccountAccount *ArcgisAccount // arcgis.oauth_token.oauth_token_arcgis_account_id_fkey
|
||||
UserUser *User // arcgis.oauth_token.oauth_token_user_id_fkey
|
||||
}
|
||||
|
||||
func buildArcgisOauthTokenColumns(alias string) arcgisOauthTokenColumns {
|
||||
return arcgisOauthTokenColumns{
|
||||
ColumnsExpr: expr.NewColumnsExpr(
|
||||
"access_token", "access_token_expires", "arcgis_account_id", "arcgis_id", "arcgis_license_type_id", "created", "id", "invalidated_at", "refresh_token", "refresh_token_expires", "user_id", "username",
|
||||
).WithParent("arcgis.oauth_token"),
|
||||
tableAlias: alias,
|
||||
AccessToken: psql.Quote(alias, "access_token"),
|
||||
AccessTokenExpires: psql.Quote(alias, "access_token_expires"),
|
||||
ArcgisAccountID: psql.Quote(alias, "arcgis_account_id"),
|
||||
ArcgisID: psql.Quote(alias, "arcgis_id"),
|
||||
ArcgisLicenseTypeID: psql.Quote(alias, "arcgis_license_type_id"),
|
||||
Created: psql.Quote(alias, "created"),
|
||||
ID: psql.Quote(alias, "id"),
|
||||
InvalidatedAt: psql.Quote(alias, "invalidated_at"),
|
||||
RefreshToken: psql.Quote(alias, "refresh_token"),
|
||||
RefreshTokenExpires: psql.Quote(alias, "refresh_token_expires"),
|
||||
UserID: psql.Quote(alias, "user_id"),
|
||||
Username: psql.Quote(alias, "username"),
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisOauthTokenColumns struct {
|
||||
expr.ColumnsExpr
|
||||
tableAlias string
|
||||
AccessToken psql.Expression
|
||||
AccessTokenExpires psql.Expression
|
||||
ArcgisAccountID psql.Expression
|
||||
ArcgisID psql.Expression
|
||||
ArcgisLicenseTypeID psql.Expression
|
||||
Created psql.Expression
|
||||
ID psql.Expression
|
||||
InvalidatedAt psql.Expression
|
||||
RefreshToken psql.Expression
|
||||
RefreshTokenExpires psql.Expression
|
||||
UserID psql.Expression
|
||||
Username psql.Expression
|
||||
}
|
||||
|
||||
func (c arcgisOauthTokenColumns) Alias() string {
|
||||
return c.tableAlias
|
||||
}
|
||||
|
||||
func (arcgisOauthTokenColumns) AliasedAs(alias string) arcgisOauthTokenColumns {
|
||||
return buildArcgisOauthTokenColumns(alias)
|
||||
}
|
||||
|
||||
// ArcgisOauthTokenSetter is used for insert/upsert/update operations
|
||||
// All values are optional, and do not have to be set
|
||||
// Generated columns are not included
|
||||
type ArcgisOauthTokenSetter struct {
|
||||
AccessToken omit.Val[string] `db:"access_token" `
|
||||
AccessTokenExpires omit.Val[time.Time] `db:"access_token_expires" `
|
||||
ArcgisAccountID omitnull.Val[string] `db:"arcgis_account_id" `
|
||||
ArcgisID omitnull.Val[string] `db:"arcgis_id" `
|
||||
ArcgisLicenseTypeID omitnull.Val[string] `db:"arcgis_license_type_id" `
|
||||
Created omit.Val[time.Time] `db:"created" `
|
||||
ID omit.Val[int32] `db:"id,pk" `
|
||||
InvalidatedAt omitnull.Val[time.Time] `db:"invalidated_at" `
|
||||
RefreshToken omit.Val[string] `db:"refresh_token" `
|
||||
RefreshTokenExpires omit.Val[time.Time] `db:"refresh_token_expires" `
|
||||
UserID omit.Val[int32] `db:"user_id" `
|
||||
Username omit.Val[string] `db:"username" `
|
||||
}
|
||||
|
||||
func (s ArcgisOauthTokenSetter) SetColumns() []string {
|
||||
vals := make([]string, 0, 12)
|
||||
if s.AccessToken.IsValue() {
|
||||
vals = append(vals, "access_token")
|
||||
}
|
||||
if s.AccessTokenExpires.IsValue() {
|
||||
vals = append(vals, "access_token_expires")
|
||||
}
|
||||
if !s.ArcgisAccountID.IsUnset() {
|
||||
vals = append(vals, "arcgis_account_id")
|
||||
}
|
||||
if !s.ArcgisID.IsUnset() {
|
||||
vals = append(vals, "arcgis_id")
|
||||
}
|
||||
if !s.ArcgisLicenseTypeID.IsUnset() {
|
||||
vals = append(vals, "arcgis_license_type_id")
|
||||
}
|
||||
if s.Created.IsValue() {
|
||||
vals = append(vals, "created")
|
||||
}
|
||||
if s.ID.IsValue() {
|
||||
vals = append(vals, "id")
|
||||
}
|
||||
if !s.InvalidatedAt.IsUnset() {
|
||||
vals = append(vals, "invalidated_at")
|
||||
}
|
||||
if s.RefreshToken.IsValue() {
|
||||
vals = append(vals, "refresh_token")
|
||||
}
|
||||
if s.RefreshTokenExpires.IsValue() {
|
||||
vals = append(vals, "refresh_token_expires")
|
||||
}
|
||||
if s.UserID.IsValue() {
|
||||
vals = append(vals, "user_id")
|
||||
}
|
||||
if s.Username.IsValue() {
|
||||
vals = append(vals, "username")
|
||||
}
|
||||
return vals
|
||||
}
|
||||
|
||||
func (s ArcgisOauthTokenSetter) Overwrite(t *ArcgisOauthToken) {
|
||||
if s.AccessToken.IsValue() {
|
||||
t.AccessToken = s.AccessToken.MustGet()
|
||||
}
|
||||
if s.AccessTokenExpires.IsValue() {
|
||||
t.AccessTokenExpires = s.AccessTokenExpires.MustGet()
|
||||
}
|
||||
if !s.ArcgisAccountID.IsUnset() {
|
||||
t.ArcgisAccountID = s.ArcgisAccountID.MustGetNull()
|
||||
}
|
||||
if !s.ArcgisID.IsUnset() {
|
||||
t.ArcgisID = s.ArcgisID.MustGetNull()
|
||||
}
|
||||
if !s.ArcgisLicenseTypeID.IsUnset() {
|
||||
t.ArcgisLicenseTypeID = s.ArcgisLicenseTypeID.MustGetNull()
|
||||
}
|
||||
if s.Created.IsValue() {
|
||||
t.Created = s.Created.MustGet()
|
||||
}
|
||||
if s.ID.IsValue() {
|
||||
t.ID = s.ID.MustGet()
|
||||
}
|
||||
if !s.InvalidatedAt.IsUnset() {
|
||||
t.InvalidatedAt = s.InvalidatedAt.MustGetNull()
|
||||
}
|
||||
if s.RefreshToken.IsValue() {
|
||||
t.RefreshToken = s.RefreshToken.MustGet()
|
||||
}
|
||||
if s.RefreshTokenExpires.IsValue() {
|
||||
t.RefreshTokenExpires = s.RefreshTokenExpires.MustGet()
|
||||
}
|
||||
if s.UserID.IsValue() {
|
||||
t.UserID = s.UserID.MustGet()
|
||||
}
|
||||
if s.Username.IsValue() {
|
||||
t.Username = s.Username.MustGet()
|
||||
}
|
||||
}
|
||||
|
||||
func (s *ArcgisOauthTokenSetter) Apply(q *dialect.InsertQuery) {
|
||||
q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
|
||||
return ArcgisOauthTokens.BeforeInsertHooks.RunHooks(ctx, exec, s)
|
||||
})
|
||||
|
||||
q.AppendValues(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
|
||||
vals := make([]bob.Expression, 12)
|
||||
if s.AccessToken.IsValue() {
|
||||
vals[0] = psql.Arg(s.AccessToken.MustGet())
|
||||
} else {
|
||||
vals[0] = psql.Raw("DEFAULT")
|
||||
}
|
||||
|
||||
if s.AccessTokenExpires.IsValue() {
|
||||
vals[1] = psql.Arg(s.AccessTokenExpires.MustGet())
|
||||
} else {
|
||||
vals[1] = psql.Raw("DEFAULT")
|
||||
}
|
||||
|
||||
if !s.ArcgisAccountID.IsUnset() {
|
||||
vals[2] = psql.Arg(s.ArcgisAccountID.MustGetNull())
|
||||
} else {
|
||||
vals[2] = psql.Raw("DEFAULT")
|
||||
}
|
||||
|
||||
if !s.ArcgisID.IsUnset() {
|
||||
vals[3] = psql.Arg(s.ArcgisID.MustGetNull())
|
||||
} else {
|
||||
vals[3] = psql.Raw("DEFAULT")
|
||||
}
|
||||
|
||||
if !s.ArcgisLicenseTypeID.IsUnset() {
|
||||
vals[4] = psql.Arg(s.ArcgisLicenseTypeID.MustGetNull())
|
||||
} else {
|
||||
vals[4] = psql.Raw("DEFAULT")
|
||||
}
|
||||
|
||||
if s.Created.IsValue() {
|
||||
vals[5] = psql.Arg(s.Created.MustGet())
|
||||
} else {
|
||||
vals[5] = psql.Raw("DEFAULT")
|
||||
}
|
||||
|
||||
if s.ID.IsValue() {
|
||||
vals[6] = psql.Arg(s.ID.MustGet())
|
||||
} else {
|
||||
vals[6] = psql.Raw("DEFAULT")
|
||||
}
|
||||
|
||||
if !s.InvalidatedAt.IsUnset() {
|
||||
vals[7] = psql.Arg(s.InvalidatedAt.MustGetNull())
|
||||
} else {
|
||||
vals[7] = psql.Raw("DEFAULT")
|
||||
}
|
||||
|
||||
if s.RefreshToken.IsValue() {
|
||||
vals[8] = psql.Arg(s.RefreshToken.MustGet())
|
||||
} else {
|
||||
vals[8] = psql.Raw("DEFAULT")
|
||||
}
|
||||
|
||||
if s.RefreshTokenExpires.IsValue() {
|
||||
vals[9] = psql.Arg(s.RefreshTokenExpires.MustGet())
|
||||
} else {
|
||||
vals[9] = psql.Raw("DEFAULT")
|
||||
}
|
||||
|
||||
if s.UserID.IsValue() {
|
||||
vals[10] = psql.Arg(s.UserID.MustGet())
|
||||
} else {
|
||||
vals[10] = psql.Raw("DEFAULT")
|
||||
}
|
||||
|
||||
if s.Username.IsValue() {
|
||||
vals[11] = psql.Arg(s.Username.MustGet())
|
||||
} else {
|
||||
vals[11] = psql.Raw("DEFAULT")
|
||||
}
|
||||
|
||||
return bob.ExpressSlice(ctx, w, d, start, vals, "", ", ", "")
|
||||
}))
|
||||
}
|
||||
|
||||
func (s ArcgisOauthTokenSetter) UpdateMod() bob.Mod[*dialect.UpdateQuery] {
|
||||
return um.Set(s.Expressions()...)
|
||||
}
|
||||
|
||||
func (s ArcgisOauthTokenSetter) Expressions(prefix ...string) []bob.Expression {
|
||||
exprs := make([]bob.Expression, 0, 12)
|
||||
|
||||
if s.AccessToken.IsValue() {
|
||||
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
|
||||
psql.Quote(append(prefix, "access_token")...),
|
||||
psql.Arg(s.AccessToken),
|
||||
}})
|
||||
}
|
||||
|
||||
if s.AccessTokenExpires.IsValue() {
|
||||
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
|
||||
psql.Quote(append(prefix, "access_token_expires")...),
|
||||
psql.Arg(s.AccessTokenExpires),
|
||||
}})
|
||||
}
|
||||
|
||||
if !s.ArcgisAccountID.IsUnset() {
|
||||
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
|
||||
psql.Quote(append(prefix, "arcgis_account_id")...),
|
||||
psql.Arg(s.ArcgisAccountID),
|
||||
}})
|
||||
}
|
||||
|
||||
if !s.ArcgisID.IsUnset() {
|
||||
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
|
||||
psql.Quote(append(prefix, "arcgis_id")...),
|
||||
psql.Arg(s.ArcgisID),
|
||||
}})
|
||||
}
|
||||
|
||||
if !s.ArcgisLicenseTypeID.IsUnset() {
|
||||
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
|
||||
psql.Quote(append(prefix, "arcgis_license_type_id")...),
|
||||
psql.Arg(s.ArcgisLicenseTypeID),
|
||||
}})
|
||||
}
|
||||
|
||||
if s.Created.IsValue() {
|
||||
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
|
||||
psql.Quote(append(prefix, "created")...),
|
||||
psql.Arg(s.Created),
|
||||
}})
|
||||
}
|
||||
|
||||
if s.ID.IsValue() {
|
||||
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
|
||||
psql.Quote(append(prefix, "id")...),
|
||||
psql.Arg(s.ID),
|
||||
}})
|
||||
}
|
||||
|
||||
if !s.InvalidatedAt.IsUnset() {
|
||||
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
|
||||
psql.Quote(append(prefix, "invalidated_at")...),
|
||||
psql.Arg(s.InvalidatedAt),
|
||||
}})
|
||||
}
|
||||
|
||||
if s.RefreshToken.IsValue() {
|
||||
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
|
||||
psql.Quote(append(prefix, "refresh_token")...),
|
||||
psql.Arg(s.RefreshToken),
|
||||
}})
|
||||
}
|
||||
|
||||
if s.RefreshTokenExpires.IsValue() {
|
||||
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
|
||||
psql.Quote(append(prefix, "refresh_token_expires")...),
|
||||
psql.Arg(s.RefreshTokenExpires),
|
||||
}})
|
||||
}
|
||||
|
||||
if s.UserID.IsValue() {
|
||||
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
|
||||
psql.Quote(append(prefix, "user_id")...),
|
||||
psql.Arg(s.UserID),
|
||||
}})
|
||||
}
|
||||
|
||||
if s.Username.IsValue() {
|
||||
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
|
||||
psql.Quote(append(prefix, "username")...),
|
||||
psql.Arg(s.Username),
|
||||
}})
|
||||
}
|
||||
|
||||
return exprs
|
||||
}
|
||||
|
||||
// FindArcgisOauthToken retrieves a single record by primary key
|
||||
// If cols is empty Find will return all columns.
|
||||
func FindArcgisOauthToken(ctx context.Context, exec bob.Executor, IDPK int32, cols ...string) (*ArcgisOauthToken, error) {
|
||||
if len(cols) == 0 {
|
||||
return ArcgisOauthTokens.Query(
|
||||
sm.Where(ArcgisOauthTokens.Columns.ID.EQ(psql.Arg(IDPK))),
|
||||
).One(ctx, exec)
|
||||
}
|
||||
|
||||
return ArcgisOauthTokens.Query(
|
||||
sm.Where(ArcgisOauthTokens.Columns.ID.EQ(psql.Arg(IDPK))),
|
||||
sm.Columns(ArcgisOauthTokens.Columns.Only(cols...)),
|
||||
).One(ctx, exec)
|
||||
}
|
||||
|
||||
// ArcgisOauthTokenExists checks the presence of a single record by primary key
|
||||
func ArcgisOauthTokenExists(ctx context.Context, exec bob.Executor, IDPK int32) (bool, error) {
|
||||
return ArcgisOauthTokens.Query(
|
||||
sm.Where(ArcgisOauthTokens.Columns.ID.EQ(psql.Arg(IDPK))),
|
||||
).Exists(ctx, exec)
|
||||
}
|
||||
|
||||
// AfterQueryHook is called after ArcgisOauthToken is retrieved from the database
|
||||
func (o *ArcgisOauthToken) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error {
|
||||
var err error
|
||||
|
||||
switch queryType {
|
||||
case bob.QueryTypeSelect:
|
||||
ctx, err = ArcgisOauthTokens.AfterSelectHooks.RunHooks(ctx, exec, ArcgisOauthTokenSlice{o})
|
||||
case bob.QueryTypeInsert:
|
||||
ctx, err = ArcgisOauthTokens.AfterInsertHooks.RunHooks(ctx, exec, ArcgisOauthTokenSlice{o})
|
||||
case bob.QueryTypeUpdate:
|
||||
ctx, err = ArcgisOauthTokens.AfterUpdateHooks.RunHooks(ctx, exec, ArcgisOauthTokenSlice{o})
|
||||
case bob.QueryTypeDelete:
|
||||
ctx, err = ArcgisOauthTokens.AfterDeleteHooks.RunHooks(ctx, exec, ArcgisOauthTokenSlice{o})
|
||||
}
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
// primaryKeyVals returns the primary key values of the ArcgisOauthToken
|
||||
func (o *ArcgisOauthToken) primaryKeyVals() bob.Expression {
|
||||
return psql.Arg(o.ID)
|
||||
}
|
||||
|
||||
func (o *ArcgisOauthToken) pkEQ() dialect.Expression {
|
||||
return psql.Quote("arcgis.oauth_token", "id").EQ(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
|
||||
return o.primaryKeyVals().WriteSQL(ctx, w, d, start)
|
||||
}))
|
||||
}
|
||||
|
||||
// Update uses an executor to update the ArcgisOauthToken
|
||||
func (o *ArcgisOauthToken) Update(ctx context.Context, exec bob.Executor, s *ArcgisOauthTokenSetter) error {
|
||||
v, err := ArcgisOauthTokens.Update(s.UpdateMod(), um.Where(o.pkEQ())).One(ctx, exec)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
o.R = v.R
|
||||
*o = *v
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Delete deletes a single ArcgisOauthToken record with an executor
|
||||
func (o *ArcgisOauthToken) Delete(ctx context.Context, exec bob.Executor) error {
|
||||
_, err := ArcgisOauthTokens.Delete(dm.Where(o.pkEQ())).Exec(ctx, exec)
|
||||
return err
|
||||
}
|
||||
|
||||
// Reload refreshes the ArcgisOauthToken using the executor
|
||||
func (o *ArcgisOauthToken) Reload(ctx context.Context, exec bob.Executor) error {
|
||||
o2, err := ArcgisOauthTokens.Query(
|
||||
sm.Where(ArcgisOauthTokens.Columns.ID.EQ(psql.Arg(o.ID))),
|
||||
).One(ctx, exec)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
o2.R = o.R
|
||||
*o = *o2
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// AfterQueryHook is called after ArcgisOauthTokenSlice is retrieved from the database
|
||||
func (o ArcgisOauthTokenSlice) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error {
|
||||
var err error
|
||||
|
||||
switch queryType {
|
||||
case bob.QueryTypeSelect:
|
||||
ctx, err = ArcgisOauthTokens.AfterSelectHooks.RunHooks(ctx, exec, o)
|
||||
case bob.QueryTypeInsert:
|
||||
ctx, err = ArcgisOauthTokens.AfterInsertHooks.RunHooks(ctx, exec, o)
|
||||
case bob.QueryTypeUpdate:
|
||||
ctx, err = ArcgisOauthTokens.AfterUpdateHooks.RunHooks(ctx, exec, o)
|
||||
case bob.QueryTypeDelete:
|
||||
ctx, err = ArcgisOauthTokens.AfterDeleteHooks.RunHooks(ctx, exec, o)
|
||||
}
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func (o ArcgisOauthTokenSlice) pkIN() dialect.Expression {
|
||||
if len(o) == 0 {
|
||||
return psql.Raw("NULL")
|
||||
}
|
||||
|
||||
return psql.Quote("arcgis.oauth_token", "id").In(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
|
||||
pkPairs := make([]bob.Expression, len(o))
|
||||
for i, row := range o {
|
||||
pkPairs[i] = row.primaryKeyVals()
|
||||
}
|
||||
return bob.ExpressSlice(ctx, w, d, start, pkPairs, "", ", ", "")
|
||||
}))
|
||||
}
|
||||
|
||||
// copyMatchingRows finds models in the given slice that have the same primary key
|
||||
// then it first copies the existing relationships from the old model to the new model
|
||||
// and then replaces the old model in the slice with the new model
|
||||
func (o ArcgisOauthTokenSlice) copyMatchingRows(from ...*ArcgisOauthToken) {
|
||||
for i, old := range o {
|
||||
for _, new := range from {
|
||||
if new.ID != old.ID {
|
||||
continue
|
||||
}
|
||||
new.R = old.R
|
||||
o[i] = new
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// UpdateMod modifies an update query with "WHERE primary_key IN (o...)"
|
||||
func (o ArcgisOauthTokenSlice) UpdateMod() bob.Mod[*dialect.UpdateQuery] {
|
||||
return bob.ModFunc[*dialect.UpdateQuery](func(q *dialect.UpdateQuery) {
|
||||
q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
|
||||
return ArcgisOauthTokens.BeforeUpdateHooks.RunHooks(ctx, exec, o)
|
||||
})
|
||||
|
||||
q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error {
|
||||
var err error
|
||||
switch retrieved := retrieved.(type) {
|
||||
case *ArcgisOauthToken:
|
||||
o.copyMatchingRows(retrieved)
|
||||
case []*ArcgisOauthToken:
|
||||
o.copyMatchingRows(retrieved...)
|
||||
case ArcgisOauthTokenSlice:
|
||||
o.copyMatchingRows(retrieved...)
|
||||
default:
|
||||
// If the retrieved value is not a ArcgisOauthToken or a slice of ArcgisOauthToken
|
||||
// then run the AfterUpdateHooks on the slice
|
||||
_, err = ArcgisOauthTokens.AfterUpdateHooks.RunHooks(ctx, exec, o)
|
||||
}
|
||||
|
||||
return err
|
||||
}))
|
||||
|
||||
q.AppendWhere(o.pkIN())
|
||||
})
|
||||
}
|
||||
|
||||
// DeleteMod modifies an delete query with "WHERE primary_key IN (o...)"
|
||||
func (o ArcgisOauthTokenSlice) DeleteMod() bob.Mod[*dialect.DeleteQuery] {
|
||||
return bob.ModFunc[*dialect.DeleteQuery](func(q *dialect.DeleteQuery) {
|
||||
q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
|
||||
return ArcgisOauthTokens.BeforeDeleteHooks.RunHooks(ctx, exec, o)
|
||||
})
|
||||
|
||||
q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error {
|
||||
var err error
|
||||
switch retrieved := retrieved.(type) {
|
||||
case *ArcgisOauthToken:
|
||||
o.copyMatchingRows(retrieved)
|
||||
case []*ArcgisOauthToken:
|
||||
o.copyMatchingRows(retrieved...)
|
||||
case ArcgisOauthTokenSlice:
|
||||
o.copyMatchingRows(retrieved...)
|
||||
default:
|
||||
// If the retrieved value is not a ArcgisOauthToken or a slice of ArcgisOauthToken
|
||||
// then run the AfterDeleteHooks on the slice
|
||||
_, err = ArcgisOauthTokens.AfterDeleteHooks.RunHooks(ctx, exec, o)
|
||||
}
|
||||
|
||||
return err
|
||||
}))
|
||||
|
||||
q.AppendWhere(o.pkIN())
|
||||
})
|
||||
}
|
||||
|
||||
func (o ArcgisOauthTokenSlice) UpdateAll(ctx context.Context, exec bob.Executor, vals ArcgisOauthTokenSetter) error {
|
||||
if len(o) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
_, err := ArcgisOauthTokens.Update(vals.UpdateMod(), o.UpdateMod()).All(ctx, exec)
|
||||
return err
|
||||
}
|
||||
|
||||
func (o ArcgisOauthTokenSlice) DeleteAll(ctx context.Context, exec bob.Executor) error {
|
||||
if len(o) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
_, err := ArcgisOauthTokens.Delete(o.DeleteMod()).Exec(ctx, exec)
|
||||
return err
|
||||
}
|
||||
|
||||
func (o ArcgisOauthTokenSlice) ReloadAll(ctx context.Context, exec bob.Executor) error {
|
||||
if len(o) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
o2, err := ArcgisOauthTokens.Query(sm.Where(o.pkIN())).All(ctx, exec)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
o.copyMatchingRows(o2...)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// ArcgisAccountAccount starts a query for related objects on arcgis.account
|
||||
func (o *ArcgisOauthToken) ArcgisAccountAccount(mods ...bob.Mod[*dialect.SelectQuery]) ArcgisAccountsQuery {
|
||||
return ArcgisAccounts.Query(append(mods,
|
||||
sm.Where(ArcgisAccounts.Columns.ID.EQ(psql.Arg(o.ArcgisAccountID))),
|
||||
)...)
|
||||
}
|
||||
|
||||
func (os ArcgisOauthTokenSlice) ArcgisAccountAccount(mods ...bob.Mod[*dialect.SelectQuery]) ArcgisAccountsQuery {
|
||||
pkArcgisAccountID := make(pgtypes.Array[null.Val[string]], 0, len(os))
|
||||
for _, o := range os {
|
||||
if o == nil {
|
||||
continue
|
||||
}
|
||||
pkArcgisAccountID = append(pkArcgisAccountID, o.ArcgisAccountID)
|
||||
}
|
||||
PKArgExpr := psql.Select(sm.Columns(
|
||||
psql.F("unnest", psql.Cast(psql.Arg(pkArcgisAccountID), "text[]")),
|
||||
))
|
||||
|
||||
return ArcgisAccounts.Query(append(mods,
|
||||
sm.Where(psql.Group(ArcgisAccounts.Columns.ID).OP("IN", PKArgExpr)),
|
||||
)...)
|
||||
}
|
||||
|
||||
// UserUser starts a query for related objects on user_
|
||||
func (o *ArcgisOauthToken) UserUser(mods ...bob.Mod[*dialect.SelectQuery]) UsersQuery {
|
||||
return Users.Query(append(mods,
|
||||
sm.Where(Users.Columns.ID.EQ(psql.Arg(o.UserID))),
|
||||
)...)
|
||||
}
|
||||
|
||||
func (os ArcgisOauthTokenSlice) UserUser(mods ...bob.Mod[*dialect.SelectQuery]) UsersQuery {
|
||||
pkUserID := make(pgtypes.Array[int32], 0, len(os))
|
||||
for _, o := range os {
|
||||
if o == nil {
|
||||
continue
|
||||
}
|
||||
pkUserID = append(pkUserID, o.UserID)
|
||||
}
|
||||
PKArgExpr := psql.Select(sm.Columns(
|
||||
psql.F("unnest", psql.Cast(psql.Arg(pkUserID), "integer[]")),
|
||||
))
|
||||
|
||||
return Users.Query(append(mods,
|
||||
sm.Where(psql.Group(Users.Columns.ID).OP("IN", PKArgExpr)),
|
||||
)...)
|
||||
}
|
||||
|
||||
func attachArcgisOauthTokenArcgisAccountAccount0(ctx context.Context, exec bob.Executor, count int, arcgisOauthToken0 *ArcgisOauthToken, arcgisAccount1 *ArcgisAccount) (*ArcgisOauthToken, error) {
|
||||
setter := &ArcgisOauthTokenSetter{
|
||||
ArcgisAccountID: omitnull.From(arcgisAccount1.ID),
|
||||
}
|
||||
|
||||
err := arcgisOauthToken0.Update(ctx, exec, setter)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("attachArcgisOauthTokenArcgisAccountAccount0: %w", err)
|
||||
}
|
||||
|
||||
return arcgisOauthToken0, nil
|
||||
}
|
||||
|
||||
func (arcgisOauthToken0 *ArcgisOauthToken) InsertArcgisAccountAccount(ctx context.Context, exec bob.Executor, related *ArcgisAccountSetter) error {
|
||||
var err error
|
||||
|
||||
arcgisAccount1, err := ArcgisAccounts.Insert(related).One(ctx, exec)
|
||||
if err != nil {
|
||||
return fmt.Errorf("inserting related objects: %w", err)
|
||||
}
|
||||
|
||||
_, err = attachArcgisOauthTokenArcgisAccountAccount0(ctx, exec, 1, arcgisOauthToken0, arcgisAccount1)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
arcgisOauthToken0.R.ArcgisAccountAccount = arcgisAccount1
|
||||
|
||||
arcgisAccount1.R.ArcgisAccountOauthTokens = append(arcgisAccount1.R.ArcgisAccountOauthTokens, arcgisOauthToken0)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (arcgisOauthToken0 *ArcgisOauthToken) AttachArcgisAccountAccount(ctx context.Context, exec bob.Executor, arcgisAccount1 *ArcgisAccount) error {
|
||||
var err error
|
||||
|
||||
_, err = attachArcgisOauthTokenArcgisAccountAccount0(ctx, exec, 1, arcgisOauthToken0, arcgisAccount1)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
arcgisOauthToken0.R.ArcgisAccountAccount = arcgisAccount1
|
||||
|
||||
arcgisAccount1.R.ArcgisAccountOauthTokens = append(arcgisAccount1.R.ArcgisAccountOauthTokens, arcgisOauthToken0)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func attachArcgisOauthTokenUserUser0(ctx context.Context, exec bob.Executor, count int, arcgisOauthToken0 *ArcgisOauthToken, user1 *User) (*ArcgisOauthToken, error) {
|
||||
setter := &ArcgisOauthTokenSetter{
|
||||
UserID: omit.From(user1.ID),
|
||||
}
|
||||
|
||||
err := arcgisOauthToken0.Update(ctx, exec, setter)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("attachArcgisOauthTokenUserUser0: %w", err)
|
||||
}
|
||||
|
||||
return arcgisOauthToken0, nil
|
||||
}
|
||||
|
||||
func (arcgisOauthToken0 *ArcgisOauthToken) InsertUserUser(ctx context.Context, exec bob.Executor, related *UserSetter) error {
|
||||
var err error
|
||||
|
||||
user1, err := Users.Insert(related).One(ctx, exec)
|
||||
if err != nil {
|
||||
return fmt.Errorf("inserting related objects: %w", err)
|
||||
}
|
||||
|
||||
_, err = attachArcgisOauthTokenUserUser0(ctx, exec, 1, arcgisOauthToken0, user1)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
arcgisOauthToken0.R.UserUser = user1
|
||||
|
||||
user1.R.UserOauthTokens = append(user1.R.UserOauthTokens, arcgisOauthToken0)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (arcgisOauthToken0 *ArcgisOauthToken) AttachUserUser(ctx context.Context, exec bob.Executor, user1 *User) error {
|
||||
var err error
|
||||
|
||||
_, err = attachArcgisOauthTokenUserUser0(ctx, exec, 1, arcgisOauthToken0, user1)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
arcgisOauthToken0.R.UserUser = user1
|
||||
|
||||
user1.R.UserOauthTokens = append(user1.R.UserOauthTokens, arcgisOauthToken0)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
type arcgisOauthTokenWhere[Q psql.Filterable] struct {
|
||||
AccessToken psql.WhereMod[Q, string]
|
||||
AccessTokenExpires psql.WhereMod[Q, time.Time]
|
||||
ArcgisAccountID psql.WhereNullMod[Q, string]
|
||||
ArcgisID psql.WhereNullMod[Q, string]
|
||||
ArcgisLicenseTypeID psql.WhereNullMod[Q, string]
|
||||
Created psql.WhereMod[Q, time.Time]
|
||||
ID psql.WhereMod[Q, int32]
|
||||
InvalidatedAt psql.WhereNullMod[Q, time.Time]
|
||||
RefreshToken psql.WhereMod[Q, string]
|
||||
RefreshTokenExpires psql.WhereMod[Q, time.Time]
|
||||
UserID psql.WhereMod[Q, int32]
|
||||
Username psql.WhereMod[Q, string]
|
||||
}
|
||||
|
||||
func (arcgisOauthTokenWhere[Q]) AliasedAs(alias string) arcgisOauthTokenWhere[Q] {
|
||||
return buildArcgisOauthTokenWhere[Q](buildArcgisOauthTokenColumns(alias))
|
||||
}
|
||||
|
||||
func buildArcgisOauthTokenWhere[Q psql.Filterable](cols arcgisOauthTokenColumns) arcgisOauthTokenWhere[Q] {
|
||||
return arcgisOauthTokenWhere[Q]{
|
||||
AccessToken: psql.Where[Q, string](cols.AccessToken),
|
||||
AccessTokenExpires: psql.Where[Q, time.Time](cols.AccessTokenExpires),
|
||||
ArcgisAccountID: psql.WhereNull[Q, string](cols.ArcgisAccountID),
|
||||
ArcgisID: psql.WhereNull[Q, string](cols.ArcgisID),
|
||||
ArcgisLicenseTypeID: psql.WhereNull[Q, string](cols.ArcgisLicenseTypeID),
|
||||
Created: psql.Where[Q, time.Time](cols.Created),
|
||||
ID: psql.Where[Q, int32](cols.ID),
|
||||
InvalidatedAt: psql.WhereNull[Q, time.Time](cols.InvalidatedAt),
|
||||
RefreshToken: psql.Where[Q, string](cols.RefreshToken),
|
||||
RefreshTokenExpires: psql.Where[Q, time.Time](cols.RefreshTokenExpires),
|
||||
UserID: psql.Where[Q, int32](cols.UserID),
|
||||
Username: psql.Where[Q, string](cols.Username),
|
||||
}
|
||||
}
|
||||
|
||||
func (o *ArcgisOauthToken) Preload(name string, retrieved any) error {
|
||||
if o == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
switch name {
|
||||
case "ArcgisAccountAccount":
|
||||
rel, ok := retrieved.(*ArcgisAccount)
|
||||
if !ok {
|
||||
return fmt.Errorf("arcgisOauthToken cannot load %T as %q", retrieved, name)
|
||||
}
|
||||
|
||||
o.R.ArcgisAccountAccount = rel
|
||||
|
||||
if rel != nil {
|
||||
rel.R.ArcgisAccountOauthTokens = ArcgisOauthTokenSlice{o}
|
||||
}
|
||||
return nil
|
||||
case "UserUser":
|
||||
rel, ok := retrieved.(*User)
|
||||
if !ok {
|
||||
return fmt.Errorf("arcgisOauthToken cannot load %T as %q", retrieved, name)
|
||||
}
|
||||
|
||||
o.R.UserUser = rel
|
||||
|
||||
if rel != nil {
|
||||
rel.R.UserOauthTokens = ArcgisOauthTokenSlice{o}
|
||||
}
|
||||
return nil
|
||||
default:
|
||||
return fmt.Errorf("arcgisOauthToken has no relationship %q", name)
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisOauthTokenPreloader struct {
|
||||
ArcgisAccountAccount func(...psql.PreloadOption) psql.Preloader
|
||||
UserUser func(...psql.PreloadOption) psql.Preloader
|
||||
}
|
||||
|
||||
func buildArcgisOauthTokenPreloader() arcgisOauthTokenPreloader {
|
||||
return arcgisOauthTokenPreloader{
|
||||
ArcgisAccountAccount: func(opts ...psql.PreloadOption) psql.Preloader {
|
||||
return psql.Preload[*ArcgisAccount, ArcgisAccountSlice](psql.PreloadRel{
|
||||
Name: "ArcgisAccountAccount",
|
||||
Sides: []psql.PreloadSide{
|
||||
{
|
||||
From: ArcgisOauthTokens,
|
||||
To: ArcgisAccounts,
|
||||
FromColumns: []string{"arcgis_account_id"},
|
||||
ToColumns: []string{"id"},
|
||||
},
|
||||
},
|
||||
}, ArcgisAccounts.Columns.Names(), opts...)
|
||||
},
|
||||
UserUser: func(opts ...psql.PreloadOption) psql.Preloader {
|
||||
return psql.Preload[*User, UserSlice](psql.PreloadRel{
|
||||
Name: "UserUser",
|
||||
Sides: []psql.PreloadSide{
|
||||
{
|
||||
From: ArcgisOauthTokens,
|
||||
To: Users,
|
||||
FromColumns: []string{"user_id"},
|
||||
ToColumns: []string{"id"},
|
||||
},
|
||||
},
|
||||
}, Users.Columns.Names(), opts...)
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisOauthTokenThenLoader[Q orm.Loadable] struct {
|
||||
ArcgisAccountAccount func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
|
||||
UserUser func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
|
||||
}
|
||||
|
||||
func buildArcgisOauthTokenThenLoader[Q orm.Loadable]() arcgisOauthTokenThenLoader[Q] {
|
||||
type ArcgisAccountAccountLoadInterface interface {
|
||||
LoadArcgisAccountAccount(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
|
||||
}
|
||||
type UserUserLoadInterface interface {
|
||||
LoadUserUser(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
|
||||
}
|
||||
|
||||
return arcgisOauthTokenThenLoader[Q]{
|
||||
ArcgisAccountAccount: thenLoadBuilder[Q](
|
||||
"ArcgisAccountAccount",
|
||||
func(ctx context.Context, exec bob.Executor, retrieved ArcgisAccountAccountLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
||||
return retrieved.LoadArcgisAccountAccount(ctx, exec, mods...)
|
||||
},
|
||||
),
|
||||
UserUser: thenLoadBuilder[Q](
|
||||
"UserUser",
|
||||
func(ctx context.Context, exec bob.Executor, retrieved UserUserLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
||||
return retrieved.LoadUserUser(ctx, exec, mods...)
|
||||
},
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
// LoadArcgisAccountAccount loads the arcgisOauthToken's ArcgisAccountAccount into the .R struct
|
||||
func (o *ArcgisOauthToken) LoadArcgisAccountAccount(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
||||
if o == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Reset the relationship
|
||||
o.R.ArcgisAccountAccount = nil
|
||||
|
||||
related, err := o.ArcgisAccountAccount(mods...).One(ctx, exec)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
related.R.ArcgisAccountOauthTokens = ArcgisOauthTokenSlice{o}
|
||||
|
||||
o.R.ArcgisAccountAccount = related
|
||||
return nil
|
||||
}
|
||||
|
||||
// LoadArcgisAccountAccount loads the arcgisOauthToken's ArcgisAccountAccount into the .R struct
|
||||
func (os ArcgisOauthTokenSlice) LoadArcgisAccountAccount(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
||||
if len(os) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
arcgisAccounts, err := os.ArcgisAccountAccount(mods...).All(ctx, exec)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, o := range os {
|
||||
if o == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
for _, rel := range arcgisAccounts {
|
||||
if !o.ArcgisAccountID.IsValue() {
|
||||
continue
|
||||
}
|
||||
|
||||
if !(o.ArcgisAccountID.IsValue() && o.ArcgisAccountID.MustGet() == rel.ID) {
|
||||
continue
|
||||
}
|
||||
|
||||
rel.R.ArcgisAccountOauthTokens = append(rel.R.ArcgisAccountOauthTokens, o)
|
||||
|
||||
o.R.ArcgisAccountAccount = rel
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// LoadUserUser loads the arcgisOauthToken's UserUser into the .R struct
|
||||
func (o *ArcgisOauthToken) LoadUserUser(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
||||
if o == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Reset the relationship
|
||||
o.R.UserUser = nil
|
||||
|
||||
related, err := o.UserUser(mods...).One(ctx, exec)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
related.R.UserOauthTokens = ArcgisOauthTokenSlice{o}
|
||||
|
||||
o.R.UserUser = related
|
||||
return nil
|
||||
}
|
||||
|
||||
// LoadUserUser loads the arcgisOauthToken's UserUser into the .R struct
|
||||
func (os ArcgisOauthTokenSlice) LoadUserUser(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
||||
if len(os) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
users, err := os.UserUser(mods...).All(ctx, exec)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, o := range os {
|
||||
if o == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
for _, rel := range users {
|
||||
|
||||
if !(o.UserID == rel.ID) {
|
||||
continue
|
||||
}
|
||||
|
||||
rel.R.UserOauthTokens = append(rel.R.UserOauthTokens, o)
|
||||
|
||||
o.R.UserUser = rel
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
|
@ -1,837 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package models
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
|
||||
"github.com/Gleipnir-Technology/bob"
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql"
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql/dialect"
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql/dm"
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql/sm"
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql/um"
|
||||
"github.com/Gleipnir-Technology/bob/expr"
|
||||
"github.com/Gleipnir-Technology/bob/orm"
|
||||
"github.com/Gleipnir-Technology/bob/types/pgtypes"
|
||||
enums "github.com/Gleipnir-Technology/nidus-sync/db/enums"
|
||||
"github.com/aarondl/opt/omit"
|
||||
)
|
||||
|
||||
// ArcgisParcelMapping is an object representing the database table.
type ArcgisParcelMapping struct {
	Destination               enums.ArcgisMappingdestinationparcel `db:"destination,pk" `
	LayerFeatureServiceItemID string                               `db:"layer_feature_service_item_id" `
	LayerIndex                int32                                `db:"layer_index" `
	LayerFieldName            string                               `db:"layer_field_name" `
	OrganizationID            int32                                `db:"organization_id,pk" `

	// R holds eagerly/lazily loaded relationships; never persisted (db:"-").
	R arcgisParcelMappingR `db:"-" `
}

// ArcgisParcelMappingSlice is an alias for a slice of pointers to ArcgisParcelMapping.
// This should almost always be used instead of []*ArcgisParcelMapping.
type ArcgisParcelMappingSlice []*ArcgisParcelMapping

// ArcgisParcelMappings contains methods to work with the parcel_mapping table
var ArcgisParcelMappings = psql.NewTablex[*ArcgisParcelMapping, ArcgisParcelMappingSlice, *ArcgisParcelMappingSetter]("arcgis", "parcel_mapping", buildArcgisParcelMappingColumns("arcgis.parcel_mapping"))

// ArcgisParcelMappingsQuery is a query on the parcel_mapping table
type ArcgisParcelMappingsQuery = *psql.ViewQuery[*ArcgisParcelMapping, ArcgisParcelMappingSlice]

// arcgisParcelMappingR is where relationships are stored.
type arcgisParcelMappingR struct {
	LayerField   *ArcgisLayerField // arcgis.parcel_mapping.parcel_mapping_layer_feature_service_item_id_layer_index_l_fkey
	Organization *Organization     // arcgis.parcel_mapping.parcel_mapping_organization_id_fkey
}

// buildArcgisParcelMappingColumns constructs the column expressions for the
// table under the given alias.
func buildArcgisParcelMappingColumns(alias string) arcgisParcelMappingColumns {
	return arcgisParcelMappingColumns{
		ColumnsExpr: expr.NewColumnsExpr(
			"destination", "layer_feature_service_item_id", "layer_index", "layer_field_name", "organization_id",
		).WithParent("arcgis.parcel_mapping"),
		tableAlias:                alias,
		Destination:               psql.Quote(alias, "destination"),
		LayerFeatureServiceItemID: psql.Quote(alias, "layer_feature_service_item_id"),
		LayerIndex:                psql.Quote(alias, "layer_index"),
		LayerFieldName:            psql.Quote(alias, "layer_field_name"),
		OrganizationID:            psql.Quote(alias, "organization_id"),
	}
}

// arcgisParcelMappingColumns exposes one quoted expression per table column.
type arcgisParcelMappingColumns struct {
	expr.ColumnsExpr
	tableAlias                string
	Destination               psql.Expression
	LayerFeatureServiceItemID psql.Expression
	LayerIndex                psql.Expression
	LayerFieldName            psql.Expression
	OrganizationID            psql.Expression
}

// Alias returns the table alias these column expressions were built with.
func (c arcgisParcelMappingColumns) Alias() string {
	return c.tableAlias
}

// AliasedAs returns a fresh set of column expressions under the given alias.
func (arcgisParcelMappingColumns) AliasedAs(alias string) arcgisParcelMappingColumns {
	return buildArcgisParcelMappingColumns(alias)
}
|
||||
|
||||
// ArcgisParcelMappingSetter is used for insert/upsert/update operations
// All values are optional, and do not have to be set
// Generated columns are not included
type ArcgisParcelMappingSetter struct {
	Destination               omit.Val[enums.ArcgisMappingdestinationparcel] `db:"destination,pk" `
	LayerFeatureServiceItemID omit.Val[string]                               `db:"layer_feature_service_item_id" `
	LayerIndex                omit.Val[int32]                                `db:"layer_index" `
	LayerFieldName            omit.Val[string]                               `db:"layer_field_name" `
	OrganizationID            omit.Val[int32]                                `db:"organization_id,pk" `
}

// SetColumns returns the names of the columns that have a value set.
func (s ArcgisParcelMappingSetter) SetColumns() []string {
	vals := make([]string, 0, 5)
	if s.Destination.IsValue() {
		vals = append(vals, "destination")
	}
	if s.LayerFeatureServiceItemID.IsValue() {
		vals = append(vals, "layer_feature_service_item_id")
	}
	if s.LayerIndex.IsValue() {
		vals = append(vals, "layer_index")
	}
	if s.LayerFieldName.IsValue() {
		vals = append(vals, "layer_field_name")
	}
	if s.OrganizationID.IsValue() {
		vals = append(vals, "organization_id")
	}
	return vals
}

// Overwrite copies every set value from the setter onto the model t,
// leaving unset fields untouched.
func (s ArcgisParcelMappingSetter) Overwrite(t *ArcgisParcelMapping) {
	if s.Destination.IsValue() {
		t.Destination = s.Destination.MustGet()
	}
	if s.LayerFeatureServiceItemID.IsValue() {
		t.LayerFeatureServiceItemID = s.LayerFeatureServiceItemID.MustGet()
	}
	if s.LayerIndex.IsValue() {
		t.LayerIndex = s.LayerIndex.MustGet()
	}
	if s.LayerFieldName.IsValue() {
		t.LayerFieldName = s.LayerFieldName.MustGet()
	}
	if s.OrganizationID.IsValue() {
		t.OrganizationID = s.OrganizationID.MustGet()
	}
}

// Apply attaches the setter's values to an INSERT query, emitting the SQL
// keyword DEFAULT for every unset column, and registers the before-insert hooks.
func (s *ArcgisParcelMappingSetter) Apply(q *dialect.InsertQuery) {
	q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
		return ArcgisParcelMappings.BeforeInsertHooks.RunHooks(ctx, exec, s)
	})

	q.AppendValues(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
		// One expression per column, in declaration order.
		vals := make([]bob.Expression, 5)
		if s.Destination.IsValue() {
			vals[0] = psql.Arg(s.Destination.MustGet())
		} else {
			vals[0] = psql.Raw("DEFAULT")
		}

		if s.LayerFeatureServiceItemID.IsValue() {
			vals[1] = psql.Arg(s.LayerFeatureServiceItemID.MustGet())
		} else {
			vals[1] = psql.Raw("DEFAULT")
		}

		if s.LayerIndex.IsValue() {
			vals[2] = psql.Arg(s.LayerIndex.MustGet())
		} else {
			vals[2] = psql.Raw("DEFAULT")
		}

		if s.LayerFieldName.IsValue() {
			vals[3] = psql.Arg(s.LayerFieldName.MustGet())
		} else {
			vals[3] = psql.Raw("DEFAULT")
		}

		if s.OrganizationID.IsValue() {
			vals[4] = psql.Arg(s.OrganizationID.MustGet())
		} else {
			vals[4] = psql.Raw("DEFAULT")
		}

		return bob.ExpressSlice(ctx, w, d, start, vals, "", ", ", "")
	}))
}

// UpdateMod converts the setter into a SET clause for an UPDATE query.
func (s ArcgisParcelMappingSetter) UpdateMod() bob.Mod[*dialect.UpdateQuery] {
	return um.Set(s.Expressions()...)
}

// Expressions returns one "column = value" expression per set field,
// optionally qualifying column names with the given prefix.
func (s ArcgisParcelMappingSetter) Expressions(prefix ...string) []bob.Expression {
	exprs := make([]bob.Expression, 0, 5)

	if s.Destination.IsValue() {
		exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
			psql.Quote(append(prefix, "destination")...),
			psql.Arg(s.Destination),
		}})
	}

	if s.LayerFeatureServiceItemID.IsValue() {
		exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
			psql.Quote(append(prefix, "layer_feature_service_item_id")...),
			psql.Arg(s.LayerFeatureServiceItemID),
		}})
	}

	if s.LayerIndex.IsValue() {
		exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
			psql.Quote(append(prefix, "layer_index")...),
			psql.Arg(s.LayerIndex),
		}})
	}

	if s.LayerFieldName.IsValue() {
		exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
			psql.Quote(append(prefix, "layer_field_name")...),
			psql.Arg(s.LayerFieldName),
		}})
	}

	if s.OrganizationID.IsValue() {
		exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
			psql.Quote(append(prefix, "organization_id")...),
			psql.Arg(s.OrganizationID),
		}})
	}

	return exprs
}
|
||||
|
||||
// FindArcgisParcelMapping retrieves a single record by primary key
// If cols is empty Find will return all columns.
func FindArcgisParcelMapping(ctx context.Context, exec bob.Executor, OrganizationIDPK int32, DestinationPK enums.ArcgisMappingdestinationparcel, cols ...string) (*ArcgisParcelMapping, error) {
	if len(cols) == 0 {
		return ArcgisParcelMappings.Query(
			sm.Where(ArcgisParcelMappings.Columns.OrganizationID.EQ(psql.Arg(OrganizationIDPK))),
			sm.Where(ArcgisParcelMappings.Columns.Destination.EQ(psql.Arg(DestinationPK))),
		).One(ctx, exec)
	}

	// Restrict the SELECT list to the requested columns.
	return ArcgisParcelMappings.Query(
		sm.Where(ArcgisParcelMappings.Columns.OrganizationID.EQ(psql.Arg(OrganizationIDPK))),
		sm.Where(ArcgisParcelMappings.Columns.Destination.EQ(psql.Arg(DestinationPK))),
		sm.Columns(ArcgisParcelMappings.Columns.Only(cols...)),
	).One(ctx, exec)
}

// ArcgisParcelMappingExists checks the presence of a single record by primary key
func ArcgisParcelMappingExists(ctx context.Context, exec bob.Executor, OrganizationIDPK int32, DestinationPK enums.ArcgisMappingdestinationparcel) (bool, error) {
	return ArcgisParcelMappings.Query(
		sm.Where(ArcgisParcelMappings.Columns.OrganizationID.EQ(psql.Arg(OrganizationIDPK))),
		sm.Where(ArcgisParcelMappings.Columns.Destination.EQ(psql.Arg(DestinationPK))),
	).Exists(ctx, exec)
}
|
||||
|
||||
// AfterQueryHook is called after ArcgisParcelMapping is retrieved from the database
func (o *ArcgisParcelMapping) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error {
	var err error

	// Dispatch to the hook set matching the query that produced this row.
	switch queryType {
	case bob.QueryTypeSelect:
		ctx, err = ArcgisParcelMappings.AfterSelectHooks.RunHooks(ctx, exec, ArcgisParcelMappingSlice{o})
	case bob.QueryTypeInsert:
		ctx, err = ArcgisParcelMappings.AfterInsertHooks.RunHooks(ctx, exec, ArcgisParcelMappingSlice{o})
	case bob.QueryTypeUpdate:
		ctx, err = ArcgisParcelMappings.AfterUpdateHooks.RunHooks(ctx, exec, ArcgisParcelMappingSlice{o})
	case bob.QueryTypeDelete:
		ctx, err = ArcgisParcelMappings.AfterDeleteHooks.RunHooks(ctx, exec, ArcgisParcelMappingSlice{o})
	}

	return err
}

// primaryKeyVals returns the primary key values of the ArcgisParcelMapping
func (o *ArcgisParcelMapping) primaryKeyVals() bob.Expression {
	return psql.ArgGroup(
		o.OrganizationID,
		o.Destination,
	)
}

// pkEQ builds a "(organization_id, destination) = (?, ?)" predicate for this row.
func (o *ArcgisParcelMapping) pkEQ() dialect.Expression {
	return psql.Group(psql.Quote("arcgis.parcel_mapping", "organization_id"), psql.Quote("arcgis.parcel_mapping", "destination")).EQ(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
		return o.primaryKeyVals().WriteSQL(ctx, w, d, start)
	}))
}

// Update uses an executor to update the ArcgisParcelMapping
func (o *ArcgisParcelMapping) Update(ctx context.Context, exec bob.Executor, s *ArcgisParcelMappingSetter) error {
	v, err := ArcgisParcelMappings.Update(s.UpdateMod(), um.Where(o.pkEQ())).One(ctx, exec)
	if err != nil {
		return err
	}

	// Preserve the already-loaded relationships before replacing the row data.
	o.R = v.R
	*o = *v

	return nil
}

// Delete deletes a single ArcgisParcelMapping record with an executor
func (o *ArcgisParcelMapping) Delete(ctx context.Context, exec bob.Executor) error {
	_, err := ArcgisParcelMappings.Delete(dm.Where(o.pkEQ())).Exec(ctx, exec)
	return err
}

// Reload refreshes the ArcgisParcelMapping using the executor
func (o *ArcgisParcelMapping) Reload(ctx context.Context, exec bob.Executor) error {
	o2, err := ArcgisParcelMappings.Query(
		sm.Where(ArcgisParcelMappings.Columns.OrganizationID.EQ(psql.Arg(o.OrganizationID))),
		sm.Where(ArcgisParcelMappings.Columns.Destination.EQ(psql.Arg(o.Destination))),
	).One(ctx, exec)
	if err != nil {
		return err
	}
	// Keep existing relationships; only the column data is refreshed.
	o2.R = o.R
	*o = *o2

	return nil
}
|
||||
|
||||
// AfterQueryHook is called after ArcgisParcelMappingSlice is retrieved from the database
func (o ArcgisParcelMappingSlice) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error {
	var err error

	// Dispatch to the hook set matching the query that produced these rows.
	switch queryType {
	case bob.QueryTypeSelect:
		ctx, err = ArcgisParcelMappings.AfterSelectHooks.RunHooks(ctx, exec, o)
	case bob.QueryTypeInsert:
		ctx, err = ArcgisParcelMappings.AfterInsertHooks.RunHooks(ctx, exec, o)
	case bob.QueryTypeUpdate:
		ctx, err = ArcgisParcelMappings.AfterUpdateHooks.RunHooks(ctx, exec, o)
	case bob.QueryTypeDelete:
		ctx, err = ArcgisParcelMappings.AfterDeleteHooks.RunHooks(ctx, exec, o)
	}

	return err
}

// pkIN builds a "(organization_id, destination) IN ((?, ?), ...)" predicate
// covering every row in the slice. An empty slice yields NULL (matches nothing).
func (o ArcgisParcelMappingSlice) pkIN() dialect.Expression {
	if len(o) == 0 {
		return psql.Raw("NULL")
	}

	return psql.Group(psql.Quote("arcgis.parcel_mapping", "organization_id"), psql.Quote("arcgis.parcel_mapping", "destination")).In(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
		pkPairs := make([]bob.Expression, len(o))
		for i, row := range o {
			pkPairs[i] = row.primaryKeyVals()
		}
		return bob.ExpressSlice(ctx, w, d, start, pkPairs, "", ", ", "")
	}))
}

// copyMatchingRows finds models in the given slice that have the same primary key
// then it first copies the existing relationships from the old model to the new model
// and then replaces the old model in the slice with the new model
func (o ArcgisParcelMappingSlice) copyMatchingRows(from ...*ArcgisParcelMapping) {
	for i, old := range o {
		for _, new := range from {
			if new.OrganizationID != old.OrganizationID {
				continue
			}
			if new.Destination != old.Destination {
				continue
			}
			new.R = old.R
			o[i] = new
			break
		}
	}
}

// UpdateMod modifies an update query with "WHERE primary_key IN (o...)"
func (o ArcgisParcelMappingSlice) UpdateMod() bob.Mod[*dialect.UpdateQuery] {
	return bob.ModFunc[*dialect.UpdateQuery](func(q *dialect.UpdateQuery) {
		q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
			return ArcgisParcelMappings.BeforeUpdateHooks.RunHooks(ctx, exec, o)
		})

		// After the update runs, fold the returned rows back into this slice.
		q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error {
			var err error
			switch retrieved := retrieved.(type) {
			case *ArcgisParcelMapping:
				o.copyMatchingRows(retrieved)
			case []*ArcgisParcelMapping:
				o.copyMatchingRows(retrieved...)
			case ArcgisParcelMappingSlice:
				o.copyMatchingRows(retrieved...)
			default:
				// If the retrieved value is not a ArcgisParcelMapping or a slice of ArcgisParcelMapping
				// then run the AfterUpdateHooks on the slice
				_, err = ArcgisParcelMappings.AfterUpdateHooks.RunHooks(ctx, exec, o)
			}

			return err
		}))

		q.AppendWhere(o.pkIN())
	})
}

// DeleteMod modifies an delete query with "WHERE primary_key IN (o...)"
func (o ArcgisParcelMappingSlice) DeleteMod() bob.Mod[*dialect.DeleteQuery] {
	return bob.ModFunc[*dialect.DeleteQuery](func(q *dialect.DeleteQuery) {
		q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
			return ArcgisParcelMappings.BeforeDeleteHooks.RunHooks(ctx, exec, o)
		})

		// After the delete runs, fold any returned rows back into this slice.
		q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error {
			var err error
			switch retrieved := retrieved.(type) {
			case *ArcgisParcelMapping:
				o.copyMatchingRows(retrieved)
			case []*ArcgisParcelMapping:
				o.copyMatchingRows(retrieved...)
			case ArcgisParcelMappingSlice:
				o.copyMatchingRows(retrieved...)
			default:
				// If the retrieved value is not a ArcgisParcelMapping or a slice of ArcgisParcelMapping
				// then run the AfterDeleteHooks on the slice
				_, err = ArcgisParcelMappings.AfterDeleteHooks.RunHooks(ctx, exec, o)
			}

			return err
		}))

		q.AppendWhere(o.pkIN())
	})
}

// UpdateAll applies the setter's values to every row in the slice in one query.
func (o ArcgisParcelMappingSlice) UpdateAll(ctx context.Context, exec bob.Executor, vals ArcgisParcelMappingSetter) error {
	if len(o) == 0 {
		return nil
	}

	_, err := ArcgisParcelMappings.Update(vals.UpdateMod(), o.UpdateMod()).All(ctx, exec)
	return err
}

// DeleteAll removes every row in the slice in one query.
func (o ArcgisParcelMappingSlice) DeleteAll(ctx context.Context, exec bob.Executor) error {
	if len(o) == 0 {
		return nil
	}

	_, err := ArcgisParcelMappings.Delete(o.DeleteMod()).Exec(ctx, exec)
	return err
}

// ReloadAll refreshes every row in the slice from the database, preserving
// already-loaded relationships via copyMatchingRows.
func (o ArcgisParcelMappingSlice) ReloadAll(ctx context.Context, exec bob.Executor) error {
	if len(o) == 0 {
		return nil
	}

	o2, err := ArcgisParcelMappings.Query(sm.Where(o.pkIN())).All(ctx, exec)
	if err != nil {
		return err
	}

	o.copyMatchingRows(o2...)

	return nil
}
|
||||
|
||||
// LayerField starts a query for related objects on arcgis.layer_field
func (o *ArcgisParcelMapping) LayerField(mods ...bob.Mod[*dialect.SelectQuery]) ArcgisLayerFieldsQuery {
	return ArcgisLayerFields.Query(append(mods,
		sm.Where(ArcgisLayerFields.Columns.LayerFeatureServiceItemID.EQ(psql.Arg(o.LayerFeatureServiceItemID))), sm.Where(ArcgisLayerFields.Columns.LayerIndex.EQ(psql.Arg(o.LayerIndex))), sm.Where(ArcgisLayerFields.Columns.Name.EQ(psql.Arg(o.LayerFieldName))),
	)...)
}

// LayerField starts a query for the layer fields related to any row in the
// slice, matching on the composite FK via an unnest()-based IN subquery.
func (os ArcgisParcelMappingSlice) LayerField(mods ...bob.Mod[*dialect.SelectQuery]) ArcgisLayerFieldsQuery {
	// Collect each FK column into a Postgres array argument.
	pkLayerFeatureServiceItemID := make(pgtypes.Array[string], 0, len(os))

	pkLayerIndex := make(pgtypes.Array[int32], 0, len(os))

	pkLayerFieldName := make(pgtypes.Array[string], 0, len(os))
	for _, o := range os {
		if o == nil {
			continue
		}
		pkLayerFeatureServiceItemID = append(pkLayerFeatureServiceItemID, o.LayerFeatureServiceItemID)
		pkLayerIndex = append(pkLayerIndex, o.LayerIndex)
		pkLayerFieldName = append(pkLayerFieldName, o.LayerFieldName)
	}
	// SELECT unnest(...), unnest(...), unnest(...) expands the arrays into
	// rows of composite keys for the IN comparison below.
	PKArgExpr := psql.Select(sm.Columns(
		psql.F("unnest", psql.Cast(psql.Arg(pkLayerFeatureServiceItemID), "text[]")),
		psql.F("unnest", psql.Cast(psql.Arg(pkLayerIndex), "integer[]")),
		psql.F("unnest", psql.Cast(psql.Arg(pkLayerFieldName), "text[]")),
	))

	return ArcgisLayerFields.Query(append(mods,
		sm.Where(psql.Group(ArcgisLayerFields.Columns.LayerFeatureServiceItemID, ArcgisLayerFields.Columns.LayerIndex, ArcgisLayerFields.Columns.Name).OP("IN", PKArgExpr)),
	)...)
}

// Organization starts a query for related objects on organization
func (o *ArcgisParcelMapping) Organization(mods ...bob.Mod[*dialect.SelectQuery]) OrganizationsQuery {
	return Organizations.Query(append(mods,
		sm.Where(Organizations.Columns.ID.EQ(psql.Arg(o.OrganizationID))),
	)...)
}

// Organization starts a query for the organizations related to any row in
// the slice, matching on organization_id via an unnest()-based IN subquery.
func (os ArcgisParcelMappingSlice) Organization(mods ...bob.Mod[*dialect.SelectQuery]) OrganizationsQuery {
	pkOrganizationID := make(pgtypes.Array[int32], 0, len(os))
	for _, o := range os {
		if o == nil {
			continue
		}
		pkOrganizationID = append(pkOrganizationID, o.OrganizationID)
	}
	PKArgExpr := psql.Select(sm.Columns(
		psql.F("unnest", psql.Cast(psql.Arg(pkOrganizationID), "integer[]")),
	))

	return Organizations.Query(append(mods,
		sm.Where(psql.Group(Organizations.Columns.ID).OP("IN", PKArgExpr)),
	)...)
}
|
||||
|
||||
// attachArcgisParcelMappingLayerField0 points the mapping's composite FK at
// the given layer field and persists the change.
func attachArcgisParcelMappingLayerField0(ctx context.Context, exec bob.Executor, count int, arcgisParcelMapping0 *ArcgisParcelMapping, arcgisLayerField1 *ArcgisLayerField) (*ArcgisParcelMapping, error) {
	setter := &ArcgisParcelMappingSetter{
		LayerFeatureServiceItemID: omit.From(arcgisLayerField1.LayerFeatureServiceItemID),
		LayerIndex:                omit.From(arcgisLayerField1.LayerIndex),
		LayerFieldName:            omit.From(arcgisLayerField1.Name),
	}

	err := arcgisParcelMapping0.Update(ctx, exec, setter)
	if err != nil {
		return nil, fmt.Errorf("attachArcgisParcelMappingLayerField0: %w", err)
	}

	return arcgisParcelMapping0, nil
}

// InsertLayerField inserts a new layer field and attaches it as this
// mapping's LayerField relationship, wiring both sides of .R.
func (arcgisParcelMapping0 *ArcgisParcelMapping) InsertLayerField(ctx context.Context, exec bob.Executor, related *ArcgisLayerFieldSetter) error {
	var err error

	arcgisLayerField1, err := ArcgisLayerFields.Insert(related).One(ctx, exec)
	if err != nil {
		return fmt.Errorf("inserting related objects: %w", err)
	}

	_, err = attachArcgisParcelMappingLayerField0(ctx, exec, 1, arcgisParcelMapping0, arcgisLayerField1)
	if err != nil {
		return err
	}

	arcgisParcelMapping0.R.LayerField = arcgisLayerField1

	arcgisLayerField1.R.ParcelMappings = append(arcgisLayerField1.R.ParcelMappings, arcgisParcelMapping0)

	return nil
}

// AttachLayerField attaches an existing layer field as this mapping's
// LayerField relationship, wiring both sides of .R.
func (arcgisParcelMapping0 *ArcgisParcelMapping) AttachLayerField(ctx context.Context, exec bob.Executor, arcgisLayerField1 *ArcgisLayerField) error {
	var err error

	_, err = attachArcgisParcelMappingLayerField0(ctx, exec, 1, arcgisParcelMapping0, arcgisLayerField1)
	if err != nil {
		return err
	}

	arcgisParcelMapping0.R.LayerField = arcgisLayerField1

	arcgisLayerField1.R.ParcelMappings = append(arcgisLayerField1.R.ParcelMappings, arcgisParcelMapping0)

	return nil
}

// attachArcgisParcelMappingOrganization0 points the mapping's
// organization_id FK at the given organization and persists the change.
func attachArcgisParcelMappingOrganization0(ctx context.Context, exec bob.Executor, count int, arcgisParcelMapping0 *ArcgisParcelMapping, organization1 *Organization) (*ArcgisParcelMapping, error) {
	setter := &ArcgisParcelMappingSetter{
		OrganizationID: omit.From(organization1.ID),
	}

	err := arcgisParcelMapping0.Update(ctx, exec, setter)
	if err != nil {
		return nil, fmt.Errorf("attachArcgisParcelMappingOrganization0: %w", err)
	}

	return arcgisParcelMapping0, nil
}

// InsertOrganization inserts a new organization and attaches it as this
// mapping's Organization relationship, wiring both sides of .R.
func (arcgisParcelMapping0 *ArcgisParcelMapping) InsertOrganization(ctx context.Context, exec bob.Executor, related *OrganizationSetter) error {
	var err error

	organization1, err := Organizations.Insert(related).One(ctx, exec)
	if err != nil {
		return fmt.Errorf("inserting related objects: %w", err)
	}

	_, err = attachArcgisParcelMappingOrganization0(ctx, exec, 1, arcgisParcelMapping0, organization1)
	if err != nil {
		return err
	}

	arcgisParcelMapping0.R.Organization = organization1

	organization1.R.ParcelMappings = append(organization1.R.ParcelMappings, arcgisParcelMapping0)

	return nil
}

// AttachOrganization attaches an existing organization as this mapping's
// Organization relationship, wiring both sides of .R.
func (arcgisParcelMapping0 *ArcgisParcelMapping) AttachOrganization(ctx context.Context, exec bob.Executor, organization1 *Organization) error {
	var err error

	_, err = attachArcgisParcelMappingOrganization0(ctx, exec, 1, arcgisParcelMapping0, organization1)
	if err != nil {
		return err
	}

	arcgisParcelMapping0.R.Organization = organization1

	organization1.R.ParcelMappings = append(organization1.R.ParcelMappings, arcgisParcelMapping0)

	return nil
}
}
|
||||
|
||||
// arcgisParcelMappingWhere provides typed WHERE helpers, one per column.
type arcgisParcelMappingWhere[Q psql.Filterable] struct {
	Destination               psql.WhereMod[Q, enums.ArcgisMappingdestinationparcel]
	LayerFeatureServiceItemID psql.WhereMod[Q, string]
	LayerIndex                psql.WhereMod[Q, int32]
	LayerFieldName            psql.WhereMod[Q, string]
	OrganizationID            psql.WhereMod[Q, int32]
}

// AliasedAs returns the same WHERE helpers bound to the given table alias.
func (arcgisParcelMappingWhere[Q]) AliasedAs(alias string) arcgisParcelMappingWhere[Q] {
	return buildArcgisParcelMappingWhere[Q](buildArcgisParcelMappingColumns(alias))
}

// buildArcgisParcelMappingWhere constructs the WHERE helpers from a column set.
func buildArcgisParcelMappingWhere[Q psql.Filterable](cols arcgisParcelMappingColumns) arcgisParcelMappingWhere[Q] {
	return arcgisParcelMappingWhere[Q]{
		Destination:               psql.Where[Q, enums.ArcgisMappingdestinationparcel](cols.Destination),
		LayerFeatureServiceItemID: psql.Where[Q, string](cols.LayerFeatureServiceItemID),
		LayerIndex:                psql.Where[Q, int32](cols.LayerIndex),
		LayerFieldName:            psql.Where[Q, string](cols.LayerFieldName),
		OrganizationID:            psql.Where[Q, int32](cols.OrganizationID),
	}
}

// Preload stores an already-retrieved related object in .R under the given
// relationship name, wiring the inverse side as well. A nil receiver is a no-op.
func (o *ArcgisParcelMapping) Preload(name string, retrieved any) error {
	if o == nil {
		return nil
	}

	switch name {
	case "LayerField":
		rel, ok := retrieved.(*ArcgisLayerField)
		if !ok {
			return fmt.Errorf("arcgisParcelMapping cannot load %T as %q", retrieved, name)
		}

		o.R.LayerField = rel

		if rel != nil {
			rel.R.ParcelMappings = ArcgisParcelMappingSlice{o}
		}
		return nil
	case "Organization":
		rel, ok := retrieved.(*Organization)
		if !ok {
			return fmt.Errorf("arcgisParcelMapping cannot load %T as %q", retrieved, name)
		}

		o.R.Organization = rel

		if rel != nil {
			rel.R.ParcelMappings = ArcgisParcelMappingSlice{o}
		}
		return nil
	default:
		return fmt.Errorf("arcgisParcelMapping has no relationship %q", name)
	}
}

// arcgisParcelMappingPreloader bundles the join-based preloader factories.
type arcgisParcelMappingPreloader struct {
	LayerField   func(...psql.PreloadOption) psql.Preloader
	Organization func(...psql.PreloadOption) psql.Preloader
}

// buildArcgisParcelMappingPreloader constructs the preloader factories,
// describing the join columns for each relationship side.
func buildArcgisParcelMappingPreloader() arcgisParcelMappingPreloader {
	return arcgisParcelMappingPreloader{
		LayerField: func(opts ...psql.PreloadOption) psql.Preloader {
			return psql.Preload[*ArcgisLayerField, ArcgisLayerFieldSlice](psql.PreloadRel{
				Name: "LayerField",
				Sides: []psql.PreloadSide{
					{
						From:        ArcgisParcelMappings,
						To:          ArcgisLayerFields,
						FromColumns: []string{"layer_feature_service_item_id", "layer_index", "layer_field_name"},
						ToColumns:   []string{"layer_feature_service_item_id", "layer_index", "name"},
					},
				},
			}, ArcgisLayerFields.Columns.Names(), opts...)
		},
		Organization: func(opts ...psql.PreloadOption) psql.Preloader {
			return psql.Preload[*Organization, OrganizationSlice](psql.PreloadRel{
				Name: "Organization",
				Sides: []psql.PreloadSide{
					{
						From:        ArcgisParcelMappings,
						To:          Organizations,
						FromColumns: []string{"organization_id"},
						ToColumns:   []string{"id"},
					},
				},
			}, Organizations.Columns.Names(), opts...)
		},
	}
}

// arcgisParcelMappingThenLoader bundles the post-query ("then load") factories.
type arcgisParcelMappingThenLoader[Q orm.Loadable] struct {
	LayerField   func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
	Organization func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
}

// buildArcgisParcelMappingThenLoader constructs the then-loaders, each of
// which dispatches to the retrieved value's LoadXxx method.
func buildArcgisParcelMappingThenLoader[Q orm.Loadable]() arcgisParcelMappingThenLoader[Q] {
	type LayerFieldLoadInterface interface {
		LoadLayerField(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
	}
	type OrganizationLoadInterface interface {
		LoadOrganization(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
	}

	return arcgisParcelMappingThenLoader[Q]{
		LayerField: thenLoadBuilder[Q](
			"LayerField",
			func(ctx context.Context, exec bob.Executor, retrieved LayerFieldLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
				return retrieved.LoadLayerField(ctx, exec, mods...)
			},
		),
		Organization: thenLoadBuilder[Q](
			"Organization",
			func(ctx context.Context, exec bob.Executor, retrieved OrganizationLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
				return retrieved.LoadOrganization(ctx, exec, mods...)
			},
		),
	}
}
|
||||
|
||||
// LoadLayerField loads the arcgisParcelMapping's LayerField into the .R struct
func (o *ArcgisParcelMapping) LoadLayerField(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
	if o == nil {
		return nil
	}

	// Reset the relationship
	o.R.LayerField = nil

	related, err := o.LayerField(mods...).One(ctx, exec)
	if err != nil {
		return err
	}

	// Wire the inverse side as well.
	related.R.ParcelMappings = ArcgisParcelMappingSlice{o}

	o.R.LayerField = related
	return nil
}

// LoadLayerField loads the arcgisParcelMapping's LayerField into the .R struct
// for every row in the slice, matching on the composite FK in memory.
func (os ArcgisParcelMappingSlice) LoadLayerField(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
	if len(os) == 0 {
		return nil
	}

	arcgisLayerFields, err := os.LayerField(mods...).All(ctx, exec)
	if err != nil {
		return err
	}

	for _, o := range os {
		if o == nil {
			continue
		}

		for _, rel := range arcgisLayerFields {
			// All three FK columns must match the related row's key.
			if !(o.LayerFeatureServiceItemID == rel.LayerFeatureServiceItemID) {
				continue
			}

			if !(o.LayerIndex == rel.LayerIndex) {
				continue
			}

			if !(o.LayerFieldName == rel.Name) {
				continue
			}

			rel.R.ParcelMappings = append(rel.R.ParcelMappings, o)

			o.R.LayerField = rel
			break
		}
	}

	return nil
}

// LoadOrganization loads the arcgisParcelMapping's Organization into the .R struct
func (o *ArcgisParcelMapping) LoadOrganization(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
	if o == nil {
		return nil
	}

	// Reset the relationship
	o.R.Organization = nil

	related, err := o.Organization(mods...).One(ctx, exec)
	if err != nil {
		return err
	}

	// Wire the inverse side as well.
	related.R.ParcelMappings = ArcgisParcelMappingSlice{o}

	o.R.Organization = related
	return nil
}

// LoadOrganization loads the arcgisParcelMapping's Organization into the .R struct
// for every row in the slice, matching on organization_id in memory.
func (os ArcgisParcelMappingSlice) LoadOrganization(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
	if len(os) == 0 {
		return nil
	}

	organizations, err := os.Organization(mods...).All(ctx, exec)
	if err != nil {
		return err
	}

	for _, o := range os {
		if o == nil {
			continue
		}

		for _, rel := range organizations {
			if !(o.OrganizationID == rel.ID) {
				continue
			}

			rel.R.ParcelMappings = append(rel.R.ParcelMappings, o)

			o.R.Organization = rel
			break
		}
	}

	return nil
}
|
||||
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
|
|
@ -1,582 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package models
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
|
||||
"github.com/Gleipnir-Technology/bob"
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql"
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql/dialect"
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql/dm"
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql/sm"
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql/um"
|
||||
"github.com/Gleipnir-Technology/bob/expr"
|
||||
"github.com/Gleipnir-Technology/bob/orm"
|
||||
"github.com/Gleipnir-Technology/bob/types/pgtypes"
|
||||
"github.com/aarondl/opt/omit"
|
||||
)
|
||||
|
||||
// ArcgisUserPrivilege is an object representing the database table.
|
||||
type ArcgisUserPrivilege struct {
|
||||
UserID string `db:"user_id,pk" `
|
||||
Privilege string `db:"privilege,pk" `
|
||||
|
||||
R arcgisUserPrivilegeR `db:"-" `
|
||||
}
|
||||
|
||||
// ArcgisUserPrivilegeSlice is an alias for a slice of pointers to ArcgisUserPrivilege.
|
||||
// This should almost always be used instead of []*ArcgisUserPrivilege.
|
||||
type ArcgisUserPrivilegeSlice []*ArcgisUserPrivilege
|
||||
|
||||
// ArcgisUserPrivileges contains methods to work with the user_privilege table
|
||||
var ArcgisUserPrivileges = psql.NewTablex[*ArcgisUserPrivilege, ArcgisUserPrivilegeSlice, *ArcgisUserPrivilegeSetter]("arcgis", "user_privilege", buildArcgisUserPrivilegeColumns("arcgis.user_privilege"))
|
||||
|
||||
// ArcgisUserPrivilegesQuery is a query on the user_privilege table
|
||||
type ArcgisUserPrivilegesQuery = *psql.ViewQuery[*ArcgisUserPrivilege, ArcgisUserPrivilegeSlice]
|
||||
|
||||
// arcgisUserPrivilegeR is where relationships are stored.
|
||||
type arcgisUserPrivilegeR struct {
|
||||
UserUser *ArcgisUser // arcgis.user_privilege.user_privilege_user_id_fkey
|
||||
}
|
||||
|
||||
func buildArcgisUserPrivilegeColumns(alias string) arcgisUserPrivilegeColumns {
|
||||
return arcgisUserPrivilegeColumns{
|
||||
ColumnsExpr: expr.NewColumnsExpr(
|
||||
"user_id", "privilege",
|
||||
).WithParent("arcgis.user_privilege"),
|
||||
tableAlias: alias,
|
||||
UserID: psql.Quote(alias, "user_id"),
|
||||
Privilege: psql.Quote(alias, "privilege"),
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisUserPrivilegeColumns struct {
|
||||
expr.ColumnsExpr
|
||||
tableAlias string
|
||||
UserID psql.Expression
|
||||
Privilege psql.Expression
|
||||
}
|
||||
|
||||
func (c arcgisUserPrivilegeColumns) Alias() string {
|
||||
return c.tableAlias
|
||||
}
|
||||
|
||||
func (arcgisUserPrivilegeColumns) AliasedAs(alias string) arcgisUserPrivilegeColumns {
|
||||
return buildArcgisUserPrivilegeColumns(alias)
|
||||
}
|
||||
|
||||
// ArcgisUserPrivilegeSetter is used for insert/upsert/update operations
|
||||
// All values are optional, and do not have to be set
|
||||
// Generated columns are not included
|
||||
type ArcgisUserPrivilegeSetter struct {
|
||||
UserID omit.Val[string] `db:"user_id,pk" `
|
||||
Privilege omit.Val[string] `db:"privilege,pk" `
|
||||
}
|
||||
|
||||
func (s ArcgisUserPrivilegeSetter) SetColumns() []string {
|
||||
vals := make([]string, 0, 2)
|
||||
if s.UserID.IsValue() {
|
||||
vals = append(vals, "user_id")
|
||||
}
|
||||
if s.Privilege.IsValue() {
|
||||
vals = append(vals, "privilege")
|
||||
}
|
||||
return vals
|
||||
}
|
||||
|
||||
func (s ArcgisUserPrivilegeSetter) Overwrite(t *ArcgisUserPrivilege) {
|
||||
if s.UserID.IsValue() {
|
||||
t.UserID = s.UserID.MustGet()
|
||||
}
|
||||
if s.Privilege.IsValue() {
|
||||
t.Privilege = s.Privilege.MustGet()
|
||||
}
|
||||
}
|
||||
|
||||
func (s *ArcgisUserPrivilegeSetter) Apply(q *dialect.InsertQuery) {
|
||||
q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
|
||||
return ArcgisUserPrivileges.BeforeInsertHooks.RunHooks(ctx, exec, s)
|
||||
})
|
||||
|
||||
q.AppendValues(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
|
||||
vals := make([]bob.Expression, 2)
|
||||
if s.UserID.IsValue() {
|
||||
vals[0] = psql.Arg(s.UserID.MustGet())
|
||||
} else {
|
||||
vals[0] = psql.Raw("DEFAULT")
|
||||
}
|
||||
|
||||
if s.Privilege.IsValue() {
|
||||
vals[1] = psql.Arg(s.Privilege.MustGet())
|
||||
} else {
|
||||
vals[1] = psql.Raw("DEFAULT")
|
||||
}
|
||||
|
||||
return bob.ExpressSlice(ctx, w, d, start, vals, "", ", ", "")
|
||||
}))
|
||||
}
|
||||
|
||||
func (s ArcgisUserPrivilegeSetter) UpdateMod() bob.Mod[*dialect.UpdateQuery] {
|
||||
return um.Set(s.Expressions()...)
|
||||
}
|
||||
|
||||
func (s ArcgisUserPrivilegeSetter) Expressions(prefix ...string) []bob.Expression {
|
||||
exprs := make([]bob.Expression, 0, 2)
|
||||
|
||||
if s.UserID.IsValue() {
|
||||
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
|
||||
psql.Quote(append(prefix, "user_id")...),
|
||||
psql.Arg(s.UserID),
|
||||
}})
|
||||
}
|
||||
|
||||
if s.Privilege.IsValue() {
|
||||
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
|
||||
psql.Quote(append(prefix, "privilege")...),
|
||||
psql.Arg(s.Privilege),
|
||||
}})
|
||||
}
|
||||
|
||||
return exprs
|
||||
}
|
||||
|
||||
// FindArcgisUserPrivilege retrieves a single record by primary key
|
||||
// If cols is empty Find will return all columns.
|
||||
func FindArcgisUserPrivilege(ctx context.Context, exec bob.Executor, UserIDPK string, PrivilegePK string, cols ...string) (*ArcgisUserPrivilege, error) {
|
||||
if len(cols) == 0 {
|
||||
return ArcgisUserPrivileges.Query(
|
||||
sm.Where(ArcgisUserPrivileges.Columns.UserID.EQ(psql.Arg(UserIDPK))),
|
||||
sm.Where(ArcgisUserPrivileges.Columns.Privilege.EQ(psql.Arg(PrivilegePK))),
|
||||
).One(ctx, exec)
|
||||
}
|
||||
|
||||
return ArcgisUserPrivileges.Query(
|
||||
sm.Where(ArcgisUserPrivileges.Columns.UserID.EQ(psql.Arg(UserIDPK))),
|
||||
sm.Where(ArcgisUserPrivileges.Columns.Privilege.EQ(psql.Arg(PrivilegePK))),
|
||||
sm.Columns(ArcgisUserPrivileges.Columns.Only(cols...)),
|
||||
).One(ctx, exec)
|
||||
}
|
||||
|
||||
// ArcgisUserPrivilegeExists checks the presence of a single record by primary key
|
||||
func ArcgisUserPrivilegeExists(ctx context.Context, exec bob.Executor, UserIDPK string, PrivilegePK string) (bool, error) {
|
||||
return ArcgisUserPrivileges.Query(
|
||||
sm.Where(ArcgisUserPrivileges.Columns.UserID.EQ(psql.Arg(UserIDPK))),
|
||||
sm.Where(ArcgisUserPrivileges.Columns.Privilege.EQ(psql.Arg(PrivilegePK))),
|
||||
).Exists(ctx, exec)
|
||||
}
|
||||
|
||||
// AfterQueryHook is called after ArcgisUserPrivilege is retrieved from the database
|
||||
func (o *ArcgisUserPrivilege) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error {
|
||||
var err error
|
||||
|
||||
switch queryType {
|
||||
case bob.QueryTypeSelect:
|
||||
ctx, err = ArcgisUserPrivileges.AfterSelectHooks.RunHooks(ctx, exec, ArcgisUserPrivilegeSlice{o})
|
||||
case bob.QueryTypeInsert:
|
||||
ctx, err = ArcgisUserPrivileges.AfterInsertHooks.RunHooks(ctx, exec, ArcgisUserPrivilegeSlice{o})
|
||||
case bob.QueryTypeUpdate:
|
||||
ctx, err = ArcgisUserPrivileges.AfterUpdateHooks.RunHooks(ctx, exec, ArcgisUserPrivilegeSlice{o})
|
||||
case bob.QueryTypeDelete:
|
||||
ctx, err = ArcgisUserPrivileges.AfterDeleteHooks.RunHooks(ctx, exec, ArcgisUserPrivilegeSlice{o})
|
||||
}
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
// primaryKeyVals returns the primary key values of the ArcgisUserPrivilege
|
||||
func (o *ArcgisUserPrivilege) primaryKeyVals() bob.Expression {
|
||||
return psql.ArgGroup(
|
||||
o.UserID,
|
||||
o.Privilege,
|
||||
)
|
||||
}
|
||||
|
||||
func (o *ArcgisUserPrivilege) pkEQ() dialect.Expression {
|
||||
return psql.Group(psql.Quote("arcgis.user_privilege", "user_id"), psql.Quote("arcgis.user_privilege", "privilege")).EQ(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
|
||||
return o.primaryKeyVals().WriteSQL(ctx, w, d, start)
|
||||
}))
|
||||
}
|
||||
|
||||
// Update uses an executor to update the ArcgisUserPrivilege
|
||||
func (o *ArcgisUserPrivilege) Update(ctx context.Context, exec bob.Executor, s *ArcgisUserPrivilegeSetter) error {
|
||||
v, err := ArcgisUserPrivileges.Update(s.UpdateMod(), um.Where(o.pkEQ())).One(ctx, exec)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
o.R = v.R
|
||||
*o = *v
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Delete deletes a single ArcgisUserPrivilege record with an executor
|
||||
func (o *ArcgisUserPrivilege) Delete(ctx context.Context, exec bob.Executor) error {
|
||||
_, err := ArcgisUserPrivileges.Delete(dm.Where(o.pkEQ())).Exec(ctx, exec)
|
||||
return err
|
||||
}
|
||||
|
||||
// Reload refreshes the ArcgisUserPrivilege using the executor
|
||||
func (o *ArcgisUserPrivilege) Reload(ctx context.Context, exec bob.Executor) error {
|
||||
o2, err := ArcgisUserPrivileges.Query(
|
||||
sm.Where(ArcgisUserPrivileges.Columns.UserID.EQ(psql.Arg(o.UserID))),
|
||||
sm.Where(ArcgisUserPrivileges.Columns.Privilege.EQ(psql.Arg(o.Privilege))),
|
||||
).One(ctx, exec)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
o2.R = o.R
|
||||
*o = *o2
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// AfterQueryHook is called after ArcgisUserPrivilegeSlice is retrieved from the database
|
||||
func (o ArcgisUserPrivilegeSlice) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error {
|
||||
var err error
|
||||
|
||||
switch queryType {
|
||||
case bob.QueryTypeSelect:
|
||||
ctx, err = ArcgisUserPrivileges.AfterSelectHooks.RunHooks(ctx, exec, o)
|
||||
case bob.QueryTypeInsert:
|
||||
ctx, err = ArcgisUserPrivileges.AfterInsertHooks.RunHooks(ctx, exec, o)
|
||||
case bob.QueryTypeUpdate:
|
||||
ctx, err = ArcgisUserPrivileges.AfterUpdateHooks.RunHooks(ctx, exec, o)
|
||||
case bob.QueryTypeDelete:
|
||||
ctx, err = ArcgisUserPrivileges.AfterDeleteHooks.RunHooks(ctx, exec, o)
|
||||
}
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func (o ArcgisUserPrivilegeSlice) pkIN() dialect.Expression {
|
||||
if len(o) == 0 {
|
||||
return psql.Raw("NULL")
|
||||
}
|
||||
|
||||
return psql.Group(psql.Quote("arcgis.user_privilege", "user_id"), psql.Quote("arcgis.user_privilege", "privilege")).In(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
|
||||
pkPairs := make([]bob.Expression, len(o))
|
||||
for i, row := range o {
|
||||
pkPairs[i] = row.primaryKeyVals()
|
||||
}
|
||||
return bob.ExpressSlice(ctx, w, d, start, pkPairs, "", ", ", "")
|
||||
}))
|
||||
}
|
||||
|
||||
// copyMatchingRows finds models in the given slice that have the same primary key
|
||||
// then it first copies the existing relationships from the old model to the new model
|
||||
// and then replaces the old model in the slice with the new model
|
||||
func (o ArcgisUserPrivilegeSlice) copyMatchingRows(from ...*ArcgisUserPrivilege) {
|
||||
for i, old := range o {
|
||||
for _, new := range from {
|
||||
if new.UserID != old.UserID {
|
||||
continue
|
||||
}
|
||||
if new.Privilege != old.Privilege {
|
||||
continue
|
||||
}
|
||||
new.R = old.R
|
||||
o[i] = new
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// UpdateMod modifies an update query with "WHERE primary_key IN (o...)"
|
||||
func (o ArcgisUserPrivilegeSlice) UpdateMod() bob.Mod[*dialect.UpdateQuery] {
|
||||
return bob.ModFunc[*dialect.UpdateQuery](func(q *dialect.UpdateQuery) {
|
||||
q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
|
||||
return ArcgisUserPrivileges.BeforeUpdateHooks.RunHooks(ctx, exec, o)
|
||||
})
|
||||
|
||||
q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error {
|
||||
var err error
|
||||
switch retrieved := retrieved.(type) {
|
||||
case *ArcgisUserPrivilege:
|
||||
o.copyMatchingRows(retrieved)
|
||||
case []*ArcgisUserPrivilege:
|
||||
o.copyMatchingRows(retrieved...)
|
||||
case ArcgisUserPrivilegeSlice:
|
||||
o.copyMatchingRows(retrieved...)
|
||||
default:
|
||||
// If the retrieved value is not a ArcgisUserPrivilege or a slice of ArcgisUserPrivilege
|
||||
// then run the AfterUpdateHooks on the slice
|
||||
_, err = ArcgisUserPrivileges.AfterUpdateHooks.RunHooks(ctx, exec, o)
|
||||
}
|
||||
|
||||
return err
|
||||
}))
|
||||
|
||||
q.AppendWhere(o.pkIN())
|
||||
})
|
||||
}
|
||||
|
||||
// DeleteMod modifies an delete query with "WHERE primary_key IN (o...)"
|
||||
func (o ArcgisUserPrivilegeSlice) DeleteMod() bob.Mod[*dialect.DeleteQuery] {
|
||||
return bob.ModFunc[*dialect.DeleteQuery](func(q *dialect.DeleteQuery) {
|
||||
q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
|
||||
return ArcgisUserPrivileges.BeforeDeleteHooks.RunHooks(ctx, exec, o)
|
||||
})
|
||||
|
||||
q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error {
|
||||
var err error
|
||||
switch retrieved := retrieved.(type) {
|
||||
case *ArcgisUserPrivilege:
|
||||
o.copyMatchingRows(retrieved)
|
||||
case []*ArcgisUserPrivilege:
|
||||
o.copyMatchingRows(retrieved...)
|
||||
case ArcgisUserPrivilegeSlice:
|
||||
o.copyMatchingRows(retrieved...)
|
||||
default:
|
||||
// If the retrieved value is not a ArcgisUserPrivilege or a slice of ArcgisUserPrivilege
|
||||
// then run the AfterDeleteHooks on the slice
|
||||
_, err = ArcgisUserPrivileges.AfterDeleteHooks.RunHooks(ctx, exec, o)
|
||||
}
|
||||
|
||||
return err
|
||||
}))
|
||||
|
||||
q.AppendWhere(o.pkIN())
|
||||
})
|
||||
}
|
||||
|
||||
func (o ArcgisUserPrivilegeSlice) UpdateAll(ctx context.Context, exec bob.Executor, vals ArcgisUserPrivilegeSetter) error {
|
||||
if len(o) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
_, err := ArcgisUserPrivileges.Update(vals.UpdateMod(), o.UpdateMod()).All(ctx, exec)
|
||||
return err
|
||||
}
|
||||
|
||||
func (o ArcgisUserPrivilegeSlice) DeleteAll(ctx context.Context, exec bob.Executor) error {
|
||||
if len(o) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
_, err := ArcgisUserPrivileges.Delete(o.DeleteMod()).Exec(ctx, exec)
|
||||
return err
|
||||
}
|
||||
|
||||
func (o ArcgisUserPrivilegeSlice) ReloadAll(ctx context.Context, exec bob.Executor) error {
|
||||
if len(o) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
o2, err := ArcgisUserPrivileges.Query(sm.Where(o.pkIN())).All(ctx, exec)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
o.copyMatchingRows(o2...)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// UserUser starts a query for related objects on arcgis.user_
|
||||
func (o *ArcgisUserPrivilege) UserUser(mods ...bob.Mod[*dialect.SelectQuery]) ArcgisUsersQuery {
|
||||
return ArcgisUsers.Query(append(mods,
|
||||
sm.Where(ArcgisUsers.Columns.ID.EQ(psql.Arg(o.UserID))),
|
||||
)...)
|
||||
}
|
||||
|
||||
func (os ArcgisUserPrivilegeSlice) UserUser(mods ...bob.Mod[*dialect.SelectQuery]) ArcgisUsersQuery {
|
||||
pkUserID := make(pgtypes.Array[string], 0, len(os))
|
||||
for _, o := range os {
|
||||
if o == nil {
|
||||
continue
|
||||
}
|
||||
pkUserID = append(pkUserID, o.UserID)
|
||||
}
|
||||
PKArgExpr := psql.Select(sm.Columns(
|
||||
psql.F("unnest", psql.Cast(psql.Arg(pkUserID), "text[]")),
|
||||
))
|
||||
|
||||
return ArcgisUsers.Query(append(mods,
|
||||
sm.Where(psql.Group(ArcgisUsers.Columns.ID).OP("IN", PKArgExpr)),
|
||||
)...)
|
||||
}
|
||||
|
||||
func attachArcgisUserPrivilegeUserUser0(ctx context.Context, exec bob.Executor, count int, arcgisUserPrivilege0 *ArcgisUserPrivilege, arcgisuser1 *ArcgisUser) (*ArcgisUserPrivilege, error) {
|
||||
setter := &ArcgisUserPrivilegeSetter{
|
||||
UserID: omit.From(arcgisuser1.ID),
|
||||
}
|
||||
|
||||
err := arcgisUserPrivilege0.Update(ctx, exec, setter)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("attachArcgisUserPrivilegeUserUser0: %w", err)
|
||||
}
|
||||
|
||||
return arcgisUserPrivilege0, nil
|
||||
}
|
||||
|
||||
func (arcgisUserPrivilege0 *ArcgisUserPrivilege) InsertUserUser(ctx context.Context, exec bob.Executor, related *ArcgisUserSetter) error {
|
||||
var err error
|
||||
|
||||
arcgisuser1, err := ArcgisUsers.Insert(related).One(ctx, exec)
|
||||
if err != nil {
|
||||
return fmt.Errorf("inserting related objects: %w", err)
|
||||
}
|
||||
|
||||
_, err = attachArcgisUserPrivilegeUserUser0(ctx, exec, 1, arcgisUserPrivilege0, arcgisuser1)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
arcgisUserPrivilege0.R.UserUser = arcgisuser1
|
||||
|
||||
arcgisuser1.R.UserUserPrivileges = append(arcgisuser1.R.UserUserPrivileges, arcgisUserPrivilege0)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (arcgisUserPrivilege0 *ArcgisUserPrivilege) AttachUserUser(ctx context.Context, exec bob.Executor, arcgisuser1 *ArcgisUser) error {
|
||||
var err error
|
||||
|
||||
_, err = attachArcgisUserPrivilegeUserUser0(ctx, exec, 1, arcgisUserPrivilege0, arcgisuser1)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
arcgisUserPrivilege0.R.UserUser = arcgisuser1
|
||||
|
||||
arcgisuser1.R.UserUserPrivileges = append(arcgisuser1.R.UserUserPrivileges, arcgisUserPrivilege0)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
type arcgisUserPrivilegeWhere[Q psql.Filterable] struct {
|
||||
UserID psql.WhereMod[Q, string]
|
||||
Privilege psql.WhereMod[Q, string]
|
||||
}
|
||||
|
||||
func (arcgisUserPrivilegeWhere[Q]) AliasedAs(alias string) arcgisUserPrivilegeWhere[Q] {
|
||||
return buildArcgisUserPrivilegeWhere[Q](buildArcgisUserPrivilegeColumns(alias))
|
||||
}
|
||||
|
||||
func buildArcgisUserPrivilegeWhere[Q psql.Filterable](cols arcgisUserPrivilegeColumns) arcgisUserPrivilegeWhere[Q] {
|
||||
return arcgisUserPrivilegeWhere[Q]{
|
||||
UserID: psql.Where[Q, string](cols.UserID),
|
||||
Privilege: psql.Where[Q, string](cols.Privilege),
|
||||
}
|
||||
}
|
||||
|
||||
func (o *ArcgisUserPrivilege) Preload(name string, retrieved any) error {
|
||||
if o == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
switch name {
|
||||
case "UserUser":
|
||||
rel, ok := retrieved.(*ArcgisUser)
|
||||
if !ok {
|
||||
return fmt.Errorf("arcgisUserPrivilege cannot load %T as %q", retrieved, name)
|
||||
}
|
||||
|
||||
o.R.UserUser = rel
|
||||
|
||||
if rel != nil {
|
||||
rel.R.UserUserPrivileges = ArcgisUserPrivilegeSlice{o}
|
||||
}
|
||||
return nil
|
||||
default:
|
||||
return fmt.Errorf("arcgisUserPrivilege has no relationship %q", name)
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisUserPrivilegePreloader struct {
|
||||
UserUser func(...psql.PreloadOption) psql.Preloader
|
||||
}
|
||||
|
||||
func buildArcgisUserPrivilegePreloader() arcgisUserPrivilegePreloader {
|
||||
return arcgisUserPrivilegePreloader{
|
||||
UserUser: func(opts ...psql.PreloadOption) psql.Preloader {
|
||||
return psql.Preload[*ArcgisUser, ArcgisUserSlice](psql.PreloadRel{
|
||||
Name: "UserUser",
|
||||
Sides: []psql.PreloadSide{
|
||||
{
|
||||
From: ArcgisUserPrivileges,
|
||||
To: ArcgisUsers,
|
||||
FromColumns: []string{"user_id"},
|
||||
ToColumns: []string{"id"},
|
||||
},
|
||||
},
|
||||
}, ArcgisUsers.Columns.Names(), opts...)
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisUserPrivilegeThenLoader[Q orm.Loadable] struct {
|
||||
UserUser func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
|
||||
}
|
||||
|
||||
func buildArcgisUserPrivilegeThenLoader[Q orm.Loadable]() arcgisUserPrivilegeThenLoader[Q] {
|
||||
type UserUserLoadInterface interface {
|
||||
LoadUserUser(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
|
||||
}
|
||||
|
||||
return arcgisUserPrivilegeThenLoader[Q]{
|
||||
UserUser: thenLoadBuilder[Q](
|
||||
"UserUser",
|
||||
func(ctx context.Context, exec bob.Executor, retrieved UserUserLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
||||
return retrieved.LoadUserUser(ctx, exec, mods...)
|
||||
},
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
// LoadUserUser loads the arcgisUserPrivilege's UserUser into the .R struct
|
||||
func (o *ArcgisUserPrivilege) LoadUserUser(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
||||
if o == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Reset the relationship
|
||||
o.R.UserUser = nil
|
||||
|
||||
related, err := o.UserUser(mods...).One(ctx, exec)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
related.R.UserUserPrivileges = ArcgisUserPrivilegeSlice{o}
|
||||
|
||||
o.R.UserUser = related
|
||||
return nil
|
||||
}
|
||||
|
||||
// LoadUserUser loads the arcgisUserPrivilege's UserUser into the .R struct
|
||||
func (os ArcgisUserPrivilegeSlice) LoadUserUser(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
||||
if len(os) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
arcgisusers, err := os.UserUser(mods...).All(ctx, exec)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, o := range os {
|
||||
if o == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
for _, rel := range arcgisusers {
|
||||
|
||||
if !(o.UserID == rel.ID) {
|
||||
continue
|
||||
}
|
||||
|
||||
rel.R.UserUserPrivileges = append(rel.R.UserUserPrivileges, o)
|
||||
|
||||
o.R.UserUser = rel
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
|
@ -18,16 +18,6 @@ var Preload = getPreloaders()
|
|||
|
||||
type preloaders struct {
|
||||
Address addressPreloader
|
||||
ArcgisAccount arcgisAccountPreloader
|
||||
ArcgisAddressMapping arcgisAddressMappingPreloader
|
||||
ArcgisLayer arcgisLayerPreloader
|
||||
ArcgisLayerField arcgisLayerFieldPreloader
|
||||
ArcgisOauthToken arcgisOauthTokenPreloader
|
||||
ArcgisParcelMapping arcgisParcelMappingPreloader
|
||||
ArcgisServiceFeature arcgisServiceFeaturePreloader
|
||||
ArcgisServiceMap arcgisServiceMapPreloader
|
||||
ArcgisUser arcgisuserPreloader
|
||||
ArcgisUserPrivilege arcgisUserPrivilegePreloader
|
||||
CommsEmailContact commsEmailContactPreloader
|
||||
CommsEmailLog commsEmailLogPreloader
|
||||
CommsEmailTemplate commsEmailTemplatePreloader
|
||||
|
|
@ -122,16 +112,6 @@ type preloaders struct {
|
|||
func getPreloaders() preloaders {
|
||||
return preloaders{
|
||||
Address: buildAddressPreloader(),
|
||||
ArcgisAccount: buildArcgisAccountPreloader(),
|
||||
ArcgisAddressMapping: buildArcgisAddressMappingPreloader(),
|
||||
ArcgisLayer: buildArcgisLayerPreloader(),
|
||||
ArcgisLayerField: buildArcgisLayerFieldPreloader(),
|
||||
ArcgisOauthToken: buildArcgisOauthTokenPreloader(),
|
||||
ArcgisParcelMapping: buildArcgisParcelMappingPreloader(),
|
||||
ArcgisServiceFeature: buildArcgisServiceFeaturePreloader(),
|
||||
ArcgisServiceMap: buildArcgisServiceMapPreloader(),
|
||||
ArcgisUser: buildArcgisUserPreloader(),
|
||||
ArcgisUserPrivilege: buildArcgisUserPrivilegePreloader(),
|
||||
CommsEmailContact: buildCommsEmailContactPreloader(),
|
||||
CommsEmailLog: buildCommsEmailLogPreloader(),
|
||||
CommsEmailTemplate: buildCommsEmailTemplatePreloader(),
|
||||
|
|
@ -232,16 +212,6 @@ var (
|
|||
|
||||
type thenLoaders[Q orm.Loadable] struct {
|
||||
Address addressThenLoader[Q]
|
||||
ArcgisAccount arcgisAccountThenLoader[Q]
|
||||
ArcgisAddressMapping arcgisAddressMappingThenLoader[Q]
|
||||
ArcgisLayer arcgisLayerThenLoader[Q]
|
||||
ArcgisLayerField arcgisLayerFieldThenLoader[Q]
|
||||
ArcgisOauthToken arcgisOauthTokenThenLoader[Q]
|
||||
ArcgisParcelMapping arcgisParcelMappingThenLoader[Q]
|
||||
ArcgisServiceFeature arcgisServiceFeatureThenLoader[Q]
|
||||
ArcgisServiceMap arcgisServiceMapThenLoader[Q]
|
||||
ArcgisUser arcgisuserThenLoader[Q]
|
||||
ArcgisUserPrivilege arcgisUserPrivilegeThenLoader[Q]
|
||||
CommsEmailContact commsEmailContactThenLoader[Q]
|
||||
CommsEmailLog commsEmailLogThenLoader[Q]
|
||||
CommsEmailTemplate commsEmailTemplateThenLoader[Q]
|
||||
|
|
@ -336,16 +306,6 @@ type thenLoaders[Q orm.Loadable] struct {
|
|||
func getThenLoaders[Q orm.Loadable]() thenLoaders[Q] {
|
||||
return thenLoaders[Q]{
|
||||
Address: buildAddressThenLoader[Q](),
|
||||
ArcgisAccount: buildArcgisAccountThenLoader[Q](),
|
||||
ArcgisAddressMapping: buildArcgisAddressMappingThenLoader[Q](),
|
||||
ArcgisLayer: buildArcgisLayerThenLoader[Q](),
|
||||
ArcgisLayerField: buildArcgisLayerFieldThenLoader[Q](),
|
||||
ArcgisOauthToken: buildArcgisOauthTokenThenLoader[Q](),
|
||||
ArcgisParcelMapping: buildArcgisParcelMappingThenLoader[Q](),
|
||||
ArcgisServiceFeature: buildArcgisServiceFeatureThenLoader[Q](),
|
||||
ArcgisServiceMap: buildArcgisServiceMapThenLoader[Q](),
|
||||
ArcgisUser: buildArcgisUserThenLoader[Q](),
|
||||
ArcgisUserPrivilege: buildArcgisUserPrivilegeThenLoader[Q](),
|
||||
CommsEmailContact: buildCommsEmailContactThenLoader[Q](),
|
||||
CommsEmailLog: buildCommsEmailLogThenLoader[Q](),
|
||||
CommsEmailTemplate: buildCommsEmailTemplateThenLoader[Q](),
|
||||
|
|
|
|||
|
|
@ -18,16 +18,6 @@ var (
|
|||
|
||||
func Where[Q psql.Filterable]() struct {
|
||||
Addresses addressWhere[Q]
|
||||
ArcgisAccounts arcgisAccountWhere[Q]
|
||||
ArcgisAddressMappings arcgisAddressMappingWhere[Q]
|
||||
ArcgisLayers arcgisLayerWhere[Q]
|
||||
ArcgisLayerFields arcgisLayerFieldWhere[Q]
|
||||
ArcgisOauthTokens arcgisOauthTokenWhere[Q]
|
||||
ArcgisParcelMappings arcgisParcelMappingWhere[Q]
|
||||
ArcgisServiceFeatures arcgisServiceFeatureWhere[Q]
|
||||
ArcgisServiceMaps arcgisServiceMapWhere[Q]
|
||||
ArcgisUsers arcgisuserWhere[Q]
|
||||
ArcgisUserPrivileges arcgisUserPrivilegeWhere[Q]
|
||||
CommsEmailContacts commsEmailContactWhere[Q]
|
||||
CommsEmailLogs commsEmailLogWhere[Q]
|
||||
CommsEmailTemplates commsEmailTemplateWhere[Q]
|
||||
|
|
@ -130,16 +120,6 @@ func Where[Q psql.Filterable]() struct {
|
|||
} {
|
||||
return struct {
|
||||
Addresses addressWhere[Q]
|
||||
ArcgisAccounts arcgisAccountWhere[Q]
|
||||
ArcgisAddressMappings arcgisAddressMappingWhere[Q]
|
||||
ArcgisLayers arcgisLayerWhere[Q]
|
||||
ArcgisLayerFields arcgisLayerFieldWhere[Q]
|
||||
ArcgisOauthTokens arcgisOauthTokenWhere[Q]
|
||||
ArcgisParcelMappings arcgisParcelMappingWhere[Q]
|
||||
ArcgisServiceFeatures arcgisServiceFeatureWhere[Q]
|
||||
ArcgisServiceMaps arcgisServiceMapWhere[Q]
|
||||
ArcgisUsers arcgisuserWhere[Q]
|
||||
ArcgisUserPrivileges arcgisUserPrivilegeWhere[Q]
|
||||
CommsEmailContacts commsEmailContactWhere[Q]
|
||||
CommsEmailLogs commsEmailLogWhere[Q]
|
||||
CommsEmailTemplates commsEmailTemplateWhere[Q]
|
||||
|
|
@ -241,16 +221,6 @@ func Where[Q psql.Filterable]() struct {
|
|||
Users userWhere[Q]
|
||||
}{
|
||||
Addresses: buildAddressWhere[Q](Addresses.Columns),
|
||||
ArcgisAccounts: buildArcgisAccountWhere[Q](ArcgisAccounts.Columns),
|
||||
ArcgisAddressMappings: buildArcgisAddressMappingWhere[Q](ArcgisAddressMappings.Columns),
|
||||
ArcgisLayers: buildArcgisLayerWhere[Q](ArcgisLayers.Columns),
|
||||
ArcgisLayerFields: buildArcgisLayerFieldWhere[Q](ArcgisLayerFields.Columns),
|
||||
ArcgisOauthTokens: buildArcgisOauthTokenWhere[Q](ArcgisOauthTokens.Columns),
|
||||
ArcgisParcelMappings: buildArcgisParcelMappingWhere[Q](ArcgisParcelMappings.Columns),
|
||||
ArcgisServiceFeatures: buildArcgisServiceFeatureWhere[Q](ArcgisServiceFeatures.Columns),
|
||||
ArcgisServiceMaps: buildArcgisServiceMapWhere[Q](ArcgisServiceMaps.Columns),
|
||||
ArcgisUsers: buildArcgisUserWhere[Q](ArcgisUsers.Columns),
|
||||
ArcgisUserPrivileges: buildArcgisUserPrivilegeWhere[Q](ArcgisUserPrivileges.Columns),
|
||||
CommsEmailContacts: buildCommsEmailContactWhere[Q](CommsEmailContacts.Columns),
|
||||
CommsEmailLogs: buildCommsEmailLogWhere[Q](CommsEmailLogs.Columns),
|
||||
CommsEmailTemplates: buildCommsEmailTemplateWhere[Q](CommsEmailTemplates.Columns),
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load diff
|
|
@ -43,8 +43,7 @@ type TileServicesQuery = *psql.ViewQuery[*TileService, TileServiceSlice]
|
|||
|
||||
// tileServiceR is where relationships are stored.
|
||||
type tileServiceR struct {
|
||||
CachedImages TileCachedImageSlice // tile.cached_image.cached_image_service_id_fkey
|
||||
ArcgisServiceMap *ArcgisServiceMap // tile.service.service_arcgis_id_fkey
|
||||
CachedImages TileCachedImageSlice // tile.cached_image.cached_image_service_id_fkey
|
||||
}
|
||||
|
||||
func buildTileServiceColumns(alias string) tileServiceColumns {
|
||||
|
|
@ -417,30 +416,6 @@ func (os TileServiceSlice) CachedImages(mods ...bob.Mod[*dialect.SelectQuery]) T
|
|||
)...)
|
||||
}
|
||||
|
||||
// ArcgisServiceMap starts a query for related objects on arcgis.service_map
|
||||
func (o *TileService) ArcgisServiceMap(mods ...bob.Mod[*dialect.SelectQuery]) ArcgisServiceMapsQuery {
|
||||
return ArcgisServiceMaps.Query(append(mods,
|
||||
sm.Where(ArcgisServiceMaps.Columns.ArcgisID.EQ(psql.Arg(o.ArcgisID))),
|
||||
)...)
|
||||
}
|
||||
|
||||
func (os TileServiceSlice) ArcgisServiceMap(mods ...bob.Mod[*dialect.SelectQuery]) ArcgisServiceMapsQuery {
|
||||
pkArcgisID := make(pgtypes.Array[null.Val[string]], 0, len(os))
|
||||
for _, o := range os {
|
||||
if o == nil {
|
||||
continue
|
||||
}
|
||||
pkArcgisID = append(pkArcgisID, o.ArcgisID)
|
||||
}
|
||||
PKArgExpr := psql.Select(sm.Columns(
|
||||
psql.F("unnest", psql.Cast(psql.Arg(pkArcgisID), "text[]")),
|
||||
))
|
||||
|
||||
return ArcgisServiceMaps.Query(append(mods,
|
||||
sm.Where(psql.Group(ArcgisServiceMaps.Columns.ArcgisID).OP("IN", PKArgExpr)),
|
||||
)...)
|
||||
}
|
||||
|
||||
func insertTileServiceCachedImages0(ctx context.Context, exec bob.Executor, tileCachedImages1 []*TileCachedImageSetter, tileService0 *TileService) (TileCachedImageSlice, error) {
|
||||
for i := range tileCachedImages1 {
|
||||
tileCachedImages1[i].ServiceID = omit.From(tileService0.ID)
|
||||
|
|
@ -509,54 +484,6 @@ func (tileService0 *TileService) AttachCachedImages(ctx context.Context, exec bo
|
|||
return nil
|
||||
}
|
||||
|
||||
func attachTileServiceArcgisServiceMap0(ctx context.Context, exec bob.Executor, count int, tileService0 *TileService, arcgisServiceMap1 *ArcgisServiceMap) (*TileService, error) {
|
||||
setter := &TileServiceSetter{
|
||||
ArcgisID: omitnull.From(arcgisServiceMap1.ArcgisID),
|
||||
}
|
||||
|
||||
err := tileService0.Update(ctx, exec, setter)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("attachTileServiceArcgisServiceMap0: %w", err)
|
||||
}
|
||||
|
||||
return tileService0, nil
|
||||
}
|
||||
|
||||
func (tileService0 *TileService) InsertArcgisServiceMap(ctx context.Context, exec bob.Executor, related *ArcgisServiceMapSetter) error {
|
||||
var err error
|
||||
|
||||
arcgisServiceMap1, err := ArcgisServiceMaps.Insert(related).One(ctx, exec)
|
||||
if err != nil {
|
||||
return fmt.Errorf("inserting related objects: %w", err)
|
||||
}
|
||||
|
||||
_, err = attachTileServiceArcgisServiceMap0(ctx, exec, 1, tileService0, arcgisServiceMap1)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
tileService0.R.ArcgisServiceMap = arcgisServiceMap1
|
||||
|
||||
arcgisServiceMap1.R.ArcgisServices = append(arcgisServiceMap1.R.ArcgisServices, tileService0)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (tileService0 *TileService) AttachArcgisServiceMap(ctx context.Context, exec bob.Executor, arcgisServiceMap1 *ArcgisServiceMap) error {
|
||||
var err error
|
||||
|
||||
_, err = attachTileServiceArcgisServiceMap0(ctx, exec, 1, tileService0, arcgisServiceMap1)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
tileService0.R.ArcgisServiceMap = arcgisServiceMap1
|
||||
|
||||
arcgisServiceMap1.R.ArcgisServices = append(arcgisServiceMap1.R.ArcgisServices, tileService0)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
type tileServiceWhere[Q psql.Filterable] struct {
|
||||
ID psql.WhereMod[Q, int32]
|
||||
Name psql.WhereMod[Q, string]
|
||||
|
|
@ -595,57 +522,25 @@ func (o *TileService) Preload(name string, retrieved any) error {
|
|||
}
|
||||
}
|
||||
return nil
|
||||
case "ArcgisServiceMap":
|
||||
rel, ok := retrieved.(*ArcgisServiceMap)
|
||||
if !ok {
|
||||
return fmt.Errorf("tileService cannot load %T as %q", retrieved, name)
|
||||
}
|
||||
|
||||
o.R.ArcgisServiceMap = rel
|
||||
|
||||
if rel != nil {
|
||||
rel.R.ArcgisServices = TileServiceSlice{o}
|
||||
}
|
||||
return nil
|
||||
default:
|
||||
return fmt.Errorf("tileService has no relationship %q", name)
|
||||
}
|
||||
}
|
||||
|
||||
type tileServicePreloader struct {
|
||||
ArcgisServiceMap func(...psql.PreloadOption) psql.Preloader
|
||||
}
|
||||
type tileServicePreloader struct{}
|
||||
|
||||
func buildTileServicePreloader() tileServicePreloader {
|
||||
return tileServicePreloader{
|
||||
ArcgisServiceMap: func(opts ...psql.PreloadOption) psql.Preloader {
|
||||
return psql.Preload[*ArcgisServiceMap, ArcgisServiceMapSlice](psql.PreloadRel{
|
||||
Name: "ArcgisServiceMap",
|
||||
Sides: []psql.PreloadSide{
|
||||
{
|
||||
From: TileServices,
|
||||
To: ArcgisServiceMaps,
|
||||
FromColumns: []string{"arcgis_id"},
|
||||
ToColumns: []string{"arcgis_id"},
|
||||
},
|
||||
},
|
||||
}, ArcgisServiceMaps.Columns.Names(), opts...)
|
||||
},
|
||||
}
|
||||
return tileServicePreloader{}
|
||||
}
|
||||
|
||||
type tileServiceThenLoader[Q orm.Loadable] struct {
|
||||
CachedImages func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
|
||||
ArcgisServiceMap func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
|
||||
CachedImages func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
|
||||
}
|
||||
|
||||
func buildTileServiceThenLoader[Q orm.Loadable]() tileServiceThenLoader[Q] {
|
||||
type CachedImagesLoadInterface interface {
|
||||
LoadCachedImages(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
|
||||
}
|
||||
type ArcgisServiceMapLoadInterface interface {
|
||||
LoadArcgisServiceMap(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
|
||||
}
|
||||
|
||||
return tileServiceThenLoader[Q]{
|
||||
CachedImages: thenLoadBuilder[Q](
|
||||
|
|
@ -654,12 +549,6 @@ func buildTileServiceThenLoader[Q orm.Loadable]() tileServiceThenLoader[Q] {
|
|||
return retrieved.LoadCachedImages(ctx, exec, mods...)
|
||||
},
|
||||
),
|
||||
ArcgisServiceMap: thenLoadBuilder[Q](
|
||||
"ArcgisServiceMap",
|
||||
func(ctx context.Context, exec bob.Executor, retrieved ArcgisServiceMapLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
||||
return retrieved.LoadArcgisServiceMap(ctx, exec, mods...)
|
||||
},
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -723,58 +612,3 @@ func (os TileServiceSlice) LoadCachedImages(ctx context.Context, exec bob.Execut
|
|||
|
||||
return nil
|
||||
}
|
||||
|
||||
// LoadArcgisServiceMap loads the tileService's ArcgisServiceMap into the .R struct
|
||||
func (o *TileService) LoadArcgisServiceMap(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
||||
if o == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Reset the relationship
|
||||
o.R.ArcgisServiceMap = nil
|
||||
|
||||
related, err := o.ArcgisServiceMap(mods...).One(ctx, exec)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
related.R.ArcgisServices = TileServiceSlice{o}
|
||||
|
||||
o.R.ArcgisServiceMap = related
|
||||
return nil
|
||||
}
|
||||
|
||||
// LoadArcgisServiceMap loads the tileService's ArcgisServiceMap into the .R struct
|
||||
func (os TileServiceSlice) LoadArcgisServiceMap(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
||||
if len(os) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
arcgisServiceMaps, err := os.ArcgisServiceMap(mods...).All(ctx, exec)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, o := range os {
|
||||
if o == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
for _, rel := range arcgisServiceMaps {
|
||||
if !o.ArcgisID.IsValue() {
|
||||
continue
|
||||
}
|
||||
|
||||
if !(o.ArcgisID.IsValue() && o.ArcgisID.MustGet() == rel.ArcgisID) {
|
||||
continue
|
||||
}
|
||||
|
||||
rel.R.ArcgisServices = append(rel.R.ArcgisServices, o)
|
||||
|
||||
o.R.ArcgisServiceMap = rel
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -60,8 +60,6 @@ type UsersQuery = *psql.ViewQuery[*User, UserSlice]
|
|||
|
||||
// userR is where relationships are stored.
|
||||
type userR struct {
|
||||
UserOauthTokens ArcgisOauthTokenSlice // arcgis.oauth_token.oauth_token_user_id_fkey
|
||||
PublicUserUser ArcgisUserSlice // arcgis.user_.user__public_user_id_fkey
|
||||
CreatorTextJobs CommsTextJobSlice // comms.text_job.text_job_creator_id_fkey
|
||||
ClosedByCommunications CommunicationSlice // communication.communication_closed_by_fkey
|
||||
InvalidatedByCommunications CommunicationSlice // communication.communication_invalidated_by_fkey
|
||||
|
|
@ -748,54 +746,6 @@ func (o UserSlice) ReloadAll(ctx context.Context, exec bob.Executor) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
// UserOauthTokens starts a query for related objects on arcgis.oauth_token
|
||||
func (o *User) UserOauthTokens(mods ...bob.Mod[*dialect.SelectQuery]) ArcgisOauthTokensQuery {
|
||||
return ArcgisOauthTokens.Query(append(mods,
|
||||
sm.Where(ArcgisOauthTokens.Columns.UserID.EQ(psql.Arg(o.ID))),
|
||||
)...)
|
||||
}
|
||||
|
||||
func (os UserSlice) UserOauthTokens(mods ...bob.Mod[*dialect.SelectQuery]) ArcgisOauthTokensQuery {
|
||||
pkID := make(pgtypes.Array[int32], 0, len(os))
|
||||
for _, o := range os {
|
||||
if o == nil {
|
||||
continue
|
||||
}
|
||||
pkID = append(pkID, o.ID)
|
||||
}
|
||||
PKArgExpr := psql.Select(sm.Columns(
|
||||
psql.F("unnest", psql.Cast(psql.Arg(pkID), "integer[]")),
|
||||
))
|
||||
|
||||
return ArcgisOauthTokens.Query(append(mods,
|
||||
sm.Where(psql.Group(ArcgisOauthTokens.Columns.UserID).OP("IN", PKArgExpr)),
|
||||
)...)
|
||||
}
|
||||
|
||||
// PublicUserUser starts a query for related objects on arcgis.user_
|
||||
func (o *User) PublicUserUser(mods ...bob.Mod[*dialect.SelectQuery]) ArcgisUsersQuery {
|
||||
return ArcgisUsers.Query(append(mods,
|
||||
sm.Where(ArcgisUsers.Columns.PublicUserID.EQ(psql.Arg(o.ID))),
|
||||
)...)
|
||||
}
|
||||
|
||||
func (os UserSlice) PublicUserUser(mods ...bob.Mod[*dialect.SelectQuery]) ArcgisUsersQuery {
|
||||
pkID := make(pgtypes.Array[int32], 0, len(os))
|
||||
for _, o := range os {
|
||||
if o == nil {
|
||||
continue
|
||||
}
|
||||
pkID = append(pkID, o.ID)
|
||||
}
|
||||
PKArgExpr := psql.Select(sm.Columns(
|
||||
psql.F("unnest", psql.Cast(psql.Arg(pkID), "integer[]")),
|
||||
))
|
||||
|
||||
return ArcgisUsers.Query(append(mods,
|
||||
sm.Where(psql.Group(ArcgisUsers.Columns.PublicUserID).OP("IN", PKArgExpr)),
|
||||
)...)
|
||||
}
|
||||
|
||||
// CreatorTextJobs starts a query for related objects on comms.text_job
|
||||
func (o *User) CreatorTextJobs(mods ...bob.Mod[*dialect.SelectQuery]) CommsTextJobsQuery {
|
||||
return CommsTextJobs.Query(append(mods,
|
||||
|
|
@ -1516,142 +1466,6 @@ func (os UserSlice) Organization(mods ...bob.Mod[*dialect.SelectQuery]) Organiza
|
|||
)...)
|
||||
}
|
||||
|
||||
func insertUserUserOauthTokens0(ctx context.Context, exec bob.Executor, arcgisOauthTokens1 []*ArcgisOauthTokenSetter, user0 *User) (ArcgisOauthTokenSlice, error) {
|
||||
for i := range arcgisOauthTokens1 {
|
||||
arcgisOauthTokens1[i].UserID = omit.From(user0.ID)
|
||||
}
|
||||
|
||||
ret, err := ArcgisOauthTokens.Insert(bob.ToMods(arcgisOauthTokens1...)).All(ctx, exec)
|
||||
if err != nil {
|
||||
return ret, fmt.Errorf("insertUserUserOauthTokens0: %w", err)
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func attachUserUserOauthTokens0(ctx context.Context, exec bob.Executor, count int, arcgisOauthTokens1 ArcgisOauthTokenSlice, user0 *User) (ArcgisOauthTokenSlice, error) {
|
||||
setter := &ArcgisOauthTokenSetter{
|
||||
UserID: omit.From(user0.ID),
|
||||
}
|
||||
|
||||
err := arcgisOauthTokens1.UpdateAll(ctx, exec, *setter)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("attachUserUserOauthTokens0: %w", err)
|
||||
}
|
||||
|
||||
return arcgisOauthTokens1, nil
|
||||
}
|
||||
|
||||
func (user0 *User) InsertUserOauthTokens(ctx context.Context, exec bob.Executor, related ...*ArcgisOauthTokenSetter) error {
|
||||
if len(related) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
var err error
|
||||
|
||||
arcgisOauthTokens1, err := insertUserUserOauthTokens0(ctx, exec, related, user0)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
user0.R.UserOauthTokens = append(user0.R.UserOauthTokens, arcgisOauthTokens1...)
|
||||
|
||||
for _, rel := range arcgisOauthTokens1 {
|
||||
rel.R.UserUser = user0
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (user0 *User) AttachUserOauthTokens(ctx context.Context, exec bob.Executor, related ...*ArcgisOauthToken) error {
|
||||
if len(related) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
var err error
|
||||
arcgisOauthTokens1 := ArcgisOauthTokenSlice(related)
|
||||
|
||||
_, err = attachUserUserOauthTokens0(ctx, exec, len(related), arcgisOauthTokens1, user0)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
user0.R.UserOauthTokens = append(user0.R.UserOauthTokens, arcgisOauthTokens1...)
|
||||
|
||||
for _, rel := range related {
|
||||
rel.R.UserUser = user0
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func insertUserPublicUserUser0(ctx context.Context, exec bob.Executor, arcgisusers1 []*ArcgisUserSetter, user0 *User) (ArcgisUserSlice, error) {
|
||||
for i := range arcgisusers1 {
|
||||
arcgisusers1[i].PublicUserID = omit.From(user0.ID)
|
||||
}
|
||||
|
||||
ret, err := ArcgisUsers.Insert(bob.ToMods(arcgisusers1...)).All(ctx, exec)
|
||||
if err != nil {
|
||||
return ret, fmt.Errorf("insertUserPublicUserUser0: %w", err)
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func attachUserPublicUserUser0(ctx context.Context, exec bob.Executor, count int, arcgisusers1 ArcgisUserSlice, user0 *User) (ArcgisUserSlice, error) {
|
||||
setter := &ArcgisUserSetter{
|
||||
PublicUserID: omit.From(user0.ID),
|
||||
}
|
||||
|
||||
err := arcgisusers1.UpdateAll(ctx, exec, *setter)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("attachUserPublicUserUser0: %w", err)
|
||||
}
|
||||
|
||||
return arcgisusers1, nil
|
||||
}
|
||||
|
||||
func (user0 *User) InsertPublicUserUser(ctx context.Context, exec bob.Executor, related ...*ArcgisUserSetter) error {
|
||||
if len(related) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
var err error
|
||||
|
||||
arcgisusers1, err := insertUserPublicUserUser0(ctx, exec, related, user0)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
user0.R.PublicUserUser = append(user0.R.PublicUserUser, arcgisusers1...)
|
||||
|
||||
for _, rel := range arcgisusers1 {
|
||||
rel.R.PublicUserUser = user0
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (user0 *User) AttachPublicUserUser(ctx context.Context, exec bob.Executor, related ...*ArcgisUser) error {
|
||||
if len(related) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
var err error
|
||||
arcgisusers1 := ArcgisUserSlice(related)
|
||||
|
||||
_, err = attachUserPublicUserUser0(ctx, exec, len(related), arcgisusers1, user0)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
user0.R.PublicUserUser = append(user0.R.PublicUserUser, arcgisusers1...)
|
||||
|
||||
for _, rel := range related {
|
||||
rel.R.PublicUserUser = user0
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func insertUserCreatorTextJobs0(ctx context.Context, exec bob.Executor, commsTextJobs1 []*CommsTextJobSetter, user0 *User) (CommsTextJobSlice, error) {
|
||||
for i := range commsTextJobs1 {
|
||||
commsTextJobs1[i].CreatorID = omitnull.From(user0.ID)
|
||||
|
|
@ -3724,34 +3538,6 @@ func (o *User) Preload(name string, retrieved any) error {
|
|||
}
|
||||
|
||||
switch name {
|
||||
case "UserOauthTokens":
|
||||
rels, ok := retrieved.(ArcgisOauthTokenSlice)
|
||||
if !ok {
|
||||
return fmt.Errorf("user cannot load %T as %q", retrieved, name)
|
||||
}
|
||||
|
||||
o.R.UserOauthTokens = rels
|
||||
|
||||
for _, rel := range rels {
|
||||
if rel != nil {
|
||||
rel.R.UserUser = o
|
||||
}
|
||||
}
|
||||
return nil
|
||||
case "PublicUserUser":
|
||||
rels, ok := retrieved.(ArcgisUserSlice)
|
||||
if !ok {
|
||||
return fmt.Errorf("user cannot load %T as %q", retrieved, name)
|
||||
}
|
||||
|
||||
o.R.PublicUserUser = rels
|
||||
|
||||
for _, rel := range rels {
|
||||
if rel != nil {
|
||||
rel.R.PublicUserUser = o
|
||||
}
|
||||
}
|
||||
return nil
|
||||
case "CreatorTextJobs":
|
||||
rels, ok := retrieved.(CommsTextJobSlice)
|
||||
if !ok {
|
||||
|
|
@ -4198,8 +3984,6 @@ func buildUserPreloader() userPreloader {
|
|||
}
|
||||
|
||||
type userThenLoader[Q orm.Loadable] struct {
|
||||
UserOauthTokens func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
|
||||
PublicUserUser func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
|
||||
CreatorTextJobs func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
|
||||
ClosedByCommunications func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
|
||||
InvalidatedByCommunications func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
|
||||
|
|
@ -4233,12 +4017,6 @@ type userThenLoader[Q orm.Loadable] struct {
|
|||
}
|
||||
|
||||
func buildUserThenLoader[Q orm.Loadable]() userThenLoader[Q] {
|
||||
type UserOauthTokensLoadInterface interface {
|
||||
LoadUserOauthTokens(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
|
||||
}
|
||||
type PublicUserUserLoadInterface interface {
|
||||
LoadPublicUserUser(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
|
||||
}
|
||||
type CreatorTextJobsLoadInterface interface {
|
||||
LoadCreatorTextJobs(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
|
||||
}
|
||||
|
|
@ -4331,18 +4109,6 @@ func buildUserThenLoader[Q orm.Loadable]() userThenLoader[Q] {
|
|||
}
|
||||
|
||||
return userThenLoader[Q]{
|
||||
UserOauthTokens: thenLoadBuilder[Q](
|
||||
"UserOauthTokens",
|
||||
func(ctx context.Context, exec bob.Executor, retrieved UserOauthTokensLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
||||
return retrieved.LoadUserOauthTokens(ctx, exec, mods...)
|
||||
},
|
||||
),
|
||||
PublicUserUser: thenLoadBuilder[Q](
|
||||
"PublicUserUser",
|
||||
func(ctx context.Context, exec bob.Executor, retrieved PublicUserUserLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
||||
return retrieved.LoadPublicUserUser(ctx, exec, mods...)
|
||||
},
|
||||
),
|
||||
CreatorTextJobs: thenLoadBuilder[Q](
|
||||
"CreatorTextJobs",
|
||||
func(ctx context.Context, exec bob.Executor, retrieved CreatorTextJobsLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
||||
|
|
@ -4526,128 +4292,6 @@ func buildUserThenLoader[Q orm.Loadable]() userThenLoader[Q] {
|
|||
}
|
||||
}
|
||||
|
||||
// LoadUserOauthTokens loads the user's UserOauthTokens into the .R struct
|
||||
func (o *User) LoadUserOauthTokens(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
||||
if o == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Reset the relationship
|
||||
o.R.UserOauthTokens = nil
|
||||
|
||||
related, err := o.UserOauthTokens(mods...).All(ctx, exec)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, rel := range related {
|
||||
rel.R.UserUser = o
|
||||
}
|
||||
|
||||
o.R.UserOauthTokens = related
|
||||
return nil
|
||||
}
|
||||
|
||||
// LoadUserOauthTokens loads the user's UserOauthTokens into the .R struct
|
||||
func (os UserSlice) LoadUserOauthTokens(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
||||
if len(os) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
arcgisOauthTokens, err := os.UserOauthTokens(mods...).All(ctx, exec)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, o := range os {
|
||||
if o == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
o.R.UserOauthTokens = nil
|
||||
}
|
||||
|
||||
for _, o := range os {
|
||||
if o == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
for _, rel := range arcgisOauthTokens {
|
||||
|
||||
if !(o.ID == rel.UserID) {
|
||||
continue
|
||||
}
|
||||
|
||||
rel.R.UserUser = o
|
||||
|
||||
o.R.UserOauthTokens = append(o.R.UserOauthTokens, rel)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// LoadPublicUserUser loads the user's PublicUserUser into the .R struct
|
||||
func (o *User) LoadPublicUserUser(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
||||
if o == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Reset the relationship
|
||||
o.R.PublicUserUser = nil
|
||||
|
||||
related, err := o.PublicUserUser(mods...).All(ctx, exec)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, rel := range related {
|
||||
rel.R.PublicUserUser = o
|
||||
}
|
||||
|
||||
o.R.PublicUserUser = related
|
||||
return nil
|
||||
}
|
||||
|
||||
// LoadPublicUserUser loads the user's PublicUserUser into the .R struct
|
||||
func (os UserSlice) LoadPublicUserUser(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
||||
if len(os) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
arcgisusers, err := os.PublicUserUser(mods...).All(ctx, exec)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, o := range os {
|
||||
if o == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
o.R.PublicUserUser = nil
|
||||
}
|
||||
|
||||
for _, o := range os {
|
||||
if o == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
for _, rel := range arcgisusers {
|
||||
|
||||
if !(o.ID == rel.PublicUserID) {
|
||||
continue
|
||||
}
|
||||
|
||||
rel.R.PublicUserUser = o
|
||||
|
||||
o.R.PublicUserUser = append(o.R.PublicUserUser, rel)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// LoadCreatorTextJobs loads the user's CreatorTextJobs into the .R struct
|
||||
func (o *User) LoadCreatorTextJobs(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
|
||||
if o == nil {
|
||||
|
|
|
|||
24
db/query/arcgis/account.go
Normal file
24
db/query/arcgis/account.go
Normal file
|
|
@ -0,0 +1,24 @@
|
|||
package arcgis
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/Gleipnir-Technology/bob"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/gen/nidus-sync/arcgis/model"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/gen/nidus-sync/arcgis/table"
|
||||
"github.com/go-jet/jet/v2/postgres"
|
||||
)
|
||||
|
||||
func AccountFromID(ctx context.Context, org_id string) (*model.Account, error) {
|
||||
statement := table.Account.SELECT(
|
||||
table.Account.AllColumns,
|
||||
).FROM(table.Account).
|
||||
WHERE(table.Account.ID.EQ(postgres.String(org_id)))
|
||||
return db.ExecuteOne[model.Account](ctx, statement)
|
||||
}
|
||||
func AccountInsert(ctx context.Context, txn bob.Tx, m *model.Account) (*model.Account, error) {
|
||||
statement := table.Account.INSERT(table.Account.AllColumns).
|
||||
MODEL(m)
|
||||
return db.ExecuteOneTx[model.Account](ctx, txn, statement)
|
||||
}
|
||||
87
db/query/arcgis/oauth.go
Normal file
87
db/query/arcgis/oauth.go
Normal file
|
|
@ -0,0 +1,87 @@
|
|||
package arcgis
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/gen/nidus-sync/arcgis/model"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/gen/nidus-sync/arcgis/table"
|
||||
"github.com/go-jet/jet/v2/postgres"
|
||||
)
|
||||
|
||||
func OAuthTokenInsert(ctx context.Context, m *model.OAuthToken) (*model.OAuthToken, error) {
|
||||
statement := table.OAuthToken.INSERT(table.OAuthToken.MutableColumns).
|
||||
MODEL(m)
|
||||
return db.ExecuteOne[model.OAuthToken](ctx, statement)
|
||||
}
|
||||
func OAuthTokenInvalidate(ctx context.Context, id int64) error {
|
||||
statement := table.OAuthToken.UPDATE().
|
||||
SET(table.OAuthToken.InvalidatedAt.SET(postgres.LOCALTIMESTAMP())).
|
||||
WHERE(table.OAuthToken.ID.EQ(postgres.Int(id)))
|
||||
return db.ExecuteNone(ctx, statement)
|
||||
}
|
||||
func OAuthTokensValid(ctx context.Context) ([]*model.OAuthToken, error) {
|
||||
statement := table.OAuthToken.SELECT(table.OAuthToken.AllColumns).
|
||||
FROM(table.OAuthToken).
|
||||
WHERE(table.OAuthToken.InvalidatedAt.IS_NULL())
|
||||
return db.ExecuteMany[model.OAuthToken](ctx, statement)
|
||||
}
|
||||
func OAuthTokenFromID(ctx context.Context, id int64) (*model.OAuthToken, error) {
|
||||
statement := table.OAuthToken.SELECT(
|
||||
table.OAuthToken.AllColumns,
|
||||
).FROM(table.OAuthToken).
|
||||
WHERE(table.OAuthToken.ID.EQ(postgres.Int(id)))
|
||||
return db.ExecuteOne[model.OAuthToken](ctx, statement)
|
||||
}
|
||||
func OAuthTokenForUser(ctx context.Context, user_id int64) (*model.OAuthToken, error) {
|
||||
statement := table.OAuthToken.SELECT(table.OAuthToken.AllColumns).
|
||||
FROM(table.OAuthToken).
|
||||
WHERE(table.OAuthToken.InvalidatedAt.IS_NULL().AND(
|
||||
table.OAuthToken.UserID.EQ(postgres.Int(user_id)),
|
||||
)).
|
||||
ORDER_BY(table.OAuthToken.Created.DESC()).
|
||||
LIMIT(1)
|
||||
return db.ExecuteOne[model.OAuthToken](ctx, statement)
|
||||
}
|
||||
func OAuthTokensForUser(ctx context.Context, user_id int64) ([]*model.OAuthToken, error) {
|
||||
statement := table.OAuthToken.SELECT(table.OAuthToken.AllColumns).
|
||||
FROM(table.OAuthToken).
|
||||
WHERE(table.OAuthToken.InvalidatedAt.IS_NULL().AND(
|
||||
table.OAuthToken.UserID.EQ(postgres.Int(user_id)),
|
||||
))
|
||||
return db.ExecuteMany[model.OAuthToken](ctx, statement)
|
||||
}
|
||||
func OAuthTokenForUserExists(ctx context.Context, user_id int64) (*bool, error) {
|
||||
statement := table.OAuthToken.SELECT(postgres.Bool(true)).
|
||||
FROM(table.OAuthToken).
|
||||
WHERE(table.OAuthToken.UserID.EQ(postgres.Int(user_id))).
|
||||
LIMIT(1)
|
||||
return db.ExecuteOne[bool](ctx, statement)
|
||||
}
|
||||
func OAuthTokenUpdateAccessToken(ctx context.Context, oauth_id int64, updates *model.OAuthToken) error {
|
||||
statement := table.OAuthToken.UPDATE(
|
||||
table.OAuthToken.AccessToken,
|
||||
table.OAuthToken.AccessTokenExpires,
|
||||
table.OAuthToken.Username,
|
||||
).MODEL(updates).
|
||||
WHERE(table.OAuthToken.ID.EQ(postgres.Int(oauth_id)))
|
||||
return db.ExecuteNone(ctx, statement)
|
||||
}
|
||||
func OAuthTokenUpdateRefreshToken(ctx context.Context, oauth_id int64, updates *model.OAuthToken) error {
|
||||
statement := table.OAuthToken.UPDATE(
|
||||
table.OAuthToken.RefreshToken,
|
||||
table.OAuthToken.RefreshTokenExpires,
|
||||
table.OAuthToken.Username,
|
||||
).MODEL(updates).
|
||||
WHERE(table.OAuthToken.ID.EQ(postgres.Int(oauth_id)))
|
||||
return db.ExecuteNone(ctx, statement)
|
||||
|
||||
}
|
||||
func OAuthTokenUpdateLicense(ctx context.Context, refresh_token string, updates *model.OAuthToken) error {
|
||||
statement := table.OAuthToken.UPDATE(
|
||||
table.OAuthToken.ArcgisID,
|
||||
table.OAuthToken.ArcgisLicenseTypeID,
|
||||
).MODEL(updates).
|
||||
WHERE(table.OAuthToken.RefreshToken.EQ(postgres.String(refresh_token)))
|
||||
return db.ExecuteNone(ctx, statement)
|
||||
}
|
||||
31
db/query/arcgis/service_feature.go
Normal file
31
db/query/arcgis/service_feature.go
Normal file
|
|
@ -0,0 +1,31 @@
|
|||
package arcgis
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/Gleipnir-Technology/bob"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/gen/nidus-sync/arcgis/model"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/gen/nidus-sync/arcgis/table"
|
||||
"github.com/go-jet/jet/v2/postgres"
|
||||
)
|
||||
|
||||
func ServiceFeatureFromID(ctx context.Context, id string) (*model.ServiceFeature, error) {
|
||||
statement := table.ServiceFeature.SELECT(
|
||||
table.ServiceFeature.AllColumns,
|
||||
).FROM(table.ServiceFeature).
|
||||
WHERE(table.ServiceFeature.ItemID.EQ(postgres.String(id)))
|
||||
return db.ExecuteOne[model.ServiceFeature](ctx, statement)
|
||||
}
|
||||
func ServiceFeatureFromURL(ctx context.Context, url string) (*model.ServiceFeature, error) {
|
||||
statement := table.ServiceFeature.SELECT(
|
||||
table.ServiceFeature.AllColumns,
|
||||
).FROM(table.ServiceFeature).
|
||||
WHERE(table.ServiceFeature.URL.EQ(postgres.String(url)))
|
||||
return db.ExecuteOne[model.ServiceFeature](ctx, statement)
|
||||
}
|
||||
func ServiceFeatureInsert(ctx context.Context, txn bob.Tx, m *model.ServiceFeature) error {
|
||||
statement := table.ServiceMap.INSERT(table.ServiceMap.MutableColumns).
|
||||
MODEL(m)
|
||||
return db.ExecuteNoneTx(ctx, txn, statement)
|
||||
}
|
||||
31
db/query/arcgis/service_map.go
Normal file
31
db/query/arcgis/service_map.go
Normal file
|
|
@ -0,0 +1,31 @@
|
|||
package arcgis
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/Gleipnir-Technology/bob"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/gen/nidus-sync/arcgis/model"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/gen/nidus-sync/arcgis/table"
|
||||
"github.com/go-jet/jet/v2/postgres"
|
||||
)
|
||||
|
||||
func ServiceMapFromID(ctx context.Context, id string) (*model.ServiceMap, error) {
|
||||
statement := table.ServiceMap.SELECT(
|
||||
table.ServiceMap.AllColumns,
|
||||
).FROM(table.ServiceMap).
|
||||
WHERE(table.ServiceMap.ArcgisID.EQ(postgres.String(id)))
|
||||
return db.ExecuteOne[model.ServiceMap](ctx, statement)
|
||||
}
|
||||
func ServiceMapsFromAccountID(ctx context.Context, account_id string) ([]*model.ServiceMap, error) {
|
||||
statement := table.ServiceMap.SELECT(
|
||||
table.ServiceMap.AllColumns,
|
||||
).FROM(table.ServiceMap).
|
||||
WHERE(table.ServiceMap.AccountID.EQ(postgres.String(account_id)))
|
||||
return db.ExecuteMany[model.ServiceMap](ctx, statement)
|
||||
}
|
||||
func ServiceMapInsert(ctx context.Context, txn bob.Tx, m *model.ServiceMap) error {
|
||||
statement := table.ServiceMap.INSERT(table.ServiceMap.MutableColumns).
|
||||
MODEL(m)
|
||||
return db.ExecuteNoneTx(ctx, txn, statement)
|
||||
}
|
||||
24
db/query/arcgis/user.go
Normal file
24
db/query/arcgis/user.go
Normal file
|
|
@ -0,0 +1,24 @@
|
|||
package arcgis
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/Gleipnir-Technology/bob"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/gen/nidus-sync/arcgis/model"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/gen/nidus-sync/arcgis/table"
|
||||
"github.com/go-jet/jet/v2/postgres"
|
||||
)
|
||||
|
||||
func UserFromID(ctx context.Context, id string) (*model.User, error) {
|
||||
statement := table.User.SELECT(table.User.AllColumns).
|
||||
FROM(table.User).
|
||||
WHERE(table.User.ID.EQ(postgres.String(id)))
|
||||
return db.ExecuteOne[model.User](ctx, statement)
|
||||
}
|
||||
func UserInsert(ctx context.Context, txn bob.Tx, m *model.User) (*model.User, error) {
|
||||
statement := table.User.INSERT(table.User.MutableColumns).
|
||||
MODEL(m).
|
||||
RETURNING(table.User.AllColumns)
|
||||
return db.ExecuteOneTx[model.User](ctx, txn, statement)
|
||||
}
|
||||
22
db/query/arcgis/user_privileges.go
Normal file
22
db/query/arcgis/user_privileges.go
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
package arcgis
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/Gleipnir-Technology/bob"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/gen/nidus-sync/arcgis/model"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/gen/nidus-sync/arcgis/table"
|
||||
"github.com/go-jet/jet/v2/postgres"
|
||||
)
|
||||
|
||||
func UserPrivilegesDeleteByUserID(ctx context.Context, txn bob.Tx, id string) error {
|
||||
statement := table.User.DELETE().
|
||||
WHERE(table.User.ID.EQ(postgres.String(id)))
|
||||
return db.ExecuteNoneTx(ctx, txn, statement)
|
||||
}
|
||||
func UserPrivilegeInsert(ctx context.Context, txn bob.Tx, m *model.UserPrivilege) error {
|
||||
statement := table.UserPrivilege.INSERT(table.UserPrivilege.MutableColumns).
|
||||
MODEL(m)
|
||||
return db.ExecuteNoneTx(ctx, txn, statement)
|
||||
}
|
||||
8
db/types/box2d.go
Normal file
8
db/types/box2d.go
Normal file
|
|
@ -0,0 +1,8 @@
|
|||
package types
|
||||
|
||||
// Box2D is an axis-aligned 2D bounding box described by its min/max corner
// coordinates. Field order is kept as declared for any positional literals.
// NOTE(review): presumably mirrors the PostGIS box2d type — confirm with
// the call sites that scan it.
type Box2D struct {
	XMax float64 // largest X coordinate
	YMax float64 // largest Y coordinate
	XMin float64 // smallest X coordinate
	YMin float64 // smallest Y coordinate
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue