Save tags on pool rows, show errors in summary table

This commit is contained in:
Eli Ribble 2026-02-16 16:38:04 +00:00
parent 123a4bf590
commit ef569aef18
No known key found for this signature in database
11 changed files with 305 additions and 172 deletions

View file

@ -96,15 +96,6 @@ var FileuploadPools = Table[
Generated: false,
AutoIncr: false,
},
Geom: column{
Name: "geom",
DBType: "geometry",
Default: "NULL",
Comment: "",
Nullable: true,
Generated: false,
AutoIncr: false,
},
H3cell: column{
Name: "h3cell",
DBType: "h3index",
@ -204,6 +195,24 @@ var FileuploadPools = Table[
Generated: false,
AutoIncr: false,
},
Geom: column{
Name: "geom",
DBType: "geometry",
Default: "NULL",
Comment: "",
Nullable: true,
Generated: false,
AutoIncr: false,
},
Tags: column{
Name: "tags",
DBType: "hstore",
Default: "",
Comment: "",
Nullable: false,
Generated: false,
AutoIncr: false,
},
},
Indexes: fileuploadPoolIndexes{
PoolPkey: index{
@ -295,7 +304,6 @@ type fileuploadPoolColumns struct {
CreatorID column
CSVFile column
Deleted column
Geom column
H3cell column
ID column
IsInDistrict column
@ -307,11 +315,13 @@ type fileuploadPoolColumns struct {
Version column
PropertyOwnerPhoneE164 column
ResidentPhoneE164 column
Geom column
Tags column
}
func (c fileuploadPoolColumns) AsSlice() []column {
return []column{
c.AddressCity, c.AddressPostalCode, c.AddressStreet, c.Committed, c.Condition, c.Created, c.CreatorID, c.CSVFile, c.Deleted, c.Geom, c.H3cell, c.ID, c.IsInDistrict, c.IsNew, c.Notes, c.OrganizationID, c.PropertyOwnerName, c.ResidentOwned, c.Version, c.PropertyOwnerPhoneE164, c.ResidentPhoneE164,
c.AddressCity, c.AddressPostalCode, c.AddressStreet, c.Committed, c.Condition, c.Created, c.CreatorID, c.CSVFile, c.Deleted, c.H3cell, c.ID, c.IsInDistrict, c.IsNew, c.Notes, c.OrganizationID, c.PropertyOwnerName, c.ResidentOwned, c.Version, c.PropertyOwnerPhoneE164, c.ResidentPhoneE164, c.Geom, c.Tags,
}
}

View file

@ -2425,7 +2425,6 @@ func (f *Factory) FromExistingFileuploadPool(m *models.FileuploadPool) *Fileuplo
o.CreatorID = func() int32 { return m.CreatorID }
o.CSVFile = func() int32 { return m.CSVFile }
o.Deleted = func() null.Val[time.Time] { return m.Deleted }
o.Geom = func() null.Val[string] { return m.Geom }
o.H3cell = func() null.Val[string] { return m.H3cell }
o.ID = func() int32 { return m.ID }
o.IsInDistrict = func() bool { return m.IsInDistrict }
@ -2437,6 +2436,8 @@ func (f *Factory) FromExistingFileuploadPool(m *models.FileuploadPool) *Fileuplo
o.Version = func() int32 { return m.Version }
o.PropertyOwnerPhoneE164 = func() null.Val[string] { return m.PropertyOwnerPhoneE164 }
o.ResidentPhoneE164 = func() null.Val[string] { return m.ResidentPhoneE164 }
o.Geom = func() null.Val[string] { return m.Geom }
o.Tags = func() pgtypes.HStore { return m.Tags }
ctx := context.Background()
if m.R.CreatorUser != nil {

View file

@ -9,6 +9,7 @@ import (
"time"
"github.com/Gleipnir-Technology/bob"
"github.com/Gleipnir-Technology/bob/types/pgtypes"
enums "github.com/Gleipnir-Technology/nidus-sync/db/enums"
models "github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/aarondl/opt/null"
@ -47,7 +48,6 @@ type FileuploadPoolTemplate struct {
CreatorID func() int32
CSVFile func() int32
Deleted func() null.Val[time.Time]
Geom func() null.Val[string]
H3cell func() null.Val[string]
ID func() int32
IsInDistrict func() bool
@ -59,6 +59,8 @@ type FileuploadPoolTemplate struct {
Version func() int32
PropertyOwnerPhoneE164 func() null.Val[string]
ResidentPhoneE164 func() null.Val[string]
Geom func() null.Val[string]
Tags func() pgtypes.HStore
r fileuploadPoolR
f *Factory
@ -177,10 +179,6 @@ func (o FileuploadPoolTemplate) BuildSetter() *models.FileuploadPoolSetter {
val := o.Deleted()
m.Deleted = omitnull.FromNull(val)
}
if o.Geom != nil {
val := o.Geom()
m.Geom = omitnull.FromNull(val)
}
if o.H3cell != nil {
val := o.H3cell()
m.H3cell = omitnull.FromNull(val)
@ -225,6 +223,14 @@ func (o FileuploadPoolTemplate) BuildSetter() *models.FileuploadPoolSetter {
val := o.ResidentPhoneE164()
m.ResidentPhoneE164 = omitnull.FromNull(val)
}
if o.Geom != nil {
val := o.Geom()
m.Geom = omitnull.FromNull(val)
}
if o.Tags != nil {
val := o.Tags()
m.Tags = omit.From(val)
}
return m
}
@ -274,9 +280,6 @@ func (o FileuploadPoolTemplate) Build() *models.FileuploadPool {
if o.Deleted != nil {
m.Deleted = o.Deleted()
}
if o.Geom != nil {
m.Geom = o.Geom()
}
if o.H3cell != nil {
m.H3cell = o.H3cell()
}
@ -310,6 +313,12 @@ func (o FileuploadPoolTemplate) Build() *models.FileuploadPool {
if o.ResidentPhoneE164 != nil {
m.ResidentPhoneE164 = o.ResidentPhoneE164()
}
if o.Geom != nil {
m.Geom = o.Geom()
}
if o.Tags != nil {
m.Tags = o.Tags()
}
o.setModelRels(m)
@ -386,6 +395,10 @@ func ensureCreatableFileuploadPool(m *models.FileuploadPoolSetter) {
val := random_int32(nil)
m.Version = omit.From(val)
}
if !(m.Tags.IsValue()) {
val := random_pgtypes_HStore(nil)
m.Tags = omit.From(val)
}
}
// insertOptRels creates and inserts any optional the relationships on *models.FileuploadPool
@ -588,7 +601,6 @@ func (m fileuploadPoolMods) RandomizeAllColumns(f *faker.Faker) FileuploadPoolMo
FileuploadPoolMods.RandomCreatorID(f),
FileuploadPoolMods.RandomCSVFile(f),
FileuploadPoolMods.RandomDeleted(f),
FileuploadPoolMods.RandomGeom(f),
FileuploadPoolMods.RandomH3cell(f),
FileuploadPoolMods.RandomID(f),
FileuploadPoolMods.RandomIsInDistrict(f),
@ -600,6 +612,8 @@ func (m fileuploadPoolMods) RandomizeAllColumns(f *faker.Faker) FileuploadPoolMo
FileuploadPoolMods.RandomVersion(f),
FileuploadPoolMods.RandomPropertyOwnerPhoneE164(f),
FileuploadPoolMods.RandomResidentPhoneE164(f),
FileuploadPoolMods.RandomGeom(f),
FileuploadPoolMods.RandomTags(f),
}
}
@ -904,59 +918,6 @@ func (m fileuploadPoolMods) RandomDeletedNotNull(f *faker.Faker) FileuploadPoolM
})
}
// Set the model columns to this value
func (m fileuploadPoolMods) Geom(val null.Val[string]) FileuploadPoolMod {
return FileuploadPoolModFunc(func(_ context.Context, o *FileuploadPoolTemplate) {
o.Geom = func() null.Val[string] { return val }
})
}
// Set the Column from the function
func (m fileuploadPoolMods) GeomFunc(f func() null.Val[string]) FileuploadPoolMod {
return FileuploadPoolModFunc(func(_ context.Context, o *FileuploadPoolTemplate) {
o.Geom = f
})
}
// Clear any values for the column
func (m fileuploadPoolMods) UnsetGeom() FileuploadPoolMod {
return FileuploadPoolModFunc(func(_ context.Context, o *FileuploadPoolTemplate) {
o.Geom = nil
})
}
// Generates a random value for the column using the given faker
// if faker is nil, a default faker is used
// The generated value is sometimes null
func (m fileuploadPoolMods) RandomGeom(f *faker.Faker) FileuploadPoolMod {
return FileuploadPoolModFunc(func(_ context.Context, o *FileuploadPoolTemplate) {
o.Geom = func() null.Val[string] {
if f == nil {
f = &defaultFaker
}
val := random_string(f)
return null.From(val)
}
})
}
// Generates a random value for the column using the given faker
// if faker is nil, a default faker is used
// The generated value is never null
func (m fileuploadPoolMods) RandomGeomNotNull(f *faker.Faker) FileuploadPoolMod {
return FileuploadPoolModFunc(func(_ context.Context, o *FileuploadPoolTemplate) {
o.Geom = func() null.Val[string] {
if f == nil {
f = &defaultFaker
}
val := random_string(f)
return null.From(val)
}
})
}
// Set the model columns to this value
func (m fileuploadPoolMods) H3cell(val null.Val[string]) FileuploadPoolMod {
return FileuploadPoolModFunc(func(_ context.Context, o *FileuploadPoolTemplate) {
@ -1386,6 +1347,90 @@ func (m fileuploadPoolMods) RandomResidentPhoneE164NotNull(f *faker.Faker) Fileu
})
}
// Set the model columns to this value
func (m fileuploadPoolMods) Geom(val null.Val[string]) FileuploadPoolMod {
return FileuploadPoolModFunc(func(_ context.Context, o *FileuploadPoolTemplate) {
o.Geom = func() null.Val[string] { return val }
})
}
// Set the Column from the function
func (m fileuploadPoolMods) GeomFunc(f func() null.Val[string]) FileuploadPoolMod {
return FileuploadPoolModFunc(func(_ context.Context, o *FileuploadPoolTemplate) {
o.Geom = f
})
}
// Clear any values for the column
func (m fileuploadPoolMods) UnsetGeom() FileuploadPoolMod {
return FileuploadPoolModFunc(func(_ context.Context, o *FileuploadPoolTemplate) {
o.Geom = nil
})
}
// Generates a random value for the column using the given faker
// if faker is nil, a default faker is used
// The generated value is sometimes null
func (m fileuploadPoolMods) RandomGeom(f *faker.Faker) FileuploadPoolMod {
return FileuploadPoolModFunc(func(_ context.Context, o *FileuploadPoolTemplate) {
o.Geom = func() null.Val[string] {
if f == nil {
f = &defaultFaker
}
val := random_string(f)
return null.From(val)
}
})
}
// Generates a random value for the column using the given faker
// if faker is nil, a default faker is used
// The generated value is never null
func (m fileuploadPoolMods) RandomGeomNotNull(f *faker.Faker) FileuploadPoolMod {
return FileuploadPoolModFunc(func(_ context.Context, o *FileuploadPoolTemplate) {
o.Geom = func() null.Val[string] {
if f == nil {
f = &defaultFaker
}
val := random_string(f)
return null.From(val)
}
})
}
// Set the model columns to this value
func (m fileuploadPoolMods) Tags(val pgtypes.HStore) FileuploadPoolMod {
return FileuploadPoolModFunc(func(_ context.Context, o *FileuploadPoolTemplate) {
o.Tags = func() pgtypes.HStore { return val }
})
}
// Set the Column from the function
func (m fileuploadPoolMods) TagsFunc(f func() pgtypes.HStore) FileuploadPoolMod {
return FileuploadPoolModFunc(func(_ context.Context, o *FileuploadPoolTemplate) {
o.Tags = f
})
}
// Clear any values for the column
func (m fileuploadPoolMods) UnsetTags() FileuploadPoolMod {
return FileuploadPoolModFunc(func(_ context.Context, o *FileuploadPoolTemplate) {
o.Tags = nil
})
}
// Generates a random value for the column using the given faker
// if faker is nil, a default faker is used
func (m fileuploadPoolMods) RandomTags(f *faker.Faker) FileuploadPoolMod {
return FileuploadPoolModFunc(func(_ context.Context, o *FileuploadPoolTemplate) {
o.Tags = func() pgtypes.HStore {
return random_pgtypes_HStore(f)
}
})
}
func (m fileuploadPoolMods) WithParentsCascading() FileuploadPoolMod {
return FileuploadPoolModFunc(func(ctx context.Context, o *FileuploadPoolTemplate) {
if isDone, _ := fileuploadPoolWithParentsCascadingCtx.Value(ctx); isDone {

View file

@ -0,0 +1,4 @@
-- +goose Up
ALTER TABLE fileupload.pool ADD COLUMN tags HSTORE NOT NULL;
-- +goose Down
ALTER TABLE fileupload.pool DROP COLUMN tags;

View file

@ -36,7 +36,6 @@ type FileuploadPool struct {
CreatorID int32 `db:"creator_id" `
CSVFile int32 `db:"csv_file" `
Deleted null.Val[time.Time] `db:"deleted" `
Geom null.Val[string] `db:"geom" `
H3cell null.Val[string] `db:"h3cell" `
ID int32 `db:"id,pk" `
IsInDistrict bool `db:"is_in_district" `
@ -48,6 +47,8 @@ type FileuploadPool struct {
Version int32 `db:"version,pk" `
PropertyOwnerPhoneE164 null.Val[string] `db:"property_owner_phone_e164" `
ResidentPhoneE164 null.Val[string] `db:"resident_phone_e164" `
Geom null.Val[string] `db:"geom" `
Tags pgtypes.HStore `db:"tags" `
R fileuploadPoolR `db:"-" `
}
@ -74,7 +75,7 @@ type fileuploadPoolR struct {
func buildFileuploadPoolColumns(alias string) fileuploadPoolColumns {
return fileuploadPoolColumns{
ColumnsExpr: expr.NewColumnsExpr(
"address_city", "address_postal_code", "address_street", "committed", "condition", "created", "creator_id", "csv_file", "deleted", "geom", "h3cell", "id", "is_in_district", "is_new", "notes", "organization_id", "property_owner_name", "resident_owned", "version", "property_owner_phone_e164", "resident_phone_e164",
"address_city", "address_postal_code", "address_street", "committed", "condition", "created", "creator_id", "csv_file", "deleted", "h3cell", "id", "is_in_district", "is_new", "notes", "organization_id", "property_owner_name", "resident_owned", "version", "property_owner_phone_e164", "resident_phone_e164", "geom", "tags",
).WithParent("fileupload.pool"),
tableAlias: alias,
AddressCity: psql.Quote(alias, "address_city"),
@ -86,7 +87,6 @@ func buildFileuploadPoolColumns(alias string) fileuploadPoolColumns {
CreatorID: psql.Quote(alias, "creator_id"),
CSVFile: psql.Quote(alias, "csv_file"),
Deleted: psql.Quote(alias, "deleted"),
Geom: psql.Quote(alias, "geom"),
H3cell: psql.Quote(alias, "h3cell"),
ID: psql.Quote(alias, "id"),
IsInDistrict: psql.Quote(alias, "is_in_district"),
@ -98,6 +98,8 @@ func buildFileuploadPoolColumns(alias string) fileuploadPoolColumns {
Version: psql.Quote(alias, "version"),
PropertyOwnerPhoneE164: psql.Quote(alias, "property_owner_phone_e164"),
ResidentPhoneE164: psql.Quote(alias, "resident_phone_e164"),
Geom: psql.Quote(alias, "geom"),
Tags: psql.Quote(alias, "tags"),
}
}
@ -113,7 +115,6 @@ type fileuploadPoolColumns struct {
CreatorID psql.Expression
CSVFile psql.Expression
Deleted psql.Expression
Geom psql.Expression
H3cell psql.Expression
ID psql.Expression
IsInDistrict psql.Expression
@ -125,6 +126,8 @@ type fileuploadPoolColumns struct {
Version psql.Expression
PropertyOwnerPhoneE164 psql.Expression
ResidentPhoneE164 psql.Expression
Geom psql.Expression
Tags psql.Expression
}
func (c fileuploadPoolColumns) Alias() string {
@ -148,7 +151,6 @@ type FileuploadPoolSetter struct {
CreatorID omit.Val[int32] `db:"creator_id" `
CSVFile omit.Val[int32] `db:"csv_file" `
Deleted omitnull.Val[time.Time] `db:"deleted" `
Geom omitnull.Val[string] `db:"geom" `
H3cell omitnull.Val[string] `db:"h3cell" `
ID omit.Val[int32] `db:"id,pk" `
IsInDistrict omit.Val[bool] `db:"is_in_district" `
@ -160,10 +162,12 @@ type FileuploadPoolSetter struct {
Version omit.Val[int32] `db:"version,pk" `
PropertyOwnerPhoneE164 omitnull.Val[string] `db:"property_owner_phone_e164" `
ResidentPhoneE164 omitnull.Val[string] `db:"resident_phone_e164" `
Geom omitnull.Val[string] `db:"geom" `
Tags omit.Val[pgtypes.HStore] `db:"tags" `
}
func (s FileuploadPoolSetter) SetColumns() []string {
vals := make([]string, 0, 21)
vals := make([]string, 0, 22)
if s.AddressCity.IsValue() {
vals = append(vals, "address_city")
}
@ -191,9 +195,6 @@ func (s FileuploadPoolSetter) SetColumns() []string {
if !s.Deleted.IsUnset() {
vals = append(vals, "deleted")
}
if !s.Geom.IsUnset() {
vals = append(vals, "geom")
}
if !s.H3cell.IsUnset() {
vals = append(vals, "h3cell")
}
@ -227,6 +228,12 @@ func (s FileuploadPoolSetter) SetColumns() []string {
if !s.ResidentPhoneE164.IsUnset() {
vals = append(vals, "resident_phone_e164")
}
if !s.Geom.IsUnset() {
vals = append(vals, "geom")
}
if s.Tags.IsValue() {
vals = append(vals, "tags")
}
return vals
}
@ -258,9 +265,6 @@ func (s FileuploadPoolSetter) Overwrite(t *FileuploadPool) {
if !s.Deleted.IsUnset() {
t.Deleted = s.Deleted.MustGetNull()
}
if !s.Geom.IsUnset() {
t.Geom = s.Geom.MustGetNull()
}
if !s.H3cell.IsUnset() {
t.H3cell = s.H3cell.MustGetNull()
}
@ -294,6 +298,12 @@ func (s FileuploadPoolSetter) Overwrite(t *FileuploadPool) {
if !s.ResidentPhoneE164.IsUnset() {
t.ResidentPhoneE164 = s.ResidentPhoneE164.MustGetNull()
}
if !s.Geom.IsUnset() {
t.Geom = s.Geom.MustGetNull()
}
if s.Tags.IsValue() {
t.Tags = s.Tags.MustGet()
}
}
func (s *FileuploadPoolSetter) Apply(q *dialect.InsertQuery) {
@ -302,7 +312,7 @@ func (s *FileuploadPoolSetter) Apply(q *dialect.InsertQuery) {
})
q.AppendValues(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
vals := make([]bob.Expression, 21)
vals := make([]bob.Expression, 22)
if s.AddressCity.IsValue() {
vals[0] = psql.Arg(s.AddressCity.MustGet())
} else {
@ -357,78 +367,84 @@ func (s *FileuploadPoolSetter) Apply(q *dialect.InsertQuery) {
vals[8] = psql.Raw("DEFAULT")
}
if !s.Geom.IsUnset() {
vals[9] = psql.Arg(s.Geom.MustGetNull())
if !s.H3cell.IsUnset() {
vals[9] = psql.Arg(s.H3cell.MustGetNull())
} else {
vals[9] = psql.Raw("DEFAULT")
}
if !s.H3cell.IsUnset() {
vals[10] = psql.Arg(s.H3cell.MustGetNull())
if s.ID.IsValue() {
vals[10] = psql.Arg(s.ID.MustGet())
} else {
vals[10] = psql.Raw("DEFAULT")
}
if s.ID.IsValue() {
vals[11] = psql.Arg(s.ID.MustGet())
if s.IsInDistrict.IsValue() {
vals[11] = psql.Arg(s.IsInDistrict.MustGet())
} else {
vals[11] = psql.Raw("DEFAULT")
}
if s.IsInDistrict.IsValue() {
vals[12] = psql.Arg(s.IsInDistrict.MustGet())
if s.IsNew.IsValue() {
vals[12] = psql.Arg(s.IsNew.MustGet())
} else {
vals[12] = psql.Raw("DEFAULT")
}
if s.IsNew.IsValue() {
vals[13] = psql.Arg(s.IsNew.MustGet())
if s.Notes.IsValue() {
vals[13] = psql.Arg(s.Notes.MustGet())
} else {
vals[13] = psql.Raw("DEFAULT")
}
if s.Notes.IsValue() {
vals[14] = psql.Arg(s.Notes.MustGet())
if s.OrganizationID.IsValue() {
vals[14] = psql.Arg(s.OrganizationID.MustGet())
} else {
vals[14] = psql.Raw("DEFAULT")
}
if s.OrganizationID.IsValue() {
vals[15] = psql.Arg(s.OrganizationID.MustGet())
if s.PropertyOwnerName.IsValue() {
vals[15] = psql.Arg(s.PropertyOwnerName.MustGet())
} else {
vals[15] = psql.Raw("DEFAULT")
}
if s.PropertyOwnerName.IsValue() {
vals[16] = psql.Arg(s.PropertyOwnerName.MustGet())
if !s.ResidentOwned.IsUnset() {
vals[16] = psql.Arg(s.ResidentOwned.MustGetNull())
} else {
vals[16] = psql.Raw("DEFAULT")
}
if !s.ResidentOwned.IsUnset() {
vals[17] = psql.Arg(s.ResidentOwned.MustGetNull())
if s.Version.IsValue() {
vals[17] = psql.Arg(s.Version.MustGet())
} else {
vals[17] = psql.Raw("DEFAULT")
}
if s.Version.IsValue() {
vals[18] = psql.Arg(s.Version.MustGet())
if !s.PropertyOwnerPhoneE164.IsUnset() {
vals[18] = psql.Arg(s.PropertyOwnerPhoneE164.MustGetNull())
} else {
vals[18] = psql.Raw("DEFAULT")
}
if !s.PropertyOwnerPhoneE164.IsUnset() {
vals[19] = psql.Arg(s.PropertyOwnerPhoneE164.MustGetNull())
if !s.ResidentPhoneE164.IsUnset() {
vals[19] = psql.Arg(s.ResidentPhoneE164.MustGetNull())
} else {
vals[19] = psql.Raw("DEFAULT")
}
if !s.ResidentPhoneE164.IsUnset() {
vals[20] = psql.Arg(s.ResidentPhoneE164.MustGetNull())
if !s.Geom.IsUnset() {
vals[20] = psql.Arg(s.Geom.MustGetNull())
} else {
vals[20] = psql.Raw("DEFAULT")
}
if s.Tags.IsValue() {
vals[21] = psql.Arg(s.Tags.MustGet())
} else {
vals[21] = psql.Raw("DEFAULT")
}
return bob.ExpressSlice(ctx, w, d, start, vals, "", ", ", "")
}))
}
@ -438,7 +454,7 @@ func (s FileuploadPoolSetter) UpdateMod() bob.Mod[*dialect.UpdateQuery] {
}
func (s FileuploadPoolSetter) Expressions(prefix ...string) []bob.Expression {
exprs := make([]bob.Expression, 0, 21)
exprs := make([]bob.Expression, 0, 22)
if s.AddressCity.IsValue() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
@ -503,13 +519,6 @@ func (s FileuploadPoolSetter) Expressions(prefix ...string) []bob.Expression {
}})
}
if !s.Geom.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "geom")...),
psql.Arg(s.Geom),
}})
}
if !s.H3cell.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "h3cell")...),
@ -587,6 +596,20 @@ func (s FileuploadPoolSetter) Expressions(prefix ...string) []bob.Expression {
}})
}
if !s.Geom.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "geom")...),
psql.Arg(s.Geom),
}})
}
if s.Tags.IsValue() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "tags")...),
psql.Arg(s.Tags),
}})
}
return exprs
}
@ -1193,7 +1216,6 @@ type fileuploadPoolWhere[Q psql.Filterable] struct {
CreatorID psql.WhereMod[Q, int32]
CSVFile psql.WhereMod[Q, int32]
Deleted psql.WhereNullMod[Q, time.Time]
Geom psql.WhereNullMod[Q, string]
H3cell psql.WhereNullMod[Q, string]
ID psql.WhereMod[Q, int32]
IsInDistrict psql.WhereMod[Q, bool]
@ -1205,6 +1227,8 @@ type fileuploadPoolWhere[Q psql.Filterable] struct {
Version psql.WhereMod[Q, int32]
PropertyOwnerPhoneE164 psql.WhereNullMod[Q, string]
ResidentPhoneE164 psql.WhereNullMod[Q, string]
Geom psql.WhereNullMod[Q, string]
Tags psql.WhereMod[Q, pgtypes.HStore]
}
func (fileuploadPoolWhere[Q]) AliasedAs(alias string) fileuploadPoolWhere[Q] {
@ -1222,7 +1246,6 @@ func buildFileuploadPoolWhere[Q psql.Filterable](cols fileuploadPoolColumns) fil
CreatorID: psql.Where[Q, int32](cols.CreatorID),
CSVFile: psql.Where[Q, int32](cols.CSVFile),
Deleted: psql.WhereNull[Q, time.Time](cols.Deleted),
Geom: psql.WhereNull[Q, string](cols.Geom),
H3cell: psql.WhereNull[Q, string](cols.H3cell),
ID: psql.Where[Q, int32](cols.ID),
IsInDistrict: psql.Where[Q, bool](cols.IsInDistrict),
@ -1234,6 +1257,8 @@ func buildFileuploadPoolWhere[Q psql.Filterable](cols fileuploadPoolColumns) fil
Version: psql.Where[Q, int32](cols.Version),
PropertyOwnerPhoneE164: psql.WhereNull[Q, string](cols.PropertyOwnerPhoneE164),
ResidentPhoneE164: psql.WhereNull[Q, string](cols.ResidentPhoneE164),
Geom: psql.WhereNull[Q, string](cols.Geom),
Tags: psql.Where[Q, pgtypes.HStore](cols.Tags),
}
}

32
db/pgdata.go Normal file
View file

@ -0,0 +1,32 @@
package db
import (
"database/sql"
"github.com/Gleipnir-Technology/bob/types/pgtypes"
"github.com/rs/zerolog/log"
)
func ConvertFromPGData(d pgtypes.HStore) map[string]string {
result := make(map[string]string, 0)
for k, v := range d {
value, err := v.Value()
if err != nil {
log.Warn().Err(err).Str("key", k).Msg("Failed to convert from HSTORE")
continue
}
value_str, ok := value.(string)
if !ok {
log.Warn().Msg("Failed to convert to string")
}
result[k] = value_str
}
return result
}
func ConvertToPGData(data map[string]string) pgtypes.HStore {
result := pgtypes.HStore{}
for k, v := range data {
result[k] = sql.Null[string]{V: v, Valid: true}
}
return result
}

View file

@ -13,22 +13,22 @@ const ORG_ID={{ .User.Organization.ID }}
function onLoad() {
const map = document.querySelector("map-libre-test");
map.addEventListener("load", (event) => {
map.addSource('tegola-nidus', {
map.addSource('tegola-nidus-fileupload', {
'type': 'vector',
'tiles': [
`{{.URL.Tegola}}maps/nidus/{z}/{x}/{y}?csv_file=${CSV_FILE_ID}&organization_id=${ORG_ID}`
`{{.URL.Tegola}}maps/fileupload/{z}/{x}/{y}?csv_file=${CSV_FILE_ID}&organization_id=${ORG_ID}`
]
});
map.addLayer({
'id': 'pool',
'source': 'tegola-nidus',
'source-layer': 'fileupload_pool',
'source': 'tegola-nidus-fileupload',
'source-layer': 'pool',
'type': 'circle',
'paint': {
'circle-color': "#DC4535",
'circle-color': "#91b979",
'circle-radius': 7,
'circle-stroke-width': 2,
'circle-stroke-color': "#9C1C28"
'circle-stroke-color': "#7aab5f"
}
});
});
@ -129,16 +129,29 @@ document.addEventListener('DOMContentLoaded', onLoad);
<table class="table table-hover table-striped">
<thead class="table-light">
<tr>
<th></th>
<th>Street</th>
<th>City</th>
<th>Post</th>
<th>Status</th>
<th>Condition</th>
<th>Tags</th>
</tr>
</thead>
<tbody>
{{ range .Upload.Pools }}
<tr>
<tr {{ if gt (len .Errors) 0 }}class="has-error"{{ end }}>
<td>
{{ if gt (len .Errors) 0 }}
<i
class="bi bi-info-circle-fill text-primary ms-1"
data-bs-toggle="tooltip"
data-bs-placement="top"
title="{{ range .Errors }}{{ .Message }}{{ end }}"
></i>
{{ end }}
</td>
<td>{{ .Street }}</td>
<td>{{ .City }}</td>
<td>{{ .PostalCode }}</td>
@ -166,6 +179,7 @@ document.addEventListener('DOMContentLoaded', onLoad);
</td>
{{ end }}
<td>{{ .Condition }}</td>
<td>{{ len .Tags }}</td>
</tr>
{{ end }}
</tbody>

View file

@ -175,8 +175,8 @@ func parseFile(ctx context.Context, txn bob.Tx, file models.FileuploadFile) ([]*
if err != nil {
return pools, fmt.Errorf("Failed to read header of CSV for file %d: %w", file.ID, err)
}
headers := parseHeaders(h)
missing_headers := missingRequiredHeaders(headers)
header_types, header_names := parseHeaders(h)
missing_headers := missingRequiredHeaders(header_types)
for _, mh := range missing_headers {
errorMissingHeader(ctx, txn, c, mh)
file.Update(ctx, txn, &models.FileuploadFileSetter{
@ -197,6 +197,7 @@ func parseFile(ctx context.Context, txn bob.Tx, file models.FileuploadFile) ([]*
}
return pools, fmt.Errorf("Failed to read all CSV records for file %d: %w", file.ID, err)
}
tags := make(map[string]string, 0)
setter := models.FileuploadPoolSetter{
// required fields
//AddressCity: omit.From(),
@ -219,14 +220,15 @@ func parseFile(ctx context.Context, txn bob.Tx, file models.FileuploadFile) ([]*
PropertyOwnerPhoneE164: omitnull.FromPtr[string](nil),
ResidentOwned: omitnull.FromPtr[bool](nil),
ResidentPhoneE164: omitnull.FromPtr[string](nil),
//Tags: convertToPGData(tags),
Version: omit.From(int32(0)),
}
for i, col := range row {
hdr := headers[i]
hdr_t := header_types[i]
if col == "" {
continue
}
switch hdr {
switch hdr_t {
case headerAddressCity:
setter.AddressCity = omit.From(col)
case headerAddressPostalCode:
@ -268,8 +270,12 @@ func parseFile(ctx context.Context, txn bob.Tx, file models.FileuploadFile) ([]*
}
text.EnsureInDB(ctx, txn, *phone)
setter.ResidentPhoneE164 = omitnull.From(text.PhoneString(*phone))
case headerTag:
tags[header_names[i]] = col
}
}
setter.Tags = omit.From(db.ConvertToPGData(tags))
pool, err := models.FileuploadPools.Insert(&setter).One(ctx, txn)
if err != nil {
return pools, fmt.Errorf("Failed to create pool: %w", err)
@ -317,8 +323,9 @@ func errorMissingHeader(ctx context.Context, txn bob.Tx, c *models.FileuploadCSV
msg := fmt.Sprintf("The file is missing the '%s' header", h.String())
return addError(ctx, txn, c, 0, 0, msg)
}
func parseHeaders(row []string) []headerPoolEnum {
results := make([]headerPoolEnum, 0)
func parseHeaders(row []string) ([]headerPoolEnum, []string) {
result_enums := make([]headerPoolEnum, 0)
result_names := make([]string, 0)
for _, h := range row {
ht := strings.TrimSpace(h)
hl := strings.ToLower(ht)
@ -350,10 +357,11 @@ func parseHeaders(row []string) []headerPoolEnum {
default:
type_ = headerTag
}
results = append(results, type_)
result_enums = append(result_enums, type_)
result_names = append(result_names, hl)
}
return results
return result_enums, result_names
}
func missingRequiredHeaders(headers []headerPoolEnum) []headerPoolEnum {
results := make([]headerPoolEnum, 0)

View file

@ -3,14 +3,12 @@ package email
import (
"context"
"crypto/sha256"
"database/sql"
"encoding/hex"
"fmt"
"sort"
"strings"
"time"
"github.com/Gleipnir-Technology/bob/types/pgtypes"
"github.com/Gleipnir-Technology/nidus-sync/db"
"github.com/Gleipnir-Technology/nidus-sync/db/enums"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
@ -42,33 +40,8 @@ func EnsureInDB(ctx context.Context, destination string) (err error) {
return nil
}
func convertToPGData(data map[string]string) pgtypes.HStore {
result := pgtypes.HStore{}
for k, v := range data {
result[k] = sql.Null[string]{V: v, Valid: true}
}
return result
}
func convertFromPGData(d pgtypes.HStore) map[string]string {
result := make(map[string]string, 0)
for k, v := range d {
value, err := v.Value()
if err != nil {
log.Warn().Err(err).Str("key", k).Msg("Failed to convert from HSTORE")
continue
}
value_str, ok := value.(string)
if !ok {
log.Warn().Msg("Failed to convert to string")
}
result[k] = value_str
}
return result
}
func insertEmailLog(ctx context.Context, data map[string]string, destination string, public_id string, source string, subject string, template_id int32) (err error) {
data_for_insert := convertToPGData(data)
data_for_insert := db.ConvertToPGData(data)
var type_ enums.CommsMessagetypeemail
switch template_id {
case templateReportNotificationConfirmationID:

View file

@ -103,7 +103,7 @@ func LoadTemplates() error {
}
func RenderHTML(template_id int32, s pgtypes.HStore) (html []byte, err error) {
data := convertFromPGData(s)
data := db.ConvertFromPGData(s)
t, ok := templateByID[template_id]
if !ok {
return []byte{}, fmt.Errorf("Failed to lookup template %d", template_id)

View file

@ -38,9 +38,11 @@ type UploadPoolError struct {
type UploadPoolRow struct {
City string
Condition string
Street string
Errors []UploadPoolError
PostalCode string
Status string
Street string
Tags map[string]string
}
type PoolUpload struct {
Created time.Time `db:"created"`
@ -102,13 +104,25 @@ func GetUploadPoolDetail(ctx context.Context, organization_id int32, file_id int
if err != nil {
return UploadPoolDetail{}, fmt.Errorf("Failed to lookup errors in csv %d: %w", file_id, err)
}
errors := make([]UploadPoolError, 0)
file_errors := make([]UploadPoolError, 0)
errors_by_row := make(map[int32][]UploadPoolError, 0)
for _, row := range error_rows {
errors = append(errors, UploadPoolError{
e := UploadPoolError{
Column: uint(row.Col),
Line: uint(row.Line),
Message: row.Message,
})
}
if row.Line == 0 {
file_errors = append(file_errors, e)
} else {
by_row, ok := errors_by_row[row.Line]
if !ok {
errors_by_row[row.Line] = []UploadPoolError{e}
continue
}
by_row = append(by_row, e)
errors_by_row[row.Line] = by_row
}
}
pool_rows, err := models.FileuploadPools.Query(
@ -123,7 +137,7 @@ func GetUploadPoolDetail(ctx context.Context, organization_id int32, file_id int
count_new := 0
count_outside := 0
status := "unknown"
for _, r := range pool_rows {
for i, r := range pool_rows {
if r.IsNew {
count_new = count_new + 1
status = "new"
@ -134,19 +148,26 @@ func GetUploadPoolDetail(ctx context.Context, organization_id int32, file_id int
count_existing = count_existing + 1
status = "existing"
}
tags := db.ConvertFromPGData(r.Tags)
errors, ok := errors_by_row[int32(i)]
if !ok {
errors = []UploadPoolError{}
}
pools = append(pools, UploadPoolRow{
City: r.AddressCity,
Condition: r.Condition.String(),
Errors: errors,
PostalCode: r.AddressPostalCode,
Status: status,
Street: r.AddressStreet,
Tags: tags,
})
}
return UploadPoolDetail{
CountExisting: count_existing,
CountOutside: count_outside,
CountNew: count_new,
Errors: errors,
Errors: file_errors,
Name: file.Name,
Pools: pools,
Status: file.Status.String(),