Push geocoding down a layer

This makes it possible to always save address information from our
geocoder.
This commit is contained in:
Eli Ribble 2026-03-04 18:29:52 +00:00
parent 80e14568c6
commit daa8cb1748
No known key found for this signature in database
26 changed files with 576 additions and 431 deletions

View file

@ -12,6 +12,7 @@ func AddRoutes(r chi.Router) {
r.Use(render.SetContentType(render.ContentTypeJSON))
r.Method("GET", "/mosquito-source", auth.NewEnsureAuth(apiMosquitoSource))
r.Method("GET", "/service-request", auth.NewEnsureAuth(apiServiceRequest))
r.Method("GET", "/signal", authenticatedHandlerJSON(listSignal))
r.Method("GET", "/trap-data", auth.NewEnsureAuth(apiTrapData))
r.Method("GET", "/client/ios", auth.NewEnsureAuth(handleClientIos))
r.Method("POST", "/audio/{uuid}", auth.NewEnsureAuth(apiAudioPost))

View file

@ -23,10 +23,10 @@ var waitGroup sync.WaitGroup
func Start(ctx context.Context) {
newOAuthTokenChannel = make(chan struct{}, 10)
channelJobAudio = make(chan jobAudio, 100) // Buffered channel to prevent blocking
channelJobImportCSV = make(chan jobImportCSV, 100) // Buffered channel to prevent blocking
channelJobEmail = make(chan email.Job, 100) // Buffered channel to prevent blocking
channelJobText = make(chan text.Job, 100) // Buffered channel to prevent blocking
channelJobAudio = make(chan jobAudio, 100) // Buffered channel to prevent blocking
channelJobCSV = make(chan jobCSV, 100) // Buffered channel to prevent blocking
channelJobEmail = make(chan email.Job, 100) // Buffered channel to prevent blocking
channelJobText = make(chan text.Job, 100) // Buffered channel to prevent blocking
waitGroup.Add(1)
go func() {
@ -49,7 +49,7 @@ func Start(ctx context.Context) {
waitGroup.Add(1)
go func() {
defer waitGroup.Done()
startWorkerCSV(ctx, channelJobImportCSV)
startWorkerCSV(ctx, channelJobCSV)
}()
waitGroup.Add(1)
@ -97,16 +97,10 @@ func addWaitingJobs(ctx context.Context) error {
}
for _, row := range rows {
report_id := row.ID
job := jobImportCSV{
fileID: report_id,
type_: row.Type,
}
select {
case channelJobImportCSV <- job:
log.Info().Int32("report_id", report_id).Msg("CSV upload job queued")
default:
log.Warn().Int32("report_id", report_id).Msg("CSV upload job failed to queue, channel full")
}
enqueueJobCSV(jobCSV{
fileID: report_id,
csvType: row.Type,
})
}
return nil
}

View file

@ -1,52 +0,0 @@
package background
import (
"context"
//"fmt"
"github.com/Gleipnir-Technology/nidus-sync/db/enums"
"github.com/Gleipnir-Technology/nidus-sync/platform/csv"
//"github.com/Gleipnir-Technology/nidus-sync/userfile"
//"github.com/google/uuid"
"github.com/rs/zerolog/log"
)
type jobImportCSV struct {
fileID int32
type_ enums.FileuploadCsvtype
}
var channelJobImportCSV chan jobImportCSV
func ProcessUpload(file_id int32, t enums.FileuploadCsvtype) {
enqueueUploadJob(jobImportCSV{
fileID: file_id,
type_: t,
})
}
func enqueueUploadJob(job jobImportCSV) {
select {
case channelJobImportCSV <- job:
log.Info().Int32("file_id", job.fileID).Msg("Enqueued csv job")
default:
log.Warn().Int32("file_id", job.fileID).Msg("csv channel is full, dropping job")
}
}
func startWorkerCSV(ctx context.Context, channelJobImport chan jobImportCSV) {
go func() {
for {
select {
case <-ctx.Done():
log.Info().Msg("CSV worker shutting down.")
return
case job := <-channelJobImport:
log.Info().Int32("id", job.fileID).Msg("Processing CSV job")
err := csv.ProcessJob(ctx, job.fileID, job.type_)
if err != nil {
log.Error().Err(err).Int32("id", job.fileID).Msg("Error processing CSV file")
}
}
}
}()
}

80
background/upload.go Normal file
View file

@ -0,0 +1,80 @@
package background
import (
"context"
//"fmt"
"github.com/Gleipnir-Technology/nidus-sync/db/enums"
"github.com/Gleipnir-Technology/nidus-sync/platform/csv"
//"github.com/Gleipnir-Technology/nidus-sync/userfile"
//"github.com/google/uuid"
"github.com/rs/zerolog/log"
)
// jobCSVAction selects which phase of CSV handling a job performs.
// This is a defined type (not an alias as before) so the constants below
// cannot be silently interchanged with arbitrary ints.
type jobCSVAction int

const (
	// jobCSVActionCommit resolves parsed rows to canonical addresses (csv.JobCommit).
	jobCSVActionCommit jobCSVAction = iota
	// jobCSVActionImport parses an uploaded CSV file (csv.JobImport).
	jobCSVActionImport
)

// jobCSV is one unit of work for the background CSV worker.
type jobCSV struct {
	action  jobCSVAction
	csvType enums.FileuploadCsvtype
	fileID  int32
}
var channelJobCSV chan jobCSV
func CommitUpload(file_id int32) {
enqueueJobCSV(jobCSV{
action: jobCSVActionCommit,
fileID: file_id,
})
}
// ProcessUpload queues a background job that imports the uploaded CSV file
// identified by file_id, parsed as CSV type t.
func ProcessUpload(file_id int32, t enums.FileuploadCsvtype) {
	enqueueJobCSV(jobCSV{
		// BUG FIX: the action must be set explicitly. The zero value of
		// jobCSVAction is jobCSVActionCommit, so omitting it made every
		// import request run the commit path in the worker instead.
		action:  jobCSVActionImport,
		csvType: t,
		fileID:  file_id,
	})
}
// enqueueJobCSV offers job to the worker channel without blocking; when the
// channel is full the job is dropped and a warning is logged.
func enqueueJobCSV(job jobCSV) {
	sublog := log.With().Int32("file_id", job.fileID).Logger()
	select {
	case channelJobCSV <- job:
		sublog.Info().Msg("Enqueued csv job")
	default:
		sublog.Warn().Msg("csv channel is full, dropping job")
	}
}
// startWorkerCSV consumes CSV jobs from channelJob until ctx is cancelled.
//
// BUG FIX: the loop now runs synchronously in the calling goroutine. The
// caller in Start already wraps this call in
// `go func() { defer waitGroup.Done(); startWorkerCSV(...) }()`, so the old
// inner `go func()` made startWorkerCSV return immediately, firing
// waitGroup.Done at once and leaving the worker untracked at shutdown.
func startWorkerCSV(ctx context.Context, channelJob chan jobCSV) {
	for {
		select {
		case <-ctx.Done():
			log.Info().Msg("CSV worker shutting down.")
			return
		case job := <-channelJob:
			log.Info().Int32("id", job.fileID).Msg("Processing CSV job")
			switch job.action {
			case jobCSVActionCommit:
				if err := csv.JobCommit(ctx, job.fileID); err != nil {
					log.Error().Err(err).Int32("id", job.fileID).Msg("Error processing CSV file")
					continue
				}
			case jobCSVActionImport:
				if err := csv.JobImport(ctx, job.fileID, job.csvType); err != nil {
					log.Error().Err(err).Int32("id", job.fileID).Msg("Error processing CSV file")
					continue
				}
			default:
				// BUG FIX: previously this `return` killed the worker
				// permanently on a single malformed job; drop it and keep serving.
				log.Error().Int("action", int(job.action)).Int32("id", job.fileID).Msg("Unrecognized job action")
				continue
			}
			log.Info().Int32("id", job.fileID).Msg("Done processing CSV job")
		}
	}
}

View file

@ -10,17 +10,8 @@ var AddressErrors = &addressErrors{
columns: []string{"id"},
s: "address_pkey",
},
ErrUniqueAddressCountryLocalityUnitNumber_StreetKey: &UniqueConstraintError{
schema: "",
table: "address",
columns: []string{"country", "locality", "unit", "number_", "street"},
s: "address_country_locality_unit_number__street_key",
},
}
type addressErrors struct {
ErrUniqueAddressPkey *UniqueConstraintError
ErrUniqueAddressCountryLocalityUnitNumber_StreetKey *UniqueConstraintError
}

View file

@ -69,15 +69,6 @@ var Addresses = Table[
Generated: false,
AutoIncr: false,
},
Number: column{
Name: "number_",
DBType: "integer",
Default: "",
Comment: "",
Nullable: false,
Generated: false,
AutoIncr: false,
},
PostalCode: column{
Name: "postal_code",
DBType: "text",
@ -114,6 +105,15 @@ var Addresses = Table[
Generated: false,
AutoIncr: false,
},
Number: column{
Name: "number_",
DBType: "text",
Default: "",
Comment: "",
Nullable: false,
Generated: false,
AutoIncr: false,
},
},
Indexes: addressIndexes{
AddressPkey: index{
@ -133,43 +133,6 @@ var Addresses = Table[
Where: "",
Include: []string{},
},
AddressCountryLocalityUnitNumberStreetKey: index{
Type: "btree",
Name: "address_country_locality_unit_number__street_key",
Columns: []indexColumn{
{
Name: "country",
Desc: null.FromCond(false, true),
IsExpression: false,
},
{
Name: "locality",
Desc: null.FromCond(false, true),
IsExpression: false,
},
{
Name: "unit",
Desc: null.FromCond(false, true),
IsExpression: false,
},
{
Name: "number_",
Desc: null.FromCond(false, true),
IsExpression: false,
},
{
Name: "street",
Desc: null.FromCond(false, true),
IsExpression: false,
},
},
Unique: true,
Comment: "",
NullsFirst: []bool{false, false, false, false, false},
NullsDistinct: false,
Where: "",
Include: []string{},
},
IdxAddressGeom: index{
Type: "gist",
Name: "idx_address_geom",
@ -194,14 +157,6 @@ var Addresses = Table[
Comment: "",
},
Uniques: addressUniques{
AddressCountryLocalityUnitNumberStreetKey: constraint{
Name: "address_country_locality_unit_number__street_key",
Columns: []string{"country", "locality", "unit", "number_", "street"},
Comment: "",
},
},
Comment: "",
}
@ -212,28 +167,27 @@ type addressColumns struct {
H3cell column
ID column
Locality column
Number column
PostalCode column
Street column
Unit column
Region column
Number column
}
func (c addressColumns) AsSlice() []column {
return []column{
c.Country, c.Created, c.Geom, c.H3cell, c.ID, c.Locality, c.Number, c.PostalCode, c.Street, c.Unit, c.Region,
c.Country, c.Created, c.Geom, c.H3cell, c.ID, c.Locality, c.PostalCode, c.Street, c.Unit, c.Region, c.Number,
}
}
type addressIndexes struct {
AddressPkey index
AddressCountryLocalityUnitNumberStreetKey index
IdxAddressGeom index
AddressPkey index
IdxAddressGeom index
}
func (i addressIndexes) AsSlice() []index {
return []index{
i.AddressPkey, i.AddressCountryLocalityUnitNumberStreetKey, i.IdxAddressGeom,
i.AddressPkey, i.IdxAddressGeom,
}
}
@ -243,14 +197,10 @@ func (f addressForeignKeys) AsSlice() []foreignKey {
return []foreignKey{}
}
type addressUniques struct {
AddressCountryLocalityUnitNumberStreetKey constraint
}
type addressUniques struct{}
func (u addressUniques) AsSlice() []constraint {
return []constraint{
u.AddressCountryLocalityUnitNumberStreetKey,
}
return []constraint{}
}
type addressChecks struct{}

View file

@ -988,11 +988,13 @@ func (e *FileuploadCsvtype) Scan(value any) error {
// Enum values for FileuploadFilestatustype
const (
FileuploadFilestatustypeError FileuploadFilestatustype = "error"
FileuploadFilestatustypeParsed FileuploadFilestatustype = "parsed"
FileuploadFilestatustypeUploaded FileuploadFilestatustype = "uploaded"
FileuploadFilestatustypeCommitted FileuploadFilestatustype = "committed"
FileuploadFilestatustypeDiscarded FileuploadFilestatustype = "discarded"
FileuploadFilestatustypeError FileuploadFilestatustype = "error"
FileuploadFilestatustypeParsed FileuploadFilestatustype = "parsed"
FileuploadFilestatustypeUploaded FileuploadFilestatustype = "uploaded"
FileuploadFilestatustypeParsing FileuploadFilestatustype = "parsing"
FileuploadFilestatustypeCommitting FileuploadFilestatustype = "committing"
FileuploadFilestatustypeCommitted FileuploadFilestatustype = "committed"
FileuploadFilestatustypeDiscarded FileuploadFilestatustype = "discarded"
)
func AllFileuploadFilestatustype() []FileuploadFilestatustype {
@ -1000,6 +1002,8 @@ func AllFileuploadFilestatustype() []FileuploadFilestatustype {
FileuploadFilestatustypeError,
FileuploadFilestatustypeParsed,
FileuploadFilestatustypeUploaded,
FileuploadFilestatustypeParsing,
FileuploadFilestatustypeCommitting,
FileuploadFilestatustypeCommitted,
FileuploadFilestatustypeDiscarded,
}
@ -1016,6 +1020,8 @@ func (e FileuploadFilestatustype) Valid() bool {
case FileuploadFilestatustypeError,
FileuploadFilestatustypeParsed,
FileuploadFilestatustypeUploaded,
FileuploadFilestatustypeParsing,
FileuploadFilestatustypeCommitting,
FileuploadFilestatustypeCommitted,
FileuploadFilestatustypeDiscarded:
return true

View file

@ -42,11 +42,11 @@ type AddressTemplate struct {
H3cell func() string
ID func() int32
Locality func() string
Number func() int32
PostalCode func() string
Street func() string
Unit func() string
Region func() string
Number func() string
r addressR
f *Factory
@ -145,10 +145,6 @@ func (o AddressTemplate) BuildSetter() *models.AddressSetter {
val := o.Locality()
m.Locality = omit.From(val)
}
if o.Number != nil {
val := o.Number()
m.Number = omit.From(val)
}
if o.PostalCode != nil {
val := o.PostalCode()
m.PostalCode = omit.From(val)
@ -165,6 +161,10 @@ func (o AddressTemplate) BuildSetter() *models.AddressSetter {
val := o.Region()
m.Region = omit.From(val)
}
if o.Number != nil {
val := o.Number()
m.Number = omit.From(val)
}
return m
}
@ -205,9 +205,6 @@ func (o AddressTemplate) Build() *models.Address {
if o.Locality != nil {
m.Locality = o.Locality()
}
if o.Number != nil {
m.Number = o.Number()
}
if o.PostalCode != nil {
m.PostalCode = o.PostalCode()
}
@ -220,6 +217,9 @@ func (o AddressTemplate) Build() *models.Address {
if o.Region != nil {
m.Region = o.Region()
}
if o.Number != nil {
m.Number = o.Number()
}
o.setModelRels(m)
@ -260,10 +260,6 @@ func ensureCreatableAddress(m *models.AddressSetter) {
val := random_string(nil)
m.Locality = omit.From(val)
}
if !(m.Number.IsValue()) {
val := random_int32(nil)
m.Number = omit.From(val)
}
if !(m.PostalCode.IsValue()) {
val := random_string(nil)
m.PostalCode = omit.From(val)
@ -280,6 +276,10 @@ func ensureCreatableAddress(m *models.AddressSetter) {
val := random_string(nil)
m.Region = omit.From(val)
}
if !(m.Number.IsValue()) {
val := random_string(nil)
m.Number = omit.From(val)
}
}
// insertOptRels creates and inserts any optional the relationships on *models.Address
@ -445,11 +445,11 @@ func (m addressMods) RandomizeAllColumns(f *faker.Faker) AddressMod {
AddressMods.RandomH3cell(f),
AddressMods.RandomID(f),
AddressMods.RandomLocality(f),
AddressMods.RandomNumber(f),
AddressMods.RandomPostalCode(f),
AddressMods.RandomStreet(f),
AddressMods.RandomUnit(f),
AddressMods.RandomRegion(f),
AddressMods.RandomNumber(f),
}
}
@ -639,37 +639,6 @@ func (m addressMods) RandomLocality(f *faker.Faker) AddressMod {
})
}
// Set the model columns to this value
func (m addressMods) Number(val int32) AddressMod {
return AddressModFunc(func(_ context.Context, o *AddressTemplate) {
o.Number = func() int32 { return val }
})
}
// Set the Column from the function
func (m addressMods) NumberFunc(f func() int32) AddressMod {
return AddressModFunc(func(_ context.Context, o *AddressTemplate) {
o.Number = f
})
}
// Clear any values for the column
func (m addressMods) UnsetNumber() AddressMod {
return AddressModFunc(func(_ context.Context, o *AddressTemplate) {
o.Number = nil
})
}
// Generates a random value for the column using the given faker
// if faker is nil, a default faker is used
func (m addressMods) RandomNumber(f *faker.Faker) AddressMod {
return AddressModFunc(func(_ context.Context, o *AddressTemplate) {
o.Number = func() int32 {
return random_int32(f)
}
})
}
// Set the model columns to this value
func (m addressMods) PostalCode(val string) AddressMod {
return AddressModFunc(func(_ context.Context, o *AddressTemplate) {
@ -794,6 +763,37 @@ func (m addressMods) RandomRegion(f *faker.Faker) AddressMod {
})
}
// Set the model columns to this value
func (m addressMods) Number(val string) AddressMod {
return AddressModFunc(func(_ context.Context, o *AddressTemplate) {
o.Number = func() string { return val }
})
}
// Set the Column from the function
func (m addressMods) NumberFunc(f func() string) AddressMod {
return AddressModFunc(func(_ context.Context, o *AddressTemplate) {
o.Number = f
})
}
// Clear any values for the column
func (m addressMods) UnsetNumber() AddressMod {
return AddressModFunc(func(_ context.Context, o *AddressTemplate) {
o.Number = nil
})
}
// Generates a random value for the column using the given faker
// if faker is nil, a default faker is used
func (m addressMods) RandomNumber(f *faker.Faker) AddressMod {
return AddressModFunc(func(_ context.Context, o *AddressTemplate) {
o.Number = func() string {
return random_string(f)
}
})
}
func (m addressMods) WithParentsCascading() AddressMod {
return AddressModFunc(func(ctx context.Context, o *AddressTemplate) {
if isDone, _ := addressWithParentsCascadingCtx.Value(ctx); isDone {

View file

@ -143,11 +143,11 @@ func (f *Factory) FromExistingAddress(m *models.Address) *AddressTemplate {
o.H3cell = func() string { return m.H3cell }
o.ID = func() int32 { return m.ID }
o.Locality = func() string { return m.Locality }
o.Number = func() int32 { return m.Number }
o.PostalCode = func() string { return m.PostalCode }
o.Street = func() string { return m.Street }
o.Unit = func() string { return m.Unit }
o.Region = func() string { return m.Region }
o.Number = func() string { return m.Number }
ctx := context.Background()
if len(m.R.Mailers) > 0 {

View file

@ -0,0 +1,4 @@
-- +goose NO TRANSACTION
-- ALTER TYPE ... ADD VALUE cannot run inside a transaction block on older
-- PostgreSQL, and on 12+ the new value is unusable within the same
-- transaction, so this migration opts out of goose's wrapping transaction.
-- +goose Up
ALTER TYPE fileupload.FileStatusType ADD VALUE 'parsing' AFTER 'uploaded';
ALTER TYPE fileupload.FileStatusType ADD VALUE 'committing' AFTER 'parsing';
-- +goose Down
-- PostgreSQL has no ALTER TYPE ... DROP VALUE; removing enum values would
-- require rebuilding the type, so the down migration is intentionally a no-op.

View file

@ -0,0 +1,10 @@
-- +goose Up
-- Change address.number_ from INTEGER to TEXT. Drop-and-recreate (instead of
-- ALTER COLUMN ... TYPE) discards any existing values and moves the column to
-- the end of the table, which matches the regenerated models in this commit.
ALTER TABLE address DROP COLUMN number_;
ALTER TABLE address ADD COLUMN number_ TEXT;
-- Every row is NULL after the re-add; backfill before adding NOT NULL.
UPDATE address SET number_ = '';
ALTER TABLE address ALTER COLUMN number_ SET NOT NULL;
-- +goose Down
-- Mirror image of the Up migration: revert to INTEGER, again losing data.
ALTER TABLE address DROP COLUMN number_;
ALTER TABLE address ADD COLUMN number_ INTEGER;
UPDATE address SET number_ = 0;
ALTER TABLE address ALTER COLUMN number_ SET NOT NULL;

View file

@ -31,11 +31,11 @@ type Address struct {
H3cell string `db:"h3cell" `
ID int32 `db:"id,pk" `
Locality string `db:"locality" `
Number int32 `db:"number_" `
PostalCode string `db:"postal_code" `
Street string `db:"street" `
Unit string `db:"unit" `
Region string `db:"region" `
Number string `db:"number_" `
R addressR `db:"-" `
@ -62,7 +62,7 @@ type addressR struct {
func buildAddressColumns(alias string) addressColumns {
return addressColumns{
ColumnsExpr: expr.NewColumnsExpr(
"country", "created", "geom", "h3cell", "id", "locality", "number_", "postal_code", "street", "unit", "region",
"country", "created", "geom", "h3cell", "id", "locality", "postal_code", "street", "unit", "region", "number_",
).WithParent("address"),
tableAlias: alias,
Country: psql.Quote(alias, "country"),
@ -71,11 +71,11 @@ func buildAddressColumns(alias string) addressColumns {
H3cell: psql.Quote(alias, "h3cell"),
ID: psql.Quote(alias, "id"),
Locality: psql.Quote(alias, "locality"),
Number: psql.Quote(alias, "number_"),
PostalCode: psql.Quote(alias, "postal_code"),
Street: psql.Quote(alias, "street"),
Unit: psql.Quote(alias, "unit"),
Region: psql.Quote(alias, "region"),
Number: psql.Quote(alias, "number_"),
}
}
@ -88,11 +88,11 @@ type addressColumns struct {
H3cell psql.Expression
ID psql.Expression
Locality psql.Expression
Number psql.Expression
PostalCode psql.Expression
Street psql.Expression
Unit psql.Expression
Region psql.Expression
Number psql.Expression
}
func (c addressColumns) Alias() string {
@ -113,11 +113,11 @@ type AddressSetter struct {
H3cell omit.Val[string] `db:"h3cell" `
ID omit.Val[int32] `db:"id,pk" `
Locality omit.Val[string] `db:"locality" `
Number omit.Val[int32] `db:"number_" `
PostalCode omit.Val[string] `db:"postal_code" `
Street omit.Val[string] `db:"street" `
Unit omit.Val[string] `db:"unit" `
Region omit.Val[string] `db:"region" `
Number omit.Val[string] `db:"number_" `
}
func (s AddressSetter) SetColumns() []string {
@ -140,9 +140,6 @@ func (s AddressSetter) SetColumns() []string {
if s.Locality.IsValue() {
vals = append(vals, "locality")
}
if s.Number.IsValue() {
vals = append(vals, "number_")
}
if s.PostalCode.IsValue() {
vals = append(vals, "postal_code")
}
@ -155,6 +152,9 @@ func (s AddressSetter) SetColumns() []string {
if s.Region.IsValue() {
vals = append(vals, "region")
}
if s.Number.IsValue() {
vals = append(vals, "number_")
}
return vals
}
@ -177,9 +177,6 @@ func (s AddressSetter) Overwrite(t *Address) {
if s.Locality.IsValue() {
t.Locality = s.Locality.MustGet()
}
if s.Number.IsValue() {
t.Number = s.Number.MustGet()
}
if s.PostalCode.IsValue() {
t.PostalCode = s.PostalCode.MustGet()
}
@ -192,6 +189,9 @@ func (s AddressSetter) Overwrite(t *Address) {
if s.Region.IsValue() {
t.Region = s.Region.MustGet()
}
if s.Number.IsValue() {
t.Number = s.Number.MustGet()
}
}
func (s *AddressSetter) Apply(q *dialect.InsertQuery) {
@ -237,32 +237,32 @@ func (s *AddressSetter) Apply(q *dialect.InsertQuery) {
vals[5] = psql.Raw("DEFAULT")
}
if s.Number.IsValue() {
vals[6] = psql.Arg(s.Number.MustGet())
if s.PostalCode.IsValue() {
vals[6] = psql.Arg(s.PostalCode.MustGet())
} else {
vals[6] = psql.Raw("DEFAULT")
}
if s.PostalCode.IsValue() {
vals[7] = psql.Arg(s.PostalCode.MustGet())
if s.Street.IsValue() {
vals[7] = psql.Arg(s.Street.MustGet())
} else {
vals[7] = psql.Raw("DEFAULT")
}
if s.Street.IsValue() {
vals[8] = psql.Arg(s.Street.MustGet())
if s.Unit.IsValue() {
vals[8] = psql.Arg(s.Unit.MustGet())
} else {
vals[8] = psql.Raw("DEFAULT")
}
if s.Unit.IsValue() {
vals[9] = psql.Arg(s.Unit.MustGet())
if s.Region.IsValue() {
vals[9] = psql.Arg(s.Region.MustGet())
} else {
vals[9] = psql.Raw("DEFAULT")
}
if s.Region.IsValue() {
vals[10] = psql.Arg(s.Region.MustGet())
if s.Number.IsValue() {
vals[10] = psql.Arg(s.Number.MustGet())
} else {
vals[10] = psql.Raw("DEFAULT")
}
@ -320,13 +320,6 @@ func (s AddressSetter) Expressions(prefix ...string) []bob.Expression {
}})
}
if s.Number.IsValue() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "number_")...),
psql.Arg(s.Number),
}})
}
if s.PostalCode.IsValue() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "postal_code")...),
@ -355,6 +348,13 @@ func (s AddressSetter) Expressions(prefix ...string) []bob.Expression {
}})
}
if s.Number.IsValue() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "number_")...),
psql.Arg(s.Number),
}})
}
return exprs
}
@ -850,11 +850,11 @@ type addressWhere[Q psql.Filterable] struct {
H3cell psql.WhereMod[Q, string]
ID psql.WhereMod[Q, int32]
Locality psql.WhereMod[Q, string]
Number psql.WhereMod[Q, int32]
PostalCode psql.WhereMod[Q, string]
Street psql.WhereMod[Q, string]
Unit psql.WhereMod[Q, string]
Region psql.WhereMod[Q, string]
Number psql.WhereMod[Q, string]
}
func (addressWhere[Q]) AliasedAs(alias string) addressWhere[Q] {
@ -869,11 +869,11 @@ func buildAddressWhere[Q psql.Filterable](cols addressColumns) addressWhere[Q] {
H3cell: psql.Where[Q, string](cols.H3cell),
ID: psql.Where[Q, int32](cols.ID),
Locality: psql.Where[Q, string](cols.Locality),
Number: psql.Where[Q, int32](cols.Number),
PostalCode: psql.Where[Q, string](cols.PostalCode),
Street: psql.Where[Q, string](cols.Street),
Unit: psql.Where[Q, string](cols.Unit),
Region: psql.Where[Q, string](cols.Region),
Number: psql.Where[Q, string](cols.Number),
}
}

View file

@ -17,6 +17,7 @@ import (
"github.com/Gleipnir-Technology/nidus-sync/db"
"github.com/Gleipnir-Technology/nidus-sync/db/enums"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/platform/geocode"
//"github.com/Gleipnir-Technology/nidus-sync/h3utils"
//"github.com/Gleipnir-Technology/nidus-sync/platform/geom"
//"github.com/Gleipnir-Technology/nidus-sync/platform/text"
@ -30,18 +31,71 @@ import (
type csvParserFunc[T any] = func(context.Context, bob.Tx, *models.FileuploadFile, *models.FileuploadCSV) ([]T, error)
type csvProcessorFunc[T any] = func(context.Context, bob.Tx, *models.FileuploadFile, *models.FileuploadCSV, []T) error
func ProcessJob(ctx context.Context, file_id int32, type_ enums.FileuploadCsvtype) error {
// JobCommit resolves each parsed pool row of CSV file file_id to a canonical
// address row, geocoding and inserting addresses that do not exist yet.
// All work happens in a single transaction: committed on success, rolled
// back on any error.
func JobCommit(ctx context.Context, file_id int32) error {
	txn, err := db.PGInstance.BobDB.BeginTx(ctx, nil)
	if err != nil {
		return fmt.Errorf("Failed to start transaction: %w", err)
	}
	// Always release the transaction; Rollback after a successful Commit is a no-op.
	defer func() { _ = txn.Rollback(ctx) }()
	f, err := models.FindFileuploadFile(ctx, txn, file_id)
	if err != nil {
		return fmt.Errorf("Failed to get csv file %d from DB: %w", file_id, err)
	}
	org, err := models.FindOrganization(ctx, txn, f.OrganizationID)
	if err != nil {
		return fmt.Errorf("Failed to get org %d from DB: %w", f.OrganizationID, err)
	}
	rows, err := models.FileuploadPools.Query(
		models.SelectWhere.FileuploadPools.CSVFile.EQ(file_id),
	).All(ctx, txn)
	if err != nil {
		return fmt.Errorf("Failed to get all rows of file %d: %w", file_id, err)
	}
	for _, row := range rows {
		a := geocode.Address{
			// NOTE(review): country is hard-coded to USA; confirm uploads are US-only.
			Country:    enums.CountrytypeUsa,
			Locality:   row.AddressLocality,
			Number:     row.AddressNumber,
			PostalCode: row.AddressPostalCode,
			Region:     row.AddressRegion,
			Street:     row.AddressStreet,
			Unit:       "",
		}
		address, err := geocode.EnsureAddress(ctx, txn, org, a)
		if err != nil {
			return fmt.Errorf("ensure address: %w", err)
		}
		log.Info().Int32("id", address.ID).Msg("made address")
	}
	// BUG FIX: the transaction was previously never committed, so every
	// address created above was silently discarded when the connection closed.
	if err := txn.Commit(ctx); err != nil {
		return fmt.Errorf("Failed to commit transaction: %w", err)
	}
	return nil
}
// JobImport parses and imports uploaded CSV file file_id according to its
// declared type. On failure the file's fileupload.csv row is marked "error"
// so the failure is visible to users.
func JobImport(ctx context.Context, file_id int32, type_ enums.FileuploadCsvtype) error {
	var err error
	switch type_ {
	case enums.FileuploadCsvtypePoollist:
		err = importCSV(ctx, file_id, parseCSVPoollist, processCSVPoollist)
	case enums.FileuploadCsvtypeFlyover:
		err = importCSV(ctx, file_id, parseCSVFlyover, processCSVFlyover)
	default:
		// Previously an unknown type silently returned nil; make it explicit.
		err = fmt.Errorf("unsupported csv type %v", type_)
	}
	if err != nil {
		// Best-effort status update; log its failure but return the import error.
		if _, uerr := psql.Update(
			um.Table("fileupload.csv"),
			um.SetCol("status").ToArg("error"),
			um.Where(psql.Quote("file_id").EQ(psql.Arg(file_id))),
		).Exec(ctx, db.PGInstance.BobDB); uerr != nil {
			log.Error().Err(uerr).Int32("file_id", file_id).Msg("Failed to mark csv as errored")
		}
	}
	return err
}
func processCSV[T any](ctx context.Context, file_id int32, parser csvParserFunc[T], processor csvProcessorFunc[T]) error {
func importCSV[T any](ctx context.Context, file_id int32, parser csvParserFunc[T], processor csvProcessorFunc[T]) error {
// Not done in the transaction so the state shows up immediately
_, err := psql.Update(
um.Table("fileupload.csv"),
um.SetCol("status").ToArg("processing"),
um.Where(psql.Quote("file_id").EQ(psql.Arg(file_id))),
).Exec(ctx, db.PGInstance.BobDB)
file, c, err := loadFileAndCSV(ctx, file_id)
if err != nil {
return fmt.Errorf("load file and csv: %w", err)

View file

@ -16,7 +16,7 @@ import (
"github.com/Gleipnir-Technology/nidus-sync/db"
"github.com/Gleipnir-Technology/nidus-sync/db/enums"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/h3utils"
"github.com/Gleipnir-Technology/nidus-sync/platform/geocode"
"github.com/Gleipnir-Technology/nidus-sync/platform/geom"
"github.com/Gleipnir-Technology/nidus-sync/platform/text"
"github.com/Gleipnir-Technology/nidus-sync/stadia"
@ -130,41 +130,22 @@ type jobGeocode struct {
pool *models.FileuploadPool
}
func geocode(ctx context.Context, txn bob.Tx, client *stadia.StadiaMaps, job *jobGeocode) error {
func geocodePool(ctx context.Context, txn bob.Tx, client *stadia.StadiaMaps, job *jobGeocode) error {
pool := job.pool
sublog := log.With().
Str("pool.address_postal", pool.AddressPostalCode).
Str("pool.address_street", pool.AddressStreet).
Str("pool.postal", pool.AddressPostalCode).
Logger()
req := stadia.StructuredGeocodeRequest{
Address: &pool.AddressStreet,
Locality: &pool.AddressLocality,
PostalCode: &pool.AddressPostalCode,
a := geocode.Address{
Number: pool.AddressNumber,
Locality: pool.AddressLocality,
PostalCode: pool.AddressPostalCode,
Street: pool.AddressStreet,
}
maybeAddServiceArea(&req, job.org)
resp, err := client.StructuredGeocode(ctx, req)
address, err := geocode.Geocode(ctx, job.org, a)
if err != nil {
return fmt.Errorf("client structured geocode failure on %s, %s, %s: %w", pool.AddressStreet, pool.AddressLocality, pool.AddressPostalCode, err)
addError(ctx, txn, job.csv, job.rownumber, 0, err.Error())
}
if len(resp.Features) > 1 {
sublog.Warn().Int("len", len(resp.Features)).Msg("More than one feature")
addError(ctx, txn, job.csv, job.rownumber, 0, "The address provided matched more than one location")
}
feature := resp.Features[0]
if feature.Geometry.Type != "Point" {
return fmt.Errorf("wrong type %s from %s %s", feature.Geometry.Type, pool.AddressStreet, pool.AddressPostalCode)
}
longitude := feature.Geometry.Coordinates[0]
latitude := feature.Geometry.Coordinates[1]
cell, err := h3utils.GetCell(longitude, latitude, 15)
if err != nil {
return fmt.Errorf("failed to convert lat %f lng %f to h3 cell", longitude, latitude)
}
geom_query := geom.PostgisPointQuery(longitude, latitude)
geom_query := geom.PostgisPointQuery(address.Longitude, address.Latitude)
_, err = psql.Update(
um.Table("fileupload.pool"),
um.SetCol("h3cell").ToArg(cell),
um.SetCol("h3cell").ToArg(address.Cell),
um.SetCol("geom").To(geom_query),
um.Where(psql.Quote("id").EQ(psql.Arg(pool.ID))),
).Exec(ctx, txn)
@ -318,31 +299,6 @@ func processCSVPoollist(ctx context.Context, txn bob.Tx, file *models.Fileupload
return nil
}
func maybeAddServiceArea(req *stadia.StructuredGeocodeRequest, org *models.Organization) {
/*
if org.ServiceAreaXmax.IsNull() ||
org.ServiceAreaYmax.IsNull() ||
org.ServiceAreaXmin.IsNull() ||
org.ServiceAreaYmin.IsNull() {
return
}
xmax := org.ServiceAreaXmax.MustGet()
ymax := org.ServiceAreaYmax.MustGet()
xmin := org.ServiceAreaXmin.MustGet()
ymin := org.ServiceAreaYmin.MustGet()
req.BoundaryRectMaxLon = &xmax
req.BoundaryRectMaxLat = &ymax
req.BoundaryRectMinLon = &xmin
req.BoundaryRectMinLat = &ymin
*/
if org.ServiceAreaCentroidX.IsNull() || org.ServiceAreaCentroidY.IsNull() {
return
}
centroid_x := org.ServiceAreaCentroidX.MustGet()
centroid_y := org.ServiceAreaCentroidY.MustGet()
req.FocusPointLat = &centroid_y
req.FocusPointLng = &centroid_x
}
func parseHeaders(row []string) ([]headerPoolEnum, []string) {
result_enums := make([]headerPoolEnum, 0)
result_names := make([]string, 0)
@ -414,7 +370,7 @@ func worker(ctx context.Context, txn bob.Tx, client *stadia.StadiaMaps, jobs <-c
defer wg.Done()
for job := range jobs {
err := geocode(ctx, txn, client, job)
err := geocodePool(ctx, txn, client, job)
if err != nil {
errors <- err

187
platform/geocode/geocode.go Normal file
View file

@ -0,0 +1,187 @@
package geocode
import (
"context"
"fmt"
"strings"
"time"
"github.com/Gleipnir-Technology/bob"
"github.com/Gleipnir-Technology/bob/dialect/psql"
"github.com/Gleipnir-Technology/bob/dialect/psql/im"
"github.com/Gleipnir-Technology/nidus-sync/db/enums"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/h3utils"
"github.com/Gleipnir-Technology/nidus-sync/stadia"
"github.com/stephenafamo/scan"
//"github.com/rs/zerolog/log"
"github.com/uber/h3-go/v4"
)
// Address is a structured postal address, used both as input to the geocoder
// and as its normalized output.
type Address struct {
	Country    enums.Countrytype
	Locality   string
	// Number is the street (house) number, kept as text.
	Number     string
	PostalCode string
	Region     string
	Street     string
	Unit       string
}

// GeocodeResult pairs the normalized address returned by the geocoder with
// its coordinates and the H3 cell containing them.
type GeocodeResult struct {
	Address   Address
	Cell      h3.Cell
	Longitude float64
	Latitude  float64
}

// String renders the address on one human-readable line:
// "number street, locality, region, postal code, country".
func (a Address) String() string {
	return fmt.Sprintf("%s %s, %s, %s, %s, %s", a.Number, a.Street, a.Locality, a.Region, a.PostalCode, a.Country)
}
var client *stadia.StadiaMaps
// EnsureAddress returns the stored address exactly matching a, or geocodes a
// and inserts the normalized result as a new address row.
//
// A cache miss triggers a call to the external structured-geocode service,
// so this can be slow the first time an address is seen.
func EnsureAddress(ctx context.Context, txn bob.Tx, org *models.Organization, a Address) (*models.Address, error) {
	address, err := models.Addresses.Query(
		models.SelectWhere.Addresses.Country.EQ(a.Country),
		models.SelectWhere.Addresses.Locality.EQ(a.Locality),
		models.SelectWhere.Addresses.Number.EQ(a.Number),
		models.SelectWhere.Addresses.PostalCode.EQ(a.PostalCode),
		models.SelectWhere.Addresses.Region.EQ(a.Region),
		models.SelectWhere.Addresses.Street.EQ(a.Street),
		models.SelectWhere.Addresses.Unit.EQ(a.Unit),
	).One(ctx, txn)
	if err == nil {
		return address, nil
	}
	// No exact match: geocode the structured address.
	geo, err := Geocode(ctx, org, a)
	if err != nil {
		return nil, fmt.Errorf("geocode: %w", err)
	}
	type _row struct {
		ID int32 `db:"id"`
	}
	created := time.Now()
	// BUG FIX: the street-number column is named "number_" in the schema
	// (see the address model's db tags and migration), not "number".
	// The unit value is also bound as a parameter instead of raw SQL.
	row, err := bob.One(ctx, txn, psql.Insert(
		im.Into("address", "country", "created", "geom", "h3cell", "id", "locality", "number_", "postal_code", "region", "street", "unit"),
		im.Values(
			psql.Arg(geo.Address.Country),
			psql.Arg(created),
			psql.F("ST_Point", geo.Longitude, geo.Latitude, 4326),
			psql.Arg(geo.Cell),
			psql.Raw("DEFAULT"),
			psql.Arg(geo.Address.Locality),
			psql.Arg(geo.Address.Number),
			psql.Arg(geo.Address.PostalCode),
			psql.Arg(geo.Address.Region),
			psql.Arg(geo.Address.Street),
			psql.Arg(geo.Address.Unit),
		),
		im.Returning("id"),
	), scan.StructMapper[_row]())
	if err != nil {
		return nil, fmt.Errorf("insert: %w", err)
	}
	return &models.Address{
		Country: geo.Address.Country,
		Created: created,
		// Geom/H3cell are stored by the INSERT above but not echoed back here;
		// TODO(review): confirm no caller relies on these being populated.
		Geom:       "",
		H3cell:     "",
		ID:         row.ID,
		Locality:   geo.Address.Locality,
		PostalCode: geo.Address.PostalCode,
		Street:     geo.Address.Street,
		Unit:       geo.Address.Unit,
		Region:     geo.Address.Region,
		Number:     geo.Address.Number,
	}, nil
}
// Geocode resolves the structured address a to a geographic point via the
// structured geocoder, returning the provider's normalized address, its
// coordinates, and the resolution-15 H3 cell containing them.
func Geocode(ctx context.Context, org *models.Organization, a Address) (GeocodeResult, error) {
	street := fmt.Sprintf("%s %s", a.Number, a.Street)
	country_s := a.Country.String()
	req := stadia.StructuredGeocodeRequest{
		Address:    &street,
		Country:    &country_s,
		Locality:   &a.Locality,
		PostalCode: &a.PostalCode,
		Region:     &a.Region,
	}
	maybeAddServiceArea(&req, org)
	resp, err := client.StructuredGeocode(ctx, req)
	if err != nil {
		return GeocodeResult{}, fmt.Errorf("client structured geocode failure on %s: %w", a.String(), err)
	}
	// BUG FIX: guard the empty case too; indexing Features[0] on a response
	// with no features would panic.
	if len(resp.Features) == 0 {
		return GeocodeResult{}, fmt.Errorf("%s matched no locations", a.String())
	}
	if len(resp.Features) > 1 {
		return GeocodeResult{}, fmt.Errorf("%s matched more than one location", a.String())
	}
	feature := resp.Features[0]
	if feature.Geometry.Type != "Point" {
		return GeocodeResult{}, fmt.Errorf("wrong type %s from %s", feature.Geometry.Type, a.String())
	}
	longitude := feature.Geometry.Coordinates[0]
	latitude := feature.Geometry.Coordinates[1]
	cell, err := h3utils.GetCell(longitude, latitude, 15)
	if err != nil {
		return GeocodeResult{}, fmt.Errorf("failed to convert lat %f lng %f to h3 cell", longitude, latitude)
	}
	// Map the provider's country abbreviation (lower-cased) back onto our enum.
	var country enums.Countrytype
	country_s = strings.ToLower(feature.Properties.CountryA)
	if err = country.Scan(country_s); err != nil {
		return GeocodeResult{}, fmt.Errorf("failed to scan country '%s': %w", country_s, err)
	}
	return GeocodeResult{
		Address: Address{
			Country:    country,
			Locality:   feature.Properties.Locality,
			Number:     feature.Properties.HouseNumber,
			PostalCode: feature.Properties.PostalCode,
			Region:     feature.Properties.Region,
			Street:     feature.Properties.Street,
			Unit:       "",
		},
		Cell:      cell,
		Longitude: longitude,
		Latitude:  latitude,
	}, nil
}
// maybeAddServiceArea constrains req to the organization's configured
// service area. If any bounding-box coordinate is unset the request is left
// untouched. When the full box is present, a focus point is additionally set
// — but only if both centroid coordinates are configured.
func maybeAddServiceArea(req *stadia.StructuredGeocodeRequest, org *models.Organization) {
	boxIncomplete := org.ServiceAreaXmax.IsNull() ||
		org.ServiceAreaYmax.IsNull() ||
		org.ServiceAreaXmin.IsNull() ||
		org.ServiceAreaYmin.IsNull()
	if boxIncomplete {
		return
	}
	maxLon := org.ServiceAreaXmax.MustGet()
	maxLat := org.ServiceAreaYmax.MustGet()
	minLon := org.ServiceAreaXmin.MustGet()
	minLat := org.ServiceAreaYmin.MustGet()
	req.BoundaryRectMaxLon = &maxLon
	req.BoundaryRectMaxLat = &maxLat
	req.BoundaryRectMinLon = &minLon
	req.BoundaryRectMinLat = &minLat
	if org.ServiceAreaCentroidX.IsNull() || org.ServiceAreaCentroidY.IsNull() {
		return
	}
	focusLng := org.ServiceAreaCentroidX.MustGet()
	focusLat := org.ServiceAreaCentroidY.MustGet()
	req.FocusPointLat = &focusLat
	req.FocusPointLng = &focusLng
}

6
platform/organization.go Normal file
View file

@ -0,0 +1,6 @@
package platform
// Organization is the platform-layer view of an organization, carrying only
// the fields templates and handlers need.
type Organization struct {
	ID   int    // database identifier
	Name string // display name
}

View file

@ -9,14 +9,9 @@ import (
"github.com/Gleipnir-Technology/bob"
"github.com/Gleipnir-Technology/bob/dialect/psql"
"github.com/Gleipnir-Technology/bob/dialect/psql/sm"
"github.com/Gleipnir-Technology/nidus-sync/background"
"github.com/Gleipnir-Technology/nidus-sync/db"
"github.com/Gleipnir-Technology/nidus-sync/db/enums"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/userfile"
"github.com/aarondl/opt/omit"
"github.com/aarondl/opt/omitnull"
"github.com/rs/zerolog/log"
"github.com/stephenafamo/scan"
)
@ -52,43 +47,6 @@ type Upload struct {
Status string `db:"status"`
}
// NewUpload records a CSV file upload for user u: it inserts the
// fileupload file row and its CSV detail row in a single transaction, then
// schedules background processing of the file. The returned Upload carries
// the new file's ID.
func NewUpload(ctx context.Context, u *models.User, upload userfile.FileUpload, t enums.FileuploadCsvtype) (Upload, error) {
	txn, err := db.PGInstance.BobDB.BeginTx(ctx, nil)
	if err != nil {
		return Upload{}, fmt.Errorf("Failed to begin transaction: %w", err)
	}
	// Rollback is a no-op once the transaction has committed.
	defer txn.Rollback(ctx)
	file, err := models.FileuploadFiles.Insert(&models.FileuploadFileSetter{
		ContentType:    omit.From(upload.ContentType),
		Created:        omit.From(time.Now()),
		CreatorID:      omit.From(u.ID),
		Deleted:        omitnull.FromPtr[time.Time](nil),
		Name:           omit.From(upload.Name),
		OrganizationID: omit.From(u.OrganizationID),
		Status:         omit.From(enums.FileuploadFilestatustypeUploaded),
		SizeBytes:      omit.From(int32(upload.SizeBytes)),
		FileUUID:       omit.From(upload.UUID),
	}).One(ctx, txn)
	if err != nil {
		return Upload{}, fmt.Errorf("Failed to create file upload: %w", err)
	}
	_, err = models.FileuploadCSVS.Insert(&models.FileuploadCSVSetter{
		Committed: omitnull.FromPtr[time.Time](nil),
		FileID:    omit.From(file.ID),
		Rowcount:  omit.From(int32(0)),
		Type:      omit.From(t),
	}).One(ctx, txn)
	if err != nil {
		return Upload{}, fmt.Errorf("Failed to create csv: %w", err)
	}
	log.Info().Int32("id", file.ID).Msg("Created new pool CSV upload")
	// Fix: the commit error was previously ignored, so a failed commit would
	// still enqueue background processing for rows that never landed in the
	// database.
	if err := txn.Commit(ctx); err != nil {
		return Upload{}, fmt.Errorf("Failed to commit transaction: %w", err)
	}
	background.ProcessUpload(file.ID, t)
	return Upload{
		ID: file.ID,
	}, nil
}
func GetUploadDetail(ctx context.Context, organization_id int32, file_id int32) (UploadPoolDetail, error) {
file, err := models.FindFileuploadFile(ctx, db.PGInstance.BobDB, file_id)
if err != nil {

View file

@ -9,8 +9,14 @@ import (
"github.com/Gleipnir-Technology/bob/dialect/psql"
"github.com/Gleipnir-Technology/bob/dialect/psql/sm"
"github.com/Gleipnir-Technology/bob/dialect/psql/um"
"github.com/Gleipnir-Technology/nidus-sync/background"
"github.com/Gleipnir-Technology/nidus-sync/db"
"github.com/Gleipnir-Technology/nidus-sync/db/enums"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/userfile"
"github.com/aarondl/opt/omit"
"github.com/aarondl/opt/omitnull"
"github.com/rs/zerolog/log"
"github.com/stephenafamo/scan"
)
@ -35,8 +41,55 @@ type UploadSummary struct {
Type string `db:"type"`
}
// NewUpload records a CSV file upload for user u: it inserts the
// fileupload file row and its CSV detail row in a single transaction, then
// schedules background processing of the file. The returned Upload carries
// the new file's ID.
func NewUpload(ctx context.Context, u *models.User, upload userfile.FileUpload, t enums.FileuploadCsvtype) (Upload, error) {
	txn, err := db.PGInstance.BobDB.BeginTx(ctx, nil)
	if err != nil {
		return Upload{}, fmt.Errorf("Failed to begin transaction: %w", err)
	}
	// Rollback is a no-op once the transaction has committed.
	defer txn.Rollback(ctx)
	file, err := models.FileuploadFiles.Insert(&models.FileuploadFileSetter{
		ContentType:    omit.From(upload.ContentType),
		Created:        omit.From(time.Now()),
		CreatorID:      omit.From(u.ID),
		Deleted:        omitnull.FromPtr[time.Time](nil),
		Name:           omit.From(upload.Name),
		OrganizationID: omit.From(u.OrganizationID),
		Status:         omit.From(enums.FileuploadFilestatustypeUploaded),
		SizeBytes:      omit.From(int32(upload.SizeBytes)),
		FileUUID:       omit.From(upload.UUID),
	}).One(ctx, txn)
	if err != nil {
		return Upload{}, fmt.Errorf("Failed to create file upload: %w", err)
	}
	_, err = models.FileuploadCSVS.Insert(&models.FileuploadCSVSetter{
		Committed: omitnull.FromPtr[time.Time](nil),
		FileID:    omit.From(file.ID),
		Rowcount:  omit.From(int32(0)),
		Type:      omit.From(t),
	}).One(ctx, txn)
	if err != nil {
		return Upload{}, fmt.Errorf("Failed to create csv: %w", err)
	}
	log.Info().Int32("id", file.ID).Msg("Created new pool CSV upload")
	// Fix: the commit error was previously ignored, so a failed commit would
	// still enqueue background processing for rows that never landed in the
	// database.
	if err := txn.Commit(ctx); err != nil {
		return Upload{}, fmt.Errorf("Failed to commit transaction: %w", err)
	}
	background.ProcessUpload(file.ID, t)
	return Upload{
		ID: file.ID,
	}, nil
}
// UploadCommit is intended to mark an uploaded CSV file as committed for the
// given organization and schedule the background commit job.
//
// NOTE(review): the unconditional `return nil` on the first line
// short-circuits the function — every statement after it (the status update
// and background.CommitUpload) is unreachable dead code. This looks like a
// deliberate WIP stub (see the TODO-style comments about addresses/sites/
// pools), but confirm the intent before shipping: callers currently get a
// silent success.
func UploadCommit(ctx context.Context, org *models.Organization, file_id int32) error {
	return nil
	// Create addresses for each row
	// Create sites for each row
	// Create pools for each row
	_, err := psql.Update(
		um.Table(models.FileuploadFiles.Alias()),
		um.SetCol("status").ToArg("committed"),
		um.Where(psql.Quote("id").EQ(psql.Arg(file_id))),
		um.Where(psql.Quote("organization_id").EQ(psql.Arg(org.ID))),
	).Exec(ctx, db.PGInstance.BobDB)
	background.CommitUpload(file_id)
	return err
}
func UploadDiscard(ctx context.Context, org *models.Organization, file_id int32) error {
_, err := psql.Update(

14
platform/user.go Normal file
View file

@ -0,0 +1,14 @@
package platform
import (
"github.com/Gleipnir-Technology/nidus-sync/notification"
)
// User is the platform-layer view of an authenticated user as rendered by
// templates: identity strings, pending notifications, and the organization
// the user belongs to.
type User struct {
	DisplayName   string // human-readable name
	Initials      string // derived from DisplayName for avatar badges
	Notifications []notification.Notification
	Organization  Organization
	Role          string // role name as a string
	Username      string // login name
}

View file

@ -33,19 +33,32 @@ type GeocodeGeometry struct {
// GeocodeProperties contains the properties of a geocoding result
//
// NOTE(review): as rendered here the struct contains duplicate fields
// (GID, Layer, Name, Confidence, Addendum each appear twice) — duplicate
// struct fields do not compile. This is presumably a diff-rendering
// artifact merging the removed and added field sets; confirm the final
// struct keeps only the commented field group below.
// NOTE(review): SourceID's json tag is "source", which collides with the
// Source field's tag; encoding/json drops fields with conflicting tags,
// so neither would be populated. SourceID should presumably be tagged
// "source_id" — verify against the Stadia API response.
type GeocodeProperties struct {
	GID string `json:"gid"`
	Layer string `json:"layer"`
	Sources []GeocodeSource `json:"sources"`
	Precision string `json:"precision"`
	Name string `json:"name"`
	FormattedAddressLines []string `json:"formatted_address_lines"`
	FormattedAddressLine string `json:"formatted_address_line"`
	CoarseLocation string `json:"coarse_location"`
	AddressComponents AddressComponents `json:"address_components,omitempty"`
	Context GeocodeContext `json:"context,omitempty"`
	Confidence float64 `json:"confidence,omitempty"`
	Distance float64 `json:"distance,omitempty"`
	Addendum map[string]interface{} `json:"addendum,omitempty"`
	Addendum map[string]interface{} `json:"addendum,omitempty"`
	Accuracy string `json:"accuracy"` // 'point'
	Confidence float64 `json:"confidence"` // 1
	Country string `json:"country"` // 'United States'
	CountryA string `json:"country_a"` // 'USA'
	CountryCode string `json:"country_code"` // 'US'
	CountryGID string `json:"country_gid"` // 'whosonfirst:country:85633793'
	County string `json:"county"` // "Tulare County"
	CountyA string `json:"county_a"` // 'TL'
	CountyGID string `json:"county_gid"` // 'whosonfirst:county:102082895'
	GID string `json:"gid"` // 'openaddresses:address:us/ca/tulare-addresses-county:fe9dfab3d45c4550'
	HouseNumber string `json:"housenumber"` // '1234'
	ID string `json:"id"` // us/ca/tulare-addresses-county:fe9dfab3d45c4550
	Label string `json:"label"` // 1234 Main St, Dinuba, CA, USA
	Layer string `json:"layer"` // 'address'
	Locality string `json:"locality"` // 'Dinuba'
	LocalityGID string `json:"locality_gid"` // 'whosonfirst:locality:85922491'
	MatchType string `json:"match_type"` // 'exact'
	Name string `json:"name"` // '1234 Main St'
	PostalCode string `json:"postalcode"` // '93618'
	Region string `json:"region"` // 'California'
	RegionA string `json:"region_a"` // 'CA'
	RegionGID string `json:"region_gid"` // 'whosonfirst:region:85688637'
	Source string `json:"source"` // 'openaddresses'
	SourceID string `json:"source"` // 'us/ca/tulare-addresses-county:fe9dfab3d45c4550'
	Street string `json:"street"` // 'Main Street'
}
// GeocodeSource represents a source of geocoding data
@ -53,34 +66,3 @@ type GeocodeSource struct {
Source string `json:"source"`
SourceID string `json:"source_id"`
}
// AddressComponents represents the structured components of an address
// as returned by the geocoder; all fields are optional in the response.
type AddressComponents struct {
	Number string `json:"number,omitempty"` // house number
	Street string `json:"street,omitempty"`
	Unit string `json:"unit,omitempty"` // apartment/suite, when present
	PostalCode string `json:"postal_code,omitempty"`
}
// GeocodeContext represents the geographic context of a result:
// Who's On First hierarchy plus ISO 3166 country codes.
type GeocodeContext struct {
	WhosOnFirst WhosOnFirstContext `json:"whosonfirst,omitempty"`
	ISO3166A2 string `json:"iso_3166_a2,omitempty"` // two-letter country code
	ISO3166A3 string `json:"iso_3166_a3,omitempty"` // three-letter country code
}
// WhosOnFirstContext contains geographic hierarchy information; each level
// is nil when the geocoder did not resolve that level.
type WhosOnFirstContext struct {
	Country *ContextPlace `json:"country,omitempty"`
	Region *ContextPlace `json:"region,omitempty"`
	County *ContextPlace `json:"county,omitempty"`
	Locality *ContextPlace `json:"locality,omitempty"`
	Neighbourhood *ContextPlace `json:"neighbourhood,omitempty"`
	Borough *ContextPlace `json:"borough,omitempty"`
}
// ContextPlace represents a place in the geographic hierarchy.
type ContextPlace struct {
	GID string `json:"gid"` // globally unique place identifier
	Name string `json:"name"` // human-readable place name
}

View file

@ -2,9 +2,9 @@ package stadia
import (
"crypto/tls"
"github.com/rs/zerolog/log"
"os"
"resty.dev/v3"
//"github.com/rs/zerolog/log"
)
type StadiaMaps struct {
@ -20,6 +20,7 @@ func NewStadiaMaps(api_key string) *StadiaMaps {
//r := resty.New().SetDebug(true)
r := resty.New()
if os.Getenv("STADIA_INSECURE_SKIP_VERIFY") != "" {
log.Warn().Msg("Using insecure TLS verification settings")
r.SetTLSClientConfig(&tls.Config{
InsecureSkipVerify: true,
})

View file

@ -15,6 +15,7 @@ import (
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/html"
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
"github.com/Gleipnir-Technology/nidus-sync/platform"
"github.com/go-chi/chi/v5"
"github.com/google/uuid"
)
@ -30,12 +31,12 @@ type contentSource struct {
Treatments []Treatment
//TreatmentCadence TreatmentCadence
TreatmentModels []TreatmentModel
User User
User platform.User
}
type contentTrap struct {
MapData ComponentMap
Trap Trap
User User
User platform.User
}
type contentDashboard struct {
CountTraps int
@ -49,7 +50,7 @@ type contentDashboard struct {
}
type contentLayoutTest struct {
User User
User platform.User
}
type ContentDistrict struct {
MapboxToken string
@ -103,7 +104,7 @@ func getSource(ctx context.Context, r *http.Request, org *models.Organization, u
if err != nil {
return nil, nhttp.NewError("globalid is not a UUID: %w", nil)
}
userContent, err := contentForUser(r.Context(), user)
userContent, err := auth.ContentForUser(r.Context(), user)
if err != nil {
return nil, nhttp.NewError("Failed to get user content: %w", err)
}
@ -172,7 +173,7 @@ func getTrap(ctx context.Context, r *http.Request, org *models.Organization, use
if err != nil {
return nil, nhttp.NewError("globalid is not a UUID: %w", nil)
}
userContent, err := contentForUser(r.Context(), user)
userContent, err := auth.ContentForUser(r.Context(), user)
if err != nil {
return nil, nhttp.NewError("Failed to get user content: %w", err)
}
@ -254,7 +255,7 @@ func dashboard(ctx context.Context, w http.ResponseWriter, org *models.Organizat
},
RecentRequests: requests,
}
userContent, err := contentForUser(ctx, user)
userContent, err := auth.ContentForUser(ctx, user)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return

View file

@ -9,6 +9,7 @@ import (
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/html"
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
"github.com/Gleipnir-Technology/nidus-sync/platform"
"github.com/rs/zerolog/log"
)
@ -19,14 +20,14 @@ type contentAuthenticated[T any] struct {
Config html.ContentConfig
Organization *models.Organization
URL html.ContentURL
User User
User platform.User
}
// w http.ResponseWriter, r *http.Request, u *models.User) {
func authenticatedHandler[T any](f handlerFunctionGet[T]) http.Handler {
return auth.NewEnsureAuth(func(w http.ResponseWriter, r *http.Request, u *models.User) {
ctx := r.Context()
userContent, err := contentForUser(ctx, u)
userContent, err := auth.ContentForUser(ctx, u)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return

View file

@ -3,7 +3,6 @@ package sync
import (
"time"
"github.com/Gleipnir-Technology/nidus-sync/notification"
"github.com/google/uuid"
"github.com/uber/h3-go/v4"
)
@ -48,9 +47,6 @@ type ContentReportDetail struct {
}
// ContentReportDiagnostic is the (currently empty) template content for the
// diagnostic report page.
type ContentReportDiagnostic struct {
}
// ContentDashboardLoading is the template content for the dashboard loading
// state; it carries only the viewing user.
type ContentDashboardLoading struct {
	User User
}
type Inspection struct {
Action string
@ -63,20 +59,8 @@ type Link struct {
Href string
Title string
}
// Organization is the template-facing view of an organization.
type Organization struct {
	ID int // database identifier
	Name string // display name
}
// ServiceRequestSummary is a compact, display-oriented view of a service
// request used in dashboard listings.
type ServiceRequestSummary struct {
	Date time.Time // when the request was made
	Location string // human-readable location description
	Status string // current request status
}
// User is the template-facing view of an authenticated user: identity
// strings, pending notifications, and the user's organization.
type User struct {
	DisplayName string // human-readable name
	Initials string // derived from DisplayName for avatar badges
	Notifications []notification.Notification
	Organization Organization
	Role string // role name as a string
	Username string // login name
}

View file

@ -85,7 +85,7 @@ func postUploadCommit(ctx context.Context, r *http.Request, org *models.Organiza
}
err = platform.UploadCommit(ctx, org, int32(file_id_))
if err != nil {
return "", nhttp.NewError("Failed to mark discarded: %w", err)
return "", nhttp.NewError("Failed to mark committed: %w", err)
}
return "/configuration/upload", nil
}

View file

@ -12,7 +12,6 @@ import (
"github.com/Gleipnir-Technology/nidus-sync/db"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/db/sql"
"github.com/Gleipnir-Technology/nidus-sync/notification"
"github.com/google/uuid"
"github.com/uber/h3-go/v4"
)
@ -77,41 +76,6 @@ func sourceByGlobalId(ctx context.Context, org *models.Organization, id uuid.UUI
return toTemplateBreedingSource(row), nil
}
// extractInitials returns the upper-cased first letter of each
// whitespace-separated word in name, concatenated in order (e.g.
// "john doe" -> "JD"). It is UTF-8 aware: the first *rune* of each word is
// taken, so multi-byte letters such as "é" are kept intact rather than
// split at the byte level (the previous string(part[0]) mangled them).
func extractInitials(name string) string {
	var initials strings.Builder
	// strings.Fields never yields empty strings, so each part has a rune.
	for _, part := range strings.Fields(name) {
		first := []rune(part)[0]
		initials.WriteString(strings.ToUpper(string(first)))
	}
	return initials.String()
}
// contentForUser assembles the per-user template content used on
// authenticated pages: display name, computed initials, pending
// notifications, organization details (when the relation is loaded),
// role, and username.
func contentForUser(ctx context.Context, user *models.User) (User, error) {
	notifications, err := notification.ForUser(ctx, user)
	if err != nil {
		return User{}, err
	}
	var organization Organization
	if org := user.R.Organization; org != nil {
		organization = Organization{
			ID:   int(org.ID),
			Name: org.Name,
		}
	}
	return User{
		DisplayName:   user.DisplayName,
		Initials:      extractInitials(user.DisplayName),
		Notifications: notifications,
		Organization:  organization,
		Role:          user.Role.String(),
		Username:      user.Username,
	}, nil
}
func trapsBySource(ctx context.Context, org *models.Organization, sourceID uuid.UUID) ([]TrapNearby, error) {
locations, err := sql.TrapLocationBySourceID(org.ID, sourceID).All(ctx, db.PGInstance.BobDB)
if err != nil {