// Package platform — pool CSV upload listing and detail assembly.
// (file: nidus-sync/platform/pool.go)
package platform
import (
"context"
"errors"
"fmt"
"time"
"github.com/Gleipnir-Technology/bob"
"github.com/Gleipnir-Technology/bob/dialect/psql"
"github.com/Gleipnir-Technology/bob/dialect/psql/sm"
"github.com/Gleipnir-Technology/nidus-sync/db"
"github.com/Gleipnir-Technology/nidus-sync/db/enums"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/stephenafamo/scan"
)
type UploadPoolDetail struct {
2026-02-09 19:17:33 +00:00
CountExisting int
CountNew int
CountOutside int
2026-02-09 19:17:33 +00:00
Created time.Time
2026-02-14 04:36:47 +00:00
Errors []UploadPoolError
2026-02-09 19:17:33 +00:00
ID int32
Name string
Pools []UploadPoolRow
Status string
}
// UploadPoolError describes a single parse error recorded for an uploaded
// CSV. A Line of 0 denotes a file-level error not tied to any row.
type UploadPoolError struct {
	Column  uint   // column position of the error within the line
	Line    uint   // CSV line number; 0 means the whole file
	Message string // human-readable error description
}
type UploadPoolRow struct {
2026-02-14 04:36:47 +00:00
Condition string
Errors []UploadPoolError
Locality string
2026-02-14 04:36:47 +00:00
PostalCode string
Region string
2026-02-14 04:36:47 +00:00
Status string
Street string
Tags map[string]string
}
// Upload is a summary row for listing an organization's CSV uploads;
// it is scanned directly from the query in PoolUploadList.
type Upload struct {
	Created time.Time `db:"created"`
	ID      int32     `db:"id"`
	Status  string    `db:"status"`
}
func GetUploadDetail(ctx context.Context, organization_id int32, file_id int32) (UploadPoolDetail, error) {
file, err := models.FindFileuploadFile(ctx, db.PGInstance.BobDB, file_id)
if err != nil {
return UploadPoolDetail{}, fmt.Errorf("Failed to lookup file %d: %w", file_id, err)
}
csv, err := models.FindFileuploadCSV(ctx, db.PGInstance.BobDB, file_id)
2026-02-14 04:36:47 +00:00
if err != nil {
return UploadPoolDetail{}, fmt.Errorf("Failed to lookup csv %d: %w", file_id, err)
2026-02-14 04:36:47 +00:00
}
switch csv.Type {
case enums.FileuploadCsvtypeFlyover:
return getUploadPoollistDetail(ctx, file)
case enums.FileuploadCsvtypePoollist:
return getUploadPoollistDetail(ctx, file)
2026-02-14 04:36:47 +00:00
}
return UploadPoolDetail{}, errors.New("No idea what to do with upload type")
}
2026-02-14 04:36:47 +00:00
func getUploadPoollistDetail(ctx context.Context, file *models.FileuploadFile) (UploadPoolDetail, error) {
file_errors, errors_by_line, err := errorsByLine(ctx, file)
if err != nil {
return UploadPoolDetail{}, fmt.Errorf("get errors by line: %w", err)
}
2026-02-14 04:36:47 +00:00
pool_rows, err := models.FileuploadPools.Query(
models.SelectWhere.FileuploadPools.CSVFile.EQ(file.ID),
).All(ctx, db.PGInstance.BobDB)
if err != nil {
return UploadPoolDetail{}, fmt.Errorf("Failed to query pools for %d: %w", file.ID, err)
}
2026-02-14 04:36:47 +00:00
pools := make([]UploadPoolRow, 0)
2026-02-09 19:17:33 +00:00
count_existing := 0
count_new := 0
count_outside := 0
2026-02-14 04:36:47 +00:00
status := "unknown"
2026-02-16 17:59:18 +00:00
for _, r := range pool_rows {
2026-02-09 19:17:33 +00:00
if r.IsNew {
count_new = count_new + 1
2026-02-14 04:36:47 +00:00
status = "new"
2026-02-09 19:17:33 +00:00
} else {
count_existing = count_existing + 1
2026-02-14 04:36:47 +00:00
status = "existing"
2026-02-09 19:17:33 +00:00
}
2026-03-04 22:04:22 +00:00
if !r.IsInDistrict {
count_outside++
status = "outside"
}
tags := db.ConvertFromPGData(r.Tags)
2026-02-16 17:59:18 +00:00
// add 2 here because our file lines are 1-indexed and we skip the header line, but we are ranging 0-indexed
errors, ok := errors_by_line[r.LineNumber]
if !ok {
errors = []UploadPoolError{}
}
pools = append(pools, UploadPoolRow{
2026-02-14 04:36:47 +00:00
Condition: r.Condition.String(),
Errors: errors,
Locality: r.AddressLocality,
2026-02-14 04:36:47 +00:00
PostalCode: r.AddressPostalCode,
Region: r.AddressRegion,
2026-02-14 04:36:47 +00:00
Status: status,
Street: r.AddressStreet,
Tags: tags,
})
}
return UploadPoolDetail{
2026-02-09 19:17:33 +00:00
CountExisting: count_existing,
CountOutside: count_outside,
2026-02-09 19:17:33 +00:00
CountNew: count_new,
Errors: file_errors,
2026-02-09 19:17:33 +00:00
Name: file.Name,
Pools: pools,
Status: file.Status.String(),
}, nil
}
// PoolUploadList returns one Upload summary per CSV file uploaded by the
// given organization, joining fileupload.csv to fileupload.file.
func PoolUploadList(ctx context.Context, organization_id int32) ([]Upload, error) {
	rows, err := bob.All(ctx, db.PGInstance.BobDB, psql.Select(
		sm.Columns(
			// Only the columns Upload scans are selected; fileupload.csv
			// (file_id, committed, rowcount, type_) and the remaining
			// fileupload.file columns are available but unused here.
			"file.created AS created",
			"file.id AS id",
			"file.status AS status",
		),
		sm.From("fileupload.csv").As("csv"),
		sm.InnerJoin("fileupload.file").As("file").OnEQ(psql.Raw("csv.file_id"), psql.Raw("file.id")),
		sm.Where(psql.Raw("file.organization_id").EQ(psql.Arg(organization_id))),
	), scan.StructMapper[Upload]())
	if err != nil {
		// Idiomatic Go: the slice result is meaningless on error, so
		// return nil rather than a pre-allocated empty slice.
		return nil, fmt.Errorf("Failed to query pool upload rows: %w", err)
	}
	return rows, nil
}
// errorsByLine fetches every CSV error recorded for the given file and
// partitions them: errors on line 0 apply to the file as a whole, while
// all others are grouped under their line number.
func errorsByLine(ctx context.Context, file *models.FileuploadFile) ([]UploadPoolError, map[int32][]UploadPoolError, error) {
	file_errors := make([]UploadPoolError, 0)
	errors_by_line := make(map[int32][]UploadPoolError, 0)
	error_rows, err := models.FileuploadErrorCSVS.Query(
		models.SelectWhere.FileuploadErrorCSVS.CSVFileID.EQ(file.ID),
	).All(ctx, db.PGInstance.BobDB)
	if err != nil {
		return file_errors, errors_by_line, fmt.Errorf("Failed to lookup errors in csv %d: %w", file.ID, err)
	}
	for _, row := range error_rows {
		entry := UploadPoolError{
			Column:  uint(row.Col),
			Line:    uint(row.Line),
			Message: row.Message,
		}
		// Line 0 marks a file-level error rather than a row-level one.
		if row.Line == 0 {
			file_errors = append(file_errors, entry)
			continue
		}
		// append on a missing map key starts from a nil slice, so no
		// explicit existence check is needed.
		errors_by_line[row.Line] = append(errors_by_line[row.Line], entry)
	}
	return file_errors, errors_by_line, nil
}