2026-02-24 20:08:57 +00:00
|
|
|
package platform
|
|
|
|
|
|
|
|
|
|
import (
|
|
|
|
|
"context"
|
2026-03-25 21:44:06 -07:00
|
|
|
"errors"
|
2026-02-24 20:08:57 +00:00
|
|
|
"fmt"
|
|
|
|
|
"time"
|
|
|
|
|
|
|
|
|
|
"github.com/Gleipnir-Technology/bob"
|
|
|
|
|
"github.com/Gleipnir-Technology/bob/dialect/psql"
|
|
|
|
|
"github.com/Gleipnir-Technology/bob/dialect/psql/sm"
|
|
|
|
|
"github.com/Gleipnir-Technology/bob/dialect/psql/um"
|
|
|
|
|
"github.com/Gleipnir-Technology/nidus-sync/db"
|
2026-03-04 18:29:52 +00:00
|
|
|
"github.com/Gleipnir-Technology/nidus-sync/db/enums"
|
2026-02-24 20:08:57 +00:00
|
|
|
"github.com/Gleipnir-Technology/nidus-sync/db/models"
|
2026-03-12 23:49:16 +00:00
|
|
|
"github.com/Gleipnir-Technology/nidus-sync/platform/background"
|
|
|
|
|
"github.com/Gleipnir-Technology/nidus-sync/platform/file"
|
2026-03-25 21:44:06 -07:00
|
|
|
"github.com/Gleipnir-Technology/nidus-sync/platform/types"
|
2026-03-04 18:29:52 +00:00
|
|
|
"github.com/aarondl/opt/omit"
|
|
|
|
|
"github.com/aarondl/opt/omitnull"
|
|
|
|
|
"github.com/rs/zerolog/log"
|
2026-02-24 20:08:57 +00:00
|
|
|
"github.com/stephenafamo/scan"
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
// UploadType enumerates the kinds of upload this package handles.
// NOTE(review): declared as an alias of int rather than a defined type, so
// any int converts implicitly; only the pool variety exists so far.
type UploadType = int

const (
	// UploadTypePool identifies a pool-list CSV upload.
	UploadTypePool UploadType = iota
)
|
|
|
|
|
|
|
|
|
|
// UploadStatus enumerates processing states for an upload.
// NOTE(review): alias of int, same caveat as UploadType; only the terminal
// "complete" state is declared here — the string statuses used elsewhere in
// this file ("committing", "discarded") do not go through this type.
type UploadStatus = int

const (
	// UploadStatusComplete marks an upload whose processing has finished.
	UploadStatusComplete UploadStatus = iota
)
|
|
|
|
|
|
2026-03-25 21:44:06 -07:00
|
|
|
// Upload is the API representation of a single CSV file upload, combining
// columns from fileupload.file and fileupload.csv (see UploadList's query,
// whose column aliases match the db tags here).
type Upload struct {
	// Created is when the file row was inserted.
	Created time.Time `db:"created" json:"created"`
	// Error holds a processing error message, empty when none.
	Error string `db:"error" json:"error"`
	// Filename is the original uploaded file name (file.name).
	Filename string `db:"filename" json:"filename"`
	// ID is the fileupload.file primary key.
	ID int32 `db:"id" json:"id"`
	// RecordCount is the number of CSV data rows (csv.rowcount).
	RecordCount int `db:"recordcount" json:"recordcount"`
	// Status is the file's status enum rendered as a string.
	Status string `db:"status" json:"status"`
	// Type is the CSV type enum rendered as a string (csv.type_).
	Type string `db:"type" json:"type"`
	// CSVPool carries per-row detail for pool CSVs; nil in list views,
	// populated by getUploadDetailPool.
	CSVPool *CSVPoolDetail `json:"csv_pool"`
}
|
|
|
|
|
|
2026-03-27 14:04:33 -07:00
|
|
|
// CSVPoolDetailCount aggregates row categories for a pool CSV upload.
// A row counts as either existing or new, and may additionally count as
// outside when it falls out of district (see getUploadDetailPool).
type CSVPoolDetailCount struct {
	// Existing counts rows that matched an already-known pool.
	Existing int `json:"existing"`
	// New counts rows that did not match an existing pool.
	New int `json:"new"`
	// Outside counts rows located outside the district boundary.
	Outside int `json:"outside"`
}
|
2026-03-27 14:04:33 -07:00
|
|
|
// CSVPoolDetail is the pool-specific payload attached to an Upload detail
// response: aggregate counts, file-level errors, and every parsed row.
type CSVPoolDetail struct {
	// Count summarizes the rows by category.
	Count CSVPoolDetailCount `json:"count"`
	// Errors are file-level parse errors not tied to a single row.
	Errors []UploadPoolError `json:"errors"`
	// Pools lists each parsed CSV row in line-number order.
	Pools []UploadPoolRow `json:"pools"`
}
|
|
|
|
|
// UploadPoolRow is one parsed row of a pool CSV as shown in the upload
// detail view.
type UploadPoolRow struct {
	// Address is the matched database address, or one assembled from the
	// raw CSV address columns when no match was found.
	Address types.Address `json:"address"`
	// Condition is the pool condition enum rendered as a string.
	Condition string `json:"condition"`
	// Errors are the parse errors reported for this row's line.
	Errors []UploadPoolError `json:"errors"`
	// Status is "new", "existing", or "outside" (outside wins).
	Status string `json:"status"`
	// Tags holds free-form key/value metadata from the row.
	Tags map[string]string `json:"tags"`
}
|
|
|
|
|
|
2026-03-27 14:04:33 -07:00
|
|
|
func GetUploadDetail(ctx context.Context, organization_id int32, file_id int32) (*Upload, error) {
|
2026-03-25 21:44:06 -07:00
|
|
|
file, err := models.FindFileuploadFile(ctx, db.PGInstance.BobDB, file_id)
|
|
|
|
|
if err != nil {
|
|
|
|
|
return nil, fmt.Errorf("Failed to lookup file %d: %w", file_id, err)
|
|
|
|
|
}
|
|
|
|
|
csv, err := models.FindFileuploadCSV(ctx, db.PGInstance.BobDB, file_id)
|
|
|
|
|
if err != nil {
|
|
|
|
|
return nil, fmt.Errorf("Failed to lookup csv %d: %w", file_id, err)
|
|
|
|
|
}
|
|
|
|
|
switch csv.Type {
|
|
|
|
|
case enums.FileuploadCsvtypeFlyover:
|
2026-03-27 14:04:33 -07:00
|
|
|
return getUploadDetailPool(ctx, file)
|
2026-03-25 21:44:06 -07:00
|
|
|
case enums.FileuploadCsvtypePoollist:
|
2026-03-27 14:04:33 -07:00
|
|
|
return getUploadDetailPool(ctx, file)
|
2026-03-25 21:44:06 -07:00
|
|
|
}
|
|
|
|
|
return nil, errors.New("No idea what to do with upload type")
|
|
|
|
|
}
|
2026-02-24 20:08:57 +00:00
|
|
|
|
2026-03-27 14:04:33 -07:00
|
|
|
func NewUpload(ctx context.Context, u User, upload file.Upload, t enums.FileuploadCsvtype) (*int32, error) {
|
2026-03-04 18:29:52 +00:00
|
|
|
txn, err := db.PGInstance.BobDB.BeginTx(ctx, nil)
|
|
|
|
|
if err != nil {
|
2026-03-16 19:52:29 +00:00
|
|
|
return nil, fmt.Errorf("Failed to begin transaction: %w", err)
|
2026-03-04 18:29:52 +00:00
|
|
|
}
|
|
|
|
|
defer txn.Rollback(ctx)
|
|
|
|
|
|
|
|
|
|
file, err := models.FileuploadFiles.Insert(&models.FileuploadFileSetter{
|
|
|
|
|
ContentType: omit.From(upload.ContentType),
|
|
|
|
|
Created: omit.From(time.Now()),
|
2026-03-12 23:49:16 +00:00
|
|
|
CreatorID: omit.From(int32(u.ID)),
|
2026-03-04 18:29:52 +00:00
|
|
|
Deleted: omitnull.FromPtr[time.Time](nil),
|
2026-04-15 19:02:25 +00:00
|
|
|
Error: omit.From(""),
|
2026-03-04 18:29:52 +00:00
|
|
|
Name: omit.From(upload.Name),
|
2026-03-22 01:22:44 +00:00
|
|
|
OrganizationID: omit.From(u.Organization.ID),
|
2026-03-04 18:29:52 +00:00
|
|
|
Status: omit.From(enums.FileuploadFilestatustypeUploaded),
|
|
|
|
|
SizeBytes: omit.From(int32(upload.SizeBytes)),
|
|
|
|
|
FileUUID: omit.From(upload.UUID),
|
|
|
|
|
}).One(ctx, txn)
|
|
|
|
|
if err != nil {
|
2026-03-16 19:52:29 +00:00
|
|
|
return nil, fmt.Errorf("Failed to create file upload: %w", err)
|
2026-03-04 18:29:52 +00:00
|
|
|
}
|
|
|
|
|
_, err = models.FileuploadCSVS.Insert(&models.FileuploadCSVSetter{
|
|
|
|
|
Committed: omitnull.FromPtr[time.Time](nil),
|
|
|
|
|
FileID: omit.From(file.ID),
|
|
|
|
|
Rowcount: omit.From(int32(0)),
|
|
|
|
|
Type: omit.From(t),
|
|
|
|
|
}).One(ctx, txn)
|
|
|
|
|
if err != nil {
|
2026-03-16 19:52:29 +00:00
|
|
|
return nil, fmt.Errorf("Failed to create csv: %w", err)
|
2026-03-04 18:29:52 +00:00
|
|
|
}
|
|
|
|
|
log.Info().Int32("id", file.ID).Msg("Created new pool CSV upload")
|
2026-03-16 19:52:29 +00:00
|
|
|
err = background.NewCSVImport(ctx, txn, file.ID)
|
|
|
|
|
if err != nil {
|
|
|
|
|
return nil, fmt.Errorf("background job create: %w", err)
|
|
|
|
|
}
|
2026-03-04 18:29:52 +00:00
|
|
|
txn.Commit(ctx)
|
2026-03-27 14:04:33 -07:00
|
|
|
return &file.ID, nil
|
2026-03-04 18:29:52 +00:00
|
|
|
}
|
2026-03-12 23:49:16 +00:00
|
|
|
func UploadCommit(ctx context.Context, org Organization, file_id int32, committer User) error {
|
2026-03-16 19:52:29 +00:00
|
|
|
txn, err := db.PGInstance.BobDB.BeginTx(ctx, nil)
|
|
|
|
|
if err != nil {
|
|
|
|
|
return fmt.Errorf("Failed to begin transaction: %w", err)
|
|
|
|
|
}
|
|
|
|
|
defer txn.Rollback(ctx)
|
|
|
|
|
|
|
|
|
|
_, err = psql.Update(
|
2026-03-04 18:29:52 +00:00
|
|
|
um.Table(models.FileuploadFiles.Alias()),
|
2026-03-05 02:24:51 +00:00
|
|
|
um.SetCol("status").ToArg("committing"),
|
2026-03-05 01:22:21 +00:00
|
|
|
um.SetCol("committer").ToArg(committer.ID),
|
2026-03-04 18:29:52 +00:00
|
|
|
um.Where(psql.Quote("id").EQ(psql.Arg(file_id))),
|
2026-03-22 01:22:44 +00:00
|
|
|
um.Where(psql.Quote("organization_id").EQ(psql.Arg(org.ID))),
|
2026-03-16 19:52:29 +00:00
|
|
|
).Exec(ctx, txn)
|
|
|
|
|
if err != nil {
|
|
|
|
|
return fmt.Errorf("update upload: %w", err)
|
|
|
|
|
}
|
2026-03-19 03:52:56 +00:00
|
|
|
err = background.NewCSVCommit(ctx, txn, file_id)
|
|
|
|
|
if err != nil {
|
|
|
|
|
return fmt.Errorf("background csv commit: %w", err)
|
|
|
|
|
}
|
2026-03-16 19:52:29 +00:00
|
|
|
err = txn.Commit(ctx)
|
|
|
|
|
|
2026-03-04 18:29:52 +00:00
|
|
|
return err
|
2026-03-02 18:49:02 +00:00
|
|
|
}
|
2026-03-12 23:49:16 +00:00
|
|
|
func UploadDiscard(ctx context.Context, org Organization, file_id int32) error {
|
2026-02-24 20:08:57 +00:00
|
|
|
_, err := psql.Update(
|
|
|
|
|
um.Table(models.FileuploadFiles.Alias()),
|
|
|
|
|
um.SetCol("status").ToArg("discarded"),
|
|
|
|
|
um.Where(psql.Quote("id").EQ(psql.Arg(file_id))),
|
2026-03-22 01:22:44 +00:00
|
|
|
um.Where(psql.Quote("organization_id").EQ(psql.Arg(org.ID))),
|
2026-02-24 20:08:57 +00:00
|
|
|
).Exec(ctx, db.PGInstance.BobDB)
|
|
|
|
|
return err
|
|
|
|
|
}
|
2026-03-27 14:04:33 -07:00
|
|
|
// UploadList returns every CSV upload belonging to org, newest first. It
// joins fileupload.csv to fileupload.file; the column aliases below must
// match the db tags on Upload, which scan.StructMapper uses for mapping.
func UploadList(ctx context.Context, org Organization) ([]Upload, error) {
	results := make([]Upload, 0)
	rows, err := bob.All(ctx, db.PGInstance.BobDB, psql.Select(
		sm.Columns(
			// fileupload.csv columns
			//"csv.file_id AS file_id",
			//"csv.committed",
			"csv.rowcount AS recordcount",
			"csv.type_ AS type",

			// fileupload.file columns
			//"file.content_type",
			"file.created AS created",
			//"file.creator_id",
			//"file.deleted",
			"file.error AS error",
			"file.id AS id",
			"file.name AS filename",
			//"file.organization_id",
			"file.status AS status",
			//"file.size_bytes",
			//"file.file_uuid",
			// Aggregate data
		),
		sm.From("fileupload.csv").As("csv"),
		sm.InnerJoin("fileupload.file").As("file").OnEQ(psql.Raw("csv.file_id"), psql.Raw("file.id")),
		sm.Where(psql.Quote("file", "organization_id").EQ(psql.Arg(org.ID))),
		sm.OrderBy("created").Desc(),
	), scan.StructMapper[Upload]())
	if err != nil {
		// Return the empty (non-nil) slice so callers always receive a
		// usable value even on error.
		return results, fmt.Errorf("Failed to query pool upload rows: %w", err)
	}
	return rows, nil
}
|
2026-03-27 14:04:33 -07:00
|
|
|
func getUploadDetailPool(ctx context.Context, file *models.FileuploadFile) (*Upload, error) {
|
2026-03-25 21:44:06 -07:00
|
|
|
file_errors, errors_by_line, err := errorsByLine(ctx, file)
|
|
|
|
|
if err != nil {
|
|
|
|
|
return nil, fmt.Errorf("get errors by line: %w", err)
|
|
|
|
|
}
|
|
|
|
|
pool_rows, err := models.FileuploadPools.Query(
|
|
|
|
|
models.SelectWhere.FileuploadPools.CSVFile.EQ(file.ID),
|
2026-04-16 04:01:44 +00:00
|
|
|
sm.OrderBy(models.FileuploadPools.Columns.LineNumber).Asc(),
|
2026-03-25 21:44:06 -07:00
|
|
|
).All(ctx, db.PGInstance.BobDB)
|
|
|
|
|
if err != nil {
|
|
|
|
|
return nil, fmt.Errorf("Failed to query pools for %d: %w", file.ID, err)
|
|
|
|
|
}
|
2026-04-16 02:46:24 +00:00
|
|
|
address_ids := make([]int32, 0)
|
|
|
|
|
for _, r := range pool_rows {
|
|
|
|
|
if r.AddressID.IsValue() {
|
|
|
|
|
address_ids = append(address_ids, r.AddressID.MustGet())
|
|
|
|
|
}
|
|
|
|
|
}
|
2026-04-16 03:06:18 +00:00
|
|
|
addresses, err := types.AddressList(ctx, address_ids)
|
2026-04-16 02:46:24 +00:00
|
|
|
if err != nil {
|
|
|
|
|
return nil, fmt.Errorf("get address list: %w", err)
|
|
|
|
|
}
|
2026-03-25 21:44:06 -07:00
|
|
|
pools := make([]UploadPoolRow, 0)
|
|
|
|
|
count_existing := 0
|
|
|
|
|
count_new := 0
|
|
|
|
|
count_outside := 0
|
|
|
|
|
status := "unknown"
|
|
|
|
|
for _, r := range pool_rows {
|
|
|
|
|
if r.IsNew {
|
|
|
|
|
count_new = count_new + 1
|
|
|
|
|
status = "new"
|
|
|
|
|
} else {
|
|
|
|
|
count_existing = count_existing + 1
|
|
|
|
|
status = "existing"
|
|
|
|
|
}
|
|
|
|
|
if !r.IsInDistrict {
|
|
|
|
|
count_outside++
|
|
|
|
|
status = "outside"
|
|
|
|
|
}
|
|
|
|
|
tags := db.ConvertFromPGData(r.Tags)
|
|
|
|
|
// add 2 here because our file lines are 1-indexed and we skip the header line, but we are ranging 0-indexed
|
|
|
|
|
errors, ok := errors_by_line[r.LineNumber]
|
|
|
|
|
if !ok {
|
|
|
|
|
errors = []UploadPoolError{}
|
|
|
|
|
}
|
2026-04-16 02:46:24 +00:00
|
|
|
var address *types.Address
|
|
|
|
|
if r.AddressID.IsValue() {
|
2026-04-16 03:06:18 +00:00
|
|
|
var ok bool
|
|
|
|
|
address, ok = addresses[r.AddressID.MustGet()]
|
|
|
|
|
if !ok {
|
|
|
|
|
log.Error().Int32("id", r.AddressID.MustGet()).Msg("address missing")
|
|
|
|
|
continue
|
|
|
|
|
}
|
2026-04-16 02:46:24 +00:00
|
|
|
} else {
|
|
|
|
|
address = &types.Address{
|
2026-03-25 21:44:06 -07:00
|
|
|
Country: "usa",
|
|
|
|
|
Locality: r.AddressLocality,
|
|
|
|
|
Number: r.AddressNumber,
|
|
|
|
|
PostalCode: r.AddressPostalCode,
|
|
|
|
|
Region: r.AddressRegion,
|
|
|
|
|
Street: r.AddressStreet,
|
2026-04-16 02:46:24 +00:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
pools = append(pools, UploadPoolRow{
|
|
|
|
|
Address: *address,
|
2026-03-25 21:44:06 -07:00
|
|
|
Condition: r.Condition.String(),
|
|
|
|
|
Errors: errors,
|
|
|
|
|
Status: status,
|
|
|
|
|
Tags: tags,
|
|
|
|
|
})
|
|
|
|
|
}
|
2026-03-27 14:04:33 -07:00
|
|
|
return &Upload{
|
|
|
|
|
Created: file.Created,
|
2026-04-15 19:02:25 +00:00
|
|
|
Error: file.Error,
|
2026-03-27 14:04:33 -07:00
|
|
|
Filename: file.Name,
|
|
|
|
|
ID: file.ID,
|
|
|
|
|
RecordCount: len(pool_rows),
|
|
|
|
|
CSVPool: &CSVPoolDetail{
|
|
|
|
|
Count: CSVPoolDetailCount{
|
|
|
|
|
Existing: count_existing,
|
|
|
|
|
Outside: count_outside,
|
|
|
|
|
New: count_new,
|
|
|
|
|
},
|
|
|
|
|
Errors: file_errors,
|
|
|
|
|
Pools: pools,
|
2026-03-25 21:44:06 -07:00
|
|
|
},
|
2026-03-27 14:04:33 -07:00
|
|
|
Status: file.Status.String(),
|
2026-03-25 21:44:06 -07:00
|
|
|
}, nil
|
|
|
|
|
}
|