Split out ability to upload flyover data from pool uploads

Tons of changes here, all in the name of quickly getting to where I can
create test compliance letters.
This commit is contained in:
Eli Ribble 2026-03-02 18:49:02 +00:00
parent 9939434cb3
commit ff2ec0ad14
No known key found for this signature in database
38 changed files with 4204 additions and 233 deletions

128
platform/csv/csv.go Normal file
View file

@ -0,0 +1,128 @@
package csv
import (
"context"
//"encoding/csv"
"fmt"
//"io"
"strconv"
"strings"
//"sync"
//"time"
"github.com/Gleipnir-Technology/bob"
"github.com/Gleipnir-Technology/bob/dialect/psql"
"github.com/Gleipnir-Technology/bob/dialect/psql/um"
//"github.com/Gleipnir-Technology/nidus-sync/config"
"github.com/Gleipnir-Technology/nidus-sync/db"
"github.com/Gleipnir-Technology/nidus-sync/db/enums"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
//"github.com/Gleipnir-Technology/nidus-sync/h3utils"
//"github.com/Gleipnir-Technology/nidus-sync/platform/geom"
//"github.com/Gleipnir-Technology/nidus-sync/platform/text"
//"github.com/Gleipnir-Technology/nidus-sync/stadia"
//"github.com/Gleipnir-Technology/nidus-sync/userfile"
"github.com/aarondl/opt/omit"
//"github.com/aarondl/opt/omitnull"
"github.com/rs/zerolog/log"
)
// csvParserFunc parses an uploaded CSV file into a slice of row models of
// type T, recording per-cell errors inside the given transaction.
type csvParserFunc[T any] = func(context.Context, bob.Tx, *models.FileuploadFile, *models.FileuploadCSV) ([]T, error)

// csvProcessorFunc performs type-specific post-processing (e.g. geocoding)
// on the rows produced by the matching csvParserFunc.
type csvProcessorFunc[T any] = func(context.Context, bob.Tx, *models.FileuploadFile, *models.FileuploadCSV, []T) error
// ProcessJob parses and post-processes an uploaded CSV file according to its
// declared type, pairing the type's parser with its processor via processCSV.
// An unsupported type is reported as an error instead of being silently
// treated as success (the original fell through the switch and returned nil).
func ProcessJob(ctx context.Context, file_id int32, type_ enums.FileuploadCsvtype) error {
	switch type_ {
	case enums.FileuploadCsvtypePoollist:
		return processCSV(ctx, file_id, parseCSVPoollist, processCSVPoollist)
	case enums.FileuploadCsvtypeFlyover:
		return processCSV(ctx, file_id, parseCSVFlyover, processCSVFlyover)
	default:
		return fmt.Errorf("unsupported CSV type %v for file %d", type_, file_id)
	}
}
// processCSV runs the generic CSV pipeline for one uploaded file: load the
// file and CSV metadata, parse all rows inside a transaction, record the row
// count, hand the parsed rows to the type-specific processor, then mark the
// file as parsed and commit.
func processCSV[T any](ctx context.Context, file_id int32, parser csvParserFunc[T], processor csvProcessorFunc[T]) error {
	file, c, err := loadFileAndCSV(ctx, file_id)
	if err != nil {
		return fmt.Errorf("load file and csv: %w", err)
	}
	txn, err := db.PGInstance.BobDB.BeginTx(ctx, nil)
	if err != nil {
		return fmt.Errorf("Failed to start transaction: %w", err)
	}
	// Rollback is a no-op once Commit has succeeded.
	defer txn.Rollback(ctx)
	parsed, err := parser(ctx, txn, file, c)
	if err != nil {
		return fmt.Errorf("parse file: %w", err)
	}
	_, err = psql.Update(
		um.Table("fileupload.csv"),
		um.SetCol("rowcount").ToArg(len(parsed)),
		um.Where(psql.Quote("file_id").EQ(psql.Arg(file_id))),
	).Exec(ctx, txn)
	if err != nil {
		return fmt.Errorf("update csv row: %w", err)
	}
	err = processor(ctx, txn, file, c, parsed)
	if err != nil {
		return fmt.Errorf("process parsed file: %w", err)
	}
	// The original dropped the errors from both the status update and the
	// commit, so a failed write was indistinguishable from success.
	err = file.Update(ctx, txn, &models.FileuploadFileSetter{
		Status: omit.From(enums.FileuploadFilestatustypeParsed),
	})
	if err != nil {
		return fmt.Errorf("set file status to parsed: %w", err)
	}
	log.Info().Int32("file.ID", file.ID).Msg("Set file to parsed")
	if err = txn.Commit(ctx); err != nil {
		return fmt.Errorf("commit transaction: %w", err)
	}
	return nil
}
// loadFileAndCSV fetches the fileupload.file row and its associated
// fileupload.csv row for the given file id.
func loadFileAndCSV(ctx context.Context, file_id int32) (*models.FileuploadFile, *models.FileuploadCSV, error) {
	f, err := models.FindFileuploadFile(ctx, db.PGInstance.BobDB, file_id)
	if err != nil {
		return nil, nil, fmt.Errorf("Failed to get file %d from DB: %w", file_id, err)
	}
	csvRow, err := models.FindFileuploadCSV(ctx, db.PGInstance.BobDB, f.ID)
	if err != nil {
		return nil, nil, fmt.Errorf("Failed to get csv file %d from DB: %w", f.ID, err)
	}
	return f, csvRow, nil
}
// addError records a parse error for a single cell (row/column) of an
// uploaded CSV file and logs the newly created error row. Row 0 is used by
// callers for file-level errors.
func addError(ctx context.Context, txn bob.Tx, c *models.FileuploadCSV, row_number int32, column_number int32, msg string) error {
	setter := &models.FileuploadErrorCSVSetter{
		Col:       omit.From(column_number),
		CSVFileID: omit.From(c.FileID),
		// ID is generated by the database
		Line:    omit.From(row_number),
		Message: omit.From(msg),
	}
	created, err := models.FileuploadErrorCSVS.Insert(setter).One(ctx, txn)
	if err != nil {
		return fmt.Errorf("Failed to add error: %w", err)
	}
	log.Info().Int32("id", created.ID).Int32("file_id", c.FileID).Str("msg", msg).Int32("row", row_number).Int32("col", column_number).Msg("Created CSV file error")
	return nil
}
// addImportError is a placeholder for recording an import-level error on the
// uploaded file. It currently only logs the error ("Fake add import error")
// and persists nothing — TODO: store the error like addError does.
func addImportError(file *models.FileuploadFile, err error) {
	log.Debug().Err(err).Int32("file_id", file.ID).Msg("Fake add import error")
}
// parseBool interprets s as a boolean, accepting everything
// strconv.ParseBool does (applied to the lowercased input) plus the
// spreadsheet-friendly values "yes" and "no".
func parseBool(s string) (bool, error) {
	lowered := strings.ToLower(s)
	if v, err := strconv.ParseBool(lowered); err == nil {
		return v, nil
	}
	// Handle some of the stuff that strconv doesn't handle
	switch lowered {
	case "yes":
		return true, nil
	case "no":
		return false, nil
	}
	return false, fmt.Errorf("unrecognized '%s'", lowered)
}
// errorMissingHeader records a file-level CSV error (row 0, column 0) noting
// that a required header is absent from the uploaded file.
func errorMissingHeader(ctx context.Context, txn bob.Tx, c *models.FileuploadCSV, h headerPoolEnum) error {
	msg := fmt.Sprintf("The file is missing the '%s' header", h.String())
	return addError(ctx, txn, c, 0, 0, msg)
}

307
platform/csv/flyover.go Normal file
View file

@ -0,0 +1,307 @@
package csv
import (
"context"
"encoding/csv"
"fmt"
"io"
"strconv"
"strings"
"time"
"github.com/Gleipnir-Technology/bob"
"github.com/Gleipnir-Technology/bob/dialect/psql"
"github.com/Gleipnir-Technology/bob/dialect/psql/um"
"github.com/Gleipnir-Technology/nidus-sync/db"
"github.com/Gleipnir-Technology/nidus-sync/db/enums"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/h3utils"
"github.com/Gleipnir-Technology/nidus-sync/platform/geom"
"github.com/Gleipnir-Technology/nidus-sync/platform/text"
"github.com/Gleipnir-Technology/nidus-sync/userfile"
"github.com/aarondl/opt/omit"
"github.com/aarondl/opt/omitnull"
"github.com/rs/zerolog/log"
)
// Enum constrains the header-enum type parameters used by the generic CSV
// helpers to integer or string kinds.
type Enum interface {
	~int | ~int8 | ~int16 | ~int32 | ~int64 | ~string
}
// headerFlyoverEnum identifies the recognized column headers of a flyover CSV.
type headerFlyoverEnum int

const (
	headerFlyoverComment headerFlyoverEnum = iota
	headerFlyoverLatitude
	headerFlyoverLongitude
	// headerFlyoverNone marks columns that matched no known header.
	headerFlyoverNone
)

// String returns the canonical CSV header name for the enum value.
func (e headerFlyoverEnum) String() string {
	names := map[headerFlyoverEnum]string{
		headerFlyoverComment:   "Comment",
		headerFlyoverLatitude:  "TargetLat",
		headerFlyoverLongitude: "TargetLon",
	}
	if name, ok := names[e]; ok {
		return name
	}
	return "bad programmer"
}
// parseCSVFlyover parses a flyover CSV. It recognizes the "comment",
// "targetlat" and "targetlon" headers (matched case-insensitively after
// trimming; "*" is the catch-all for any other column) and inserts one
// flyover record per data row via insertFlyover.
var parseCSVFlyover = makeParseCSV(
	makeParseHeaders(map[string]headerFlyoverEnum{
		"comment":   headerFlyoverComment,
		"targetlat": headerFlyoverLatitude,
		"targetlon": headerFlyoverLongitude,
		"*":         headerFlyoverNone,
	}),
	insertFlyover,
)
// insertModelFunc inserts one CSV data row (given its line number, the
// resolved header types, the raw header names, and the cell values) as a
// model of ModelType.
type insertModelFunc[ModelType any, HeaderType Enum] = func(context.Context, bob.Tx, *models.FileuploadFile, *models.FileuploadCSV, int32, []HeaderType, []string, []string) (ModelType, error)

// parseCSVFunc parses an entire uploaded CSV file into a slice of models.
type parseCSVFunc[ModelType any] = func(ctx context.Context, txn bob.Tx, file *models.FileuploadFile, c *models.FileuploadCSV) ([]ModelType, error)
// makeParseCSV builds a parseCSVFunc that streams an uploaded CSV file from
// userfile storage, resolves its header row with parseHeader, and inserts one
// model per data row via insertModel.
func makeParseCSV[ModelType any, HeaderType Enum](parseHeader parseHeaderFunc[HeaderType], insertModel insertModelFunc[ModelType, HeaderType]) parseCSVFunc[ModelType] {
	return func(ctx context.Context, txn bob.Tx, file *models.FileuploadFile, c *models.FileuploadCSV) ([]ModelType, error) {
		rows := make([]ModelType, 0)
		r, err := userfile.NewFileReader(userfile.CollectionCSV, file.FileUUID)
		if err != nil {
			return rows, fmt.Errorf("Failed to get filereader for %d: %w", file.ID, err)
		}
		reader := csv.NewReader(r)
		h, err := reader.Read()
		if err != nil {
			return rows, fmt.Errorf("Failed to read header of CSV for file %d: %w", file.ID, err)
		}
		header_types, header_names := parseHeader(h)
		/*
			TODO: Add support for missing headers
			missing_headers := missingRequiredHeaders(header_types)
			for _, mh := range missing_headers {
				errorMissingHeader(ctx, txn, c, mh)
				file.Update(ctx, txn, &models.FileuploadFileSetter{
					Status: omit.From(enums.FileuploadFilestatustypeError),
				})
				return pools, nil
			}
		*/
		// Start at 2 because the header is line 1, not line 0
		line_number := int32(2)
		for {
			row, err := reader.Read()
			if err != nil {
				if err == io.EOF {
					return rows, nil
				}
				return rows, fmt.Errorf("Failed to read all CSV records for file %d: %w", file.ID, err)
			}
			m, err := insertModel(ctx, txn, file, c, line_number, header_types, header_names, row)
			// The original dropped this error and appended the zero-value
			// model anyway; surface the failure instead.
			if err != nil {
				return rows, fmt.Errorf("Failed to insert row %d for file %d: %w", line_number, file.ID, err)
			}
			rows = append(rows, m)
			line_number = line_number + 1
		}
	}
}
// insertFlyover converts a single flyover CSV row into a
// FileuploadFlyoverAerialService record. Cell values that fail to parse are
// recorded via addError and skipped rather than aborting the row. After the
// insert, the row's geometry and H3 cell are derived from the parsed
// coordinates.
//
// NOTE(review): when the latitude/longitude cells are blank or unparseable
// the geometry is computed from the zero values (0, 0) — confirm that is the
// intended fallback.
func insertFlyover(ctx context.Context, txn bob.Tx, file *models.FileuploadFile, c *models.FileuploadCSV, line_number int32, header_types []headerFlyoverEnum, header_names []string, row []string) (*models.FileuploadFlyoverAerialService, error) {
	setter := models.FileuploadFlyoverAerialServiceSetter{
		Committed: omit.From(false),
		Condition: omit.From(enums.FileuploadPoolconditiontypeUnknown),
		Created:   omit.From(time.Now()),
		CreatorID: omit.From(file.CreatorID),
		CSVFile:   omit.From(file.ID),
		Deleted:   omitnull.FromPtr[time.Time](nil),
		Geom:      omitnull.FromPtr[string](nil),
		H3cell:    omitnull.FromPtr[string](nil),
		// ID - generated
		OrganizationID: omit.From(file.OrganizationID),
	}
	var lat, lng float64
	for i, value := range row {
		if value == "" {
			continue
		}
		switch header_types[i] {
		case headerFlyoverComment:
			condition, err := parsePoolCondition(value)
			if err != nil {
				addError(ctx, txn, c, int32(line_number), int32(i), fmt.Sprintf("'%s' is not a pool condition that we recognize. It should be one of %s", value, poolConditionValidValues()))
				continue
			}
			setter.Condition = omit.From(condition)
		case headerFlyoverLatitude:
			// ParseFloat's second argument is a bit size (32 or 64), not a
			// base; the original passed 10, which strconv treats as 64.
			v, err := strconv.ParseFloat(value, 64)
			if err != nil {
				addError(ctx, txn, c, int32(line_number), int32(i), fmt.Sprintf("'%s' is not decimal value", value))
				continue
			}
			lat = v
		case headerFlyoverLongitude:
			v, err := strconv.ParseFloat(value, 64)
			if err != nil {
				addError(ctx, txn, c, int32(line_number), int32(i), fmt.Sprintf("'%s' is not decimal value", value))
				continue
			}
			lng = v
		}
	}
	flyover, err := models.FileuploadFlyoverAerialServices.Insert(&setter).One(ctx, txn)
	if err != nil {
		return nil, fmt.Errorf("Failed to create flyover: %w", err)
	}
	cell, err := h3utils.GetCell(lng, lat, 15)
	if err != nil {
		// The original message printed lng under the "lat" label (and vice
		// versa) and dropped the underlying error; both fixed here.
		return nil, fmt.Errorf("failed to convert lat %f lng %f to h3 cell: %w", lat, lng, err)
	}
	geom_query := geom.PostgisPointQuery(lng, lat)
	_, err = psql.Update(
		um.Table("fileupload.flyover_aerial_service"),
		um.SetCol("h3cell").ToArg(cell),
		um.SetCol("geom").To(geom_query),
		um.Where(psql.Quote("id").EQ(psql.Arg(flyover.ID))),
	).Exec(ctx, txn)
	if err != nil {
		return nil, fmt.Errorf("failed to update flyover geometry: %w", err)
	}
	return flyover, nil
}
// insertPoollistRow converts a single pool-list CSV row into a
// FileuploadPool record and inserts it. Unparseable cells (condition, phone
// numbers, booleans) are recorded via addError and skipped, leaving the
// setter's default for that field; columns of type headerTag are collected
// into the Tags map keyed by the original (lowercased) header name.
func insertPoollistRow(ctx context.Context, txn bob.Tx, file *models.FileuploadFile, c *models.FileuploadCSV, line_number int32, header_types []headerPoolEnum, header_names []string, row []string) (*models.FileuploadPool, error) {
	tags := make(map[string]string, 0)
	// Start with a setter with default values, comment out the required fields to ensure they're set
	setter := models.FileuploadPoolSetter{
		// AddressCity: omit.From(),
		// AddressPostalCode: omit.From(),
		// AddressStreet: omit.From(),
		Committed: omit.From(false),
		Condition: omit.From(enums.FileuploadPoolconditiontypeUnknown),
		Created:   omit.From(time.Now()),
		CreatorID: omit.From(file.CreatorID),
		CSVFile:   omit.From(file.ID),
		Deleted:   omitnull.FromPtr[time.Time](nil),
		Geom:      omitnull.FromPtr[string](nil),
		H3cell:    omitnull.FromPtr[string](nil),
		// ID - generated
		IsInDistrict:           omit.From(false),
		IsNew:                  omit.From(true),
		LineNumber:             omit.From(line_number),
		Notes:                  omit.From(""),
		OrganizationID:         omit.From(file.OrganizationID),
		PropertyOwnerName:      omit.From(""),
		PropertyOwnerPhoneE164: omitnull.FromPtr[string](nil),
		ResidentOwned:          omitnull.FromPtr[bool](nil),
		ResidentPhoneE164:      omitnull.FromPtr[string](nil),
		// Can't set this via a Setter
		// Tags: convertToPGData(tags),
	}
	for i, value := range row {
		// Empty cells keep the setter's default for that field.
		if value == "" {
			continue
		}
		header_type := header_types[i]
		switch header_type {
		case headerAddressCity:
			setter.AddressCity = omit.From(value)
		case headerAddressPostalCode:
			setter.AddressPostalCode = omit.From(value)
		case headerAddressStreet:
			setter.AddressStreet = omit.From(value)
		case headerCondition:
			condition, err := parsePoolCondition(value)
			if err == nil {
				setter.Condition = omit.From(condition)
			} else {
				addError(ctx, txn, c, int32(line_number), int32(i), fmt.Sprintf("'%s' is not a pool condition that we recognize. It should be one of %s", value, poolConditionValidValues()))
				continue
			}
		case headerNotes:
			setter.Notes = omit.From(value)
		case headerPropertyOwnerName:
			setter.PropertyOwnerName = omit.From(value)
		case headerPropertyOwnerPhone:
			phone, err := text.ParsePhoneNumber(value)
			if err != nil {
				addError(ctx, txn, c, int32(line_number), int32(i), fmt.Sprintf("'%s' is not a phone number that we recognize. Ideally it should be of the form '+12223334444'", value))
				continue
			}
			// Persist the phone number so it can be referenced elsewhere.
			text.EnsureInDB(ctx, txn, *phone)
			setter.PropertyOwnerPhoneE164 = omitnull.From(text.PhoneString(*phone))
		case headerResidentOwned:
			boolValue, err := parseBool(value)
			if err != nil {
				addError(ctx, txn, c, int32(line_number), int32(i), fmt.Sprintf("'%s' is not something that we recognize as a true/false value. Please use either 'true' or 'false'", value))
				continue
			}
			setter.ResidentOwned = omitnull.From(boolValue)
		case headerResidentPhone:
			phone, err := text.ParsePhoneNumber(value)
			if err != nil {
				addError(ctx, txn, c, int32(line_number), int32(i), fmt.Sprintf("'%s' is not a phone number that we recognize. Ideally it should be of the form '+12223334444'", value))
				continue
			}
			text.EnsureInDB(ctx, txn, *phone)
			setter.ResidentPhoneE164 = omitnull.From(text.PhoneString(*phone))
		case headerTag:
			// Arbitrary extra columns become tags keyed by header name.
			tags[header_names[i]] = value
		}
	}
	// Tags cannot be set through the struct literal above; set it here.
	setter.Tags = omit.From(db.ConvertToPGData(tags))
	return models.FileuploadPools.Insert(&setter).One(ctx, txn)
}
type parseHeaderFunc[EnumType any] = func(row []string) ([]EnumType, []string)
func makeParseHeaders[EnumType any](headerToType map[string]EnumType) parseHeaderFunc[EnumType] {
return func(row []string) ([]EnumType, []string) {
result_enums := make([]EnumType, len(row))
result_names := make([]string, len(row))
for i, h := range row {
ht := strings.TrimSpace(h)
hl := strings.ToLower(ht)
log.Debug().Str("header", hl).Msg("Saw CSV header")
var type_ EnumType
type_, ok := headerToType[hl]
if !ok {
// See if there is a '*' entry which should match anything
all_type, ok2 := headerToType["*"]
if !ok2 {
log.Error().Str("name", hl).Msg("No header type matches column. You should add a '*' to the makeParseHeaders call")
continue
} else {
type_ = all_type
}
}
result_enums[i] = type_
result_names[i] = hl
}
return result_enums, result_names
}
}
// processCSVFlyover is the post-parse step for flyover CSVs. It is currently
// a no-op: rows are fully inserted during parsing by insertFlyover.
// NOTE(review): confirm no additional processing (the poollist equivalent
// performs bulk geocoding) is intended here.
func processCSVFlyover(ctx context.Context, txn bob.Tx, file *models.FileuploadFile, c *models.FileuploadCSV, rows []*models.FileuploadFlyoverAerialService) error {
	return nil
}
// poolConditionAliases translates free-form (lowercased) CSV condition
// strings into the canonical condition names accepted by
// FileuploadPoolconditiontype.Scan. The "green" entry is an identity mapping
// kept for completeness.
var poolConditionAliases = map[string]string{
	"covered":       "unknown",
	"dark bottom":   "unknown",
	"no data":       "unknown",
	"empty":         "dry",
	"green":         "green",
	"murky pool":    "murky",
	"putting green": "false pool",
	"questionable":  "unknown",
}
// parsePoolCondition converts a CSV condition cell into a
// FileuploadPoolconditiontype, first translating known aliases to their
// canonical names, then scanning the result into the enum.
func parsePoolCondition(c string) (enums.FileuploadPoolconditiontype, error) {
	lowered := strings.ToLower(c)
	if alias, found := poolConditionAliases[lowered]; found {
		lowered = alias
	}
	var condition enums.FileuploadPoolconditiontype
	err := condition.Scan(lowered)
	return condition, err
}

View file

@ -5,7 +5,6 @@ import (
"encoding/csv"
"fmt"
"io"
"strconv"
"strings"
"sync"
"time"
@ -66,47 +65,6 @@ func (e headerPoolEnum) String() string {
return "bad programmer"
}
}
func ProcessJob(ctx context.Context, file_id int32) error {
file, err := models.FindFileuploadFile(ctx, db.PGInstance.BobDB, file_id)
if err != nil {
return fmt.Errorf("Failed to get file %d from DB: %w", file_id, err)
}
txn, err := db.PGInstance.BobDB.BeginTx(ctx, nil)
if err != nil {
return fmt.Errorf("Failed to start transaction: %w", err)
}
defer txn.Rollback(ctx)
c, err := models.FindFileuploadCSV(ctx, txn, file.ID)
if err != nil {
return fmt.Errorf("Failed to get csv file %d from DB: %w", file.ID, err)
}
pools, err := parseFile(ctx, txn, file, c)
if err != nil {
return fmt.Errorf("parse file: %w", err)
}
_, err = psql.Update(
um.Table("fileupload.csv"),
um.SetCol("rowcount").ToArg(len(pools)),
um.Where(psql.Quote("file_id").EQ(psql.Arg(file_id))),
).Exec(ctx, txn)
if err != nil {
return fmt.Errorf("update csv row: %w", err)
}
org, err := models.FindOrganization(ctx, db.PGInstance.BobDB, file.OrganizationID)
if err != nil {
return fmt.Errorf("get org: %w", err)
}
err = bulkGeocode(ctx, txn, file, c, pools, org)
if err != nil {
log.Error().Err(err).Msg("Failure during geocoding")
}
file.Update(ctx, txn, &models.FileuploadFileSetter{
Status: omit.From(enums.FileuploadFilestatustypeParsed),
})
log.Info().Int32("file.ID", file.ID).Msg("Set file to parsed")
txn.Commit(ctx)
return nil
}
func bulkGeocode(ctx context.Context, txn bob.Tx, file *models.FileuploadFile, c *models.FileuploadCSV, pools []*models.FileuploadPool, org *models.Organization) error {
if len(pools) == 0 {
return nil
@ -215,7 +173,7 @@ func geocode(ctx context.Context, txn bob.Tx, client *stadia.StadiaMaps, job *jo
}
return nil
}
func parseFile(ctx context.Context, txn bob.Tx, file *models.FileuploadFile, c *models.FileuploadCSV) ([]*models.FileuploadPool, error) {
func parseCSVPoollist(ctx context.Context, txn bob.Tx, file *models.FileuploadFile, c *models.FileuploadCSV) ([]*models.FileuploadPool, error) {
pools := make([]*models.FileuploadPool, 0)
r, err := userfile.NewFileReader(userfile.CollectionCSV, file.FileUUID)
if err != nil {
@ -339,45 +297,18 @@ func parseFile(ctx context.Context, txn bob.Tx, file *models.FileuploadFile, c *
line_number = line_number + 1
}
}
func addError(ctx context.Context, txn bob.Tx, c *models.FileuploadCSV, row_number int32, column_number int32, msg string) error {
r, err := models.FileuploadErrorCSVS.Insert(&models.FileuploadErrorCSVSetter{
Col: omit.From(column_number),
CSVFileID: omit.From(c.FileID),
// ID
Line: omit.From(row_number),
Message: omit.From(msg),
}).One(ctx, txn)
func processCSVPoollist(ctx context.Context, txn bob.Tx, file *models.FileuploadFile, c *models.FileuploadCSV, parsed []*models.FileuploadPool) error {
org, err := models.FindOrganization(ctx, db.PGInstance.BobDB, file.OrganizationID)
if err != nil {
return fmt.Errorf("Failed to add error: %w", err)
return fmt.Errorf("get org: %w", err)
}
err = bulkGeocode(ctx, txn, file, c, parsed, org)
if err != nil {
log.Error().Err(err).Msg("Failure during geocoding")
}
log.Info().Int32("id", r.ID).Int32("file_id", c.FileID).Str("msg", msg).Int32("row", row_number).Int32("col", column_number).Msg("Created CSV file error")
return nil
}
func addImportError(file *models.FileuploadFile, err error) {
log.Debug().Err(err).Int32("file_id", file.ID).Msg("Fake add import error")
}
func parseBool(s string) (bool, error) {
sl := strings.ToLower(s)
boolValue, err := strconv.ParseBool(sl)
if err != nil {
// Handle some of the stuff that strconv doesn't handle
switch sl {
case "yes":
return true, nil
case "no":
return false, nil
default:
return false, fmt.Errorf("unrecognized '%s'", sl)
}
}
return boolValue, err
}
func errorMissingHeader(ctx context.Context, txn bob.Tx, c *models.FileuploadCSV, h headerPoolEnum) error {
msg := fmt.Sprintf("The file is missing the '%s' header", h.String())
return addError(ctx, txn, c, 0, 0, msg)
}
func maybeAddServiceArea(req *stadia.StructuredGeocodeRequest, org *models.Organization) {
/*
if org.ServiceAreaXmax.IsNull() ||

42
platform/pdf/pdf.go Normal file
View file

@ -0,0 +1,42 @@
package pdf
import (
"context"
"fmt"
"github.com/Gleipnir-Technology/nidus-sync/config"
"github.com/chromedp/cdproto/page"
"github.com/chromedp/chromedp"
"github.com/rs/zerolog/log"
)
// GeneratePDF renders the mailer preview page for the given code as a PDF
// using headless Chrome, returning the raw PDF bytes.
func GeneratePDF(ctx context.Context, code string) ([]byte, error) {
	// Derive the chromedp context from the caller's ctx so cancellation and
	// deadlines propagate; the original built it from context.Background(),
	// leaving the ctx parameter unused.
	chrome_ctx, cancel := chromedp.NewContext(ctx)
	defer cancel()
	// capture pdf
	var buf []byte
	url := fmt.Sprintf("http://%s/mailer/%s/preview", config.Bind, code)
	log.Info().Str("url", url).Msg("Getting with headless chrome")
	if err := chromedp.Run(chrome_ctx, printToPDF(url, &buf)); err != nil {
		return nil, fmt.Errorf("print to pdf: %w", err)
	}
	return buf, nil
}
// printToPDF returns a chromedp task list that navigates to urlstr and
// captures the rendered page as a PDF (backgrounds disabled) into *res.
func printToPDF(urlstr string, res *[]byte) chromedp.Tasks {
	return chromedp.Tasks{
		chromedp.Navigate(urlstr),
		chromedp.ActionFunc(func(ctx context.Context) error {
			// Only the PDF bytes are needed; the stream handle is discarded.
			buf, _, err := page.PrintToPDF().WithPrintBackground(false).Do(ctx)
			if err != nil {
				return err
			}
			*res = buf
			return nil
		}),
	}
}

View file

@ -44,16 +44,16 @@ type UploadPoolRow struct {
Street string
Tags map[string]string
}
type PoolUpload struct {
type Upload struct {
Created time.Time `db:"created"`
ID int32 `db:"id"`
Status string `db:"status"`
}
func NewPoolUpload(ctx context.Context, u *models.User, upload userfile.FileUpload) (PoolUpload, error) {
func NewUpload(ctx context.Context, u *models.User, upload userfile.FileUpload, t enums.FileuploadCsvtype) (Upload, error) {
txn, err := db.PGInstance.BobDB.BeginTx(ctx, nil)
if err != nil {
return PoolUpload{}, fmt.Errorf("Failed to begin transaction: %w", err)
return Upload{}, fmt.Errorf("Failed to begin transaction: %w", err)
}
defer txn.Rollback(ctx)
@ -69,21 +69,21 @@ func NewPoolUpload(ctx context.Context, u *models.User, upload userfile.FileUplo
FileUUID: omit.From(upload.UUID),
}).One(ctx, txn)
if err != nil {
return PoolUpload{}, fmt.Errorf("Failed to create file upload: %w", err)
return Upload{}, fmt.Errorf("Failed to create file upload: %w", err)
}
_, err = models.FileuploadCSVS.Insert(&models.FileuploadCSVSetter{
Committed: omitnull.FromPtr[time.Time](nil),
FileID: omit.From(file.ID),
Rowcount: omit.From(int32(0)),
Type: omit.From(enums.FileuploadCsvtypePoollist),
Type: omit.From(t),
}).One(ctx, txn)
if err != nil {
return PoolUpload{}, fmt.Errorf("Failed to create csv: %w", err)
return Upload{}, fmt.Errorf("Failed to create csv: %w", err)
}
log.Info().Int32("id", file.ID).Msg("Created new pool CSV upload")
txn.Commit(ctx)
background.ProcessUpload(file.ID)
return PoolUpload{
background.ProcessUpload(file.ID, t)
return Upload{
ID: file.ID,
}, nil
}
@ -115,7 +115,7 @@ func GetUploadPoolDetail(ctx context.Context, organization_id int32, file_id int
if row.Line == 0 {
file_errors = append(file_errors, e)
} else {
log.Info().Int32("line", row.Line).Msg("Found error")
//log.Info().Int32("line", row.Line).Msg("Found error")
by_line, ok := errors_by_line[row.Line]
if !ok {
errors_by_line[row.Line] = []UploadPoolError{e}
@ -175,8 +175,8 @@ func GetUploadPoolDetail(ctx context.Context, organization_id int32, file_id int
Status: file.Status.String(),
}, nil
}
func PoolUploadList(ctx context.Context, organization_id int32) ([]PoolUpload, error) {
results := make([]PoolUpload, 0)
func PoolUploadList(ctx context.Context, organization_id int32) ([]Upload, error) {
results := make([]Upload, 0)
rows, err := bob.All(ctx, db.PGInstance.BobDB, psql.Select(
sm.Columns(
// fileupload.csv columns
@ -200,7 +200,7 @@ func PoolUploadList(ctx context.Context, organization_id int32) ([]PoolUpload, e
sm.From("fileupload.csv").As("csv"),
sm.InnerJoin("fileupload.file").As("file").OnEQ(psql.Raw("csv.file_id"), psql.Raw("file.id")),
sm.Where(psql.Raw("file.organization_id").EQ(psql.Arg(organization_id))),
), scan.StructMapper[PoolUpload]())
), scan.StructMapper[Upload]())
if err != nil {
return results, fmt.Errorf("Failed to query pool upload rows: %w", err)
}

View file

@ -35,6 +35,9 @@ type UploadSummary struct {
Type string `db:"type"`
}
// UploadCommit is a stub: committing an uploaded file is not implemented yet
// and currently reports success without doing anything. TODO: implement.
func UploadCommit(ctx context.Context, org *models.Organization, file_id int32) error {
	return nil
}
func UploadDiscard(ctx context.Context, org *models.Organization, file_id int32) error {
_, err := psql.Update(
um.Table(models.FileuploadFiles.Alias()),