Split out ability to upload flyover data from pool uploads
Tons of changes here, all in the name of quickly getting to where I can create test compliance letters.
This commit is contained in:
parent
9939434cb3
commit
ff2ec0ad14
38 changed files with 4204 additions and 233 deletions
128
platform/csv/csv.go
Normal file
128
platform/csv/csv.go
Normal file
|
|
@ -0,0 +1,128 @@
|
|||
package csv
|
||||
|
||||
import (
|
||||
"context"
|
||||
//"encoding/csv"
|
||||
"fmt"
|
||||
//"io"
|
||||
"strconv"
|
||||
"strings"
|
||||
//"sync"
|
||||
//"time"
|
||||
|
||||
"github.com/Gleipnir-Technology/bob"
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql"
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql/um"
|
||||
//"github.com/Gleipnir-Technology/nidus-sync/config"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/enums"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/models"
|
||||
//"github.com/Gleipnir-Technology/nidus-sync/h3utils"
|
||||
//"github.com/Gleipnir-Technology/nidus-sync/platform/geom"
|
||||
//"github.com/Gleipnir-Technology/nidus-sync/platform/text"
|
||||
//"github.com/Gleipnir-Technology/nidus-sync/stadia"
|
||||
//"github.com/Gleipnir-Technology/nidus-sync/userfile"
|
||||
"github.com/aarondl/opt/omit"
|
||||
//"github.com/aarondl/opt/omitnull"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
// csvParserFunc reads the raw uploaded CSV for a file and returns one model
// per data row (inserting rows as it goes — see makeParseCSV).
type csvParserFunc[T any] = func(context.Context, bob.Tx, *models.FileuploadFile, *models.FileuploadCSV) ([]T, error)

// csvProcessorFunc performs post-parse work (e.g. geocoding for pool lists)
// on the models produced by the matching csvParserFunc.
type csvProcessorFunc[T any] = func(context.Context, bob.Tx, *models.FileuploadFile, *models.FileuploadCSV, []T) error
||||
|
||||
func ProcessJob(ctx context.Context, file_id int32, type_ enums.FileuploadCsvtype) error {
|
||||
var err error
|
||||
switch type_ {
|
||||
case enums.FileuploadCsvtypePoollist:
|
||||
err = processCSV(ctx, file_id, parseCSVPoollist, processCSVPoollist)
|
||||
case enums.FileuploadCsvtypeFlyover:
|
||||
err = processCSV(ctx, file_id, parseCSVFlyover, processCSVFlyover)
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func processCSV[T any](ctx context.Context, file_id int32, parser csvParserFunc[T], processor csvProcessorFunc[T]) error {
|
||||
file, c, err := loadFileAndCSV(ctx, file_id)
|
||||
if err != nil {
|
||||
return fmt.Errorf("load file and csv: %w", err)
|
||||
}
|
||||
txn, err := db.PGInstance.BobDB.BeginTx(ctx, nil)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to start transaction: %w", err)
|
||||
}
|
||||
defer txn.Rollback(ctx)
|
||||
parsed, err := parser(ctx, txn, file, c)
|
||||
if err != nil {
|
||||
return fmt.Errorf("parse file: %w", err)
|
||||
}
|
||||
_, err = psql.Update(
|
||||
um.Table("fileupload.csv"),
|
||||
um.SetCol("rowcount").ToArg(len(parsed)),
|
||||
um.Where(psql.Quote("file_id").EQ(psql.Arg(file_id))),
|
||||
).Exec(ctx, txn)
|
||||
if err != nil {
|
||||
return fmt.Errorf("update csv row: %w", err)
|
||||
}
|
||||
err = processor(ctx, txn, file, c, parsed)
|
||||
if err != nil {
|
||||
return fmt.Errorf("process parsed file: %w", err)
|
||||
}
|
||||
|
||||
file.Update(ctx, txn, &models.FileuploadFileSetter{
|
||||
Status: omit.From(enums.FileuploadFilestatustypeParsed),
|
||||
})
|
||||
log.Info().Int32("file.ID", file.ID).Msg("Set file to parsed")
|
||||
txn.Commit(ctx)
|
||||
return nil
|
||||
}
|
||||
func loadFileAndCSV(ctx context.Context, file_id int32) (*models.FileuploadFile, *models.FileuploadCSV, error) {
|
||||
file, err := models.FindFileuploadFile(ctx, db.PGInstance.BobDB, file_id)
|
||||
if err != nil {
|
||||
return nil, nil, fmt.Errorf("Failed to get file %d from DB: %w", file_id, err)
|
||||
}
|
||||
c, err := models.FindFileuploadCSV(ctx, db.PGInstance.BobDB, file.ID)
|
||||
if err != nil {
|
||||
return nil, nil, fmt.Errorf("Failed to get csv file %d from DB: %w", file.ID, err)
|
||||
}
|
||||
return file, c, nil
|
||||
}
|
||||
|
||||
func addError(ctx context.Context, txn bob.Tx, c *models.FileuploadCSV, row_number int32, column_number int32, msg string) error {
|
||||
r, err := models.FileuploadErrorCSVS.Insert(&models.FileuploadErrorCSVSetter{
|
||||
Col: omit.From(column_number),
|
||||
CSVFileID: omit.From(c.FileID),
|
||||
// ID
|
||||
Line: omit.From(row_number),
|
||||
Message: omit.From(msg),
|
||||
}).One(ctx, txn)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to add error: %w", err)
|
||||
}
|
||||
log.Info().Int32("id", r.ID).Int32("file_id", c.FileID).Str("msg", msg).Int32("row", row_number).Int32("col", column_number).Msg("Created CSV file error")
|
||||
return nil
|
||||
}
|
||||
// addImportError is a placeholder: it only logs the import error for the
// given file rather than persisting it anywhere (note the "Fake" in the
// log message).
func addImportError(file *models.FileuploadFile, err error) {
	log.Debug().Err(err).Int32("file_id", file.ID).Msg("Fake add import error")
}
|
||||
// parseBool interprets s as a boolean. It accepts everything
// strconv.ParseBool accepts, plus the words "yes" and "no" in any case.
func parseBool(s string) (bool, error) {
	lowered := strings.ToLower(s)
	if v, err := strconv.ParseBool(lowered); err == nil {
		return v, nil
	}
	// Handle some of the stuff that strconv doesn't handle
	switch lowered {
	case "yes":
		return true, nil
	case "no":
		return false, nil
	}
	return false, fmt.Errorf("unrecognized '%s'", lowered)
}
|
||||
|
||||
func errorMissingHeader(ctx context.Context, txn bob.Tx, c *models.FileuploadCSV, h headerPoolEnum) error {
|
||||
msg := fmt.Sprintf("The file is missing the '%s' header", h.String())
|
||||
return addError(ctx, txn, c, 0, 0, msg)
|
||||
}
|
||||
307
platform/csv/flyover.go
Normal file
307
platform/csv/flyover.go
Normal file
|
|
@ -0,0 +1,307 @@
|
|||
package csv
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/csv"
|
||||
"fmt"
|
||||
"io"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/Gleipnir-Technology/bob"
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql"
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql/um"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/enums"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/models"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/h3utils"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform/geom"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform/text"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/userfile"
|
||||
"github.com/aarondl/opt/omit"
|
||||
"github.com/aarondl/opt/omitnull"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
// Enum constrains the header-enum types used by the generic CSV parsing
// helpers to integer or string underlying kinds.
type Enum interface {
	~int | ~int8 | ~int16 | ~int32 | ~int64 | ~string
}
|
||||
|
||||
// headerFlyoverEnum identifies the recognized columns of a flyover CSV.
type headerFlyoverEnum int

const (
	headerFlyoverComment headerFlyoverEnum = iota
	headerFlyoverLatitude
	headerFlyoverLongitude
	// headerFlyoverNone is the wildcard assigned to unrecognized columns
	// via the "*" entry in the header map.
	headerFlyoverNone
)

// String returns the canonical CSV header name for e.
func (e headerFlyoverEnum) String() string {
	switch e {
	case headerFlyoverComment:
		return "Comment"
	case headerFlyoverLatitude:
		return "TargetLat"
	case headerFlyoverLongitude:
		return "TargetLon"
	case headerFlyoverNone:
		// BUG FIX: None is a legitimate value but previously fell into the
		// "bad programmer" default alongside truly invalid values.
		return "None"
	default:
		return "bad programmer"
	}
}
|
||||
|
||||
// parseCSVFlyover is the csvParserFunc for flyover CSVs: it maps the
// lower-cased headers "comment", "targetlat" and "targetlon" onto the
// flyover header enum — with "*" catching any other column — and inserts
// one FileuploadFlyoverAerialService per data row via insertFlyover.
var parseCSVFlyover = makeParseCSV(
	makeParseHeaders(map[string]headerFlyoverEnum{
		"comment":   headerFlyoverComment,
		"targetlat": headerFlyoverLatitude,
		"targetlon": headerFlyoverLongitude,
		"*":         headerFlyoverNone,
	}),
	insertFlyover,
)

// insertModelFunc inserts a single parsed CSV row (given its line number,
// per-column header types/names and raw cell values) and returns the
// resulting model.
type insertModelFunc[ModelType any, HeaderType Enum] = func(context.Context, bob.Tx, *models.FileuploadFile, *models.FileuploadCSV, int32, []HeaderType, []string, []string) (ModelType, error)

// parseCSVFunc parses a whole uploaded CSV into models; it matches the
// csvParserFunc shape used by processCSV.
type parseCSVFunc[ModelType any] = func(ctx context.Context, txn bob.Tx, file *models.FileuploadFile, c *models.FileuploadCSV) ([]ModelType, error)
||||
|
||||
func makeParseCSV[ModelType any, HeaderType Enum](parseHeader parseHeaderFunc[HeaderType], insertModel insertModelFunc[ModelType, HeaderType]) parseCSVFunc[ModelType] {
|
||||
return func(ctx context.Context, txn bob.Tx, file *models.FileuploadFile, c *models.FileuploadCSV) ([]ModelType, error) {
|
||||
rows := make([]ModelType, 0)
|
||||
r, err := userfile.NewFileReader(userfile.CollectionCSV, file.FileUUID)
|
||||
if err != nil {
|
||||
return rows, fmt.Errorf("Failed to get filereader for %d: %w", file.ID, err)
|
||||
}
|
||||
reader := csv.NewReader(r)
|
||||
h, err := reader.Read()
|
||||
if err != nil {
|
||||
return rows, fmt.Errorf("Failed to read header of CSV for file %d: %w", file.ID, err)
|
||||
}
|
||||
header_types, header_names := parseHeader(h)
|
||||
/*
|
||||
TODO: Add support for missing headersi
|
||||
missing_headers := missingRequiredHeaders(header_types)
|
||||
for _, mh := range missing_headers {
|
||||
errorMissingHeader(ctx, txn, c, mh)
|
||||
file.Update(ctx, txn, &models.FileuploadFileSetter{
|
||||
Status: omit.From(enums.FileuploadFilestatustypeError),
|
||||
})
|
||||
return pools, nil
|
||||
}
|
||||
*/
|
||||
// Start at 2 because the header is line 1, not line 0
|
||||
line_number := int32(2)
|
||||
for {
|
||||
row, err := reader.Read()
|
||||
if err != nil {
|
||||
if err == io.EOF {
|
||||
return rows, nil
|
||||
}
|
||||
return rows, fmt.Errorf("Failed to read all CSV records for file %d: %w", file.ID, err)
|
||||
}
|
||||
m, err := insertModel(ctx, txn, file, c, line_number, header_types, header_names, row)
|
||||
rows = append(rows, m)
|
||||
line_number = line_number + 1
|
||||
}
|
||||
}
|
||||
}
|
||||
func insertFlyover(ctx context.Context, txn bob.Tx, file *models.FileuploadFile, c *models.FileuploadCSV, line_number int32, header_types []headerFlyoverEnum, header_names []string, row []string) (*models.FileuploadFlyoverAerialService, error) {
|
||||
setter := models.FileuploadFlyoverAerialServiceSetter{
|
||||
Committed: omit.From(false),
|
||||
Condition: omit.From(enums.FileuploadPoolconditiontypeUnknown),
|
||||
Created: omit.From(time.Now()),
|
||||
CreatorID: omit.From(file.CreatorID),
|
||||
CSVFile: omit.From(file.ID),
|
||||
Deleted: omitnull.FromPtr[time.Time](nil),
|
||||
Geom: omitnull.FromPtr[string](nil),
|
||||
H3cell: omitnull.FromPtr[string](nil),
|
||||
// ID - generated
|
||||
OrganizationID: omit.From(file.OrganizationID),
|
||||
}
|
||||
var lat, lng float64
|
||||
var err error
|
||||
for i, value := range row {
|
||||
if value == "" {
|
||||
continue
|
||||
}
|
||||
header_type := header_types[i]
|
||||
switch header_type {
|
||||
case headerFlyoverComment:
|
||||
condition, err := parsePoolCondition(value)
|
||||
if err == nil {
|
||||
setter.Condition = omit.From(condition)
|
||||
} else {
|
||||
addError(ctx, txn, c, int32(line_number), int32(i), fmt.Sprintf("'%s' is not a pool condition that we recognize. It should be one of %s", value, poolConditionValidValues()))
|
||||
continue
|
||||
}
|
||||
case headerFlyoverLatitude:
|
||||
lat, err = strconv.ParseFloat(value, 10)
|
||||
if err != nil {
|
||||
addError(ctx, txn, c, int32(line_number), int32(i), fmt.Sprintf("'%s' is not decimal value", value))
|
||||
continue
|
||||
}
|
||||
case headerFlyoverLongitude:
|
||||
lng, err = strconv.ParseFloat(value, 10)
|
||||
if err != nil {
|
||||
addError(ctx, txn, c, int32(line_number), int32(i), fmt.Sprintf("'%s' is not decimal value", value))
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
flyover, err := models.FileuploadFlyoverAerialServices.Insert(&setter).One(ctx, txn)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Failed to create flyover: %w", err)
|
||||
}
|
||||
cell, err := h3utils.GetCell(lng, lat, 15)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to convert lat %f lng %f to h3 cell", lng, lat)
|
||||
}
|
||||
geom_query := geom.PostgisPointQuery(lng, lat)
|
||||
_, err = psql.Update(
|
||||
um.Table("fileupload.flyover_aerial_service"),
|
||||
um.SetCol("h3cell").ToArg(cell),
|
||||
um.SetCol("geom").To(geom_query),
|
||||
um.Where(psql.Quote("id").EQ(psql.Arg(flyover.ID))),
|
||||
).Exec(ctx, txn)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to update flyover geometry: %w", err)
|
||||
}
|
||||
return flyover, nil
|
||||
}
|
||||
// insertPoollistRow is the insertModelFunc for pool-list CSVs: it builds a
// FileuploadPool setter from one data row — collecting columns mapped to
// headerTag into a tags map — and inserts the pool. Cell-level problems
// (bad condition, phone number or boolean) are recorded via addError and
// the column is skipped; the row itself is still inserted.
func insertPoollistRow(ctx context.Context, txn bob.Tx, file *models.FileuploadFile, c *models.FileuploadCSV, line_number int32, header_types []headerPoolEnum, header_names []string, row []string) (*models.FileuploadPool, error) {
	tags := make(map[string]string, 0)
	// Start with a setter with default values, comment out the required fields to ensure they're set
	setter := models.FileuploadPoolSetter{
		// AddressCity: omit.From(),
		// AddressPostalCode: omit.From(),
		// AddressStreet: omit.From(),
		Committed:              omit.From(false),
		Condition:              omit.From(enums.FileuploadPoolconditiontypeUnknown),
		Created:                omit.From(time.Now()),
		CreatorID:              omit.From(file.CreatorID),
		CSVFile:                omit.From(file.ID),
		Deleted:                omitnull.FromPtr[time.Time](nil),
		Geom:                   omitnull.FromPtr[string](nil),
		H3cell:                 omitnull.FromPtr[string](nil),
		// ID - generated
		IsInDistrict:           omit.From(false),
		IsNew:                  omit.From(true),
		LineNumber:             omit.From(line_number),
		Notes:                  omit.From(""),
		OrganizationID:         omit.From(file.OrganizationID),
		PropertyOwnerName:      omit.From(""),
		PropertyOwnerPhoneE164: omitnull.FromPtr[string](nil),
		ResidentOwned:          omitnull.FromPtr[bool](nil),
		ResidentPhoneE164:      omitnull.FromPtr[string](nil),
		// Can't set this via a Setter
		// Tags: convertToPGData(tags),
	}
	for i, value := range row {
		// Empty cells never override the defaults above.
		if value == "" {
			continue
		}
		header_type := header_types[i]
		switch header_type {
		case headerAddressCity:
			setter.AddressCity = omit.From(value)
		case headerAddressPostalCode:
			setter.AddressPostalCode = omit.From(value)
		case headerAddressStreet:
			setter.AddressStreet = omit.From(value)
		case headerCondition:
			condition, err := parsePoolCondition(value)
			if err == nil {
				setter.Condition = omit.From(condition)
			} else {
				addError(ctx, txn, c, int32(line_number), int32(i), fmt.Sprintf("'%s' is not a pool condition that we recognize. It should be one of %s", value, poolConditionValidValues()))
				continue
			}
		case headerNotes:
			setter.Notes = omit.From(value)
		case headerPropertyOwnerName:
			setter.PropertyOwnerName = omit.From(value)
		case headerPropertyOwnerPhone:
			phone, err := text.ParsePhoneNumber(value)
			if err != nil {
				addError(ctx, txn, c, int32(line_number), int32(i), fmt.Sprintf("'%s' is not a phone number that we recognize. Ideally it should be of the form '+12223334444'", value))
				continue
			}
			text.EnsureInDB(ctx, txn, *phone)
			setter.PropertyOwnerPhoneE164 = omitnull.From(text.PhoneString(*phone))
		case headerResidentOwned:
			boolValue, err := parseBool(value)
			if err != nil {
				addError(ctx, txn, c, int32(line_number), int32(i), fmt.Sprintf("'%s' is not something that we recognize as a true/false value. Please use either 'true' or 'false'", value))
				continue
			}
			setter.ResidentOwned = omitnull.From(boolValue)
		case headerResidentPhone:
			phone, err := text.ParsePhoneNumber(value)
			if err != nil {
				addError(ctx, txn, c, int32(line_number), int32(i), fmt.Sprintf("'%s' is not a phone number that we recognize. Ideally it should be of the form '+12223334444'", value))
				continue
			}
			text.EnsureInDB(ctx, txn, *phone)
			setter.ResidentPhoneE164 = omitnull.From(text.PhoneString(*phone))
		case headerTag:
			// Unmatched-but-wildcarded columns land in the tags map keyed by
			// the normalized header name.
			tags[header_names[i]] = value
		}

	}
	// Tags cannot be set in the struct literal above, so convert and attach
	// the collected map just before inserting.
	setter.Tags = omit.From(db.ConvertToPGData(tags))
	return models.FileuploadPools.Insert(&setter).One(ctx, txn)
}
|
||||
|
||||
// parseHeaderFunc resolves a CSV header row into parallel slices of
// per-column enum types and normalized (trimmed, lower-cased) names.
type parseHeaderFunc[EnumType any] = func(row []string) ([]EnumType, []string)
|
||||
|
||||
func makeParseHeaders[EnumType any](headerToType map[string]EnumType) parseHeaderFunc[EnumType] {
|
||||
return func(row []string) ([]EnumType, []string) {
|
||||
result_enums := make([]EnumType, len(row))
|
||||
result_names := make([]string, len(row))
|
||||
for i, h := range row {
|
||||
ht := strings.TrimSpace(h)
|
||||
hl := strings.ToLower(ht)
|
||||
log.Debug().Str("header", hl).Msg("Saw CSV header")
|
||||
var type_ EnumType
|
||||
type_, ok := headerToType[hl]
|
||||
if !ok {
|
||||
// See if there is a '*' entry which should match anything
|
||||
all_type, ok2 := headerToType["*"]
|
||||
if !ok2 {
|
||||
log.Error().Str("name", hl).Msg("No header type matches column. You should add a '*' to the makeParseHeaders call")
|
||||
continue
|
||||
} else {
|
||||
type_ = all_type
|
||||
}
|
||||
}
|
||||
result_enums[i] = type_
|
||||
result_names[i] = hl
|
||||
}
|
||||
|
||||
return result_enums, result_names
|
||||
}
|
||||
}
|
||||
|
||||
// processCSVFlyover is the csvProcessorFunc for flyover CSVs. Insertion
// already happens row-by-row in insertFlyover, so there is currently no
// post-parse work to do.
func processCSVFlyover(ctx context.Context, txn bob.Tx, file *models.FileuploadFile, c *models.FileuploadCSV, rows []*models.FileuploadFlyoverAerialService) error {
	return nil
}
|
||||
|
||||
// poolConditionAliases maps free-text pool-condition spellings seen in
// uploaded CSVs onto the canonical names accepted by
// enums.FileuploadPoolconditiontype's Scan. Lookups are performed on the
// lower-cased input (see parsePoolCondition).
var poolConditionAliases = map[string]string{
	"covered":       "unknown",
	"dark bottom":   "unknown",
	"no data":       "unknown",
	"empty":         "dry",
	"green":         "green",
	"murky pool":    "murky",
	"putting green": "false pool",
	"questionable":  "unknown",
}
|
||||
|
||||
func parsePoolCondition(c string) (enums.FileuploadPoolconditiontype, error) {
|
||||
var condition enums.FileuploadPoolconditiontype
|
||||
col_l := strings.ToLower(c)
|
||||
col_translated, ok := poolConditionAliases[col_l]
|
||||
if ok {
|
||||
col_l = col_translated
|
||||
}
|
||||
err := condition.Scan(col_l)
|
||||
return condition, err
|
||||
}
|
||||
|
|
@ -5,7 +5,6 @@ import (
|
|||
"encoding/csv"
|
||||
"fmt"
|
||||
"io"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
|
@ -66,47 +65,6 @@ func (e headerPoolEnum) String() string {
|
|||
return "bad programmer"
|
||||
}
|
||||
}
|
||||
func ProcessJob(ctx context.Context, file_id int32) error {
|
||||
file, err := models.FindFileuploadFile(ctx, db.PGInstance.BobDB, file_id)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to get file %d from DB: %w", file_id, err)
|
||||
}
|
||||
txn, err := db.PGInstance.BobDB.BeginTx(ctx, nil)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to start transaction: %w", err)
|
||||
}
|
||||
defer txn.Rollback(ctx)
|
||||
c, err := models.FindFileuploadCSV(ctx, txn, file.ID)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to get csv file %d from DB: %w", file.ID, err)
|
||||
}
|
||||
pools, err := parseFile(ctx, txn, file, c)
|
||||
if err != nil {
|
||||
return fmt.Errorf("parse file: %w", err)
|
||||
}
|
||||
_, err = psql.Update(
|
||||
um.Table("fileupload.csv"),
|
||||
um.SetCol("rowcount").ToArg(len(pools)),
|
||||
um.Where(psql.Quote("file_id").EQ(psql.Arg(file_id))),
|
||||
).Exec(ctx, txn)
|
||||
if err != nil {
|
||||
return fmt.Errorf("update csv row: %w", err)
|
||||
}
|
||||
org, err := models.FindOrganization(ctx, db.PGInstance.BobDB, file.OrganizationID)
|
||||
if err != nil {
|
||||
return fmt.Errorf("get org: %w", err)
|
||||
}
|
||||
err = bulkGeocode(ctx, txn, file, c, pools, org)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("Failure during geocoding")
|
||||
}
|
||||
file.Update(ctx, txn, &models.FileuploadFileSetter{
|
||||
Status: omit.From(enums.FileuploadFilestatustypeParsed),
|
||||
})
|
||||
log.Info().Int32("file.ID", file.ID).Msg("Set file to parsed")
|
||||
txn.Commit(ctx)
|
||||
return nil
|
||||
}
|
||||
func bulkGeocode(ctx context.Context, txn bob.Tx, file *models.FileuploadFile, c *models.FileuploadCSV, pools []*models.FileuploadPool, org *models.Organization) error {
|
||||
if len(pools) == 0 {
|
||||
return nil
|
||||
|
|
@ -215,7 +173,7 @@ func geocode(ctx context.Context, txn bob.Tx, client *stadia.StadiaMaps, job *jo
|
|||
}
|
||||
return nil
|
||||
}
|
||||
func parseFile(ctx context.Context, txn bob.Tx, file *models.FileuploadFile, c *models.FileuploadCSV) ([]*models.FileuploadPool, error) {
|
||||
func parseCSVPoollist(ctx context.Context, txn bob.Tx, file *models.FileuploadFile, c *models.FileuploadCSV) ([]*models.FileuploadPool, error) {
|
||||
pools := make([]*models.FileuploadPool, 0)
|
||||
r, err := userfile.NewFileReader(userfile.CollectionCSV, file.FileUUID)
|
||||
if err != nil {
|
||||
|
|
@ -339,45 +297,18 @@ func parseFile(ctx context.Context, txn bob.Tx, file *models.FileuploadFile, c *
|
|||
line_number = line_number + 1
|
||||
}
|
||||
}
|
||||
func addError(ctx context.Context, txn bob.Tx, c *models.FileuploadCSV, row_number int32, column_number int32, msg string) error {
|
||||
r, err := models.FileuploadErrorCSVS.Insert(&models.FileuploadErrorCSVSetter{
|
||||
Col: omit.From(column_number),
|
||||
CSVFileID: omit.From(c.FileID),
|
||||
// ID
|
||||
Line: omit.From(row_number),
|
||||
Message: omit.From(msg),
|
||||
}).One(ctx, txn)
|
||||
func processCSVPoollist(ctx context.Context, txn bob.Tx, file *models.FileuploadFile, c *models.FileuploadCSV, parsed []*models.FileuploadPool) error {
|
||||
org, err := models.FindOrganization(ctx, db.PGInstance.BobDB, file.OrganizationID)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to add error: %w", err)
|
||||
return fmt.Errorf("get org: %w", err)
|
||||
}
|
||||
err = bulkGeocode(ctx, txn, file, c, parsed, org)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("Failure during geocoding")
|
||||
}
|
||||
log.Info().Int32("id", r.ID).Int32("file_id", c.FileID).Str("msg", msg).Int32("row", row_number).Int32("col", column_number).Msg("Created CSV file error")
|
||||
return nil
|
||||
}
|
||||
func addImportError(file *models.FileuploadFile, err error) {
|
||||
log.Debug().Err(err).Int32("file_id", file.ID).Msg("Fake add import error")
|
||||
}
|
||||
func parseBool(s string) (bool, error) {
|
||||
sl := strings.ToLower(s)
|
||||
boolValue, err := strconv.ParseBool(sl)
|
||||
if err != nil {
|
||||
// Handle some of the stuff that strconv doesn't handle
|
||||
switch sl {
|
||||
case "yes":
|
||||
return true, nil
|
||||
case "no":
|
||||
return false, nil
|
||||
default:
|
||||
return false, fmt.Errorf("unrecognized '%s'", sl)
|
||||
}
|
||||
|
||||
}
|
||||
return boolValue, err
|
||||
}
|
||||
|
||||
func errorMissingHeader(ctx context.Context, txn bob.Tx, c *models.FileuploadCSV, h headerPoolEnum) error {
|
||||
msg := fmt.Sprintf("The file is missing the '%s' header", h.String())
|
||||
return addError(ctx, txn, c, 0, 0, msg)
|
||||
}
|
||||
func maybeAddServiceArea(req *stadia.StructuredGeocodeRequest, org *models.Organization) {
|
||||
/*
|
||||
if org.ServiceAreaXmax.IsNull() ||
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue