Make upload GET an API request
This commit is contained in:
parent
2a92420bbe
commit
ef412b28ec
7 changed files with 140 additions and 119 deletions
|
|
@ -30,6 +30,7 @@ func AddRoutes(r chi.Router) {
|
|||
r.Method("GET", "/signal", authenticatedHandlerJSON(listSignal))
|
||||
r.Method("GET", "/trap-data", auth.NewEnsureAuth(apiTrapData))
|
||||
r.Method("GET", "/tile/{z}/{y}/{x}", auth.NewEnsureAuth(getTile))
|
||||
r.Method("GET", "/upload/{id}", authenticatedHandlerJSON(getUploadByID))
|
||||
r.Method("GET", "/user/self", authenticatedHandlerJSON(getUserSelf))
|
||||
r.Method("GET", "/user/suggestion", authenticatedHandlerJSON(listUserSuggestion))
|
||||
r.Method("GET", "/user", authenticatedHandlerJSON(listUser))
|
||||
|
|
|
|||
24
api/upload.go
Normal file
24
api/upload.go
Normal file
|
|
@ -0,0 +1,24 @@
|
|||
package api
|
||||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
"strconv"
|
||||
|
||||
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform"
|
||||
"github.com/go-chi/chi/v5"
|
||||
//"github.com/rs/zerolog/log"
|
||||
)
|
||||
func getUploadByID(ctx context.Context, r *http.Request, u platform.User, query queryParams) (*platform.UploadPoolDetail, *nhttp.ErrorWithStatus) {
|
||||
file_id_str := chi.URLParam(r, "id")
|
||||
file_id_, err := strconv.ParseInt(file_id_str, 10, 32)
|
||||
if err != nil {
|
||||
return nil, nhttp.NewError("Failed to parse file_id: %w", err)
|
||||
}
|
||||
file_id := int32(file_id_)
|
||||
detail, err := platform.GetUploadDetail(ctx, u.Organization.ID, file_id)
|
||||
if err != nil {
|
||||
return nil, nhttp.NewError("Failed to get pool: %w", err)
|
||||
}
|
||||
return detail, nil
|
||||
}
|
||||
|
|
@ -14,6 +14,7 @@ type contentURLAPI struct {
|
|||
Communication string `json:"communication"`
|
||||
PublicreportMessage string `json:"publicreport_message"`
|
||||
Signal string `json:"signal"`
|
||||
Upload string `json:"upload"`
|
||||
}
|
||||
type contentURLs struct {
|
||||
API contentURLAPI `json:"api"`
|
||||
|
|
@ -44,6 +45,7 @@ func getUserSelf(ctx context.Context, r *http.Request, user platform.User, query
|
|||
Communication: urls.API.Communication,
|
||||
PublicreportMessage: urls.API.Publicreport.Message,
|
||||
Signal: config.MakeURLNidus("/api/signal"),
|
||||
Upload: config.MakeURLNidus("/api/upload"),
|
||||
},
|
||||
Tegola: urls.Tegola,
|
||||
Tile: config.MakeURLNidus("/api/tile/{z}/{y}/{x}"),
|
||||
|
|
|
|||
|
|
@ -2,15 +2,12 @@ package platform
|
|||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/Gleipnir-Technology/bob"
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql"
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql/sm"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/enums"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/models"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform/types"
|
||||
//"github.com/rs/zerolog/log"
|
||||
|
|
@ -21,17 +18,6 @@ type Pool struct {
|
|||
Condition string `db:"condition" json:"condition"`
|
||||
ID int32 `db:"id" json:"-"`
|
||||
}
|
||||
type UploadPoolDetail struct {
|
||||
CountExisting int
|
||||
CountNew int
|
||||
CountOutside int
|
||||
Created time.Time
|
||||
Errors []UploadPoolError
|
||||
ID int32
|
||||
Name string
|
||||
Pools []UploadPoolRow
|
||||
Status string
|
||||
}
|
||||
type UploadPoolError struct {
|
||||
Column uint
|
||||
Line uint
|
||||
|
|
@ -44,90 +30,6 @@ type UploadPoolRow struct {
|
|||
Status string
|
||||
Tags map[string]string
|
||||
}
|
||||
type Upload struct {
|
||||
Created time.Time `db:"created"`
|
||||
ID int32 `db:"id"`
|
||||
Status string `db:"status"`
|
||||
}
|
||||
|
||||
func GetUploadDetail(ctx context.Context, organization_id int32, file_id int32) (UploadPoolDetail, error) {
|
||||
file, err := models.FindFileuploadFile(ctx, db.PGInstance.BobDB, file_id)
|
||||
if err != nil {
|
||||
return UploadPoolDetail{}, fmt.Errorf("Failed to lookup file %d: %w", file_id, err)
|
||||
}
|
||||
csv, err := models.FindFileuploadCSV(ctx, db.PGInstance.BobDB, file_id)
|
||||
if err != nil {
|
||||
return UploadPoolDetail{}, fmt.Errorf("Failed to lookup csv %d: %w", file_id, err)
|
||||
}
|
||||
switch csv.Type {
|
||||
case enums.FileuploadCsvtypeFlyover:
|
||||
return getUploadPoollistDetail(ctx, file)
|
||||
case enums.FileuploadCsvtypePoollist:
|
||||
return getUploadPoollistDetail(ctx, file)
|
||||
}
|
||||
return UploadPoolDetail{}, errors.New("No idea what to do with upload type")
|
||||
}
|
||||
|
||||
// getUploadPoollistDetail builds the UploadPoolDetail for a pool-list style
// CSV upload: the file-level parse errors, one UploadPoolRow per pool row,
// and running counts of new / existing / out-of-district rows.
//
// NOTE(review): the returned struct's ID and Created fields are never set
// here and are left at their zero values — TODO confirm that is intended.
func getUploadPoollistDetail(ctx context.Context, file *models.FileuploadFile) (UploadPoolDetail, error) {
	// Parse errors both as a flat list (file_errors) and grouped by CSV
	// line number (errors_by_line) for per-row attachment below.
	file_errors, errors_by_line, err := errorsByLine(ctx, file)
	if err != nil {
		return UploadPoolDetail{}, fmt.Errorf("get errors by line: %w", err)
	}
	// All pool rows recorded for this uploaded CSV file.
	pool_rows, err := models.FileuploadPools.Query(
		models.SelectWhere.FileuploadPools.CSVFile.EQ(file.ID),
	).All(ctx, db.PGInstance.BobDB)
	if err != nil {
		return UploadPoolDetail{}, fmt.Errorf("Failed to query pools for %d: %w", file.ID, err)
	}

	pools := make([]UploadPoolRow, 0)
	count_existing := 0
	count_new := 0
	count_outside := 0
	status := "unknown"
	for _, r := range pool_rows {
		// Classify the row; "outside" (not in district) overrides
		// "new"/"existing" as the displayed status.
		if r.IsNew {
			count_new = count_new + 1
			status = "new"
		} else {
			count_existing = count_existing + 1
			status = "existing"
		}
		if !r.IsInDistrict {
			count_outside++
			status = "outside"
		}
		tags := db.ConvertFromPGData(r.Tags)
		// Errors recorded for this row's CSV line; rows without errors get
		// an empty (non-nil) slice.
		errors, ok := errors_by_line[r.LineNumber]
		if !ok {
			errors = []UploadPoolError{}
		}
		pools = append(pools, UploadPoolRow{
			Address: types.Address{
				Country: "usa",
				Locality: r.AddressLocality,
				Number: r.AddressNumber,
				PostalCode: r.AddressPostalCode,
				Region: r.AddressRegion,
				Street: r.AddressStreet,
			},
			Condition: r.Condition.String(),
			Errors: errors,
			Status: status,
			Tags: tags,
		})
	}
	return UploadPoolDetail{
		CountExisting: count_existing,
		CountOutside: count_outside,
		CountNew: count_new,
		Errors: file_errors,
		Name: file.Name,
		Pools: pools,
		Status: file.Status.String(),
	}, nil
}
|
||||
func errorsByLine(ctx context.Context, file *models.FileuploadFile) ([]UploadPoolError, map[int32][]UploadPoolError, error) {
|
||||
file_errors := make([]UploadPoolError, 0)
|
||||
errors_by_line := make(map[int32][]UploadPoolError, 0)
|
||||
|
|
|
|||
|
|
@ -228,13 +228,19 @@ func SignalList(ctx context.Context, user User, limit int) ([]*Signal, error) {
|
|||
row.Pool = p
|
||||
row.Report = nil
|
||||
} else if row.Report.ID != 0 {
|
||||
row.Pool = nil
|
||||
report, ok := report_map[row.Report.ID]
|
||||
if !ok {
|
||||
log.Debug().Int32("id", row.Report.ID).Msg("failed to got report")
|
||||
continue
|
||||
return nil, fmt.Errorf("failed to get report %d for %d", row.Report.ID, row.ID)
|
||||
}
|
||||
if report == nil {
|
||||
return nil, fmt.Errorf("got nil for report %d for %d", row.Report.ID, row.ID)
|
||||
}
|
||||
row.Pool = nil
|
||||
row.Report = report
|
||||
} else {
|
||||
log.Debug().Int32("id", row.ID).Msg("has no publicrreport nor pool")
|
||||
row.Pool = nil
|
||||
row.Report = nil
|
||||
}
|
||||
if row.Address.Street == "" {
|
||||
row.Address = nil
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@ package platform
|
|||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
|
|
@ -14,6 +15,7 @@ import (
|
|||
"github.com/Gleipnir-Technology/nidus-sync/db/models"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform/background"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform/file"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform/types"
|
||||
"github.com/aarondl/opt/omit"
|
||||
"github.com/aarondl/opt/omitnull"
|
||||
"github.com/rs/zerolog/log"
|
||||
|
|
@ -32,6 +34,12 @@ const (
|
|||
UploadStatusComplete UploadStatus = iota
|
||||
)
|
||||
|
||||
// Upload is a minimal database view of an uploaded file: its id, creation
// time, and current status string.
type Upload struct {
	Created time.Time `db:"created"`
	ID int32 `db:"id"`
	Status string `db:"status"`
}
|
||||
|
||||
type UploadSummary struct {
|
||||
Created time.Time `db:"created"`
|
||||
Filename string `db:"filename"`
|
||||
|
|
@ -40,6 +48,38 @@ type UploadSummary struct {
|
|||
Status string `db:"status"`
|
||||
Type string `db:"type"`
|
||||
}
|
||||
// UploadPoolDetailCount summarizes how many pool rows in an upload fell
// into each classification bucket.
type UploadPoolDetailCount struct {
	// Existing counts rows matched to a pool already known to the system.
	Existing int `json:"existing"`
	// New counts rows that introduce a previously-unknown pool.
	New int `json:"new"`
	// Outside counts rows whose pool lies outside the district.
	Outside int `json:"outside"`
}
|
||||
// UploadPoolDetail is the JSON view of a single CSV upload: its summary
// counts, file-level errors, and the individual pool rows parsed from it.
type UploadPoolDetail struct {
	Count UploadPoolDetailCount `json:"count"`
	// Created: presumably the upload's creation time; NOTE(review) it is
	// not populated by getUploadPoollistDetail — TODO confirm.
	Created time.Time `json:"created"`
	// Errors are the file-level parse errors recorded for the upload.
	Errors []UploadPoolError `json:"errors"`
	ID int32 `json:"id"`
	Name string `json:"name"`
	// Pools holds one entry per pool row parsed from the CSV.
	Pools []UploadPoolRow `json:"pools"`
	Status string `json:"status"`
}
|
||||
|
||||
func GetUploadDetail(ctx context.Context, organization_id int32, file_id int32) (*UploadPoolDetail, error) {
|
||||
file, err := models.FindFileuploadFile(ctx, db.PGInstance.BobDB, file_id)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Failed to lookup file %d: %w", file_id, err)
|
||||
}
|
||||
csv, err := models.FindFileuploadCSV(ctx, db.PGInstance.BobDB, file_id)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Failed to lookup csv %d: %w", file_id, err)
|
||||
}
|
||||
switch csv.Type {
|
||||
case enums.FileuploadCsvtypeFlyover:
|
||||
return getUploadPoollistDetail(ctx, file)
|
||||
case enums.FileuploadCsvtypePoollist:
|
||||
return getUploadPoollistDetail(ctx, file)
|
||||
}
|
||||
return nil, errors.New("No idea what to do with upload type")
|
||||
}
|
||||
|
||||
func NewUpload(ctx context.Context, u User, upload file.FileUpload, t enums.FileuploadCsvtype) (*Upload, error) {
|
||||
txn, err := db.PGInstance.BobDB.BeginTx(ctx, nil)
|
||||
|
|
@ -148,3 +188,67 @@ func UploadSummaryList(ctx context.Context, org Organization) ([]UploadSummary,
|
|||
}
|
||||
return rows, nil
|
||||
}
|
||||
func getUploadPoollistDetail(ctx context.Context, file *models.FileuploadFile) (*UploadPoolDetail, error) {
|
||||
file_errors, errors_by_line, err := errorsByLine(ctx, file)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("get errors by line: %w", err)
|
||||
}
|
||||
pool_rows, err := models.FileuploadPools.Query(
|
||||
models.SelectWhere.FileuploadPools.CSVFile.EQ(file.ID),
|
||||
).All(ctx, db.PGInstance.BobDB)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Failed to query pools for %d: %w", file.ID, err)
|
||||
}
|
||||
|
||||
pools := make([]UploadPoolRow, 0)
|
||||
count_existing := 0
|
||||
count_new := 0
|
||||
count_outside := 0
|
||||
status := "unknown"
|
||||
for _, r := range pool_rows {
|
||||
if r.IsNew {
|
||||
count_new = count_new + 1
|
||||
status = "new"
|
||||
} else {
|
||||
count_existing = count_existing + 1
|
||||
status = "existing"
|
||||
}
|
||||
if !r.IsInDistrict {
|
||||
count_outside++
|
||||
status = "outside"
|
||||
}
|
||||
tags := db.ConvertFromPGData(r.Tags)
|
||||
// add 2 here because our file lines are 1-indexed and we skip the header line, but we are ranging 0-indexed
|
||||
errors, ok := errors_by_line[r.LineNumber]
|
||||
if !ok {
|
||||
errors = []UploadPoolError{}
|
||||
}
|
||||
pools = append(pools, UploadPoolRow{
|
||||
Address: types.Address{
|
||||
Country: "usa",
|
||||
Locality: r.AddressLocality,
|
||||
Number: r.AddressNumber,
|
||||
PostalCode: r.AddressPostalCode,
|
||||
Region: r.AddressRegion,
|
||||
Street: r.AddressStreet,
|
||||
},
|
||||
Condition: r.Condition.String(),
|
||||
Errors: errors,
|
||||
Status: status,
|
||||
Tags: tags,
|
||||
})
|
||||
}
|
||||
log.Debug().Str("status", file.Status.String()).Int32("id", file.ID).Msg("returning")
|
||||
return &UploadPoolDetail{
|
||||
Count: UploadPoolDetailCount{
|
||||
Existing: count_existing,
|
||||
Outside: count_outside,
|
||||
New: count_new,
|
||||
},
|
||||
Errors: file_errors,
|
||||
ID: file.ID,
|
||||
Name: file.Name,
|
||||
Pools: pools,
|
||||
Status: file.Status.String(),
|
||||
}, nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -55,24 +55,6 @@ func getUploadPoolCustomCreate(ctx context.Context, r *http.Request, u platform.
|
|||
data := contentUploadPoolCustomCreate{}
|
||||
return html.NewResponse("sync/upload-csv-pool-custom.html", data), nil
|
||||
}
|
||||
func getUploadByID(ctx context.Context, r *http.Request, u platform.User) (*html.Response[contentUploadDetail], *nhttp.ErrorWithStatus) {
|
||||
file_id_str := chi.URLParam(r, "id")
|
||||
file_id_, err := strconv.ParseInt(file_id_str, 10, 32)
|
||||
if err != nil {
|
||||
return nil, nhttp.NewError("Failed to parse file_id: %w", err)
|
||||
}
|
||||
file_id := int32(file_id_)
|
||||
detail, err := platform.GetUploadDetail(ctx, u.Organization.ID, file_id)
|
||||
if err != nil {
|
||||
return nil, nhttp.NewError("Failed to get pool: %w", err)
|
||||
}
|
||||
data := contentUploadDetail{
|
||||
CSVFileID: file_id,
|
||||
Organization: u.Organization,
|
||||
Upload: detail,
|
||||
}
|
||||
return html.NewResponse("sync/upload-by-id.html", data), nil
|
||||
}
|
||||
|
||||
type FormUploadCommit struct{}
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue