Massive rework of platform layer user/organization

The goal of this rework is to make it so I can pass around platform.User
instead of a pair of models.Organization and models.User. This is useful
for reasons I kind of forget now, but it started with working on
notifications and ballooned massively from there into refactoring a
number of things that were bugging me.

This also includes a tiny amount of work on server-side events (SSE).

 * background stuff lives inside the platform now, which I need for
   having it push updates through SSE
 * userfile now lives in the platform, under file, so other platform
   functions can safely use it
 * oauth is broken into pieces and moved inside the platform because
   other stuff was already calling it, but badly.
 * notifications go into the platform as well
This commit is contained in:
Eli Ribble 2026-03-12 23:49:16 +00:00
parent 32dcc50c94
commit 44c4f17f32
No known key found for this signature in database
85 changed files with 1492 additions and 1384 deletions

View file

@ -1,7 +1,6 @@
package api
import (
"context"
"encoding/json"
"fmt"
"io"
@ -11,11 +10,11 @@ import (
"strconv"
"time"
"github.com/Gleipnir-Technology/nidus-sync/background"
"github.com/Gleipnir-Technology/nidus-sync/db"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/platform"
"github.com/Gleipnir-Technology/nidus-sync/userfile"
"github.com/Gleipnir-Technology/nidus-sync/platform/background"
"github.com/Gleipnir-Technology/nidus-sync/platform/file"
"github.com/aarondl/opt/omit"
"github.com/aarondl/opt/omitnull"
"github.com/go-chi/chi/v5"
@ -24,7 +23,7 @@ import (
"github.com/rs/zerolog/log"
)
func apiAudioPost(w http.ResponseWriter, r *http.Request, org *models.Organization, u *models.User) {
func apiAudioPost(w http.ResponseWriter, r *http.Request, u platform.User) {
id := chi.URLParam(r, "uuid")
noteUUID, err := uuid.Parse(id)
if err != nil {
@ -43,9 +42,10 @@ func apiAudioPost(w http.ResponseWriter, r *http.Request, org *models.Organizati
http.Error(w, "Failed to decode the payload", http.StatusBadRequest)
return
}
ctx := r.Context()
setter := models.NoteAudioSetter{
Created: omit.From(payload.Created),
CreatorID: omit.From(u.ID),
CreatorID: omit.From(int32(u.ID)),
Deleted: omitnull.FromPtr(payload.Deleted),
DeletorID: omitnull.FromPtr(payload.DeletorID),
Duration: omit.From(payload.Duration),
@ -54,21 +54,21 @@ func apiAudioPost(w http.ResponseWriter, r *http.Request, org *models.Organizati
Version: omit.From(payload.Version),
UUID: omit.From(noteUUID),
}
if err := db.NoteAudioCreate(context.Background(), u.R.Organization, u.ID, setter); err != nil {
if err := platform.NoteAudioCreate(ctx, u, setter); err != nil {
render.Render(w, r, errRender(err))
return
}
w.WriteHeader(http.StatusAccepted)
}
func apiAudioContentPost(w http.ResponseWriter, r *http.Request, org *models.Organization, u *models.User) {
func apiAudioContentPost(w http.ResponseWriter, r *http.Request, u platform.User) {
u_str := chi.URLParam(r, "uuid")
audioUUID, err := uuid.Parse(u_str)
if err != nil {
http.Error(w, "Failed to parse image UUID", http.StatusBadRequest)
return
}
err = userfile.FileContentWrite(r.Body, userfile.CollectionAudioRaw, audioUUID)
err = file.FileContentWrite(r.Body, file.CollectionAudioRaw, audioUUID)
if err != nil {
log.Printf("Failed to write content file: %v", err)
http.Error(w, "failed to write content file", http.StatusInternalServerError)
@ -78,7 +78,7 @@ func apiAudioContentPost(w http.ResponseWriter, r *http.Request, org *models.Org
w.WriteHeader(http.StatusOK)
}
func handleClientIos(w http.ResponseWriter, r *http.Request, org *models.Organization, u *models.User) {
func handleClientIos(w http.ResponseWriter, r *http.Request, u platform.User) {
var sinceStr string
err := r.ParseForm()
if err != nil {
@ -121,69 +121,7 @@ func handleClientIos(w http.ResponseWriter, r *http.Request, org *models.Organiz
}
}
func apiImagePost(w http.ResponseWriter, r *http.Request, org *models.Organization, u *models.User) {
id := chi.URLParam(r, "uuid")
noteUUID, err := uuid.Parse(id)
if err != nil {
http.Error(w, "Failed to decode the uuid", http.StatusBadRequest)
return
}
var payload NoteImagePayload
body, err := ioutil.ReadAll(r.Body)
if err != nil {
http.Error(w, "Failed to read the payload", http.StatusBadRequest)
return
}
if err := json.Unmarshal(body, &payload); err != nil {
//debugSaveRequest(body, err, "Image note POST JSON decode error")
http.Error(w, "Failed to decode the payload", http.StatusBadRequest)
return
}
setter := models.NoteImageSetter{
Created: omit.From(payload.Created),
CreatorID: omit.From(u.ID),
Deleted: omitnull.FromPtr(payload.Deleted),
DeletorID: omitnull.FromPtr(payload.DeletorID),
Version: omit.From(payload.Version),
UUID: omit.From(noteUUID),
}
err = db.NoteImageCreate(context.Background(), u.R.Organization, u.ID, setter)
if err != nil {
render.Render(w, r, errRender(err))
return
}
w.WriteHeader(http.StatusAccepted)
}
func apiImageContentGet(w http.ResponseWriter, r *http.Request, org *models.Organization, u *models.User) {
u_str := chi.URLParam(r, "uuid")
imageUUID, err := uuid.Parse(u_str)
if err != nil {
log.Error().Err(err).Msg("Failed to parse image UUID")
http.Error(w, "Failed to parse image UUID", http.StatusBadRequest)
}
userfile.PublicImageFileToResponse(w, imageUUID)
w.WriteHeader(http.StatusOK)
}
func apiImageContentPost(w http.ResponseWriter, r *http.Request, org *models.Organization, u *models.User) {
u_str := chi.URLParam(r, "uuid")
imageUUID, err := uuid.Parse(u_str)
if err != nil {
log.Error().Err(err).Msg("Failed to parse image UUID")
http.Error(w, "Failed to parse image UUID", http.StatusBadRequest)
}
err = userfile.ImageFileContentWrite(imageUUID, r.Body)
if err != nil {
render.Render(w, r, errRender(err))
return
}
w.WriteHeader(http.StatusOK)
log.Printf("Saved image file %s\n", imageUUID)
fmt.Fprintf(w, "PNG uploaded successfully")
}
func apiMosquitoSource(w http.ResponseWriter, r *http.Request, org *models.Organization, u *models.User) {
func apiMosquitoSource(w http.ResponseWriter, r *http.Request, u platform.User) {
bounds, err := parseBounds(r)
if err != nil {
render.Render(w, r, errRender(err))
@ -208,7 +146,7 @@ func apiMosquitoSource(w http.ResponseWriter, r *http.Request, org *models.Organ
}
}
func apiTrapData(w http.ResponseWriter, r *http.Request, org *models.Organization, u *models.User) {
func apiTrapData(w http.ResponseWriter, r *http.Request, u platform.User) {
bounds, err := parseBounds(r)
if err != nil {
render.Render(w, r, errRender(err))
@ -233,7 +171,7 @@ func apiTrapData(w http.ResponseWriter, r *http.Request, org *models.Organizatio
}
}
func apiServiceRequest(w http.ResponseWriter, r *http.Request, org *models.Organization, u *models.User) {
func apiServiceRequest(w http.ResponseWriter, r *http.Request, u platform.User) {
bounds, err := parseBounds(r)
if err != nil {
render.Render(w, r, errRender(err))

View file

@ -7,8 +7,8 @@ import (
"time"
"github.com/Gleipnir-Technology/nidus-sync/config"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
"github.com/Gleipnir-Technology/nidus-sync/platform"
"github.com/Gleipnir-Technology/nidus-sync/platform/publicreport"
"github.com/Gleipnir-Technology/nidus-sync/platform/types"
"github.com/google/uuid"
@ -35,12 +35,12 @@ type contentListCommunication struct {
Communications []communication `json:"communications"`
}
func listCommunication(ctx context.Context, r *http.Request, org *models.Organization, user *models.User, query queryParams) (*contentListCommunication, *nhttp.ErrorWithStatus) {
nreports, err := publicreport.NuisanceReportForOrganization(ctx, org.ID)
func listCommunication(ctx context.Context, r *http.Request, user platform.User, query queryParams) (*contentListCommunication, *nhttp.ErrorWithStatus) {
nreports, err := publicreport.NuisanceReportForOrganization(ctx, user.Organization.ID())
if err != nil {
return nil, nhttp.NewError("nuisance report query: %w", err)
}
wreports, err := publicreport.WaterReportForOrganization(ctx, org.ID)
wreports, err := publicreport.WaterReportForOrganization(ctx, user.Organization.ID())
if err != nil {
return nil, nhttp.NewError("water report query: %w", err)
}

View file

@ -12,9 +12,7 @@ import (
"github.com/Gleipnir-Technology/bob/dialect/psql"
"github.com/Gleipnir-Technology/bob/dialect/psql/sm"
"github.com/Gleipnir-Technology/nidus-sync/db"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
//"github.com/Gleipnir-Technology/nidus-sync/platform"
"github.com/Gleipnir-Technology/nidus-sync/platform/imagetile"
"github.com/Gleipnir-Technology/nidus-sync/platform"
"github.com/go-chi/chi/v5"
"github.com/paulmach/orb/geojson"
"github.com/rs/zerolog/log"
@ -61,10 +59,7 @@ func getComplianceRequestImagePool(w http.ResponseWriter, r *http.Request) {
psql.Quote("organization.id"),
),
sm.InnerJoin("site").On(
psql.And(
psql.Quote("lead.site_id").EQ(psql.Quote("site.id")),
psql.Quote("lead.site_version").EQ(psql.Quote("site.version")),
),
psql.Quote("lead.site_id").EQ(psql.Quote("site.id")),
),
sm.InnerJoin("parcel").OnEQ(
psql.Quote("site.parcel_id"),
@ -72,9 +67,13 @@ func getComplianceRequestImagePool(w http.ResponseWriter, r *http.Request) {
),
sm.Where(psql.Quote("compliance_report_request").EQ(psql.Arg(code))),
), scan.StructMapper[_Row]())
org, err := models.FindOrganization(ctx, db.PGInstance.BobDB, row.OrganizationID)
org, err := platform.OrganizationByID(ctx, int(row.OrganizationID))
if err != nil {
http.Error(w, "no org", http.StatusInternalServerError)
http.Error(w, "org err", http.StatusInternalServerError)
return
}
if org == nil {
http.Error(w, "no org", http.StatusBadRequest)
return
}
var polygon geojson.Polygon
@ -86,15 +85,15 @@ func getComplianceRequestImagePool(w http.ResponseWriter, r *http.Request) {
}
ring := polygon[0]
p := ring[0]
err = writeImage(ctx, w, org, 19, p[1], p[0])
err = writeImage(ctx, w, *org, 19, p[1], p[0])
if err != nil {
log.Error().Err(err).Msg("write image")
http.Error(w, "failed to write image", http.StatusInternalServerError)
return
}
}
func writeImage(ctx context.Context, w http.ResponseWriter, org *models.Organization, level uint, lat, lng float64) error {
img, err := imagetile.ImageAtPoint(ctx, org, level, lat, lng)
func writeImage(ctx context.Context, w http.ResponseWriter, org platform.Organization, level uint, lat, lng float64) error {
img, err := platform.ImageAtPoint(ctx, org, level, lat, lng)
if err != nil {
return fmt.Errorf("image at point: %w", err)
}

View file

@ -8,7 +8,7 @@ import (
"github.com/Gleipnir-Technology/nidus-sync/db"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/platform"
"github.com/Gleipnir-Technology/nidus-sync/userfile"
"github.com/Gleipnir-Technology/nidus-sync/platform/file"
"github.com/go-chi/chi/v5"
"github.com/go-chi/render"
)
@ -73,7 +73,7 @@ func apiGetDistrictLogo(w http.ResponseWriter, r *http.Request) {
http.Error(w, "Logo not found", http.StatusNotFound)
return
}
userfile.ImageFileContentWriteLogo(w, org.LogoUUID.MustGet())
file.ImageFileContentWriteLogo(w, org.LogoUUID.MustGet())
return
default:
http.Error(w, "Too many organizations, this is a programmer error", http.StatusInternalServerError)

103
api/event.go Normal file
View file

@ -0,0 +1,103 @@
package api
import (
"encoding/json"
"fmt"
"net/http"
"time"
"github.com/Gleipnir-Technology/nidus-sync/platform"
"github.com/rs/zerolog/log"
)
// streamEvents is the SSE endpoint handler for authenticated users.
// TODO(review): currently a stub — nothing is streamed yet; presumably it
// will push platform updates to the client once the background/SSE plumbing
// in this commit is wired up. Confirm intent before relying on this route.
func streamEvents(w http.ResponseWriter, r *http.Request, u platform.User) {
}
// MessageHeartbeat is the payload of a periodic "heartbeat" SSE message,
// carrying the server-side time at which the heartbeat was sent.
type MessageHeartbeat struct {
	Time time.Time `json:"time"`
}
// MessageSSE is the envelope for every server-sent event: Type names the
// kind of event and Content carries its type-specific payload.
type MessageSSE struct {
	Content any    `json:"content"`
	Type    string `json:"type"`
}
// ConnectionSSE represents one live SSE client connection.
type ConnectionSSE struct {
	// chanState is intended to carry state updates to this connection;
	// nothing sends on it yet (the receiving case in sseHandler is
	// commented out).
	chanState chan MessageSSE
	// id is a nanosecond-timestamp string identifying the connection.
	id string
}
// SendMessage serializes m and writes it to w as a single SSE "data:" frame.
// The original body discarded m entirely and sent an empty message with
// Type "heartbeat" instead; it now forwards the caller's message.
func (c *ConnectionSSE) SendMessage(w http.ResponseWriter, m MessageSSE) error {
	return send(w, m)
}
// SendHeartbeat writes a heartbeat frame carrying t to the client, keeping
// the connection warm and letting the client detect staleness.
func (c *ConnectionSSE) SendHeartbeat(w http.ResponseWriter, t time.Time) error {
	msg := MessageSSE{
		Type:    "heartbeat",
		Content: MessageHeartbeat{Time: t},
	}
	return send(w, msg)
}
func send[T any](w http.ResponseWriter, msg T) error {
jsonData, err := json.Marshal(msg)
if err != nil {
return fmt.Errorf("marshaling json: %w", err)
}
// Write in SSE format: "data: <json>\n\n"
_, err = fmt.Fprintf(w, "data: %s\n\n", jsonData)
if err != nil {
return fmt.Errorf("writing SSE message: %w", err)
}
w.(http.Flusher).Flush()
return nil
}
// Webserver tracks the set of live SSE connections so the process can push
// messages to connected clients.
// NOTE(review): connections is mutated from handler goroutines without any
// locking — confirm sseHandler is never served concurrently, or add a mutex.
type Webserver struct {
	connections map[*ConnectionSSE]bool
}
// sseHandler handles the Server-Sent Events connection: it registers the
// client, sends an initial "connected" event, then emits a heartbeat every
// five seconds until the client disconnects.
func (web *Webserver) sseHandler(w http.ResponseWriter, r *http.Request) {
	// Set headers for SSE
	w.Header().Set("Content-Type", "text/event-stream")
	w.Header().Set("Cache-Control", "no-cache")
	w.Header().Set("Connection", "keep-alive")
	w.Header().Set("Access-Control-Allow-Origin", "*")
	// SSE is useless without streaming support; fail fast instead of
	// panicking later on a blind http.Flusher type assertion.
	flusher, ok := w.(http.Flusher)
	if !ok {
		http.Error(w, "streaming unsupported", http.StatusInternalServerError)
		return
	}
	connection := ConnectionSSE{
		chanState: make(chan MessageSSE),
		id:        fmt.Sprintf("%d", time.Now().UnixNano()),
	}
	web.connections[&connection] = true
	// Deregister on exit so the map doesn't grow forever as clients come and
	// go (the original never removed entries). NOTE(review): the map is still
	// accessed without a lock — confirm this handler isn't served
	// concurrently, or guard connections with a mutex.
	defer delete(web.connections, &connection)
	// Send an initial connected event
	fmt.Fprintf(w, "event: connected\ndata: {\"status\": \"connected\", \"time\": \"%s\"}\n\n", time.Now().Format(time.RFC3339))
	flusher.Flush()
	// Keep the connection open with a ticker sending periodic events
	ticker := time.NewTicker(5 * time.Second)
	defer ticker.Stop()
	// Use a channel to detect when the client disconnects
	done := r.Context().Done()
	// Keep connection open until client disconnects
	for {
		var err error
		select {
		case <-done:
			log.Info().Msg("Client closed connection")
			return
		case t := <-ticker.C:
			// Send a heartbeat message
			err = connection.SendHeartbeat(w, t)
			//case state := <-connection.chanState:
			//log.Debug().Msg("Sending new state to connection")
			//err = connection.SendState(w, state)
		}
		if err != nil {
			log.Error().Err(err).Msg("Failed to send state from webserver")
		}
	}
}

View file

@ -8,8 +8,6 @@ import (
"net/http"
"github.com/Gleipnir-Technology/nidus-sync/auth"
"github.com/Gleipnir-Technology/nidus-sync/db"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/html"
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
"github.com/Gleipnir-Technology/nidus-sync/platform"
@ -19,7 +17,7 @@ import (
var decoder = schema.NewDecoder()
type handlerFunctionGet[T any] func(context.Context, *http.Request, *models.Organization, *models.User, queryParams) (*T, *nhttp.ErrorWithStatus)
type handlerFunctionGet[T any] func(context.Context, *http.Request, platform.User, queryParams) (*T, *nhttp.ErrorWithStatus)
type wrappedHandler func(http.ResponseWriter, *http.Request)
type contentAuthenticated[T any] struct {
C T
@ -32,26 +30,17 @@ type ErrorAPI struct {
}
func authenticatedHandlerJSON[T any](f handlerFunctionGet[T]) http.Handler {
return auth.NewEnsureAuth(func(w http.ResponseWriter, r *http.Request, org *models.Organization, u *models.User) {
return auth.NewEnsureAuth(func(w http.ResponseWriter, r *http.Request, u platform.User) {
ctx := r.Context()
org, err := u.Organization().One(ctx, db.PGInstance.BobDB)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
if org == nil {
http.Error(w, "nil org", http.StatusInternalServerError)
return
}
var body []byte
var params queryParams
err = decoder.Decode(&params, r.URL.Query())
err := decoder.Decode(&params, r.URL.Query())
if err != nil {
log.Error().Err(err).Msg("decode query failure")
http.Error(w, "failed to decode query", http.StatusInternalServerError)
return
}
resp, e := f(ctx, r, org, u, params)
resp, e := f(ctx, r, u, params)
w.Header().Set("Content-Type", "application/json")
//log.Info().Str("template", template).Err(e).Msg("handler done")
if e != nil {
@ -74,10 +63,10 @@ func authenticatedHandlerJSON[T any](f handlerFunctionGet[T]) http.Handler {
})
}
type handlerFunctionPost[ReqType any, ResponseType any] func(context.Context, *http.Request, *models.Organization, *models.User, ReqType) (ResponseType, *nhttp.ErrorWithStatus)
type handlerFunctionPost[ReqType any, ResponseType any] func(context.Context, *http.Request, platform.User, ReqType) (ResponseType, *nhttp.ErrorWithStatus)
func authenticatedHandlerJSONPost[ReqType any, ResponseType any](f handlerFunctionPost[ReqType, ResponseType]) http.Handler {
return auth.NewEnsureAuth(func(w http.ResponseWriter, r *http.Request, org *models.Organization, u *models.User) {
return auth.NewEnsureAuth(func(w http.ResponseWriter, r *http.Request, u platform.User) {
w.Header().Set("Content-Type", "application/json")
var req ReqType
body, err := io.ReadAll(r.Body)
@ -91,7 +80,7 @@ func authenticatedHandlerJSONPost[ReqType any, ResponseType any](f handlerFuncti
return
}
ctx := r.Context()
response, e := f(ctx, r, org, u, req)
response, e := f(ctx, r, u, req)
if e != nil {
log.Warn().Int("status", e.Status).Err(e).Str("user message", e.Message).Msg("Responding with an error from api")
body, err = json.Marshal(ErrorAPI{Message: e.Error()})

81
api/image.go Normal file
View file

@ -0,0 +1,81 @@
package api
import (
"encoding/json"
"fmt"
"io/ioutil"
"net/http"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/platform"
"github.com/Gleipnir-Technology/nidus-sync/platform/file"
"github.com/aarondl/opt/omit"
"github.com/aarondl/opt/omitnull"
"github.com/go-chi/chi/v5"
"github.com/go-chi/render"
"github.com/google/uuid"
"github.com/rs/zerolog/log"
)
// apiImagePost records the metadata for a note image uploaded by the client.
// The image's note UUID comes from the URL; the JSON body carries the
// remaining note-image fields. Responds 202 Accepted on success.
func apiImagePost(w http.ResponseWriter, r *http.Request, u platform.User) {
	noteUUID, err := uuid.Parse(chi.URLParam(r, "uuid"))
	if err != nil {
		http.Error(w, "Failed to decode the uuid", http.StatusBadRequest)
		return
	}
	raw, err := ioutil.ReadAll(r.Body)
	if err != nil {
		http.Error(w, "Failed to read the payload", http.StatusBadRequest)
		return
	}
	var payload NoteImagePayload
	if err := json.Unmarshal(raw, &payload); err != nil {
		//debugSaveRequest(raw, err, "Image note POST JSON decode error")
		http.Error(w, "Failed to decode the payload", http.StatusBadRequest)
		return
	}
	setter := models.NoteImageSetter{
		Created:   omit.From(payload.Created),
		CreatorID: omit.From(int32(u.ID)),
		Deleted:   omitnull.FromPtr(payload.Deleted),
		DeletorID: omitnull.FromPtr(payload.DeletorID),
		Version:   omit.From(payload.Version),
		UUID:      omit.From(noteUUID),
	}
	if err := platform.NoteImageCreate(r.Context(), u, setter); err != nil {
		render.Render(w, r, errRender(err))
		return
	}
	w.WriteHeader(http.StatusAccepted)
}
// apiImageContentGet streams the public image content for the UUID in the
// URL to the client.
func apiImageContentGet(w http.ResponseWriter, r *http.Request, u platform.User) {
	u_str := chi.URLParam(r, "uuid")
	imageUUID, err := uuid.Parse(u_str)
	if err != nil {
		log.Error().Err(err).Msg("Failed to parse image UUID")
		http.Error(w, "Failed to parse image UUID", http.StatusBadRequest)
		// Bail out: without a valid UUID there is nothing to serve. The
		// original fell through and served the zero UUID after the 400.
		return
	}
	file.PublicImageFileToResponse(w, imageUUID)
	// NOTE(review): if PublicImageFileToResponse already writes the body,
	// this WriteHeader is superfluous — confirm and possibly drop it.
	w.WriteHeader(http.StatusOK)
}
// apiImageContentPost stores the uploaded image bytes from the request body
// under the UUID given in the URL.
func apiImageContentPost(w http.ResponseWriter, r *http.Request, u platform.User) {
	u_str := chi.URLParam(r, "uuid")
	imageUUID, err := uuid.Parse(u_str)
	if err != nil {
		log.Error().Err(err).Msg("Failed to parse image UUID")
		http.Error(w, "Failed to parse image UUID", http.StatusBadRequest)
		// Bail out: the original fell through and wrote the upload under
		// the zero UUID after responding 400.
		return
	}
	err = file.ImageFileContentWrite(imageUUID, r.Body)
	if err != nil {
		render.Render(w, r, errRender(err))
		return
	}
	w.WriteHeader(http.StatusOK)
	log.Printf("Saved image file %s\n", imageUUID)
	fmt.Fprintf(w, "PNG uploaded successfully")
}

View file

@ -13,6 +13,7 @@ import (
"github.com/Gleipnir-Technology/nidus-sync/db/enums"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
"github.com/Gleipnir-Technology/nidus-sync/platform"
"github.com/Gleipnir-Technology/nidus-sync/platform/geom"
"github.com/aarondl/opt/omit"
"github.com/aarondl/opt/omitnull"
@ -34,12 +35,12 @@ type lead struct {
ID int32 `json:"id"`
}
func listLead(ctx context.Context, r *http.Request, org *models.Organization, user *models.User, query queryParams) (*contentListLead, *nhttp.ErrorWithStatus) {
func listLead(ctx context.Context, r *http.Request, user platform.User, query queryParams) (*contentListLead, *nhttp.ErrorWithStatus) {
return &contentListLead{
Leads: make([]lead, 0),
}, nil
}
func postLeads(ctx context.Context, r *http.Request, org *models.Organization, user *models.User, req createLead) (*createdLead, *nhttp.ErrorWithStatus) {
func postLeads(ctx context.Context, r *http.Request, user platform.User, req createLead) (*createdLead, *nhttp.ErrorWithStatus) {
if len(req.SignalIDs) == 0 {
return nil, nhttp.NewErrorStatus(http.StatusBadRequest, "can't make a lead with no signals")
}
@ -54,13 +55,11 @@ func postLeads(ctx context.Context, r *http.Request, org *models.Organization, u
return nil, nhttp.NewError("start transaction: %w", err)
}
type _Row struct {
ID int32 `db:"site_id"`
Version int32 `db:"site_version"`
ID int32 `db:"site_id"`
}
site, err := bob.One(ctx, db.PGInstance.BobDB, psql.Select(
sm.Columns(
"pool.site_id AS site_id",
"pool.site_version AS site_version",
),
sm.From("signal_pool"),
sm.InnerJoin("pool").OnEQ(
@ -68,13 +67,10 @@ func postLeads(ctx context.Context, r *http.Request, org *models.Organization, u
psql.Quote("pool", "id"),
),
sm.InnerJoin("site").On(
psql.And(
psql.Quote("pool", "site_id").EQ(psql.Quote("site", "id")),
psql.Quote("pool", "site_version").EQ(psql.Quote("site", "version")),
),
psql.Quote("pool", "site_id").EQ(psql.Quote("site", "id")),
),
sm.Where(psql.Quote("signal_pool", "signal_id").EQ(psql.Arg(signal_id))),
sm.Where(psql.Quote("site", "organization_id").EQ(psql.Arg(org.ID))),
sm.Where(psql.Quote("site", "organization_id").EQ(psql.Arg(user.Organization.ID()))),
), scan.StructMapper[_Row]())
if err != nil {
if err.Error() == "sql: no rows in result set" {
@ -85,11 +81,10 @@ func postLeads(ctx context.Context, r *http.Request, org *models.Organization, u
lead, err := models.Leads.Insert(&models.LeadSetter{
Created: omit.From(time.Now()),
Creator: omit.From(user.ID),
Creator: omit.From(int32(user.ID)),
// ID
OrganizationID: omit.From(org.ID),
OrganizationID: omit.From(int32(user.Organization.ID())),
SiteID: omitnull.From(site.ID),
SiteVersion: omitnull.From(site.Version),
Type: omit.From(enums.LeadtypeGreenPool),
}).One(ctx, txn)
if err != nil {

View file

@ -12,6 +12,7 @@ import (
"github.com/Gleipnir-Technology/nidus-sync/db/enums"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
"github.com/Gleipnir-Technology/nidus-sync/platform"
"github.com/aarondl/opt/omit"
"github.com/aarondl/opt/omitnull"
"github.com/rs/zerolog/log"
@ -35,7 +36,7 @@ type createReviewPool struct {
}
type createdReviewPool struct{}
func postReviewPool(ctx context.Context, r *http.Request, org *models.Organization, user *models.User, req createReviewPool) (*createdReviewPool, *nhttp.ErrorWithStatus) {
func postReviewPool(ctx context.Context, r *http.Request, user platform.User, req createReviewPool) (*createdReviewPool, *nhttp.ErrorWithStatus) {
txn, err := db.PGInstance.BobDB.BeginTx(ctx, nil)
if err != nil {
return nil, nhttp.NewError("start txn: %w", err)
@ -43,7 +44,7 @@ func postReviewPool(ctx context.Context, r *http.Request, org *models.Organizati
defer txn.Rollback(ctx)
review_task, err := models.ReviewTasks.Query(
models.SelectWhere.ReviewTasks.ID.EQ(req.TaskID),
models.SelectWhere.ReviewTasks.OrganizationID.EQ(org.ID),
models.SelectWhere.ReviewTasks.OrganizationID.EQ(user.Organization.ID()),
).One(ctx, txn)
if err != nil {
return nil, nhttp.NewErrorStatus(http.StatusNotFound, "review task %d not found", req.TaskID)
@ -56,7 +57,7 @@ func postReviewPool(ctx context.Context, r *http.Request, org *models.Organizati
review_task.Update(ctx, txn, &models.ReviewTaskSetter{
Resolution: omitnull.From(resolution),
Reviewed: omitnull.From(time.Now()),
ReviewerID: omitnull.From(user.ID),
ReviewerID: omitnull.From(int32(user.ID)),
})
review_task_pool, err := models.ReviewTaskPools.Query(
models.SelectWhere.ReviewTaskPools.ReviewTaskID.EQ(review_task.ID),
@ -77,10 +78,10 @@ func postReviewPool(ctx context.Context, r *http.Request, org *models.Organizati
log.Info().Int32("id", review_task.ID).Str("status", req.Status).Msg("committed")
return &createdReviewPool{}, e
}
func discardReviewPool(ctx context.Context, txn bob.Tx, user *models.User, req createReviewPool, review_task_pool *models.ReviewTaskPool) *nhttp.ErrorWithStatus {
func discardReviewPool(ctx context.Context, txn bob.Tx, user platform.User, req createReviewPool, review_task_pool *models.ReviewTaskPool) *nhttp.ErrorWithStatus {
return nil
}
func commitReviewPool(ctx context.Context, txn bob.Tx, user *models.User, req createReviewPool, review_task_pool *models.ReviewTaskPool) *nhttp.ErrorWithStatus {
func commitReviewPool(ctx context.Context, txn bob.Tx, user platform.User, req createReviewPool, review_task_pool *models.ReviewTaskPool) *nhttp.ErrorWithStatus {
if req.Updates == nil {
return nil
}

View file

@ -9,7 +9,6 @@ import (
"github.com/Gleipnir-Technology/bob/dialect/psql"
"github.com/Gleipnir-Technology/bob/dialect/psql/sm"
"github.com/Gleipnir-Technology/nidus-sync/db"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
"github.com/Gleipnir-Technology/nidus-sync/platform"
"github.com/Gleipnir-Technology/nidus-sync/platform/types"
@ -32,7 +31,7 @@ type contentListReviewTaskPool struct {
Total int32 `json:"total"`
}
func listReviewTaskPool(ctx context.Context, r *http.Request, org *models.Organization, user *models.User, query queryParams) (*contentListReviewTaskPool, *nhttp.ErrorWithStatus) {
func listReviewTaskPool(ctx context.Context, r *http.Request, user platform.User, query queryParams) (*contentListReviewTaskPool, *nhttp.ErrorWithStatus) {
limit := 20
if query.Limit != nil {
limit = *query.Limit
@ -45,7 +44,7 @@ func listReviewTaskPool(ctx context.Context, r *http.Request, org *models.Organi
"COUNT(*) AS total",
),
sm.From("review_task"),
sm.Where(psql.Quote("review_task", "organization_id").EQ(psql.Arg(org.ID))),
sm.Where(psql.Quote("review_task", "organization_id").EQ(psql.Arg(user.Organization.ID()))),
sm.Where(psql.Quote("review_task", "reviewed").IsNull()),
), scan.StructMapper[_RowTotal]())
if err != nil {
@ -98,23 +97,20 @@ func listReviewTaskPool(ctx context.Context, r *http.Request, org *models.Organi
psql.Quote("feature", "id"),
),
sm.InnerJoin("site").On(
psql.And(
psql.Quote("feature", "site_id").EQ(psql.Quote("site", "id")),
psql.Quote("feature", "site_version").EQ(psql.Quote("site", "version")),
),
psql.Quote("feature", "site_id").EQ(psql.Quote("site", "id")),
),
sm.InnerJoin("address").OnEQ(
psql.Quote("site", "address_id"),
psql.Quote("address", "id"),
),
sm.Where(psql.Quote("review_task", "organization_id").EQ(psql.Arg(org.ID))),
sm.Where(psql.Quote("review_task", "organization_id").EQ(psql.Arg(user.Organization.ID()))),
sm.Where(psql.Quote("review_task", "reviewed").IsNull()),
sm.Limit(limit),
), scan.StructMapper[_Row]())
if err != nil {
return nil, nhttp.NewError("failed to get review tasks: %w", err)
}
users_by_id, err := platform.UsersByID(ctx, org)
users_by_id, err := platform.UsersByOrg(ctx, user.Organization)
if err != nil {
return nil, nhttp.NewError("users by id: %w", err)
}

View file

@ -14,6 +14,7 @@ func AddRoutes(r chi.Router) {
r.Method("POST", "/audio/{uuid}/content", auth.NewEnsureAuth(apiAudioContentPost))
r.Method("GET", "/client/ios", auth.NewEnsureAuth(handleClientIos))
r.Method("GET", "/communication", authenticatedHandlerJSON(listCommunication))
r.Method("GET", "/events", auth.NewEnsureAuth(streamEvents))
r.Method("POST", "/image/{uuid}", auth.NewEnsureAuth(apiImagePost))
r.Method("GET", "/image/{uuid}/content", auth.NewEnsureAuth(apiImageContentGet))
r.Method("POST", "/image/{uuid}/content", auth.NewEnsureAuth(apiImageContentPost))

View file

@ -9,7 +9,6 @@ import (
"github.com/Gleipnir-Technology/bob/dialect/psql"
"github.com/Gleipnir-Technology/bob/dialect/psql/sm"
"github.com/Gleipnir-Technology/nidus-sync/db"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
"github.com/Gleipnir-Technology/nidus-sync/platform"
"github.com/Gleipnir-Technology/nidus-sync/platform/types"
@ -33,7 +32,7 @@ type contentListSignal struct {
Signals []signal `json:"signals"`
}
func listSignal(ctx context.Context, r *http.Request, org *models.Organization, user *models.User, query queryParams) (*contentListSignal, *nhttp.ErrorWithStatus) {
func listSignal(ctx context.Context, r *http.Request, user platform.User, query queryParams) (*contentListSignal, *nhttp.ErrorWithStatus) {
type _Row struct {
Address types.Address `db:"address"`
Addressed *time.Time `db:"addressed"`
@ -82,16 +81,13 @@ func listSignal(ctx context.Context, r *http.Request, org *models.Organization,
psql.Quote("pool", "id"),
),
sm.InnerJoin("site").On(
psql.And(
psql.Quote("pool", "site_id").EQ(psql.Quote("site", "id")),
psql.Quote("pool", "site_version").EQ(psql.Quote("site", "version")),
),
psql.Quote("pool", "site_id").EQ(psql.Quote("site", "id")),
),
sm.InnerJoin("address").OnEQ(
psql.Quote("site", "address_id"),
psql.Quote("address", "id"),
),
sm.Where(psql.Quote("signal", "organization_id").EQ(psql.Arg(org.ID))),
sm.Where(psql.Quote("signal", "organization_id").EQ(psql.Arg(user.Organization.ID()))),
sm.Where(psql.Quote("signal", "addressed").IsNull()),
sm.Limit(limit),
), scan.StructMapper[_Row]())
@ -105,7 +101,7 @@ func listSignal(ctx context.Context, r *http.Request, org *models.Organization,
if err != nil {
return nil, nhttp.NewError("failed to get signals: %w", err)
}
users_by_id, err := platform.UsersByID(ctx, org)
users_by_id, err := platform.UsersByOrg(ctx, user.Organization)
if err != nil {
return nil, nhttp.NewError("users by id: %w", err)
}

View file

@ -1,27 +1,15 @@
package api
import (
"bytes"
"context"
"fmt"
"io"
"net/http"
"os"
"path/filepath"
"strconv"
"github.com/aarondl/opt/omit"
//"github.com/Gleipnir-Technology/bob"
//"github.com/Gleipnir-Technology/bob/dialect/psql"
"github.com/Gleipnir-Technology/nidus-sync/config"
"github.com/Gleipnir-Technology/nidus-sync/db"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/platform/imagetile"
"github.com/Gleipnir-Technology/nidus-sync/platform"
"github.com/go-chi/chi/v5"
"github.com/rs/zerolog/log"
)
func getTile(w http.ResponseWriter, r *http.Request, org *models.Organization, user *models.User) {
func getTile(w http.ResponseWriter, r *http.Request, user platform.User) {
x_str := chi.URLParam(r, "x")
y_str := chi.URLParam(r, "y")
z_str := chi.URLParam(r, "z")
@ -41,101 +29,10 @@ func getTile(w http.ResponseWriter, r *http.Request, org *models.Organization, u
http.Error(w, "can't parse x as an integer", http.StatusBadRequest)
return
}
err = handleTile(r.Context(), w, org, uint(z), uint(y), uint(x))
err = platform.GetTile(r.Context(), w, user.Organization, uint(z), uint(y), uint(x))
if err != nil {
log.Error().Err(err).Msg("failed to do tile")
http.Error(w, "failed to do tile", http.StatusInternalServerError)
return
}
}
// handleTile writes the raster tile at (z, y, x) for the organization's
// configured ArcGIS map service to w, using a two-level cache: a row in the
// tile_cached_image table plus (for non-empty tiles) a raw file on disk.
//
// Flow:
//  1. Error out if the org has no map service ID configured.
//  2. Query the cache table; on a hit, serve either the placeholder image
//     (empty tiles are recorded with IsEmpty and have no file) or the raw
//     bytes loaded from disk.
//  3. On a miss, fetch the image via imagetile, persist non-placeholder
//     images to disk, insert the cache row, and serve the result.
func handleTile(ctx context.Context, w http.ResponseWriter, org *models.Organization, z, y, x uint) error {
	if org.ArcgisMapServiceID.IsNull() {
		return fmt.Errorf("no map service ID set")
	}
	map_service_id := org.ArcgisMapServiceID.MustGet()
	tile_path := tilePath(map_service_id, z, y, x)
	tile_row, err := models.TileCachedImages.Query(
		models.SelectWhere.TileCachedImages.ArcgisID.EQ(map_service_id),
		models.SelectWhere.TileCachedImages.X.EQ(int32(x)),
		models.SelectWhere.TileCachedImages.Y.EQ(int32(y)),
		models.SelectWhere.TileCachedImages.Z.EQ(int32(z)),
	).One(ctx, db.PGInstance.BobDB)
	if err == nil {
		// Cache hit. Empty tiles were cached without writing a file, so
		// serve the shared placeholder instead of hitting the disk.
		var tile *imagetile.TileRaster
		if tile_row.IsEmpty {
			tile = imagetile.TileRasterPlaceholder()
		} else {
			tile, err = loadTileFromDisk(tile_path)
			if err != nil {
				return fmt.Errorf("load tile from disk: %w", err)
			}
		}
		log.Debug().Uint("z", z).Uint("y", y).Uint("x", x).Bool("is empty", tile_row.IsEmpty).Msg("tile from cache")
		return writeTile(w, tile)
	}
	// NOTE(review): matching "no rows" by error string is fragile; confirm
	// whether errors.Is(err, sql.ErrNoRows) would work with this driver.
	if err.Error() != "sql: no rows in result set" {
		return fmt.Errorf("query db: %w", err)
	}
	// Cache miss: fetch from the upstream map service.
	image, err := imagetile.ImageAtTile(ctx, org, uint(z), uint(y), uint(x))
	if err != nil {
		return fmt.Errorf("image at tile: %w", err)
	}
	// Only real images get a file on disk; placeholders are cheap to rebuild.
	if !image.IsPlaceholder {
		err = saveTileToDisk(image, tile_path)
		if err != nil {
			return fmt.Errorf("save tile: %w", err)
		}
	}
	_, err = models.TileCachedImages.Insert(&models.TileCachedImageSetter{
		ArcgisID: omit.From(map_service_id),
		X:        omit.From(int32(x)),
		Y:        omit.From(int32(y)),
		Z:        omit.From(int32(z)),
		IsEmpty:  omit.From(image.IsPlaceholder),
	}).One(ctx, db.PGInstance.BobDB)
	if err != nil {
		return fmt.Errorf("save to db: %w", err)
	}
	log.Debug().Uint("z", z).Uint("y", y).Uint("x", x).Bool("placeholder", image.IsPlaceholder).Msg("caching tile")
	return writeTile(w, image)
}
// loadTileFromDisk reads a previously cached tile image from tile_path and
// returns it wrapped in a non-placeholder TileRaster.
//
// Uses os.ReadFile instead of the previous hand-rolled Open + io.ReadAll +
// Close sequence: same behavior, less code, and no file handle to leak.
func loadTileFromDisk(tile_path string) (*imagetile.TileRaster, error) {
	img, err := os.ReadFile(tile_path)
	if err != nil {
		return nil, fmt.Errorf("readfile %s: %w", tile_path, err)
	}
	return &imagetile.TileRaster{
		Content:       img,
		IsPlaceholder: false,
	}, nil
}
// saveTileToDisk persists a fetched tile image at tile_path, creating any
// missing parent directories first.
func saveTileToDisk(image *imagetile.TileRaster, tile_path string) error {
	if err := os.MkdirAll(filepath.Dir(tile_path), 0750); err != nil {
		return fmt.Errorf("mkdirall: %w", err)
	}
	if err := os.WriteFile(tile_path, image.Content, 0644); err != nil {
		return fmt.Errorf("write image file: %w", err)
	}
	return nil
}
func tilePath(map_service_id string, z, y, x uint) string {
return fmt.Sprintf("%s/tile-cache/%s/%d/%d/%d.raw", config.FilesDirectory, map_service_id, z, y, x)
}
// writeTile sends the tile image to the client as a PNG, setting the
// Content-Type and Content-Length headers before the body.
//
// Writes image.Content directly with w.Write instead of the previous
// io.Copy(w, bytes.NewBuffer(...)), which allocated a buffer wrapper for no
// benefit, and uses strconv.Itoa rather than fmt.Sprintf for the length.
func writeTile(w http.ResponseWriter, image *imagetile.TileRaster) error {
	w.Header().Set("Content-Type", "image/png")
	w.Header().Set("Content-Length", strconv.Itoa(len(image.Content)))
	if _, err := w.Write(image.Content); err != nil {
		return fmt.Errorf("write tile: %w", err)
	}
	return nil
}

View file

@ -1,22 +0,0 @@
package arcgis
import (
"context"
"github.com/Gleipnir-Technology/bob/dialect/psql/sm"
"github.com/Gleipnir-Technology/nidus-sync/db"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
)
// GetOAuthForUser returns the user's most recently created ArcGIS OAuth token
// row, or (nil, nil) when the user has no tokens at all.
func GetOAuthForUser(ctx context.Context, user *models.User) (*models.ArcgisOauthToken, error) {
	token, err := user.UserOauthTokens(
		sm.OrderBy("created").Desc(),
	).One(ctx, db.PGInstance.BobDB)
	if err == nil {
		return token, nil
	}
	// "No rows" is not an error for this helper; it just means no token yet.
	if err.Error() == "sql: no rows in result set" {
		return nil, nil
	}
	return nil, err
}

View file

@ -8,14 +8,7 @@ import (
"strconv"
"strings"
"github.com/Gleipnir-Technology/bob/dialect/psql"
"github.com/Gleipnir-Technology/bob/dialect/psql/sm"
"github.com/Gleipnir-Technology/nidus-sync/db"
"github.com/Gleipnir-Technology/nidus-sync/db/enums"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/db/sql"
"github.com/Gleipnir-Technology/nidus-sync/debug"
"github.com/aarondl/opt/omit"
"github.com/Gleipnir-Technology/nidus-sync/platform"
"github.com/rs/zerolog/log"
"golang.org/x/crypto/bcrypt"
)
@ -24,10 +17,6 @@ type NoCredentialsError struct{}
func (e NoCredentialsError) Error() string { return "No credentials were present in the request" }
type NoUserError struct{}
func (e NoUserError) Error() string { return "That user does not exist" }
type InvalidCredentials struct{}
func (e InvalidCredentials) Error() string { return "No username with that password exists" }
@ -36,28 +25,28 @@ type InvalidUsername struct{}
func (e InvalidUsername) Error() string { return "That username doesn't exist" }
type AuthenticatedHandler func(http.ResponseWriter, *http.Request, *models.Organization, *models.User)
type AuthenticatedHandler func(http.ResponseWriter, *http.Request, platform.User)
type EnsureAuth struct {
handler AuthenticatedHandler
}
func AddUserSession(r *http.Request, user *models.User) {
func AddUserSession(r *http.Request, user *platform.User) {
id := strconv.Itoa(int(user.ID))
sessionManager.Put(r.Context(), "user_id", id)
sessionManager.Put(r.Context(), "username", user.Username)
}
func GetAuthenticatedUser(r *http.Request) (*models.User, error) {
//user_id := sessionManager.GetInt(r.Context(), "user_id")
user_id_str := sessionManager.GetString(r.Context(), "user_id")
func GetAuthenticatedUser(r *http.Request) (*platform.User, error) {
ctx := r.Context()
user_id_str := sessionManager.GetString(ctx, "user_id")
if user_id_str != "" {
user_id, err := strconv.Atoi(user_id_str)
if err != nil {
return nil, fmt.Errorf("Failed to convert user_id to int: %w", err)
}
username := sessionManager.GetString(r.Context(), "username")
username := sessionManager.GetString(ctx, "username")
if user_id > 0 && username != "" {
return findUser(r.Context(), user_id)
return platform.UserByID(ctx, user_id)
}
}
// If we can't get the user from the session try to get from auth headers
@ -65,7 +54,7 @@ func GetAuthenticatedUser(r *http.Request) (*models.User, error) {
if !ok {
return nil, &NoCredentialsError{}
}
user, err := validateUser(r.Context(), username, password)
user, err := validateUser(ctx, username, password)
if err != nil {
return nil, err
}
@ -82,7 +71,6 @@ func (ea *EnsureAuth) ServeHTTP(w http.ResponseWriter, r *http.Request) {
accept := r.Header.Values("Accept")
offers := []string{"application/json", "text/html"}
ctx := r.Context()
content_type := NegotiateContent(accept, offers)
user, err := GetAuthenticatedUser(r)
if err != nil || user == nil {
@ -92,7 +80,7 @@ func (ea *EnsureAuth) ServeHTTP(w http.ResponseWriter, r *http.Request) {
log.Info().Msg("No credentials present and no session")
w.Header().Set("WWW-Authenticate-Error", "no-credentials")
msg = []byte("Please provide credentials.\n")
} else if _, ok := err.(*NoUserError); ok {
} else if _, ok := err.(*platform.NoUserError); ok {
w.Header().Set("WWW-Authenticate-Error", "invalid-credentials")
msg = []byte("Invalid credentials provided.\n")
} else if _, ok := err.(*InvalidCredentials); ok {
@ -109,15 +97,9 @@ func (ea *EnsureAuth) ServeHTTP(w http.ResponseWriter, r *http.Request) {
w.Write(msg)
return
}
org, err := user.Organization().One(ctx, db.PGInstance.BobDB)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
ea.handler(w, r, org, user)
ea.handler(w, r, *user)
}
func SigninUser(r *http.Request, username string, password string) (*models.User, error) {
func SigninUser(r *http.Request, username string, password string) (*platform.User, error) {
user, err := validateUser(r.Context(), username, password)
if err != nil {
return nil, err
@ -129,62 +111,24 @@ func SigninUser(r *http.Request, username string, password string) (*models.User
return user, nil
}
func SignoutUser(r *http.Request, user *models.User) {
func SignoutUser(r *http.Request, user platform.User) {
sessionManager.Put(r.Context(), "user_id", "")
sessionManager.Put(r.Context(), "username", "")
log.Info().Str("username", user.Username).Int32("user_id", user.ID).Msg("Ended user session")
log.Info().Str("username", user.Username).Int32("user_id", int32(user.ID)).Msg("Ended user session")
}
func SignupUser(ctx context.Context, username string, name string, password string) (*models.User, error) {
passwordHash, err := HashPassword(password)
func SignupUser(ctx context.Context, username string, name string, password string) (*platform.User, error) {
password_hash, err := HashPassword(password)
if err != nil {
return nil, fmt.Errorf("Cannot signup user, failed to create hashed password: %w", err)
}
o_setter := models.OrganizationSetter{
Name: omit.From(fmt.Sprintf("%s's organization", username)),
}
o, err := models.Organizations.Insert(&o_setter).One(ctx, db.PGInstance.BobDB)
u, err := platform.CreateUser(ctx, username, name, password_hash)
if err != nil {
return nil, fmt.Errorf("Failed to create organization: %w", err)
return nil, fmt.Errorf("create user: %s", err)
}
log.Info().Int32("id", o.ID).Msg("Created organization")
u_setter := models.UserSetter{
DisplayName: omit.From(name),
OrganizationID: omit.From(o.ID),
PasswordHash: omit.From(passwordHash),
PasswordHashType: omit.From(enums.HashtypeBcrypt14),
Role: omit.From(enums.UserroleAccountOwner),
Username: omit.From(username),
}
u, err := models.Users.Insert(&u_setter).One(ctx, db.PGInstance.BobDB)
if err != nil {
return nil, fmt.Errorf("Failed to create user: %w", err)
}
log.Info().Int32("id", u.ID).Str("username", u.Username).Msg("Created user")
return u, nil
}
// findUser loads a user (with the organization preloaded) by numeric ID,
// translating the database's "not found" outcomes into a typed NoUserError
// so callers can distinguish missing users from real failures.
func findUser(ctx context.Context, user_id int) (*models.User, error) {
	user, err := models.Users.Query(
		models.Preload.User.Organization(),
		sm.Where(models.Users.Columns.ID.EQ(psql.Arg(user_id))),
	).One(ctx, db.PGInstance.BobDB)
	if err == nil {
		return user, nil
	}
	switch err.Error() {
	case "No such user", "sql: no rows in result set":
		return nil, &NoUserError{}
	default:
		debug.LogErrorTypeInfo(err)
		log.Error().Err(err).Msg("Unrecognized error. This should be updated in the findUser code")
		return nil, err
	}
}
func HashPassword(password string) (string, error) {
bytes, err := bcrypt.GenerateFromPassword([]byte(password), 14)
return string(bytes), err
@ -207,41 +151,22 @@ func validatePassword(password, hash string) bool {
return err == nil
}
func validateUser(ctx context.Context, username string, password string) (*models.User, error) {
func validateUser(ctx context.Context, username string, password string) (*platform.User, error) {
passwordHash, err := HashPassword(password)
if err != nil {
return nil, fmt.Errorf("Failed to hash password: %w", err)
}
result, err := sql.UserByUsername(username).All(ctx, db.PGInstance.BobDB)
user, err := platform.UserByUsername(ctx, username)
if err != nil {
return nil, fmt.Errorf("Failed to query for user: %w", err)
}
switch len(result) {
case 0:
if user == nil {
log.Info().Str("username", username).Str("password", redact(password)).Msg("Invalid username")
return nil, InvalidUsername{}
case 1:
row := result[0]
if !validatePassword(password, row.PasswordHash) {
log.Info().Str("username", username).Str("password", redact(password)).Str("hash", passwordHash).Msg("Invalid password for user")
return nil, InvalidCredentials{}
}
user := models.User{
ID: row.ID,
ArcgisAccessToken: row.ArcgisAccessToken,
ArcgisLicense: row.ArcgisLicense,
ArcgisRefreshToken: row.ArcgisRefreshToken,
ArcgisRefreshTokenExpires: row.ArcgisRefreshTokenExpires,
ArcgisRole: row.ArcgisRole,
DisplayName: row.DisplayName,
Email: row.Email,
OrganizationID: row.OrganizationID,
Username: row.Username,
}
log.Info().Str("username", username).Msg("Validated user")
return &user, nil
default:
return nil, errors.New("More than one matching row, this should be impossible.")
}
if !validatePassword(password, user.PasswordHash) {
log.Info().Str("username", username).Str("password", redact(password)).Str("hash", passwordHash).Msg("Invalid password for user")
return nil, InvalidCredentials{}
}
return user, nil
}

View file

@ -1,118 +0,0 @@
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.
package sql
import (
"context"
_ "embed"
"io"
"iter"
"time"
"github.com/Gleipnir-Technology/bob"
"github.com/Gleipnir-Technology/bob/dialect/psql"
"github.com/Gleipnir-Technology/bob/dialect/psql/dialect"
"github.com/Gleipnir-Technology/bob/orm"
enums "github.com/Gleipnir-Technology/nidus-sync/db/enums"
"github.com/aarondl/opt/null"
"github.com/stephenafamo/scan"
)
//go:embed user_by_username.bob.sql
var formattedQueries_user_by_username string
var userByUsernameSQL = formattedQueries_user_by_username[152:806]
type UserByUsernameQuery = orm.ModQuery[*dialect.SelectQuery, userByUsername, UserByUsernameRow, []UserByUsernameRow, userByUsernameTransformer]
func UserByUsername(Username string) *UserByUsernameQuery {
var expressionTypArgs userByUsername
expressionTypArgs.Username = psql.Arg(Username)
return &UserByUsernameQuery{
Query: orm.Query[userByUsername, UserByUsernameRow, []UserByUsernameRow, userByUsernameTransformer]{
ExecQuery: orm.ExecQuery[userByUsername]{
BaseQuery: bob.BaseQuery[userByUsername]{
Expression: expressionTypArgs,
Dialect: dialect.Dialect,
QueryType: bob.QueryTypeSelect,
},
},
Scanner: func(context.Context, []string) (func(*scan.Row) (any, error), func(any) (UserByUsernameRow, error)) {
return func(row *scan.Row) (any, error) {
var t UserByUsernameRow
row.ScheduleScanByIndex(0, &t.ID)
row.ScheduleScanByIndex(1, &t.ArcgisAccessToken)
row.ScheduleScanByIndex(2, &t.ArcgisLicense)
row.ScheduleScanByIndex(3, &t.ArcgisRefreshToken)
row.ScheduleScanByIndex(4, &t.ArcgisRefreshTokenExpires)
row.ScheduleScanByIndex(5, &t.ArcgisRole)
row.ScheduleScanByIndex(6, &t.DisplayName)
row.ScheduleScanByIndex(7, &t.Email)
row.ScheduleScanByIndex(8, &t.OrganizationID)
row.ScheduleScanByIndex(9, &t.Username)
row.ScheduleScanByIndex(10, &t.PasswordHashType)
row.ScheduleScanByIndex(11, &t.PasswordHash)
row.ScheduleScanByIndex(12, &t.Role)
return &t, nil
}, func(v any) (UserByUsernameRow, error) {
return *(v.(*UserByUsernameRow)), nil
}
},
},
Mod: bob.ModFunc[*dialect.SelectQuery](func(q *dialect.SelectQuery) {
q.AppendSelect(expressionTypArgs.subExpr(7, 577))
q.SetTable(expressionTypArgs.subExpr(583, 595))
q.AppendWhere(expressionTypArgs.subExpr(603, 654))
}),
}
}
type UserByUsernameRow = struct {
ID int32 `db:"id"`
ArcgisAccessToken null.Val[string] `db:"arcgis_access_token"`
ArcgisLicense null.Val[enums.Arcgislicensetype] `db:"arcgis_license"`
ArcgisRefreshToken null.Val[string] `db:"arcgis_refresh_token"`
ArcgisRefreshTokenExpires null.Val[time.Time] `db:"arcgis_refresh_token_expires"`
ArcgisRole null.Val[string] `db:"arcgis_role"`
DisplayName string `db:"display_name"`
Email null.Val[string] `db:"email"`
OrganizationID int32 `db:"organization_id"`
Username string `db:"username"`
PasswordHashType enums.Hashtype `db:"password_hash_type"`
PasswordHash string `db:"password_hash"`
Role enums.Userrole `db:"role"`
}
type userByUsernameTransformer = bob.SliceTransformer[UserByUsernameRow, []UserByUsernameRow]
type userByUsername struct {
Username bob.Expression
}
func (o userByUsername) args() iter.Seq[orm.ArgWithPosition] {
return func(yield func(arg orm.ArgWithPosition) bool) {
if !yield(orm.ArgWithPosition{
Name: "username",
Start: 614,
Stop: 616,
Expression: o.Username,
}) {
return
}
}
}
func (o userByUsername) raw(from, to int) string {
return userByUsernameSQL[from:to]
}
func (o userByUsername) subExpr(from, to int) bob.Expression {
return orm.ArgsToExpression(userByUsernameSQL, from, to, o.args())
}
func (o userByUsername) WriteSQL(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
return o.subExpr(0, len(userByUsernameSQL)).WriteSQL(ctx, w, d, start)
}

View file

@ -1,7 +0,0 @@
-- Code generated by BobGen psql v0.42.5. DO NOT EDIT.
-- This file is meant to be re-generated in place and/or deleted at any time.
-- UserByUsername
SELECT "user_"."id" AS "id", "user_"."arcgis_access_token" AS "arcgis_access_token", "user_"."arcgis_license" AS "arcgis_license", "user_"."arcgis_refresh_token" AS "arcgis_refresh_token", "user_"."arcgis_refresh_token_expires" AS "arcgis_refresh_token_expires", "user_"."arcgis_role" AS "arcgis_role", "user_"."display_name" AS "display_name", "user_"."email" AS "email", "user_"."organization_id" AS "organization_id", "user_"."username" AS "username", "user_"."password_hash_type" AS "password_hash_type", "user_"."password_hash" AS "password_hash", "user_"."role" AS "role" FROM public.user_ WHERE
username = $1 AND
password_hash_type = 'bcrypt-14';

View file

@ -513,10 +513,10 @@
id="map"
organization-id="{{ .Organization.ID }}"
tegola="{{ .URL.Tegola }}"
xmin="{{ .Organization.ServiceAreaXmin.GetOr 0 }}"
ymin="{{ .Organization.ServiceAreaYmin.GetOr 0 }}"
xmax="{{ .Organization.ServiceAreaXmax.GetOr 0 }}"
ymax="{{ .Organization.ServiceAreaYmax.GetOr 0 }}"
xmin="{{ .Organization.ServiceArea.Min.X }}"
ymin="{{ .Organization.ServiceArea.Min.Y }}"
xmax="{{ .Organization.ServiceArea.Max.X }}"
ymax="{{ .Organization.ServiceArea.Max.Y }}"
></map-multipoint>
</div>

12
main.go
View file

@ -12,17 +12,17 @@ import (
"time"
"github.com/Gleipnir-Technology/nidus-sync/auth"
"github.com/Gleipnir-Technology/nidus-sync/background"
"github.com/Gleipnir-Technology/nidus-sync/config"
"github.com/Gleipnir-Technology/nidus-sync/db"
"github.com/Gleipnir-Technology/nidus-sync/html"
"github.com/Gleipnir-Technology/nidus-sync/llm"
"github.com/Gleipnir-Technology/nidus-sync/platform"
"github.com/Gleipnir-Technology/nidus-sync/platform/email"
"github.com/Gleipnir-Technology/nidus-sync/platform/file"
"github.com/Gleipnir-Technology/nidus-sync/platform/geocode"
"github.com/Gleipnir-Technology/nidus-sync/platform/text"
"github.com/Gleipnir-Technology/nidus-sync/rmo"
nidussync "github.com/Gleipnir-Technology/nidus-sync/sync"
"github.com/Gleipnir-Technology/nidus-sync/userfile"
"github.com/getsentry/sentry-go"
sentryhttp "github.com/getsentry/sentry-go/http"
"github.com/getsentry/sentry-go/zerolog"
@ -107,9 +107,9 @@ func main() {
os.Exit(6)
}
err = userfile.CreateDirectories()
err = file.CreateDirectories()
if err != nil {
log.Error().Err(err).Msg("Failed to create userfile directories")
log.Error().Err(err).Msg("Failed to create file directories")
os.Exit(7)
}
@ -143,7 +143,7 @@ func main() {
// Start up background processes
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
background.Start(ctx)
platform.BackgroundStart(ctx)
openai_logger := log.With().Logger()
err = llm.CreateOpenAIClient(ctx, &openai_logger)
@ -176,7 +176,7 @@ func main() {
}
cancel()
background.WaitForExit()
platform.BackgroundWaitForExit()
log.Info().Msg("Shutdown complete")
}

1
platform/audio.go Normal file
View file

@ -0,0 +1 @@
package platform

14
platform/background.go Normal file
View file

@ -0,0 +1,14 @@
package platform
import (
"context"
"github.com/Gleipnir-Technology/nidus-sync/platform/background"
)
// BackgroundStart launches the platform's background workers by delegating to
// the internal background package. Cancelling ctx signals them to stop; pair
// with BackgroundWaitForExit to block until shutdown completes.
func BackgroundStart(ctx context.Context) {
	background.Start(ctx)
}
// BackgroundWaitForExit blocks until the background workers started by
// BackgroundStart have finished shutting down.
func BackgroundWaitForExit() {
	background.WaitForExit()
}

View file

@ -6,7 +6,6 @@ import (
"crypto/rand"
"crypto/sha256"
"encoding/base64"
"encoding/json"
"errors"
"fmt"
"io"
@ -34,7 +33,7 @@ import (
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/db/sql"
"github.com/Gleipnir-Technology/nidus-sync/debug"
"github.com/Gleipnir-Technology/nidus-sync/notification"
"github.com/Gleipnir-Technology/nidus-sync/platform/oauth"
"github.com/aarondl/opt/omit"
"github.com/aarondl/opt/omitnull"
"github.com/alitto/pond/v2"
@ -45,29 +44,25 @@ import (
var syncStatusByOrg map[int32]bool
// When the API responds that the token is now invalidated
type InvalidatedTokenError struct{}
func (e InvalidatedTokenError) Error() string { return "The token has been invalidated by the server" }
// When there is no oauth for an organization
type NoOAuthForOrg struct{}
func (e NoOAuthForOrg) Error() string { return "No oauth available for organization" }
var newOAuthTokenChannel chan struct{}
var CodeVerifier string = "random_secure_string_min_43_chars_long_should_be_stored_in_session"
type OAuthTokenResponse struct {
AccessToken string `json:"access_token"`
ExpiresIn int `json:"expires_in"`
RefreshToken string `json:"refresh_token"`
RefreshTokenExpiresIn int `json:"refresh_token_expires_in"`
SSL bool `json:"ssl"`
Username string `json:"username"`
// HasFieldseekerConnection reports whether at least one ArcGIS OAuth token row
// exists for the given user ID, i.e. whether the user has ever connected a
// Fieldseeker/ArcGIS account.
func HasFieldseekerConnection(ctx context.Context, user_id int32) (bool, error) {
	result, err := models.ArcgisOauthTokens.Query(
		sm.Where(
			models.ArcgisOauthTokens.Columns.UserID.EQ(psql.Arg(user_id)),
		),
	).Exists(ctx, db.PGInstance.BobDB)
	if err != nil {
		return false, err
	}
	return result, nil
}
func GetOAuthForOrg(ctx context.Context, org *models.Organization) (*models.ArcgisOauthToken, error) {
// IsSyncOngoing reports whether a Fieldseeker sync is currently flagged as
// running for the given organization ID (missing entries read as false).
// NOTE(review): syncStatusByOrg is read here without synchronization; if it is
// written from other goroutines this is a data race — confirm.
func IsSyncOngoing(org_id int32) bool {
	return syncStatusByOrg[org_id]
}
func getOAuthForOrg(ctx context.Context, org *models.Organization) (*models.ArcgisOauthToken, error) {
users, err := org.User().All(ctx, db.PGInstance.BobDB)
if err != nil {
return nil, fmt.Errorf("Failed to query all users for org: %w", err)
@ -81,57 +76,7 @@ func GetOAuthForOrg(ctx context.Context, org *models.Organization) (*models.Arcg
return oauth, nil
}
}
return nil, &NoOAuthForOrg{}
}
// HandleOauthAccessCode completes the ArcGIS OAuth2 authorization-code flow:
// it exchanges code for access + refresh tokens, stores them as a new
// ArcgisOauthToken row for user, and kicks off a background fetch of the
// user's ArcGIS account data.
func HandleOauthAccessCode(ctx context.Context, user *models.User, code string) error {
	form := url.Values{
		"grant_type":   []string{"authorization_code"},
		"code":         []string{code},
		"redirect_uri": []string{config.ArcGISOauthRedirectURL()},
	}
	token, err := doTokenRequest(ctx, form)
	if err != nil {
		return fmt.Errorf("Failed to exchange authorization code for token: %w", err)
	}
	// Expiry fields in the token response are seconds-from-now.
	accessExpires := futureUTCTimestamp(token.ExpiresIn)
	refreshExpires := futureUTCTimestamp(token.RefreshTokenExpiresIn)
	setter := models.ArcgisOauthTokenSetter{
		AccessToken:        omit.From(token.AccessToken),
		AccessTokenExpires: omit.From(accessExpires),
		//ArcgisAccountID: omit.From(
		// ArcGIS account/license details are filled in later by
		// updateArcgisUserData; start them out null.
		ArcgisID:            omitnull.FromPtr[string](nil),
		ArcgisLicenseTypeID: omitnull.FromPtr[string](nil),
		Created:             omit.From(time.Now()),
		InvalidatedAt:       omitnull.FromPtr[time.Time](nil),
		RefreshToken:        omit.From(token.RefreshToken),
		RefreshTokenExpires: omit.From(refreshExpires),
		UserID:              omit.From(user.ID),
		Username:            omit.From(token.Username),
	}
	oauth, err := models.ArcgisOauthTokens.Insert(&setter).One(ctx, db.PGInstance.BobDB)
	if err != nil {
		return fmt.Errorf("Failed to save token to database: %w", err)
	}
	// Uses context.Background() so the lookup survives the end of the HTTP
	// request that triggered it — presumably intentional; confirm.
	go updateArcgisUserData(context.Background(), user, oauth)
	return nil
}
// HasFieldseekerConnection reports whether at least one ArcGIS OAuth token row
// exists for the given user, i.e. whether the user has ever connected a
// Fieldseeker/ArcGIS account.
func HasFieldseekerConnection(ctx context.Context, user *models.User) (bool, error) {
	result, err := models.ArcgisOauthTokens.Query(
		sm.Where(
			models.ArcgisOauthTokens.Columns.UserID.EQ(psql.Arg(user.ID)),
		),
	).Exists(ctx, db.PGInstance.BobDB)
	if err != nil {
		return false, err
	}
	return result, nil
}
func IsSyncOngoing(org_id int32) bool {
return syncStatusByOrg[org_id]
return nil, nil
}
// This is a goroutine that is in charge of getting Fieldseeker data and keeping it fresh.
@ -181,10 +126,6 @@ func refreshFieldseekerData(background_ctx context.Context, newOauthCh <-chan st
defer wg.Done()
err := periodicallyExportFieldseeker(workerCtx, org)
if err != nil {
if errors.Is(err, &NoOAuthForOrg{}) {
log.Debug().Int("organization_id", int(org.ID)).Msg("No oauth available for organization, exiting exporter.")
return
}
log.Error().Err(err).Msg("Crashed fieldseeker export goroutine")
}
}()
@ -261,10 +202,6 @@ func extractURLParts(urlString string) (string, []string, error) {
return host, pathParts, nil
}
func futureUTCTimestamp(secondsFromNow int) time.Time {
return time.Now().UTC().Add(time.Duration(secondsFromNow) * time.Second)
}
// Helper function to generate code challenge from code verifier
func generateCodeChallenge(codeVerifier string) string {
hash := sha256.Sum256([]byte(codeVerifier))
@ -279,7 +216,7 @@ func generateCodeVerifier() string {
}
// Find out what we can about this user
func updateArcgisUserData(ctx context.Context, user *models.User, oauth *models.ArcgisOauthToken) {
func UpdateArcgisUserData(ctx context.Context, user *models.User, oauth *models.ArcgisOauthToken) {
client, err := arcgis.NewArcGISAuth(
ctx,
&arcgis.AuthenticatorOAuth{
@ -392,14 +329,14 @@ func updateArcgisUserData(ctx context.Context, user *models.User, oauth *models.
}
maybeCreateWebhook(ctx, fssync)
downloadFieldseekerSchema(ctx, fssync, account.ID)
notification.ClearOauth(ctx, user)
//notification.ClearOauth(ctx, user)
newOAuthTokenChannel <- struct{}{}
}
func NewFieldSeeker(ctx context.Context, oauth *models.ArcgisOauthToken) (*fieldseeker.FieldSeeker, error) {
row, err := sql.OrgByOauthId(oauth.ID).One(ctx, db.PGInstance.BobDB)
func NewFieldSeeker(ctx context.Context, oa *models.ArcgisOauthToken) (*fieldseeker.FieldSeeker, error) {
row, err := sql.OrgByOauthId(oa.ID).One(ctx, db.PGInstance.BobDB)
if err != nil {
return nil, fmt.Errorf("Failed to get org ID from oauth %d: %w", oauth.ID, err)
return nil, fmt.Errorf("Failed to get org ID from oauth %d: %w", oa.ID, err)
}
// The URL for fieldseeker should be something like
// https://foo.arcgis.com/123abc/arcgis/rest/services/FieldSeekerGIS/FeatureServer
@ -415,17 +352,17 @@ func NewFieldSeeker(ctx context.Context, oauth *models.ArcgisOauthToken) (*field
ar, err := arcgis.NewArcGISAuth(
ctx,
arcgis.AuthenticatorOAuth{
AccessToken: oauth.AccessToken,
AccessTokenExpires: oauth.AccessTokenExpires,
RefreshToken: oauth.RefreshToken,
RefreshTokenExpires: oauth.RefreshTokenExpires,
AccessToken: oa.AccessToken,
AccessTokenExpires: oa.AccessTokenExpires,
RefreshToken: oa.RefreshToken,
RefreshTokenExpires: oa.RefreshTokenExpires,
},
)
if err != nil {
if errors.Is(err, arcgis.ErrorInvalidAuthToken) {
return nil, InvalidatedTokenError{}
return nil, oauth.InvalidatedTokenError{}
} else if errors.Is(err, arcgis.ErrorInvalidRefreshToken) {
return nil, InvalidatedTokenError{}
return nil, oauth.InvalidatedTokenError{}
}
return nil, fmt.Errorf("Failed to create ArcGIS client: %w", err)
}
@ -617,16 +554,17 @@ func periodicallyExportFieldseeker(ctx context.Context, org *models.Organization
return nil
case <-pollTicker.C:
pollTicker = time.NewTicker(15 * time.Minute)
oauth, err := GetOAuthForOrg(ctx, org)
oa, err := getOAuthForOrg(ctx, org)
if err != nil {
return fmt.Errorf("Failed to get oauth for org: %w", err)
}
fssync, err := NewFieldSeeker(
ctx,
oauth,
)
if oa == nil {
log.Debug().Int32("org.id", org.ID).Msg("No oauth for org")
continue
}
fssync, err := NewFieldSeeker(ctx, oa)
if err != nil {
if errors.Is(err, &InvalidatedTokenError{}) {
if errors.Is(err, &oauth.InvalidatedTokenError{}) {
log.Info().Int32("org", org.ID).Msg("oauth token for org is invalid, waiting for refresh")
continue
}
@ -723,39 +661,39 @@ func logPermissions(ctx context.Context, fssync *fieldseeker.FieldSeeker) {
}
}
func maintainOAuth(ctx context.Context, oauth *models.ArcgisOauthToken) error {
func maintainOAuth(ctx context.Context, aot *models.ArcgisOauthToken) error {
for {
// Refresh from the database
oauth, err := models.FindArcgisOauthToken(ctx, db.PGInstance.BobDB, oauth.ID)
oa, err := models.FindArcgisOauthToken(ctx, db.PGInstance.BobDB, aot.ID)
if err != nil {
return fmt.Errorf("Failed to update oauth token from database: %w", err)
}
var accessTokenDelay time.Duration
if oauth.AccessTokenExpires.Before(time.Now()) || time.Until(oauth.AccessTokenExpires) < (3*time.Second) {
if oa.AccessTokenExpires.Before(time.Now()) || time.Until(oa.AccessTokenExpires) < (3*time.Second) {
accessTokenDelay = time.Second
} else {
accessTokenDelay = time.Until(oauth.AccessTokenExpires) - (3 * time.Second)
accessTokenDelay = time.Until(oa.AccessTokenExpires) - (3 * time.Second)
}
var refreshTokenDelay time.Duration
if oauth.RefreshTokenExpires.Before(time.Now()) || time.Until(oauth.RefreshTokenExpires) < (3*time.Second) {
if oa.RefreshTokenExpires.Before(time.Now()) || time.Until(oa.RefreshTokenExpires) < (3*time.Second) {
refreshTokenDelay = time.Second
} else {
refreshTokenDelay = time.Until(oauth.RefreshTokenExpires) - (3 * time.Second)
refreshTokenDelay = time.Until(oa.RefreshTokenExpires) - (3 * time.Second)
}
log.Info().Int("id", int(oauth.ID)).Float64("seconds", accessTokenDelay.Seconds()).Msg("Need to refresh access token")
log.Info().Int("id", int(oauth.ID)).Float64("seconds", refreshTokenDelay.Seconds()).Msg("Need to refresh refresh token")
log.Info().Int("id", int(oa.ID)).Float64("seconds", accessTokenDelay.Seconds()).Msg("Need to refresh access token")
log.Info().Int("id", int(oa.ID)).Float64("seconds", refreshTokenDelay.Seconds()).Msg("Need to refresh refresh token")
accessTokenTicker := time.NewTicker(accessTokenDelay)
refreshTokenTicker := time.NewTicker(refreshTokenDelay)
select {
case <-ctx.Done():
return nil
case <-accessTokenTicker.C:
err := refreshAccessToken(ctx, oauth)
err := oauth.RefreshAccessToken(ctx, oa)
if err != nil {
return fmt.Errorf("Failed to refresh access token: %w", err)
}
case <-refreshTokenTicker.C:
err := refreshRefreshToken(ctx, oauth)
err := oauth.RefreshRefreshToken(ctx, oa)
if err != nil {
return fmt.Errorf("Failed to maintain refresh token: %w", err)
}
@ -774,126 +712,22 @@ func markTokenFailed(ctx context.Context, oauth *models.ArcgisOauthToken) {
if err != nil {
log.Error().Str("err", err.Error()).Msg("Failed to mark token failed")
}
user, err := models.FindUser(ctx, db.PGInstance.BobDB, oauth.UserID)
if err != nil {
log.Error().Str("err", err.Error()).Msg("Failed to get oauth user")
return
}
notification.NotifyOauthInvalid(ctx, user)
/*
user, err := models.FindUser(ctx, db.PGInstance.BobDB, oauth.UserID)
if err != nil {
log.Error().Str("err", err.Error()).Msg("Failed to get oauth user")
return
}
notification.NotifyOauthInvalid(ctx, user)
*/
log.Info().Int("id", int(oauth.ID)).Msg("Marked oauth token invalid")
}
// refreshAccessToken exchanges the stored refresh token for a fresh access
// token ("refresh_token" grant) and persists the new access token, its
// expiry, and the returned username on the oauth row.
func refreshAccessToken(ctx context.Context, oauth *models.ArcgisOauthToken) error {
	form := url.Values{
		"grant_type":    []string{"refresh_token"},
		"client_id":     []string{config.ClientID},
		"refresh_token": []string{oauth.RefreshToken},
	}
	token, err := doTokenRequest(ctx, form)
	if err != nil {
		return fmt.Errorf("Failed to handle request: %w", err)
	}
	// ExpiresIn is seconds-from-now; store an absolute UTC timestamp.
	accessExpires := futureUTCTimestamp(token.ExpiresIn)
	setter := models.ArcgisOauthTokenSetter{
		AccessToken:        omit.From(token.AccessToken),
		AccessTokenExpires: omit.From(accessExpires),
		Username:           omit.From(token.Username),
	}
	err = oauth.Update(ctx, db.PGInstance.BobDB, &setter)
	if err != nil {
		return fmt.Errorf("Failed to update oauth in database: %w", err)
	}
	log.Info().Int("oauth token id", int(oauth.ID)).Msg("Updated oauth token")
	return nil
}
// refreshRefreshToken exchanges the current refresh token for a new one
// ("exchange_refresh_token" grant) and persists the new refresh token and
// expiry on the oauth row.
func refreshRefreshToken(ctx context.Context, oauth *models.ArcgisOauthToken) error {
	form := url.Values{
		"grant_type":    []string{"exchange_refresh_token"},
		"redirect_uri":  []string{config.ArcGISOauthRedirectURL()},
		"refresh_token": []string{oauth.RefreshToken},
	}
	token, err := doTokenRequest(ctx, form)
	if err != nil {
		return fmt.Errorf("Failed to handle request: %w", err)
	}
	// NOTE(review): this uses token.ExpiresIn (which refreshAccessToken
	// treats as the ACCESS token lifetime) for the refresh token's expiry,
	// rather than token.RefreshTokenExpiresIn — confirm against ArcGIS's
	// exchange_refresh_token response semantics; this may be a bug.
	refreshExpires := futureUTCTimestamp(token.ExpiresIn)
	setter := models.ArcgisOauthTokenSetter{
		RefreshToken:        omit.From(token.RefreshToken),
		RefreshTokenExpires: omit.From(refreshExpires),
		Username:            omit.From(token.Username),
	}
	err = oauth.Update(ctx, db.PGInstance.BobDB, &setter)
	if err != nil {
		return fmt.Errorf("Failed to update oauth in database: %w", err)
	}
	log.Info().Int("oauth token id", int(oauth.ID)).Msg("Updated oauth token")
	return nil
}
func newTimestampedFilename(prefix, suffix string) string {
timestamp := time.Now().Format("20060102_150405") // YYYYMMDD_HHMMSS format
return prefix + timestamp + suffix
}
// doTokenRequest POSTs form (plus the configured client_id) to the ArcGIS
// OAuth2 token endpoint and decodes the JSON response.
//
// Fixes over the previous version:
//   - the request is built with http.NewRequestWithContext so caller
//     cancellation/deadlines on ctx actually apply (ctx was previously
//     accepted but ignored for the HTTP call);
//   - the io.ReadAll error is now checked on the success path too (it was
//     previously only inspected when the status was >= 400).
//
// Returns InvalidatedTokenError when ArcGIS reports code 498 with an
// "invalidated refresh_token" description.
func doTokenRequest(ctx context.Context, form url.Values) (*OAuthTokenResponse, error) {
	form.Set("client_id", config.ClientID)
	baseURL := "https://www.arcgis.com/sharing/rest/oauth2/token/"
	req, err := http.NewRequestWithContext(ctx, "POST", baseURL, strings.NewReader(form.Encode()))
	if err != nil {
		return nil, fmt.Errorf("Failed to create request: %w", err)
	}
	req.Header.Add("Content-Type", "application/x-www-form-urlencoded")
	client := http.Client{}
	log.Info().Str("url", req.URL.String()).Msg("POST")
	resp, err := client.Do(req)
	if err != nil {
		return nil, fmt.Errorf("Failed to do request: %w", err)
	}
	defer resp.Body.Close()
	bodyBytes, readErr := io.ReadAll(resp.Body)
	log.Info().Int("status", resp.StatusCode).Msg("Token request")
	if resp.StatusCode >= http.StatusBadRequest {
		if readErr != nil {
			return nil, fmt.Errorf("Got status code %d and failed to read response body: %w", resp.StatusCode, readErr)
		}
		bodyString := string(bodyBytes)
		var errorResp arcgis.ErrorResponse
		if err := json.Unmarshal(bodyBytes, &errorResp); err == nil {
			if errorResp.Error.Code == 498 && errorResp.Error.Description == "invalidated refresh_token" {
				return nil, InvalidatedTokenError{}
			}
			return nil, fmt.Errorf("API response JSON error: %d: %d %s", resp.StatusCode, errorResp.Error.Code, errorResp.Error.Description)
		}
		return nil, fmt.Errorf("API returned error status %d: %s", resp.StatusCode, bodyString)
	}
	if readErr != nil {
		return nil, fmt.Errorf("Failed to read response body: %w", readErr)
	}
	var tokenResponse OAuthTokenResponse
	err = json.Unmarshal(bodyBytes, &tokenResponse)
	if err != nil {
		return nil, fmt.Errorf("Failed to unmarshal JSON: %w", err)
	}
	// Just because we got a 200-level status code doesn't mean it worked.
	// Experience has taught us that we can get errors without anything
	// indicated in the headers or the status code.
	if tokenResponse == (OAuthTokenResponse{}) {
		var errorResponse arcgis.ErrorResponse
		err = json.Unmarshal(bodyBytes, &errorResponse)
		if err != nil {
			return nil, fmt.Errorf("Failed to unmarshal error JSON: %w", err)
		}
		if errorResponse.Error.Code > 0 {
			return nil, errorResponse.AsError(ctx)
		}
	}
	// NOTE(review): this logs full access and refresh token values at info
	// level — consider redacting before these reach log storage.
	log.Info().Str("refresh token", tokenResponse.RefreshToken).Str("access token", tokenResponse.AccessToken).Int("access expires", tokenResponse.ExpiresIn).Int("refresh expires", tokenResponse.RefreshTokenExpiresIn).Msg("Oauth token acquired")
	return &tokenResponse, nil
}
func logResponseHeaders(resp *http.Response) {
if resp == nil {
log.Info().Msg("Response is nil")

View file

@ -4,7 +4,7 @@ import (
"context"
"fmt"
"github.com/Gleipnir-Technology/nidus-sync/userfile"
"github.com/Gleipnir-Technology/nidus-sync/platform/subprocess"
"github.com/google/uuid"
"github.com/rs/zerolog/log"
)
@ -53,13 +53,13 @@ func enqueueAudioJob(job jobAudio) {
func processAudioFile(audioUUID uuid.UUID) error {
// Normalize audio
err := userfile.NormalizeAudio(audioUUID)
err := subprocess.NormalizeAudio(audioUUID)
if err != nil {
return fmt.Errorf("failed to normalize audio %s: %v", audioUUID, err)
}
// Transcode to OGG
err = userfile.TranscodeToOgg(audioUUID)
err = subprocess.TranscodeToOgg(audioUUID)
if err != nil {
return fmt.Errorf("failed to transcode audio %s to OGG: %v", audioUUID, err)
}

View file

@ -9,7 +9,6 @@ import (
"os"
"github.com/Gleipnir-Technology/nidus-sync/config"
"github.com/Gleipnir-Technology/nidus-sync/db"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/label-studio"
"github.com/Gleipnir-Technology/nidus-sync/minio"
@ -98,13 +97,15 @@ func createLabelStudioClient() (*labelstudio.Client, error) {
return labelStudioClient, nil
}
// noteAudioGetLatest is a stub left during the platform refactor: it always
// returns (nil, nil). Callers MUST nil-check the result until this is
// reimplemented (the previous db.NoteAudioGetLatest call was removed).
func noteAudioGetLatest(ctx context.Context, uuid string) (*models.NoteAudio, error) {
	return nil, nil
}
func processLabelTask(ctx context.Context, minioClient *minio.Client, minioBucket string, labelStudioClient *labelstudio.Client, project *labelstudio.Project, job jobLabelStudio) error {
customer := os.Getenv("CUSTOMER")
if customer == "" {
return errors.New("You must specify a CUSTOMER env var")
}
note, err := db.NoteAudioGetLatest(ctx, job.UUID.String())
note, err := noteAudioGetLatest(ctx, job.UUID.String())
if err != nil {
return errors.New(fmt.Sprintf("Failed to get note %s", note.UUID))
}

24
platform/communication.go Normal file
View file

@ -0,0 +1,24 @@
package platform
import (
"context"
"fmt"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/platform/publicreport"
)
// NotificationCount returns the total number of public reports (nuisance +
// water) pending for the organization. The user parameter is not read here;
// it is kept for signature stability with the other platform entry points.
func NotificationCount(ctx context.Context, org *models.Organization, user *models.User) (result uint, err error) {
	nuisance, err := publicreport.NuisanceReportForOrganizationCount(ctx, org.ID)
	if err != nil {
		return 0, fmt.Errorf("nuisance report query: %w", err)
	}
	water, err := publicreport.WaterReportForOrganizationCount(ctx, org.ID)
	if err != nil {
		return 0, fmt.Errorf("water report query: %w", err)
	}
	return nuisance + water, nil
}

View file

@ -113,7 +113,6 @@ func JobCommit(ctx context.Context, file_id int32) error {
feature, err = models.Features.Query(
models.SelectWhere.Features.OrganizationID.EQ(org.ID),
models.SelectWhere.Features.SiteID.EQ(site.ID),
models.SelectWhere.Features.SiteVersion.EQ(site.Version),
).One(ctx, txn)
if err != nil {
if err.Error() != "sql: no rows in result set" {
@ -125,7 +124,6 @@ func JobCommit(ctx context.Context, file_id int32) error {
//ID: row.Address,
OrganizationID: omit.From(org.ID),
SiteID: omit.From(site.ID),
SiteVersion: omit.From(site.Version),
}).One(ctx, txn)
if err != nil {
return fmt.Errorf("insert feature: %w", err)

View file

@ -16,8 +16,8 @@ import (
"github.com/Gleipnir-Technology/nidus-sync/db/enums"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/h3utils"
"github.com/Gleipnir-Technology/nidus-sync/platform/file"
"github.com/Gleipnir-Technology/nidus-sync/platform/geom"
"github.com/Gleipnir-Technology/nidus-sync/userfile"
"github.com/aarondl/opt/omit"
"github.com/aarondl/opt/omitnull"
"github.com/rs/zerolog/log"
@ -80,19 +80,19 @@ var parseCSVFlyover = makeParseCSV(
)
type insertModelFunc[ModelType any, HeaderType Enum] = func(context.Context, bob.Tx, *models.FileuploadFile, *models.FileuploadCSV, int32, []HeaderType, []string, []string) (ModelType, error)
type parseCSVFunc[ModelType any] = func(ctx context.Context, txn bob.Tx, file *models.FileuploadFile, c *models.FileuploadCSV) ([]ModelType, error)
type parseCSVFunc[ModelType any] = func(ctx context.Context, txn bob.Tx, f *models.FileuploadFile, c *models.FileuploadCSV) ([]ModelType, error)
func makeParseCSV[ModelType any, HeaderType Enum](parseHeader parseHeaderFunc[HeaderType], insertModel insertModelFunc[ModelType, HeaderType]) parseCSVFunc[ModelType] {
return func(ctx context.Context, txn bob.Tx, file *models.FileuploadFile, c *models.FileuploadCSV) ([]ModelType, error) {
return func(ctx context.Context, txn bob.Tx, f *models.FileuploadFile, c *models.FileuploadCSV) ([]ModelType, error) {
rows := make([]ModelType, 0)
r, err := userfile.NewFileReader(userfile.CollectionCSV, file.FileUUID)
r, err := file.NewFileReader(file.CollectionCSV, f.FileUUID)
if err != nil {
return rows, fmt.Errorf("Failed to get filereader for %d: %w", file.ID, err)
return rows, fmt.Errorf("Failed to get filereader for %d: %w", f.ID, err)
}
reader := csv.NewReader(r)
h, err := reader.Read()
if err != nil {
return rows, fmt.Errorf("Failed to read header of CSV for file %d: %w", file.ID, err)
return rows, fmt.Errorf("Failed to read header of CSV for file %d: %w", f.ID, err)
}
header_types, header_names := parseHeader(h)
/*
@ -114,9 +114,9 @@ func makeParseCSV[ModelType any, HeaderType Enum](parseHeader parseHeaderFunc[He
if err == io.EOF {
return rows, nil
}
return rows, fmt.Errorf("Failed to read all CSV records for file %d: %w", file.ID, err)
return rows, fmt.Errorf("Failed to read all CSV records for file %d: %w", f.ID, err)
}
m, err := insertModel(ctx, txn, file, c, line_number, header_types, header_names, row)
m, err := insertModel(ctx, txn, f, c, line_number, header_types, header_names, row)
if err != nil {
return rows, fmt.Errorf("insert models: %w", err)
}

View file

@ -16,12 +16,12 @@ import (
"github.com/Gleipnir-Technology/nidus-sync/db"
"github.com/Gleipnir-Technology/nidus-sync/db/enums"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/platform/file"
"github.com/Gleipnir-Technology/nidus-sync/platform/geocode"
"github.com/Gleipnir-Technology/nidus-sync/platform/geom"
"github.com/Gleipnir-Technology/nidus-sync/platform/text"
"github.com/Gleipnir-Technology/nidus-sync/platform/types"
"github.com/Gleipnir-Technology/nidus-sync/stadia"
"github.com/Gleipnir-Technology/nidus-sync/userfile"
"github.com/aarondl/opt/omit"
"github.com/aarondl/opt/omitnull"
"github.com/rs/zerolog/log"
@ -155,22 +155,22 @@ func geocodePool(ctx context.Context, txn bob.Tx, client *stadia.StadiaMaps, job
}
return nil
}
func parseCSVPoollist(ctx context.Context, txn bob.Tx, file *models.FileuploadFile, c *models.FileuploadCSV) ([]*models.FileuploadPool, error) {
func parseCSVPoollist(ctx context.Context, txn bob.Tx, f *models.FileuploadFile, c *models.FileuploadCSV) ([]*models.FileuploadPool, error) {
pools := make([]*models.FileuploadPool, 0)
r, err := userfile.NewFileReader(userfile.CollectionCSV, file.FileUUID)
r, err := file.NewFileReader(file.CollectionCSV, f.FileUUID)
if err != nil {
return pools, fmt.Errorf("Failed to get filereader for %d: %w", file.ID, err)
return pools, fmt.Errorf("Failed to get filereader for %d: %w", f.ID, err)
}
reader := csv.NewReader(r)
h, err := reader.Read()
if err != nil {
return pools, fmt.Errorf("Failed to read header of CSV for file %d: %w", file.ID, err)
return pools, fmt.Errorf("Failed to read header of CSV for file %d: %w", f.ID, err)
}
header_types, header_names := parseHeaders(h)
missing_headers := missingRequiredHeaders(header_types)
for _, mh := range missing_headers {
errorMissingHeader(ctx, txn, c, mh)
file.Update(ctx, txn, &models.FileuploadFileSetter{
f.Update(ctx, txn, &models.FileuploadFileSetter{
Status: omit.From(enums.FileuploadFilestatustypeError),
})
return pools, nil
@ -183,7 +183,7 @@ func parseCSVPoollist(ctx context.Context, txn bob.Tx, file *models.FileuploadFi
if err == io.EOF {
return pools, nil
}
return pools, fmt.Errorf("Failed to read all CSV records for file %d: %w", file.ID, err)
return pools, fmt.Errorf("Failed to read all CSV records for file %d: %w", f.ID, err)
}
tags := make(map[string]string, 0)
setter := models.FileuploadPoolSetter{
@ -196,8 +196,8 @@ func parseCSVPoollist(ctx context.Context, txn bob.Tx, file *models.FileuploadFi
Committed: omit.From(false),
Condition: omit.From(enums.PoolconditiontypeUnknown),
Created: omit.From(time.Now()),
CreatorID: omit.From(file.CreatorID),
CSVFile: omit.From(file.ID),
CreatorID: omit.From(f.CreatorID),
CSVFile: omit.From(f.ID),
Deleted: omitnull.FromPtr[time.Time](nil),
Geom: omitnull.FromPtr[string](nil),
H3cell: omitnull.FromPtr[string](nil),
@ -287,12 +287,12 @@ func parseCSVPoollist(ctx context.Context, txn bob.Tx, file *models.FileuploadFi
line_number = line_number + 1
}
}
func processCSVPoollist(ctx context.Context, txn bob.Tx, file *models.FileuploadFile, c *models.FileuploadCSV, parsed []*models.FileuploadPool) error {
org, err := models.FindOrganization(ctx, db.PGInstance.BobDB, file.OrganizationID)
func processCSVPoollist(ctx context.Context, txn bob.Tx, f *models.FileuploadFile, c *models.FileuploadCSV, parsed []*models.FileuploadPool) error {
org, err := models.FindOrganization(ctx, db.PGInstance.BobDB, f.OrganizationID)
if err != nil {
return fmt.Errorf("get org: %w", err)
}
err = bulkGeocode(ctx, txn, file, c, parsed, org)
err = bulkGeocode(ctx, txn, f, c, parsed, org)
if err != nil {
log.Error().Err(err).Msg("Failure during geocoding")
}

View file

Before

Width:  |  Height:  |  Size: 3.8 KiB

After

Width:  |  Height:  |  Size: 3.8 KiB

Before After
Before After

View file

@ -1,4 +1,4 @@
package sync
package platform
import (
"context"
@ -16,24 +16,181 @@ import (
"github.com/uber/h3-go/v4"
)
func breedingSourcesByCell(ctx context.Context, org *models.Organization, c h3.Cell) ([]BreedingSourceSummary, error) {
var results []BreedingSourceSummary
// Inspection is a flattened, template-friendly view of a mosquito inspection
// record, produced by the *ByCell / *BySource query helpers in this package.
type Inspection struct {
	Action string
	// Date is a pointer — presumably nil when the inspection has no
	// recorded end time; TODO confirm against toTemplateInspection.
	Date     *time.Time
	Notes    string
	Location string
	// LocationID — NOTE(review): looks like the global ID of the point
	// location the inspection belongs to; verify against the converter.
	LocationID uuid.UUID
}
func BreedingSourcesByCell(ctx context.Context, org Organization, c h3.Cell) ([]BreedingSourceSummary, error) {
boundary, err := c.Boundary()
if err != nil {
return results, fmt.Errorf("Failed to get cell boundary: %w", err)
return nil, fmt.Errorf("Failed to get cell boundary: %w", err)
}
geom_query := gisStatement(boundary)
rows, err := org.Pointlocations(
rows, err := org.model.Pointlocations(
sm.Where(
psql.F("ST_Within", "geospatial", geom_query),
),
sm.OrderBy("lasttreatdate"),
).All(ctx, db.PGInstance.BobDB)
if err != nil {
return nil, fmt.Errorf("Failed to query rows: %w", err)
}
return toBreedingSourceSummary(rows), nil
}
// SourceByGlobalID looks up a single breeding-source point location by its
// ArcGIS global ID and converts it to the platform detail type.
func SourceByGlobalID(ctx context.Context, org Organization, id uuid.UUID) (*BreedingSourceDetail, error) {
	row, err := org.model.Pointlocations(
		models.SelectWhere.FieldseekerPointlocations.Globalid.EQ(id),
	).One(ctx, db.PGInstance.BobDB)
	if err != nil {
		// NOTE(review): One presumably also errors when no row matches —
		// callers receive the wrapped driver error either way.
		return nil, fmt.Errorf("Failed to get point location: %w", err)
	}
	return toBreedingSource(row)
}
// TrapsBySource returns the traps near the given breeding source: their
// locations, recent trap data and counts, converted to the platform
// TrapNearby type.
//
// BUG FIX: removed the `args []bob.Expression` slice that was built in the
// loop but never read (dead code left over from an abandoned query approach).
func TrapsBySource(ctx context.Context, org Organization, sourceID uuid.UUID) ([]TrapNearby, error) {
	locations, err := sql.TrapLocationBySourceID(org.ID(), sourceID).All(ctx, db.PGInstance.BobDB)
	if err != nil {
		return nil, fmt.Errorf("Failed to query rows: %w", err)
	}
	// The follow-up queries all key on the location global IDs.
	location_ids := make([]uuid.UUID, 0, len(locations))
	for _, location := range locations {
		location_ids = append(location_ids, location.TrapLocationGlobalid)
	}
	trap_data, err := sql.TrapDataByLocationIDRecent(org.ID(), location_ids).All(ctx, db.PGInstance.BobDB)
	if err != nil {
		return nil, fmt.Errorf("Failed to query trap data: %w", err)
	}
	counts, err := sql.TrapCountByLocationID(org.ID(), location_ids).All(ctx, db.PGInstance.BobDB)
	if err != nil {
		return nil, fmt.Errorf("Failed to query trap counts: %w", err)
	}
	traps, err := toTemplateTrapsNearby(locations, trap_data, counts)
	if err != nil {
		return nil, fmt.Errorf("Failed to convert trap data: %w", err)
	}
	return traps, nil
}
// TreatmentsBySource returns the treatments applied at the given point
// location, most recent end time first.
func TreatmentsBySource(ctx context.Context, org Organization, sourceID uuid.UUID) ([]Treatment, error) {
	rows, err := org.model.Treatments(
		sm.Where(
			models.FieldseekerTreatments.Columns.Pointlocid.EQ(psql.Arg(sourceID)),
		),
		sm.OrderBy("enddatetime").Desc(),
	).All(ctx, db.PGInstance.BobDB)
	if err != nil {
		return nil, fmt.Errorf("Failed to query rows: %w", err)
	}
	return toTreatment(rows)
}
// TrapByGlobalId loads a single trap location by its ArcGIS global ID along
// with its recent trap data and counts, and assembles them into the platform
// Trap type.
func TrapByGlobalId(ctx context.Context, org Organization, id uuid.UUID) (*Trap, error) {
	trap_location, err := org.model.Traplocations(
		sm.Where(models.FieldseekerTraplocations.Columns.Globalid.EQ(psql.Arg(id))),
	).One(ctx, db.PGInstance.BobDB)
	if err != nil {
		return nil, fmt.Errorf("Failed to get trap location: %w", err)
	}
	// "Recent" is defined inside the sql helper, not here — see
	// sql.TrapDataByLocationIDRecent for the window.
	trap_data, err := sql.TrapDataByLocationIDRecent(org.ID(), []uuid.UUID{id}).All(ctx, db.PGInstance.BobDB)
	if err != nil {
		return nil, fmt.Errorf("Failed to query trap data: %w", err)
	}
	counts, err := sql.TrapCountByLocationID(org.ID(), []uuid.UUID{id}).All(ctx, db.PGInstance.BobDB)
	if err != nil {
		return nil, fmt.Errorf("Failed to query trap counts: %w", err)
	}
	result, err := toTrap(trap_location, trap_data, counts)
	if err != nil {
		return nil, fmt.Errorf("to trap: %w", err)
	}
	return &result, err
}
func TrapsByCell(ctx context.Context, org Organization, c h3.Cell) (results []TrapSummary, err error) {
boundary, err := c.Boundary()
if err != nil {
return results, fmt.Errorf("Failed to get cell boundary: %w", err)
}
geom_query := gisStatement(boundary)
rows, err := org.model.Traplocations(
sm.Where(
psql.F("ST_Within", "geospatial", geom_query),
),
sm.OrderBy("objectid"),
).All(ctx, db.PGInstance.BobDB)
if err != nil {
return results, fmt.Errorf("Failed to query rows: %w", err)
}
for _, r := range rows {
return toTemplateTrapSummary(rows)
}
// TreatmentsByCell returns the organization's treatments whose geometry lies
// inside the H3 cell c, ordered by point location and then end time.
func TreatmentsByCell(ctx context.Context, org Organization, c h3.Cell) ([]Treatment, error) {
	boundary, err := c.Boundary()
	if err != nil {
		return nil, fmt.Errorf("Failed to get cell boundary: %w", err)
	}
	rows, err := org.model.Treatments(
		sm.Where(psql.F("ST_Within", "geospatial", gisStatement(boundary))),
		sm.OrderBy("pointlocid"),
		sm.OrderBy("enddatetime"),
	).All(ctx, db.PGInstance.BobDB)
	if err != nil {
		return nil, fmt.Errorf("Failed to query rows: %w", err)
	}
	return toTreatment(rows)
}
// InspectionsByCell returns the organization's mosquito inspections whose
// geometry lies inside the H3 cell c, ordered by point location and end time.
func InspectionsByCell(ctx context.Context, org Organization, c h3.Cell) ([]Inspection, error) {
	boundary, err := c.Boundary()
	if err != nil {
		return nil, fmt.Errorf("Failed to get cell boundary: %w", err)
	}
	rows, err := org.model.Mosquitoinspections(
		sm.Where(psql.F("ST_Within", "geospatial", gisStatement(boundary))),
		sm.OrderBy("pointlocid"),
		sm.OrderBy("enddatetime"),
	).All(ctx, db.PGInstance.BobDB)
	if err != nil {
		return nil, fmt.Errorf("Failed to query rows: %w", err)
	}
	return toTemplateInspection(rows)
}
// InspectionsBySource returns the mosquito inspections recorded against the
// given point location, most recent end time first.
func InspectionsBySource(ctx context.Context, org Organization, sourceID uuid.UUID) ([]Inspection, error) {
	rows, err := org.model.Mosquitoinspections(
		sm.Where(models.FieldseekerMosquitoinspections.Columns.Pointlocid.EQ(psql.Arg(sourceID))),
		sm.OrderBy("enddatetime").Desc(),
	).All(ctx, db.PGInstance.BobDB)
	if err != nil {
		return nil, fmt.Errorf("Failed to query rows: %w", err)
	}
	return toTemplateInspection(rows)
}
func toBreedingSourceSummary(points []*models.FieldseekerPointlocation) []BreedingSourceSummary {
results := make([]BreedingSourceSummary, len(points))
for i, r := range points {
var last_inspected *time.Time
if !r.Lastinspectdate.IsNull() {
l := r.Lastinspectdate.MustGet()
@ -44,14 +201,14 @@ func breedingSourcesByCell(ctx context.Context, org *models.Organization, c h3.C
l := r.Lasttreatdate.MustGet()
last_treat_date = &l
}
results = append(results, BreedingSourceSummary{
results[i] = BreedingSourceSummary{
ID: r.Globalid,
LastInspected: last_inspected,
LastTreated: last_treat_date,
Type: r.Habitat.GetOr("none"),
})
}
}
return results, nil
return results
}
func gisStatement(cb h3.CellBoundary) string {
var content strings.Builder
@ -65,200 +222,3 @@ func gisStatement(cb h3.CellBoundary) string {
content.WriteString(fmt.Sprintf(", %f %f", cb[0].Lng, cb[0].Lat))
return fmt.Sprintf("ST_GeomFromText('POLYGON((%s))', 3857)", content.String())
}
func sourceByGlobalId(ctx context.Context, org *models.Organization, id uuid.UUID) (*BreedingSourceDetail, error) {
row, err := org.Pointlocations(
sm.Where(models.FieldseekerPointlocations.Columns.Globalid.EQ(psql.Arg(id))),
).One(ctx, db.PGInstance.BobDB)
if err != nil {
return nil, fmt.Errorf("Failed to get point location: %w", err)
}
return toTemplateBreedingSource(row), nil
}
func trapsBySource(ctx context.Context, org *models.Organization, sourceID uuid.UUID) ([]TrapNearby, error) {
locations, err := sql.TrapLocationBySourceID(org.ID, sourceID).All(ctx, db.PGInstance.BobDB)
if err != nil {
return nil, fmt.Errorf("Failed to query rows: %w", err)
}
location_ids := make([]uuid.UUID, 0)
var args []bob.Expression
for _, location := range locations {
location_ids = append(location_ids, location.TrapLocationGlobalid)
args = append(args, psql.Arg(location.TrapLocationGlobalid))
}
/*
trap_data, err := org.FSTrapdata(
sm.Where(
models.FSTrapdata.Columns.LocID.In(args...),
),
sm.OrderBy("enddatetime"),
).All(ctx, db.PGInstance.BobDB)
*/
/*
query := org.FSTrapdata(
sm.From(
psql.Select(
sm.From(psql.F("ROW_NUMBER")(
fm.Over(
wm.PartitionBy(models.FSTrapdata.Columns.LocID),
wm.OrderBy(models.FSTrapdata.Columns.Enddatetime).Desc(),
),
)).As("row_num"),
sm.Where(models.FSTrapdata.Columns.LocID.In(args...))),
),
sm.Where(psql.Quote("row_num").LTE(psql.Arg(10))),
sm.OrderBy(models.FSTrapdata.Columns.LocID),
sm.OrderBy(models.FSTrapdata.Columns.Enddatetime).Desc(),
)
*/
/*
query := psql.Select(
sm.From(
psql.Select(
sm.Columns(
models.FSTrapdata.Columns.Globalid,
psql.F("ROW_NUMBER")(
fm.Over(
wm.PartitionBy(models.FSTrapdata.Columns.LocID),
wm.OrderBy(models.FSTrapdata.Columns.Enddatetime).Desc(),
),
).As("row_num"),
sm.From(models.FSTrapdata.Name()),
),
sm.Where(models.FSTrapdata.Columns.LocID.In(args...))),
),
sm.Where(psql.Quote("row_num").LTE(psql.Arg(10))),
sm.OrderBy(models.FSTrapdata.Columns.LocID),
sm.OrderBy(models.FSTrapdata.Columns.Enddatetime).Desc(),
)
log.Info().Str("trapdata", queryToString(query)).Msg("Getting trap data")
trap_data, err := query.Exec(ctx, db.PGInstance.BobDB)
*/
trap_data, err := sql.TrapDataByLocationIDRecent(org.ID, location_ids).All(ctx, db.PGInstance.BobDB)
if err != nil {
return nil, fmt.Errorf("Failed to query trap data: %w", err)
}
counts, err := sql.TrapCountByLocationID(org.ID, location_ids).All(ctx, db.PGInstance.BobDB)
if err != nil {
return nil, fmt.Errorf("Failed to query trap counts: %w", err)
}
traps, err := toTemplateTrapsNearby(locations, trap_data, counts)
if err != nil {
return nil, fmt.Errorf("Failed to convert trap data: %w", err)
}
return traps, nil
}
func treatmentsBySource(ctx context.Context, org *models.Organization, sourceID uuid.UUID) ([]Treatment, error) {
var results []Treatment
rows, err := org.Treatments(
sm.Where(
models.FieldseekerTreatments.Columns.Pointlocid.EQ(psql.Arg(sourceID)),
),
sm.OrderBy("enddatetime").Desc(),
).All(ctx, db.PGInstance.BobDB)
if err != nil {
return results, fmt.Errorf("Failed to query rows: %w", err)
}
//log.Info().Int("row count", len(rows)).Msg("Getting treatments")
return toTemplateTreatment(rows)
}
func trapByGlobalId(ctx context.Context, org *models.Organization, id uuid.UUID) (result Trap, err error) {
row, err := org.Traplocations(
sm.Where(models.FieldseekerTraplocations.Columns.Globalid.EQ(psql.Arg(id))),
).One(ctx, db.PGInstance.BobDB)
if err != nil {
return result, fmt.Errorf("Failed to get trap location: %w", err)
}
trap_data, err := sql.TrapDataByLocationIDRecent(org.ID, []uuid.UUID{id}).All(ctx, db.PGInstance.BobDB)
if err != nil {
return result, fmt.Errorf("Failed to query trap data: %w", err)
}
counts, err := sql.TrapCountByLocationID(org.ID, []uuid.UUID{id}).All(ctx, db.PGInstance.BobDB)
if err != nil {
return result, fmt.Errorf("Failed to query trap counts: %w", err)
}
return toTemplateTrap(row, trap_data, counts)
}
func trapsByCell(ctx context.Context, org *models.Organization, c h3.Cell) (results []TrapSummary, err error) {
boundary, err := c.Boundary()
if err != nil {
return results, fmt.Errorf("Failed to get cell boundary: %w", err)
}
geom_query := gisStatement(boundary)
rows, err := org.Traplocations(
sm.Where(
psql.F("ST_Within", "geospatial", geom_query),
),
sm.OrderBy("objectid"),
).All(ctx, db.PGInstance.BobDB)
if err != nil {
return results, fmt.Errorf("Failed to query rows: %w", err)
}
return toTemplateTrapSummary(rows)
}
func treatmentsByCell(ctx context.Context, org *models.Organization, c h3.Cell) ([]Treatment, error) {
var results []Treatment
boundary, err := c.Boundary()
if err != nil {
return results, fmt.Errorf("Failed to get cell boundary: %w", err)
}
geom_query := gisStatement(boundary)
rows, err := org.Treatments(
sm.Where(
psql.F("ST_Within", "geospatial", geom_query),
),
sm.OrderBy("pointlocid"),
sm.OrderBy("enddatetime"),
).All(ctx, db.PGInstance.BobDB)
if err != nil {
return results, fmt.Errorf("Failed to query rows: %w", err)
}
return toTemplateTreatment(rows)
}
func inspectionsByCell(ctx context.Context, org *models.Organization, c h3.Cell) ([]Inspection, error) {
var results []Inspection
boundary, err := c.Boundary()
if err != nil {
return results, fmt.Errorf("Failed to get cell boundary: %w", err)
}
geom_query := gisStatement(boundary)
rows, err := org.Mosquitoinspections(
sm.Where(
psql.F("ST_Within", "geospatial", geom_query),
),
sm.OrderBy("pointlocid"),
sm.OrderBy("enddatetime"),
).All(ctx, db.PGInstance.BobDB)
if err != nil {
return results, fmt.Errorf("Failed to query rows: %w", err)
}
return toTemplateInspection(rows)
}
func inspectionsBySource(ctx context.Context, org *models.Organization, sourceID uuid.UUID) ([]Inspection, error) {
var results []Inspection
rows, err := org.Mosquitoinspections(
sm.Where(
models.FieldseekerMosquitoinspections.Columns.Pointlocid.EQ(psql.Arg(sourceID)),
),
sm.OrderBy("enddatetime").Desc(),
).All(ctx, db.PGInstance.BobDB)
if err != nil {
return results, fmt.Errorf("Failed to query rows: %w", err)
}
return toTemplateInspection(rows)
}

View file

@ -1,4 +1,4 @@
package userfile
package file
import (
"fmt"
@ -34,6 +34,9 @@ var collectionToSubdir map[Collection]string = map[Collection]string{
CollectionImageRaw: "image-raw",
}
func ContentPath(collection Collection, uid uuid.UUID) string {
return fileContentPath(collection, uid)
}
func fileContentPath(collection Collection, uid uuid.UUID) string {
subdir, ok := collectionToSubdir[collection]
if !ok {

View file

@ -1,4 +1,4 @@
package userfile
package file
type Collection int

View file

@ -1,4 +1,4 @@
package userfile
package file
import (
"fmt"

View file

@ -1,4 +1,4 @@
package userfile
package file
import (
"bytes"

View file

@ -1,4 +1,4 @@
package userfile
package file
import (
"fmt"

View file

@ -1,116 +0,0 @@
package imagetile
import (
"context"
"embed"
"fmt"
"github.com/Gleipnir-Technology/arcgis-go"
"github.com/Gleipnir-Technology/arcgis-go/fieldseeker"
"github.com/Gleipnir-Technology/nidus-sync/background"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
//"github.com/rs/zerolog/log"
)
//go:embed empty-tile.png
var emptyTileFS embed.FS
var clientByOrgID = make(map[int32]*fieldseeker.FieldSeeker, 0)
var tileRasterPlaceholder *TileRaster
type TileRaster struct {
Content []byte
IsPlaceholder bool
}
// ImageAtPoint fetches the aerial imagery tile covering the given GPS
// coordinate at the given zoom level, via the organization's ArcGIS map
// service. When the service returns an empty payload the shared placeholder
// tile is served instead.
func ImageAtPoint(ctx context.Context, org *models.Organization, level uint, lat, lng float64) (*TileRaster, error) {
	// Uses the per-organization cached client (see getFieldseeker).
	fssync, err := getFieldseeker(ctx, org)
	if err != nil {
		return nil, fmt.Errorf("create fssync: %w", err)
	}
	map_service, err := aerialImageService(ctx, fssync.Arcgis)
	if err != nil {
		return nil, fmt.Errorf("no map service: %w", err)
	}
	data, e := map_service.TileGPS(ctx, level, lat, lng)
	if e != nil {
		return nil, fmt.Errorf("tilegps: %w", e)
	}
	// No imagery at this location — serve the placeholder rather than an
	// empty body.
	if len(data) == 0 {
		return TileRasterPlaceholder(), nil
	}
	return &TileRaster{
		Content:       data,
		IsPlaceholder: false,
	}, nil
}
// ImageAtTile fetches the aerial imagery tile at the given tile coordinates
// (level/y/x) via the organization's ArcGIS map service. When the service
// returns an empty payload the shared placeholder tile is served instead.
//
// NOTE(review): unlike ImageAtPoint, this builds a fresh FieldSeeker client
// on every call instead of using the clientByOrgID cache — confirm whether
// that is intentional.
func ImageAtTile(ctx context.Context, org *models.Organization, level, y, x uint) (*TileRaster, error) {
	oauth, err := background.GetOAuthForOrg(ctx, org)
	if err != nil {
		return nil, fmt.Errorf("get oauth for org: %w", err)
	}
	fssync, err := background.NewFieldSeeker(
		ctx,
		oauth,
	)
	if err != nil {
		return nil, fmt.Errorf("create fssync: %w", err)
	}
	map_service, err := aerialImageService(ctx, fssync.Arcgis)
	if err != nil {
		return nil, fmt.Errorf("no map service: %w", err)
	}
	data, e := map_service.Tile(ctx, level, y, x)
	if e != nil {
		return nil, fmt.Errorf("tile: %w", e)
	}
	// No data at this location, so supply the empty tile placeholder
	if len(data) == 0 {
		return TileRasterPlaceholder(), nil
	}
	return &TileRaster{
		Content:       data,
		IsPlaceholder: false,
	}, nil
}
// TileRasterPlaceholder returns the shared "empty tile" raster, lazily
// loading the embedded PNG on first use and caching it in a package var.
//
// NOTE(review): the lazy init is not synchronized; concurrent first calls may
// each read the embedded file. Harmless (same result) but a sync.Once would
// be tidier — confirm call sites before changing.
func TileRasterPlaceholder() *TileRaster {
	if tileRasterPlaceholder != nil {
		return tileRasterPlaceholder
	}
	empty, err := emptyTileFS.ReadFile("empty-tile.png")
	if err != nil {
		// The PNG is embedded at build time, so a read failure is a
		// programmer error, not a runtime condition.
		panic(fmt.Sprintf("Failed to read empty-tile.png: %v", err))
	}
	tileRasterPlaceholder = &TileRaster{
		Content:       empty,
		IsPlaceholder: true,
	}
	return tileRasterPlaceholder
}
// aerialImageService returns the first map service advertised by the ArcGIS
// instance — assumed to host the aerial imagery tiles (TODO confirm there is
// only ever one relevant service).
//
// BUG FIX: the "none found" error message read "non found", and the first
// element was obtained via a loop header that returned the loop variable's
// address; index the slice directly instead.
func aerialImageService(ctx context.Context, gis *arcgis.ArcGIS) (*arcgis.MapService, error) {
	map_services, err := gis.MapServices(ctx)
	if err != nil {
		return nil, fmt.Errorf("aerial image service: %w", err)
	}
	if len(map_services) > 0 {
		return &map_services[0], nil
	}
	return nil, fmt.Errorf("none found")
}
// getFieldseeker returns a FieldSeeker client for the organization, creating
// one from the org's OAuth token and caching it in clientByOrgID on first use.
//
// BUG FIX: the error from background.NewFieldSeeker was previously ignored,
// which could cache and return a nil client to callers.
//
// NOTE(review): clientByOrgID is read and written without a lock — confirm
// callers are single-goroutine or add a mutex.
func getFieldseeker(ctx context.Context, org *models.Organization) (*fieldseeker.FieldSeeker, error) {
	if fssync, ok := clientByOrgID[org.ID]; ok {
		return fssync, nil
	}
	oauth, err := background.GetOAuthForOrg(ctx, org)
	if err != nil {
		return nil, fmt.Errorf("get oauth for org: %w", err)
	}
	fssync, err := background.NewFieldSeeker(
		ctx,
		oauth,
	)
	if err != nil {
		return nil, fmt.Errorf("create fssync: %w", err)
	}
	clientByOrgID[org.ID] = fssync
	return fssync, nil
}

View file

@ -10,20 +10,13 @@ import (
"github.com/google/uuid"
)
func fieldseeker(ctx context.Context, u *models.User, since *time.Time) (fsync FieldseekerRecordsSync, err error) {
if u == nil {
return fsync, fmt.Errorf("Wha! Nil user!")
}
org := u.R.Organization
if org == nil {
return fsync, fmt.Errorf("Whoa nil org from user %d and org %d.", u.ID, u.OrganizationID)
}
func getFieldseekerRecordsSync(ctx context.Context, u User, since *time.Time) (fsync FieldseekerRecordsSync, err error) {
db_connection := db.PGInstance.BobDB
pl, err := org.Pointlocations().All(ctx, db_connection)
pl, err := u.Organization.model.Pointlocations().All(ctx, db_connection)
if err != nil {
return fsync, fmt.Errorf("Failed to get point locations: %w", err)
}
inspections, err := u.R.Organization.Mosquitoinspections().All(ctx, db.PGInstance.BobDB)
inspections, err := u.Organization.model.Mosquitoinspections().All(ctx, db.PGInstance.BobDB)
if err != nil {
return fsync, fmt.Errorf("Failed to get mosquito inspections: %w", err)
}
@ -40,7 +33,7 @@ func fieldseeker(ctx context.Context, u *models.User, since *time.Time) (fsync F
insp = append(insp, i)
inspections_by_location[locid] = insp
}
treatments, err := u.R.Organization.Treatments().All(ctx, db.PGInstance.BobDB)
treatments, err := u.Organization.model.Treatments().All(ctx, db.PGInstance.BobDB)
if err != nil {
return fsync, fmt.Errorf("Failed to get treatment data: %w", err)
}
@ -78,8 +71,8 @@ func fieldseeker(ctx context.Context, u *models.User, since *time.Time) (fsync F
return fsync, err
}
func ContentClientIos(ctx context.Context, u *models.User, since *time.Time) (csync ClientSync, err error) {
fsync, err := fieldseeker(ctx, u, since)
func ContentClientIos(ctx context.Context, u User, since *time.Time) (csync ClientSync, err error) {
fsync, err := getFieldseekerRecordsSync(ctx, u, since)
return ClientSync{
Fieldseeker: fsync,
}, err

View file

@ -1,15 +1,16 @@
package db
package platform
import (
"context"
"github.com/Gleipnir-Technology/nidus-sync/db"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/google/uuid"
"github.com/rs/zerolog/log"
)
func NoteAudioCreate(ctx context.Context, org *models.Organization, userID int32, setter models.NoteAudioSetter) error {
err := org.InsertNoteAudios(ctx, PGInstance.BobDB, &setter)
func NoteAudioCreate(ctx context.Context, user User, setter models.NoteAudioSetter) error {
err := user.Organization.model.InsertNoteAudios(ctx, db.PGInstance.BobDB, &setter)
if err == nil {
return nil
}
@ -21,17 +22,14 @@ func NoteAudioCreate(ctx context.Context, org *models.Organization, userID int32
return err
}
// NoteAudioGetLatest is a stub from the platform refactor: it always returns
// (nil, nil). Callers MUST nil-check the result until it is reimplemented.
func NoteAudioGetLatest(ctx context.Context, uuid string) (*models.NoteAudio, error) {
	return nil, nil
}
// NoteAudioNormalized is a stub: it records nothing and always succeeds.
// TODO(review): wire this to mark the audio record as normalized.
func NoteAudioNormalized(uuid string) error {
	return nil
}
// NoteAudioTranscodedToOgg is a stub: it records nothing and always succeeds.
// TODO(review): wire this to mark the audio record as transcoded to OGG.
func NoteAudioTranscodedToOgg(uuid string) error {
	return nil
}
func NoteImageCreate(ctx context.Context, org *models.Organization, userID int32, setter models.NoteImageSetter) error {
err := org.InsertNoteImages(ctx, PGInstance.BobDB, &setter)
func NoteImageCreate(ctx context.Context, user User, setter models.NoteImageSetter) error {
err := user.Organization.model.InsertNoteImages(ctx, db.PGInstance.BobDB, &setter)
if err == nil {
return nil
}

View file

@ -1,4 +1,4 @@
package notification
package platform
import (
"context"
@ -82,9 +82,9 @@ func NotifyOauthInvalid(ctx context.Context, user *models.User) {
}
}
func ForUser(ctx context.Context, u *models.User) ([]Notification, error) {
func NotificationsForUser(ctx context.Context, u User) ([]Notification, error) {
results := make([]Notification, 0)
notifications, err := u.UserNotifications(
notifications, err := u.model.UserNotifications(
models.SelectWhere.Notifications.ResolvedAt.IsNull(),
).All(ctx, db.PGInstance.BobDB)
if err != nil {

77
platform/oauth.go Normal file
View file

@ -0,0 +1,77 @@
package platform
import (
	"context"
	"database/sql"
	"errors"
	"fmt"
	"net/url"
	"time"

	"github.com/Gleipnir-Technology/bob/dialect/psql/sm"
	"github.com/Gleipnir-Technology/nidus-sync/config"
	"github.com/Gleipnir-Technology/nidus-sync/db"
	"github.com/Gleipnir-Technology/nidus-sync/db/models"
	"github.com/Gleipnir-Technology/nidus-sync/platform/background"
	"github.com/Gleipnir-Technology/nidus-sync/platform/oauth"
	"github.com/aarondl/opt/omit"
	"github.com/aarondl/opt/omitnull"
)
// When there is no oauth for an organization
type NoOAuthForOrg struct{}
func (e NoOAuthForOrg) Error() string { return "No oauth available for organization" }
// GetOAuthForOrg looks up an ArcGIS oauth token for the given organization.
// The underlying oauth package signals "no token" with a (nil, nil) pair;
// this wrapper converts that case into a NoOAuthForOrg error.
func GetOAuthForOrg(ctx context.Context, org Organization) (*models.ArcgisOauthToken, error) {
	token, err := oauth.GetOAuthForOrg(ctx, org.model)
	switch {
	case err != nil:
		return nil, err
	case token == nil:
		return nil, &NoOAuthForOrg{}
	default:
		return token, nil
	}
}
// GetOAuthForUser returns the most recently created ArcGIS oauth token for
// the given user, or (nil, nil) when the user has none.
func GetOAuthForUser(ctx context.Context, user User) (*models.ArcgisOauthToken, error) {
	token, err := user.model.UserOauthTokens(
		sm.OrderBy("created").Desc(),
	).One(ctx, db.PGInstance.BobDB)
	if err == nil {
		return token, nil
	}
	if err.Error() == "sql: no rows in result set" {
		// Having no token is a normal state, not an error.
		return nil, nil
	}
	return nil, err
}
// HandleOauthAccessCode exchanges an oauth authorization code for a token
// pair, persists the tokens for the user, and kicks off a background fetch
// of the user's ArcGIS account data.
func HandleOauthAccessCode(ctx context.Context, user User, code string) error {
	// Exchange the one-time authorization code for access/refresh tokens.
	form := url.Values{
		"grant_type":   []string{"authorization_code"},
		"code":         []string{code},
		"redirect_uri": []string{config.ArcGISOauthRedirectURL()},
	}
	token, err := oauth.DoTokenRequest(ctx, form)
	if err != nil {
		return fmt.Errorf("Failed to exchange authorization code for token: %w", err)
	}
	setter := models.ArcgisOauthTokenSetter{
		AccessToken:         omit.From(token.AccessToken),
		AccessTokenExpires:  omit.From(oauth.FutureUTCTimestamp(token.ExpiresIn)),
		ArcgisID:            omitnull.FromPtr[string](nil),
		ArcgisLicenseTypeID: omitnull.FromPtr[string](nil),
		Created:             omit.From(time.Now()),
		InvalidatedAt:       omitnull.FromPtr[time.Time](nil),
		RefreshToken:        omit.From(token.RefreshToken),
		RefreshTokenExpires: omit.From(oauth.FutureUTCTimestamp(token.RefreshTokenExpiresIn)),
		UserID:              omit.From(int32(user.ID)),
		Username:            omit.From(token.Username),
	}
	// Named "saved" rather than "oauth" so we do not shadow the oauth package.
	saved, err := models.ArcgisOauthTokens.Insert(&setter).One(ctx, db.PGInstance.BobDB)
	if err != nil {
		return fmt.Errorf("Failed to save token to database: %w", err)
	}
	go background.UpdateArcgisUserData(context.Background(), user.model, saved)
	return nil
}

160
platform/oauth/oauth.go Normal file
View file

@ -0,0 +1,160 @@
package oauth
import (
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"net/url"
"strings"
"time"
"github.com/Gleipnir-Technology/arcgis-go"
"github.com/Gleipnir-Technology/nidus-sync/config"
"github.com/Gleipnir-Technology/nidus-sync/db"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/aarondl/opt/omit"
"github.com/rs/zerolog/log"
)
// InvalidatedTokenError is returned when the API responds that the token is
// now invalidated (ArcGIS reports this as error code 498 with an
// "invalidated refresh_token" description).
type InvalidatedTokenError struct{}

// Error implements the error interface.
func (e InvalidatedTokenError) Error() string { return "The token has been invalidated by the server" }
// OAuthTokenResponse is the JSON body returned by the ArcGIS oauth2 token
// endpoint. The two expiry fields are lifetimes in seconds from now.
type OAuthTokenResponse struct {
	AccessToken           string `json:"access_token"`
	ExpiresIn             int    `json:"expires_in"` // access token lifetime, seconds
	RefreshToken          string `json:"refresh_token"`
	RefreshTokenExpiresIn int    `json:"refresh_token_expires_in"` // refresh token lifetime, seconds
	SSL                   bool   `json:"ssl"`
	Username              string `json:"username"`
}
// DoTokenRequest POSTs the given form to the ArcGIS oauth2 token endpoint
// and decodes the response.
//
// The client_id from config is always added to the form. API failures are
// surfaced as InvalidatedTokenError (for a revoked refresh token), as the
// arcgis error type, or as a wrapped generic error.
func DoTokenRequest(ctx context.Context, form url.Values) (*OAuthTokenResponse, error) {
	form.Set("client_id", config.ClientID)
	baseURL := "https://www.arcgis.com/sharing/rest/oauth2/token/"
	req, err := http.NewRequest("POST", baseURL, strings.NewReader(form.Encode()))
	if err != nil {
		return nil, fmt.Errorf("Failed to create request: %w", err)
	}
	req.Header.Add("Content-Type", "application/x-www-form-urlencoded")
	client := http.Client{}
	log.Info().Str("url", req.URL.String()).Msg("POST")
	resp, err := client.Do(req)
	if err != nil {
		return nil, fmt.Errorf("Failed to do request: %w", err)
	}
	defer resp.Body.Close()
	bodyBytes, err := io.ReadAll(resp.Body)
	log.Info().Int("status", resp.StatusCode).Msg("Token request")
	if resp.StatusCode >= http.StatusBadRequest {
		if err != nil {
			return nil, fmt.Errorf("Got status code %d and failed to read response body: %w", resp.StatusCode, err)
		}
		bodyString := string(bodyBytes)
		var errorResp arcgis.ErrorResponse
		if err := json.Unmarshal(bodyBytes, &errorResp); err == nil {
			// Code 498 with this description is how ArcGIS reports a revoked
			// refresh token.
			if errorResp.Error.Code == 498 && errorResp.Error.Description == "invalidated refresh_token" {
				return nil, InvalidatedTokenError{}
			}
			return nil, fmt.Errorf("API response JSON error: %d: %d %s", resp.StatusCode, errorResp.Error.Code, errorResp.Error.Description)
		}
		return nil, fmt.Errorf("API returned error status %d: %s", resp.StatusCode, bodyString)
	}
	// BUG FIX: previously a read failure on a 2xx response was silently
	// ignored and we attempted to unmarshal a nil/partial body. Fail fast.
	if err != nil {
		return nil, fmt.Errorf("Failed to read response body: %w", err)
	}
	var tokenResponse OAuthTokenResponse
	err = json.Unmarshal(bodyBytes, &tokenResponse)
	if err != nil {
		return nil, fmt.Errorf("Failed to unmarshal JSON: %w", err)
	}
	// Just because we got a 200-level status code doesn't mean it worked. Experience has taught us that
	// we can get errors without anything indicated in the headers or the status code
	if tokenResponse == (OAuthTokenResponse{}) {
		var errorResponse arcgis.ErrorResponse
		err = json.Unmarshal(bodyBytes, &errorResponse)
		if err != nil {
			return nil, fmt.Errorf("Failed to unmarshal error JSON: %w", err)
		}
		if errorResponse.Error.Code > 0 {
			return nil, errorResponse.AsError(ctx)
		}
	}
	log.Info().Str("refresh token", tokenResponse.RefreshToken).Str("access token", tokenResponse.AccessToken).Int("access expires", tokenResponse.ExpiresIn).Int("refresh expires", tokenResponse.RefreshTokenExpiresIn).Msg("Oauth token acquired")
	return &tokenResponse, nil
}
func FutureUTCTimestamp(secondsFromNow int) time.Time {
return time.Now().UTC().Add(time.Duration(secondsFromNow) * time.Second)
}
// GetOAuthForOrg scans the organization's users for a non-invalidated ArcGIS
// oauth token and returns the first one found. When no user holds a usable
// token it returns (nil, nil).
func GetOAuthForOrg(ctx context.Context, org *models.Organization) (*models.ArcgisOauthToken, error) {
	users, err := org.User().All(ctx, db.PGInstance.BobDB)
	if err != nil {
		return nil, fmt.Errorf("Failed to query all users for org: %w", err)
	}
	for _, user := range users {
		tokens, err := user.UserOauthTokens(models.SelectWhere.ArcgisOauthTokens.InvalidatedAt.IsNull()).All(ctx, db.PGInstance.BobDB)
		if err != nil {
			return nil, fmt.Errorf("Failed to query all oauth tokens for org: %w", err)
		}
		if len(tokens) > 0 {
			return tokens[0], nil
		}
	}
	return nil, nil
}
// RefreshAccessToken uses the stored refresh token to obtain a fresh access
// token and persists the new access token and its expiry on the given row.
func RefreshAccessToken(ctx context.Context, oauth *models.ArcgisOauthToken) error {
	form := url.Values{
		"grant_type":    []string{"refresh_token"},
		"client_id":     []string{config.ClientID},
		"refresh_token": []string{oauth.RefreshToken},
	}
	token, err := DoTokenRequest(ctx, form)
	if err != nil {
		return fmt.Errorf("Failed to handle request: %w", err)
	}
	setter := models.ArcgisOauthTokenSetter{
		AccessToken:        omit.From(token.AccessToken),
		AccessTokenExpires: omit.From(FutureUTCTimestamp(token.ExpiresIn)),
		Username:           omit.From(token.Username),
	}
	if err = oauth.Update(ctx, db.PGInstance.BobDB, &setter); err != nil {
		return fmt.Errorf("Failed to update oauth in database: %w", err)
	}
	log.Info().Int("oauth token id", int(oauth.ID)).Msg("Updated oauth token")
	return nil
}
// RefreshRefreshToken exchanges the stored refresh token for a new one and
// persists the replacement token and its expiry on the given row.
func RefreshRefreshToken(ctx context.Context, oauth *models.ArcgisOauthToken) error {
	form := url.Values{
		"grant_type":    []string{"exchange_refresh_token"},
		"redirect_uri":  []string{config.ArcGISOauthRedirectURL()},
		"refresh_token": []string{oauth.RefreshToken},
	}
	token, err := DoTokenRequest(ctx, form)
	if err != nil {
		return fmt.Errorf("Failed to handle request: %w", err)
	}
	// BUG FIX: the refresh token's expiry was previously computed from
	// token.ExpiresIn, which is the *access* token lifetime (mirroring
	// RefreshAccessToken). Use the refresh token's own lifetime, falling
	// back to ExpiresIn if the endpoint omits refresh_token_expires_in.
	// TODO(review): confirm the exchange_refresh_token response always
	// carries refresh_token_expires_in.
	expiresIn := token.RefreshTokenExpiresIn
	if expiresIn <= 0 {
		expiresIn = token.ExpiresIn
	}
	refreshExpires := FutureUTCTimestamp(expiresIn)
	setter := models.ArcgisOauthTokenSetter{
		RefreshToken:        omit.From(token.RefreshToken),
		RefreshTokenExpires: omit.From(refreshExpires),
		Username:            omit.From(token.Username),
	}
	err = oauth.Update(ctx, db.PGInstance.BobDB, &setter)
	if err != nil {
		return fmt.Errorf("Failed to update oauth in database: %w", err)
	}
	log.Info().Int("oauth token id", int(oauth.ID)).Msg("Updated oauth token")
	return nil
}

View file

@ -1,17 +1,115 @@
package platform
import (
"context"
"fmt"
"github.com/Gleipnir-Technology/bob/dialect/psql/sm"
"github.com/Gleipnir-Technology/nidus-sync/db"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/platform/background"
//"github.com/google/uuid"
)
type Organization struct {
ID int32
Name string
ServiceAreaXmax float64
ServiceAreaXmin float64
ServiceAreaYmax float64
ServiceAreaYmin float64
model *models.Organization
}
func NewOrganization(org *models.Organization) Organization {
return Organization{
ID: org.ID,
Name: org.Name,
// ArcgisAccountID returns the organization's ArcGIS account ID, or the empty
// string when none has been recorded.
func (o Organization) ArcgisAccountID() string {
	return o.model.ArcgisAccountID.GetOr("")
}
// CountServiceRequest reports how many service requests belong to the
// organization.
func (o Organization) CountServiceRequest(ctx context.Context) (uint, error) {
	n, err := o.model.Servicerequests().Count(ctx, db.PGInstance.BobDB)
	if err != nil {
		return 0, fmt.Errorf("get service request count: %w", err)
	}
	return uint(n), nil
}
// CountSource reports how many point locations (breeding sources) belong to
// the organization.
func (o Organization) CountSource(ctx context.Context) (uint, error) {
	n, err := o.model.Pointlocations().Count(ctx, db.PGInstance.BobDB)
	if err != nil {
		return 0, fmt.Errorf("get source count: %w", err)
	}
	return uint(n), nil
}
// CountTrap reports how many trap locations belong to the organization.
func (o Organization) CountTrap(ctx context.Context) (uint, error) {
	n, err := o.model.Traplocations().Count(ctx, db.PGInstance.BobDB)
	if err != nil {
		return 0, fmt.Errorf("get trap count: %w", err)
	}
	return uint(n), nil
}
// Name returns the organization's display name.
func (o Organization) Name() string {
	return o.model.Name
}

// ID returns the organization's database primary key.
func (o Organization) ID() int32 {
	return o.model.ID
}

// IsSyncOngoing reports whether the background package currently has a sync
// in flight for this organization.
func (o Organization) IsSyncOngoing() bool {
	return background.IsSyncOngoing(o.ID())
}
// FieldseekerSyncLatest returns the most recent Fieldseeker sync record for
// the organization, or nil when no sync has ever run.
func (o Organization) FieldseekerSyncLatest(ctx context.Context) (*models.FieldseekerSync, error) {
	latest, err := o.model.FieldseekerSyncs(sm.OrderBy("created").Desc()).One(ctx, db.PGInstance.BobDB)
	if err == nil {
		return latest, nil
	}
	if err.Error() == "sql: no rows in result set" {
		// Never having synced is a normal state, not an error.
		return nil, nil
	}
	return nil, fmt.Errorf("get syncs: %w", err)
}
// ServiceArea is an axis-aligned bounding box describing the geographic
// region an organization services.
type ServiceArea struct {
	Min Point // corner with the minimum X and Y
	Max Point // corner with the maximum X and Y
}
// ServiceArea returns the organization's configured bounding box. When any
// of the four coordinates is unset, the zero ServiceArea is returned.
func (o Organization) ServiceArea() ServiceArea {
	m := o.model
	complete := !m.ServiceAreaXmin.IsNull() &&
		!m.ServiceAreaXmax.IsNull() &&
		!m.ServiceAreaYmin.IsNull() &&
		!m.ServiceAreaYmax.IsNull()
	if !complete {
		return ServiceArea{}
	}
	return ServiceArea{
		Min: Point{X: m.ServiceAreaXmin.MustGet(), Y: m.ServiceAreaYmin.MustGet()},
		Max: Point{X: m.ServiceAreaXmax.MustGet(), Y: m.ServiceAreaYmax.MustGet()},
	}
}
// ServiceRequestRecent returns up to the ten most recently created service
// requests for the organization.
func (o Organization) ServiceRequestRecent(ctx context.Context) ([]*models.FieldseekerServicerequest, error) {
	recent, err := o.model.Servicerequests(sm.OrderBy("creationdate").Desc(), sm.Limit(10)).All(ctx, db.PGInstance.BobDB)
	if err != nil {
		return []*models.FieldseekerServicerequest{}, fmt.Errorf("query service request: %w", err)
	}
	return recent, nil
}
// OrganizationByID loads an organization by primary key. It returns
// (nil, nil) when no such organization exists.
func OrganizationByID(ctx context.Context, id int) (*Organization, error) {
	model, err := models.FindOrganization(ctx, db.PGInstance.BobDB, int32(id))
	if err != nil {
		if err.Error() == "sql: no rows in result set" {
			return nil, nil
		}
		return nil, fmt.Errorf("query org: %w", err)
	}
	org := newOrganization(model)
	return &org, nil
}
// newOrganization wraps a database model in the platform Organization type.
// Exposed fields are read lazily through accessor methods on the model.
func newOrganization(org *models.Organization) Organization {
	return Organization{
		model: org,
	}
}

6
platform/point.go Normal file
View file

@ -0,0 +1,6 @@
package platform
// Point is a 2D coordinate.
// NOTE(review): for geographic callers X appears to be longitude and Y
// latitude (see ServiceArea's use of Xmin/Ymin) — confirm against callers.
type Point struct {
	X float64
	Y float64
}

View file

@ -96,3 +96,19 @@ func NuisanceReportForOrganization(ctx context.Context, org_id int32) ([]Nuisanc
}
return reports, nil
}
func NuisanceReportForOrganizationCount(ctx context.Context, org_id int32) (uint, error) {
type _Row struct {
Count uint `db:"count"`
}
row, err := bob.One(ctx, db.PGInstance.BobDB, psql.Select(
sm.Columns(
"COUNT(*) AS count",
),
sm.From("publicreport.nuisance"),
sm.Where(psql.Quote("publicreport", "nuisance", "organization_id").EQ(psql.Arg(org_id))),
), scan.StructMapper[_Row]())
if err != nil {
return 0, fmt.Errorf("query count: %w", err)
}
return row.Count, nil
}

View file

@ -100,3 +100,19 @@ func WaterReportForOrganization(ctx context.Context, org_id int32) ([]Water, err
}
return reports, nil
}
func WaterReportForOrganizationCount(ctx context.Context, org_id int32) (uint, error) {
type _Row struct {
Count uint `db:"count"`
}
row, err := bob.One(ctx, db.PGInstance.BobDB, psql.Select(
sm.Columns(
"COUNT(*) AS count",
),
sm.From("publicreport.water"),
sm.Where(psql.Quote("publicreport", "water", "organization_id").EQ(psql.Arg(org_id))),
), scan.StructMapper[_Row]())
if err != nil {
return 0, fmt.Errorf("query count: %w", err)
}
return row.Count, nil
}

View file

@ -15,10 +15,10 @@ import (
//"github.com/Gleipnir-Technology/bob/dialect/psql"
//"github.com/Gleipnir-Technology/bob/dialect/psql/sm"
//"github.com/Gleipnir-Technology/bob/dialect/psql/um"
"github.com/Gleipnir-Technology/nidus-sync/background"
"github.com/Gleipnir-Technology/nidus-sync/db"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/db/sql"
"github.com/Gleipnir-Technology/nidus-sync/platform/background"
"github.com/Gleipnir-Technology/nidus-sync/platform/email"
"github.com/Gleipnir-Technology/nidus-sync/platform/text"
"github.com/rs/zerolog/log"

View file

@ -1,4 +1,4 @@
package userfile
package subprocess
import (
"errors"
@ -6,20 +6,24 @@ import (
"os"
"os/exec"
"github.com/Gleipnir-Technology/nidus-sync/db"
"github.com/Gleipnir-Technology/nidus-sync/platform/file"
"github.com/google/uuid"
"github.com/rs/zerolog/log"
)
// fileContentPathAudioNormalized returns the on-disk path of the
// loudness-normalized rendition of the audio file identified by u.
func fileContentPathAudioNormalized(u uuid.UUID) string {
	return file.ContentPath(file.CollectionAudioNormalized, u)
}
func NormalizeAudio(audioUUID uuid.UUID) error {
//source := AudioFileContentPathRaw(audioUUID.String())
source := fileContentPath(CollectionAudioRaw, audioUUID)
source := file.ContentPath(file.CollectionAudioRaw, audioUUID)
_, err := os.Stat(source)
if errors.Is(err, os.ErrNotExist) {
log.Warn().Str("source", source).Msg("file doesn't exist, skipping normalization")
return nil
}
log.Info().Str("sourcce", source).Msg("Normalizing")
log.Info().Str("source", source).Msg("Normalizing")
//destination := AudioFileContentPathNormalized(audioUUID.String())
destination := fileContentPathAudioNormalized(audioUUID)
// Use "ffmpeg" directly, assuming it's in the system PATH
@ -29,10 +33,6 @@ func NormalizeAudio(audioUUID uuid.UUID) error {
log.Printf("FFmpeg output for normalization: %s", out)
return fmt.Errorf("ffmpeg normalization failed: %v", err)
}
err = db.NoteAudioNormalized(audioUUID.String())
if err != nil {
return fmt.Errorf("failed to update database for normalized audio %s: %v", audioUUID, err)
}
log.Info().Str("destination", destination).Msg("Normalized audio")
return nil
}
@ -47,7 +47,7 @@ func TranscodeToOgg(audioUUID uuid.UUID) error {
}
log.Info().Str("source", source).Msg("Transcoding to ogg")
//destination := userfile.AudioFileContentPathOgg(audioUUID.String())
destination := fileContentPath(CollectionAudioTranscoded, audioUUID)
destination := file.ContentPath(file.CollectionAudioTranscoded, audioUUID)
// Use "ffmpeg" directly, assuming it's in the system PATH
cmd := exec.Command("ffmpeg", "-i", source, "-vn", "-acodec", "libvorbis", destination)
out, err := cmd.CombinedOutput()
@ -55,15 +55,6 @@ func TranscodeToOgg(audioUUID uuid.UUID) error {
log.Error().Err(err).Bytes("out", out).Msg("FFmpeg output for OGG transcoding")
return fmt.Errorf("ffmpeg OGG transcoding failed: %v", err)
}
err = db.NoteAudioTranscodedToOgg(audioUUID.String())
if err != nil {
return fmt.Errorf("failed to update database for OGG transcoded audio %s: %v", audioUUID, err)
}
log.Info().Str("destination", destination).Msg("Transcoded audio")
return nil
}
func fileContentPathAudioNormalized(u uuid.UUID) string {
//destination := AudioFileContentPathNormalized(audioUUID.String())
return fileContentPath(CollectionAudioNormalized, u)
}

219
platform/tile.go Normal file
View file

@ -0,0 +1,219 @@
package platform
import (
"bytes"
"context"
"embed"
"fmt"
"io"
"net/http"
"os"
"path/filepath"
"github.com/Gleipnir-Technology/arcgis-go"
"github.com/Gleipnir-Technology/arcgis-go/fieldseeker"
"github.com/aarondl/opt/omit"
//"github.com/Gleipnir-Technology/bob"
//"github.com/Gleipnir-Technology/bob/dialect/psql"
"github.com/Gleipnir-Technology/nidus-sync/config"
"github.com/Gleipnir-Technology/nidus-sync/db"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/platform/background"
"github.com/Gleipnir-Technology/nidus-sync/platform/oauth"
"github.com/rs/zerolog/log"
)
//go:embed empty-tile.png
var emptyTileFS embed.FS
// GetTile writes the raster tile at (z, y, x) for the organization's ArcGIS
// map service to w, serving from the local cache when possible.
//
// Cache behavior: a hit in tile_cached_images is served from disk (or the
// shared placeholder when the cached row is flagged empty). On a miss the
// tile is fetched from ArcGIS, written to disk (unless it is the
// placeholder), recorded in the database, and then written to w.
func GetTile(ctx context.Context, w http.ResponseWriter, org Organization, z, y, x uint) error {
	if org.model.ArcgisMapServiceID.IsNull() {
		return fmt.Errorf("no map service ID set")
	}
	map_service_id := org.model.ArcgisMapServiceID.MustGet()
	tile_path := tilePath(map_service_id, z, y, x)
	tile_row, err := models.TileCachedImages.Query(
		models.SelectWhere.TileCachedImages.ArcgisID.EQ(map_service_id),
		models.SelectWhere.TileCachedImages.X.EQ(int32(x)),
		models.SelectWhere.TileCachedImages.Y.EQ(int32(y)),
		models.SelectWhere.TileCachedImages.Z.EQ(int32(z)),
	).One(ctx, db.PGInstance.BobDB)
	if err == nil {
		// Cache hit: an "empty" entry maps to the placeholder image,
		// everything else is read back from disk.
		var tile *TileRaster
		if tile_row.IsEmpty {
			tile = TileRasterPlaceholder()
		} else {
			tile, err = loadTileFromDisk(tile_path)
			if err != nil {
				return fmt.Errorf("load tile from disk: %w", err)
			}
		}
		log.Debug().Uint("z", z).Uint("y", y).Uint("x", x).Bool("is empty", tile_row.IsEmpty).Msg("tile from cache")
		return writeTile(w, tile)
	}
	// Any error other than "not cached yet" is fatal.
	if err.Error() != "sql: no rows in result set" {
		return fmt.Errorf("query db: %w", err)
	}
	image, err := ImageAtTile(ctx, org.model, uint(z), uint(y), uint(x))
	if err != nil {
		return fmt.Errorf("image at tile: %w", err)
	}
	// Placeholder tiles are recorded in the database but never written to
	// disk; the embedded PNG is reused instead.
	if !image.IsPlaceholder {
		err = saveTileToDisk(image, tile_path)
		if err != nil {
			return fmt.Errorf("save tile: %w", err)
		}
	}
	_, err = models.TileCachedImages.Insert(&models.TileCachedImageSetter{
		ArcgisID: omit.From(map_service_id),
		X:        omit.From(int32(x)),
		Y:        omit.From(int32(y)),
		Z:        omit.From(int32(z)),
		IsEmpty:  omit.From(image.IsPlaceholder),
	}).One(ctx, db.PGInstance.BobDB)
	if err != nil {
		return fmt.Errorf("save to db: %w", err)
	}
	log.Debug().Uint("z", z).Uint("y", y).Uint("x", x).Bool("placeholder", image.IsPlaceholder).Msg("caching tile")
	return writeTile(w, image)
}
// ImageAtPoint fetches the aerial-imagery tile covering the given GPS
// coordinate at the given zoom level. When the map service has no imagery
// there, the shared placeholder tile is returned instead.
func ImageAtPoint(ctx context.Context, org Organization, level uint, lat, lng float64) (*TileRaster, error) {
	fssync, err := getFieldseeker(ctx, org.model)
	if err != nil {
		return nil, fmt.Errorf("create fssync: %w", err)
	}
	service, err := aerialImageService(ctx, fssync.Arcgis)
	if err != nil {
		return nil, fmt.Errorf("no map service: %w", err)
	}
	data, err := service.TileGPS(ctx, level, lat, lng)
	if err != nil {
		return nil, fmt.Errorf("tilegps: %w", err)
	}
	if len(data) == 0 {
		return TileRasterPlaceholder(), nil
	}
	return &TileRaster{Content: data, IsPlaceholder: false}, nil
}
// loadTileFromDisk reads a previously cached tile image from tile_path.
func loadTileFromDisk(tile_path string) (*TileRaster, error) {
	// os.ReadFile replaces the former Open + io.ReadAll pair and guarantees
	// the file handle is closed.
	img, err := os.ReadFile(tile_path)
	if err != nil {
		return nil, fmt.Errorf("read %s: %w", tile_path, err)
	}
	return &TileRaster{
		Content:       img,
		IsPlaceholder: false,
	}, nil
}
// saveTileToDisk writes a tile image to tile_path, creating parent
// directories as needed.
func saveTileToDisk(image *TileRaster, tile_path string) error {
	if err := os.MkdirAll(filepath.Dir(tile_path), 0750); err != nil {
		return fmt.Errorf("mkdirall: %w", err)
	}
	if err := os.WriteFile(tile_path, image.Content, 0644); err != nil {
		return fmt.Errorf("write image file: %w", err)
	}
	return nil
}
// tilePath builds the cache-file path for a tile of the given map service at
// zoom z, row y, column x, rooted under the configured files directory.
func tilePath(map_service_id string, z, y, x uint) string {
	return fmt.Sprintf("%s/tile-cache/%s/%d/%d/%d.raw", config.FilesDirectory, map_service_id, z, y, x)
}
// writeTile sends the tile image to w as a PNG response body.
func writeTile(w http.ResponseWriter, image *TileRaster) error {
	w.Header().Set("Content-Type", "image/png")
	w.Header().Set("Content-Length", fmt.Sprintf("%d", len(image.Content)))
	// Write the bytes directly rather than io.Copy over a bytes.Buffer,
	// which allocated a buffer per request for no benefit.
	if _, err := w.Write(image.Content); err != nil {
		return fmt.Errorf("write: %w", err)
	}
	return nil
}
// clientByOrgID caches a FieldSeeker client per organization so repeated
// tile requests reuse the same client.
// NOTE(review): this map is read and written without synchronization —
// confirm access is single-threaded or add a lock.
var clientByOrgID = make(map[int32]*fieldseeker.FieldSeeker, 0)

// tileRasterPlaceholder memoizes the embedded empty-tile image; see
// TileRasterPlaceholder.
var tileRasterPlaceholder *TileRaster

// TileRaster is a raster tile image payload. IsPlaceholder marks the shared
// "no imagery here" tile.
type TileRaster struct {
	Content       []byte
	IsPlaceholder bool
}
// ImageAtTile fetches the aerial-imagery tile at the given zoom/row/column
// for the organization, returning the shared placeholder when the map
// service has no imagery at that location.
func ImageAtTile(ctx context.Context, org *models.Organization, level, y, x uint) (*TileRaster, error) {
	// Named "token" rather than "oauth" so we do not shadow the oauth package.
	token, err := oauth.GetOAuthForOrg(ctx, org)
	if err != nil {
		return nil, fmt.Errorf("get oauth for org: %w", err)
	}
	fssync, err := background.NewFieldSeeker(ctx, token)
	if err != nil {
		return nil, fmt.Errorf("create fssync: %w", err)
	}
	service, err := aerialImageService(ctx, fssync.Arcgis)
	if err != nil {
		return nil, fmt.Errorf("no map service: %w", err)
	}
	data, err := service.Tile(ctx, level, y, x)
	if err != nil {
		return nil, fmt.Errorf("tile: %w", err)
	}
	// No data at this location, so supply the empty tile placeholder
	if len(data) == 0 {
		return TileRasterPlaceholder(), nil
	}
	return &TileRaster{Content: data, IsPlaceholder: false}, nil
}
// TileRasterPlaceholder returns the shared "no imagery" tile, lazily loading
// the embedded empty-tile.png on first use.
// NOTE(review): the lazy init is not synchronized; concurrent first calls
// could both read the embedded file (harmless duplicate work, benign last-
// writer-wins) — confirm, or guard with sync.Once.
func TileRasterPlaceholder() *TileRaster {
	if tileRasterPlaceholder != nil {
		return tileRasterPlaceholder
	}
	empty, err := emptyTileFS.ReadFile("empty-tile.png")
	if err != nil {
		// The PNG is compiled in via go:embed, so failure here indicates a
		// build defect rather than a runtime condition.
		panic(fmt.Sprintf("Failed to read empty-tile.png: %v", err))
	}
	tileRasterPlaceholder = &TileRaster{
		Content:       empty,
		IsPlaceholder: true,
	}
	return tileRasterPlaceholder
}
// aerialImageService returns the first map service the ArcGIS instance
// exposes, which this code treats as the aerial imagery layer.
func aerialImageService(ctx context.Context, gis *arcgis.ArcGIS) (*arcgis.MapService, error) {
	map_services, err := gis.MapServices(ctx)
	if err != nil {
		return nil, fmt.Errorf("aerial image service: %w", err)
	}
	// Replaces a one-iteration range loop; also fixes the "non found" typo
	// in the error message.
	if len(map_services) == 0 {
		return nil, fmt.Errorf("none found")
	}
	return &map_services[0], nil
}
// getFieldseeker returns a cached FieldSeeker client for the organization,
// creating (and caching) one on first use.
func getFieldseeker(ctx context.Context, org *models.Organization) (*fieldseeker.FieldSeeker, error) {
	if fssync, ok := clientByOrgID[org.ID]; ok {
		return fssync, nil
	}
	token, err := oauth.GetOAuthForOrg(ctx, org)
	if err != nil {
		return nil, fmt.Errorf("get oauth for org: %w", err)
	}
	fssync, err := background.NewFieldSeeker(
		ctx,
		token,
	)
	// BUG FIX: the error from NewFieldSeeker was previously ignored, which
	// could cache and return a nil/broken client for this organization.
	if err != nil {
		return nil, fmt.Errorf("create fieldseeker client: %w", err)
	}
	clientByOrgID[org.ID] = fssync
	return fssync, nil
}

View file

@ -1,4 +1,4 @@
package sync
package platform
import (
"errors"
@ -76,6 +76,13 @@ type BreedingSourceDetail struct {
Comments string `json:"comments"`
}
// BreedingSourceSummary is a compact listing row for a breeding source.
type BreedingSourceSummary struct {
	ID            uuid.UUID
	Type          string
	LastInspected *time.Time // nil when never inspected
	LastTreated   *time.Time // nil when never treated
}
type Trap struct {
Active bool
Comments string
@ -181,7 +188,7 @@ type Treatment struct {
Product string
}
func toTemplateTrap(trap *models.FieldseekerTraplocation, trap_data []sql.TrapDataByLocationIDRecentRow, count_slice []sql.TrapCountByLocationIDRow) (result Trap, err error) {
func toTrap(trap *models.FieldseekerTraplocation, trap_data []sql.TrapDataByLocationIDRecentRow, count_slice []sql.TrapCountByLocationIDRow) (result Trap, err error) {
log.Debug().Str("globalid", trap.Globalid.String()).Msg("Working on trap")
cell, err := h3utils.ToCell(trap.H3cell.MustGet())
if err != nil {
@ -360,7 +367,7 @@ func toTemplateTrapData(trap_data models.FieldseekerTrapdatumSlice) ([]TrapData,
}
return results, nil
}
func toTemplateTreatment(rows models.FieldseekerTreatmentSlice) ([]Treatment, error) {
func toTreatment(rows models.FieldseekerTreatmentSlice) ([]Treatment, error) {
var results []Treatment
for _, r := range rows {
results = append(results, Treatment{
@ -407,15 +414,13 @@ func fsIntToBool(val null.Val[int16]) bool {
}
// toTemplateBreedingSource transforms the DB model into the display model
func toTemplateBreedingSource(source *models.FieldseekerPointlocation) *BreedingSourceDetail {
func toBreedingSource(source *models.FieldseekerPointlocation) (*BreedingSourceDetail, error) {
if source.H3cell.IsNull() {
log.Error().Msg("h3 cell is null")
return nil
return nil, fmt.Errorf("h3 cell is null")
}
cell, err := h3utils.ToCell(source.H3cell.MustGet())
if err != nil {
log.Error().Err(err).Msg("Failed to get h3 cell from point location")
return nil
return nil, fmt.Errorf("Failed to get h3 cell from point location: %w", err)
}
return &BreedingSourceDetail{
// Basic Information
@ -477,7 +482,7 @@ func toTemplateBreedingSource(source *models.FieldseekerPointlocation) *Breeding
EditedAt: getTimeOrNull(source.Editdate),
Editor: source.Editor.GetOr(""),
Comments: source.Comments.GetOr(""),
}
}, nil
}
func getTimeOrNull(v null.Val[time.Time]) *time.Time {

View file

@ -1,4 +1,4 @@
package sync
package platform
import (
"sort"
@ -17,7 +17,7 @@ type TreatmentModel struct {
Errors []time.Duration
}
func modelTreatment(treatments []Treatment) []TreatmentModel {
func ModelTreatment(treatments []Treatment) []TreatmentModel {
treatment_times := make([]time.Time, 0)
for _, treatment := range treatments {
if treatment.Date != nil {

View file

@ -9,11 +9,11 @@ import (
"github.com/Gleipnir-Technology/bob/dialect/psql"
"github.com/Gleipnir-Technology/bob/dialect/psql/sm"
"github.com/Gleipnir-Technology/bob/dialect/psql/um"
"github.com/Gleipnir-Technology/nidus-sync/background"
"github.com/Gleipnir-Technology/nidus-sync/db"
"github.com/Gleipnir-Technology/nidus-sync/db/enums"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/userfile"
"github.com/Gleipnir-Technology/nidus-sync/platform/background"
"github.com/Gleipnir-Technology/nidus-sync/platform/file"
"github.com/aarondl/opt/omit"
"github.com/aarondl/opt/omitnull"
"github.com/rs/zerolog/log"
@ -41,7 +41,7 @@ type UploadSummary struct {
Type string `db:"type"`
}
func NewUpload(ctx context.Context, u *models.User, upload userfile.FileUpload, t enums.FileuploadCsvtype) (Upload, error) {
func NewUpload(ctx context.Context, u User, upload file.FileUpload, t enums.FileuploadCsvtype) (Upload, error) {
txn, err := db.PGInstance.BobDB.BeginTx(ctx, nil)
if err != nil {
return Upload{}, fmt.Errorf("Failed to begin transaction: %w", err)
@ -51,10 +51,10 @@ func NewUpload(ctx context.Context, u *models.User, upload userfile.FileUpload,
file, err := models.FileuploadFiles.Insert(&models.FileuploadFileSetter{
ContentType: omit.From(upload.ContentType),
Created: omit.From(time.Now()),
CreatorID: omit.From(u.ID),
CreatorID: omit.From(int32(u.ID)),
Deleted: omitnull.FromPtr[time.Time](nil),
Name: omit.From(upload.Name),
OrganizationID: omit.From(u.OrganizationID),
OrganizationID: omit.From(u.Organization.ID()),
Status: omit.From(enums.FileuploadFilestatustypeUploaded),
SizeBytes: omit.From(int32(upload.SizeBytes)),
FileUUID: omit.From(upload.UUID),
@ -78,7 +78,7 @@ func NewUpload(ctx context.Context, u *models.User, upload userfile.FileUpload,
ID: file.ID,
}, nil
}
func UploadCommit(ctx context.Context, org *models.Organization, file_id int32, committer *models.User) error {
func UploadCommit(ctx context.Context, org Organization, file_id int32, committer User) error {
// Create addresses for each row
// Create sites for each row
// Create pools for each row
@ -92,7 +92,7 @@ func UploadCommit(ctx context.Context, org *models.Organization, file_id int32,
background.CommitUpload(file_id)
return err
}
func UploadDiscard(ctx context.Context, org *models.Organization, file_id int32) error {
func UploadDiscard(ctx context.Context, org Organization, file_id int32) error {
_, err := psql.Update(
um.Table(models.FileuploadFiles.Alias()),
um.SetCol("status").ToArg("discarded"),
@ -101,7 +101,7 @@ func UploadDiscard(ctx context.Context, org *models.Organization, file_id int32)
).Exec(ctx, db.PGInstance.BobDB)
return err
}
func UploadSummaryList(ctx context.Context, org *models.Organization) ([]UploadSummary, error) {
func UploadSummaryList(ctx context.Context, org Organization) ([]UploadSummary, error) {
results := make([]UploadSummary, 0)
rows, err := bob.All(ctx, db.PGInstance.BobDB, psql.Select(
sm.Columns(

View file

@ -3,37 +3,133 @@ package platform
import (
"context"
"fmt"
"strings"
"github.com/aarondl/opt/omit"
//"github.com/Gleipnir-Technology/bob"
"github.com/Gleipnir-Technology/bob/dialect/psql/dialect"
"github.com/Gleipnir-Technology/bob/mods"
"github.com/Gleipnir-Technology/nidus-sync/db"
"github.com/Gleipnir-Technology/nidus-sync/db/enums"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/notification"
"github.com/Gleipnir-Technology/nidus-sync/debug"
"github.com/rs/zerolog/log"
)
type NoUserError struct{}
func (e NoUserError) Error() string { return "That user does not exist" }
type User struct {
DisplayName string `json:"display_name"`
Initials string
Notifications []notification.Notification
Organization Organization `json:"organization"`
Role string `json:"role"`
Username string `json:"username"`
DisplayName string `json:"display_name"`
ID int `json:"-"`
Initials string `json:"initials"`
Notifications []Notification `json:"-"`
Organization Organization `json:"organization"`
PasswordHash string `json:"-"`
PasswordHashType string `json:"-"`
Role string `json:"role"`
Username string `json:"username"`
model *models.User
}
func UsersByID(ctx context.Context, org *models.Organization) (map[int32]*User, error) {
users, err := org.User().All(ctx, db.PGInstance.BobDB)
// HasRoot reports whether the user holds the root role.
func (u User) HasRoot() bool {
	// BUG FIX: this previously returned Role != UserroleRoot — the exact
	// opposite of what the name promises. Verify no caller has compensated
	// for the inverted result.
	return u.model.Role == enums.UserroleRoot
}
// CreateUser provisions a brand-new user along with a personal organization
// to own it. password_hash must already be a computed hash (stored with
// hash type HashtypeBcrypt14); this function never sees the plaintext.
func CreateUser(ctx context.Context, username string, name string, password_hash string) (*User, error) {
	// Every new user gets their own single-member organization.
	o_setter := models.OrganizationSetter{
		Name: omit.From(fmt.Sprintf("%s's organization", username)),
	}
	o, err := models.Organizations.Insert(&o_setter).One(ctx, db.PGInstance.BobDB)
	if err != nil {
		return nil, fmt.Errorf("Failed to create organization: %w", err)
	}
	log.Info().Int32("id", o.ID).Msg("Created organization")
	u_setter := models.UserSetter{
		DisplayName:      omit.From(name),
		OrganizationID:   omit.From(o.ID),
		PasswordHash:     omit.From(password_hash),
		PasswordHashType: omit.From(enums.HashtypeBcrypt14),
		// The creator of a fresh organization is automatically its owner.
		Role:     omit.From(enums.UserroleAccountOwner),
		Username: omit.From(username),
	}
	user, err := models.Users.Insert(&u_setter).One(ctx, db.PGInstance.BobDB)
	if err != nil {
		// NOTE(review): the organization insert is not rolled back here, so a
		// failed user insert leaves an orphan organization — confirm intended.
		return nil, fmt.Errorf("Failed to create user: %w", err)
	}
	log.Info().Int32("id", user.ID).Str("username", user.Username).Msg("Created user")
	return &User{
		DisplayName:   user.DisplayName,
		Initials:      extractInitials(user.DisplayName),
		Notifications: []Notification{},
		Organization:  newOrganization(o),
		Role:          user.Role.String(),
		Username:      user.Username,
		model:         user,
	}, nil
}
// UserByID loads a user by database primary key. Returns NoUserError when
// no such user exists.
func UserByID(ctx context.Context, user_id int) (*User, error) {
	return getUser(ctx, models.SelectWhere.Users.ID.EQ(int32(user_id)))
}

// UserByUsername loads a user by their unique username. Returns NoUserError
// when no such user exists.
func UserByUsername(ctx context.Context, username string) (*User, error) {
	return getUser(ctx, models.SelectWhere.Users.Username.EQ(username))
}
func UsersByOrg(ctx context.Context, org Organization) (map[int32]*User, error) {
users, err := org.model.User().All(ctx, db.PGInstance.BobDB)
if err != nil {
return make(map[int32]*User, 0), fmt.Errorf("get all org users: %w", err)
}
organization := NewOrganization(org)
results := make(map[int32]*User, len(users))
for _, user := range users {
results[user.ID] = &User{
DisplayName: user.DisplayName,
Initials: "",
Notifications: []notification.Notification{},
Organization: organization,
Notifications: []Notification{},
Organization: org,
Role: user.Role.String(),
Username: user.Username,
model: user,
}
}
return results, nil
}
// getUser loads the single user matching the given where clause, with the
// user's organization preloaded, and wraps it in the platform User type.
// Returns NoUserError when no row matches.
func getUser(ctx context.Context, where mods.Where[*dialect.SelectQuery]) (*User, error) {
	user, err := models.Users.Query(
		models.Preload.User.Organization(),
		where,
	).One(ctx, db.PGInstance.BobDB)
	if err != nil {
		if err.Error() == "No such user" || err.Error() == "sql: no rows in result set" {
			return nil, &NoUserError{}
		}
		debug.LogErrorTypeInfo(err)
		log.Error().Err(err).Msg("Unrecognized error. This should be updated in the findUser code")
		return nil, err
	}
	org := newOrganization(user.R.Organization)
	return &User{
		DisplayName: user.DisplayName,
		// BUG FIX: ID and model were previously left unset; a nil model makes
		// methods such as HasRoot and the notification lookups dereference a
		// nil pointer (CreateUser and UsersByOrg both populate model).
		ID:            int(user.ID),
		Initials:      extractInitials(user.DisplayName),
		Notifications: []Notification{},
		Organization:  org,
		Role:          user.Role.String(),
		Username:      user.Username,
		model:         user,
	}, nil
}
// extractInitials derives uppercase initials from a display name by taking
// the first byte of each whitespace-separated word.
func extractInitials(name string) string {
	var b strings.Builder
	for _, word := range strings.Fields(name) {
		// strings.Fields never yields empty strings, so word[:1] is safe.
		b.WriteString(strings.ToUpper(word[:1]))
	}
	return b.String()
}

View file

@ -18,7 +18,7 @@ import (
"github.com/Gleipnir-Technology/bob/dialect/psql"
"github.com/Gleipnir-Technology/bob/dialect/psql/um"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/userfile"
"github.com/Gleipnir-Technology/nidus-sync/platform/file"
"github.com/aarondl/opt/omit"
"github.com/aarondl/opt/omitnull"
"github.com/google/uuid"
@ -90,13 +90,13 @@ func extractExif(content_type string, file_bytes []byte) (result *ExifCollection
}
func extractImageUpload(headers *multipart.FileHeader) (upload ImageUpload, err error) {
file, err := headers.Open()
f, err := headers.Open()
if err != nil {
return upload, fmt.Errorf("Failed to open header: %w", err)
}
defer file.Close()
defer f.Close()
file_bytes, err := io.ReadAll(file)
file_bytes, err := io.ReadAll(f)
content_type := http.DetectContentType(file_bytes)
exif, err := extractExif(content_type, file_bytes)
@ -112,7 +112,7 @@ func extractImageUpload(headers *multipart.FileHeader) (upload ImageUpload, err
if err != nil {
return upload, fmt.Errorf("Failed to create quick report photo uuid", err)
}
err = userfile.PublicImageFileContentWrite(u, bytes.NewReader(file_bytes))
err = file.PublicImageFileContentWrite(u, bytes.NewReader(file_bytes))
if err != nil {
return upload, fmt.Errorf("Failed to write image file to disk: %w", err)
}

View file

@ -3,7 +3,7 @@ package rmo
import (
"net/http"
"github.com/Gleipnir-Technology/nidus-sync/userfile"
"github.com/Gleipnir-Technology/nidus-sync/platform/file"
"github.com/go-chi/chi/v5"
"github.com/google/uuid"
)
@ -20,5 +20,5 @@ func getImageByUUID(w http.ResponseWriter, r *http.Request) {
http.Error(w, "Failed to parse uuid", http.StatusBadRequest)
return
}
userfile.PublicImageFileToResponse(w, uid)
file.PublicImageFileToResponse(w, uid)
}

View file

@ -4,14 +4,14 @@ import (
"context"
"net/http"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/html"
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
"github.com/Gleipnir-Technology/nidus-sync/platform"
)
type contentAdminDash struct{}
func getAdminDash(ctx context.Context, r *http.Request, org *models.Organization, user *models.User) (*html.Response[contentAdminDash], *nhttp.ErrorWithStatus) {
func getAdminDash(ctx context.Context, r *http.Request, user platform.User) (*html.Response[contentAdminDash], *nhttp.ErrorWithStatus) {
content := contentAdminDash{}
return html.NewResponse("sync/admin-dash.html", content), nil
}

View file

@ -4,24 +4,24 @@ import (
"context"
"net/http"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/h3utils"
"github.com/Gleipnir-Technology/nidus-sync/html"
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
"github.com/Gleipnir-Technology/nidus-sync/platform"
"github.com/go-chi/chi/v5"
"github.com/uber/h3-go/v4"
)
type contentCell struct {
BreedingSources []BreedingSourceSummary
BreedingSources []platform.BreedingSourceSummary
CellBoundary h3.CellBoundary
Inspections []Inspection
Inspections []platform.Inspection
MapData ComponentMap
Traps []TrapSummary
Treatments []Treatment
Traps []platform.TrapSummary
Treatments []platform.Treatment
}
func getCellDetails(ctx context.Context, r *http.Request, org *models.Organization, user *models.User) (*html.Response[contentCell], *nhttp.ErrorWithStatus) {
func getCellDetails(ctx context.Context, r *http.Request, user platform.User) (*html.Response[contentCell], *nhttp.ErrorWithStatus) {
cell_str := chi.URLParam(r, "cell")
if cell_str == "" {
return nil, nhttp.NewErrorStatus(http.StatusBadRequest, "There should always be a cell")
@ -38,7 +38,7 @@ func getCellDetails(ctx context.Context, r *http.Request, org *models.Organizati
if err != nil {
return nil, nhttp.NewError("Failed to get boundary: %w", err)
}
inspections, err := inspectionsByCell(ctx, org, h3.Cell(c))
inspections, err := platform.InspectionsByCell(ctx, user.Organization, h3.Cell(c))
if err != nil {
return nil, nhttp.NewError("Failed to get inspections by cell: %w", err)
}
@ -47,16 +47,16 @@ func getCellDetails(ctx context.Context, r *http.Request, org *models.Organizati
return nil, nhttp.NewError("Failed to get boundaries: %w", err)
}
resolution := h3.Cell(c).Resolution()
sources, err := breedingSourcesByCell(ctx, org, h3.Cell(c))
sources, err := platform.BreedingSourcesByCell(ctx, user.Organization, h3.Cell(c))
if err != nil {
return nil, nhttp.NewError("Failed to get sources: %w", err)
}
traps, err := trapsByCell(ctx, org, h3.Cell(c))
traps, err := platform.TrapsByCell(ctx, user.Organization, h3.Cell(c))
if err != nil {
return nil, nhttp.NewError("Failed to get traps: %w", err)
}
treatments, err := treatmentsByCell(ctx, org, h3.Cell(c))
treatments, err := platform.TreatmentsByCell(ctx, user.Organization, h3.Cell(c))
if err != nil {
return nil, nhttp.NewError("Failed to get treatments: %w", err)
}

View file

@ -4,13 +4,13 @@ import (
"context"
"net/http"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/html"
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
"github.com/Gleipnir-Technology/nidus-sync/platform"
)
type contentCommunicationRoot struct{}
func getCommunicationRoot(ctx context.Context, r *http.Request, org *models.Organization, user *models.User) (*html.Response[contentCommunicationRoot], *nhttp.ErrorWithStatus) {
func getCommunicationRoot(ctx context.Context, r *http.Request, user platform.User) (*html.Response[contentCommunicationRoot], *nhttp.ErrorWithStatus) {
return html.NewResponse("sync/communication-root.html", contentCommunicationRoot{}), nil
}

View file

@ -6,22 +6,22 @@ import (
"github.com/Gleipnir-Technology/bob/dialect/psql"
"github.com/Gleipnir-Technology/bob/dialect/psql/um"
"github.com/Gleipnir-Technology/nidus-sync/arcgis"
"github.com/Gleipnir-Technology/nidus-sync/db"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/html"
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
"github.com/Gleipnir-Technology/nidus-sync/platform"
"github.com/rs/zerolog/log"
)
type contentConfigurationRoot struct{}
func getConfigurationRoot(ctx context.Context, r *http.Request, org *models.Organization, user *models.User) (*html.Response[contentConfigurationRoot], *nhttp.ErrorWithStatus) {
func getConfigurationRoot(ctx context.Context, r *http.Request, user platform.User) (*html.Response[contentConfigurationRoot], *nhttp.ErrorWithStatus) {
return html.NewResponse("sync/configuration/root.html", contentConfigurationRoot{}), nil
}
type contentSettingOrganization struct {
Organization *models.Organization
Organization platform.Organization
}
type contentSettingIntegration struct {
@ -30,11 +30,7 @@ type contentSettingIntegration struct {
ServiceMaps []*models.ArcgisServiceMap
}
func getConfigurationOrganization(ctx context.Context, r *http.Request, org *models.Organization, u *models.User) (*html.Response[contentSettingOrganization], *nhttp.ErrorWithStatus) {
org, err := u.Organization().One(ctx, db.PGInstance.BobDB)
if err != nil {
return nil, nhttp.NewError("get organization: %w", err)
}
func getConfigurationOrganization(ctx context.Context, r *http.Request, u platform.User) (*html.Response[contentSettingOrganization], *nhttp.ErrorWithStatus) {
/*
var district contentDistrict
district, err = bob.One[contentDistrict](ctx, db.PGInstance.BobDB, psql.Select(
@ -67,12 +63,12 @@ func getConfigurationOrganization(ctx context.Context, r *http.Request, org *mod
}
*/
data := contentSettingOrganization{
Organization: org,
Organization: u.Organization,
}
return html.NewResponse("sync/configuration/organization.html", data), nil
}
func getConfigurationIntegration(ctx context.Context, r *http.Request, org *models.Organization, u *models.User) (*html.Response[contentSettingIntegration], *nhttp.ErrorWithStatus) {
oauth, err := arcgis.GetOAuthForUser(ctx, u)
func getConfigurationIntegration(ctx context.Context, r *http.Request, u platform.User) (*html.Response[contentSettingIntegration], *nhttp.ErrorWithStatus) {
oauth, err := platform.GetOAuthForUser(ctx, u)
if err != nil {
return nil, nhttp.NewError("Failed to get oauth: %w", err)
}
@ -81,15 +77,16 @@ func getConfigurationIntegration(ctx context.Context, r *http.Request, org *mode
}
return html.NewResponse("sync/configuration/integration.html", data), nil
}
func getConfigurationIntegrationArcgis(ctx context.Context, r *http.Request, org *models.Organization, u *models.User) (*html.Response[contentSettingIntegration], *nhttp.ErrorWithStatus) {
oauth, err := arcgis.GetOAuthForUser(ctx, u)
func getConfigurationIntegrationArcgis(ctx context.Context, r *http.Request, u platform.User) (*html.Response[contentSettingIntegration], *nhttp.ErrorWithStatus) {
oauth, err := platform.GetOAuthForUser(ctx, u)
if err != nil {
return nil, nhttp.NewError("Failed to get oauth: %w", err)
}
var account *models.ArcgisAccount
var service_maps []*models.ArcgisServiceMap
if org.ArcgisAccountID.IsValue() {
account, err = models.FindArcgisAccount(ctx, db.PGInstance.BobDB, org.ArcgisAccountID.MustGet())
account_id := u.Organization.ArcgisAccountID()
if account_id != "" {
account, err = models.FindArcgisAccount(ctx, db.PGInstance.BobDB, account_id)
if err != nil {
return nil, nhttp.NewError("Failed to get arcgis: %w", err)
}
@ -110,19 +107,19 @@ func getConfigurationIntegrationArcgis(ctx context.Context, r *http.Request, org
type contentSettingPlaceholder struct{}
func getConfigurationPesticide(ctx context.Context, r *http.Request, org *models.Organization, user *models.User) (*html.Response[contentSettingPlaceholder], *nhttp.ErrorWithStatus) {
func getConfigurationPesticide(ctx context.Context, r *http.Request, user platform.User) (*html.Response[contentSettingPlaceholder], *nhttp.ErrorWithStatus) {
content := contentSettingPlaceholder{}
return html.NewResponse("sync/configuration/pesticide.html", content), nil
}
func getConfigurationPesticideAdd(ctx context.Context, r *http.Request, org *models.Organization, user *models.User) (*html.Response[contentSettingPlaceholder], *nhttp.ErrorWithStatus) {
func getConfigurationPesticideAdd(ctx context.Context, r *http.Request, user platform.User) (*html.Response[contentSettingPlaceholder], *nhttp.ErrorWithStatus) {
content := contentSettingPlaceholder{}
return html.NewResponse("sync/configuration/pesticide-add.html", content), nil
}
func getConfigurationUserAdd(ctx context.Context, r *http.Request, org *models.Organization, user *models.User) (*html.Response[contentSettingPlaceholder], *nhttp.ErrorWithStatus) {
func getConfigurationUserAdd(ctx context.Context, r *http.Request, user platform.User) (*html.Response[contentSettingPlaceholder], *nhttp.ErrorWithStatus) {
content := contentSettingPlaceholder{}
return html.NewResponse("sync/configuration/user-add.html", content), nil
}
func getConfigurationUserList(ctx context.Context, r *http.Request, org *models.Organization, user *models.User) (*html.Response[contentSettingPlaceholder], *nhttp.ErrorWithStatus) {
func getConfigurationUserList(ctx context.Context, r *http.Request, user platform.User) (*html.Response[contentSettingPlaceholder], *nhttp.ErrorWithStatus) {
content := contentSettingPlaceholder{}
return html.NewResponse("sync/configuration/user-list.html", content), nil
}
@ -131,17 +128,17 @@ type formArcgisConfiguration struct {
MapService *string `schema:"map-service"`
}
func postConfigurationIntegrationArcgis(ctx context.Context, r *http.Request, org *models.Organization, u *models.User, f formArcgisConfiguration) (string, *nhttp.ErrorWithStatus) {
func postConfigurationIntegrationArcgis(ctx context.Context, r *http.Request, u platform.User, f formArcgisConfiguration) (string, *nhttp.ErrorWithStatus) {
if f.MapService != nil {
_, err := psql.Update(
um.Table("organization"),
um.SetCol("arcgis_map_service_id").ToArg(f.MapService),
um.Where(psql.Quote("id").EQ(psql.Arg(org.ID))),
um.Where(psql.Quote("id").EQ(psql.Arg(u.Organization.ID()))),
).Exec(ctx, db.PGInstance.BobDB)
if err != nil {
return "", nhttp.NewError("Failed to update map service config: %w", err)
}
log.Info().Str("map-service", *f.MapService).Int32("org-id", org.ID).Msg("changed map service")
log.Info().Str("map-service", *f.MapService).Int32("org-id", u.Organization.ID()).Msg("changed map service")
} else {
log.Info().Msg("no map service")
}

View file

@ -7,11 +7,7 @@ import (
"net/http"
"time"
"github.com/Gleipnir-Technology/bob/dialect/psql/sm"
"github.com/Gleipnir-Technology/nidus-sync/auth"
"github.com/Gleipnir-Technology/nidus-sync/background"
"github.com/Gleipnir-Technology/nidus-sync/db"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/html"
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
"github.com/Gleipnir-Technology/nidus-sync/platform"
@ -19,22 +15,19 @@ import (
"github.com/google/uuid"
)
// Authenticated pages
var ()
type contentSource struct {
Inspections []Inspection
Inspections []platform.Inspection
MapData ComponentMap
Source *BreedingSourceDetail
Traps []TrapNearby
Treatments []Treatment
Source *platform.BreedingSourceDetail
Traps []platform.TrapNearby
Treatments []platform.Treatment
//TreatmentCadence TreatmentCadence
TreatmentModels []TreatmentModel
TreatmentModels []platform.TreatmentModel
User platform.User
}
type contentTrap struct {
MapData ComponentMap
Trap Trap
Trap platform.Trap
User platform.User
}
type contentDashboard struct {
@ -59,7 +52,7 @@ func getDistrict(w http.ResponseWriter, r *http.Request) {
html.RenderOrError(w, "sync/district.html", &context)
}
func getLayoutTest(ctx context.Context, r *http.Request, org *models.Organization, user *models.User) (*html.Response[contentLayoutTest], *nhttp.ErrorWithStatus) {
func getLayoutTest(ctx context.Context, r *http.Request, user platform.User) (*html.Response[contentLayoutTest], *nhttp.ErrorWithStatus) {
return html.NewResponse("sync/layout-test.html", contentLayoutTest{}), nil
}
@ -68,7 +61,7 @@ func getRoot(w http.ResponseWriter, r *http.Request) {
user, err := auth.GetAuthenticatedUser(r)
if err != nil {
// No credentials or user not found: go to login
if errors.Is(err, &auth.NoCredentialsError{}) || errors.Is(err, &auth.NoUserError{}) {
if errors.Is(err, &auth.NoCredentialsError{}) || errors.Is(err, &platform.NoUserError{}) {
http.Redirect(w, r, "/signin", http.StatusFound)
return
} else {
@ -81,17 +74,12 @@ func getRoot(w http.ResponseWriter, r *http.Request) {
signin(w, errorCode, "/")
return
} else {
org, err := user.Organization().One(ctx, db.PGInstance.BobDB)
if err != nil {
respondError(w, "Failed to get organization", err, http.StatusInternalServerError)
return
}
dashboard(ctx, w, org, user)
dashboard(ctx, w, *user)
return
}
}
func getSource(ctx context.Context, r *http.Request, org *models.Organization, user *models.User) (*html.Response[contentSource], *nhttp.ErrorWithStatus) {
func getSource(ctx context.Context, r *http.Request, user platform.User) (*html.Response[contentSource], *nhttp.ErrorWithStatus) {
globalid_s := chi.URLParam(r, "globalid")
if globalid_s == "" {
return nil, nhttp.NewError("No globalid provided: %w", nil)
@ -100,28 +88,24 @@ func getSource(ctx context.Context, r *http.Request, org *models.Organization, u
if err != nil {
return nil, nhttp.NewError("globalid is not a UUID: %w", nil)
}
userContent, err := auth.ContentForUser(r.Context(), user)
if err != nil {
return nil, nhttp.NewError("Failed to get user content: %w", err)
}
s, err := sourceByGlobalId(r.Context(), org, globalid)
s, err := platform.SourceByGlobalID(ctx, user.Organization, globalid)
if err != nil {
return nil, nhttp.NewError("Failed to get source: %w", err)
}
inspections, err := inspectionsBySource(r.Context(), org, globalid)
inspections, err := platform.InspectionsBySource(ctx, user.Organization, globalid)
if err != nil {
return nil, nhttp.NewError("Failed to get inspections: %w", err)
}
traps, err := trapsBySource(r.Context(), org, globalid)
traps, err := platform.TrapsBySource(ctx, user.Organization, globalid)
if err != nil {
return nil, nhttp.NewError("Failed to get traps: %w", err)
}
treatments, err := treatmentsBySource(r.Context(), org, globalid)
treatments, err := platform.TreatmentsBySource(ctx, user.Organization, globalid)
if err != nil {
return nil, nhttp.NewError("Failed to get treatments: %w", err)
}
treatment_models := modelTreatment(treatments)
treatment_models := platform.ModelTreatment(treatments)
latlng, err := s.H3Cell.LatLng()
if err != nil {
return nil, nhttp.NewError("Failed to get latlng: %w", err)
@ -142,13 +126,13 @@ func getSource(ctx context.Context, r *http.Request, org *models.Organization, u
Traps: traps,
Treatments: treatments,
TreatmentModels: treatment_models,
User: userContent,
User: user,
}
return html.NewResponse("sync/source.html", data), nil
}
func getStadia(ctx context.Context, r *http.Request, org *models.Organization, u *models.User) (*html.Response[contentDashboard], *nhttp.ErrorWithStatus) {
func getStadia(ctx context.Context, r *http.Request, u platform.User) (*html.Response[contentDashboard], *nhttp.ErrorWithStatus) {
data := contentDashboard{
MapData: ComponentMap{},
}
@ -157,7 +141,7 @@ func getStadia(ctx context.Context, r *http.Request, org *models.Organization, u
func getTemplateTest(w http.ResponseWriter, r *http.Request) {
html.RenderOrError(w, "sync/template-test.html", nil)
}
func getTrap(ctx context.Context, r *http.Request, org *models.Organization, user *models.User) (*html.Response[contentTrap], *nhttp.ErrorWithStatus) {
func getTrap(ctx context.Context, r *http.Request, user platform.User) (*html.Response[contentTrap], *nhttp.ErrorWithStatus) {
globalid_s := chi.URLParam(r, "globalid")
if globalid_s == "" {
return nil, nhttp.NewError("No globalid provided: %w", nil)
@ -166,11 +150,7 @@ func getTrap(ctx context.Context, r *http.Request, org *models.Organization, use
if err != nil {
return nil, nhttp.NewError("globalid is not a UUID: %w", nil)
}
userContent, err := auth.ContentForUser(r.Context(), user)
if err != nil {
return nil, nhttp.NewError("Failed to get user content: %w", err)
}
t, err := trapByGlobalId(r.Context(), org, globalid)
t, err := platform.TrapByGlobalId(ctx, user.Organization, globalid)
if err != nil {
return nil, nhttp.NewError("Failed to get trap: %w", err)
}
@ -189,47 +169,44 @@ func getTrap(ctx context.Context, r *http.Request, org *models.Organization, use
},
Zoom: 13,
},
Trap: t,
User: userContent,
Trap: *t,
User: user,
}
return html.NewResponse("sync/trap.html", data), nil
}
func dashboard(ctx context.Context, w http.ResponseWriter, org *models.Organization, user *models.User) {
func dashboard(ctx context.Context, w http.ResponseWriter, user platform.User) {
var lastSync *time.Time
sync, err := org.FieldseekerSyncs(sm.OrderBy("created").Desc()).One(ctx, db.PGInstance.BobDB)
sync, err := user.Organization.FieldseekerSyncLatest(ctx)
if err != nil {
if err.Error() != "sql: no rows in result set" {
respondError(w, "Failed to get syncs", err, http.StatusInternalServerError)
return
}
} else {
respondError(w, "Failed to get syncs", err, http.StatusInternalServerError)
} else if sync != nil {
lastSync = &sync.Created
}
is_syncing := background.IsSyncOngoing(org.ID)
trapCount, err := org.Traplocations().Count(ctx, db.PGInstance.BobDB)
is_syncing := user.Organization.IsSyncOngoing()
count_trap, err := user.Organization.CountTrap(ctx)
if err != nil {
respondError(w, "Failed to get trap count", err, http.StatusInternalServerError)
return
}
sourceCount, err := org.Pointlocations().Count(ctx, db.PGInstance.BobDB)
count_source, err := user.Organization.CountTrap(ctx)
if err != nil {
respondError(w, "Failed to get source count", err, http.StatusInternalServerError)
return
}
serviceCount, err := org.Servicerequests().Count(ctx, db.PGInstance.BobDB)
count_service, err := user.Organization.CountServiceRequest(ctx)
if err != nil {
respondError(w, "Failed to get service count", err, http.StatusInternalServerError)
return
}
recentRequests, err := org.Servicerequests(sm.OrderBy("creationdate").Desc(), sm.Limit(10)).All(ctx, db.PGInstance.BobDB)
service_request_recent, err := user.Organization.ServiceRequestRecent(ctx)
if err != nil {
respondError(w, "Failed to get recent service", err, http.StatusInternalServerError)
return
}
requests := make([]ServiceRequestSummary, 0)
for _, r := range recentRequests {
for _, r := range service_request_recent {
requests = append(requests, ServiceRequestSummary{
Date: r.Creationdate.MustGet(),
Location: r.Reqaddr1.MustGet(),
@ -237,30 +214,25 @@ func dashboard(ctx context.Context, w http.ResponseWriter, org *models.Organizat
})
}
content := contentDashboard{
CountTraps: int(trapCount),
CountMosquitoSources: int(sourceCount),
CountServiceRequests: int(serviceCount),
CountTraps: int(count_trap),
CountMosquitoSources: int(count_source),
CountServiceRequests: int(count_service),
IsSyncOngoing: is_syncing,
LastSync: lastSync,
MapData: ComponentMap{},
RecentRequests: requests,
}
userContent, err := auth.ContentForUser(ctx, user)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
html.RenderOrError(w, "sync/dashboard.html", contentAuthenticated[contentDashboard]{
C: content,
Config: html.NewContentConfig(),
Organization: org,
Organization: user.Organization,
URL: html.NewContentURL(),
User: userContent,
User: user,
})
}
func source(w http.ResponseWriter, r *http.Request, org *models.Organization, user *models.User, id uuid.UUID) {
func source(w http.ResponseWriter, r *http.Request, user platform.User, id uuid.UUID) {
}
func trap(w http.ResponseWriter, r *http.Request, org *models.Organization, user *models.User, id uuid.UUID) {
func trap(w http.ResponseWriter, r *http.Request, user platform.User, id uuid.UUID) {
}

View file

@ -4,14 +4,14 @@ import (
"context"
"net/http"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/html"
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
"github.com/Gleipnir-Technology/nidus-sync/platform"
)
type contentDownloadPlaceholder struct{}
func getDownloadList(ctx context.Context, r *http.Request, org *models.Organization, user *models.User) (*html.Response[contentDownloadPlaceholder], *nhttp.ErrorWithStatus) {
func getDownloadList(ctx context.Context, r *http.Request, user platform.User) (*html.Response[contentDownloadPlaceholder], *nhttp.ErrorWithStatus) {
content := contentDownloadPlaceholder{}
return html.NewResponse("sync/download-list.html", content), nil
}

View file

@ -5,7 +5,6 @@ import (
"net/http"
"github.com/Gleipnir-Technology/nidus-sync/auth"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/html"
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
"github.com/Gleipnir-Technology/nidus-sync/platform"
@ -15,26 +14,21 @@ import (
var decoder = schema.NewDecoder()
type handlerFunctionGet[T any] func(context.Context, *http.Request, *models.Organization, *models.User) (*html.Response[T], *nhttp.ErrorWithStatus)
type handlerFunctionGet[T any] func(context.Context, *http.Request, platform.User) (*html.Response[T], *nhttp.ErrorWithStatus)
type wrappedHandler func(http.ResponseWriter, *http.Request)
type contentAuthenticated[T any] struct {
C T
Config html.ContentConfig
Organization *models.Organization
Organization platform.Organization
URL html.ContentURL
User platform.User
}
// w http.ResponseWriter, r *http.Request, u *models.User) {
// w http.ResponseWriter, r *http.Request, u platform.User) {
func authenticatedHandler[T any](f handlerFunctionGet[T]) http.Handler {
return auth.NewEnsureAuth(func(w http.ResponseWriter, r *http.Request, org *models.Organization, u *models.User) {
return auth.NewEnsureAuth(func(w http.ResponseWriter, r *http.Request, u platform.User) {
ctx := r.Context()
userContent, err := auth.ContentForUser(ctx, u)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
resp, e := f(ctx, r, org, u)
resp, e := f(ctx, r, u)
//log.Info().Str("template", template).Err(e).Msg("handler done")
if e != nil {
log.Warn().Int("status", e.Status).Err(e).Str("user message", e.Message).Msg("Responding with an error from sync pages")
@ -44,17 +38,17 @@ func authenticatedHandler[T any](f handlerFunctionGet[T]) http.Handler {
html.RenderOrError(w, resp.Template, contentAuthenticated[T]{
C: resp.Content,
Config: html.NewContentConfig(),
Organization: org,
Organization: u.Organization,
URL: html.NewContentURL(),
User: userContent,
User: u,
})
})
}
type handlerFunctionPost[T any] func(context.Context, *http.Request, *models.Organization, *models.User, T) (string, *nhttp.ErrorWithStatus)
type handlerFunctionPost[T any] func(context.Context, *http.Request, platform.User, T) (string, *nhttp.ErrorWithStatus)
func authenticatedHandlerPost[T any](f handlerFunctionPost[T]) http.Handler {
return auth.NewEnsureAuth(func(w http.ResponseWriter, r *http.Request, org *models.Organization, u *models.User) {
return auth.NewEnsureAuth(func(w http.ResponseWriter, r *http.Request, u platform.User) {
err := r.ParseForm()
if err != nil {
respondError(w, "Failed to parse form", err, http.StatusBadRequest)
@ -69,7 +63,7 @@ func authenticatedHandlerPost[T any](f handlerFunctionPost[T]) http.Handler {
return
}
ctx := r.Context()
path, e := f(ctx, r, org, u, content)
path, e := f(ctx, r, u, content)
if e != nil {
http.Error(w, e.Error(), e.Status)
return
@ -78,7 +72,7 @@ func authenticatedHandlerPost[T any](f handlerFunctionPost[T]) http.Handler {
})
}
func authenticatedHandlerPostMultipart[T any](f handlerFunctionPost[T]) http.Handler {
return auth.NewEnsureAuth(func(w http.ResponseWriter, r *http.Request, org *models.Organization, u *models.User) {
return auth.NewEnsureAuth(func(w http.ResponseWriter, r *http.Request, u platform.User) {
err := r.ParseMultipartForm(32 << 10) // 32 MB buffer
if err != nil {
respondError(w, "Failed to parse form", err, http.StatusBadRequest)
@ -93,7 +87,7 @@ func authenticatedHandlerPostMultipart[T any](f handlerFunctionPost[T]) http.Han
return
}
ctx := r.Context()
path, e := f(ctx, r, org, u, content)
path, e := f(ctx, r, u, content)
if e != nil {
http.Error(w, e.Error(), e.Status)
return

View file

@ -4,13 +4,13 @@ import (
"context"
"net/http"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/html"
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
"github.com/Gleipnir-Technology/nidus-sync/platform"
)
type contentIntelligenceRoot struct{}
func getIntelligenceRoot(ctx context.Context, r *http.Request, org *models.Organization, user *models.User) (*html.Response[contentIntelligenceRoot], *nhttp.ErrorWithStatus) {
func getIntelligenceRoot(ctx context.Context, r *http.Request, user platform.User) (*html.Response[contentIntelligenceRoot], *nhttp.ErrorWithStatus) {
return html.NewResponse("sync/intelligence-root.html", contentIntelligenceRoot{}), nil
}

View file

@ -4,14 +4,14 @@ import (
"context"
"net/http"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/html"
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
"github.com/Gleipnir-Technology/nidus-sync/platform"
)
type contentMessageList struct{}
func getMessageList(ctx context.Context, r *http.Request, org *models.Organization, user *models.User) (*html.Response[contentMessageList], *nhttp.ErrorWithStatus) {
func getMessageList(ctx context.Context, r *http.Request, user platform.User) (*html.Response[contentMessageList], *nhttp.ErrorWithStatus) {
content := contentMessageList{}
return html.NewResponse("sync/message-list.html", content), nil
}

View file

@ -7,10 +7,9 @@ import (
//"strings"
//"time"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/html"
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
"github.com/Gleipnir-Technology/nidus-sync/notification"
"github.com/Gleipnir-Technology/nidus-sync/platform"
//"github.com/Gleipnir-Technology/bob"
//"github.com/Gleipnir-Technology/bob/dialect/psql"
//"github.com/Gleipnir-Technology/bob/dialect/psql/sm"
@ -21,11 +20,11 @@ import (
)
type contentNotificationList struct {
Notifications []notification.Notification
Notifications []platform.Notification
}
func getNotificationList(ctx context.Context, r *http.Request, org *models.Organization, u *models.User) (*html.Response[contentNotificationList], *nhttp.ErrorWithStatus) {
notifications, err := notification.ForUser(ctx, u)
func getNotificationList(ctx context.Context, r *http.Request, u platform.User) (*html.Response[contentNotificationList], *nhttp.ErrorWithStatus) {
notifications, err := platform.NotificationsForUser(ctx, u)
if err != nil {
return nil, nhttp.NewError("Failed to get notifications: %w", err)
}

View file

@ -7,11 +7,10 @@ import (
"strconv"
"github.com/Gleipnir-Technology/nidus-sync/auth"
"github.com/Gleipnir-Technology/nidus-sync/background"
"github.com/Gleipnir-Technology/nidus-sync/config"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/html"
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
"github.com/Gleipnir-Technology/nidus-sync/platform"
"github.com/rs/zerolog/log"
)
@ -59,7 +58,7 @@ func getArcgisOauthCallback(w http.ResponseWriter, r *http.Request) {
respondError(w, "You're not currently authenticated, which really shouldn't happen.", err, http.StatusUnauthorized)
return
}
err = background.HandleOauthAccessCode(r.Context(), user, code)
err = platform.HandleOauthAccessCode(r.Context(), *user, code)
if err != nil {
respondError(w, "Failed to handle access code", err, http.StatusInternalServerError)
return
@ -67,7 +66,7 @@ func getArcgisOauthCallback(w http.ResponseWriter, r *http.Request) {
http.Redirect(w, r, config.MakeURLNidus("/"), http.StatusFound)
}
func getOAuthRefresh(ctx context.Context, r *http.Request, org *models.Organization, user *models.User) (*html.Response[contentOauthPrompt], *nhttp.ErrorWithStatus) {
func getOAuthRefresh(ctx context.Context, r *http.Request, user platform.User) (*html.Response[contentOauthPrompt], *nhttp.ErrorWithStatus) {
data := contentOauthPrompt{}
return html.NewResponse("sync/oauth-prompt.html", data), nil
}

View file

@ -4,13 +4,13 @@ import (
"context"
"net/http"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/html"
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
"github.com/Gleipnir-Technology/nidus-sync/platform"
)
type contentOperationsRoot struct{}
func getOperationsRoot(ctx context.Context, r *http.Request, org *models.Organization, user *models.User) (*html.Response[contentOperationsRoot], *nhttp.ErrorWithStatus) {
func getOperationsRoot(ctx context.Context, r *http.Request, user platform.User) (*html.Response[contentOperationsRoot], *nhttp.ErrorWithStatus) {
return html.NewResponse("sync/operations-root.html", contentOperationsRoot{}), nil
}

View file

@ -4,13 +4,13 @@ import (
"context"
"net/http"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/html"
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
"github.com/Gleipnir-Technology/nidus-sync/platform"
)
type contentParcel struct{}
func getParcel(ctx context.Context, r *http.Request, org *models.Organization, user *models.User) (*html.Response[contentParcel], *nhttp.ErrorWithStatus) {
func getParcel(ctx context.Context, r *http.Request, user platform.User) (*html.Response[contentParcel], *nhttp.ErrorWithStatus) {
return html.NewResponse("sync/parcel.html", contentParcel{}), nil
}

View file

@ -4,10 +4,10 @@ import (
"context"
"net/http"
"github.com/Gleipnir-Technology/nidus-sync/background"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/html"
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
"github.com/Gleipnir-Technology/nidus-sync/platform"
"github.com/rs/zerolog/log"
)
@ -15,11 +15,11 @@ type contentPlanningRoot struct {
ArcgisAccessToken string
}
func getPlanningRoot(ctx context.Context, r *http.Request, org *models.Organization, user *models.User) (*html.Response[contentPlanningRoot], *nhttp.ErrorWithStatus) {
func getPlanningRoot(ctx context.Context, r *http.Request, user platform.User) (*html.Response[contentPlanningRoot], *nhttp.ErrorWithStatus) {
var oauth_token *models.ArcgisOauthToken
var err error
var access_token string
oauth_token, err = background.GetOAuthForOrg(ctx, org)
oauth_token, err = platform.GetOAuthForOrg(ctx, user.Organization)
if err != nil {
log.Warn().Err(err).Msg("Failed to get oauth")
oauth_token = nil

View file

@ -4,19 +4,19 @@ import (
"context"
"net/http"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/html"
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
"github.com/Gleipnir-Technology/nidus-sync/platform"
)
type contentPoolList struct{}
func getPoolList(ctx context.Context, r *http.Request, org *models.Organization, user *models.User) (*html.Response[contentPoolList], *nhttp.ErrorWithStatus) {
func getPoolList(ctx context.Context, r *http.Request, user platform.User) (*html.Response[contentPoolList], *nhttp.ErrorWithStatus) {
return html.NewResponse("sync/pool-list.html", contentPoolList{}), nil
}
func getPoolCreate(ctx context.Context, r *http.Request, org *models.Organization, user *models.User) (*html.Response[contentPoolList], *nhttp.ErrorWithStatus) {
func getPoolCreate(ctx context.Context, r *http.Request, user platform.User) (*html.Response[contentPoolList], *nhttp.ErrorWithStatus) {
return html.NewResponse("sync/pool-upload.html", contentPoolList{}), nil
}
func getPoolByID(ctx context.Context, r *http.Request, org *models.Organization, user *models.User) (*html.Response[contentPoolList], *nhttp.ErrorWithStatus) {
func getPoolByID(ctx context.Context, r *http.Request, user platform.User) (*html.Response[contentPoolList], *nhttp.ErrorWithStatus) {
return html.NewResponse("sync/pool-by-id.html", contentPoolList{}), nil
}

View file

@ -4,23 +4,18 @@ import (
"context"
"net/http"
"github.com/Gleipnir-Technology/nidus-sync/db"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/html"
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
"github.com/Gleipnir-Technology/nidus-sync/platform"
)
type contentRadar struct {
Organization *models.Organization
Organization platform.Organization
}
func getRadar(ctx context.Context, r *http.Request, org *models.Organization, user *models.User) (*html.Response[contentRadar], *nhttp.ErrorWithStatus) {
org, err := user.Organization().One(ctx, db.PGInstance.BobDB)
if err != nil {
return nil, nhttp.NewError("get org: %w", err)
}
func getRadar(ctx context.Context, r *http.Request, user platform.User) (*html.Response[contentRadar], *nhttp.ErrorWithStatus) {
data := contentRadar{
Organization: org,
Organization: user.Organization,
}
return html.NewResponse("sync/radar.html", data), nil
}

View file

@ -6,11 +6,11 @@ import (
"html/template"
"net/http"
"github.com/Gleipnir-Technology/nidus-sync/background"
"github.com/Gleipnir-Technology/nidus-sync/config"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/html"
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
"github.com/Gleipnir-Technology/nidus-sync/platform"
"github.com/rs/zerolog/log"
)
@ -20,11 +20,11 @@ type contentReviewPool struct {
}
type contentReviewRoot struct{}
func getReviewPool(ctx context.Context, r *http.Request, org *models.Organization, user *models.User) (*html.Response[contentReviewPool], *nhttp.ErrorWithStatus) {
func getReviewPool(ctx context.Context, r *http.Request, user platform.User) (*html.Response[contentReviewPool], *nhttp.ErrorWithStatus) {
var oauth_token *models.ArcgisOauthToken
var err error
var access_token string
oauth_token, err = background.GetOAuthForOrg(ctx, org)
oauth_token, err = platform.GetOAuthForOrg(ctx, user.Organization)
if err != nil {
log.Warn().Err(err).Msg("Failed to get oauth")
oauth_token = nil
@ -37,9 +37,9 @@ func getReviewPool(ctx context.Context, r *http.Request, org *models.Organizatio
URLTiles: template.HTMLAttr(fmt.Sprintf(`url-tiles="%s"`, config.MakeURLNidus("/api/tile/{z}/{y}/{x}"))),
}), nil
}
func getReviewRoot(ctx context.Context, r *http.Request, org *models.Organization, user *models.User) (*html.Response[contentReviewRoot], *nhttp.ErrorWithStatus) {
func getReviewRoot(ctx context.Context, r *http.Request, user platform.User) (*html.Response[contentReviewRoot], *nhttp.ErrorWithStatus) {
return html.NewResponse("sync/review/root.html", contentReviewRoot{}), nil
}
func getReviewSite(ctx context.Context, r *http.Request, org *models.Organization, user *models.User) (*html.Response[contentReviewRoot], *nhttp.ErrorWithStatus) {
func getReviewSite(ctx context.Context, r *http.Request, user platform.User) (*html.Response[contentReviewRoot], *nhttp.ErrorWithStatus) {
return html.NewResponse("sync/review/site.html", contentReviewRoot{}), nil
}

View file

@ -6,9 +6,9 @@ import (
"time"
"github.com/Gleipnir-Technology/nidus-sync/config"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/html"
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
"github.com/Gleipnir-Technology/nidus-sync/platform"
)
type contentActiveServiceRequest struct {
@ -34,11 +34,11 @@ type contentServiceRequestList struct {
ClosedRequests []contentClosedServiceRequest
}
func getServiceRequestDetail(ctx context.Context, r *http.Request, org *models.Organization, user *models.User) (*html.Response[contentServiceRequestDetail], *nhttp.ErrorWithStatus) {
func getServiceRequestDetail(ctx context.Context, r *http.Request, user platform.User) (*html.Response[contentServiceRequestDetail], *nhttp.ErrorWithStatus) {
content := contentServiceRequestDetail{}
return html.NewResponse("sync/service-request-detail.html", content), nil
}
func getServiceRequestList(ctx context.Context, r *http.Request, org *models.Organization, user *models.User) (*html.Response[contentServiceRequestList], *nhttp.ErrorWithStatus) {
func getServiceRequestList(ctx context.Context, r *http.Request, user platform.User) (*html.Response[contentServiceRequestList], *nhttp.ErrorWithStatus) {
now := time.Now()
content := contentServiceRequestList{
ActiveRequests: []contentActiveServiceRequest{

View file

@ -7,8 +7,8 @@ import (
"github.com/Gleipnir-Technology/nidus-sync/auth"
"github.com/Gleipnir-Technology/nidus-sync/config"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/html"
"github.com/Gleipnir-Technology/nidus-sync/platform"
"github.com/rs/zerolog/log"
)
@ -24,7 +24,7 @@ func getSignin(w http.ResponseWriter, r *http.Request) {
signin(w, errorCode, next)
}
func getSignout(w http.ResponseWriter, r *http.Request, org *models.Organization, user *models.User) {
func getSignout(w http.ResponseWriter, r *http.Request, user platform.User) {
auth.SignoutUser(r, user)
http.Redirect(w, r, "/signin", http.StatusFound)
}

View file

@ -8,10 +8,9 @@ import (
"github.com/Gleipnir-Technology/nidus-sync/comms/email"
"github.com/Gleipnir-Technology/nidus-sync/comms/text"
"github.com/Gleipnir-Technology/nidus-sync/config"
"github.com/Gleipnir-Technology/nidus-sync/db/enums"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/html"
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
"github.com/Gleipnir-Technology/nidus-sync/platform"
"github.com/rs/zerolog/log"
)
@ -20,8 +19,8 @@ type contentSudo struct {
ForwardEmailNidusAddress string
}
func getSudo(ctx context.Context, r *http.Request, org *models.Organization, user *models.User) (*html.Response[contentSudo], *nhttp.ErrorWithStatus) {
if user.Role != enums.UserroleRoot {
func getSudo(ctx context.Context, r *http.Request, user platform.User) (*html.Response[contentSudo], *nhttp.ErrorWithStatus) {
if !user.HasRoot() {
return nil, &nhttp.ErrorWithStatus{
Message: "You have to be a root user to access this",
Status: http.StatusForbidden,
@ -41,8 +40,8 @@ type FormEmail struct {
To string `schema:"emailTo"`
}
func postSudoEmail(ctx context.Context, r *http.Request, org *models.Organization, u *models.User, e FormEmail) (string, *nhttp.ErrorWithStatus) {
if u.Role != enums.UserroleRoot {
func postSudoEmail(ctx context.Context, r *http.Request, u platform.User, e FormEmail) (string, *nhttp.ErrorWithStatus) {
if !u.HasRoot() {
return "", &nhttp.ErrorWithStatus{
Message: "You must have sudo powers to do this",
Status: http.StatusForbidden,
@ -70,8 +69,8 @@ type FormSMS struct {
Phone string `schema:"smsPhone"`
}
func postSudoSMS(ctx context.Context, r *http.Request, org *models.Organization, u *models.User, sms FormSMS) (string, *nhttp.ErrorWithStatus) {
if u.Role != enums.UserroleRoot {
func postSudoSMS(ctx context.Context, r *http.Request, u platform.User, sms FormSMS) (string, *nhttp.ErrorWithStatus) {
if !u.HasRoot() {
return "", &nhttp.ErrorWithStatus{
Message: "You must have sudo powers to do this",
Status: http.StatusForbidden,

View file

@ -4,14 +4,14 @@ import (
"context"
"net/http"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/html"
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
"github.com/Gleipnir-Technology/nidus-sync/platform"
)
type contentTextMessages struct{}
func getTextMessages(ctx context.Context, r *http.Request, org *models.Organization, u *models.User) (*html.Response[contentTextMessages], *nhttp.ErrorWithStatus) {
func getTextMessages(ctx context.Context, r *http.Request, u platform.User) (*html.Response[contentTextMessages], *nhttp.ErrorWithStatus) {
content := contentTextMessages{}
return html.NewResponse("sync/text-messages.html", content), nil
}

View file

@ -7,11 +7,10 @@ import (
"net/http"
"strconv"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/platform/imagetile"
"github.com/Gleipnir-Technology/nidus-sync/platform"
)
func getTileGPS(w http.ResponseWriter, r *http.Request, org *models.Organization, u *models.User) {
func getTileGPS(w http.ResponseWriter, r *http.Request, u platform.User) {
ctx := r.Context()
if err := r.ParseForm(); err != nil {
respondError(w, "Could not parse form", err, http.StatusBadRequest)
@ -40,7 +39,7 @@ func getTileGPS(w http.ResponseWriter, r *http.Request, org *models.Organization
respondError(w, "couldn't parse lng", err, http.StatusBadRequest)
return
}
img, err := imagetile.ImageAtPoint(ctx, org, uint(level), lat, lng)
img, err := platform.ImageAtPoint(ctx, u.Organization, uint(level), lat, lng)
if err != nil {
respondError(w, "image at point", err, http.StatusInternalServerError)
return

View file

@ -3,17 +3,9 @@ package sync
import (
"time"
"github.com/google/uuid"
"github.com/uber/h3-go/v4"
)
type BreedingSourceSummary struct {
ID uuid.UUID
Type string
LastInspected *time.Time
LastTreated *time.Time
}
type MapMarker struct {
LatLng h3.LatLng
}
@ -47,13 +39,6 @@ type ContentReportDetail struct {
type ContentReportDiagnostic struct {
}
type Inspection struct {
Action string
Date *time.Time
Notes string
Location string
LocationID uuid.UUID
}
type Link struct {
Href string
Title string

View file

@ -7,11 +7,10 @@ import (
"strconv"
"github.com/Gleipnir-Technology/nidus-sync/db/enums"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/html"
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
"github.com/Gleipnir-Technology/nidus-sync/platform"
"github.com/Gleipnir-Technology/nidus-sync/userfile"
"github.com/Gleipnir-Technology/nidus-sync/platform/file"
"github.com/go-chi/chi/v5"
//"github.com/rs/zerolog/log"
)
@ -21,8 +20,8 @@ type contentUploadList struct {
}
type contentUploadPlaceholder struct{}
func getUploadList(ctx context.Context, r *http.Request, org *models.Organization, user *models.User) (*html.Response[contentUploadList], *nhttp.ErrorWithStatus) {
rows, err := platform.UploadSummaryList(ctx, org)
func getUploadList(ctx context.Context, r *http.Request, user platform.User) (*html.Response[contentUploadList], *nhttp.ErrorWithStatus) {
rows, err := platform.UploadSummaryList(ctx, user.Organization)
return html.NewResponse("sync/upload-list.html", contentUploadList{
RecentUploads: rows,
}), nhttp.NewErrorMaybe("get upload list: %w", err)
@ -30,7 +29,7 @@ func getUploadList(ctx context.Context, r *http.Request, org *models.Organizatio
type contentUploadDetail struct {
CSVFileID int32
Organization *models.Organization
Organization platform.Organization
Upload platform.UploadPoolDetail
}
type contentUploadPoolList struct {
@ -38,38 +37,38 @@ type contentUploadPoolList struct {
}
type contentUploadPool struct{}
func getUploadPool(ctx context.Context, r *http.Request, org *models.Organization, u *models.User) (*html.Response[contentUploadPool], *nhttp.ErrorWithStatus) {
func getUploadPool(ctx context.Context, r *http.Request, u platform.User) (*html.Response[contentUploadPool], *nhttp.ErrorWithStatus) {
data := contentUploadPool{}
return html.NewResponse("sync/upload-csv-pool.html", data), nil
}
type contentUploadPoolFlyoverCreate struct{}
func getUploadPoolFlyoverCreate(ctx context.Context, r *http.Request, org *models.Organization, u *models.User) (*html.Response[contentUploadPoolFlyoverCreate], *nhttp.ErrorWithStatus) {
func getUploadPoolFlyoverCreate(ctx context.Context, r *http.Request, u platform.User) (*html.Response[contentUploadPoolFlyoverCreate], *nhttp.ErrorWithStatus) {
data := contentUploadPoolFlyoverCreate{}
return html.NewResponse("sync/upload-csv-pool-flyover.html", data), nil
}
type contentUploadPoolCustomCreate struct{}
func getUploadPoolCustomCreate(ctx context.Context, r *http.Request, org *models.Organization, u *models.User) (*html.Response[contentUploadPoolCustomCreate], *nhttp.ErrorWithStatus) {
func getUploadPoolCustomCreate(ctx context.Context, r *http.Request, u platform.User) (*html.Response[contentUploadPoolCustomCreate], *nhttp.ErrorWithStatus) {
data := contentUploadPoolCustomCreate{}
return html.NewResponse("sync/upload-csv-pool-custom.html", data), nil
}
func getUploadByID(ctx context.Context, r *http.Request, org *models.Organization, u *models.User) (*html.Response[contentUploadDetail], *nhttp.ErrorWithStatus) {
func getUploadByID(ctx context.Context, r *http.Request, u platform.User) (*html.Response[contentUploadDetail], *nhttp.ErrorWithStatus) {
file_id_str := chi.URLParam(r, "id")
file_id_, err := strconv.ParseInt(file_id_str, 10, 32)
if err != nil {
return nil, nhttp.NewError("Failed to parse file_id: %w", err)
}
file_id := int32(file_id_)
detail, err := platform.GetUploadDetail(ctx, u.OrganizationID, file_id)
detail, err := platform.GetUploadDetail(ctx, u.Organization.ID(), file_id)
if err != nil {
return nil, nhttp.NewError("Failed to get pool: %w", err)
}
data := contentUploadDetail{
CSVFileID: file_id,
Organization: org,
Organization: u.Organization,
Upload: detail,
}
return html.NewResponse("sync/upload-by-id.html", data), nil
@ -77,13 +76,13 @@ func getUploadByID(ctx context.Context, r *http.Request, org *models.Organizatio
type FormUploadCommit struct{}
func postUploadCommit(ctx context.Context, r *http.Request, org *models.Organization, u *models.User, f FormUploadCommit) (string, *nhttp.ErrorWithStatus) {
func postUploadCommit(ctx context.Context, r *http.Request, u platform.User, f FormUploadCommit) (string, *nhttp.ErrorWithStatus) {
file_id_str := chi.URLParam(r, "id")
file_id_, err := strconv.ParseInt(file_id_str, 10, 32)
if err != nil {
return "", nhttp.NewError("Failed to parse file_id: %w", err)
}
err = platform.UploadCommit(ctx, org, int32(file_id_), u)
err = platform.UploadCommit(ctx, u.Organization, int32(file_id_), u)
if err != nil {
return "", nhttp.NewError("Failed to mark committed: %w", err)
}
@ -92,13 +91,13 @@ func postUploadCommit(ctx context.Context, r *http.Request, org *models.Organiza
type FormUploadDiscard struct{}
func postUploadDiscard(ctx context.Context, r *http.Request, org *models.Organization, u *models.User, f FormUploadDiscard) (string, *nhttp.ErrorWithStatus) {
func postUploadDiscard(ctx context.Context, r *http.Request, u platform.User, f FormUploadDiscard) (string, *nhttp.ErrorWithStatus) {
file_id_str := chi.URLParam(r, "id")
file_id_, err := strconv.ParseInt(file_id_str, 10, 32)
if err != nil {
return "", nhttp.NewError("Failed to parse file_id: %w", err)
}
err = platform.UploadDiscard(ctx, org, int32(file_id_))
err = platform.UploadDiscard(ctx, u.Organization, int32(file_id_))
if err != nil {
return "", nhttp.NewError("Failed to mark discarded: %w", err)
}
@ -107,8 +106,8 @@ func postUploadDiscard(ctx context.Context, r *http.Request, org *models.Organiz
type FormUploadPool struct{}
func postUploadPoolFlyoverCreate(ctx context.Context, r *http.Request, org *models.Organization, u *models.User, f FormUploadPool) (string, *nhttp.ErrorWithStatus) {
uploads, err := userfile.SaveFileUpload(r, "csvfile", userfile.CollectionCSV)
func postUploadPoolFlyoverCreate(ctx context.Context, r *http.Request, u platform.User, f FormUploadPool) (string, *nhttp.ErrorWithStatus) {
uploads, err := file.SaveFileUpload(r, "csvfile", file.CollectionCSV)
if err != nil {
return "", nhttp.NewError("Failed to extract image uploads: %s", err)
}
@ -125,8 +124,8 @@ func postUploadPoolFlyoverCreate(ctx context.Context, r *http.Request, org *mode
}
return fmt.Sprintf("/configuration/upload/%d", saved_upload.ID), nil
}
func postUploadPoolCustomCreate(ctx context.Context, r *http.Request, org *models.Organization, u *models.User, f FormUploadPool) (string, *nhttp.ErrorWithStatus) {
uploads, err := userfile.SaveFileUpload(r, "csvfile", userfile.CollectionCSV)
func postUploadPoolCustomCreate(ctx context.Context, r *http.Request, u platform.User, f FormUploadPool) (string, *nhttp.ErrorWithStatus) {
uploads, err := file.SaveFileUpload(r, "csvfile", file.CollectionCSV)
if err != nil {
return "", nhttp.NewError("Failed to extract image uploads: %s", err)
}

View file

@ -5,6 +5,8 @@ BEGIN TRANSACTION;
DELETE FROM fileupload.error_file;
DELETE FROM lead WHERE site_id IN (SELECT id FROM SITE WHERE file_id IS NOT NULL);
DELETE FROM site WHERE file_id IS NOT NULL;
DELETE FROM review_task_pool;
DELETE FROM review_task;
DELETE FROM fileupload.file;
COMMIT;

View file

@ -1,5 +1,5 @@
INSERT INTO compliance_report_request(created, creator, id, public_id, site_id, site_version)
VALUES (NOW(), :user_id, DEFAULT, :public_id, :site_id, 1);
INSERT INTO compliance_report_request(created, creator, id, public_id, site_id)
VALUES (NOW(), :user_id, DEFAULT, :public_id, :site_id);
-- INSERT INTO compliance_report_request (created, creator, public_id, site_id, site_version)