2026-03-16 19:52:29 +00:00
package platform
2025-11-06 00:23:58 +00:00
import (
2025-11-07 05:46:41 +00:00
"bytes"
2025-11-06 00:23:58 +00:00
"context"
"crypto/rand"
"crypto/sha256"
"encoding/base64"
2025-11-07 02:07:33 +00:00
"errors"
2025-11-06 00:23:58 +00:00
"fmt"
"io"
"net/http"
"net/url"
2025-11-07 05:46:41 +00:00
"os"
2025-12-02 00:28:14 +00:00
"path/filepath"
2025-11-07 08:34:32 +00:00
"sort"
2025-11-06 00:23:58 +00:00
"strconv"
"strings"
2025-11-07 05:46:41 +00:00
"sync"
2025-11-06 00:23:58 +00:00
"time"
2025-11-06 22:28:56 +00:00
"github.com/Gleipnir-Technology/arcgis-go"
2025-11-07 08:34:32 +00:00
"github.com/Gleipnir-Technology/arcgis-go/fieldseeker"
2026-02-13 19:19:39 +00:00
"github.com/Gleipnir-Technology/arcgis-go/response"
2026-02-28 23:26:08 +00:00
"github.com/Gleipnir-Technology/bob"
2026-01-27 18:44:02 +00:00
"github.com/Gleipnir-Technology/bob/dialect/psql"
2026-03-16 19:52:29 +00:00
"github.com/Gleipnir-Technology/bob/dialect/psql/dialect"
2026-01-27 18:44:02 +00:00
"github.com/Gleipnir-Technology/bob/dialect/psql/dm"
2026-02-28 23:26:08 +00:00
"github.com/Gleipnir-Technology/bob/dialect/psql/im"
2026-01-07 16:07:51 +00:00
"github.com/Gleipnir-Technology/nidus-sync/config"
2025-11-24 18:08:24 +00:00
"github.com/Gleipnir-Technology/nidus-sync/db"
2026-03-16 19:52:29 +00:00
"github.com/Gleipnir-Technology/nidus-sync/db/enums"
2026-05-01 17:28:33 +00:00
"github.com/Gleipnir-Technology/nidus-sync/db/gen/nidus-sync/arcgis/model"
2025-11-24 18:08:24 +00:00
"github.com/Gleipnir-Technology/nidus-sync/db/models"
2026-05-01 17:28:33 +00:00
queryarcgis "github.com/Gleipnir-Technology/nidus-sync/db/query/arcgis"
2025-11-24 18:08:24 +00:00
"github.com/Gleipnir-Technology/nidus-sync/db/sql"
2026-05-01 17:28:33 +00:00
"github.com/Gleipnir-Technology/nidus-sync/db/types"
2026-01-06 14:46:31 +00:00
"github.com/Gleipnir-Technology/nidus-sync/debug"
2026-03-16 19:52:29 +00:00
"github.com/Gleipnir-Technology/nidus-sync/h3utils"
2026-05-04 19:07:29 +00:00
"github.com/Gleipnir-Technology/nidus-sync/lint"
2026-03-12 23:49:16 +00:00
"github.com/Gleipnir-Technology/nidus-sync/platform/oauth"
2025-11-06 22:31:51 +00:00
"github.com/aarondl/opt/omit"
2025-11-07 02:07:33 +00:00
"github.com/aarondl/opt/omitnull"
2025-11-07 10:45:59 +00:00
"github.com/alitto/pond/v2"
2025-11-07 02:07:33 +00:00
"github.com/jackc/pgx/v5"
2026-02-13 19:19:39 +00:00
"github.com/rs/zerolog"
2025-11-13 20:34:48 +00:00
"github.com/rs/zerolog/log"
2026-03-16 19:52:29 +00:00
"github.com/uber/h3-go/v4"
2025-11-06 00:23:58 +00:00
)
2026-01-06 15:32:26 +00:00
// syncStatusByOrg tracks whether a Fieldseeker export is currently running
// for each organization ID. It is initialized in refreshFieldseekerData,
// written by periodicallyExportFieldseeker and read by IsSyncOngoing.
// NOTE(review): these accesses happen from separate goroutines without a
// lock — looks like a data race; confirm and consider guarding with a mutex.
var syncStatusByOrg map [ int32 ] bool

// CodeVerifier is the PKCE code verifier used during the OAuth flow.
// NOTE(review): a fixed package-level verifier defeats PKCE; as the string
// itself says, it should be generated per-session and stored there.
var CodeVerifier string = "random_secure_string_min_43_chars_long_should_be_stored_in_session"
2026-03-12 23:49:16 +00:00
func HasFieldseekerConnection ( ctx context . Context , user_id int32 ) ( bool , error ) {
2026-05-01 17:28:33 +00:00
result , err := queryarcgis . OAuthTokenForUserExists ( ctx , int64 ( user_id ) )
2026-03-12 23:49:16 +00:00
if err != nil {
return false , err
}
2026-05-01 17:28:33 +00:00
return * result , nil
2025-11-06 00:23:58 +00:00
}
2026-03-12 23:49:16 +00:00
// IsSyncOngoing reports whether a Fieldseeker export is currently running
// for the organization with the given ID.
func IsSyncOngoing(org_id int32) bool {
	ongoing, ok := syncStatusByOrg[org_id]
	if !ok {
		return false
	}
	return ongoing
}
2026-05-01 17:28:33 +00:00
func getOAuthForOrg ( ctx context . Context , org * models . Organization ) ( * model . OAuthToken , error ) {
2026-02-09 21:40:24 +00:00
users , err := org . User ( ) . All ( ctx , db . PGInstance . BobDB )
if err != nil {
return nil , fmt . Errorf ( "Failed to query all users for org: %w" , err )
}
for _ , user := range users {
2026-05-01 17:28:33 +00:00
oauths , err := queryarcgis . OAuthTokensForUser ( ctx , int64 ( user . ID ) )
2026-02-09 21:40:24 +00:00
if err != nil {
return nil , fmt . Errorf ( "Failed to query all oauth tokens for org: %w" , err )
}
for _ , oauth := range oauths {
return oauth , nil
}
}
2026-03-12 23:49:16 +00:00
return nil , nil
2026-01-07 16:07:51 +00:00
}
// This is a goroutine that is in charge of getting Fieldseeker data and keeping it fresh.
//
// It loops until background_ctx is cancelled. On each pass it spawns one
// maintenance goroutine per valid OAuth token and one export goroutine per
// organization, then blocks until either the context ends or a signal
// arrives on newOauthCh; both paths cancel the workers and wait for them,
// and the newOauthCh path restarts the loop with fresh database state.
func refreshFieldseekerData(background_ctx context.Context, newOauthCh <-chan struct{}) {
	// Component-scoped logger stored into the context for callees.
	ctx := log.With().Str("component", "arcgis").Logger().Level(zerolog.InfoLevel).WithContext(background_ctx)
	syncStatusByOrg = make(map[int32]bool, 0)
	for {
		// workerCtx bounds the lifetime of this iteration's goroutines; it
		// is cancelled on shutdown and whenever a new token arrives.
		workerCtx, cancel := context.WithCancel(context.Background())
		var wg sync.WaitGroup
		oauths, err := queryarcgis.OAuthTokensValid(ctx)
		if err != nil {
			log.Error().Err(err).Msg("Failed to get oauths")
			return
		}
		if len(oauths) == 0 {
			log.Info().Msg("No oauths to maintain")
		}
		for _, oauth := range oauths {
			wg.Add(1)
			// NOTE(review): the closure captures the loop variable `oauth`;
			// safe on Go 1.22+, but confirm the module's go version.
			go func() {
				defer wg.Done()
				err := maintainOAuth(workerCtx, oauth)
				if err != nil {
					// Any maintenance failure invalidates the token.
					markTokenFailed(ctx, oauth)
					if errors.Is(err, arcgis.ErrorInvalidRefreshToken) {
						log.Info().Int("oauth_token.id", int(oauth.ID)).Msg("Marked invalid by the server")
					} else {
						debug.LogErrorTypeInfo(err)
						log.Error().Err(err).Msg("Crashed oauth maintenance goroutine")
					}
				}
			}()
		}
		orgs, err := models.Organizations.Query().All(ctx, db.PGInstance.BobDB)
		if err != nil {
			log.Error().Err(err).Msg("Failed to get orgs")
			return
		}
		if len(orgs) == 0 {
			log.Info().Msg("No orgs to maintain")
		}
		for _, org := range orgs {
			wg.Add(1)
			go func() {
				defer wg.Done()
				err := periodicallyExportFieldseeker(workerCtx, org)
				if err != nil {
					log.Error().Err(err).Msg("Crashed fieldseeker export goroutine")
				}
			}()
		}
		// Park here until shutdown or a new token; both branches cancel and
		// drain the workers started above before proceeding.
		select {
		case <-ctx.Done():
			log.Debug().Msg("Exiting arcgis refresh worker...")
			cancel()
			wg.Wait()
			log.Debug().Msg("arcgis refresh worker exited.")
			return
		case <-newOauthCh:
			log.Info().Msg("Updating oauth background work")
			cancel()
			wg.Wait()
		}
	}
}
// SyncStats accumulates counts of database record changes produced by one
// Fieldseeker export pass.
type SyncStats struct {
	Inserts   uint // rows newly created
	Updates   uint // rows that existed but changed
	Unchanged uint // rows that matched the incoming data exactly
}
2025-12-02 00:30:08 +00:00
func downloadFieldseekerSchema ( ctx context . Context , fieldseekerClient * fieldseeker . FieldSeeker , arcgis_id string ) {
2026-02-28 23:26:08 +00:00
layers , err := fieldseekerClient . Layers ( ctx )
if err != nil {
log . Error ( ) . Err ( err ) . Msg ( "Failed to get layers" )
return
}
2026-02-13 19:19:39 +00:00
log . Debug ( ) . Int ( "len" , len ( layers ) ) . Msg ( "Downloading fieldseeker schema" )
2026-02-28 23:26:08 +00:00
for i , layer := range layers {
2026-01-07 16:07:51 +00:00
err := os . MkdirAll ( filepath . Join ( config . FieldseekerSchemaDirectory , arcgis_id ) , os . ModePerm )
2025-12-02 00:30:08 +00:00
if err != nil {
log . Error ( ) . Err ( err ) . Msg ( "Failed to create parent directory" )
return
}
2026-01-07 16:07:51 +00:00
output , err := os . Create ( fmt . Sprintf ( "%s/%s/%s.json" , config . FieldseekerSchemaDirectory , arcgis_id , layer . Name ) )
2025-12-02 00:30:08 +00:00
if err != nil {
log . Error ( ) . Err ( err ) . Msg ( "Failed to open output" )
return
}
2026-05-04 19:07:29 +00:00
defer lint . LogOnErr ( output . Close , "close schema output file" )
2026-02-28 23:26:08 +00:00
schema , err := fieldseekerClient . SchemaRaw ( ctx , uint ( i ) )
2025-12-02 00:30:08 +00:00
if err != nil {
log . Error ( ) . Err ( err ) . Msg ( "Failed to get schema" )
return
}
_ , err = output . Write ( schema )
if err != nil {
log . Error ( ) . Err ( err ) . Msg ( "Failed to write schema file" )
continue
}
}
}
2026-02-10 21:30:58 +00:00
// extractURLParts splits a URL into its scheme://host prefix and the
// non-empty segments of its path.
func extractURLParts(urlString string) (string, []string, error) {
	parsed, err := url.Parse(urlString)
	if err != nil {
		return "", nil, err
	}
	base := parsed.Scheme + "://" + parsed.Host
	// Drop the empty strings that leading, trailing, or doubled slashes
	// produce when splitting.
	var segments []string
	for _, segment := range strings.Split(parsed.Path, "/") {
		if len(segment) > 0 {
			segments = append(segments, segment)
		}
	}
	return base, segments, nil
}
2025-11-06 00:23:58 +00:00
// Helper function to generate the PKCE code challenge from a code verifier:
// BASE64URL(SHA256(verifier)) without padding, per RFC 7636.
func generateCodeChallenge(codeVerifier string) string {
	digest := sha256.Sum256([]byte(codeVerifier))
	encoded := base64.RawURLEncoding.EncodeToString(digest[:])
	return encoded
}
// Generate a random code verifier for PKCE.
//
// Returns an 86-character unpadded base64url string encoding 64 random
// bytes, or the empty string if the system randomness source fails —
// callers must check for that.
func generateCodeVerifier() string {
	// Named buf rather than bytes to avoid shadowing the imported "bytes"
	// package.
	buf := make([]byte, 64) // 64 bytes = 512 bits
	if _, err := rand.Read(buf); err != nil {
		// crypto/rand is documented never to fail on supported platforms;
		// keep the original empty-string contract rather than panicking.
		return ""
	}
	return base64.RawURLEncoding.EncodeToString(buf)
}
2025-11-06 22:28:56 +00:00
// Find out what we can about this user
2026-05-01 17:28:33 +00:00
func updateArcgisUserData ( ctx context . Context , user * models . User , oauth * model . OAuthToken ) {
2026-02-13 19:19:39 +00:00
client , err := arcgis . NewArcGISAuth (
2026-02-12 21:05:51 +00:00
ctx ,
2026-02-13 19:19:39 +00:00
& arcgis . AuthenticatorOAuth {
2026-02-10 21:30:58 +00:00
AccessToken : oauth . AccessToken ,
AccessTokenExpires : oauth . AccessTokenExpires ,
RefreshToken : oauth . RefreshToken ,
RefreshTokenExpires : oauth . RefreshTokenExpires ,
2025-11-06 22:28:56 +00:00
} ,
)
2026-02-12 21:05:51 +00:00
if err != nil {
log . Error ( ) . Err ( err ) . Msg ( "Failed to create ArcGIS client" )
return
}
2026-01-15 19:18:34 +00:00
2026-02-28 23:26:08 +00:00
txn , err := db . PGInstance . BobDB . BeginTx ( ctx , nil )
if err != nil {
log . Error ( ) . Err ( err ) . Msg ( "Create transaction" )
return
}
defer txn . Rollback ( ctx )
account , ag_user , err := updateArcgisAccount ( ctx , txn , client , user )
2025-11-06 22:28:56 +00:00
if err != nil {
2026-01-15 19:18:34 +00:00
log . Error ( ) . Err ( err ) . Msg ( "Failed to get portal data" )
2025-11-06 22:28:56 +00:00
return
}
2025-11-07 02:07:33 +00:00
2026-02-28 23:26:08 +00:00
err = updateServiceData ( ctx , txn , client , user , account )
if err != nil {
log . Error ( ) . Err ( err ) . Msg ( "Failed to get service data" )
return
}
2026-05-01 17:28:33 +00:00
model := model . OAuthToken {
ArcgisID : & ag_user . ID ,
ArcgisLicenseTypeID : & ag_user . UserLicenseTypeID ,
}
err = queryarcgis . OAuthTokenUpdateLicense ( ctx , oauth . RefreshToken , & model )
2025-11-07 02:07:33 +00:00
if err != nil {
2025-11-13 20:34:48 +00:00
log . Error ( ) . Err ( err ) . Msg ( "Failed to update oauth token portal data" )
2025-11-07 02:07:33 +00:00
return
}
2026-03-20 06:04:30 +00:00
org := user . R . Organization
if org . ArcgisAccountID . IsNull ( ) {
err = org . Update ( ctx , txn , & models . OrganizationSetter {
2026-02-28 23:26:08 +00:00
ArcgisAccountID : omitnull . From ( ag_user . OrgID ) ,
2026-01-15 01:05:21 +00:00
} )
if err != nil {
log . Error ( ) . Err ( err ) . Int32 ( "id" , user . R . Organization . ID ) . Msg ( "Failed to update organization's arcgis info" )
return
}
2026-02-28 23:26:08 +00:00
log . Info ( ) . Int32 ( "org_id" , org . ID ) . Str ( "arcgis_id" , ag_user . OrgID ) . Msg ( "Updated org arcgis ID" )
2025-11-07 02:07:33 +00:00
}
2026-02-13 19:19:39 +00:00
fssync , err := fieldseeker . NewFieldSeekerFromAG ( ctx , * client )
2025-11-06 22:28:56 +00:00
if err != nil {
2026-02-13 19:19:39 +00:00
log . Error ( ) . Err ( err ) . Msg ( "Failed to create fieldseeker" )
2026-02-28 23:26:08 +00:00
return
2025-11-06 22:28:56 +00:00
}
2026-02-13 19:19:39 +00:00
log . Info ( ) . Str ( "url" , fssync . ServiceFeature . URL . String ( ) ) . Msg ( "Found Fieldseeker" )
2026-02-28 23:26:08 +00:00
// Ensure the fieldseeker service is saved on the account
2026-03-01 20:33:16 +00:00
// Why yes, we do get 'ArcGIS' and 'arcgis' from the API, why do you ask?
url_corrected := strings . Replace ( fssync . ServiceFeature . URL . String ( ) , "/arcgis/" , "/ArcGIS/" , 1 )
2026-05-01 17:28:33 +00:00
service_account , err := queryarcgis . ServiceFeatureFromURL ( ctx , url_corrected )
2026-02-28 23:26:08 +00:00
if err != nil {
2026-03-01 20:33:16 +00:00
log . Error ( ) . Err ( err ) . Str ( "url" , fssync . ServiceFeature . URL . String ( ) ) . Str ( "url_corrected" , url_corrected ) . Msg ( "no fieldseeker service to link, it should have been created before" )
2026-02-28 23:26:08 +00:00
return
}
2026-02-13 19:19:39 +00:00
setter := models . OrganizationSetter {
2026-02-28 23:26:08 +00:00
FieldseekerServiceFeatureItemID : omitnull . From ( service_account . ItemID ) ,
2026-02-13 19:19:39 +00:00
}
err = org . Update ( ctx , db . PGInstance . BobDB , & setter )
if err != nil {
log . Error ( ) . Err ( err ) . Msg ( "Failed to create new organization" )
return
2025-11-06 22:28:56 +00:00
}
2026-02-13 19:19:39 +00:00
maybeCreateWebhook ( ctx , fssync )
2026-02-28 23:26:08 +00:00
downloadFieldseekerSchema ( ctx , fssync , account . ID )
2026-03-12 23:49:16 +00:00
//notification.ClearOauth(ctx, user)
2026-01-21 03:30:03 +00:00
newOAuthTokenChannel <- struct { } { }
2025-11-06 22:28:56 +00:00
}
2026-05-01 17:28:33 +00:00
func newFieldSeeker ( ctx context . Context , oa * model . OAuthToken ) ( * fieldseeker . FieldSeeker , error ) {
2026-04-16 20:40:37 +00:00
if oa == nil {
return nil , fmt . Errorf ( "no oath token" )
}
2026-03-12 23:49:16 +00:00
row , err := sql . OrgByOauthId ( oa . ID ) . One ( ctx , db . PGInstance . BobDB )
2026-02-10 21:30:58 +00:00
if err != nil {
2026-03-12 23:49:16 +00:00
return nil , fmt . Errorf ( "Failed to get org ID from oauth %d: %w" , oa . ID , err )
2026-02-10 21:30:58 +00:00
}
// The URL for fieldseeker should be something like
// https://foo.arcgis.com/123abc/arcgis/rest/services/FieldSeekerGIS/FeatureServer
// We need to break it up
2026-02-28 23:26:08 +00:00
host , pathParts , err := extractURLParts ( row . FieldseekerURL )
2026-02-10 21:30:58 +00:00
if err != nil {
return nil , fmt . Errorf ( "Failed to break up provided url: %v" , err )
}
if len ( pathParts ) < 1 {
return nil , errors . New ( "Didn't get enough path parts" )
}
context := pathParts [ 0 ]
2026-02-13 19:19:39 +00:00
ar , err := arcgis . NewArcGISAuth (
2026-02-12 21:05:51 +00:00
ctx ,
2026-02-10 21:30:58 +00:00
arcgis . AuthenticatorOAuth {
2026-03-12 23:49:16 +00:00
AccessToken : oa . AccessToken ,
AccessTokenExpires : oa . AccessTokenExpires ,
RefreshToken : oa . RefreshToken ,
RefreshTokenExpires : oa . RefreshTokenExpires ,
2026-02-10 21:30:58 +00:00
} ,
)
2026-02-12 21:05:51 +00:00
if err != nil {
if errors . Is ( err , arcgis . ErrorInvalidAuthToken ) {
2026-03-12 23:49:16 +00:00
return nil , oauth . InvalidatedTokenError { }
2026-02-12 21:05:51 +00:00
} else if errors . Is ( err , arcgis . ErrorInvalidRefreshToken ) {
2026-03-12 23:49:16 +00:00
return nil , oauth . InvalidatedTokenError { }
2026-02-12 21:05:51 +00:00
}
2026-02-14 04:08:22 +00:00
return nil , fmt . Errorf ( "Failed to create ArcGIS client: %w" , err )
}
log . Info ( ) . Str ( "context" , context ) . Str ( "host" , host ) . Msg ( "Using base fieldseeker URL" )
2026-02-28 23:26:08 +00:00
fssync , err := fieldseeker . NewFieldSeekerFromURL ( ctx , * ar , row . FieldseekerURL )
2026-02-14 04:08:22 +00:00
if err != nil {
2026-02-13 19:19:39 +00:00
return nil , fmt . Errorf ( "Failed to create Fieldseeker client: %w" , err )
2026-02-12 21:05:51 +00:00
}
return fssync , nil
2026-02-10 21:30:58 +00:00
}
2026-05-01 17:28:33 +00:00
func updateArcgisAccount ( ctx context . Context , txn bob . Tx , client * arcgis . ArcGIS , user * models . User ) ( * model . Account , * model . User , error ) {
2026-02-10 21:30:58 +00:00
p , err := client . PortalsSelf ( ctx )
2026-01-15 19:18:34 +00:00
if err != nil {
2026-02-28 23:26:08 +00:00
return nil , nil , fmt . Errorf ( "Failed to get ArcGIS user data: %w" , err )
2026-01-15 19:18:34 +00:00
}
2026-02-28 23:26:08 +00:00
// Ensure that an arcgis account exists to attach to
account , err := ensureArcgisAccount ( ctx , txn , p , user )
2026-05-01 17:28:33 +00:00
ag_user , err := queryarcgis . UserFromID ( ctx , p . User . ID )
2026-01-16 14:52:11 +00:00
if err != nil {
2026-02-28 23:26:08 +00:00
log . Warn ( ) . Err ( err ) . Msg ( "need arcgis user account?" )
if err . Error ( ) == "sql: no rows in result set" {
2026-05-01 17:28:33 +00:00
setter := model . User {
Access : p . Access ,
Created : time . Unix ( p . User . Created , 0 ) ,
Email : p . User . Email ,
FullName : p . User . FullName ,
ID : p . User . ID ,
Level : p . User . Level ,
OrgID : p . User . OrgID ,
PublicUserID : user . ID ,
Region : p . Region ,
Role : p . User . Role ,
RoleID : p . User . RoleId ,
Username : p . User . Username ,
UserLicenseTypeID : p . User . UserLicenseTypeID ,
UserType : p . User . UserType ,
2026-02-28 23:26:08 +00:00
}
2026-05-01 17:28:33 +00:00
ag_user , err = queryarcgis . UserInsert ( ctx , txn , & setter )
2026-02-28 23:26:08 +00:00
if err != nil {
return nil , nil , fmt . Errorf ( "Failed to add arcgis user data: %w" , err )
}
} else {
return nil , nil , fmt . Errorf ( "Failed to find arcgis user: %w" , err )
}
2026-01-16 14:52:11 +00:00
}
2026-05-01 17:28:33 +00:00
err = queryarcgis . UserPrivilegesDeleteByUserID ( ctx , txn , p . User . ID )
2026-01-15 19:18:34 +00:00
if err != nil {
2026-02-28 23:26:08 +00:00
return nil , nil , fmt . Errorf ( "Failed to delete previous user privilege data: %w" , err )
2026-01-15 19:18:34 +00:00
}
for _ , priv := range p . User . Privileges {
2026-05-01 17:28:33 +00:00
s := model . UserPrivilege {
Privilege : priv ,
UserID : p . User . ID ,
2026-01-15 19:18:34 +00:00
}
2026-05-01 17:28:33 +00:00
err := queryarcgis . UserPrivilegeInsert ( ctx , txn , & s )
2026-01-15 19:18:34 +00:00
if err != nil {
2026-02-28 23:26:08 +00:00
return nil , nil , fmt . Errorf ( "Failed to add arcgis user privilege data: %w" , err )
2026-01-15 19:18:34 +00:00
}
}
log . Info ( ) . Str ( "username" , p . User . Username ) . Str ( "user_id" , p . User . ID ) . Str ( "org_id" , p . User . OrgID ) . Str ( "org_name" , p . Name ) . Str ( "license_type_id" , p . User . UserLicenseTypeID ) . Msg ( "Updated portals data" )
2026-02-28 23:26:08 +00:00
return account , ag_user , nil
}
2026-05-01 17:28:33 +00:00
func updateServiceData ( ctx context . Context , txn bob . Tx , client * arcgis . ArcGIS , user * models . User , account * model . Account ) error {
2026-02-28 23:26:08 +00:00
service_maps , err := client . MapServices ( ctx )
if err != nil {
return fmt . Errorf ( "list map services: %w" , err )
}
for _ , sm := range service_maps {
log . Info ( ) . Str ( "account-id" , account . ID ) . Str ( "arcgis-id" , sm . ID ) . Str ( "name" , sm . Name ) . Str ( "title" , sm . Title ) . Str ( "url" , sm . URL . String ( ) ) . Msg ( "inserting map service" )
2026-05-01 17:28:33 +00:00
_ , err := queryarcgis . ServiceMapFromID ( ctx , sm . ID )
2026-02-28 23:26:08 +00:00
if err != nil {
2026-03-01 20:33:16 +00:00
if err . Error ( ) == "sql: no rows in result set" {
2026-05-01 17:28:33 +00:00
setter := model . ServiceMap {
AccountID : account . ID ,
ArcgisID : sm . ID ,
Name : sm . Name ,
Title : sm . Title ,
URL : sm . URL . String ( ) ,
2026-03-01 20:33:16 +00:00
}
2026-05-01 17:28:33 +00:00
err := queryarcgis . ServiceMapInsert ( ctx , txn , & setter )
2026-03-01 20:33:16 +00:00
if err != nil {
return fmt . Errorf ( "save map service: %w" , err )
}
2026-04-17 17:47:38 +00:00
_ , err = models . TileServices . Insert ( & models . TileServiceSetter {
Name : omit . From ( sm . Name ) ,
ArcgisID : omitnull . From ( sm . ID ) ,
} ) . One ( ctx , txn )
if err != nil {
return fmt . Errorf ( "save tile service: %w" , err )
}
2026-03-01 20:33:16 +00:00
} else {
return err
}
2026-02-28 23:26:08 +00:00
}
}
services , err := client . Services ( ctx )
for _ , service := range services {
err := ensureServiceFeature ( ctx , txn , client , user , account , service )
if err != nil {
return fmt . Errorf ( "ensure service feature: %w" , err )
}
}
return nil
}
2026-05-01 17:28:33 +00:00
func ensureServiceFeature ( ctx context . Context , txn bob . Tx , client * arcgis . ArcGIS , user * models . User , account * model . Account , service * arcgis . ServiceFeature ) error {
_ , err := queryarcgis . ServiceFeatureFromURL ( ctx , service . URL . String ( ) )
2026-02-28 23:26:08 +00:00
if err == nil {
return nil
}
if err . Error ( ) != "sql: no rows in result set" {
return err
}
metadata , err := service . PopulateMetadata ( ctx )
if err != nil {
return fmt . Errorf ( "populate metadata: %w" , err )
}
2026-05-01 17:28:33 +00:00
setter := model . ServiceFeature {
AccountID : & account . ID ,
Extent : types . Box2D {
XMax : 180 ,
YMax : 90 ,
XMin : - 180 ,
YMin : - 90 ,
} ,
ItemID : metadata . ServiceItemId ,
SpatialReference : int32 ( * metadata . SpatialReference . LatestWKID ) ,
URL : service . URL . String ( ) ,
}
return queryarcgis . ServiceFeatureInsert ( ctx , txn , & setter )
2026-01-15 19:18:34 +00:00
}
2025-11-13 16:46:30 +00:00
func maybeCreateWebhook ( ctx context . Context , client * fieldseeker . FieldSeeker ) {
2026-02-10 21:30:58 +00:00
webhooks , err := client . WebhookList ( ctx )
2025-11-13 03:15:45 +00:00
if err != nil {
2026-02-10 18:54:47 +00:00
if errors . Is ( err , arcgis . ErrorNotPermitted ) {
log . Info ( ) . Msg ( "This oauth token is not allowed to get webhooks" )
return
}
2025-11-13 20:34:48 +00:00
log . Error ( ) . Err ( err ) . Msg ( "Failed to get webhooks" )
2026-02-10 18:54:47 +00:00
return
2025-11-13 03:15:45 +00:00
}
2026-02-12 21:05:51 +00:00
if webhooks == nil {
log . Error ( ) . Msg ( "nil webhooks" )
return
}
for _ , hook := range * webhooks {
2025-11-13 03:15:45 +00:00
if hook . Name == "Nidus Sync" {
2025-11-13 20:34:48 +00:00
log . Info ( ) . Msg ( "Found nidus sync hook" )
2025-11-13 03:15:45 +00:00
} else {
2025-11-13 20:34:48 +00:00
log . Info ( ) . Str ( "name" , hook . Name ) . Msg ( "Found webhook" )
2025-11-13 03:15:45 +00:00
}
}
}
2025-11-07 09:30:31 +00:00
func periodicallyExportFieldseeker ( ctx context . Context , org * models . Organization ) error {
pollTicker := time . NewTicker ( 1 )
for {
select {
case <- ctx . Done ( ) :
return nil
case <- pollTicker . C :
2026-02-10 18:54:47 +00:00
pollTicker = time . NewTicker ( 15 * time . Minute )
2026-03-12 23:49:16 +00:00
oa , err := getOAuthForOrg ( ctx , org )
2025-11-07 09:30:31 +00:00
if err != nil {
2025-11-13 20:53:20 +00:00
return fmt . Errorf ( "Failed to get oauth for org: %w" , err )
2025-11-07 09:30:31 +00:00
}
2026-03-12 23:49:16 +00:00
if oa == nil {
2026-03-20 16:38:01 +00:00
//log.Debug().Int32("org.id", org.ID).Msg("No oauth for org")
2026-03-12 23:49:16 +00:00
continue
}
2026-03-16 19:52:29 +00:00
fssync , err := newFieldSeeker ( ctx , oa )
2026-02-12 21:05:51 +00:00
if err != nil {
2026-03-12 23:49:16 +00:00
if errors . Is ( err , & oauth . InvalidatedTokenError { } ) {
2026-02-14 04:08:22 +00:00
log . Info ( ) . Int32 ( "org" , org . ID ) . Msg ( "oauth token for org is invalid, waiting for refresh" )
continue
}
2026-02-12 21:05:51 +00:00
return fmt . Errorf ( "Failed to create fieldseeker client: %w" , err )
}
logPermissions ( ctx , fssync )
syncStatusByOrg [ org . ID ] = true
err = exportFieldseekerData ( ctx , fssync , org )
2026-01-06 15:32:26 +00:00
syncStatusByOrg [ org . ID ] = false
2025-11-07 09:30:31 +00:00
if err != nil {
2025-11-13 20:53:20 +00:00
return fmt . Errorf ( "Failed to export Fieldseeker data: %w" , err )
2025-11-07 09:30:31 +00:00
}
2025-11-13 20:34:48 +00:00
log . Info ( ) . Msg ( "Completed exporting data, waiting 15 minutes to go agoin." )
2025-11-07 09:30:31 +00:00
}
}
}
2026-02-12 21:05:51 +00:00
func exportFieldseekerData ( ctx context . Context , fssync * fieldseeker . FieldSeeker , org * models . Organization ) error {
2025-11-13 20:34:48 +00:00
log . Info ( ) . Msg ( "Update Fieldseeker data" )
2025-12-02 22:12:43 +00:00
var err error
2025-11-07 10:45:59 +00:00
var stats SyncStats
2025-12-03 16:27:35 +00:00
2026-01-15 01:05:21 +00:00
pool := pond . NewResultPool [ SyncStats ] ( 20 )
group := pool . NewGroup ( )
2025-12-03 16:27:35 +00:00
var ss SyncStats
2026-02-28 23:26:08 +00:00
layers , err := fssync . Layers ( ctx )
if err != nil {
return fmt . Errorf ( "get layers: %w" , err )
}
2026-02-12 21:05:51 +00:00
for _ , l := range layers {
2026-01-15 01:05:21 +00:00
ss , err = exportFieldseekerLayer ( ctx , group , org , fssync , l )
2025-11-07 08:34:32 +00:00
if err != nil {
2025-12-03 16:27:35 +00:00
return err
2025-11-07 08:34:32 +00:00
}
2025-11-07 10:45:59 +00:00
stats . Inserts += ss . Inserts
stats . Updates += ss . Updates
stats . Unchanged += ss . Unchanged
2025-11-07 08:34:32 +00:00
}
2026-01-15 01:05:21 +00:00
results , err := group . Wait ( )
if err != nil {
return fmt . Errorf ( "one or more tasks in the work pool failed: %w" , err )
}
for _ , r := range results {
stats . Inserts += r . Inserts
stats . Updates += r . Updates
stats . Unchanged += r . Unchanged
}
2025-11-07 08:34:32 +00:00
2025-11-07 09:30:31 +00:00
setter := models . FieldseekerSyncSetter {
2025-11-07 10:45:59 +00:00
RecordsCreated : omit . From ( int32 ( stats . Inserts ) ) ,
RecordsUpdated : omit . From ( int32 ( stats . Updates ) ) ,
RecordsUnchanged : omit . From ( int32 ( stats . Unchanged ) ) ,
2025-11-07 09:30:31 +00:00
}
2025-11-24 18:08:24 +00:00
err = org . InsertFieldseekerSyncs ( ctx , db . PGInstance . BobDB , & setter )
2025-11-08 00:04:44 +00:00
if err != nil {
2025-11-13 20:53:20 +00:00
return fmt . Errorf ( "Failed to insert sync: %w" , err )
2025-11-08 00:04:44 +00:00
}
2025-11-07 09:30:31 +00:00
2025-11-21 17:28:05 +00:00
updateSummaryTables ( ctx , org )
2025-11-07 08:34:32 +00:00
return nil
}
2026-02-12 21:05:51 +00:00
func logPermissions ( ctx context . Context , fssync * fieldseeker . FieldSeeker ) {
2026-02-10 21:30:58 +00:00
/ * row , err := sql . OrgByOauthId ( oauth . ID ) . One ( ctx , db . PGInstance . BobDB )
2026-01-15 01:05:21 +00:00
if err != nil {
log . Error ( ) . Err ( err ) . Msg ( "Failed to get org in log permissions" )
return
}
2026-02-10 21:30:58 +00:00
oauth , err := models . FindOauthToken ( ctx , db . PGInstance . BobDB , row . ID )
if err != nil {
return fmt . Errorf ( "Failed to update oauth token from database: %w" , err )
}
* /
2026-02-12 21:05:51 +00:00
_ , err := fssync . AdminInfo ( ctx )
2026-01-15 04:10:54 +00:00
if err != nil {
2026-02-10 18:54:47 +00:00
if errors . Is ( err , arcgis . ErrorNotPermitted ) {
log . Info ( ) . Msg ( "This oauth token is not allowed to query for admin info" )
return
}
2026-04-28 22:10:39 +00:00
log . Warn ( ) . Err ( err ) . Msg ( "Failed to get admin info during log permissions" )
2026-01-15 04:10:54 +00:00
return
}
2026-02-10 21:30:58 +00:00
permissions , err := fssync . PermissionList ( ctx )
2026-01-15 01:05:21 +00:00
if err != nil {
log . Error ( ) . Err ( err ) . Msg ( "Failed to query permissions in log permissions" )
return
}
2026-02-12 21:05:51 +00:00
if permissions == nil {
log . Error ( ) . Msg ( "nil permissions" )
return
}
for _ , p := range * permissions {
2026-01-15 01:05:21 +00:00
log . Info ( ) . Str ( "p" , p . Principal ) . Msg ( "Permission!" )
}
}
2026-01-15 04:10:54 +00:00
2026-05-01 17:28:33 +00:00
func maintainOAuth ( ctx context . Context , aot * model . OAuthToken ) error {
2025-11-13 14:34:50 +00:00
for {
// Refresh from the database
2026-05-01 17:28:33 +00:00
oa , err := queryarcgis . OAuthTokenFromID ( ctx , int64 ( aot . ID ) )
2025-11-07 05:46:41 +00:00
if err != nil {
2025-11-13 20:53:20 +00:00
return fmt . Errorf ( "Failed to update oauth token from database: %w" , err )
2025-11-07 05:46:41 +00:00
}
2025-12-04 02:52:01 +00:00
var accessTokenDelay time . Duration
2026-03-12 23:49:16 +00:00
if oa . AccessTokenExpires . Before ( time . Now ( ) ) || time . Until ( oa . AccessTokenExpires ) < ( 3 * time . Second ) {
2026-02-14 04:08:22 +00:00
accessTokenDelay = time . Second
2025-12-04 02:52:01 +00:00
} else {
2026-03-12 23:49:16 +00:00
accessTokenDelay = time . Until ( oa . AccessTokenExpires ) - ( 3 * time . Second )
2025-11-13 03:17:23 +00:00
}
2025-12-04 02:52:01 +00:00
var refreshTokenDelay time . Duration
2026-03-12 23:49:16 +00:00
if oa . RefreshTokenExpires . Before ( time . Now ( ) ) || time . Until ( oa . RefreshTokenExpires ) < ( 3 * time . Second ) {
2026-02-14 04:08:22 +00:00
refreshTokenDelay = time . Second
2025-12-04 02:52:01 +00:00
} else {
2026-03-12 23:49:16 +00:00
refreshTokenDelay = time . Until ( oa . RefreshTokenExpires ) - ( 3 * time . Second )
2025-11-13 14:34:50 +00:00
}
2026-03-12 23:49:16 +00:00
log . Info ( ) . Int ( "id" , int ( oa . ID ) ) . Float64 ( "seconds" , accessTokenDelay . Seconds ( ) ) . Msg ( "Need to refresh access token" )
log . Info ( ) . Int ( "id" , int ( oa . ID ) ) . Float64 ( "seconds" , refreshTokenDelay . Seconds ( ) ) . Msg ( "Need to refresh refresh token" )
2025-11-13 14:34:50 +00:00
accessTokenTicker := time . NewTicker ( accessTokenDelay )
refreshTokenTicker := time . NewTicker ( refreshTokenDelay )
2025-11-07 05:46:41 +00:00
select {
case <- ctx . Done ( ) :
2025-11-11 22:36:29 +00:00
return nil
2025-11-13 03:17:23 +00:00
case <- accessTokenTicker . C :
2026-03-12 23:49:16 +00:00
err := oauth . RefreshAccessToken ( ctx , oa )
2025-11-13 03:17:23 +00:00
if err != nil {
2025-11-13 20:53:20 +00:00
return fmt . Errorf ( "Failed to refresh access token: %w" , err )
2025-11-13 03:17:23 +00:00
}
case <- refreshTokenTicker . C :
2026-03-12 23:49:16 +00:00
err := oauth . RefreshRefreshToken ( ctx , oa )
2025-11-07 08:34:32 +00:00
if err != nil {
2025-11-13 20:53:20 +00:00
return fmt . Errorf ( "Failed to maintain refresh token: %w" , err )
2025-11-07 08:34:32 +00:00
}
2025-11-07 05:46:41 +00:00
}
}
}
2025-11-11 20:09:11 +00:00
// Mark that a given oauth token has failed. This includes a notification to
// the user.
2026-05-01 17:28:33 +00:00
func markTokenFailed ( ctx context . Context , oauth * model . OAuthToken ) {
err := queryarcgis . OAuthTokenInvalidate ( ctx , int64 ( oauth . ID ) )
2025-11-11 20:09:11 +00:00
if err != nil {
2025-11-13 20:34:48 +00:00
log . Error ( ) . Str ( "err" , err . Error ( ) ) . Msg ( "Failed to mark token failed" )
2025-11-11 20:09:11 +00:00
}
2026-03-12 23:49:16 +00:00
/ *
user , err := models . FindUser ( ctx , db . PGInstance . BobDB , oauth . UserID )
if err != nil {
log . Error ( ) . Str ( "err" , err . Error ( ) ) . Msg ( "Failed to get oauth user" )
return
}
notification . NotifyOauthInvalid ( ctx , user )
* /
2025-11-13 20:34:48 +00:00
log . Info ( ) . Int ( "id" , int ( oauth . ID ) ) . Msg ( "Marked oauth token invalid" )
2025-11-11 20:09:11 +00:00
}
2025-11-13 03:17:23 +00:00
// newTimestampedFilename builds prefix + the current local time in
// YYYYMMDD_HHMMSS form + suffix.
func newTimestampedFilename(prefix, suffix string) string {
	now := time.Now()
	return prefix + now.Format("20060102_150405") + suffix
}
2025-11-07 05:46:41 +00:00
func logResponseHeaders ( resp * http . Response ) {
if resp == nil {
2025-11-13 20:34:48 +00:00
log . Info ( ) . Msg ( "Response is nil" )
2025-11-07 05:46:41 +00:00
return
2025-11-06 00:23:58 +00:00
}
2025-11-07 05:46:41 +00:00
2025-11-13 20:34:48 +00:00
log . Info ( ) . Str ( "status" , resp . Status ) . Int ( "statusCode" , resp . StatusCode ) . Msg ( "HTTP Response headers" )
2025-11-07 05:46:41 +00:00
for name , values := range resp . Header {
2025-11-13 20:34:48 +00:00
log . Info ( ) . Str ( "name" , name ) . Strs ( "values" , values ) . Msg ( "Header" )
2025-11-06 00:23:58 +00:00
}
}
2025-11-07 05:46:41 +00:00
func saveResponse ( data [ ] byte , filename string ) {
dest , err := os . Create ( filename )
2025-11-06 22:58:18 +00:00
if err != nil {
2025-11-13 20:34:48 +00:00
log . Error ( ) . Str ( "filename" , filename ) . Str ( "err" , err . Error ( ) ) . Msg ( "Failed to create file" )
2025-11-07 05:46:41 +00:00
return
2025-11-06 22:58:18 +00:00
}
2025-11-07 05:46:41 +00:00
_ , err = io . Copy ( dest , bytes . NewReader ( data ) )
if err != nil {
2025-11-13 20:34:48 +00:00
log . Error ( ) . Str ( "filename" , filename ) . Str ( "err" , err . Error ( ) ) . Msg ( "Failed to write" )
2025-11-07 05:46:41 +00:00
return
2025-11-07 02:29:34 +00:00
}
2025-11-13 20:34:48 +00:00
log . Info ( ) . Str ( "filename" , filename ) . Msg ( "Wrote response" )
2025-11-07 02:07:33 +00:00
}
2025-11-07 08:34:32 +00:00
2025-12-03 16:27:35 +00:00
/ *
2026-02-12 21:05:51 +00:00
func saveRawQuery ( fssync fieldseeker . FieldSeeker , layer arcgis . LayerFeature , query * arcgis . Query , filename string ) {
2025-11-07 08:34:32 +00:00
output , err := os . Create ( filename )
if err != nil {
2025-11-13 20:34:48 +00:00
log . Error ( ) . Str ( "filename" , filename ) . Msg ( "Failed to create file" )
2025-11-07 08:34:32 +00:00
return
}
qr , err := fssync . DoQueryRaw (
layer . ID ,
query )
if err != nil {
2025-11-13 20:34:48 +00:00
log . Error ( ) . Str ( "err" , err . Error ( ) ) . Msg ( "Failed to do query" )
2025-11-07 08:34:32 +00:00
return
}
_ , err = output . Write ( qr )
if err != nil {
2025-11-13 20:34:48 +00:00
log . Error ( ) . Str ( "err" , err . Error ( ) ) . Msg ( "Failed to write results" )
2025-11-07 08:34:32 +00:00
return
}
2025-11-13 20:34:48 +00:00
log . Info ( ) . Str ( "filename" , filename ) . Msg ( "Wrote failed query" )
2025-11-07 08:34:32 +00:00
}
2025-12-03 16:27:35 +00:00
* /
2025-11-07 08:34:32 +00:00
2026-02-28 23:26:08 +00:00
func saveOrUpdateDBRecords ( ctx context . Context , table string , qr * response . QueryResult , org_id int32 ) ( int , int , error ) {
2025-11-07 08:34:32 +00:00
inserts , updates := 0 , 0
sorted_columns := make ( [ ] string , 0 , len ( qr . Fields ) )
for _ , f := range qr . Fields {
2026-02-28 23:26:08 +00:00
sorted_columns = append ( sorted_columns , * f . Name )
2025-11-07 08:34:32 +00:00
}
sort . Strings ( sorted_columns )
objectids := make ( [ ] int , 0 )
for _ , l := range qr . Features {
2026-02-28 23:26:08 +00:00
attr := l . Attributes [ "OBJECTID" ]
attr_s := attr . String ( )
oid , err := strconv . Atoi ( attr_s )
if err != nil {
log . Warn ( ) . Str ( "attr_s" , attr_s ) . Msg ( "failed to convert" )
continue
}
objectids = append ( objectids , oid )
2025-11-07 08:34:32 +00:00
}
rows_by_objectid , err := rowmapViaQuery ( ctx , table , sorted_columns , objectids )
if err != nil {
2025-11-13 20:53:20 +00:00
return inserts , updates , fmt . Errorf ( "Failed to get existing rows: %w" , err )
2025-11-07 08:34:32 +00:00
}
// log.Println("Rows from query", len(rows_by_objectid))
for _ , feature := range qr . Features {
2026-02-28 23:26:08 +00:00
attr := feature . Attributes [ "OBJECTID" ]
attr_s := attr . String ( )
oid , err := strconv . Atoi ( attr_s )
if err != nil {
log . Warn ( ) . Str ( "attr_s" , attr_s ) . Msg ( "failed to convert" )
continue
}
row := rows_by_objectid [ oid ]
2025-11-07 08:34:32 +00:00
// If we have no matching row we'll need to create it
if len ( row ) == 0 {
2025-11-07 09:30:31 +00:00
if err := insertRowFromFeature ( ctx , table , sorted_columns , & feature , org_id ) ; err != nil {
2025-11-13 20:53:20 +00:00
return inserts , updates , fmt . Errorf ( "Failed to insert row: %w" , err )
2025-11-07 08:34:32 +00:00
}
inserts += 1
} else if hasUpdates ( row , feature ) {
2025-11-07 09:30:31 +00:00
if err := updateRowFromFeature ( ctx , table , sorted_columns , & feature , org_id ) ; err != nil {
2025-11-13 20:53:20 +00:00
return inserts , updates , fmt . Errorf ( "Failed to update row: %w" , err )
2025-11-07 08:34:32 +00:00
}
updates += 1
}
}
return inserts , updates , nil
}
// Produces a map of OBJECTID to a 'row' which is in turn a map of column names to their values as strings
func rowmapViaQuery ( ctx context . Context , table string , sorted_columns [ ] string , objectids [ ] int ) ( map [ int ] map [ string ] string , error ) {
result := make ( map [ int ] map [ string ] string )
query := selectAllFromQueryResult ( table , sorted_columns )
args := pgx . NamedArgs {
"objectids" : objectids ,
}
2025-11-24 18:08:24 +00:00
rows , err := db . PGInstance . PGXPool . Query ( ctx , query , args )
2025-11-07 08:34:32 +00:00
if err != nil {
2025-11-13 20:53:20 +00:00
return result , fmt . Errorf ( "Failed to query rows: %w" , err )
2025-11-07 08:34:32 +00:00
}
defer rows . Close ( )
// +2 for geometry x and geometry x
columnNames := make ( [ ] string , len ( sorted_columns ) + 2 )
2026-05-01 20:49:37 +00:00
copy ( columnNames , sorted_columns )
2025-11-07 08:34:32 +00:00
columnNames [ len ( sorted_columns ) ] = "geometry_x"
columnNames [ len ( sorted_columns ) + 1 ] = "geometry_y"
rowSlice , err := pgx . CollectRows ( rows , func ( row pgx . CollectableRow ) ( map [ string ] string , error ) {
fieldDescriptions := row . FieldDescriptions ( )
values := make ( [ ] interface { } , len ( fieldDescriptions ) )
valuePtrs := make ( [ ] interface { } , len ( fieldDescriptions ) )
for i := range values {
valuePtrs [ i ] = & values [ i ]
}
if err := row . Scan ( valuePtrs ... ) ; err != nil {
return nil , err
}
result := make ( map [ string ] string )
for i , fd := range fieldDescriptions {
if values [ i ] != nil {
result [ fd . Name ] = fmt . Sprintf ( "%v" , values [ i ] )
//log.Printf("col %v type %T val %v", fd.Name, values[i], values[i])
} else {
result [ fd . Name ] = ""
}
}
return result , nil
} )
if err != nil {
2025-11-13 20:53:20 +00:00
return result , fmt . Errorf ( "Failed to collect rows: %w" , err )
2025-11-07 08:34:32 +00:00
}
for _ , row := range rowSlice {
o := row [ "objectid" ]
objectid , err := strconv . Atoi ( o )
if err != nil {
2025-11-13 20:53:20 +00:00
return result , fmt . Errorf ( "Failed to parse objectid %s: %w" , o , err )
2025-11-07 08:34:32 +00:00
}
result [ objectid ] = row
}
return result , nil
}
2026-02-28 23:26:08 +00:00
func insertRowFromFeature ( ctx context . Context , table string , sorted_columns [ ] string , feature * response . Feature , org_id int32 ) error {
txn , err := db . PGInstance . BobDB . BeginTx ( ctx , nil )
2025-11-07 08:34:32 +00:00
if err != nil {
return fmt . Errorf ( "Unable to start transaction" )
}
2026-02-28 23:26:08 +00:00
defer txn . Rollback ( ctx )
2025-11-07 08:34:32 +00:00
2026-02-28 23:26:08 +00:00
err = insertRowFromFeatureFS ( ctx , txn , table , sorted_columns , feature , org_id )
2025-11-07 08:34:32 +00:00
if err != nil {
2025-11-13 20:53:20 +00:00
return fmt . Errorf ( "Unable to insert FS: %w" , err )
2025-11-07 08:34:32 +00:00
}
2026-02-28 23:26:08 +00:00
err = insertRowFromFeatureHistory ( ctx , txn , table , sorted_columns , feature , org_id , 1 )
2025-11-07 08:34:32 +00:00
if err != nil {
2025-11-13 20:53:20 +00:00
return fmt . Errorf ( "Failed to insert history: %w" , err )
2025-11-07 08:34:32 +00:00
}
2026-02-28 23:26:08 +00:00
txn . Commit ( ctx )
2025-11-07 08:34:32 +00:00
if err != nil {
2025-11-13 20:53:20 +00:00
return fmt . Errorf ( "Failed to commit transaction: %w" , err )
2025-11-07 08:34:32 +00:00
}
return nil
}
2026-02-28 23:26:08 +00:00
// insertRowFromFeatureFS inserts one ArcGIS feature into the FieldSeeker
// mirror table `table` inside the supplied transaction. The INSERT names
// every column in sorted_columns plus the bookkeeping columns appended
// below; values are bound by name from the feature's attributes.
func insertRowFromFeatureFS(ctx context.Context, txn bob.Tx, table string, sorted_columns []string, feature *response.Feature, org_id int32) error {
	// Create the query to produce the main row
	var sb strings.Builder
	sb.WriteString("INSERT INTO ")
	sb.WriteString(table)
	sb.WriteString(" (")
	for _, field := range sorted_columns {
		sb.WriteString(field)
		sb.WriteString(",")
	}
	// Specially add the geometry values since they aren't in the fields
	sb.WriteString("geometry_x,geometry_y,organization_id,updated")
	sb.WriteString(")\nVALUES (")
	for _, field := range sorted_columns {
		sb.WriteString("@")
		sb.WriteString(field)
		sb.WriteString(",")
	}
	// Specially add the geometry values since they aren't in the fields
	sb.WriteString("@geometry_x,@geometry_y,@organization_id,@updated)")
	args := pgx.NamedArgs{}
	for k, v := range feature.Attributes {
		args[k] = v
	}
	// specially add geometry since it isn't in the list of attributes
	//args["geometry_x"] = feature.Geometry.X
	//args["geometry_y"] = feature.Geometry.Y
	// NOTE(review): the geometry bindings above are commented out, yet the
	// statement still references @geometry_x/@geometry_y. Unless those keys
	// now arrive via feature.Attributes, execution will fail to bind the
	// named args — confirm the intended source of the geometry values.
	args["organization_id"] = org_id
	args["updated"] = time.Now()
	_, err := txn.ExecContext(ctx, sb.String(), args)
	if err != nil {
		return fmt.Errorf("Failed to insert row into %s: %w", table, err)
	}
	return nil
}
2026-02-28 23:26:08 +00:00
// hasUpdates reports whether the stored database row differs from the
// incoming ArcGIS feature.
//
// NOTE(review): currently stubbed to always return false, so existing
// rows are never treated as changed (updates are effectively disabled).
// The previous comparison logic is preserved in the block comment below,
// apparently pending migration to the response.* value types.
func hasUpdates(row map[string]string, feature response.Feature) bool {
	return false
	/*
		for key, value := range feature.Attributes {
			rowdata := row[strings.ToLower(key)]
			// We'll accept any 'nil' as represented by the empty string in the database
			if value == nil {
				if rowdata == "" {
					continue
				} else if len(rowdata) > 0 {
					return true
				} else {
					log.Error().Msg("Looks like our original value is nil, but our row value is something non-empty with a zero length. Need a programmer to look into this.")
				}
			}

			// check strings first, their simplest
			if featureAsString, ok := value.(response.TextValue); ok {
				if featureAsString.String() != rowdata {
					return true
				}
				continue
			} else if featureAsInt, ok := value.(response.Int32Value); ok {
				// Previously had a nil value, now we have a real value
				if rowdata == "" {
					return true
				}
				rowAsInt, err := strconv.Atoi(rowdata)
				if err != nil {
					log.Error().Msg(fmt.Sprintf("Failed to convert '%s' to an int to compare against %v for %v", rowdata, featureAsInt, key))
				}
				if rowAsInt != featureAsInt.V {
					return true
				} else {
					continue
				}
			} else if featureAsFloat, ok := value.(Float64Value); ok {
				// Previously had a nil value, now we have a real value
				if rowdata == "" {
					return true
				}
				rowAsFloat, err := strconv.ParseFloat(rowdata, 64)
				if err != nil {
					log.Error().Msg(fmt.Sprintf("Failed to convert '%s' to a float64 to compare against %v for %v", rowdata, featureAsFloat, key))
				}
				if rowAsFloat != featureAsFloat {
					return true
				} else {
					continue
				}
			}

			log.Error().Str("key", key).Str("rowdata", rowdata).Msg("we've hit a point where we can't tell if we have an update or not, need a programmer to look at the above")
		}

		return false
	*/
}
2026-02-28 23:26:08 +00:00
// updateRowFromFeature records a new history version and updates the live
// row for a feature that already exists in the database.
//
// NOTE(review): currently stubbed to a no-op returning nil — changed rows
// are never written back. The previous implementation is preserved in the
// block comment below.
func updateRowFromFeature(ctx context.Context, table string, sorted_columns []string, feature *response.Feature, org_id int32) error {
	return nil
	/*
		// Get the current highest version for the row in question
		history_table := toHistoryTable(table)
		var sb strings.Builder
		sb.WriteString("SELECT MAX(version) FROM ")
		sb.WriteString(history_table)
		sb.WriteString(" WHERE OBJECTID=@objectid")
		args := pgx.NamedArgs{}
		o := feature.Attributes["OBJECTID"].(float64)
		args["objectid"] = int(o)
		var version int
		if err := db.PGInstance.PGXPool.QueryRow(ctx, sb.String(), args).Scan(&version); err != nil {
			return fmt.Errorf("Failed to query for version: %w", err)
		}

		txn, err := db.PGInstance.BobDB.BeginTx(ctx, nil)
		if err != nil {
			return fmt.Errorf("Unable to start transaction")
		}
		defer txn.Rollback(ctx)

		err = insertRowFromFeatureHistory(ctx, txn, table, sorted_columns, feature, org_id, version+1)
		if err != nil {
			return fmt.Errorf("Failed to insert history: %w", err)
		}
		err = updateRowFromFeatureFS(ctx, txn, table, sorted_columns, feature)
		if err != nil {
			return fmt.Errorf("Failed to update row from feature: %w", err)
		}

		txn.Commit(ctx)
		return nil
	*/
}
2026-02-28 23:26:08 +00:00
// insertRowFromFeatureHistory appends a versioned snapshot of the feature
// to the table's companion history table (see toHistoryTable), within the
// supplied transaction. `version` is chosen by the caller (1 for new rows).
func insertRowFromFeatureHistory(ctx context.Context, transaction bob.Tx, table string, sorted_columns []string, feature *response.Feature, org_id int32, version int) error {
	history_table := toHistoryTable(table)
	var sb strings.Builder
	sb.WriteString("INSERT INTO ")
	sb.WriteString(history_table)
	sb.WriteString(" (")
	for _, field := range sorted_columns {
		sb.WriteString(field)
		sb.WriteString(",")
	}
	// Specially add the geometry values since they aren't in the fields
	sb.WriteString("created,geometry_x,geometry_y,organization_id,version")
	sb.WriteString(")\nVALUES (")
	for _, field := range sorted_columns {
		sb.WriteString("@")
		sb.WriteString(field)
		sb.WriteString(",")
	}
	// Specially add the geometry values since they aren't in the fields
	sb.WriteString("@created,@geometry_x,@geometry_y,@organization_id,@version)")
	args := pgx.NamedArgs{}
	for k, v := range feature.Attributes {
		args[k] = v
	}
	args["created"] = time.Now()
	args["organization_id"] = org_id
	args["version"] = version
	// NOTE(review): the statement binds @geometry_x/@geometry_y but args
	// never sets them here — unless those keys arrive via
	// feature.Attributes, execution will fail to bind. Confirm against the
	// feature schema.
	if _, err := transaction.ExecContext(ctx, sb.String(), args); err != nil {
		return fmt.Errorf("Failed to insert history row into %s: %w", table, err)
	}
	return nil
}
// selectAllFromQueryResult builds the SELECT used to load existing rows
// for a batch of OBJECTIDs (bound later as the @objectids named arg).
// sorted_columns is currently unused — SELECT * returns every column —
// but the parameter is kept so the signature stays stable for callers.
func selectAllFromQueryResult(table string, sorted_columns []string) string {
	// Plain concatenation of three fixed pieces; a strings.Builder was
	// overkill here.
	return "SELECT * FROM " + table + " WHERE OBJECTID=ANY(@objectids)"
}
// toHistoryTable derives the history-table name from a FieldSeeker table
// name by replacing the 3-byte vendor prefix (e.g. "FS_") with "History_".
// Callers are expected to pass prefixed names at least 3 bytes long.
func toHistoryTable(table string) string {
	const prefixLen = len("FS_") // source tables share a 3-byte prefix
	return "History_" + table[prefixLen:]
}
2026-02-28 23:26:08 +00:00
// updateRowFromFeatureFS rewrites the live row identified by the feature's
// OBJECTID with the feature's current attribute values, within the
// supplied transaction. OBJECTID itself is the key and is never SET.
func updateRowFromFeatureFS(ctx context.Context, transaction bob.Tx, table string, sorted_columns []string, feature *response.Feature) error {
	// Create the query to produce the main row
	var sb strings.Builder
	sb.WriteString("UPDATE ")
	sb.WriteString(table)
	sb.WriteString(" SET ")
	for _, field := range sorted_columns {
		// OBJECTID is special as our primary key, so skip it
		if field == "OBJECTID" {
			continue
		}
		sb.WriteString(field)
		sb.WriteString("=@")
		sb.WriteString(field)
		sb.WriteString(",")
	}
	// Specially add the geometry values since they aren't in the fields
	sb.WriteString("geometry_x=@geometry_x,geometry_y=@geometry_y,updated=@updated WHERE OBJECTID=@OBJECTID")
	args := pgx.NamedArgs{}
	for k, v := range feature.Attributes {
		args[k] = v
	}
	// specially add geometry since it isn't in the list of attributes
	//args["geometry_x"] = feature.Geometry.X
	//args["geometry_y"] = feature.Geometry.Y
	// NOTE(review): the geometry bindings above are commented out, yet the
	// statement still references @geometry_x/@geometry_y — unless those
	// keys arrive via feature.Attributes this will fail to bind. Confirm
	// the intended source of the geometry values.
	args["updated"] = time.Now()
	_, err := transaction.ExecContext(ctx, sb.String(), args)
	if err != nil {
		return fmt.Errorf("Failed to update row into %s: %w", table, err)
	}
	return nil
}
2025-11-24 18:08:24 +00:00
2026-02-13 19:19:39 +00:00
func exportFieldseekerLayer ( ctx context . Context , group pond . ResultTaskGroup [ SyncStats ] , org * models . Organization , fssync * fieldseeker . FieldSeeker , layer response . Layer ) ( SyncStats , error ) {
2025-12-03 16:27:35 +00:00
var stats SyncStats
2026-02-28 23:26:08 +00:00
return stats , nil
/ *
count , err := fssync . QueryCount ( ctx , layer . ID )
if err != nil {
return stats , fmt . Errorf ( "Failed to get counts for layer %s (%d): %w" , layer . Name , layer . ID , err )
}
if count . Count == 0 {
log . Info ( ) . Str ( "name" , layer . Name ) . Uint ( "layer_id" , layer . ID ) . Int32 ( "org_id" , org . ID ) . Msg ( "No records to download" )
return stats , nil
}
max_records , err := fssync . MaxRecordCount ( ctx )
if err != nil {
return stats , fmt . Errorf ( "Failed to get max records: %w" , err )
}
l , err := fieldseeker . NameToLayerType ( layer . Name )
if err != nil {
return stats , fmt . Errorf ( "Failed to get layer for '%s': %w" , layer . Name , err )
}
log . Info ( ) . Str ( "name" , layer . Name ) . Uint ( "layer_id" , layer . ID ) . Int32 ( "org_id" , org . ID ) . Int ( "count" , count . Count ) . Uint ( "iterations" , uint ( count . Count ) / uint ( max_records ) ) . Msg ( "Queuing jobs for layer" )
for offset := uint ( 0 ) ; offset < uint ( count . Count ) ; offset += uint ( max_records ) {
group . SubmitErr ( func ( ) ( SyncStats , error ) {
var ss SyncStats
var name string
var inserts , unchanged , updates uint
var err error
switch l {
case fieldseeker . LayerAerialSpraySession :
name = "AerialSpraySession"
rows , err := fssync . AerialSpraySession ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdateAerialSpraySession ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerAerialSprayLine :
name = "LayerAerialSprayLine"
rows , err := fssync . AerialSprayLine ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdateAerialSprayLine ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerBarrierSpray :
name = "LayerBarrierSpray"
rows , err := fssync . BarrierSpray ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdateBarrierSpray ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerBarrierSprayRoute :
name = "LayerBarrierSprayRoute"
rows , err := fssync . BarrierSprayRoute ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdateBarrierSprayRoute ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerContainerRelate :
name = "LayerContainerRelate"
rows , err := fssync . ContainerRelate ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdateContainerRelate ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerFieldScoutingLog :
name = "LayerFieldScoutingLog"
rows , err := fssync . FieldScoutingLog ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdateFieldScoutingLog ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerHabitatRelate :
name = "LayerHabitatRelate"
rows , err := fssync . HabitatRelate ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdateHabitatRelate ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerInspectionSample :
name = "LayerInspectionSample"
rows , err := fssync . InspectionSample ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdateInspectionSample ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerInspectionSampleDetail :
name = "LayerInspectionSampleDetail"
rows , err := fssync . InspectionSampleDetail ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdateInspectionSampleDetail ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerLandingCount :
name = "LayerLandingCount"
rows , err := fssync . LandingCount ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdateLandingCount ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerLandingCountLocation :
name = "LayerLandingCountLocation"
rows , err := fssync . LandingCountLocation ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdateLandingCountLocation ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerLineLocation :
name = "LayerLineLocation"
rows , err := fssync . LineLocation ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdateLineLocation ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerLocationTracking :
name = "LayerLocationTracking"
rows , err := fssync . LocationTracking ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdateLocationTracking ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerMosquitoInspection :
name = "LayerMosquitoInspection"
rows , err := fssync . MosquitoInspection ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdateMosquitoInspection ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerOfflineMapAreas :
name = "LayerOfflineMapAreas"
rows , err := fssync . OfflineMapAreas ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdateOfflineMapAreas ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerProposedTreatmentArea :
name = "LayerProposedTreatmentArea"
rows , err := fssync . ProposedTreatmentArea ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdateProposedTreatmentArea ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerPointLocation :
name = "LayerPointLocation"
rows , err := fssync . PointLocation ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdatePointLocation ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerPolygonLocation :
name = "LayerPolygonLocation"
rows , err := fssync . PolygonLocation ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdatePolygonLocation ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerPoolDetail :
name = "LayerPoolDetail"
rows , err := fssync . PoolDetail ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdatePoolDetail ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerPool :
name = "LayerPool"
rows , err := fssync . Pool ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdatePool ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerPoolBuffer :
name = "LayerPoolBuffer"
rows , err := fssync . PoolBuffer ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdatePoolBuffer ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerQALarvCount :
name = "LayerQALarvCount"
rows , err := fssync . QALarvCount ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdateQALarvCount ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerQAMosquitoInspection :
name = "LayerQAMosquitoInspection"
rows , err := fssync . QAMosquitoInspection ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdateQAMosquitoInspection ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerQAProductObservation :
name = "LayerQAProductObservation"
rows , err := fssync . QAProductObservation ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdateQAProductObservation ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerRestrictedArea :
name = "LayerRestrictedArea"
rows , err := fssync . RestrictedArea ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdateRestrictedArea ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerRodentInspection :
name = "LayerRodentInspection"
rows , err := fssync . RodentInspection ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdateRodentInspection ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerRodentLocation :
name = "LayerRodentLocation"
rows , err := fssync . RodentLocation ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdateRodentLocation ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerSampleCollection :
name = "LayerSampleCollection"
rows , err := fssync . SampleCollection ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdateSampleCollection ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerSampleLocation :
name = "LayerSampleLocation"
rows , err := fssync . SampleLocation ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdateSampleLocation ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerServiceRequest :
name = "LayerServiceRequest"
rows , err := fssync . ServiceRequest ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdateServiceRequest ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerSpeciesAbundance :
name = "LayerSpeciesAbundance"
rows , err := fssync . SpeciesAbundance ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdateSpeciesAbundance ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerStormDrain :
name = "LayerStormDrain"
rows , err := fssync . StormDrain ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdateStormDrain ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerTracklog :
name = "LayerTracklog"
rows , err := fssync . Tracklog ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdateTracklog ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerTrapLocation :
name = "LayerTrapLocation"
rows , err := fssync . TrapLocation ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdateTrapLocation ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerTrapData :
name = "LayerTrapData"
rows , err := fssync . TrapData ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdateTrapData ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerTimeCard :
name = "LayerTimeCard"
rows , err := fssync . TimeCard ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdateTimeCard ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerTreatment :
name = "LayerTreatment"
rows , err := fssync . Treatment ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdateTreatment ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerTreatmentArea :
name = "LayerTreatmentArea"
rows , err := fssync . TreatmentArea ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdateTreatmentArea ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerULVSprayRoute :
name = "LayerULVSprayRoute"
rows , err := fssync . ULVSprayRoute ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdateULVSprayRoute ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerZones :
name = "LayerZones"
rows , err := fssync . Zones ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdateZones ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
case fieldseeker . LayerZones2 :
name = "LayerZones2"
rows , err := fssync . Zones2 ( ctx , offset )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to query %s: %w" , name , err )
}
inserts , updates , err = db . SaveOrUpdateZones2 ( ctx , org , rows )
if err != nil {
return SyncStats { } , fmt . Errorf ( "Failed to update %s: %w" , name , err )
}
unchanged = uint ( len ( rows ) ) - inserts - updates
default :
return ss , errors . New ( "Unrecognized layer" )
}
ss . Inserts = inserts
ss . Updates = updates
ss . Unchanged = unchanged
return ss , err
} )
}
//log.Info().Uint("inserts", stats.Inserts).Uint("updates", stats.Updates).Uint("no change", stats.Unchanged).Str("layer", layer.Name).Msg("Finished layer")
2025-12-03 16:27:35 +00:00
return stats , nil
2026-02-28 23:26:08 +00:00
* /
}
2026-05-01 17:28:33 +00:00
func ensureArcgisAccount ( ctx context . Context , txn bob . Tx , portal * response . Portal , user * models . User ) ( * model . Account , error ) {
account , err := queryarcgis . AccountFromID ( ctx , portal . User . OrgID )
2025-12-03 16:27:35 +00:00
if err != nil {
2026-02-28 23:26:08 +00:00
log . Warn ( ) . Err ( err ) . Msg ( "need arcgis account?" )
if err . Error ( ) == "sql: no rows in result set" {
2026-05-01 17:28:33 +00:00
setter := model . Account {
ID : portal . User . OrgID ,
Name : portal . Name ,
OrganizationID : user . OrganizationID ,
URLFeatures : nil ,
URLInsights : nil ,
URLGeometry : nil ,
URLNotebooks : nil ,
URLTiles : nil ,
2025-12-03 16:27:35 +00:00
}
2026-05-01 17:28:33 +00:00
account , err = queryarcgis . AccountInsert ( ctx , txn , & setter )
2026-02-28 23:26:08 +00:00
if err != nil {
return nil , fmt . Errorf ( "create arcgis account: %w" , err )
}
} else {
return nil , fmt . Errorf ( "find arcgis account: %w" , err )
}
2025-12-03 16:27:35 +00:00
}
2026-02-28 23:26:08 +00:00
return account , nil
2025-12-03 16:27:35 +00:00
}
2026-03-16 19:52:29 +00:00
// updateSummaryTables refreshes every per-organization H3 summary table:
// mosquito sources, service requests, and traps.
func updateSummaryTables(ctx context.Context, org *models.Organization) {
	updaters := []func(context.Context, *models.Organization){
		updateSummaryMosquitoSource,
		updateSummaryServiceRequest,
		updateSummaryTrap,
	}
	for _, update := range updaters {
		update(ctx, org)
	}
}
func aggregateAtResolution ( ctx context . Context , resolution int , org_id int32 , type_ enums . H3aggregationtype , cells [ ] h3 . Cell ) error {
var err error
log . Debug ( ) . Int ( "resolution" , resolution ) . Str ( "type" , string ( type_ ) ) . Msg ( "Working summary layer" )
cellToCount := make ( map [ h3 . Cell ] int , 0 )
for _ , cell := range cells {
scaled , err := cell . Parent ( resolution )
if err != nil {
log . Error ( ) . Err ( err ) . Int ( "resolution" , resolution ) . Msg ( "Failed to get cell's parent at resolution" )
continue
}
cellToCount [ scaled ] = cellToCount [ scaled ] + 1
}
_ , err = models . H3Aggregations . Delete (
dm . Where (
psql . And (
models . H3Aggregations . Columns . OrganizationID . EQ ( psql . Arg ( org_id ) ) ,
models . H3Aggregations . Columns . Resolution . EQ ( psql . Arg ( resolution ) ) ,
models . H3Aggregations . Columns . Type . EQ ( psql . Arg ( type_ ) ) ,
) ,
) ,
) . Exec ( ctx , db . PGInstance . BobDB )
if err != nil {
return fmt . Errorf ( "Failed to clear previous aggregation: %w" , err )
}
2026-05-01 20:49:37 +00:00
var to_insert = make ( [ ] bob . Mod [ * dialect . InsertQuery ] , 0 )
2026-03-16 19:52:29 +00:00
to_insert = append ( to_insert , im . Into ( "h3_aggregation" , "cell" , "resolution" , "count_" , "type_" , "organization_id" , "geometry" ) )
for cell , count := range cellToCount {
polygon , err := h3utils . CellToPostgisGeometry ( cell )
if err != nil {
log . Error ( ) . Err ( err ) . Msg ( "Failed to get PostGIS geometry" )
continue
}
// log.Info().Str("polygon", polygon).Msg("Going to insert")
to_insert = append ( to_insert , im . Values ( psql . Arg ( cell . String ( ) , resolution , count , type_ , org_id ) , psql . F ( "st_geomfromtext" , psql . S ( polygon ) , 4326 ) ) )
}
to_insert = append ( to_insert , im . OnConflict ( "cell, organization_id, type_" ) . DoUpdate (
im . SetCol ( "count_" ) . To ( psql . Raw ( "EXCLUDED.count_" ) ) ,
) )
//log.Info().Str("sql", insertQueryToString(psql.Insert(to_insert...))).Msg("Updating...")
_ , err = psql . Insert ( to_insert ... ) . Exec ( ctx , db . PGInstance . BobDB )
if err != nil {
return fmt . Errorf ( "Failed to add h3 aggregation: %w" , err )
}
return nil
}
// updateSummaryMosquitoSource rebuilds the mosquito-source H3 aggregation for
// one organization at every H3 resolution (0-15). Errors are logged and the
// remaining work continues; nothing is returned to the caller.
func updateSummaryMosquitoSource(ctx context.Context, org *models.Organization) {
	locations, err := org.Pointlocations().All(ctx, db.PGInstance.BobDB)
	if err != nil {
		log.Error().Err(err).Msg("Failed to get all point locations")
		return
	}
	if len(locations) == 0 {
		log.Info().Int("org_id", int(org.ID)).Msg("No updates to perform")
		return
	}

	// Collect the H3 cell of every location that has one; rows with a null
	// cell or an unparseable cell value are skipped.
	var cells []h3.Cell
	for _, location := range locations {
		if location.H3cell.IsNull() {
			continue
		}
		cell, cellErr := h3utils.ToCell(location.H3cell.MustGet())
		if cellErr != nil {
			log.Error().Err(cellErr).Msg("Failed to get geometry point")
			continue
		}
		cells = append(cells, cell)
	}

	for resolution := 0; resolution < 16; resolution++ {
		if aggErr := aggregateAtResolution(ctx, resolution, org.ID, enums.H3aggregationtypeMosquitosource, cells); aggErr != nil {
			log.Error().Err(aggErr).Int("resolution", resolution).Msg("Failed to aggregate mosquito source")
		}
	}
}
// updateSummaryServiceRequest rebuilds the service-request H3 aggregation for
// one organization at every H3 resolution (0-15). Errors are logged and the
// remaining work continues; nothing is returned to the caller.
func updateSummaryServiceRequest(ctx context.Context, org *models.Organization) {
	requests, err := org.Servicerequests().All(ctx, db.PGInstance.BobDB)
	if err != nil {
		log.Error().Err(err).Msg("Failed to get all service requests")
		return
	}
	if len(requests) == 0 {
		log.Info().Int("org_id", int(org.ID)).Msg("No updates to perform")
		return
	}

	// Collect the H3 cell of every request that has one; rows with a null
	// cell or an unparseable cell value are skipped.
	var cells []h3.Cell
	for _, request := range requests {
		if request.H3cell.IsNull() {
			continue
		}
		cell, cellErr := h3utils.ToCell(request.H3cell.MustGet())
		if cellErr != nil {
			log.Error().Err(cellErr).Msg("Failed to get geometry point")
			continue
		}
		cells = append(cells, cell)
	}

	for resolution := 0; resolution < 16; resolution++ {
		if aggErr := aggregateAtResolution(ctx, resolution, org.ID, enums.H3aggregationtypeServicerequest, cells); aggErr != nil {
			log.Error().Err(aggErr).Int("resolution", resolution).Msg("Failed to aggregate service request")
		}
	}
}
// updateSummaryTrap rebuilds the trap H3 aggregation for one organization at
// every H3 resolution (0-15). Errors are logged and the remaining work
// continues; nothing is returned to the caller.
func updateSummaryTrap(ctx context.Context, org *models.Organization) {
	trapLocations, err := org.Traplocations().All(ctx, db.PGInstance.BobDB)
	if err != nil {
		log.Error().Err(err).Msg("Failed to get all trap locations")
		return
	}
	if len(trapLocations) == 0 {
		log.Info().Int("org_id", int(org.ID)).Msg("No updates to perform")
		return
	}

	// Collect the H3 cell of every trap that has one; rows with a null cell
	// or an unparseable cell value are skipped.
	var cells []h3.Cell
	for _, trap := range trapLocations {
		if trap.H3cell.IsNull() {
			continue
		}
		cell, cellErr := h3utils.ToCell(trap.H3cell.MustGet())
		if cellErr != nil {
			log.Error().Err(cellErr).Msg("Failed to get geometry point")
			continue
		}
		cells = append(cells, cell)
	}

	for resolution := 0; resolution < 16; resolution++ {
		if aggErr := aggregateAtResolution(ctx, resolution, org.ID, enums.H3aggregationtypeTrap, cells); aggErr != nil {
			log.Error().Err(aggErr).Int("resolution", resolution).Msg("Failed to aggregate trap")
		}
	}
}