lint: remove unused code from platform/arcgis.go, platform/text, rmo

- Remove 7 unused functions from platform/arcgis.go (generateCodeChallenge,
  generateCodeVerifier, newTimestampedFilename, logResponseHeaders, saveResponse,
  saveOrUpdateDBRecords, rowmapViaQuery) plus orphaned stubs
- Delete platform/text/db.go (entirely unused)
- Remove insertTextLog from platform/text/send.go
- Delete rmo/image.go, rmo/mailer.go, rmo/scss.go, rmo/district.go, rmo/water.go
This commit is contained in:
Eli Ribble 2026-05-09 17:10:03 +00:00
parent 9ce5058e85
commit b9a68aab04
8 changed files with 0 additions and 566 deletions

View file

@ -1,20 +1,12 @@
package platform package platform
import ( import (
"bytes"
"context" "context"
"crypto/rand"
"crypto/sha256"
"encoding/base64"
"errors" "errors"
"fmt" "fmt"
"io"
"net/http"
"net/url" "net/url"
"os" "os"
"path/filepath" "path/filepath"
"sort"
"strconv"
"strings" "strings"
"sync" "sync"
"time" "time"
@ -205,21 +197,6 @@ func extractURLParts(urlString string) (string, []string, error) {
} }
// Helper function to generate code challenge from code verifier // Helper function to generate code challenge from code verifier
// generateCodeChallenge derives the PKCE "S256" code challenge for the given
// code verifier: the SHA-256 digest of the verifier, base64url-encoded with
// no padding (RFC 7636 section 4.2).
func generateCodeChallenge(codeVerifier string) string {
	digest := sha256.Sum256([]byte(codeVerifier))
	challenge := base64.RawURLEncoding.EncodeToString(digest[:])
	return challenge
}
// Generate a random code verifier for PKCE
// generateCodeVerifier returns a fresh random PKCE code verifier: 64 random
// bytes (512 bits), base64url-encoded without padding. It returns the empty
// string if the system RNG fails.
func generateCodeVerifier() string {
	buf := make([]byte, 64) // 64 bytes = 512 bits
	if _, err := rand.Read(buf); err != nil {
		return ""
	}
	return base64.RawURLEncoding.EncodeToString(buf)
}
// Find out what we can about this user // Find out what we can about this user
func updateArcgisUserData(ctx context.Context, user *models.User, oauth *model.OAuthToken) { func updateArcgisUserData(ctx context.Context, user *models.User, oauth *model.OAuthToken) {
client, err := arcgis.NewArcGISAuth( client, err := arcgis.NewArcGISAuth(
@ -673,37 +650,6 @@ func markTokenFailed(ctx context.Context, oauth *model.OAuthToken) {
log.Info().Int("id", int(oauth.ID)).Msg("Marked oauth token invalid") log.Info().Int("id", int(oauth.ID)).Msg("Marked oauth token invalid")
} }
// newTimestampedFilename builds a filename of the form
// <prefix>YYYYMMDD_HHMMSS<suffix> using the current local time.
func newTimestampedFilename(prefix, suffix string) string {
	return prefix + time.Now().Format("20060102_150405") + suffix
}
// logResponseHeaders logs the status line and every header of resp at info
// level. A nil response is logged and otherwise ignored (nil-safe).
func logResponseHeaders(resp *http.Response) {
	if resp == nil {
		log.Info().Msg("Response is nil")
		return
	}
	log.Info().
		Str("status", resp.Status).
		Int("statusCode", resp.StatusCode).
		Msg("HTTP Response headers")
	for key, vals := range resp.Header {
		log.Info().Str("name", key).Strs("values", vals).Msg("Header")
	}
}
// saveResponse writes data to filename, logging (rather than returning) any
// failure. Intended for debugging dumps of raw HTTP responses.
func saveResponse(data []byte, filename string) {
	dest, err := os.Create(filename)
	if err != nil {
		log.Error().Str("filename", filename).Str("err", err.Error()).Msg("Failed to create file")
		return
	}
	// Close the file on every path — the original leaked the descriptor.
	defer func() {
		if cerr := dest.Close(); cerr != nil {
			log.Error().Str("filename", filename).Str("err", cerr.Error()).Msg("Failed to close file")
		}
	}()
	_, err = io.Copy(dest, bytes.NewReader(data))
	if err != nil {
		log.Error().Str("filename", filename).Str("err", err.Error()).Msg("Failed to write")
		return
	}
	log.Info().Str("filename", filename).Msg("Wrote response")
}
/* /*
func saveRawQuery(fssync fieldseeker.FieldSeeker, layer arcgis.LayerFeature, query *arcgis.Query, filename string) { func saveRawQuery(fssync fieldseeker.FieldSeeker, layer arcgis.LayerFeature, query *arcgis.Query, filename string) {
@ -728,118 +674,6 @@ func saveRawQuery(fssync fieldseeker.FieldSeeker, layer arcgis.LayerFeature, que
} }
*/ */
// saveOrUpdateDBRecords upserts the features of an ArcGIS query result into
// the given database table on behalf of one organization. It returns the
// number of rows inserted, the number updated, and any error.
// NOTE(review): assumes every feature carries a numeric "OBJECTID" attribute;
// features whose OBJECTID does not parse are skipped with a warning.
func saveOrUpdateDBRecords(ctx context.Context, table string, qr *response.QueryResult, org_id int32) (int, int, error) {
	inserts, updates := 0, 0
	// Sort the column names so generated SQL is deterministic across runs.
	sorted_columns := make([]string, 0, len(qr.Fields))
	for _, f := range qr.Fields {
		sorted_columns = append(sorted_columns, *f.Name)
	}
	sort.Strings(sorted_columns)
	// Collect the OBJECTIDs present in this batch so all existing rows can be
	// fetched with a single query.
	objectids := make([]int, 0)
	for _, l := range qr.Features {
		attr := l.Attributes["OBJECTID"]
		attr_s := attr.String()
		oid, err := strconv.Atoi(attr_s)
		if err != nil {
			log.Warn().Str("attr_s", attr_s).Msg("failed to convert")
			continue
		}
		objectids = append(objectids, oid)
	}
	rows_by_objectid, err := rowmapViaQuery(ctx, table, sorted_columns, objectids)
	if err != nil {
		return inserts, updates, fmt.Errorf("Failed to get existing rows: %w", err)
	}
	// log.Println("Rows from query", len(rows_by_objectid))
	// Second pass: insert features with no matching row, update those whose
	// stored values differ (per hasUpdates).
	for _, feature := range qr.Features {
		attr := feature.Attributes["OBJECTID"]
		attr_s := attr.String()
		oid, err := strconv.Atoi(attr_s)
		if err != nil {
			log.Warn().Str("attr_s", attr_s).Msg("failed to convert")
			continue
		}
		row := rows_by_objectid[oid]
		// If we have no matching row we'll need to create it
		if len(row) == 0 {
			if err := insertRowFromFeature(ctx, table, sorted_columns, &feature, org_id); err != nil {
				return inserts, updates, fmt.Errorf("Failed to insert row: %w", err)
			}
			inserts += 1
		} else if hasUpdates(row, feature) {
			if err := updateRowFromFeature(ctx, table, sorted_columns, &feature, org_id); err != nil {
				return inserts, updates, fmt.Errorf("Failed to update row: %w", err)
			}
			updates += 1
		}
	}
	return inserts, updates, nil
}
// Produces a map of OBJECTID to a 'row' which is in turn a map of column names to their values as strings.
// Rows are fetched in one query constrained to the given objectids; NULL
// columns become "". An absent OBJECTID simply has no entry in the result.
func rowmapViaQuery(ctx context.Context, table string, sorted_columns []string, objectids []int) (map[int]map[string]string, error) {
	result := make(map[int]map[string]string)
	query := selectAllFromQueryResult(table, sorted_columns)
	args := pgx.NamedArgs{
		"objectids": objectids,
	}
	rows, err := db.PGInstance.PGXPool.Query(ctx, query, args)
	if err != nil {
		return result, fmt.Errorf("Failed to query rows: %w", err)
	}
	defer rows.Close()
	// (Removed a dead columnNames slice that appended "geometry_x"/"geometry_y"
	// to sorted_columns but was never read; field names come from the query's
	// FieldDescriptions below.)
	rowSlice, err := pgx.CollectRows(rows, func(row pgx.CollectableRow) (map[string]string, error) {
		fieldDescriptions := row.FieldDescriptions()
		values := make([]interface{}, len(fieldDescriptions))
		valuePtrs := make([]interface{}, len(fieldDescriptions))
		for i := range values {
			valuePtrs[i] = &values[i]
		}
		if err := row.Scan(valuePtrs...); err != nil {
			return nil, err
		}
		// Stringify every column; NULLs map to the empty string.
		result := make(map[string]string)
		for i, fd := range fieldDescriptions {
			if values[i] != nil {
				result[fd.Name] = fmt.Sprintf("%v", values[i])
				//log.Printf("col %v type %T val %v", fd.Name, values[i], values[i])
			} else {
				result[fd.Name] = ""
			}
		}
		return result, nil
	})
	if err != nil {
		return result, fmt.Errorf("Failed to collect rows: %w", err)
	}
	// Key each row map by its (stringified) objectid column.
	for _, row := range rowSlice {
		o := row["objectid"]
		objectid, err := strconv.Atoi(o)
		if err != nil {
			return result, fmt.Errorf("Failed to parse objectid %s: %w", o, err)
		}
		result[objectid] = row
	}
	return result, nil
}
func insertRowFromFeature(ctx context.Context, table string, sorted_columns []string, feature *response.Feature, org_id int32) error { func insertRowFromFeature(ctx context.Context, table string, sorted_columns []string, feature *response.Feature, org_id int32) error {
txn, err := db.PGInstance.BobDB.BeginTx(ctx, nil) txn, err := db.PGInstance.BobDB.BeginTx(ctx, nil)
if err != nil { if err != nil {

View file

@ -1,55 +0,0 @@
package text
import (
"crypto/sha256"
"database/sql"
"encoding/hex"
"fmt"
"sort"
"strings"
"github.com/Gleipnir-Technology/bob/types/pgtypes"
"github.com/Gleipnir-Technology/nidus-sync/db/enums"
)
// convertToPGData converts a plain string map into a pgtypes.HStore, marking
// every value as present (non-NULL).
func convertToPGData(data map[string]string) pgtypes.HStore {
	store := make(pgtypes.HStore, len(data))
	for key, value := range data {
		store[key] = sql.Null[string]{V: value, Valid: true}
	}
	return store
}
// generatePublicId derives a deterministic, opaque public identifier from a
// message type and a set of key/value parameters: the hex-encoded SHA-256 of
// "type:<t>,k1:v1,k2:v2," with keys in sorted order.
// NOTE(review): for a nil or empty map the type is ignored and the hash of
// the empty string is returned, so two different message types with no
// parameters share an ID — confirm this is intended.
func generatePublicId(t enums.CommsMessagetypeemail, m map[string]string) string {
	// len() is defined on nil maps, so the former `m == nil ||` guard was
	// redundant (staticcheck S1009).
	if len(m) == 0 {
		// Return hash of empty string for empty maps (preserves historical IDs).
		emptyHash := sha256.Sum256([]byte(""))
		return hex.EncodeToString(emptyHash[:])
	}
	// Get and sort keys for deterministic ordering
	keys := make([]string, 0, len(m))
	for k := range m {
		keys = append(keys, k)
	}
	sort.Strings(keys)
	// Build a string with all key-value pairs; the type goes first.
	var sb strings.Builder
	sb.WriteString(fmt.Sprintf("type:%s,", t))
	for _, k := range keys {
		sb.WriteString(k)
		sb.WriteString(":") // Separator between key and value
		sb.WriteString(m[k])
		sb.WriteString(",") // Separator between pairs
	}
	// Single-shot SHA-256 replaces the incremental hasher; same digest.
	sum := sha256.Sum256([]byte(sb.String()))
	return hex.EncodeToString(sum[:])
}

View file

@ -33,22 +33,6 @@ func ensureInitialText(ctx context.Context, txn bob.Executor, dst types.E164) er
} }
return sendInitialText(ctx, txn, dst) return sendInitialText(ctx, txn, dst)
} }
// insertTextLog records a single text message in the comms text log within
// the given transaction/executor and returns the inserted row.
// Twilio SID/status start empty — presumably filled in after the message is
// handed to Twilio; TODO confirm against callers.
func insertTextLog(ctx context.Context, txn bob.Executor, destination types.E164, source types.E164, origin enums.CommsTextorigin, content string, is_welcome bool, is_visible_to_llm bool) (l *models.CommsTextLog, err error) {
	l, err = models.CommsTextLogs.Insert(&models.CommsTextLogSetter{
		//ID:
		Content:        omit.From(content),
		Created:        omit.From(time.Now()),
		Destination:    omit.From(destination.PhoneString()),
		IsVisibleToLLM: omit.From(is_visible_to_llm),
		IsWelcome:      omit.From(is_welcome),
		Origin:         omit.From(origin),
		Source:         omit.From(source.PhoneString()),
		TwilioSid:      omitnull.FromPtr[string](nil),
		TwilioStatus:   omit.From(""),
	}).One(ctx, txn)
	return l, err
}
func resendInitialText(ctx context.Context, txn bob.Executor, dst types.E164) error { func resendInitialText(ctx context.Context, txn bob.Executor, dst types.E164) error {
phone, err := models.FindCommsPhone(ctx, txn, dst.PhoneString()) phone, err := models.FindCommsPhone(ctx, txn, dst.PhoneString())
if err != nil { if err != nil {

View file

@ -1,69 +0,0 @@
package rmo
import (
"net/http"
"github.com/Gleipnir-Technology/bob/dialect/psql/sm"
"github.com/Gleipnir-Technology/nidus-sync/config"
"github.com/Gleipnir-Technology/nidus-sync/db"
"github.com/Gleipnir-Technology/nidus-sync/db/models"
"github.com/Gleipnir-Technology/nidus-sync/html"
"github.com/gorilla/mux"
)
// ContentDistrict is the template view-model for a single district entry.
type ContentDistrict struct {
	Name        string // display name of the district
	OfficePhone string // contact phone shown to visitors
	URLLogo     string // URL serving the district's logo image
	URLRMO      string // URL of the district's RMO landing page
	URLWebsite  string // the district's own website; may be empty
}

// ContentDistrictList is the view-model for the district list page.
type ContentDistrictList struct {
	Districts []ContentDistrict
	URL       ContentURL
}
// districtBySlug looks up the Organization whose slug matches the {slug}
// route variable on the request.
func districtBySlug(r *http.Request) (*models.Organization, error) {
	slug := mux.Vars(r)["slug"]
	return models.Organizations.Query(
		models.SelectWhere.Organizations.Slug.EQ(slug),
	).One(r.Context(), db.PGInstance.BobDB)
}
// getDistrictList renders the list of every organization that was imported as
// a district, ordered by name.
func getDistrictList(w http.ResponseWriter, r *http.Request) {
	rows, err := models.Organizations.Query(
		models.SelectWhere.Organizations.ImportDistrictGid.IsNotNull(),
		sm.OrderBy("name"),
	).All(r.Context(), db.PGInstance.BobDB)
	if err != nil {
		respondError(w, "failed to query for districts", err, http.StatusInternalServerError)
		return
	}
	districts := make([]ContentDistrict, 0, len(rows))
	for _, org := range rows {
		districts = append(districts, *newContentDistrict(org))
	}
	html.RenderOrError(w, "rmo/district-list.html", ContentDistrictList{
		Districts: districts,
		URL:       makeContentURL(nil),
	})
}
// newContentDistrict maps a database Organization onto the template-facing
// ContentDistrict. Returns nil for nil input so optional lookups can be
// passed straight through.
func newContentDistrict(d *models.Organization) *ContentDistrict {
	if d == nil {
		return nil
	}
	return &ContentDistrict{
		Name: d.Name,
		// NOTE(review): hard-coded placeholder phone number — replace with the
		// organization's real office phone before this ships.
		OfficePhone: "123-456-7890",
		URLLogo:     config.MakeURLNidus("/api/district/%s/logo", d.Slug.GetOr("unset")),
		URLRMO:      config.MakeURLReport("/district/%s", d.Slug.GetOr("unset")),
		URLWebsite:  d.Website.GetOr(""),
	}
}

View file

@ -1,25 +0,0 @@
package rmo
import (
"net/http"
"github.com/Gleipnir-Technology/nidus-sync/platform/file"
"github.com/google/uuid"
"github.com/gorilla/mux"
)
// getImageByUUID reads an image with the given UUID from disk and writes it
// to the HTTP response. Responds 404 when the {uuid} route variable is
// missing and 400 when it is not a valid UUID.
// NOTE(review): any error from ImageFileToWriter is ignored here — confirm it
// reports failures to the client itself.
func getImageByUUID(w http.ResponseWriter, r *http.Request) {
	vars := mux.Vars(r)
	u := vars["uuid"]
	if u == "" {
		http.NotFound(w, r)
		return
	}
	uid, err := uuid.Parse(u)
	if err != nil {
		http.Error(w, "Failed to parse uuid", http.StatusBadRequest)
		return
	}
	file.ImageFileToWriter(file.CollectionPublicImage, uid, w)
}

View file

@ -1,155 +0,0 @@
package rmo
import (
"context"
"net/http"
"github.com/Gleipnir-Technology/bob"
"github.com/Gleipnir-Technology/bob/dialect/psql"
"github.com/Gleipnir-Technology/bob/dialect/psql/sm"
"github.com/Gleipnir-Technology/nidus-sync/config"
"github.com/Gleipnir-Technology/nidus-sync/db"
"github.com/Gleipnir-Technology/nidus-sync/html"
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
"github.com/gorilla/mux"
"github.com/rs/zerolog/log"
"github.com/stephenafamo/scan"
//"github.com/Gleipnir-Technology/nidus-sync/config"
)
// address holds one joined row of site-address and organization data, scanned
// from the compliance-report query in getMailer.
type address struct {
	Country          string `db:"country"`
	Locality         string `db:"locality"`
	LocationGeoJSON  string `db:"location_geo_json"` // GeoJSON rendering of address.geom
	Number           int32  `db:"number_"`           // column named "number_" — presumably to dodge the SQL keyword; confirm schema
	OrganizationSlug string `db:"slug"`
	PostalCode       string `db:"postal_code"`
	Region           string `db:"region"`
	Street           string `db:"street"`
}

// contentMailer is the view-model shared by all mailer templates.
type contentMailer struct {
	Address  address
	PublicID string
	URLLogo  string
}
// getMailer renders the mailer landing page for one compliance report
// request, identified by the {public_id} route variable. It joins the
// request through lead -> site -> address/organization to pull the mailing
// address and the owning district's slug. Returns 400 when the id is missing
// and 404 when no matching report exists.
func getMailer(ctx context.Context, r *http.Request) (*html.Response[contentMailer], *nhttp.ErrorWithStatus) {
	vars := mux.Vars(r)
	public_id := vars["public_id"]
	if public_id == "" {
		return nil, nhttp.NewErrorStatus(http.StatusBadRequest, "No 'public_id' in the url params")
	}
	/*
		compliance_request, err := models.ComplianceReportRequests.Query(
			models.Preload.ComplianceReportRequest.Site(),
			models.SelectWhere.ComplianceReportRequests.PublicID.EQ(public_id),
		).One(ctx, db.PGInstance.BobDB)
		if err != nil {
			respondError(w, "failed to get compliance request", err, http.StatusBadRequest)
		}
		site := compliance_request.
	*/
	// Hand-built join: compliance_report_request -> lead -> site -> address,
	// plus the owning organization for its slug.
	report, err := bob.One(ctx, db.PGInstance.BobDB, psql.Select(
		sm.Columns(
			"address.number_",
			"address.street",
			"address.locality",
			"ST_AsGeoJSON(address.geom) AS location_geo_json",
			"address.region",
			"address.postal_code",
			"address.country",
			"organization.slug",
		),
		sm.From("compliance_report_request").As("crr"),
		sm.InnerJoin("lead").OnEQ(psql.Quote("crr", "lead_id"), psql.Quote("lead", "id")),
		sm.InnerJoin("site").OnEQ(psql.Quote("lead", "site_id"), psql.Quote("site", "id")),
		sm.InnerJoin("organization").OnEQ(psql.Quote("lead", "organization_id"), psql.Quote("organization", "id")),
		sm.InnerJoin("address").OnEQ(psql.Quote("site", "address_id"), psql.Quote("address", "id")),
		sm.Where(psql.Quote("crr", "public_id").EQ(psql.Arg(public_id))),
	), scan.StructMapper[address]())
	if err != nil {
		log.Warn().Err(err).Msg("failed to get compliance report")
		return nil, nhttp.NewErrorStatus(http.StatusNotFound, "No compliance report with that public ID")
	}
	return html.NewResponse(
		"rmo/mailer/root.html", contentMailer{
			Address:  report,
			PublicID: public_id,
			URLLogo:  config.MakeURLNidus("/api/district/%s/logo", report.OrganizationSlug),
		},
	), nil
}
// mailerTemplateResponse implements the pattern shared by every mailer GET
// handler below: pull {public_id} from the route, reject the request with
// 400 when it is absent, and render the given template with that ID.
func mailerTemplateResponse(r *http.Request, template string) (*html.Response[contentMailer], *nhttp.ErrorWithStatus) {
	public_id := mux.Vars(r)["public_id"]
	if public_id == "" {
		return nil, nhttp.NewErrorStatus(http.StatusBadRequest, "No 'public_id' in the url params")
	}
	return html.NewResponse(template, contentMailer{
		PublicID: public_id,
	}), nil
}

// getMailerConfirm renders the mailer confirmation page.
func getMailerConfirm(ctx context.Context, r *http.Request) (*html.Response[contentMailer], *nhttp.ErrorWithStatus) {
	return mailerTemplateResponse(r, "rmo/mailer/confirm.html")
}

// getMailerContribute renders the mailer contribution page.
func getMailerContribute(ctx context.Context, r *http.Request) (*html.Response[contentMailer], *nhttp.ErrorWithStatus) {
	return mailerTemplateResponse(r, "rmo/mailer/contribute.html")
}

// getMailerEvidence renders the mailer evidence page.
func getMailerEvidence(ctx context.Context, r *http.Request) (*html.Response[contentMailer], *nhttp.ErrorWithStatus) {
	return mailerTemplateResponse(r, "rmo/mailer/evidence.html")
}

// getMailerSchedule renders the mailer scheduling page.
func getMailerSchedule(ctx context.Context, r *http.Request) (*html.Response[contentMailer], *nhttp.ErrorWithStatus) {
	return mailerTemplateResponse(r, "rmo/mailer/schedule.html")
}

// getMailerUpdate renders the mailer update page.
func getMailerUpdate(ctx context.Context, r *http.Request) (*html.Response[contentMailer], *nhttp.ErrorWithStatus) {
	return mailerTemplateResponse(r, "rmo/mailer/update.html")
}
// formMailerConfirm is the (currently empty) form payload for the mailer
// confirmation POST.
type formMailerConfirm struct{}

// postMailerConfirm handles the confirmation POST. It does not yet persist
// anything ("Fake confirm location" is logged) and redirects the visitor to
// the evidence step for the same public ID, or returns 400 when the
// {public_id} route variable is missing.
func postMailerConfirm(ctx context.Context, r *http.Request, form formMailerConfirm) (string, *nhttp.ErrorWithStatus) {
	log.Info().Msg("Fake confirm location")
	vars := mux.Vars(r)
	public_id := vars["public_id"]
	if public_id == "" {
		return "", nhttp.NewErrorStatus(http.StatusBadRequest, "No 'public_id' in the url params")
	}
	return config.MakeURLReport("/mailer/%s/evidence", public_id), nil
}

View file

@ -1,42 +0,0 @@
package rmo
import (
	"fmt"
	"io"
	"net/http"
	"os"
	"path/filepath"

	"github.com/Gleipnir-Technology/nidus-sync/lint"
	"github.com/gorilla/mux"
	"github.com/rs/zerolog/log"
)
// getScssDebug streams a raw SCSS source file from the local "scss" directory
// for debugging. The wildcard route variable supplies the relative path.
// The path is untrusted input and is confined to the scss directory.
func getScssDebug(w http.ResponseWriter, r *http.Request) {
	vars := mux.Vars(r)
	path := vars["*"]
	// Clean("/"+path) resolves any ".." segments against a virtual root
	// before anchoring under "scss/", preventing path traversal; the former
	// bare concatenation allowed requests like "../../etc/passwd".
	full_path := filepath.Join("scss", filepath.Clean("/"+path))
	//log.Debug().Str("path", path).Str("full_path", full_path).Msg("working on SCSS debug")
	file, err := os.Open(full_path)
	if err != nil {
		respondError(w, "failed to open file", err, http.StatusInternalServerError)
		return
	}
	defer lint.LogOnErr(file.Close, "close scss file")
	fileInfo, err := file.Stat()
	if err != nil {
		respondError(w, "failed to stat file", err, http.StatusInternalServerError)
		return
	}
	// Set appropriate headers
	w.Header().Set("Content-Type", "text/scss")
	w.Header().Set("Content-Length", fmt.Sprintf("%d", fileInfo.Size()))
	// Copy file contents to response writer
	_, err = io.Copy(w, file)
	if err != nil {
		// Note: At this point, we've already started writing the response,
		// so we can't change the status code anymore. The best we can do
		// is log the error and abandon the connection.
		log.Warn().Str("path", path).Msg("Failed to write scss file to output")
	}
}

View file

@ -1,38 +0,0 @@
package rmo
import (
"net/http"
"github.com/Gleipnir-Technology/nidus-sync/html"
)
// ContentWater is the view-model for the water page. District is nil when no
// specific district is selected.
type ContentWater struct {
	District *ContentDistrict
	URL      ContentURL
}
func getWater(w http.ResponseWriter, r *http.Request) {
html.RenderOrError(
w,
"rmo/water.html",
ContentWater{
District: nil,
URL: makeContentURL(nil),
},
)
}
// getWaterDistrict renders the water page for the district identified by the
// {slug} route variable; a failed lookup yields a 400 response.
func getWaterDistrict(w http.ResponseWriter, r *http.Request) {
	district, err := districtBySlug(r)
	if err != nil {
		respondError(w, "Failed to lookup organization", err, http.StatusBadRequest)
		return
	}
	content := ContentWater{
		District: newContentDistrict(district),
		URL:      makeContentURL(district),
	}
	html.RenderOrError(w, "rmo/water.html", content)
}