Add basic average cadence calculation to treatments

This commit is contained in:
Eli Ribble 2025-11-21 05:46:31 +00:00
parent 3a1db11e47
commit d3b9d34bd2
No known key found for this signature in database
4 changed files with 172 additions and 21 deletions

140
html.go
View file

@ -9,6 +9,7 @@ import (
"html/template"
"io"
"log/slog"
"math"
"net/http"
"os"
"strconv"
@ -18,8 +19,7 @@ import (
"github.com/Gleipnir-Technology/nidus-sync/models"
"github.com/Gleipnir-Technology/nidus-sync/sql"
"github.com/aarondl/opt/null"
//"github.com/riverqueue/river/rivershared/util/slogutil"
//"github.com/rs/zerolog/log"
"github.com/rs/zerolog/log"
"github.com/stephenafamo/bob"
"github.com/stephenafamo/bob/dialect/psql"
"github.com/stephenafamo/bob/dialect/psql/sm"
@ -123,12 +123,13 @@ type ContentSignin struct {
}
type ContentSignup struct{}
type ContentSource struct {
Inspections []Inspection
MapData ComponentMap
Source *BreedingSourceDetail
Traps []TrapNearby
Treatments []Treatment
User User
Inspections []Inspection
MapData ComponentMap
Source *BreedingSourceDetail
Traps []TrapNearby
Treatments []Treatment
TreatmentCadence time.Duration
User User
}
type Inspection struct {
Action string
@ -146,12 +147,6 @@ type ServiceRequestSummary struct {
Location string
Status string
}
type Treatment struct {
Date time.Time
LocationID string
Notes string
Product string
}
type User struct {
DisplayName string
Initials string
@ -511,6 +506,15 @@ func htmlSource(w http.ResponseWriter, r *http.Request, user *models.User, id st
respondError(w, "Failed to get treatments", err, http.StatusInternalServerError)
return
}
treatment_times := make([]time.Time, 0)
for _, treatment := range treatments {
treatment_times = append(treatment_times, treatment.Date)
}
cadence, deltas := calculateCadenceVariance(treatment_times)
for i, treatment := range treatments {
treatment.CadenceDelta = deltas[i]
treatments[i] = treatment
}
data := ContentSource{
Inspections: inspections,
MapData: ComponentMap{
@ -524,10 +528,11 @@ func htmlSource(w http.ResponseWriter, r *http.Request, user *models.User, id st
},
Zoom: 13,
},
Source: s,
Traps: traps,
Treatments: treatments,
User: userContent,
Source: s,
Traps: traps,
Treatments: treatments,
TreatmentCadence: cadence,
User: userContent,
}
renderOrError(w, source, data)
@ -569,7 +574,9 @@ func makeFuncMap() template.FuncMap {
"bigNumber": bigNumber,
"GISStatement": gisStatement,
"latLngDisplay": latLngDisplay,
"timeDelta": timeDelta,
"timeElapsed": timeElapsed,
"timeInterval": timeInterval,
"timeSince": timeSince,
"uuidShort": uuidShort,
}
@ -625,7 +632,6 @@ func parseFromDisk(files []string) (*template.Template, error) {
for _, f := range components {
paths = append(paths, "templates/components/"+f+".html")
}
//slog.Info("Rendering templates from disk", slog.Any("paths", slogutil.SliceString(paths)))
templ, err := template.New(name).Funcs(funcMap).ParseFiles(paths...)
if err != nil {
return nil, fmt.Errorf("Failed to parse %s: %w", paths, err)
@ -633,6 +639,59 @@ func parseFromDisk(files []string) (*template.Template, error) {
return templ, nil
}
// timeDelta returns a human-readable string describing a time.Duration as
// "X units early" or "X units late", choosing the largest unit (weeks down
// to seconds) that fits. A zero duration renders as "on time".
func timeDelta(d time.Duration) string {
	// The reference treatment in a cadence sequence always has a deviation
	// of exactly zero; without this guard it rendered as "0 seconds late".
	if d == 0 {
		return "on time"
	}
	suffix := "late"
	if d < 0 {
		suffix = "early"
		d = -d // Make duration positive for calculations
	}
	const (
		day  = 24 * time.Hour
		week = 7 * day
	)
	// Debug level: this runs once per rendered treatment table row.
	log.Debug().Int64("delta", int64(d)).Str("suffix", suffix).Msg("Time delta")
	switch {
	case d >= week:
		weeks := d / week
		if weeks == 1 {
			return "1 week " + suffix
		}
		return fmt.Sprintf("%d weeks %s", weeks, suffix)
	case d >= day:
		days := d / day
		if days == 1 {
			return "1 day " + suffix
		}
		return fmt.Sprintf("%d days %s", days, suffix)
	case d >= time.Hour:
		hours := d / time.Hour
		if hours == 1 {
			return "1 hour " + suffix
		}
		return fmt.Sprintf("%d hours %s", hours, suffix)
	case d >= time.Minute:
		minutes := d / time.Minute
		if minutes == 1 {
			return "1 minute " + suffix
		}
		return fmt.Sprintf("%d minutes %s", minutes, suffix)
	default:
		seconds := d / time.Second
		if seconds == 1 {
			return "1 second " + suffix
		}
		return fmt.Sprintf("%d seconds %s", seconds, suffix)
	}
}
func timeElapsed(seconds null.Val[float32]) string {
if !seconds.IsValue() {
return "none"
@ -651,6 +710,47 @@ func timeElapsed(seconds null.Val[float32]) string {
}
}
func timeInterval(d time.Duration) string {
seconds := d.Seconds()
// Less than 120 seconds -> show in seconds
if seconds < 120 {
return fmt.Sprintf("every %d seconds", int(math.Round(seconds)))
}
minutes := d.Minutes()
// Less than 120 minutes -> show in minutes
if minutes < 120 {
return fmt.Sprintf("every %d minutes", int(math.Round(minutes)))
}
hours := d.Hours()
// Less than 48 hours -> show in hours
if hours < 48 {
return fmt.Sprintf("every %d hours", int(math.Round(hours)))
}
days := hours / 24
// Less than 14 days -> show in days
if days < 14 {
return fmt.Sprintf("every %d days", int(math.Round(days)))
}
weeks := days / 7
// Less than 8 weeks -> show in weeks
if weeks < 8 {
return fmt.Sprintf("every %d weeks", int(math.Round(weeks)))
}
months := days / 30
// Less than 24 months -> show in months
if months < 24 {
return fmt.Sprintf("every %d months", int(math.Round(months)))
}
years := days / 365
return fmt.Sprintf("every %d years", int(math.Round(years)))
}
func timeSince(t *time.Time) string {
if t == nil {
return "never"
@ -721,7 +821,7 @@ func treatmentsBySource(ctx context.Context, org *models.Organization, sourceID
sm.Where(
models.FSTreatments.Columns.Pointlocid.EQ(psql.Arg(sourceID)),
),
sm.OrderBy("enddatetime"),
sm.OrderBy("enddatetime").Desc(),
).All(ctx, PGInstance.BobDB)
if err != nil {
return results, fmt.Errorf("Failed to query rows: %w", err)

View file

@ -154,6 +154,14 @@ type TrapData struct {
Comments string `json:"comments"`
}
// Treatment is the template-facing view of a single treatment record.
type Treatment struct {
	// CadenceDelta is how far this treatment deviates from an evenly-spaced
	// treatment schedule (negative = earlier than expected, positive = later).
	CadenceDelta time.Duration
	// Date is when the treatment occurred.
	Date time.Time
	// LocationID identifies the treated location.
	LocationID string
	Notes      string
	Product    string
}
func toTemplateTraps(locations []sql.TrapLocationBySourceIDRow, trap_data models.FSTrapdatumSlice, counts []sql.TrapCountByLocationIDRow) ([]TrapNearby, error) {
results := make([]TrapNearby, 0)
count_by_trap_data_id := make(map[string]*sql.TrapCountByLocationIDRow)

View file

@ -251,6 +251,7 @@
<div class="card mb-4">
<div class="card-body">
<div class="table-responsive">
<p>Estimated Treatment Cadence: {{.TreatmentCadence|timeInterval}}</p>
<table class="table table-striped table-hover">
<thead>
<tr>
@ -265,7 +266,7 @@
<tr>
<td>{{.Date|timeSince}}</td>
<td>{{.Product}}</td>
<td class="time-delta-neutral">On time</td>
<td class="time-delta-neutral">{{.CadenceDelta|timeDelta}}</td>
<td>{{.Notes}}</td>
</tr>
{{ end }}

42
time.go Normal file
View file

@ -0,0 +1,42 @@
package main
import (
"time"
"github.com/rs/zerolog/log"
)
// calculateCadenceVariance takes a slice of time.Time instances and returns
// the average time span between consecutive instances and how much each
// instance deviates from its expected position in an evenly-spaced sequence
// timestamps should be in descending order (most recent time in index 0)
// calculateCadenceVariance takes a slice of time.Time instances and returns
// the average time span between consecutive instances and how much each
// instance deviates from its expected position in an evenly-spaced sequence.
//
// timestamps must be in descending order (most recent time at index 0).
// The returned deviations slice always has exactly len(timestamps) entries
// so callers can index it in parallel with their own slice; with fewer than
// two timestamps, or with out-of-order input, every deviation is zero and
// the average span is 0.
func calculateCadenceVariance(timestamps []time.Time) (averageSpan time.Duration, deviations []time.Duration) {
	// Size deviations to match the input unconditionally: callers iterate
	// their own slice and index deviations[i], so returning a shorter slice
	// (as the early-return paths previously did) panics with a single entry.
	deviations = make([]time.Duration, len(timestamps))
	if len(timestamps) <= 1 {
		return 0, deviations
	}
	// Total span between the newest (index 0) and oldest (last) timestamp.
	totalSpan := timestamps[0].Sub(timestamps[len(timestamps)-1])
	// Average span between consecutive timestamps.
	averageSpan = totalSpan / time.Duration(len(timestamps)-1)
	if averageSpan < 0 {
		// Input was not in descending order; report it and fall back to
		// all-zero deviations rather than producing nonsense values.
		log.Error().Int64("average", int64(averageSpan)).Msg("Negative average")
		return 0, deviations
	}
	// The first timestamp is the reference point (deviation = 0).
	deviations[0] = 0
	// Compare each timestamp against where it would sit in a perfectly
	// evenly-spaced sequence anchored at the newest timestamp.
	for i := 1; i < len(timestamps); i++ {
		expectedTime := timestamps[0].Add(-averageSpan * time.Duration(i))
		deviations[i] = timestamps[i].Sub(expectedTime)
	}
	// Debug level: this runs on every source-detail page render.
	log.Debug().Int64("average", int64(averageSpan)).Int("number deviations", len(deviations)).Msg("Calculated cadence")
	return averageSpan, deviations
}