nidus-sync/db/models/fieldseeker.samplecollection.bob.go
Eli Ribble 06140a9062
Remove bob submodule, add arcgis.user
I had to remove the submodule because of the go bug at
https://github.com/golang/go/issues/77196
I found the bug because of a bug in bob itself
https://github.com/stephenafamo/bob/issues/610
This came up because I'm trying to save data about the ArcGIS user, for use
in determining whether I can set up hooks and so avoid polling for data
changes.
2026-01-15 19:20:39 +00:00

1863 lines
59 KiB
Go

// Code generated by BobGen psql v0.42.1. DO NOT EDIT.
// This file is meant to be re-generated in place and/or deleted at any time.
package models
import (
"context"
"encoding/json"
"fmt"
"io"
"time"
"github.com/aarondl/opt/null"
"github.com/aarondl/opt/omit"
"github.com/aarondl/opt/omitnull"
"github.com/google/uuid"
"github.com/stephenafamo/bob"
"github.com/stephenafamo/bob/dialect/psql"
"github.com/stephenafamo/bob/dialect/psql/dialect"
"github.com/stephenafamo/bob/dialect/psql/dm"
"github.com/stephenafamo/bob/dialect/psql/sm"
"github.com/stephenafamo/bob/dialect/psql/um"
"github.com/stephenafamo/bob/expr"
"github.com/stephenafamo/bob/mods"
"github.com/stephenafamo/bob/orm"
"github.com/stephenafamo/bob/types"
"github.com/stephenafamo/bob/types/pgtypes"
)
// FieldseekerSamplecollection is an object representing the database table.
// It maps one row of "fieldseeker"."samplecollection"; most columns are
// null.Val because they mirror optional ArcGIS attributes.
type FieldseekerSamplecollection struct {
	// ArcGIS object id; not part of the primary key (globalid+version carry the pk tags below).
	Objectid int64 `db:"objectid" `
	// Original attribute from ArcGIS API is LOC_ID
	LocID null.Val[uuid.UUID] `db:"loc_id" `
	// Original attribute from ArcGIS API is STARTDATETIME
	Startdatetime null.Val[time.Time] `db:"startdatetime" `
	// Original attribute from ArcGIS API is ENDDATETIME
	Enddatetime null.Val[time.Time] `db:"enddatetime" `
	// Original attribute from ArcGIS API is SITECOND
	Sitecond null.Val[string] `db:"sitecond" `
	// Original attribute from ArcGIS API is SAMPLEID
	Sampleid null.Val[string] `db:"sampleid" `
	// Original attribute from ArcGIS API is SURVTECH
	Survtech null.Val[string] `db:"survtech" `
	// Original attribute from ArcGIS API is DATESENT
	Datesent null.Val[time.Time] `db:"datesent" `
	// Original attribute from ArcGIS API is DATETESTED
	Datetested null.Val[time.Time] `db:"datetested" `
	// Original attribute from ArcGIS API is TESTTECH
	Testtech null.Val[string] `db:"testtech" `
	// Original attribute from ArcGIS API is COMMENTS
	Comments null.Val[string] `db:"comments" `
	// Original attribute from ArcGIS API is PROCESSED
	Processed null.Val[int16] `db:"processed" `
	// Original attribute from ArcGIS API is SAMPLETYPE
	Sampletype null.Val[string] `db:"sampletype" `
	// Original attribute from ArcGIS API is SAMPLECOND
	Samplecond null.Val[string] `db:"samplecond" `
	// Original attribute from ArcGIS API is SPECIES
	Species null.Val[string] `db:"species" `
	// Original attribute from ArcGIS API is SEX
	Sex null.Val[string] `db:"sex" `
	// Original attribute from ArcGIS API is AVETEMP
	Avetemp null.Val[float64] `db:"avetemp" `
	// Original attribute from ArcGIS API is WINDSPEED
	Windspeed null.Val[float64] `db:"windspeed" `
	// Original attribute from ArcGIS API is WINDDIR
	Winddir null.Val[string] `db:"winddir" `
	// Original attribute from ArcGIS API is RAINGAUGE
	Raingauge null.Val[float64] `db:"raingauge" `
	// Original attribute from ArcGIS API is ACTIVITY
	Activity null.Val[string] `db:"activity" `
	// Original attribute from ArcGIS API is TESTMETHOD
	Testmethod null.Val[string] `db:"testmethod" `
	// Original attribute from ArcGIS API is DISEASETESTED
	Diseasetested null.Val[string] `db:"diseasetested" `
	// Original attribute from ArcGIS API is DISEASEPOS
	Diseasepos null.Val[string] `db:"diseasepos" `
	// Original attribute from ArcGIS API is REVIEWED
	Reviewed null.Val[int16] `db:"reviewed" `
	// Original attribute from ArcGIS API is REVIEWEDBY
	Reviewedby null.Val[string] `db:"reviewedby" `
	// Original attribute from ArcGIS API is REVIEWEDDATE
	Revieweddate null.Val[time.Time] `db:"revieweddate" `
	// Original attribute from ArcGIS API is LOCATIONNAME
	Locationname null.Val[string] `db:"locationname" `
	// Original attribute from ArcGIS API is ZONE
	Zone null.Val[string] `db:"zone" `
	// Original attribute from ArcGIS API is RECORDSTATUS
	Recordstatus null.Val[int16] `db:"recordstatus" `
	// Original attribute from ArcGIS API is ZONE2
	Zone2 null.Val[string] `db:"zone2" `
	// Original attribute from ArcGIS API is GlobalID
	// First half of the composite primary key (see the ",pk" tag).
	Globalid uuid.UUID `db:"globalid,pk" `
	// Original attribute from ArcGIS API is created_user
	CreatedUser null.Val[string] `db:"created_user" `
	// Original attribute from ArcGIS API is created_date
	CreatedDate null.Val[time.Time] `db:"created_date" `
	// Original attribute from ArcGIS API is last_edited_user
	LastEditedUser null.Val[string] `db:"last_edited_user" `
	// Original attribute from ArcGIS API is last_edited_date
	LastEditedDate null.Val[time.Time] `db:"last_edited_date" `
	// Original attribute from ArcGIS API is LAB
	Lab null.Val[string] `db:"lab" `
	// Original attribute from ArcGIS API is FIELDTECH
	Fieldtech null.Val[string] `db:"fieldtech" `
	// Original attribute from ArcGIS API is FLOCKID
	Flockid null.Val[uuid.UUID] `db:"flockid" `
	// Original attribute from ArcGIS API is SAMPLECOUNT
	Samplecount null.Val[int16] `db:"samplecount" `
	// Original attribute from ArcGIS API is CHICKENID
	Chickenid null.Val[uuid.UUID] `db:"chickenid" `
	// Original attribute from ArcGIS API is GATEWAYSYNC
	Gatewaysync null.Val[int16] `db:"gatewaysync" `
	// Original attribute from ArcGIS API is CreationDate
	Creationdate null.Val[time.Time] `db:"creationdate" `
	// Original attribute from ArcGIS API is Creator
	Creator null.Val[string] `db:"creator" `
	// Original attribute from ArcGIS API is EditDate
	Editdate null.Val[time.Time] `db:"editdate" `
	// Original attribute from ArcGIS API is Editor
	Editor null.Val[string] `db:"editor" `
	// Raw geometry payload stored as JSON (exact format — GeoJSON vs Esri JSON — is not visible here; TODO confirm).
	Geometry types.JSON[json.RawMessage] `db:"geometry" `
	// Geospatial text representation; format not visible in this file — TODO confirm against the schema.
	Geospatial null.Val[string] `db:"geospatial" `
	// Second half of the composite primary key, together with Globalid.
	Version int32 `db:"version,pk" `
	// Owning organization; matched by the samplecollection_organization_id_fkey
	// relationship declared in fieldseekerSamplecollectionR.
	OrganizationID int32 `db:"organization_id" `
	// R holds loaded relationships; db:"-" keeps it out of column mapping.
	R fieldseekerSamplecollectionR `db:"-" `
}
// FieldseekerSamplecollectionSlice is an alias for a slice of pointers to FieldseekerSamplecollection.
// This should almost always be used instead of []*FieldseekerSamplecollection.
// It is the slice type parameter of the FieldseekerSamplecollections table below.
type FieldseekerSamplecollectionSlice []*FieldseekerSamplecollection
// FieldseekerSamplecollections contains methods to work with the samplecollection table
// ("fieldseeker"."samplecollection"); its column expressions are supplied by
// buildFieldseekerSamplecollectionColumns.
var FieldseekerSamplecollections = psql.NewTablex[*FieldseekerSamplecollection, FieldseekerSamplecollectionSlice, *FieldseekerSamplecollectionSetter]("fieldseeker", "samplecollection", buildFieldseekerSamplecollectionColumns("fieldseeker.samplecollection"))
// FieldseekerSamplecollectionsQuery is a query on the samplecollection table,
// yielding FieldseekerSamplecollection rows (singly or as a slice).
type FieldseekerSamplecollectionsQuery = *psql.ViewQuery[*FieldseekerSamplecollection, FieldseekerSamplecollectionSlice]
// fieldseekerSamplecollectionR is where relationships are stored.
// Fields are nil until the corresponding relationship has been loaded.
type fieldseekerSamplecollectionR struct {
	Organization *Organization // fieldseeker.samplecollection.samplecollection_organization_id_fkey
}
// buildFieldseekerSamplecollectionColumns constructs the column-expression
// bundle for the samplecollection table, with every column expression
// qualified by the supplied table alias.
func buildFieldseekerSamplecollectionColumns(alias string) fieldseekerSamplecollectionColumns {
	// quote qualifies a single column name with the table alias.
	quote := func(column string) psql.Expression {
		return psql.Quote(alias, column)
	}
	return fieldseekerSamplecollectionColumns{
		ColumnsExpr: expr.NewColumnsExpr(
			"objectid", "loc_id", "startdatetime", "enddatetime", "sitecond", "sampleid", "survtech", "datesent", "datetested", "testtech", "comments", "processed", "sampletype", "samplecond", "species", "sex", "avetemp", "windspeed", "winddir", "raingauge", "activity", "testmethod", "diseasetested", "diseasepos", "reviewed", "reviewedby", "revieweddate", "locationname", "zone", "recordstatus", "zone2", "globalid", "created_user", "created_date", "last_edited_user", "last_edited_date", "lab", "fieldtech", "flockid", "samplecount", "chickenid", "gatewaysync", "creationdate", "creator", "editdate", "editor", "geometry", "geospatial", "version", "organization_id",
		).WithParent("fieldseeker.samplecollection"),
		tableAlias:     alias,
		Objectid:       quote("objectid"),
		LocID:          quote("loc_id"),
		Startdatetime:  quote("startdatetime"),
		Enddatetime:    quote("enddatetime"),
		Sitecond:       quote("sitecond"),
		Sampleid:       quote("sampleid"),
		Survtech:       quote("survtech"),
		Datesent:       quote("datesent"),
		Datetested:     quote("datetested"),
		Testtech:       quote("testtech"),
		Comments:       quote("comments"),
		Processed:      quote("processed"),
		Sampletype:     quote("sampletype"),
		Samplecond:     quote("samplecond"),
		Species:        quote("species"),
		Sex:            quote("sex"),
		Avetemp:        quote("avetemp"),
		Windspeed:      quote("windspeed"),
		Winddir:        quote("winddir"),
		Raingauge:      quote("raingauge"),
		Activity:       quote("activity"),
		Testmethod:     quote("testmethod"),
		Diseasetested:  quote("diseasetested"),
		Diseasepos:     quote("diseasepos"),
		Reviewed:       quote("reviewed"),
		Reviewedby:     quote("reviewedby"),
		Revieweddate:   quote("revieweddate"),
		Locationname:   quote("locationname"),
		Zone:           quote("zone"),
		Recordstatus:   quote("recordstatus"),
		Zone2:          quote("zone2"),
		Globalid:       quote("globalid"),
		CreatedUser:    quote("created_user"),
		CreatedDate:    quote("created_date"),
		LastEditedUser: quote("last_edited_user"),
		LastEditedDate: quote("last_edited_date"),
		Lab:            quote("lab"),
		Fieldtech:      quote("fieldtech"),
		Flockid:        quote("flockid"),
		Samplecount:    quote("samplecount"),
		Chickenid:      quote("chickenid"),
		Gatewaysync:    quote("gatewaysync"),
		Creationdate:   quote("creationdate"),
		Creator:        quote("creator"),
		Editdate:       quote("editdate"),
		Editor:         quote("editor"),
		Geometry:       quote("geometry"),
		Geospatial:     quote("geospatial"),
		Version:        quote("version"),
		OrganizationID: quote("organization_id"),
	}
}
// fieldseekerSamplecollectionColumns exposes one alias-qualified expression
// per column of the samplecollection table, for use when building queries.
// Construct it via buildFieldseekerSamplecollectionColumns.
type fieldseekerSamplecollectionColumns struct {
	// ColumnsExpr carries the full ordered column list with its parent table.
	expr.ColumnsExpr
	// tableAlias is the alias every expression below is quoted with.
	tableAlias string
	Objectid psql.Expression
	LocID psql.Expression
	Startdatetime psql.Expression
	Enddatetime psql.Expression
	Sitecond psql.Expression
	Sampleid psql.Expression
	Survtech psql.Expression
	Datesent psql.Expression
	Datetested psql.Expression
	Testtech psql.Expression
	Comments psql.Expression
	Processed psql.Expression
	Sampletype psql.Expression
	Samplecond psql.Expression
	Species psql.Expression
	Sex psql.Expression
	Avetemp psql.Expression
	Windspeed psql.Expression
	Winddir psql.Expression
	Raingauge psql.Expression
	Activity psql.Expression
	Testmethod psql.Expression
	Diseasetested psql.Expression
	Diseasepos psql.Expression
	Reviewed psql.Expression
	Reviewedby psql.Expression
	Revieweddate psql.Expression
	Locationname psql.Expression
	Zone psql.Expression
	Recordstatus psql.Expression
	Zone2 psql.Expression
	Globalid psql.Expression
	CreatedUser psql.Expression
	CreatedDate psql.Expression
	LastEditedUser psql.Expression
	LastEditedDate psql.Expression
	Lab psql.Expression
	Fieldtech psql.Expression
	Flockid psql.Expression
	Samplecount psql.Expression
	Chickenid psql.Expression
	Gatewaysync psql.Expression
	Creationdate psql.Expression
	Creator psql.Expression
	Editdate psql.Expression
	Editor psql.Expression
	Geometry psql.Expression
	Geospatial psql.Expression
	Version psql.Expression
	OrganizationID psql.Expression
}
// Alias returns the table alias these column expressions are qualified with.
func (c fieldseekerSamplecollectionColumns) Alias() string {
	alias := c.tableAlias
	return alias
}
// AliasedAs rebuilds the full column set, qualified with a different table alias.
func (fieldseekerSamplecollectionColumns) AliasedAs(alias string) fieldseekerSamplecollectionColumns {
	cols := buildFieldseekerSamplecollectionColumns(alias)
	return cols
}
// FieldseekerSamplecollectionSetter is used for insert/upsert/update operations
// All values are optional, and do not have to be set
// Generated columns are not included
//
// omit.Val fields map to non-nullable columns; omitnull.Val fields can
// additionally be set to SQL NULL. The fields tagged ",pk" (Globalid and
// Version) form the composite primary key.
type FieldseekerSamplecollectionSetter struct {
	Objectid omit.Val[int64] `db:"objectid" `
	LocID omitnull.Val[uuid.UUID] `db:"loc_id" `
	Startdatetime omitnull.Val[time.Time] `db:"startdatetime" `
	Enddatetime omitnull.Val[time.Time] `db:"enddatetime" `
	Sitecond omitnull.Val[string] `db:"sitecond" `
	Sampleid omitnull.Val[string] `db:"sampleid" `
	Survtech omitnull.Val[string] `db:"survtech" `
	Datesent omitnull.Val[time.Time] `db:"datesent" `
	Datetested omitnull.Val[time.Time] `db:"datetested" `
	Testtech omitnull.Val[string] `db:"testtech" `
	Comments omitnull.Val[string] `db:"comments" `
	Processed omitnull.Val[int16] `db:"processed" `
	Sampletype omitnull.Val[string] `db:"sampletype" `
	Samplecond omitnull.Val[string] `db:"samplecond" `
	Species omitnull.Val[string] `db:"species" `
	Sex omitnull.Val[string] `db:"sex" `
	Avetemp omitnull.Val[float64] `db:"avetemp" `
	Windspeed omitnull.Val[float64] `db:"windspeed" `
	Winddir omitnull.Val[string] `db:"winddir" `
	Raingauge omitnull.Val[float64] `db:"raingauge" `
	Activity omitnull.Val[string] `db:"activity" `
	Testmethod omitnull.Val[string] `db:"testmethod" `
	Diseasetested omitnull.Val[string] `db:"diseasetested" `
	Diseasepos omitnull.Val[string] `db:"diseasepos" `
	Reviewed omitnull.Val[int16] `db:"reviewed" `
	Reviewedby omitnull.Val[string] `db:"reviewedby" `
	Revieweddate omitnull.Val[time.Time] `db:"revieweddate" `
	Locationname omitnull.Val[string] `db:"locationname" `
	Zone omitnull.Val[string] `db:"zone" `
	Recordstatus omitnull.Val[int16] `db:"recordstatus" `
	Zone2 omitnull.Val[string] `db:"zone2" `
	Globalid omit.Val[uuid.UUID] `db:"globalid,pk" `
	CreatedUser omitnull.Val[string] `db:"created_user" `
	CreatedDate omitnull.Val[time.Time] `db:"created_date" `
	LastEditedUser omitnull.Val[string] `db:"last_edited_user" `
	LastEditedDate omitnull.Val[time.Time] `db:"last_edited_date" `
	Lab omitnull.Val[string] `db:"lab" `
	Fieldtech omitnull.Val[string] `db:"fieldtech" `
	Flockid omitnull.Val[uuid.UUID] `db:"flockid" `
	Samplecount omitnull.Val[int16] `db:"samplecount" `
	Chickenid omitnull.Val[uuid.UUID] `db:"chickenid" `
	Gatewaysync omitnull.Val[int16] `db:"gatewaysync" `
	Creationdate omitnull.Val[time.Time] `db:"creationdate" `
	Creator omitnull.Val[string] `db:"creator" `
	Editdate omitnull.Val[time.Time] `db:"editdate" `
	Editor omitnull.Val[string] `db:"editor" `
	Geometry omit.Val[types.JSON[json.RawMessage]] `db:"geometry" `
	Geospatial omitnull.Val[string] `db:"geospatial" `
	Version omit.Val[int32] `db:"version,pk" `
	OrganizationID omit.Val[int32] `db:"organization_id" `
}
// SetColumns returns, in table order, the names of the columns this setter
// would write: omit.Val fields count as set when IsValue reports true,
// omitnull.Val fields whenever they are not unset (a value OR explicit null).
func (s FieldseekerSamplecollectionSetter) SetColumns() []string {
	cols := make([]string, 0, 50)
	// include appends col when set is true; it keeps the 50 checks below flat.
	include := func(set bool, col string) {
		if set {
			cols = append(cols, col)
		}
	}
	include(s.Objectid.IsValue(), "objectid")
	include(!s.LocID.IsUnset(), "loc_id")
	include(!s.Startdatetime.IsUnset(), "startdatetime")
	include(!s.Enddatetime.IsUnset(), "enddatetime")
	include(!s.Sitecond.IsUnset(), "sitecond")
	include(!s.Sampleid.IsUnset(), "sampleid")
	include(!s.Survtech.IsUnset(), "survtech")
	include(!s.Datesent.IsUnset(), "datesent")
	include(!s.Datetested.IsUnset(), "datetested")
	include(!s.Testtech.IsUnset(), "testtech")
	include(!s.Comments.IsUnset(), "comments")
	include(!s.Processed.IsUnset(), "processed")
	include(!s.Sampletype.IsUnset(), "sampletype")
	include(!s.Samplecond.IsUnset(), "samplecond")
	include(!s.Species.IsUnset(), "species")
	include(!s.Sex.IsUnset(), "sex")
	include(!s.Avetemp.IsUnset(), "avetemp")
	include(!s.Windspeed.IsUnset(), "windspeed")
	include(!s.Winddir.IsUnset(), "winddir")
	include(!s.Raingauge.IsUnset(), "raingauge")
	include(!s.Activity.IsUnset(), "activity")
	include(!s.Testmethod.IsUnset(), "testmethod")
	include(!s.Diseasetested.IsUnset(), "diseasetested")
	include(!s.Diseasepos.IsUnset(), "diseasepos")
	include(!s.Reviewed.IsUnset(), "reviewed")
	include(!s.Reviewedby.IsUnset(), "reviewedby")
	include(!s.Revieweddate.IsUnset(), "revieweddate")
	include(!s.Locationname.IsUnset(), "locationname")
	include(!s.Zone.IsUnset(), "zone")
	include(!s.Recordstatus.IsUnset(), "recordstatus")
	include(!s.Zone2.IsUnset(), "zone2")
	include(s.Globalid.IsValue(), "globalid")
	include(!s.CreatedUser.IsUnset(), "created_user")
	include(!s.CreatedDate.IsUnset(), "created_date")
	include(!s.LastEditedUser.IsUnset(), "last_edited_user")
	include(!s.LastEditedDate.IsUnset(), "last_edited_date")
	include(!s.Lab.IsUnset(), "lab")
	include(!s.Fieldtech.IsUnset(), "fieldtech")
	include(!s.Flockid.IsUnset(), "flockid")
	include(!s.Samplecount.IsUnset(), "samplecount")
	include(!s.Chickenid.IsUnset(), "chickenid")
	include(!s.Gatewaysync.IsUnset(), "gatewaysync")
	include(!s.Creationdate.IsUnset(), "creationdate")
	include(!s.Creator.IsUnset(), "creator")
	include(!s.Editdate.IsUnset(), "editdate")
	include(!s.Editor.IsUnset(), "editor")
	include(s.Geometry.IsValue(), "geometry")
	include(!s.Geospatial.IsUnset(), "geospatial")
	include(s.Version.IsValue(), "version")
	include(s.OrganizationID.IsValue(), "organization_id")
	return cols
}
// Overwrite copies every set value from the setter onto t, leaving the other
// fields of t untouched. omit.Val fields (objectid, globalid, geometry,
// version, organization_id) are copied when IsValue is true; omitnull.Val
// fields are copied (value or explicit null) whenever they are not unset.
func (s FieldseekerSamplecollectionSetter) Overwrite(t *FieldseekerSamplecollection) {
	if s.Objectid.IsValue() {
		t.Objectid = s.Objectid.MustGet()
	}
	if !s.LocID.IsUnset() {
		t.LocID = s.LocID.MustGetNull()
	}
	if !s.Startdatetime.IsUnset() {
		t.Startdatetime = s.Startdatetime.MustGetNull()
	}
	if !s.Enddatetime.IsUnset() {
		t.Enddatetime = s.Enddatetime.MustGetNull()
	}
	if !s.Sitecond.IsUnset() {
		t.Sitecond = s.Sitecond.MustGetNull()
	}
	if !s.Sampleid.IsUnset() {
		t.Sampleid = s.Sampleid.MustGetNull()
	}
	if !s.Survtech.IsUnset() {
		t.Survtech = s.Survtech.MustGetNull()
	}
	if !s.Datesent.IsUnset() {
		t.Datesent = s.Datesent.MustGetNull()
	}
	if !s.Datetested.IsUnset() {
		t.Datetested = s.Datetested.MustGetNull()
	}
	if !s.Testtech.IsUnset() {
		t.Testtech = s.Testtech.MustGetNull()
	}
	if !s.Comments.IsUnset() {
		t.Comments = s.Comments.MustGetNull()
	}
	if !s.Processed.IsUnset() {
		t.Processed = s.Processed.MustGetNull()
	}
	if !s.Sampletype.IsUnset() {
		t.Sampletype = s.Sampletype.MustGetNull()
	}
	if !s.Samplecond.IsUnset() {
		t.Samplecond = s.Samplecond.MustGetNull()
	}
	if !s.Species.IsUnset() {
		t.Species = s.Species.MustGetNull()
	}
	if !s.Sex.IsUnset() {
		t.Sex = s.Sex.MustGetNull()
	}
	if !s.Avetemp.IsUnset() {
		t.Avetemp = s.Avetemp.MustGetNull()
	}
	if !s.Windspeed.IsUnset() {
		t.Windspeed = s.Windspeed.MustGetNull()
	}
	if !s.Winddir.IsUnset() {
		t.Winddir = s.Winddir.MustGetNull()
	}
	if !s.Raingauge.IsUnset() {
		t.Raingauge = s.Raingauge.MustGetNull()
	}
	if !s.Activity.IsUnset() {
		t.Activity = s.Activity.MustGetNull()
	}
	if !s.Testmethod.IsUnset() {
		t.Testmethod = s.Testmethod.MustGetNull()
	}
	if !s.Diseasetested.IsUnset() {
		t.Diseasetested = s.Diseasetested.MustGetNull()
	}
	if !s.Diseasepos.IsUnset() {
		t.Diseasepos = s.Diseasepos.MustGetNull()
	}
	if !s.Reviewed.IsUnset() {
		t.Reviewed = s.Reviewed.MustGetNull()
	}
	if !s.Reviewedby.IsUnset() {
		t.Reviewedby = s.Reviewedby.MustGetNull()
	}
	if !s.Revieweddate.IsUnset() {
		t.Revieweddate = s.Revieweddate.MustGetNull()
	}
	if !s.Locationname.IsUnset() {
		t.Locationname = s.Locationname.MustGetNull()
	}
	if !s.Zone.IsUnset() {
		t.Zone = s.Zone.MustGetNull()
	}
	if !s.Recordstatus.IsUnset() {
		t.Recordstatus = s.Recordstatus.MustGetNull()
	}
	if !s.Zone2.IsUnset() {
		t.Zone2 = s.Zone2.MustGetNull()
	}
	if s.Globalid.IsValue() {
		t.Globalid = s.Globalid.MustGet()
	}
	if !s.CreatedUser.IsUnset() {
		t.CreatedUser = s.CreatedUser.MustGetNull()
	}
	if !s.CreatedDate.IsUnset() {
		t.CreatedDate = s.CreatedDate.MustGetNull()
	}
	if !s.LastEditedUser.IsUnset() {
		t.LastEditedUser = s.LastEditedUser.MustGetNull()
	}
	if !s.LastEditedDate.IsUnset() {
		t.LastEditedDate = s.LastEditedDate.MustGetNull()
	}
	if !s.Lab.IsUnset() {
		t.Lab = s.Lab.MustGetNull()
	}
	if !s.Fieldtech.IsUnset() {
		t.Fieldtech = s.Fieldtech.MustGetNull()
	}
	if !s.Flockid.IsUnset() {
		t.Flockid = s.Flockid.MustGetNull()
	}
	if !s.Samplecount.IsUnset() {
		t.Samplecount = s.Samplecount.MustGetNull()
	}
	if !s.Chickenid.IsUnset() {
		t.Chickenid = s.Chickenid.MustGetNull()
	}
	if !s.Gatewaysync.IsUnset() {
		t.Gatewaysync = s.Gatewaysync.MustGetNull()
	}
	if !s.Creationdate.IsUnset() {
		t.Creationdate = s.Creationdate.MustGetNull()
	}
	if !s.Creator.IsUnset() {
		t.Creator = s.Creator.MustGetNull()
	}
	if !s.Editdate.IsUnset() {
		t.Editdate = s.Editdate.MustGetNull()
	}
	if !s.Editor.IsUnset() {
		t.Editor = s.Editor.MustGetNull()
	}
	if s.Geometry.IsValue() {
		t.Geometry = s.Geometry.MustGet()
	}
	if !s.Geospatial.IsUnset() {
		t.Geospatial = s.Geospatial.MustGetNull()
	}
	if s.Version.IsValue() {
		t.Version = s.Version.MustGet()
	}
	if s.OrganizationID.IsValue() {
		t.OrganizationID = s.OrganizationID.MustGet()
	}
}
// Apply wires this setter into an INSERT query: it registers the table's
// BeforeInsert hooks and appends one VALUES row. Columns that were never set
// emit the raw SQL DEFAULT keyword so any database-side default applies.
func (s *FieldseekerSamplecollectionSetter) Apply(q *dialect.InsertQuery) {
	q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
		return FieldseekerSamplecollections.BeforeInsertHooks.RunHooks(ctx, exec, s)
	})
	q.AppendValues(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
		// pick yields the bound argument when the column was set, and the raw
		// DEFAULT keyword otherwise. The argument is produced lazily so that
		// MustGet/MustGetNull only runs on values that are actually present.
		pick := func(set bool, arg func() bob.Expression) bob.Expression {
			if set {
				return arg()
			}
			return psql.Raw("DEFAULT")
		}
		// One entry per column, in the table's column order.
		vals := []bob.Expression{
			pick(s.Objectid.IsValue(), func() bob.Expression { return psql.Arg(s.Objectid.MustGet()) }),
			pick(!s.LocID.IsUnset(), func() bob.Expression { return psql.Arg(s.LocID.MustGetNull()) }),
			pick(!s.Startdatetime.IsUnset(), func() bob.Expression { return psql.Arg(s.Startdatetime.MustGetNull()) }),
			pick(!s.Enddatetime.IsUnset(), func() bob.Expression { return psql.Arg(s.Enddatetime.MustGetNull()) }),
			pick(!s.Sitecond.IsUnset(), func() bob.Expression { return psql.Arg(s.Sitecond.MustGetNull()) }),
			pick(!s.Sampleid.IsUnset(), func() bob.Expression { return psql.Arg(s.Sampleid.MustGetNull()) }),
			pick(!s.Survtech.IsUnset(), func() bob.Expression { return psql.Arg(s.Survtech.MustGetNull()) }),
			pick(!s.Datesent.IsUnset(), func() bob.Expression { return psql.Arg(s.Datesent.MustGetNull()) }),
			pick(!s.Datetested.IsUnset(), func() bob.Expression { return psql.Arg(s.Datetested.MustGetNull()) }),
			pick(!s.Testtech.IsUnset(), func() bob.Expression { return psql.Arg(s.Testtech.MustGetNull()) }),
			pick(!s.Comments.IsUnset(), func() bob.Expression { return psql.Arg(s.Comments.MustGetNull()) }),
			pick(!s.Processed.IsUnset(), func() bob.Expression { return psql.Arg(s.Processed.MustGetNull()) }),
			pick(!s.Sampletype.IsUnset(), func() bob.Expression { return psql.Arg(s.Sampletype.MustGetNull()) }),
			pick(!s.Samplecond.IsUnset(), func() bob.Expression { return psql.Arg(s.Samplecond.MustGetNull()) }),
			pick(!s.Species.IsUnset(), func() bob.Expression { return psql.Arg(s.Species.MustGetNull()) }),
			pick(!s.Sex.IsUnset(), func() bob.Expression { return psql.Arg(s.Sex.MustGetNull()) }),
			pick(!s.Avetemp.IsUnset(), func() bob.Expression { return psql.Arg(s.Avetemp.MustGetNull()) }),
			pick(!s.Windspeed.IsUnset(), func() bob.Expression { return psql.Arg(s.Windspeed.MustGetNull()) }),
			pick(!s.Winddir.IsUnset(), func() bob.Expression { return psql.Arg(s.Winddir.MustGetNull()) }),
			pick(!s.Raingauge.IsUnset(), func() bob.Expression { return psql.Arg(s.Raingauge.MustGetNull()) }),
			pick(!s.Activity.IsUnset(), func() bob.Expression { return psql.Arg(s.Activity.MustGetNull()) }),
			pick(!s.Testmethod.IsUnset(), func() bob.Expression { return psql.Arg(s.Testmethod.MustGetNull()) }),
			pick(!s.Diseasetested.IsUnset(), func() bob.Expression { return psql.Arg(s.Diseasetested.MustGetNull()) }),
			pick(!s.Diseasepos.IsUnset(), func() bob.Expression { return psql.Arg(s.Diseasepos.MustGetNull()) }),
			pick(!s.Reviewed.IsUnset(), func() bob.Expression { return psql.Arg(s.Reviewed.MustGetNull()) }),
			pick(!s.Reviewedby.IsUnset(), func() bob.Expression { return psql.Arg(s.Reviewedby.MustGetNull()) }),
			pick(!s.Revieweddate.IsUnset(), func() bob.Expression { return psql.Arg(s.Revieweddate.MustGetNull()) }),
			pick(!s.Locationname.IsUnset(), func() bob.Expression { return psql.Arg(s.Locationname.MustGetNull()) }),
			pick(!s.Zone.IsUnset(), func() bob.Expression { return psql.Arg(s.Zone.MustGetNull()) }),
			pick(!s.Recordstatus.IsUnset(), func() bob.Expression { return psql.Arg(s.Recordstatus.MustGetNull()) }),
			pick(!s.Zone2.IsUnset(), func() bob.Expression { return psql.Arg(s.Zone2.MustGetNull()) }),
			pick(s.Globalid.IsValue(), func() bob.Expression { return psql.Arg(s.Globalid.MustGet()) }),
			pick(!s.CreatedUser.IsUnset(), func() bob.Expression { return psql.Arg(s.CreatedUser.MustGetNull()) }),
			pick(!s.CreatedDate.IsUnset(), func() bob.Expression { return psql.Arg(s.CreatedDate.MustGetNull()) }),
			pick(!s.LastEditedUser.IsUnset(), func() bob.Expression { return psql.Arg(s.LastEditedUser.MustGetNull()) }),
			pick(!s.LastEditedDate.IsUnset(), func() bob.Expression { return psql.Arg(s.LastEditedDate.MustGetNull()) }),
			pick(!s.Lab.IsUnset(), func() bob.Expression { return psql.Arg(s.Lab.MustGetNull()) }),
			pick(!s.Fieldtech.IsUnset(), func() bob.Expression { return psql.Arg(s.Fieldtech.MustGetNull()) }),
			pick(!s.Flockid.IsUnset(), func() bob.Expression { return psql.Arg(s.Flockid.MustGetNull()) }),
			pick(!s.Samplecount.IsUnset(), func() bob.Expression { return psql.Arg(s.Samplecount.MustGetNull()) }),
			pick(!s.Chickenid.IsUnset(), func() bob.Expression { return psql.Arg(s.Chickenid.MustGetNull()) }),
			pick(!s.Gatewaysync.IsUnset(), func() bob.Expression { return psql.Arg(s.Gatewaysync.MustGetNull()) }),
			pick(!s.Creationdate.IsUnset(), func() bob.Expression { return psql.Arg(s.Creationdate.MustGetNull()) }),
			pick(!s.Creator.IsUnset(), func() bob.Expression { return psql.Arg(s.Creator.MustGetNull()) }),
			pick(!s.Editdate.IsUnset(), func() bob.Expression { return psql.Arg(s.Editdate.MustGetNull()) }),
			pick(!s.Editor.IsUnset(), func() bob.Expression { return psql.Arg(s.Editor.MustGetNull()) }),
			pick(s.Geometry.IsValue(), func() bob.Expression { return psql.Arg(s.Geometry.MustGet()) }),
			pick(!s.Geospatial.IsUnset(), func() bob.Expression { return psql.Arg(s.Geospatial.MustGetNull()) }),
			pick(s.Version.IsValue(), func() bob.Expression { return psql.Arg(s.Version.MustGet()) }),
			pick(s.OrganizationID.IsValue(), func() bob.Expression { return psql.Arg(s.OrganizationID.MustGet()) }),
		}
		return bob.ExpressSlice(ctx, w, d, start, vals, "", ", ", "")
	}))
}
// UpdateMod turns this setter into an UPDATE mod: one SET clause per field
// that has been given a value (see Expressions).
func (s FieldseekerSamplecollectionSetter) UpdateMod() bob.Mod[*dialect.UpdateQuery] {
	assignments := s.Expressions()
	return um.Set(assignments...)
}
func (s FieldseekerSamplecollectionSetter) Expressions(prefix ...string) []bob.Expression {
exprs := make([]bob.Expression, 0, 50)
if s.Objectid.IsValue() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "objectid")...),
psql.Arg(s.Objectid),
}})
}
if !s.LocID.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "loc_id")...),
psql.Arg(s.LocID),
}})
}
if !s.Startdatetime.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "startdatetime")...),
psql.Arg(s.Startdatetime),
}})
}
if !s.Enddatetime.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "enddatetime")...),
psql.Arg(s.Enddatetime),
}})
}
if !s.Sitecond.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "sitecond")...),
psql.Arg(s.Sitecond),
}})
}
if !s.Sampleid.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "sampleid")...),
psql.Arg(s.Sampleid),
}})
}
if !s.Survtech.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "survtech")...),
psql.Arg(s.Survtech),
}})
}
if !s.Datesent.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "datesent")...),
psql.Arg(s.Datesent),
}})
}
if !s.Datetested.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "datetested")...),
psql.Arg(s.Datetested),
}})
}
if !s.Testtech.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "testtech")...),
psql.Arg(s.Testtech),
}})
}
if !s.Comments.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "comments")...),
psql.Arg(s.Comments),
}})
}
if !s.Processed.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "processed")...),
psql.Arg(s.Processed),
}})
}
if !s.Sampletype.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "sampletype")...),
psql.Arg(s.Sampletype),
}})
}
if !s.Samplecond.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "samplecond")...),
psql.Arg(s.Samplecond),
}})
}
if !s.Species.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "species")...),
psql.Arg(s.Species),
}})
}
if !s.Sex.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "sex")...),
psql.Arg(s.Sex),
}})
}
if !s.Avetemp.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "avetemp")...),
psql.Arg(s.Avetemp),
}})
}
if !s.Windspeed.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "windspeed")...),
psql.Arg(s.Windspeed),
}})
}
if !s.Winddir.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "winddir")...),
psql.Arg(s.Winddir),
}})
}
if !s.Raingauge.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "raingauge")...),
psql.Arg(s.Raingauge),
}})
}
if !s.Activity.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "activity")...),
psql.Arg(s.Activity),
}})
}
if !s.Testmethod.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "testmethod")...),
psql.Arg(s.Testmethod),
}})
}
if !s.Diseasetested.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "diseasetested")...),
psql.Arg(s.Diseasetested),
}})
}
if !s.Diseasepos.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "diseasepos")...),
psql.Arg(s.Diseasepos),
}})
}
if !s.Reviewed.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "reviewed")...),
psql.Arg(s.Reviewed),
}})
}
if !s.Reviewedby.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "reviewedby")...),
psql.Arg(s.Reviewedby),
}})
}
if !s.Revieweddate.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "revieweddate")...),
psql.Arg(s.Revieweddate),
}})
}
if !s.Locationname.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "locationname")...),
psql.Arg(s.Locationname),
}})
}
if !s.Zone.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "zone")...),
psql.Arg(s.Zone),
}})
}
if !s.Recordstatus.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "recordstatus")...),
psql.Arg(s.Recordstatus),
}})
}
if !s.Zone2.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "zone2")...),
psql.Arg(s.Zone2),
}})
}
if s.Globalid.IsValue() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "globalid")...),
psql.Arg(s.Globalid),
}})
}
if !s.CreatedUser.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "created_user")...),
psql.Arg(s.CreatedUser),
}})
}
if !s.CreatedDate.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "created_date")...),
psql.Arg(s.CreatedDate),
}})
}
if !s.LastEditedUser.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "last_edited_user")...),
psql.Arg(s.LastEditedUser),
}})
}
if !s.LastEditedDate.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "last_edited_date")...),
psql.Arg(s.LastEditedDate),
}})
}
if !s.Lab.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "lab")...),
psql.Arg(s.Lab),
}})
}
if !s.Fieldtech.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "fieldtech")...),
psql.Arg(s.Fieldtech),
}})
}
if !s.Flockid.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "flockid")...),
psql.Arg(s.Flockid),
}})
}
if !s.Samplecount.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "samplecount")...),
psql.Arg(s.Samplecount),
}})
}
if !s.Chickenid.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "chickenid")...),
psql.Arg(s.Chickenid),
}})
}
if !s.Gatewaysync.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "gatewaysync")...),
psql.Arg(s.Gatewaysync),
}})
}
if !s.Creationdate.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "creationdate")...),
psql.Arg(s.Creationdate),
}})
}
if !s.Creator.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "creator")...),
psql.Arg(s.Creator),
}})
}
if !s.Editdate.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "editdate")...),
psql.Arg(s.Editdate),
}})
}
if !s.Editor.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "editor")...),
psql.Arg(s.Editor),
}})
}
if s.Geometry.IsValue() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "geometry")...),
psql.Arg(s.Geometry),
}})
}
if !s.Geospatial.IsUnset() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "geospatial")...),
psql.Arg(s.Geospatial),
}})
}
if s.Version.IsValue() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "version")...),
psql.Arg(s.Version),
}})
}
if s.OrganizationID.IsValue() {
exprs = append(exprs, expr.Join{Sep: " = ", Exprs: []bob.Expression{
psql.Quote(append(prefix, "organization_id")...),
psql.Arg(s.OrganizationID),
}})
}
return exprs
}
// FindFieldseekerSamplecollection retrieves a single record by its composite
// primary key (globalid, version).
// If cols is empty Find will return all columns.
func FindFieldseekerSamplecollection(ctx context.Context, exec bob.Executor, GlobalidPK uuid.UUID, VersionPK int32, cols ...string) (*FieldseekerSamplecollection, error) {
	queryMods := []bob.Mod[*dialect.SelectQuery]{
		sm.Where(FieldseekerSamplecollections.Columns.Globalid.EQ(psql.Arg(GlobalidPK))),
		sm.Where(FieldseekerSamplecollections.Columns.Version.EQ(psql.Arg(VersionPK))),
	}
	// Only restrict the selected columns when the caller asked for a subset.
	if len(cols) > 0 {
		queryMods = append(queryMods, sm.Columns(FieldseekerSamplecollections.Columns.Only(cols...)))
	}
	return FieldseekerSamplecollections.Query(queryMods...).One(ctx, exec)
}
// FieldseekerSamplecollectionExists checks the presence of a single record by
// its composite primary key (globalid, version).
func FieldseekerSamplecollectionExists(ctx context.Context, exec bob.Executor, GlobalidPK uuid.UUID, VersionPK int32) (bool, error) {
	q := FieldseekerSamplecollections.Query(
		sm.Where(FieldseekerSamplecollections.Columns.Globalid.EQ(psql.Arg(GlobalidPK))),
		sm.Where(FieldseekerSamplecollections.Columns.Version.EQ(psql.Arg(VersionPK))),
	)
	return q.Exists(ctx, exec)
}
// AfterQueryHook is called after FieldseekerSamplecollection is retrieved from the database.
// It wraps the single record in a one-element FieldseekerSamplecollectionSlice and
// delegates to the slice implementation, so exactly the same after-select /
// after-insert / after-update / after-delete hooks run for the query type.
func (o *FieldseekerSamplecollection) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error {
	return FieldseekerSamplecollectionSlice{o}.AfterQueryHook(ctx, exec, queryType)
}
// primaryKeyVals returns the primary key values of the FieldseekerSamplecollection
// as a single argument group. The order (globalid, version) must match the
// column order used by pkEQ and pkIN.
func (o *FieldseekerSamplecollection) primaryKeyVals() bob.Expression {
	return psql.ArgGroup(
		o.Globalid,
		o.Version,
	)
}
// pkEQ builds the SQL expression
// ("fieldseeker.samplecollection"."globalid", "fieldseeker.samplecollection"."version") = (?, ?)
// for this record's composite primary key. The right-hand side is written
// lazily via primaryKeyVals so placeholder numbering starts at the correct
// position within the surrounding query.
func (o *FieldseekerSamplecollection) pkEQ() dialect.Expression {
	return psql.Group(psql.Quote("fieldseeker.samplecollection", "globalid"), psql.Quote("fieldseeker.samplecollection", "version")).EQ(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
		return o.primaryKeyVals().WriteSQL(ctx, w, d, start)
	}))
}
// Update uses an executor to update the FieldseekerSamplecollection
// with the values from the setter, restricted to this record's composite
// primary key, then overwrites the receiver with the row returned by the
// database so in-memory state matches what was stored.
func (o *FieldseekerSamplecollection) Update(ctx context.Context, exec bob.Executor, s *FieldseekerSamplecollectionSetter) error {
	v, err := FieldseekerSamplecollections.Update(s.UpdateMod(), um.Where(o.pkEQ())).One(ctx, exec)
	if err != nil {
		return err
	}
	// NOTE(review): this assignment is immediately overwritten by `*o = *v`
	// below, so previously loaded relationships in o.R are replaced by v.R
	// (contrast with Reload, which preserves o.R). Presumably intentional in
	// the generator — confirm against BobGen upstream before changing.
	o.R = v.R
	*o = *v
	return nil
}
// Delete deletes a single FieldseekerSamplecollection record with an executor,
// matching the row by its composite primary key.
func (o *FieldseekerSamplecollection) Delete(ctx context.Context, exec bob.Executor) error {
	if _, err := FieldseekerSamplecollections.Delete(dm.Where(o.pkEQ())).Exec(ctx, exec); err != nil {
		return err
	}
	return nil
}
// Reload refreshes the FieldseekerSamplecollection using the executor.
// The record is re-fetched by its composite primary key (globalid, version),
// and any relationships already loaded into o.R are carried over to the
// freshly fetched values before the receiver is overwritten.
func (o *FieldseekerSamplecollection) Reload(ctx context.Context, exec bob.Executor) error {
	o2, err := FieldseekerSamplecollections.Query(
		sm.Where(FieldseekerSamplecollections.Columns.Globalid.EQ(psql.Arg(o.Globalid))),
		sm.Where(FieldseekerSamplecollections.Columns.Version.EQ(psql.Arg(o.Version))),
	).One(ctx, exec)
	if err != nil {
		return err
	}
	// Preserve previously loaded relationships, then copy the new row in.
	o2.R = o.R
	*o = *o2
	return nil
}
// AfterQueryHook is called after FieldseekerSamplecollectionSlice is retrieved
// from the database. The query type selects which of the table's after-* hook
// sets run against the slice; unrecognized query types are a no-op.
func (o FieldseekerSamplecollectionSlice) AfterQueryHook(ctx context.Context, exec bob.Executor, queryType bob.QueryType) error {
	switch queryType {
	case bob.QueryTypeSelect:
		_, err := FieldseekerSamplecollections.AfterSelectHooks.RunHooks(ctx, exec, o)
		return err
	case bob.QueryTypeInsert:
		_, err := FieldseekerSamplecollections.AfterInsertHooks.RunHooks(ctx, exec, o)
		return err
	case bob.QueryTypeUpdate:
		_, err := FieldseekerSamplecollections.AfterUpdateHooks.RunHooks(ctx, exec, o)
		return err
	case bob.QueryTypeDelete:
		_, err := FieldseekerSamplecollections.AfterDeleteHooks.RunHooks(ctx, exec, o)
		return err
	default:
		return nil
	}
}
// pkIN builds the SQL expression
// ("fieldseeker.samplecollection"."globalid", "fieldseeker.samplecollection"."version") IN ((?, ?), ...)
// matching every row in the slice by composite primary key.
func (o FieldseekerSamplecollectionSlice) pkIN() dialect.Expression {
	// With no rows there is nothing to match; a bare NULL expression is never
	// true in SQL, so queries filtered by it affect zero rows.
	if len(o) == 0 {
		return psql.Raw("NULL")
	}
	return psql.Group(psql.Quote("fieldseeker.samplecollection", "globalid"), psql.Quote("fieldseeker.samplecollection", "version")).In(bob.ExpressionFunc(func(ctx context.Context, w io.StringWriter, d bob.Dialect, start int) ([]any, error) {
		// Write one (globalid, version) argument group per row, comma-separated,
		// deferring to WriteSQL so placeholder numbering stays correct.
		pkPairs := make([]bob.Expression, len(o))
		for i, row := range o {
			pkPairs[i] = row.primaryKeyVals()
		}
		return bob.ExpressSlice(ctx, w, d, start, pkPairs, "", ", ", "")
	}))
}
// copyMatchingRows finds models in the given slice that have the same primary key
// then it first copies the existing relationships from the old model to the new model
// and then replaces the old model in the slice with the new model
func (o FieldseekerSamplecollectionSlice) copyMatchingRows(from ...*FieldseekerSamplecollection) {
	for i, existing := range o {
		for _, replacement := range from {
			// Match on the full composite primary key (globalid, version).
			if replacement.Globalid == existing.Globalid && replacement.Version == existing.Version {
				replacement.R = existing.R
				o[i] = replacement
				break
			}
		}
	}
}
// UpdateMod modifies an update query with "WHERE primary_key IN (o...)"
// and wires the slice into the query's lifecycle: before-update hooks run
// prior to execution, and returned rows are merged back into the slice.
func (o FieldseekerSamplecollectionSlice) UpdateMod() bob.Mod[*dialect.UpdateQuery] {
	return bob.ModFunc[*dialect.UpdateQuery](func(q *dialect.UpdateQuery) {
		// Run the table's before-update hooks against this slice when the query executes.
		q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
			return FieldseekerSamplecollections.BeforeUpdateHooks.RunHooks(ctx, exec, o)
		})
		// Merge rows returned by the update back into this slice so the
		// in-memory models reflect the stored state (relationships preserved
		// by copyMatchingRows).
		q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error {
			var err error
			switch retrieved := retrieved.(type) {
			case *FieldseekerSamplecollection:
				o.copyMatchingRows(retrieved)
			case []*FieldseekerSamplecollection:
				o.copyMatchingRows(retrieved...)
			case FieldseekerSamplecollectionSlice:
				o.copyMatchingRows(retrieved...)
			default:
				// If the retrieved value is not a FieldseekerSamplecollection or a slice of FieldseekerSamplecollection
				// then run the AfterUpdateHooks on the slice
				_, err = FieldseekerSamplecollections.AfterUpdateHooks.RunHooks(ctx, exec, o)
			}
			return err
		}))
		// Restrict the update to the rows in this slice by composite primary key.
		q.AppendWhere(o.pkIN())
	})
}
// DeleteMod modifies an delete query with "WHERE primary_key IN (o...)"
// and wires the slice into the query's lifecycle: before-delete hooks run
// prior to execution, and returned rows are merged back into the slice.
func (o FieldseekerSamplecollectionSlice) DeleteMod() bob.Mod[*dialect.DeleteQuery] {
	return bob.ModFunc[*dialect.DeleteQuery](func(q *dialect.DeleteQuery) {
		// Run the table's before-delete hooks against this slice when the query executes.
		q.AppendHooks(func(ctx context.Context, exec bob.Executor) (context.Context, error) {
			return FieldseekerSamplecollections.BeforeDeleteHooks.RunHooks(ctx, exec, o)
		})
		// Merge any rows returned by the delete back into this slice
		// (relationships preserved by copyMatchingRows).
		q.AppendLoader(bob.LoaderFunc(func(ctx context.Context, exec bob.Executor, retrieved any) error {
			var err error
			switch retrieved := retrieved.(type) {
			case *FieldseekerSamplecollection:
				o.copyMatchingRows(retrieved)
			case []*FieldseekerSamplecollection:
				o.copyMatchingRows(retrieved...)
			case FieldseekerSamplecollectionSlice:
				o.copyMatchingRows(retrieved...)
			default:
				// If the retrieved value is not a FieldseekerSamplecollection or a slice of FieldseekerSamplecollection
				// then run the AfterDeleteHooks on the slice
				_, err = FieldseekerSamplecollections.AfterDeleteHooks.RunHooks(ctx, exec, o)
			}
			return err
		}))
		// Restrict the delete to the rows in this slice by composite primary key.
		q.AppendWhere(o.pkIN())
	})
}
// UpdateAll applies the given setter values to every record in the slice in a
// single UPDATE statement. An empty slice is a no-op.
func (o FieldseekerSamplecollectionSlice) UpdateAll(ctx context.Context, exec bob.Executor, vals FieldseekerSamplecollectionSetter) error {
	if len(o) == 0 {
		return nil
	}
	q := FieldseekerSamplecollections.Update(vals.UpdateMod(), o.UpdateMod())
	_, err := q.All(ctx, exec)
	return err
}
// DeleteAll deletes every record in the slice in a single DELETE statement.
// An empty slice is a no-op.
func (o FieldseekerSamplecollectionSlice) DeleteAll(ctx context.Context, exec bob.Executor) error {
	if len(o) == 0 {
		return nil
	}
	q := FieldseekerSamplecollections.Delete(o.DeleteMod())
	_, err := q.Exec(ctx, exec)
	return err
}
// ReloadAll re-fetches every record in the slice by composite primary key and
// merges the fresh rows back in, preserving loaded relationships.
// An empty slice is a no-op.
func (o FieldseekerSamplecollectionSlice) ReloadAll(ctx context.Context, exec bob.Executor) error {
	if len(o) == 0 {
		return nil
	}
	fresh, err := FieldseekerSamplecollections.Query(sm.Where(o.pkIN())).All(ctx, exec)
	if err != nil {
		return err
	}
	o.copyMatchingRows(fresh...)
	return nil
}
// Organization starts a query for related objects on organization,
// filtered to the organization this record references via organization_id.
func (o *FieldseekerSamplecollection) Organization(mods ...bob.Mod[*dialect.SelectQuery]) OrganizationsQuery {
	queryMods := append(mods, sm.Where(Organizations.Columns.ID.EQ(psql.Arg(o.OrganizationID))))
	return Organizations.Query(queryMods...)
}
// Organization starts a query for the organizations referenced by any record
// in the slice. The organization_ids are passed as a single Postgres array and
// unnested server-side, so the statement uses one bind parameter regardless of
// slice length. Nil entries in the slice are skipped.
func (os FieldseekerSamplecollectionSlice) Organization(mods ...bob.Mod[*dialect.SelectQuery]) OrganizationsQuery {
	ids := make(pgtypes.Array[int32], 0, len(os))
	for _, row := range os {
		if row != nil {
			ids = append(ids, row.OrganizationID)
		}
	}
	idsExpr := psql.Select(sm.Columns(
		psql.F("unnest", psql.Cast(psql.Arg(ids), "integer[]")),
	))
	queryMods := append(mods, sm.Where(psql.Group(Organizations.Columns.ID).OP("IN", idsExpr)))
	return Organizations.Query(queryMods...)
}
// attachFieldseekerSamplecollectionOrganization0 points the sample collection's
// organization_id foreign key at the given organization by issuing an UPDATE,
// returning the (now updated) sample collection.
func attachFieldseekerSamplecollectionOrganization0(ctx context.Context, exec bob.Executor, count int, fieldseekerSamplecollection0 *FieldseekerSamplecollection, organization1 *Organization) (*FieldseekerSamplecollection, error) {
	setter := &FieldseekerSamplecollectionSetter{OrganizationID: omit.From(organization1.ID)}
	if err := fieldseekerSamplecollection0.Update(ctx, exec, setter); err != nil {
		return nil, fmt.Errorf("attachFieldseekerSamplecollectionOrganization0: %w", err)
	}
	return fieldseekerSamplecollection0, nil
}
// InsertOrganization inserts a new organization from the setter, attaches it
// to this sample collection via the organization_id foreign key, and links
// both sides of the relationship in memory (.R on each model).
func (fieldseekerSamplecollection0 *FieldseekerSamplecollection) InsertOrganization(ctx context.Context, exec bob.Executor, related *OrganizationSetter) error {
	organization1, err := Organizations.Insert(related).One(ctx, exec)
	if err != nil {
		return fmt.Errorf("inserting related objects: %w", err)
	}
	if _, err = attachFieldseekerSamplecollectionOrganization0(ctx, exec, 1, fieldseekerSamplecollection0, organization1); err != nil {
		return err
	}
	// Wire up both sides of the in-memory relationship.
	fieldseekerSamplecollection0.R.Organization = organization1
	organization1.R.Samplecollections = append(organization1.R.Samplecollections, fieldseekerSamplecollection0)
	return nil
}
// AttachOrganization attaches an existing organization to this sample
// collection via the organization_id foreign key and links both sides of the
// relationship in memory (.R on each model).
func (fieldseekerSamplecollection0 *FieldseekerSamplecollection) AttachOrganization(ctx context.Context, exec bob.Executor, organization1 *Organization) error {
	if _, err := attachFieldseekerSamplecollectionOrganization0(ctx, exec, 1, fieldseekerSamplecollection0, organization1); err != nil {
		return err
	}
	// Wire up both sides of the in-memory relationship.
	fieldseekerSamplecollection0.R.Organization = organization1
	organization1.R.Samplecollections = append(organization1.R.Samplecollections, fieldseekerSamplecollection0)
	return nil
}
// fieldseekerSamplecollectionWhere provides typed WHERE-clause builders for
// every column of fieldseeker.samplecollection, parameterized over the query
// type Q. Nullable columns use WhereNullMod (adding IsNull/IsNotNull style
// filters); non-null columns (objectid, globalid, geometry, version,
// organization_id) use plain WhereMod.
type fieldseekerSamplecollectionWhere[Q psql.Filterable] struct {
	Objectid       psql.WhereMod[Q, int64]
	LocID          psql.WhereNullMod[Q, uuid.UUID]
	Startdatetime  psql.WhereNullMod[Q, time.Time]
	Enddatetime    psql.WhereNullMod[Q, time.Time]
	Sitecond       psql.WhereNullMod[Q, string]
	Sampleid       psql.WhereNullMod[Q, string]
	Survtech       psql.WhereNullMod[Q, string]
	Datesent       psql.WhereNullMod[Q, time.Time]
	Datetested     psql.WhereNullMod[Q, time.Time]
	Testtech       psql.WhereNullMod[Q, string]
	Comments       psql.WhereNullMod[Q, string]
	Processed      psql.WhereNullMod[Q, int16]
	Sampletype     psql.WhereNullMod[Q, string]
	Samplecond     psql.WhereNullMod[Q, string]
	Species        psql.WhereNullMod[Q, string]
	Sex            psql.WhereNullMod[Q, string]
	Avetemp        psql.WhereNullMod[Q, float64]
	Windspeed      psql.WhereNullMod[Q, float64]
	Winddir        psql.WhereNullMod[Q, string]
	Raingauge      psql.WhereNullMod[Q, float64]
	Activity       psql.WhereNullMod[Q, string]
	Testmethod     psql.WhereNullMod[Q, string]
	Diseasetested  psql.WhereNullMod[Q, string]
	Diseasepos     psql.WhereNullMod[Q, string]
	Reviewed       psql.WhereNullMod[Q, int16]
	Reviewedby     psql.WhereNullMod[Q, string]
	Revieweddate   psql.WhereNullMod[Q, time.Time]
	Locationname   psql.WhereNullMod[Q, string]
	Zone           psql.WhereNullMod[Q, string]
	Recordstatus   psql.WhereNullMod[Q, int16]
	Zone2          psql.WhereNullMod[Q, string]
	Globalid       psql.WhereMod[Q, uuid.UUID]
	CreatedUser    psql.WhereNullMod[Q, string]
	CreatedDate    psql.WhereNullMod[Q, time.Time]
	LastEditedUser psql.WhereNullMod[Q, string]
	LastEditedDate psql.WhereNullMod[Q, time.Time]
	Lab            psql.WhereNullMod[Q, string]
	Fieldtech      psql.WhereNullMod[Q, string]
	Flockid        psql.WhereNullMod[Q, uuid.UUID]
	Samplecount    psql.WhereNullMod[Q, int16]
	Chickenid      psql.WhereNullMod[Q, uuid.UUID]
	Gatewaysync    psql.WhereNullMod[Q, int16]
	Creationdate   psql.WhereNullMod[Q, time.Time]
	Creator        psql.WhereNullMod[Q, string]
	Editdate       psql.WhereNullMod[Q, time.Time]
	Editor         psql.WhereNullMod[Q, string]
	Geometry       psql.WhereMod[Q, types.JSON[json.RawMessage]]
	Geospatial     psql.WhereNullMod[Q, string]
	Version        psql.WhereMod[Q, int32]
	OrganizationID psql.WhereMod[Q, int32]
}
// AliasedAs returns a new where-builder whose column references are qualified
// with the given table alias instead of the table name.
func (fieldseekerSamplecollectionWhere[Q]) AliasedAs(alias string) fieldseekerSamplecollectionWhere[Q] {
	return buildFieldseekerSamplecollectionWhere[Q](buildFieldseekerSamplecollectionColumns(alias))
}
// buildFieldseekerSamplecollectionWhere constructs the typed where-builder for
// the given column set, pairing each column expression with its Go value type.
// Nullable columns get psql.WhereNull; non-null columns get psql.Where.
func buildFieldseekerSamplecollectionWhere[Q psql.Filterable](cols fieldseekerSamplecollectionColumns) fieldseekerSamplecollectionWhere[Q] {
	return fieldseekerSamplecollectionWhere[Q]{
		Objectid:       psql.Where[Q, int64](cols.Objectid),
		LocID:          psql.WhereNull[Q, uuid.UUID](cols.LocID),
		Startdatetime:  psql.WhereNull[Q, time.Time](cols.Startdatetime),
		Enddatetime:    psql.WhereNull[Q, time.Time](cols.Enddatetime),
		Sitecond:       psql.WhereNull[Q, string](cols.Sitecond),
		Sampleid:       psql.WhereNull[Q, string](cols.Sampleid),
		Survtech:       psql.WhereNull[Q, string](cols.Survtech),
		Datesent:       psql.WhereNull[Q, time.Time](cols.Datesent),
		Datetested:     psql.WhereNull[Q, time.Time](cols.Datetested),
		Testtech:       psql.WhereNull[Q, string](cols.Testtech),
		Comments:       psql.WhereNull[Q, string](cols.Comments),
		Processed:      psql.WhereNull[Q, int16](cols.Processed),
		Sampletype:     psql.WhereNull[Q, string](cols.Sampletype),
		Samplecond:     psql.WhereNull[Q, string](cols.Samplecond),
		Species:        psql.WhereNull[Q, string](cols.Species),
		Sex:            psql.WhereNull[Q, string](cols.Sex),
		Avetemp:        psql.WhereNull[Q, float64](cols.Avetemp),
		Windspeed:      psql.WhereNull[Q, float64](cols.Windspeed),
		Winddir:        psql.WhereNull[Q, string](cols.Winddir),
		Raingauge:      psql.WhereNull[Q, float64](cols.Raingauge),
		Activity:       psql.WhereNull[Q, string](cols.Activity),
		Testmethod:     psql.WhereNull[Q, string](cols.Testmethod),
		Diseasetested:  psql.WhereNull[Q, string](cols.Diseasetested),
		Diseasepos:     psql.WhereNull[Q, string](cols.Diseasepos),
		Reviewed:       psql.WhereNull[Q, int16](cols.Reviewed),
		Reviewedby:     psql.WhereNull[Q, string](cols.Reviewedby),
		Revieweddate:   psql.WhereNull[Q, time.Time](cols.Revieweddate),
		Locationname:   psql.WhereNull[Q, string](cols.Locationname),
		Zone:           psql.WhereNull[Q, string](cols.Zone),
		Recordstatus:   psql.WhereNull[Q, int16](cols.Recordstatus),
		Zone2:          psql.WhereNull[Q, string](cols.Zone2),
		Globalid:       psql.Where[Q, uuid.UUID](cols.Globalid),
		CreatedUser:    psql.WhereNull[Q, string](cols.CreatedUser),
		CreatedDate:    psql.WhereNull[Q, time.Time](cols.CreatedDate),
		LastEditedUser: psql.WhereNull[Q, string](cols.LastEditedUser),
		LastEditedDate: psql.WhereNull[Q, time.Time](cols.LastEditedDate),
		Lab:            psql.WhereNull[Q, string](cols.Lab),
		Fieldtech:      psql.WhereNull[Q, string](cols.Fieldtech),
		Flockid:        psql.WhereNull[Q, uuid.UUID](cols.Flockid),
		Samplecount:    psql.WhereNull[Q, int16](cols.Samplecount),
		Chickenid:      psql.WhereNull[Q, uuid.UUID](cols.Chickenid),
		Gatewaysync:    psql.WhereNull[Q, int16](cols.Gatewaysync),
		Creationdate:   psql.WhereNull[Q, time.Time](cols.Creationdate),
		Creator:        psql.WhereNull[Q, string](cols.Creator),
		Editdate:       psql.WhereNull[Q, time.Time](cols.Editdate),
		Editor:         psql.WhereNull[Q, string](cols.Editor),
		Geometry:       psql.Where[Q, types.JSON[json.RawMessage]](cols.Geometry),
		Geospatial:     psql.WhereNull[Q, string](cols.Geospatial),
		Version:        psql.Where[Q, int32](cols.Version),
		OrganizationID: psql.Where[Q, int32](cols.OrganizationID),
	}
}
// Preload stores an eagerly-fetched related model on the receiver's .R struct.
// The only supported relationship name is "Organization"; any other name, or a
// value of the wrong type, is an error. A nil receiver is a no-op.
func (o *FieldseekerSamplecollection) Preload(name string, retrieved any) error {
	if o == nil {
		return nil
	}
	if name != "Organization" {
		return fmt.Errorf("fieldseekerSamplecollection has no relationship %q", name)
	}
	rel, ok := retrieved.(*Organization)
	if !ok {
		return fmt.Errorf("fieldseekerSamplecollection cannot load %T as %q", retrieved, name)
	}
	o.R.Organization = rel
	// Link the inverse side so the relationship is navigable both ways.
	if rel != nil {
		rel.R.Samplecollections = FieldseekerSamplecollectionSlice{o}
	}
	return nil
}
// fieldseekerSamplecollectionPreloader holds factory functions that build
// eager-loading (JOIN-based) preloaders for each relationship of the table.
type fieldseekerSamplecollectionPreloader struct {
	Organization func(...psql.PreloadOption) psql.Preloader
}
// buildFieldseekerSamplecollectionPreloader constructs the preloader factories
// for this table. The Organization preloader joins from
// fieldseeker.samplecollection.organization_id to organizations.id and selects
// all organization columns alongside the parent rows.
func buildFieldseekerSamplecollectionPreloader() fieldseekerSamplecollectionPreloader {
	return fieldseekerSamplecollectionPreloader{
		Organization: func(opts ...psql.PreloadOption) psql.Preloader {
			return psql.Preload[*Organization, OrganizationSlice](psql.PreloadRel{
				Name: "Organization",
				Sides: []psql.PreloadSide{
					{
						From:        FieldseekerSamplecollections,
						To:          Organizations,
						FromColumns: []string{"organization_id"},
						ToColumns:   []string{"id"},
					},
				},
			}, Organizations.Columns.Names(), opts...)
		},
	}
}
// fieldseekerSamplecollectionThenLoader holds factory functions that build
// "then-load" loaders, which fetch relationships in a separate query after the
// parent rows have been retrieved.
type fieldseekerSamplecollectionThenLoader[Q orm.Loadable] struct {
	Organization func(...bob.Mod[*dialect.SelectQuery]) orm.Loader[Q]
}
// buildFieldseekerSamplecollectionThenLoader constructs the then-load
// factories for this table. Each loader forwards to the retrieved models'
// LoadOrganization method via a narrow local interface, so it works for both
// single records and slices.
func buildFieldseekerSamplecollectionThenLoader[Q orm.Loadable]() fieldseekerSamplecollectionThenLoader[Q] {
	// Narrow interface satisfied by both *FieldseekerSamplecollection and
	// FieldseekerSamplecollectionSlice.
	type OrganizationLoadInterface interface {
		LoadOrganization(context.Context, bob.Executor, ...bob.Mod[*dialect.SelectQuery]) error
	}
	return fieldseekerSamplecollectionThenLoader[Q]{
		Organization: thenLoadBuilder[Q](
			"Organization",
			func(ctx context.Context, exec bob.Executor, retrieved OrganizationLoadInterface, mods ...bob.Mod[*dialect.SelectQuery]) error {
				return retrieved.LoadOrganization(ctx, exec, mods...)
			},
		),
	}
}
// LoadOrganization loads the fieldseekerSamplecollection's Organization into
// the .R struct with a separate query, replacing any previously loaded value.
// A nil receiver is a no-op.
func (o *FieldseekerSamplecollection) LoadOrganization(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
	if o == nil {
		return nil
	}
	// Drop any previously loaded relationship before fetching.
	o.R.Organization = nil
	rel, err := o.Organization(mods...).One(ctx, exec)
	if err != nil {
		return err
	}
	// Link both sides of the relationship.
	rel.R.Samplecollections = FieldseekerSamplecollectionSlice{o}
	o.R.Organization = rel
	return nil
}
// LoadOrganization loads each fieldseekerSamplecollection's Organization into
// its .R struct using one query for the whole slice, then matches rows back to
// their parents on organization_id = id. Nil entries are skipped; an empty
// slice is a no-op.
func (os FieldseekerSamplecollectionSlice) LoadOrganization(ctx context.Context, exec bob.Executor, mods ...bob.Mod[*dialect.SelectQuery]) error {
	if len(os) == 0 {
		return nil
	}
	organizations, err := os.Organization(mods...).All(ctx, exec)
	if err != nil {
		return err
	}
	for _, row := range os {
		if row == nil {
			continue
		}
		for _, rel := range organizations {
			if row.OrganizationID == rel.ID {
				// Link both sides of the relationship.
				rel.R.Samplecollections = append(rel.R.Samplecollections, row)
				row.R.Organization = rel
				break
			}
		}
	}
	return nil
}
// fieldseekerSamplecollectionJoins holds join-mod builders for each
// relationship of the table. typ records the join type (e.g. INNER/LEFT) used
// when the mods are built.
type fieldseekerSamplecollectionJoins[Q dialect.Joinable] struct {
	typ          string
	Organization modAs[Q, organizationColumns]
}
// aliasedAs rebuilds the join builders against an aliased column set while
// keeping the same join type.
func (j fieldseekerSamplecollectionJoins[Q]) aliasedAs(alias string) fieldseekerSamplecollectionJoins[Q] {
	return buildFieldseekerSamplecollectionJoins[Q](buildFieldseekerSamplecollectionColumns(alias), j.typ)
}
// buildFieldseekerSamplecollectionJoins constructs the join builders for the
// given column set and join type. The Organization join produces
// "<typ> JOIN organizations AS <alias> ON organizations.id = <cols>.organization_id".
func buildFieldseekerSamplecollectionJoins[Q dialect.Joinable](cols fieldseekerSamplecollectionColumns, typ string) fieldseekerSamplecollectionJoins[Q] {
	return fieldseekerSamplecollectionJoins[Q]{
		typ: typ,
		Organization: modAs[Q, organizationColumns]{
			c: Organizations.Columns,
			f: func(to organizationColumns) bob.Mod[Q] {
				mods := make(mods.QueryMods[Q], 0, 1)
				{
					mods = append(mods, dialect.Join[Q](typ, Organizations.Name().As(to.Alias())).On(
						to.ID.EQ(cols.OrganizationID),
					))
				}
				return mods
			},
		},
	}
}