2026-01-21 03:30:03 +00:00
|
|
|
package background
|
|
|
|
|
|
|
|
|
|
import (
|
|
|
|
|
"context"
|
2026-02-08 04:01:48 +00:00
|
|
|
"fmt"
|
2026-01-21 03:30:03 +00:00
|
|
|
"sync"
|
2026-01-23 20:36:16 +00:00
|
|
|
|
|
|
|
|
"github.com/Gleipnir-Technology/nidus-sync/comms/email"
|
2026-02-08 04:01:48 +00:00
|
|
|
"github.com/Gleipnir-Technology/nidus-sync/db"
|
|
|
|
|
"github.com/Gleipnir-Technology/nidus-sync/db/enums"
|
|
|
|
|
"github.com/Gleipnir-Technology/nidus-sync/db/models"
|
2026-01-27 19:56:26 +00:00
|
|
|
"github.com/Gleipnir-Technology/nidus-sync/platform/text"
|
2026-02-08 04:01:48 +00:00
|
|
|
"github.com/rs/zerolog/log"
|
2026-01-21 03:30:03 +00:00
|
|
|
)
|
|
|
|
|
|
|
|
|
|
// waitGroup tracks every worker goroutine launched by Start so that
// WaitForExit can block until all background workers have returned.
var waitGroup sync.WaitGroup
|
|
|
|
|
|
|
|
|
|
func Start(ctx context.Context) {
|
|
|
|
|
newOAuthTokenChannel = make(chan struct{}, 10)
|
|
|
|
|
|
2026-02-08 03:52:39 +00:00
|
|
|
channelJobAudio = make(chan jobAudio, 100) // Buffered channel to prevent blocking
|
|
|
|
|
channelJobImportCSVPool = make(chan jobImportCSVPool, 100) // Buffered channel to prevent blocking
|
|
|
|
|
channelJobEmail = make(chan email.Job, 100) // Buffered channel to prevent blocking
|
|
|
|
|
channelJobText = make(chan text.Job, 100) // Buffered channel to prevent blocking
|
2026-01-21 03:30:03 +00:00
|
|
|
|
|
|
|
|
waitGroup.Add(1)
|
|
|
|
|
go func() {
|
|
|
|
|
defer waitGroup.Done()
|
|
|
|
|
refreshFieldseekerData(ctx, newOAuthTokenChannel)
|
|
|
|
|
}()
|
|
|
|
|
|
|
|
|
|
waitGroup.Add(1)
|
|
|
|
|
go func() {
|
|
|
|
|
defer waitGroup.Done()
|
|
|
|
|
startWorkerAudio(ctx, channelJobAudio)
|
|
|
|
|
}()
|
|
|
|
|
|
2026-02-08 03:52:39 +00:00
|
|
|
waitGroup.Add(1)
|
|
|
|
|
go func() {
|
|
|
|
|
defer waitGroup.Done()
|
|
|
|
|
startWorkerCSV(ctx, channelJobImportCSVPool)
|
|
|
|
|
}()
|
|
|
|
|
|
2026-01-21 03:30:03 +00:00
|
|
|
waitGroup.Add(1)
|
|
|
|
|
go func() {
|
|
|
|
|
defer waitGroup.Done()
|
|
|
|
|
startWorkerEmail(ctx, channelJobEmail)
|
|
|
|
|
}()
|
|
|
|
|
|
|
|
|
|
waitGroup.Add(1)
|
|
|
|
|
go func() {
|
|
|
|
|
defer waitGroup.Done()
|
|
|
|
|
startWorkerText(ctx, channelJobText)
|
|
|
|
|
}()
|
2026-02-08 04:01:48 +00:00
|
|
|
|
|
|
|
|
err := addWaitingJobs(ctx)
|
|
|
|
|
if err != nil {
|
|
|
|
|
log.Error().Err(err).Msg("Failed to add waiting background jobs")
|
|
|
|
|
}
|
2026-01-21 03:30:03 +00:00
|
|
|
}
|
2026-02-08 04:01:48 +00:00
|
|
|
|
2026-01-21 03:30:03 +00:00
|
|
|
func WaitForExit() {
|
|
|
|
|
|
|
|
|
|
waitGroup.Wait()
|
|
|
|
|
}
|
2026-02-08 04:01:48 +00:00
|
|
|
|
|
|
|
|
func addWaitingJobs(ctx context.Context) error {
|
|
|
|
|
rows, err := models.FileuploadFiles.Query(
|
|
|
|
|
models.SelectWhere.FileuploadFiles.Status.EQ(
|
|
|
|
|
enums.FileuploadFilestatustypeUploaded,
|
|
|
|
|
),
|
|
|
|
|
).All(ctx, db.PGInstance.BobDB)
|
|
|
|
|
if err != nil {
|
|
|
|
|
return fmt.Errorf("Failed to query file uploads: %w", err)
|
|
|
|
|
}
|
|
|
|
|
for _, row := range rows {
|
|
|
|
|
report_id := row.ID
|
|
|
|
|
job := jobImportCSVPool{
|
|
|
|
|
fileID: report_id,
|
|
|
|
|
}
|
|
|
|
|
select {
|
|
|
|
|
case channelJobImportCSVPool <- job:
|
|
|
|
|
log.Info().Int32("report_id", report_id).Msg("CSV upload job queued")
|
|
|
|
|
default:
|
|
|
|
|
log.Warn().Int32("report_id", report_id).Msg("CSV upload job failed to queue, channel full")
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
return nil
|
|
|
|
|
}
|