Add jobs to process queue when csv is uploaded
This commit is contained in:
parent
f9c8f37cec
commit
aee17d2c7a
2 changed files with 16 additions and 0 deletions
|
|
@ -17,6 +17,20 @@ type jobImportCSVPool struct {
|
|||
|
||||
// channelJobImportCSVPool is the package-level queue of pending CSV
// import jobs; ProcessUpload produces onto it and the CSV worker
// consumes from it. NOTE(review): capacity (buffered vs. unbuffered) is
// set wherever this channel is made, which is not visible here — the
// full-queue drop in enqueueUploadJob suggests it is buffered; confirm.
var channelJobImportCSVPool chan jobImportCSVPool
|
||||
|
||||
func ProcessUpload(file_id int32) {
|
||||
enqueueUploadJob(jobImportCSVPool{
|
||||
fileID: file_id,
|
||||
})
|
||||
}
|
||||
|
||||
// enqueueUploadJob performs a non-blocking send of job onto
// channelJobImportCSVPool. If the send cannot proceed immediately the
// job is dropped and a warning is logged, so a full queue never blocks
// the caller. NOTE(review): a nil (never-initialized) channel also
// falls through to the default case and silently drops every job —
// confirm the channel is made before uploads are accepted.
func enqueueUploadJob(job jobImportCSVPool) {
	select {
	case channelJobImportCSVPool <- job:
		log.Info().Int32("file_id", job.fileID).Msg("Enqueued csv job")
	default:
		// Drop rather than block: upload processing is best-effort here.
		log.Warn().Int32("file_id", job.fileID).Msg("csv channel is full, dropping job")
	}
}
|
||||
func startWorkerCSV(ctx context.Context, channelJobImport chan jobImportCSVPool) {
|
||||
go func() {
|
||||
for {
|
||||
|
|
|
|||
|
|
@ -5,6 +5,7 @@ import (
|
|||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/background"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/enums"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/models"
|
||||
|
|
@ -45,6 +46,7 @@ func NewPoolUpload(ctx context.Context, u *models.User, upload userfile.FileUplo
|
|||
return PoolUpload{}, fmt.Errorf("Failed to create csv: %w", err)
|
||||
}
|
||||
txn.Commit(ctx)
|
||||
background.ProcessUpload(file.ID)
|
||||
return PoolUpload{
|
||||
ID: file.ID,
|
||||
}, nil
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue