2019-10-10 00:26:28 +03:00
|
|
|
|
/*
|
|
|
|
|
<< WORK IN PROGRESS >>
|
|
|
|
|
Don't use these classes yet.
|
|
|
|
|
|
|
|
|
|
This aims to implement https://github.com/n1try/wakapi/issues/1.
|
|
|
|
|
Idea is to have regularly running, cron-like background jobs that request a summary
|
|
|
|
|
from SummaryService for a pre-defined time interval, e.g. 24 hours. Those are persisted
|
|
|
|
|
to the database. Once a user requests a summary for a certain time frame that partially
|
|
|
|
|
overlaps with pre-generated summaries, those will be aggregated together with actual heartbeats
|
|
|
|
|
for the non-overlapping time frames left and right.
|
|
|
|
|
*/
|
|
|
|
|
|
|
|
|
|
package services
|
|
|
|
|
|
|
|
|
|
import (
|
2019-10-10 17:47:19 +03:00
|
|
|
|
"log"
|
2019-10-10 00:26:28 +03:00
|
|
|
|
"time"
|
|
|
|
|
|
|
|
|
|
"github.com/jinzhu/gorm"
|
|
|
|
|
"github.com/n1try/wakapi/models"
|
|
|
|
|
)
|
|
|
|
|
|
2019-10-10 17:47:19 +03:00
|
|
|
|
// Tunables for the background aggregation pipeline.
const (
	// summaryInterval is the intended length of one pre-generated summary
	// time frame. NOTE(review): not referenced anywhere in this file —
	// generateUserJobs hard-codes 24 * time.Hour instead; unify eventually.
	summaryInterval time.Duration = 24 * time.Hour
	// nSummaryWorkers is the number of goroutines constructing summaries.
	nSummaryWorkers int = 8
	// nPersistWorkers is the number of goroutines saving summaries to the DB.
	nPersistWorkers int = 8
)
|
|
|
|
|
|
2019-10-10 00:26:28 +03:00
|
|
|
|
// AggregationService pre-generates daily summaries in the background so
// user-requested summaries can be assembled from persisted partials plus
// raw heartbeats (work in progress — see the file header comment).
type AggregationService struct {
	Config           *models.Config
	Db               *gorm.DB
	UserService      *UserService
	SummaryService   *SummaryService
	HeartbeatService *HeartbeatService
}
|
|
|
|
|
|
|
|
|
|
// AggregationJob is one unit of work for the summary workers: generate a
// summary for user UserID covering the time frame From..To.
type AggregationJob struct {
	UserID string
	From   time.Time
	To     time.Time
}
|
|
|
|
|
|
|
|
|
|
// Use https://godoc.org/github.com/jasonlvhit/gocron to trigger jobs on a regular basis.
|
|
|
|
|
func (srv *AggregationService) Start(interval time.Duration) {
|
2019-10-10 17:47:19 +03:00
|
|
|
|
jobs := make(chan *AggregationJob)
|
|
|
|
|
summaries := make(chan *models.Summary)
|
|
|
|
|
|
|
|
|
|
for i := 0; i < nSummaryWorkers; i++ {
|
|
|
|
|
go srv.summaryWorker(jobs, summaries)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
for i := 0; i < nPersistWorkers; i++ {
|
|
|
|
|
go srv.persistWorker(summaries)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
srv.generateJobs(jobs)
|
2019-10-10 00:26:28 +03:00
|
|
|
|
}
|
|
|
|
|
|
2019-10-10 17:47:19 +03:00
|
|
|
|
func (srv *AggregationService) summaryWorker(jobs <-chan *AggregationJob, summaries chan<- *models.Summary) {
|
|
|
|
|
for job := range jobs {
|
2019-10-11 09:00:02 +03:00
|
|
|
|
if summary, err := srv.SummaryService.Construct(job.From, job.To, &models.User{ID: job.UserID}); err != nil {
|
2019-10-10 17:47:19 +03:00
|
|
|
|
log.Printf("Failed to generate summary (%v, %v, %s) – %v.", job.From, job.To, job.UserID, err)
|
|
|
|
|
} else {
|
|
|
|
|
summaries <- summary
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
2019-10-10 00:26:28 +03:00
|
|
|
|
|
2019-10-10 17:47:19 +03:00
|
|
|
|
func (srv *AggregationService) persistWorker(summaries <-chan *models.Summary) {
|
|
|
|
|
for summary := range summaries {
|
2019-10-11 09:00:02 +03:00
|
|
|
|
if err := srv.SummaryService.Insert(summary); err != nil {
|
2019-10-10 17:47:19 +03:00
|
|
|
|
log.Printf("Failed to save summary (%v, %v, %s) – %v.", summary.UserID, summary.FromTime, summary.ToTime, err)
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
func (srv *AggregationService) generateJobs(jobs chan<- *AggregationJob) error {
|
2019-10-10 00:26:28 +03:00
|
|
|
|
users, err := srv.UserService.GetAll()
|
|
|
|
|
if err != nil {
|
2019-10-10 17:47:19 +03:00
|
|
|
|
return err
|
2019-10-10 00:26:28 +03:00
|
|
|
|
}
|
|
|
|
|
|
2019-10-11 09:00:02 +03:00
|
|
|
|
latestSummaries, err := srv.SummaryService.GetLatestByUser()
|
2019-10-10 00:26:28 +03:00
|
|
|
|
if err != nil {
|
2019-10-10 17:47:19 +03:00
|
|
|
|
return err
|
2019-10-10 00:26:28 +03:00
|
|
|
|
}
|
|
|
|
|
|
2019-10-11 09:00:02 +03:00
|
|
|
|
userSummaryTimes := make(map[string]time.Time)
|
2019-10-10 00:26:28 +03:00
|
|
|
|
for _, s := range latestSummaries {
|
|
|
|
|
userSummaryTimes[s.UserID] = s.ToTime
|
|
|
|
|
}
|
|
|
|
|
|
2019-10-10 17:47:19 +03:00
|
|
|
|
missingUserIDs := make([]string, 0)
|
2019-10-10 00:26:28 +03:00
|
|
|
|
for _, u := range users {
|
|
|
|
|
if _, ok := userSummaryTimes[u.ID]; !ok {
|
2019-10-10 17:47:19 +03:00
|
|
|
|
missingUserIDs = append(missingUserIDs, u.ID)
|
2019-10-10 00:26:28 +03:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2019-10-10 17:47:19 +03:00
|
|
|
|
firstHeartbeats, err := srv.HeartbeatService.GetFirstUserHeartbeats(missingUserIDs)
|
2019-10-10 00:26:28 +03:00
|
|
|
|
if err != nil {
|
2019-10-10 17:47:19 +03:00
|
|
|
|
return err
|
2019-10-10 00:26:28 +03:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
for id, t := range userSummaryTimes {
|
2019-10-11 09:00:02 +03:00
|
|
|
|
generateUserJobs(id, t, jobs)
|
2019-10-10 00:26:28 +03:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
for _, h := range firstHeartbeats {
|
2019-10-11 09:00:02 +03:00
|
|
|
|
generateUserJobs(h.UserID, time.Time(h.Time), jobs)
|
2019-10-10 17:47:19 +03:00
|
|
|
|
}
|
2019-10-10 00:26:28 +03:00
|
|
|
|
|
2019-10-10 17:47:19 +03:00
|
|
|
|
return nil
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
func generateUserJobs(userId string, lastAggregation time.Time, jobs chan<- *AggregationJob) {
|
|
|
|
|
var from, to time.Time
|
|
|
|
|
end := getStartOfToday().Add(-1 * time.Second)
|
|
|
|
|
|
|
|
|
|
if lastAggregation.Hour() == 0 {
|
|
|
|
|
from = lastAggregation
|
|
|
|
|
} else {
|
|
|
|
|
nextDay := lastAggregation.Add(24 * time.Hour)
|
|
|
|
|
from = time.Date(nextDay.Year(), nextDay.Month(), nextDay.Day(), 0, 0, 0, 0, lastAggregation.Location())
|
2019-10-10 00:26:28 +03:00
|
|
|
|
}
|
|
|
|
|
|
2019-10-10 17:47:19 +03:00
|
|
|
|
for from.Before(end) && to.Before(end) {
|
|
|
|
|
to = from.Add(24 * time.Hour)
|
|
|
|
|
jobs <- &AggregationJob{userId, from, to}
|
|
|
|
|
from = to
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// getStartOfToday returns today's date at 00:00:00.000000001 local time.
// NOTE(review): the single leading nanosecond is preserved from the original
// implementation — callers subtract a second from this value to obtain an
// end-of-yesterday cutoff; confirm the 1 ns offset is intentional.
func getStartOfToday() time.Time {
	ref := time.Now()
	return time.Date(ref.Year(), ref.Month(), ref.Day(), 0, 0, 0, 1, ref.Location())
}
|