Mirror of https://github.com/muety/wakapi.git (synced 2023-08-10 21:12:56 +03:00)
refactor: include generics based utility lib and refactor some parts accordingly [ci-skip]
@@ -2,6 +2,7 @@ package services
 
 import (
 	"errors"
+	datastructure "github.com/duke-git/lancet/v2/datastructure/set"
 	"github.com/emvi/logbuch"
 	"github.com/muety/wakapi/config"
 	"runtime"
@@ -23,7 +24,7 @@ type AggregationService struct {
 	userService      IUserService
 	summaryService   ISummaryService
 	heartbeatService IHeartbeatService
-	inProgress       map[string]bool
+	inProgress       datastructure.Set[string]
 }
 
 func NewAggregationService(userService IUserService, summaryService ISummaryService, heartbeatService IHeartbeatService) *AggregationService {
@@ -32,7 +33,7 @@ func NewAggregationService(userService IUserService, summaryService ISummaryServ
 		userService:      userService,
 		summaryService:   summaryService,
 		heartbeatService: heartbeatService,
-		inProgress:       map[string]bool{},
+		inProgress:       datastructure.NewSet[string](),
 	}
 }
 
@@ -45,16 +46,16 @@ type AggregationJob struct {
 // Schedule a job to (re-)generate summaries every day shortly after midnight
 func (srv *AggregationService) Schedule() {
 	// Run once initially
-	if err := srv.Run(nil); err != nil {
+	if err := srv.Run(datastructure.NewSet[string]()); err != nil {
 		logbuch.Fatal("failed to run AggregationJob: %v", err)
 	}
 
 	s := gocron.NewScheduler(time.Local)
-	s.Every(1).Day().At(srv.config.App.AggregationTime).Do(srv.Run, map[string]bool{})
+	s.Every(1).Day().At(srv.config.App.AggregationTime).Do(srv.Run, datastructure.NewSet[string]())
 	s.StartBlocking()
 }
 
-func (srv *AggregationService) Run(userIds map[string]bool) error {
+func (srv *AggregationService) Run(userIds datastructure.Set[string]) error {
 	if err := srv.lockUsers(userIds); err != nil {
 		return err
 	}
@@ -100,24 +101,9 @@ func (srv *AggregationService) persistWorker(summaries <-chan *models.Summary) {
 	}
 }
 
-func (srv *AggregationService) trigger(jobs chan<- *AggregationJob, userIds map[string]bool) error {
+func (srv *AggregationService) trigger(jobs chan<- *AggregationJob, userIds datastructure.Set[string]) error {
 	logbuch.Info("generating summaries")
 
-	var users []*models.User
-	if allUsers, err := srv.userService.GetAll(); err != nil {
-		config.Log().Error(err.Error())
-		return err
-	} else if userIds != nil && len(userIds) > 0 {
-		users = make([]*models.User, 0)
-		for _, u := range allUsers {
-			if yes, ok := userIds[u.ID]; yes && ok {
-				users = append(users, u)
-			}
-		}
-	} else {
-		users = allUsers
-	}
-
 	// Get a map from user ids to the time of their latest summary or nil if none exists yet
 	lastUserSummaryTimes, err := srv.summaryService.GetLatestByUser()
 	if err != nil {
@@ -140,6 +126,10 @@ func (srv *AggregationService) trigger(jobs chan<- *AggregationJob, userIds map[
 
 	// Generate summary aggregation jobs
 	for _, e := range lastUserSummaryTimes {
+		if userIds != nil && !userIds.IsEmpty() && !userIds.Contain(e.User) {
+			continue
+		}
+
 		if e.Time.Valid() {
 			// Case 1: User has aggregated summaries already
 			// -> Spawn jobs to create summaries from their latest aggregation to now
@@ -156,25 +146,23 @@ func (srv *AggregationService) trigger(jobs chan<- *AggregationJob, userIds map[
 	return nil
 }
 
-func (srv *AggregationService) lockUsers(userIds map[string]bool) error {
+func (srv *AggregationService) lockUsers(userIds datastructure.Set[string]) error {
 	aggregationLock.Lock()
 	defer aggregationLock.Unlock()
 	for uid := range userIds {
-		if _, ok := srv.inProgress[uid]; ok {
+		if srv.inProgress.Contain(uid) {
 			return errors.New("aggregation already in progress for at least of the request users")
 		}
 	}
-	for uid := range userIds {
-		srv.inProgress[uid] = true
-	}
+	srv.inProgress = srv.inProgress.Union(userIds)
 	return nil
 }
 
-func (srv *AggregationService) unlockUsers(userIds map[string]bool) {
+func (srv *AggregationService) unlockUsers(userIds datastructure.Set[string]) {
 	aggregationLock.Lock()
 	defer aggregationLock.Unlock()
 	for uid := range userIds {
-		delete(srv.inProgress, uid)
+		srv.inProgress.Delete(uid)
 	}
 }
 
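The AggregationService hunks above swap every map[string]bool pseudo-set for lancet's generic Set[string]. The following standalone sketch (not part of the commit) exercises only the operations visible in this diff — NewSet, Add, Contain, Union, Delete, Values, IsEmpty — with made-up user IDs; library behaviour beyond what the diff shows is assumed.

```go
package main

import (
	"fmt"

	datastructure "github.com/duke-git/lancet/v2/datastructure/set"
)

func main() {
	// Empty set, as NewAggregationService now initializes inProgress.
	inProgress := datastructure.NewSet[string]()

	// Add/Contain replace `m[k] = true` and `_, ok := m[k]`.
	inProgress.Add("alice")
	fmt.Println(inProgress.Contain("alice")) // true
	fmt.Println(inProgress.Contain("bob"))   // false

	// Union replaces the manual key-copy loop that lockUsers used to run.
	requested := datastructure.NewSet("bob", "carol") // variadic form, as used for the entity cache
	inProgress = inProgress.Union(requested)

	// Delete replaces delete(m, k) in unlockUsers; Values flattens back to a slice.
	inProgress.Delete("alice")
	fmt.Println(inProgress.Values())  // order not guaranteed
	fmt.Println(inProgress.IsEmpty()) // false
}
```
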
@@ -3,6 +3,7 @@ package services
 import (
 	"errors"
 	"fmt"
+	datastructure "github.com/duke-git/lancet/v2/datastructure/set"
 	"github.com/emvi/logbuch"
 	"github.com/muety/wakapi/config"
 	"github.com/muety/wakapi/models"
@@ -117,12 +118,13 @@ func (srv *AliasService) Delete(alias *models.Alias) error {
 
 func (srv *AliasService) DeleteMulti(aliases []*models.Alias) error {
 	ids := make([]uint, len(aliases))
-	affectedUsers := make(map[string]bool)
+	affectedUsers := datastructure.NewSet[string]()
 
 	for i, a := range aliases {
 		if a.UserID == "" {
 			return errors.New("no user id specified")
 		}
-		affectedUsers[a.UserID] = true
+		affectedUsers.Add(a.UserID)
 		ids[i] = a.ID
 	}
 
@@ -2,10 +2,10 @@ package services
 
 import (
 	"fmt"
+	datastructure "github.com/duke-git/lancet/v2/datastructure/set"
 	"github.com/leandro-lugaresi/hub"
 	"github.com/muety/wakapi/config"
 	"github.com/muety/wakapi/repositories"
 	"github.com/muety/wakapi/utils"
 	"github.com/patrickmn/go-cache"
 	"strings"
 	"sync"
@@ -54,14 +54,14 @@ func (srv *HeartbeatService) Insert(heartbeat *models.Heartbeat) error {
 }
 
 func (srv *HeartbeatService) InsertBatch(heartbeats []*models.Heartbeat) error {
-	hashes := make(map[string]bool)
+	hashes := datastructure.NewSet[string]()
 
 	// https://github.com/muety/wakapi/issues/139
 	filteredHeartbeats := make([]*models.Heartbeat, 0, len(heartbeats))
 	for _, hb := range heartbeats {
-		if _, ok := hashes[hb.Hash]; !ok {
+		if !hashes.Contain(hb.Hash) {
 			filteredHeartbeats = append(filteredHeartbeats, hb)
-			hashes[hb.Hash] = true
+			hashes.Add(hb.Hash)
 		}
 		go srv.updateEntityUserCacheByHeartbeat(hb)
 	}
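
The InsertBatch hunk is the classic dedup-by-hash idiom rewritten against the set type. A condensed sketch of just that idiom follows, using only the Contain/Add calls shown above; the dedup helper and its inputs are hypothetical, not wakapi's heartbeat types.

```go
package main

import (
	"fmt"

	datastructure "github.com/duke-git/lancet/v2/datastructure/set"
)

// dedup keeps the first occurrence of each key, mirroring how InsertBatch
// filters heartbeats by hash (function and variable names are made up).
func dedup(keys []string) []string {
	seen := datastructure.NewSet[string]()
	unique := make([]string, 0, len(keys))
	for _, k := range keys {
		if !seen.Contain(k) { // was: if _, ok := seen[k]; !ok
			unique = append(unique, k)
			seen.Add(k) // was: seen[k] = true
		}
	}
	return unique
}

func main() {
	fmt.Println(dedup([]string{"a", "b", "a", "c", "b"})) // [a b c]
}
```
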
@@ -159,7 +159,7 @@ func (srv *HeartbeatService) GetEntitySetByUser(entityType uint8, user *models.U
 	if results, found := srv.cache.Get(cacheKey); found {
 		srv.entityCacheLock.RLock()
 		defer srv.entityCacheLock.RUnlock()
-		return utils.SetToStrings(results.(map[string]bool)), nil
+		return results.(datastructure.Set[string]).Values(), nil
 	}
 
 	results, err := srv.repository.GetEntitySetByUser(entityType, user)
@@ -174,7 +174,7 @@ func (srv *HeartbeatService) GetEntitySetByUser(entityType uint8, user *models.U
 		}
 	}
 
-	srv.cache.Set(cacheKey, utils.StringsToSet(filtered), cache.NoExpiration)
+	srv.cache.Set(cacheKey, datastructure.NewSet(filtered...), cache.NoExpiration)
 	return filtered, nil
 }
 
@@ -208,13 +208,13 @@ func (srv *HeartbeatService) getEntityUserCacheKey(entityType uint8, user *model
 func (srv *HeartbeatService) updateEntityUserCache(entityType uint8, entityKey string, user *models.User) {
 	cacheKey := srv.getEntityUserCacheKey(entityType, user)
 	if entities, found := srv.cache.Get(cacheKey); found {
-		entitySet := entities.(map[string]bool)
+		entitySet := entities.(datastructure.Set[string])
 
 		srv.entityCacheLock.Lock()
 		defer srv.entityCacheLock.Unlock()
 
-		if _, ok := entitySet[entityKey]; !ok {
-			entitySet[entityKey] = true
+		if !entitySet.Contain(entityKey) {
+			entitySet.Add(entityKey)
 			// new project / language / ..., which is not yet present in cache, arrived as part of a heartbeats
 			// -> update cache instead of just invalidating it, because rebuilding is expensive here
 			srv.cache.Set(cacheKey, entitySet, cache.NoExpiration)
 
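The GetEntitySetByUser and updateEntityUserCache hunks now store the Set[string] itself in go-cache and type-assert it back on read, instead of converting through utils.SetToStrings/StringsToSet. A rough sketch of that round trip, with an invented cache key and entity names:

```go
package main

import (
	"fmt"
	"time"

	datastructure "github.com/duke-git/lancet/v2/datastructure/set"
	"github.com/patrickmn/go-cache"
)

func main() {
	c := cache.New(cache.NoExpiration, 10*time.Minute)

	// Cache the set directly; no conversion to/from map[string]bool is needed anymore.
	entities := datastructure.NewSet("projectA", "projectB")
	c.Set("entities_user_1", entities, cache.NoExpiration)

	// On read, assert back to the set type, like results.(datastructure.Set[string]) above.
	if cached, found := c.Get("entities_user_1"); found {
		entitySet := cached.(datastructure.Set[string])
		if !entitySet.Contain("projectC") {
			entitySet.Add("projectC") // update the cached set in place instead of invalidating it
			c.Set("entities_user_1", entitySet, cache.NoExpiration)
		}
		fmt.Println(entitySet.Values())
	}
}
```
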
@@ -7,6 +7,7 @@ import (
 	"github.com/muety/wakapi/models"
 	"github.com/muety/wakapi/repositories"
 	"github.com/patrickmn/go-cache"
+	"github.com/samber/lo"
 	"time"
 )
 
@@ -45,38 +46,26 @@ func (srv *ProjectLabelService) GetByUser(userId string) ([]*models.ProjectLabel
 
 // GetByUserGrouped returns lists of project labels, grouped by their project key
 func (srv *ProjectLabelService) GetByUserGrouped(userId string) (map[string][]*models.ProjectLabel, error) {
-	labelsByProject := make(map[string][]*models.ProjectLabel)
 	userLabels, err := srv.GetByUser(userId)
 	if err != nil {
 		return nil, err
 	}
-
-	for _, l := range userLabels {
-		if _, ok := labelsByProject[l.ProjectKey]; !ok {
-			labelsByProject[l.ProjectKey] = []*models.ProjectLabel{l}
-		} else {
-			labelsByProject[l.ProjectKey] = append(labelsByProject[l.ProjectKey], l)
-		}
-	}
-	return labelsByProject, nil
+	mappedLabels := lo.GroupBy[*models.ProjectLabel, string](userLabels, func(l *models.ProjectLabel) string {
+		return l.ProjectKey
+	})
+	return mappedLabels, nil
 }
 
 // GetByUserGroupedInverted returns lists of project labels, grouped by their label key
 func (srv *ProjectLabelService) GetByUserGroupedInverted(userId string) (map[string][]*models.ProjectLabel, error) {
-	projectsByLabel := make(map[string][]*models.ProjectLabel)
 	userLabels, err := srv.GetByUser(userId)
 	if err != nil {
 		return nil, err
 	}
-
-	for _, l := range userLabels {
-		if _, ok := projectsByLabel[l.Label]; !ok {
-			projectsByLabel[l.Label] = []*models.ProjectLabel{l}
-		} else {
-			projectsByLabel[l.Label] = append(projectsByLabel[l.Label], l)
-		}
-	}
-	return projectsByLabel, nil
+	mappedLabels := lo.GroupBy[*models.ProjectLabel, string](userLabels, func(l *models.ProjectLabel) string {
+		return l.Label
+	})
+	return mappedLabels, nil
 }
 
 func (srv *ProjectLabelService) Create(label *models.ProjectLabel) (*models.ProjectLabel, error) {
 
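The ProjectLabelService hunks replace two hand-written "append into a map of slices" loops with lo.GroupBy from samber/lo. A minimal sketch of the same call against a stand-in struct; the label type and sample data here are invented, not wakapi's models.ProjectLabel.

```go
package main

import (
	"fmt"

	"github.com/samber/lo"
)

// label stands in for models.ProjectLabel, for illustration only.
type label struct {
	ProjectKey string
	Label      string
}

func main() {
	labels := []*label{
		{ProjectKey: "wakapi", Label: "go"},
		{ProjectKey: "wakapi", Label: "backend"},
		{ProjectKey: "anchr", Label: "js"},
	}

	// GroupBy builds the map[string][]*label the removed loops assembled by hand;
	// the callback picks the grouping key (ProjectKey here, Label for the inverted variant).
	byProject := lo.GroupBy[*label, string](labels, func(l *label) string {
		return l.ProjectKey
	})

	fmt.Println(len(byProject["wakapi"]), len(byProject["anchr"])) // 2 1
}
```
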
@@ -1,13 +1,14 @@
 package services
 
 import (
+	datastructure "github.com/duke-git/lancet/v2/datastructure/set"
 	"github.com/muety/wakapi/models"
 	"time"
 )
 
 type IAggregationService interface {
 	Schedule()
-	Run(map[string]bool) error
+	Run(set datastructure.Set[string]) error
 }
 
 type IMiscService interface {