refactor: include logging framework (resolve #92)

Ferdinand Mütsch 2021-01-30 11:17:37 +01:00
parent fd239e4f21
commit 617d9ad7e4
16 changed files with 80 additions and 77 deletions
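
In short: the standard library's log package is swapped for github.com/emvi/logbuch throughout, so every message now carries a level (Debug, Info, Warn, Error, Fatal) and verbosity is configured once at startup instead of per call site. The snippet below is a minimal, self-contained sketch of the call pattern this commit introduces; the isDev variable is only a stand-in for Wakapi's config.IsDev() check and is not part of the actual change.

package main

import (
	"errors"

	"github.com/emvi/logbuch"
)

func main() {
	// Stand-in for Wakapi's config.IsDev(); not part of this commit.
	isDev := true

	// Choose the minimum level once at startup, as main.go now does:
	// debug output in development, info and above otherwise.
	if isDev {
		logbuch.SetLevel(logbuch.LevelDebug)
	} else {
		logbuch.SetLevel(logbuch.LevelInfo)
	}

	// Printf-style formatting carries over unchanged, so
	// log.Printf("applied %d fixtures\n", n) becomes:
	logbuch.Info("applied %d fixtures", 42)

	// Errors are logged via their string form; where the old code called
	// log.Fatal, the diff now uses logbuch.Fatal(err.Error()) instead.
	if err := errors.New("could not connect to database"); err != nil {
		logbuch.Error(err.Error())
	}
}

With SetLevel in place, debug output can stay in the code and is simply suppressed outside of development mode.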

View File

@@ -4,6 +4,7 @@ import (
 	"encoding/json"
 	"flag"
 	"fmt"
+	"github.com/emvi/logbuch"
 	"github.com/gorilla/securecookie"
 	"github.com/jinzhu/configor"
 	"github.com/markbates/pkger"
@@ -14,7 +15,6 @@ import (
 	"gorm.io/driver/sqlite"
 	"gorm.io/gorm"
 	"io/ioutil"
-	"log"
 	"net/http"
 	"os"
 	"strings"
@@ -144,7 +144,7 @@ func (c *Config) GetFixturesFunc(dbDialect string) models.MigrationFunc {
 			return err
 		}
-		log.Printf("applied %d fixtures\n", n)
+		logbuch.Info("applied %d fixtures", n)
 		return nil
 	}
 }
@@ -221,13 +221,13 @@ func IsDev(env string) bool {
 func readVersion() string {
 	file, err := pkger.Open("/version.txt")
 	if err != nil {
-		log.Fatal(err)
+		logbuch.Fatal(err.Error())
 	}
 	defer file.Close()
 	bytes, err := ioutil.ReadAll(file)
 	if err != nil {
-		log.Fatal(err)
+		logbuch.Fatal(err.Error())
 	}
 	return strings.TrimSpace(string(bytes))
@@ -246,16 +246,16 @@ func readColors() map[string]map[string]string {
 	file, err := pkger.Open("/data/colors.json")
 	if err != nil {
-		log.Fatal(err)
+		logbuch.Fatal(err.Error())
 	}
 	defer file.Close()
 	bytes, err := ioutil.ReadAll(file)
 	if err != nil {
-		log.Fatal(err)
+		logbuch.Fatal(err.Error())
 	}
 	if err := json.Unmarshal(bytes, &colors); err != nil {
-		log.Fatal(err)
+		logbuch.Fatal(err.Error())
 	}
 	return colors
@@ -263,9 +263,8 @@ func readColors() map[string]map[string]string {
 func mustReadConfigLocation() string {
 	if _, err := os.Stat(*cFlag); err != nil {
-		log.Fatalf("failed to find config file at '%s'\n", *cFlag)
+		logbuch.Fatal("failed to find config file at '%s'", *cFlag)
 	}
 	return *cFlag
 }
@@ -292,7 +291,7 @@ func Load() *Config {
 	maybeMigrateLegacyConfig()
 	if err := configor.New(&configor.Config{}).Load(config, mustReadConfigLocation()); err != nil {
-		log.Fatalf("failed to read config: %v\n", err)
+		logbuch.Fatal("failed to read config: %v", err)
 	}
 	config.Version = readVersion()
@@ -314,7 +313,7 @@ func Load() *Config {
 	}
 	if config.Server.ListenIpV4 == "" && config.Server.ListenIpV6 == "" {
-		log.Fatalln("either of listen_ipv4 or listen_ipv6 must be set")
+		logbuch.Fatal("either of listen_ipv4 or listen_ipv6 must be set")
 	}
 	Set(config)

View File

@@ -1,24 +1,24 @@
 package config
 import (
+	"github.com/emvi/logbuch"
 	"github.com/joho/godotenv"
 	"gopkg.in/ini.v1"
 	"gopkg.in/yaml.v2"
 	"io/ioutil"
-	"log"
 	"os"
 	"strconv"
 )
 func maybeMigrateLegacyConfig() {
 	if yes, err := shouldMigrateLegacyConfig(); err != nil {
-		log.Fatalf("failed to determine whether to migrate legacy config: %v\n", err)
+		logbuch.Fatal("failed to determine whether to migrate legacy config: %v", err)
 	} else if yes {
-		log.Printf("migrating legacy config (%s, %s) to new format (%s); see https://github.com/muety/wakapi/issues/54\n", defaultConfigPathLegacy, defaultEnvConfigPathLegacy, defaultConfigPath)
+		logbuch.Info("migrating legacy config (%s, %s) to new format (%s); see https://github.com/muety/wakapi/issues/54", defaultConfigPathLegacy, defaultEnvConfigPathLegacy, defaultConfigPath)
 		if err := migrateLegacyConfig(); err != nil {
-			log.Fatalf("failed to migrate legacy config: %v\n", err)
+			logbuch.Fatal("failed to migrate legacy config: %v", err)
 		}
-		log.Printf("config migration successful; please delete %s and %s now\n", defaultConfigPathLegacy, defaultEnvConfigPathLegacy)
+		logbuch.Info("config migration successful; please delete %s and %s now", defaultConfigPathLegacy, defaultEnvConfigPathLegacy)
 	}
 }

go.mod (1 change)
View File

@@ -3,6 +3,7 @@ module github.com/muety/wakapi
 go 1.13
 require (
+	github.com/emvi/logbuch v1.1.1
 	github.com/go-co-op/gocron v0.3.3
 	github.com/gorilla/handlers v1.4.2
 	github.com/gorilla/mux v1.7.3

go.sum (2 changes)
View File

@@ -56,6 +56,8 @@ github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5m
 github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU=
 github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I=
 github.com/edsrzf/mmap-go v1.0.0/go.mod h1:YO35OhQPt3KJa3ryjFM5Bs14WD66h8eGKpfaBNrHW5M=
+github.com/emvi/logbuch v1.1.1 h1:poBGNbHy/nB95oNoqLKAaJoBrcKxTO0W9DhMijKEkkU=
+github.com/emvi/logbuch v1.1.1/go.mod h1:J2Wgbr3BuSc1JO+D2MBVh6q3WPVSK5GzktwWz8pvkKw=
 github.com/envoyproxy/go-control-plane v0.6.9/go.mod h1:SBwIajubJHhxtWwsL9s8ss4safvEdbitLhGGK48rN6g=
 github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
 github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=

main.go (31 changes)
View File

@@ -3,13 +3,12 @@ package main
 //go:generate $GOPATH/bin/pkger
 import (
 	"fmt"
+	"github.com/emvi/logbuch"
 	"github.com/gorilla/handlers"
 	"github.com/markbates/pkger"
 	conf "github.com/muety/wakapi/config"
 	"github.com/muety/wakapi/migrations/common"
 	"github.com/muety/wakapi/repositories"
-	"log"
 	"net/http"
 	"strconv"
 	"time"
@@ -57,9 +56,11 @@ var (
 func main() {
 	config = conf.Load()
-	// Enable line numbers in logging
+	// Set log level
 	if config.IsDev() {
-		log.SetFlags(log.LstdFlags | log.Lshortfile)
+		logbuch.SetLevel(logbuch.LevelDebug)
+	} else {
+		logbuch.SetLevel(logbuch.LevelInfo)
 	}
 	// Connect to database
@@ -76,8 +77,8 @@ func main() {
 	sqlDb.SetMaxIdleConns(int(config.Db.MaxConn))
 	sqlDb.SetMaxOpenConns(int(config.Db.MaxConn))
 	if err != nil {
-		log.Println(err)
-		log.Fatal("could not connect to database")
+		logbuch.Error(err.Error())
+		logbuch.Fatal("could not connect to database")
 	}
 	defer sqlDb.Close()
@@ -203,35 +204,35 @@ func listen(handler http.Handler) {
 	if config.UseTLS() {
 		if s4 != nil {
-			fmt.Printf("Listening for HTTPS on %s.\n", s4.Addr)
+			logbuch.Info("--> Listening for HTTPS on %s... ✅", s4.Addr)
 			go func() {
 				if err := s4.ListenAndServeTLS(config.Server.TlsCertPath, config.Server.TlsKeyPath); err != nil {
-					log.Fatalln(err)
+					logbuch.Fatal(err.Error())
 				}
 			}()
 		}
 		if s6 != nil {
-			fmt.Printf("Listening for HTTPS on %s.\n", s6.Addr)
+			logbuch.Info("--> Listening for HTTPS on %s... ✅", s6.Addr)
 			go func() {
 				if err := s6.ListenAndServeTLS(config.Server.TlsCertPath, config.Server.TlsKeyPath); err != nil {
-					log.Fatalln(err)
+					logbuch.Fatal(err.Error())
 				}
 			}()
 		}
 	} else {
 		if s4 != nil {
-			fmt.Printf("Listening for HTTP on %s.\n", s4.Addr)
+			logbuch.Info("--> Listening for HTTP on %s... ✅", s4.Addr)
 			go func() {
 				if err := s4.ListenAndServe(); err != nil {
-					log.Fatalln(err)
+					logbuch.Fatal(err.Error())
 				}
 			}()
 		}
 		if s6 != nil {
-			fmt.Printf("Listening for HTTP on %s.\n", s6.Addr)
+			logbuch.Info("--> Listening for HTTP on %s... ✅", s6.Addr)
 			go func() {
 				if err := s6.ListenAndServe(); err != nil {
-					log.Fatalln(err)
+					logbuch.Fatal(err.Error())
 				}
 			}()
 		}
@@ -242,6 +243,6 @@ func listen(handler http.Handler) {
 func runDatabaseMigrations() {
 	if err := config.GetMigrationFunc(config.Db.Dialect)(db); err != nil {
-		log.Fatal(err)
+		logbuch.Fatal(err.Error())
 	}
 }

View File

@@ -4,11 +4,11 @@ import (
 	"context"
 	"errors"
 	"fmt"
+	"github.com/emvi/logbuch"
 	conf "github.com/muety/wakapi/config"
 	"github.com/muety/wakapi/models"
 	"github.com/muety/wakapi/services"
 	"github.com/muety/wakapi/utils"
-	"log"
 	"net/http"
 	"strings"
 )
@@ -99,7 +99,7 @@ func (m *AuthenticateMiddleware) tryGetUserByCookie(r *http.Request) (*models.Us
 func CheckAndMigratePassword(user *models.User, login *models.Login, salt string, userServiceRef *services.IUserService) bool {
 	if utils.IsMd5(user.Password) {
 		if utils.CompareMd5(user.Password, login.Password, "") {
-			log.Printf("migrating old md5 password to new bcrypt format for user '%s'", user.ID)
+			logbuch.Info("migrating old md5 password to new bcrypt format for user '%s'", user.ID)
 			(*userServiceRef).MigrateMd5Password(user, login)
 			return true
 		}

View File

@@ -4,11 +4,11 @@ import (
 	"bytes"
 	"encoding/base64"
 	"fmt"
+	"github.com/emvi/logbuch"
 	"github.com/muety/wakapi/config"
 	"github.com/muety/wakapi/models"
 	"io"
 	"io/ioutil"
-	"log"
 	"net/http"
 	"time"
 )
@@ -72,7 +72,7 @@ func (m *WakatimeRelayMiddleware) ServeHTTP(w http.ResponseWriter, r *http.Reque
 func (m *WakatimeRelayMiddleware) send(method, url string, body io.Reader, headers http.Header) {
 	request, err := http.NewRequest(method, url, body)
 	if err != nil {
-		log.Printf("error constructing relayed request %v\n", err)
+		logbuch.Warn("error constructing relayed request %v", err)
 		return
 	}
@@ -84,11 +84,11 @@ func (m *WakatimeRelayMiddleware) send(method, url string, body io.Reader, heade
 	response, err := m.httpClient.Do(request)
 	if err != nil {
-		log.Printf("error executing relayed request %v\n", err)
+		logbuch.Warn("error executing relayed request %v", err)
 		return
 	}
 	if response.StatusCode < 200 || response.StatusCode >= 300 {
-		log.Printf("failed to relay request, got status %d\n", response.StatusCode)
+		logbuch.Warn("failed to relay request, got status %d", response.StatusCode)
 	}
 }

View File

@@ -1,9 +1,9 @@
 package common
 import (
+	"github.com/emvi/logbuch"
 	"github.com/muety/wakapi/config"
 	"gorm.io/gorm"
-	"log"
 )
 var customPostMigrations []migrationFunc
@@ -22,9 +22,9 @@ func init() {
 func RunCustomPostMigrations(db *gorm.DB, cfg *config.Config) {
 	for _, m := range customPostMigrations {
-		log.Printf("potentially running migration '%s'\n", m.name)
+		logbuch.Info("potentially running migration '%s'", m.name)
 		if err := m.f(db, cfg); err != nil {
-			log.Fatalf("migration '%s' failed %v\n", m.name, err)
+			logbuch.Fatal("migration '%s' failed %v", m.name, err)
 		}
 	}
 }

View File

@@ -1,10 +1,10 @@
 package common
 import (
+	"github.com/emvi/logbuch"
 	"github.com/muety/wakapi/config"
 	"github.com/muety/wakapi/models"
 	"gorm.io/gorm"
-	"log"
 )
 var customPreMigrations []migrationFunc
@@ -18,12 +18,12 @@ func init() {
 	oldIndexName, newIndexName := "idx_customrule_user", "idx_language_mapping_user"
 	if migrator.HasTable(oldTableName) {
-		log.Printf("renaming '%s' table to '%s'\n", oldTableName, newTableName)
+		logbuch.Info("renaming '%s' table to '%s'", oldTableName, newTableName)
 		if err := migrator.RenameTable(oldTableName, &models.LanguageMapping{}); err != nil {
 			return err
 		}
-		log.Printf("renaming '%s' index to '%s'\n", oldIndexName, newIndexName)
+		logbuch.Info("renaming '%s' index to '%s'", oldIndexName, newIndexName)
 		return migrator.RenameIndex(&models.LanguageMapping{}, oldIndexName, newIndexName)
 	}
 	return nil
@@ -42,12 +42,12 @@ func init() {
 		// https://stackoverflow.com/a/1884893/3112139
 		// unfortunately, we can't migrate existing sqlite databases to the newly introduced cascade settings
 		// things like deleting all summaries won't work in those cases unless an entirely new db is created
-		log.Println("not attempting to drop and regenerate constraints on sqlite")
+		logbuch.Info("not attempting to drop and regenerate constraints on sqlite")
 		return nil
 	}
 	if !migrator.HasTable(&models.KeyStringValue{}) {
-		log.Println("key-value table not yet existing")
+		logbuch.Info("key-value table not yet existing")
 		return nil
 	}
@@ -57,7 +57,7 @@ func init() {
 	}
 	lookupResult := db.Where(condition, lookupKey).First(&models.KeyStringValue{})
 	if lookupResult.Error == nil && lookupResult.RowsAffected > 0 {
-		log.Println("no need to migrate cascade constraints")
+		logbuch.Info("no need to migrate cascade constraints")
 		return nil
 	}
@@ -77,7 +77,7 @@ func init() {
 	for name, table := range constraints {
 		if migrator.HasConstraint(table, name) {
-			log.Printf("dropping constraint '%s'", name)
+			logbuch.Info("dropping constraint '%s'", name)
 			if err := migrator.DropConstraint(table, name); err != nil {
 				return err
 			}
@@ -100,9 +100,9 @@ func init() {
 func RunCustomPreMigrations(db *gorm.DB, cfg *config.Config) {
 	for _, m := range customPreMigrations {
-		log.Printf("potentially running migration '%s'\n", m.name)
+		logbuch.Info("potentially running migration '%s'", m.name)
 		if err := m.f(db, cfg); err != nil {
-			log.Fatalf("migration '%s' failed %v\n", m.name, err)
+			logbuch.Fatal("migration '%s' failed %v", m.name, err)
 		}
 	}
 }

View File

@@ -1,10 +1,10 @@
 package common
 import (
+	"github.com/emvi/logbuch"
 	"github.com/muety/wakapi/config"
 	"github.com/muety/wakapi/models"
 	"gorm.io/gorm"
-	"log"
 )
 func MigrateLanguages(db *gorm.DB) {
@@ -16,10 +16,10 @@ func MigrateLanguages(db *gorm.DB) {
 			Where("entity LIKE ?", "%."+k).
 			Updates(models.Heartbeat{Language: v})
 		if result.Error != nil {
-			log.Fatal(result.Error)
+			logbuch.Fatal(result.Error.Error())
 		}
 		if result.RowsAffected > 0 {
-			log.Printf("Migrated %+v rows for custom language %+s.\n", result.RowsAffected, k)
+			logbuch.Info("migrated %+v rows for custom language %+s", result.RowsAffected, k)
 		}
 	}
 }

View File

@@ -2,10 +2,10 @@ package repositories
 import (
 	"errors"
+	"github.com/emvi/logbuch"
 	"github.com/muety/wakapi/models"
 	"gorm.io/gorm"
 	"gorm.io/gorm/clause"
-	"log"
 )
 type KeyValueRepository struct {
@@ -41,7 +41,7 @@ func (r *KeyValueRepository) PutString(kv *models.KeyStringValue) error {
 	}
 	if result.RowsAffected != 1 {
-		log.Printf("warning: did not insert key '%s', maybe just updated?\n", kv.Key)
+		logbuch.Warn("did not insert key '%s', maybe just updated?", kv.Key)
 	}
 	return nil

View File

@@ -3,6 +3,7 @@ package routes
 import (
 	"encoding/base64"
 	"fmt"
+	"github.com/emvi/logbuch"
 	"github.com/gorilla/mux"
 	"github.com/gorilla/schema"
 	conf "github.com/muety/wakapi/config"
@@ -10,7 +11,6 @@ import (
 	"github.com/muety/wakapi/models/view"
 	"github.com/muety/wakapi/services"
 	"github.com/muety/wakapi/utils"
-	"log"
 	"net/http"
 	"strconv"
 	"time"
@@ -298,16 +298,16 @@ func (h *SettingsHandler) PostRegenerateSummaries(w http.ResponseWriter, r *http
 	user := r.Context().Value(models.UserKey).(*models.User)
-	log.Printf("clearing summaries for user '%s'\n", user.ID)
+	logbuch.Info("clearing summaries for user '%s'", user.ID)
 	if err := h.summarySrvc.DeleteByUser(user.ID); err != nil {
-		log.Printf("failed to clear summaries: %v\n", err)
+		logbuch.Error("failed to clear summaries: %v", err)
 		w.WriteHeader(http.StatusInternalServerError)
 		templates[conf.SettingsTemplate].Execute(w, h.buildViewModel(r).WithError("failed to delete old summaries"))
 		return
 	}
 	if err := h.aggregationSrvc.Run(map[string]bool{user.ID: true}); err != nil {
-		log.Printf("failed to regenerate summaries: %v\n", err)
+		logbuch.Error("failed to regenerate summaries: %v", err)
 		w.WriteHeader(http.StatusInternalServerError)
 		templates[conf.SettingsTemplate].Execute(w, h.buildViewModel(r).WithError("failed to generate aggregations"))
 		return

View File

@@ -1,8 +1,8 @@
 package services
 import (
+	"github.com/emvi/logbuch"
 	"github.com/muety/wakapi/config"
-	"log"
 	"runtime"
 	"time"
@@ -40,7 +40,7 @@ type AggregationJob struct {
 func (srv *AggregationService) Schedule() {
 	// Run once initially
 	if err := srv.Run(nil); err != nil {
-		log.Fatalf("failed to run AggregationJob: %v\n", err)
+		logbuch.Fatal("failed to run AggregationJob: %v", err)
 	}
 	s := gocron.NewScheduler(time.Local)
@@ -73,9 +73,9 @@ func (srv *AggregationService) Run(userIds map[string]bool) error {
 func (srv *AggregationService) summaryWorker(jobs <-chan *AggregationJob, summaries chan<- *models.Summary) {
 	for job := range jobs {
 		if summary, err := srv.summaryService.Summarize(job.From, job.To, &models.User{ID: job.UserID}); err != nil {
-			log.Printf("Failed to generate summary (%v, %v, %s) %v.\n", job.From, job.To, job.UserID, err)
+			logbuch.Error("failed to generate summary (%v, %v, %s) %v", job.From, job.To, job.UserID, err)
 		} else {
-			log.Printf("Successfully generated summary (%v, %v, %s).\n", job.From, job.To, job.UserID)
+			logbuch.Info("successfully generated summary (%v, %v, %s)", job.From, job.To, job.UserID)
 			summaries <- summary
 		}
 	}
@@ -84,17 +84,17 @@ func (srv *AggregationService) summaryWorker(jobs <-chan *AggregationJob, summar
 func (srv *AggregationService) persistWorker(summaries <-chan *models.Summary) {
 	for summary := range summaries {
 		if err := srv.summaryService.Insert(summary); err != nil {
-			log.Printf("Failed to save summary (%v, %v, %s) %v.\n", summary.UserID, summary.FromTime, summary.ToTime, err)
+			logbuch.Error("failed to save summary (%v, %v, %s) %v", summary.UserID, summary.FromTime, summary.ToTime, err)
 		}
 	}
 }
 func (srv *AggregationService) trigger(jobs chan<- *AggregationJob, userIds map[string]bool) error {
-	log.Println("Generating summaries.")
+	logbuch.Info("generating summaries")
 	var users []*models.User
 	if allUsers, err := srv.userService.GetAll(); err != nil {
-		log.Println(err)
+		logbuch.Error(err.Error())
 		return err
 	} else if userIds != nil && len(userIds) > 0 {
 		users = make([]*models.User, len(userIds))
@@ -110,14 +110,14 @@ func (srv *AggregationService) trigger(jobs chan<- *AggregationJob, userIds map[
 	// Get a map from user ids to the time of their latest summary or nil if none exists yet
 	lastUserSummaryTimes, err := srv.summaryService.GetLatestByUser()
 	if err != nil {
-		log.Println(err)
+		logbuch.Error(err.Error())
 		return err
 	}
 	// Get a map from user ids to the time of their earliest heartbeats or nil if none exists yet
 	firstUserHeartbeatTimes, err := srv.heartbeatService.GetFirstByUsers()
 	if err != nil {
-		log.Println(err)
+		logbuch.Error(err.Error())
 		return err
 	}

View File

@@ -2,10 +2,10 @@ package services
 import (
 	"errors"
+	"github.com/emvi/logbuch"
 	"github.com/muety/wakapi/config"
 	"github.com/muety/wakapi/models"
 	"github.com/muety/wakapi/repositories"
-	"log"
 	"sync"
 )
@@ -110,6 +110,6 @@ func (srv *AliasService) DeleteMulti(aliases []*models.Alias) error {
 func (srv *AliasService) reinitUser(userId string) {
 	if err := srv.InitializeUser(userId); err != nil {
-		log.Printf("error initializing user aliases %v\n", err)
+		logbuch.Error("error initializing user aliases %v", err)
 	}
 }

View File

@@ -1,9 +1,9 @@
 package services
 import (
+	"github.com/emvi/logbuch"
 	"github.com/muety/wakapi/config"
 	"go.uber.org/atomic"
-	"log"
 	"runtime"
 	"strconv"
 	"time"
@@ -42,7 +42,7 @@ type CountTotalTimeResult struct {
 func (srv *MiscService) ScheduleCountTotalTime() {
 	// Run once initially
 	if err := srv.runCountTotalTime(); err != nil {
-		log.Fatalf("failed to run CountTotalTimeJob: %v\n", err)
+		logbuch.Error("failed to run CountTotalTimeJob: %v", err)
 	}
 	s := gocron.NewScheduler(time.Local)
@@ -80,9 +80,9 @@ func (srv *MiscService) runCountTotalTime() error {
 func (srv *MiscService) countTotalTimeWorker(jobs <-chan *CountTotalTimeJob, results chan<- *CountTotalTimeResult) {
 	for job := range jobs {
 		if result, err := srv.summaryService.Aliased(time.Time{}, time.Now(), &models.User{ID: job.UserID}, srv.summaryService.Retrieve); err != nil {
-			log.Printf("Failed to count total for user %s: %v.\n", job.UserID, err)
+			logbuch.Error("failed to count total for user %s: %v", job.UserID, err)
 		} else {
-			log.Printf("Successfully counted total for user %s.\n", job.UserID)
+			logbuch.Info("successfully counted total for user %s", job.UserID)
 			results <- &CountTotalTimeResult{
 				UserId: job.UserID,
 				Total: result.TotalTime(),
@@ -107,13 +107,13 @@ func (srv *MiscService) persistTotalTimeWorker(results <-chan *CountTotalTimeRes
 		Key: config.KeyLatestTotalTime,
 		Value: total.String(),
 	}); err != nil {
-		log.Printf("Failed to save total time count: %v\n", err)
+		logbuch.Error("failed to save total time count: %v", err)
 	}
 	if err := srv.keyValueService.PutString(&models.KeyStringValue{
 		Key: config.KeyLatestTotalUsers,
 		Value: strconv.Itoa(c),
 	}); err != nil {
-		log.Printf("Failed to save total users count: %v\n", err)
+		logbuch.Error("failed to save total users count: %v", err)
 	}
 }

View File

@@ -2,7 +2,7 @@ package utils
 import (
 	"encoding/json"
-	"log"
+	"github.com/emvi/logbuch"
 	"net/http"
 )
@@ -10,6 +10,6 @@ func RespondJSON(w http.ResponseWriter, status int, object interface{}) {
 	w.Header().Set("Content-Type", "application/json")
 	w.WriteHeader(status)
 	if err := json.NewEncoder(w).Encode(object); err != nil {
-		log.Printf("error while writing json response: %v", err)
+		logbuch.Error("error while writing json response: %v", err)
 	}
 }