package main

/*
A script to migrate Wakapi data from SQLite to MySQL or Postgres.

Usage:
---
1. Set up an empty MySQL or Postgres database (see docker_[mysql|postgres].sh for an example)
2. Create a migration config file (e.g. config.yml) as shown below
3. go run sqlite2mysql.go -config config.yml

Example: config.yml
---
source:
  name: ../wakapi_db.db

# MySQL / Postgres
target:
  host:
  port:
  user:
  password:
  name:
  dialect:
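
For illustration only, a filled-in target section for a local MySQL instance could
look like the following (all values are placeholders, adjust them to your setup;
dialect may be "mysql" or "postgres", defaulting to "mysql"):

target:
  host: 127.0.0.1
  port: 3306
  user: wakapi
  password: changeme
  name: wakapi
  dialect: mysql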

Troubleshooting:
---
- Check https://wiki.postgresql.org/wiki/Fixing_Sequences in case of errors with Postgres
- Check https://github.com/muety/wakapi/pull/181#issue-621585477 for further details on the Postgres migration
*/

import (
	"flag"
	"fmt"
	"log"
	"os"

	"github.com/jinzhu/configor"
	"github.com/muety/wakapi/models"
	"github.com/muety/wakapi/repositories"
	"gorm.io/driver/mysql"
	"gorm.io/driver/postgres"
	"gorm.io/driver/sqlite"
	"gorm.io/gorm"
)
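
// config couples the SQLite source database with the MySQL/Postgres target settings.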
type config struct {
	Source dbConfig // sqlite
	Target dbConfig // mysql / postgres
}
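
// dbConfig mirrors a source/target block of the YAML migration config.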
type dbConfig struct {
	Host     string
	Port     uint
	User     string
	Password string
	Name     string
	Dialect  string `default:"mysql"`
}
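
// InsertBatchSize is the number of heartbeats written per batch insert into the target database.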
const InsertBatchSize = 100

var cfg *config
var dbSource, dbTarget *gorm.DB
var cFlag *string
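
// init parses the -config flag, loads the migration config and opens the
// source (SQLite) and target (MySQL or Postgres) database connections.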
func init() {
	cfg = &config{}

	// reuse an already registered -config flag if present, otherwise define it here
	if f := flag.Lookup("config"); f == nil {
		cFlag = flag.String("config", "sqlite2mysql.yml", "config file location")
	} else {
		ff := f.Value.(flag.Getter).Get().(string)
		cFlag = &ff
	}

	flag.Parse()

	if err := configor.New(&configor.Config{}).Load(cfg, mustConfigPath()); err != nil {
		log.Fatalln("failed to read config", err)
	}

	log.Println("attempting to open sqlite database as source")
	if db, err := gorm.Open(sqlite.Open(cfg.Source.Name), &gorm.Config{}); err != nil {
		log.Fatalln(err)
	} else {
		dbSource = db
	}

	if cfg.Target.Dialect == "postgres" {
		log.Println("attempting to open postgresql database as target")
		if db, err := gorm.Open(postgres.Open(fmt.Sprintf("user=%s password=%s host=%s port=%d dbname=%s sslmode=disable timezone=Europe/Berlin",
			cfg.Target.User,
			cfg.Target.Password,
			cfg.Target.Host,
			cfg.Target.Port,
			cfg.Target.Name,
		)), &gorm.Config{}); err != nil {
			log.Fatalln(err)
		} else {
			dbTarget = db
		}
	} else {
		log.Println("attempting to open mysql database as target")
		if db, err := gorm.Open(mysql.New(mysql.Config{
			DriverName: "mysql",
			DSN: fmt.Sprintf("%s:%s@tcp(%s:%d)/%s?charset=%s&parseTime=true&loc=%s&sql_mode=ANSI_QUOTES",
				cfg.Target.User,
				cfg.Target.Password,
				cfg.Target.Host,
				cfg.Target.Port,
				cfg.Target.Name,
				"utf8mb4",
				"Local",
			),
		}), &gorm.Config{}); err != nil {
			log.Fatalln(err)
		} else {
			dbTarget = db
		}
	}
}
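
// destroy closes the underlying database connections of both source and target.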
func destroy() {
	if db, _ := dbSource.DB(); db != nil {
		db.Close()
	}
	if db, _ := dbTarget.DB(); db != nil {
		db.Close()
	}
}
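
// main creates the target schema and then copies all entities over in order:
// key-value pairs, users, language mappings, aliases, summaries and heartbeats.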
func main() {
	defer destroy()
	if err := createSchema(); err != nil {
		log.Fatalln(err)
	}

	keyValueSource := repositories.NewKeyValueRepository(dbSource)
	keyValueTarget := repositories.NewKeyValueRepository(dbTarget)

	userSource := repositories.NewUserRepository(dbSource)
	userTarget := repositories.NewUserRepository(dbTarget)

	languageMappingSource := repositories.NewLanguageMappingRepository(dbSource)
	languageMappingTarget := repositories.NewLanguageMappingRepository(dbTarget)

	aliasSource := repositories.NewAliasRepository(dbSource)
	aliasTarget := repositories.NewAliasRepository(dbTarget)

	summarySource := repositories.NewSummaryRepository(dbSource)
	summaryTarget := repositories.NewSummaryRepository(dbTarget)

	heartbeatSource := repositories.NewHeartbeatRepository(dbSource)
	heartbeatTarget := repositories.NewHeartbeatRepository(dbTarget)

	// TODO: things could be optimized through batch-inserts / inserts within a single transaction

	log.Println("Migrating key-value pairs ...")
	if data, err := keyValueSource.GetAll(); err == nil {
		for _, e := range data {
			if err := keyValueTarget.PutString(e); err != nil {
				log.Fatalln(err)
			}
		}
	} else {
		log.Fatalln(err)
	}

	log.Println("Migrating users ...")
	if data, err := userSource.GetAll(); err == nil {
		for _, e := range data {
			if _, _, err := userTarget.InsertOrGet(e); err != nil {
				log.Fatalln(err)
			}
		}
	} else {
		log.Fatalln(err)
	}

	log.Println("Migrating language mappings ...")
	if data, err := languageMappingSource.GetAll(); err == nil {
		for _, e := range data {
			e.ID = 0 // reset the auto-increment id so the target database assigns a fresh one
			if _, err := languageMappingTarget.Insert(e); err != nil {
				log.Fatalln(err)
			}
		}
	} else {
		log.Fatalln(err)
	}

	log.Println("Migrating aliases ...")
	if data, err := aliasSource.GetAll(); err == nil {
		for _, e := range data {
			e.ID = 0
			if _, err := aliasTarget.Insert(e); err != nil {
				log.Fatalln(err)
			}
		}
	} else {
		log.Fatalln(err)
	}

	log.Println("Migrating summaries ...")
	if data, err := summarySource.GetAll(); err == nil {
		for _, e := range data {
			e.ID = 0
			if err := summaryTarget.Insert(e); err != nil {
				log.Fatalln(err)
			}
		}
	} else {
		log.Fatalln(err)
	}

	// TODO: copy in mini-batches instead of loading all heartbeats into memory (potentially millions)
	log.Println("Migrating heartbeats ...")
	if data, err := heartbeatSource.GetAll(); err == nil {
		log.Printf("Got %d heartbeats loaded into memory. Batch-inserting them now ...\n", len(data))

		var slice = make([]*models.Heartbeat, len(data))
		for i, heartbeat := range data {
			heartbeat = heartbeat.Hashed() // compute the heartbeat's hash field before inserting it into the target
			slice[i] = heartbeat
		}

		// insert heartbeats in chunks of InsertBatchSize, then flush the remaining partial batch
		left, right, size := 0, InsertBatchSize, len(slice)
		for right < size {
			log.Printf("Inserting batch from %d", left)
			if err := heartbeatTarget.InsertBatch(slice[left:right]); err != nil {
				log.Fatalln(err)
			}
			left += InsertBatchSize
			right += InsertBatchSize
		}
		if err := heartbeatTarget.InsertBatch(slice[left:]); err != nil {
			log.Fatalln(err)
		}
	} else {
		log.Fatalln(err)
	}
}
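
// createSchema runs gorm auto-migrations for all Wakapi models on the target database.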
func createSchema() error {
	if err := dbTarget.AutoMigrate(&models.User{}); err != nil {
		return err
	}
	if err := dbTarget.AutoMigrate(&models.KeyStringValue{}); err != nil {
		return err
	}
	if err := dbTarget.AutoMigrate(&models.Alias{}); err != nil {
		return err
	}
	if err := dbTarget.AutoMigrate(&models.Heartbeat{}); err != nil {
		return err
	}
	if err := dbTarget.AutoMigrate(&models.Summary{}); err != nil {
		return err
	}
	if err := dbTarget.AutoMigrate(&models.SummaryItem{}); err != nil {
		return err
	}
	if err := dbTarget.AutoMigrate(&models.LanguageMapping{}); err != nil {
		return err
	}
	return nil
}
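
// mustConfigPath returns the path passed via -config or exits if no file exists there.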
func mustConfigPath() string {
	if _, err := os.Stat(*cFlag); err != nil {
		log.Fatalln("failed to find config file at", *cFlag)
	}
	return *cFlag
}