Reorganized tasks

krateng 2022-03-27 05:08:17 +02:00
parent 3f098b6993
commit e31c0dce57
7 changed files with 63 additions and 156 deletions

View File

@@ -105,21 +105,22 @@ def print_info():
     print("#####")
     print()
 
-@mainfunction({"l":"level","v":"version","V":"version"},flags=['version'],shield=True)
+@mainfunction({"l":"level","v":"version","V":"version"},flags=['version','include_images'],shield=True)
 def main(*args,**kwargs):
 
     actions = {
+        # server
         "start":start,
         "restart":restart,
         "stop":stop,
         "run":direct,
         "debug":debug,
         "setup":onlysetup,
-        "import":tasks.loadexternal,
-        "backup":tasks.backuphere,
-        # "update":update,
-        "fix":tasks.fixdb,
-        "generate":tasks.generate_scrobbles,
+        # admin scripts
+        "import":tasks.import_scrobbles,
+        "backup":tasks.backup,        # maloja backup --folder x/y --include_images
+        "generate":tasks.generate,
+        # aux
        "info":print_info
     }
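
Each key of the actions dict is a maloja subcommand, and the keyword arguments parsed by @mainfunction (including the new include_images flag) are forwarded to the selected task, as the inline CLI comment above suggests. Below is a hedged, self-contained sketch of that dispatch pattern; the stub functions only stand in for the real maloja tasks.

    # Sketch only: stubs stand in for the actual maloja task functions.
    def import_scrobbles(fromfile): print("importing", fromfile)
    def backup(targetfolder=None, include_images=False): print("backup to", targetfolder, "images:", include_images)
    def generate(): print("generating test scrobbles")
    def print_info(): print("maloja info")

    actions = {
        # admin scripts
        "import": import_scrobbles,
        "backup": backup,
        "generate": generate,
        # aux
        "info": print_info,
    }

    def main(action, *args, **kwargs):
        if action not in actions:
            print("Valid commands:", ", ".join(actions))
            return
        # kwargs parsed from the command line (e.g. include_images=True)
        # are forwarded unchanged to the selected task
        return actions[action](*args, **kwargs)

    if __name__ == "__main__":
        main("backup", include_images=True)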

View File

@@ -35,8 +35,6 @@ def start():
 
     while True:
         log("Maloja is not running, starting...",module="supervisor")
-        if malojaconfig["UPDATE_AFTER_CRASH"]:
-            update()
         process = start()
         process.wait()

View File

@@ -1,39 +1,3 @@
-import os
-from doreah.io import ask,col
-
-from ...globalconf import data_dir
-
-
-## TODO: import to db
-def loadexternal(filename):
-    if not os.path.exists(filename):
-        print("File could not be found.")
-        return
-
-    print("Please wait...")
-
-    from .importer import import_scrobbles
-    imported,failed = import_scrobbles(filename)
-    print("Successfully imported",imported,"scrobbles!")
-    if failed > 0:
-        print(col['red'](str(failed) + " Errors!"))
-
-def backuphere():
-    from .backup import backup
-    backup(folder=os.getcwd())
-
-def update():
-    os.system("pip3 install malojaserver --upgrade --no-cache-dir")
-    from ..control import restart
-    restart()
-
-def fixdb():
-    from .fixexisting import fix
-    fix()
-
-def generate_scrobbles():
-    targetfile = data_dir['scrobbles']("randomgenerated.tsv")
-    from .generate import generate
-    generate(targetfile)
+from .import_scrobbles import import_scrobbles
+from .backup import backup
+from .generate import generate

View File

@@ -2,46 +2,50 @@ import tarfile
 from datetime import datetime
 import glob
 import os
-from ...globalconf import data_dir
+from ...globalconf import dir_settings
 from pathlib import PurePath
 from doreah.logging import log
+from doreah.io import col
 
-user_files = {
-    "minimal":{
-        "rules":["*.tsv"],
-        "scrobbles":["malojadb.sqlite"]
-    },
-    "full":{
-        "clients":["apikeys.yml"],
-        "images":["artists","tracks"],
-        "settings":["settings.ini"]
-    }
-}
+basic_files = [
+    ('config', ['rules/*.tsv','settings.ini','apikeys.yml','custom_css/*.css']),
+    ('state', ['auth/auth.ddb','malojadb.sqlite'])
+]
+expanded_files = [
+    ('state', ['images'])
+]
 
-def backup(folder,level="full"):
+def backup(targetfolder=None,include_images=False):
 
-    selected_files = user_files["minimal"] if level == "minimal" else {**user_files["minimal"], **user_files["full"]}
-    real_files = {cat:[] for cat in selected_files}
-    for cat in selected_files:
-        catfolder = data_dir[cat]
-        for g in selected_files[cat]:
-            real_files[cat] += glob.glob(catfolder(g))
+    if targetfolder is None:
+        targetfolder = os.getcwd()
+
+    if include_images:
+        file_patterns = basic_files + expanded_files
+    else:
+        file_patterns = basic_files
+
+    real_files = {}
+    for category,patterns in file_patterns:
+        real_files.setdefault(category,[])
+        for pattern in patterns:
+            real_files[category] += glob.glob(os.path.join(dir_settings[category],pattern))
 
     log("Creating backup...")
     now = datetime.utcnow()
     timestr = now.strftime("%Y_%m_%d_%H_%M_%S")
-    filename = "maloja_backup_" + timestr + ".tar.gz"
-    archivefile = os.path.join(folder,filename)
+    filename = f"maloja_backup_{timestr}.tar.gz"
+    archivefile = os.path.join(targetfolder,filename)
     assert not os.path.exists(archivefile)
 
     with tarfile.open(name=archivefile,mode="x:gz") as archive:
-        for cat, value in real_files.items():
-            for f in value:
+        for category, filelist in real_files.items():
+            for f in filelist:
                 p = PurePath(f)
-                r = p.relative_to(data_dir[cat]())
-                archive.add(f,arcname=os.path.join(cat,r))
+                r = p.relative_to(dir_settings[category])
+                archive.add(f,arcname=os.path.join(category,r))
 
-    log("Backup created!")
+    log("Backup created: " + col['yellow'](archivefile))
     return archivefile
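
For reference, a hedged usage sketch of the reworked backup task. The maloja.proccontrol.tasks import path and the target folder are assumptions based on the relative imports in this diff, not something the commit itself states.

    # Assumed import path; the diff only shows package-relative imports.
    from maloja.proccontrol.tasks import backup

    # Minimal backup (config + database) into the current working directory:
    archive = backup()

    # Backup including images, written to an explicit target folder:
    archive = backup(targetfolder="/var/backups/maloja", include_images=True)
    print("Archive written to", archive)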

View File

@@ -1,70 +0,0 @@
-import os
-from ...globalconf import data_dir
-import re
-from ...cleanup import CleanerAgent
-from doreah.logging import log
-import difflib
-import datetime
-from .backup import backup
-
-wendigo = CleanerAgent()
-
-exp = r"([0-9]*)(\t+)([^\t]+?)(\t+)([^\t]+)([^\n]*)\n"
-#    1       2      3       4      5       6
-# groups:
-# 1 - timestamp
-# 2 - sep
-# 3 - artists
-# 4 - sep
-# 5 - title
-# 6 - rest
-
-def fix():
-
-    backup(level="minimal",folder=data_dir['backups']())
-
-    now = datetime.datetime.utcnow()
-    nowstr = now.strftime("%Y_%m_%d_%H_%M_%S")
-    datestr = now.strftime("%Y/%m/%d")
-    timestr = now.strftime("%H:%M:%S")
-
-    patchfolder = data_dir['logs']("dbfix",nowstr)
-    os.makedirs(patchfolder)
-
-    log("Fixing database...")
-    for filename in os.listdir(data_dir['scrobbles']()):
-        if filename.endswith(".tsv"):
-            log("Fix file " + filename)
-            filename_new = filename + "_new"
-
-            with open(data_dir['scrobbles'](filename_new),"w") as newfile:
-                with open(data_dir['scrobbles'](filename),"r") as oldfile:
-                    for l in oldfile:
-                        a,t = re.sub(exp,r"\3",l), re.sub(exp,r"\5",l)
-                        r1,r2,r3 = re.sub(exp,r"\1\2",l),re.sub(exp,r"\4",l),re.sub(exp,r"\6",l)
-
-                        a = a.split("␟")
-                        (al,t) = wendigo.fullclean(a,t)
-                        a = "␟".join(al)
-                        newfile.write(r1 + a + r2 + t + r3 + "\n")
-
-            #os.system("diff " + "scrobbles/" + fn + "_new" + " " + "scrobbles/" + fn)
-            with open(data_dir['scrobbles'](filename_new),"r") as newfile, open(data_dir['scrobbles'](filename),"r") as oldfile:
-                diff = difflib.unified_diff(oldfile.read().split("\n"),newfile.read().split("\n"),lineterm="",n=0)
-                diff = list(diff)
-
-            with open(os.path.join(patchfolder,filename + ".diff"),"w") as patchfile:
-                patchfile.write("\n".join(diff))
-
-            os.replace(data_dir['scrobbles'](filename_new),data_dir['scrobbles'](filename))
-
-    log("Database fixed!")

View File

@@ -1,6 +1,7 @@
 import random
 import datetime
 
 artists = [
     "Chou Tzuyu","Jennie Kim","Kim Seolhyun","Nancy McDonie","Park Junghwa","Hirai Momo","Rosé Park","Laura Brehm","HyunA",
     "Jeremy Soule","Jerry Goldsmith","Howard Shore","Tilman Sillescu","James Newton Howard","Bear McCreary","David Newman",
@@ -64,13 +65,15 @@ def generate_track():
 
-def generate(targetfile):
-    with open(targetfile,"a") as fd:
-        for _ in range(200):
-            track = generate_track()
-            for _ in range(random.randint(1, 50)):
-                timestamp = random.randint(1, int(datetime.datetime.now().timestamp()))
-                entry = "\t".join([str(timestamp),"␟".join(track['artists']),track['title'],"-"])
-                fd.write(entry)
-                fd.write("\n")
+def generate():
+    # TODO
+    pass
+
+    for _ in range(200):
+        track = generate_track()
+        for _ in range(random.randint(1, 50)):
+            timestamp = random.randint(1, int(datetime.datetime.now().timestamp()))
+            entry = "\t".join([str(timestamp),"␟".join(track['artists']),track['title'],"-"])
+            fd.write(entry)
+            fd.write("\n")

View File

@@ -13,10 +13,14 @@ from ...globalconf import data_dir
 
 c = CleanerAgent()
 
-def import_scrobbles(inputf):
+# TODO db import
+def import_scrobbles(fromfile):
+
+    if not os.path.exists(fromfile):
+        print("File could not be found.")
+        return
 
-    ext = inputf.split('.')[-1].lower()
+    ext = fromfile.split('.')[-1].lower()
 
     if ext == 'csv':
         type = "Last.fm"
@@ -30,7 +34,7 @@ def import_scrobbles(inputf):
         importfunc = parse_spotify
 
-    print(f"Parsing {col['yellow'](inputf)} as {col['cyan'](type)} export")
+    print(f"Parsing {col['yellow'](fromfile)} as {col['cyan'](type)} export")
 
     if os.path.exists(outputf):
         overwrite = ask("Already imported data. Overwrite?",default=False)
@@ -41,7 +45,7 @@ def import_scrobbles(inputf):
     failed = 0
     timestamps = set()
 
-    for scrobble in importfunc(inputf):
+    for scrobble in importfunc(fromfile):
         if scrobble is None:
             failed += 1
         else:
@@ -73,6 +77,9 @@ def import_scrobbles(inputf):
         if success % 100 == 0:
             print(f"Imported {success} scrobbles...")
 
+    print("Successfully imported",success,"scrobbles!")
+    if failed > 0:
+        print(col['red'](str(failed) + " Errors!"))
+
     return success,failed
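
A hedged sketch of calling the reworked importer from Python; as above, the module path is assumed from the relative imports in this commit, and the input file name is purely illustrative.

    # Assumed import path; the commit only shows package-relative imports.
    from maloja.proccontrol.tasks import import_scrobbles

    result = import_scrobbles("lastfm_export.csv")
    if result is not None:          # the task returns None when the file does not exist
        success, failed = result
        print(f"{success} imported, {failed} failed")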