mirror of https://github.com/krateng/maloja.git (synced 2023-08-10 21:12:55 +03:00)
Removed rulestate consistency system
This commit is contained in:
parent 01e555172f
commit 10f636e7ed
@@ -24,19 +24,6 @@ class CleanerAgent:
 		self.rules_artistintitle = {b.lower():c for [a,b,c,d] in raw if a=="artistintitle"}
 		#self.rules_regexartist = [[b,c] for [a,b,c,d] in raw if a=="regexartist"]
 		#self.rules_regextitle = [[b,c] for [a,b,c,d] in raw if a=="regextitle"]
-		# TODO
-
-		#self.plugin_artistparsers = []
-		#self.plugin_titleparsers = []
-		#if settings.get_settings("USE_PARSE_PLUGINS"):
-		#	for ep in pkg_resources.iter_entry_points(group='maloja.artistparsers'):
-		#		self.plugin_artistparsers.append(ep.load())
-		#	for ep in pkg_resources.iter_entry_points(group='maloja.titleparsers'):
-		#		self.plugin_titleparsers.append(ep.load())
-
-		# we always need to be able to tell if our current database is made with the current rules
-		self.checksums = utilities.checksumTSV(datadir("rules"))
-
 
 
 	def fullclean(self,artist,title):
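
For orientation: `utilities.checksumTSV`, whose call is removed here and whose definition is removed in the last hunk of this diff, fingerprinted the rules directory as one md5 digest per `.tsv` file, joined by newlines. A minimal standalone sketch of that behavior; `rules_fingerprint` is a hypothetical name, not part of the codebase:

	import hashlib
	import os

	def rules_fingerprint(folder):
		# One md5 hex digest per .tsv rule file, newline-separated,
		# mirroring the removed utilities.checksumTSV (order follows os.listdir).
		sums = ""
		for name in os.listdir(folder):
			if name.endswith(".tsv"):
				with open(os.path.join(folder, name), "rb") as f:
					sums += hashlib.md5(f.read()).hexdigest() + "\n"
		return sums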
@@ -78,8 +78,6 @@ clients = []
 
 lastsync = 0
 
-# rulestate that the entire current database was built with, or False if the database was built from inconsistent scrobble files
-db_rulestate = False
 
 try:
 	with open(datadir("known_servers.yml"),"r") as f:
@@ -272,15 +270,11 @@ def test_server(key=None):
 		response.status = 403
 		return "Wrong API key"
 
-	elif db_rulestate:
-		response.status = 204
-		return
 	else:
-		response.status = 205
+		response.status = 200
 		return
 
-	# 204 Database server is up and operational
-	# 205 Database server is up, but DB is not fully built or is inconsistent
+	# 200 Database server is up and operational
 	# 403 Database server is up, but provided API key is not valid
 
 @dbserver.get("serverinfo")
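
With the `db_rulestate` branch removed, a valid key now always yields 200; previously 204 signaled a database fully consistent with the current rules, and 205 one that was not fully built or inconsistent. A minimal client-side health check against the new behavior; the URL, port, and key parameter are assumptions, not taken from this commit:

	import urllib.request
	import urllib.error

	# Hypothetical endpoint; adjust host, port, and parameter name to the actual setup.
	url = "http://localhost:42010/test?key=YOURKEY"
	try:
		status = urllib.request.urlopen(url).getcode()
		# 200: server up and key accepted (204/205 are no longer returned after this commit)
		print("Server OK" if status == 200 else "Unexpected status: %d" % status)
	except urllib.error.HTTPError as e:
		# 403: server up, but the provided API key is not valid
		print("Bad API key" if e.code == 403 else "HTTP error: %d" % e.code)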
@@ -795,8 +789,6 @@ def sapi(path:Multi,**keys):
 def newrule(**keys):
 	tsv.add_entry(datadir("rules/webmade.tsv"),[k for k in keys])
 	#addEntry("rules/webmade.tsv",[k for k in keys])
-	global db_rulestate
-	db_rulestate = False
 
 
 def issues():
@@ -960,8 +952,6 @@ def import_rulemodule(**keys):
 @authenticated_api
 def rebuild(**keys):
 	log("Database rebuild initiated!")
-	global db_rulestate
-	db_rulestate = False
 	sync()
 	from .proccontrol.tasks.fixexisting import fix
 	fix()
@@ -1091,8 +1081,6 @@ def build_db():
 	utilities.update_weekly()
 	utilities.send_stats()
 
-	global db_rulestate
-	db_rulestate = utilities.consistentRulestate(datadir("scrobbles"),cla.checksums)
 
 	global ISSUES
 	ISSUES = check_issues()
@@ -1135,7 +1123,6 @@ def sync():
 	for e in entries:
 		tsv.add_entries(datadir("scrobbles/" + e + ".tsv"),entries[e],comments=False)
 		#addEntries("scrobbles/" + e + ".tsv",entries[e],escape=False)
-		utilities.combineChecksums(datadir("scrobbles/" + e + ".tsv"),cla.checksums)
 
 	#log("Written files",module="debug")
 
@@ -67,7 +67,5 @@ def fix():
 
 		os.rename(datadir("scrobbles",filename_new),datadir("scrobbles",filename))
 
-		with open(datadir("scrobbles",filename + ".rulestate"),"w") as checkfile:
-			checkfile.write(wendigo.checksums)
 
 	log("Database fixed!")
@@ -61,7 +61,3 @@ def convert(input,output):
 
 			outputlog.write(entry)
 			outputlog.write("\n")
-
-	with open(output + ".rulestate","w") as checksumfile:
-		#this file stores an identifier for all rules that were in place when the corresponding file was created
-		checksumfile.write(c.checksums)
@@ -48,85 +48,6 @@ def serialize(obj):
 
 
-
-
-#####
-## RULESTATE VALIDATION
-#####
-
-
-def checksumTSV(folder):
-
-	sums = ""
-
-	for f in os.listdir(folder + "/"):
-		if (f.endswith(".tsv")):
-			f = open(folder + "/" + f,"rb")
-			sums += hashlib.md5(f.read()).hexdigest() + "\n"
-			f.close()
-
-	return sums
-
-# returns whether checksums match and sets the checksum to invalid if they don't (or sets the new one if no previous one exists)
-def combineChecksums(filename,checksums):
-	import os
-
-	if os.path.exists(filename + ".rulestate"):
-		f = open(filename + ".rulestate","r")
-		oldchecksums = f.read()
-		f.close()
-		if oldchecksums == checksums:
-			# the new checksum given by the calling db server represents the rule state that all current unsaved scrobbles were created under
-			# if this is the same as the existing one, we're all good
-			return True
-		elif (oldchecksums != "INVALID"):
-			#if not, the file is not consistent to any single rule state (some scrobbles were created with an old ruleset, some not)
-			f = open(filename + ".rulestate","w")
-			f.write("INVALID") # this will never match any sha256sum
-			f.close()
-			return False
-		else:
-			#if the file already says invalid, no need to open it and rewrite
-			return False
-	else:
-		f = open(filename + ".rulestate","w")
-		f.write(checksums)
-		f.close()
-		return True
-
-# checks ALL files for their rule state. if they are all the same as the current loaded one, the entire database can be assumed to be consistent with the current ruleset
-# in any other case, get out
-def consistentRulestate(folder,checksums):
-
-	result = []
-	for scrobblefile in os.listdir(folder + "/"):
-
-		if (scrobblefile.endswith(".tsv")):
-
-			try:
-				with open(folder + "/" + scrobblefile + ".rulestate","r") as f:
-					if f.read() != checksums:
-						return False
-			except:
-				return False
-
-	return True
-
-
 #####
 ## IMAGES
 #####
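
Taken together, the removed pieces formed one workflow: CleanerAgent.__init__ fingerprinted the active rules, sync() stamped every scrobble file with that fingerprint via combineChecksums, and build_db() set db_rulestate by checking all stamps with consistentRulestate. A rough sketch of that lifecycle using the helpers deleted above; the scrobble file name is illustrative only:

	# How the removed helpers interacted before this commit.
	checksums = checksumTSV(datadir("rules"))                            # CleanerAgent.__init__
	combineChecksums(datadir("scrobbles/2019_3.tsv"), checksums)         # sync(), once per file
	db_rulestate = consistentRulestate(datadir("scrobbles"), checksums)  # build_db()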