mirror of https://github.com/krateng/maloja.git synced 2023-08-10 21:12:55 +03:00

Compare commits

...

6 Commits

Author SHA1 Message Date
krateng
35f428ef69 Merge branch 'master' of github.com:krateng/maloja 2022-04-24 20:55:57 +02:00
krateng
342b8867d9 Ported cache cleanup from 3.1 2022-04-24 20:55:07 +02:00
krateng
bfc83fdbb0 Ported signal handling fix from 3.1 2022-04-24 20:47:17 +02:00
krateng
de286b58b9
Merge pull request #133 from northys/build_rpi
Build image for raspberry pi 2 (arm/v7)
2022-04-24 17:10:52 +02:00
Jiri Travnicek
00b3e6fc57
actions: build image for linux/arm/v7 (raspberry pi) 2022-04-24 16:12:11 +02:00
Jiri Travnicek
e1074ba259
actions: drop ghcr support 2022-04-24 16:11:58 +02:00
20 changed files with 143 additions and 136 deletions

View File

@ -20,21 +20,12 @@ jobs:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_ACCESS_TOKEN }}
- name: Login to GHCR
if: github.event_name != 'pull_request'
uses: docker/login-action@dd4fa0671be5250ee6f50aedf4cb05514abda2c7
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@f2a13332ac1ce8c0a71aeac48a150dbb1838ab67
with:
images: |
${{ github.repository_owner }}/maloja
ghcr.io/${{ github.repository_owner }}/maloja
# generate Docker tags based on the following events/attributes
tags: |
type=semver,pattern={{version}}
@ -63,7 +54,7 @@ jobs:
push: ${{ github.event_name != 'pull_request' }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
platforms: linux/amd64,linux/arm64
platforms: linux/amd64,linux/arm64,linux/arm/v7
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache-new,mode=max

View File

@ -34,6 +34,7 @@ minor_release_name: "Yeonhee"
- "[Feature] Added notification system for web interface"
- "[Bugfix] Fixed crash when encountering error in Lastfm import"
3.0.6:
commit: "b3d4cb7a153845d1f5a5eef67a6508754e338f2f"
notes:
- "[Performance] Implemented search in database"
- "[Bugfix] Better parsing of featuring artists"

View File

@ -43,7 +43,7 @@ for version in releases:
try:
prev_tag = sp.check_output(["git","show",f'v{maj}.{min}.{hot}']).decode()
prev_tag_commit = prev_tag.split('\n')[6].split(" ")[1]
except:
except Exception:
pass
else:
assert prev_tag_commit == info['commit']
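
A note on the recurring change in this commit set: bare "except:" clauses are replaced with "except Exception:". A bare except also catches BaseException subclasses such as KeyboardInterrupt and SystemExit, which can keep the process from shutting down cleanly; "except Exception" lets those propagate. A minimal, self-contained illustration (not code from the repository):

def swallow_everything():
    try:
        raise KeyboardInterrupt
    except:                # bare except: matches BaseException subclasses too
        return "swallowed"

def swallow_only_errors():
    try:
        raise KeyboardInterrupt
    except Exception:      # does not match KeyboardInterrupt
        return "swallowed"

print(swallow_everything())        # -> swallowed
try:
    swallow_only_errors()
except KeyboardInterrupt:
    print("interrupt propagated")  # -> interrupt propagated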

View File

@ -62,7 +62,7 @@ class APIHandler:
try:
response.status,result = self.handle(path,keys)
except:
except Exception:
exceptiontype = sys.exc_info()[0]
if exceptiontype in self.errors:
response.status,result = self.errors[exceptiontype]
@ -82,7 +82,7 @@ class APIHandler:
try:
methodname = self.get_method(path,keys)
method = self.methods[methodname]
except:
except Exception:
log("Could not find a handler for method " + str(methodname) + " in API " + self.__apiname__,module="debug")
log("Keys: " + str(keys),module="debug")
raise InvalidMethodException()
@ -94,5 +94,5 @@ class APIHandler:
# fixing etc is handled by the main scrobble function
try:
return database.incoming_scrobble(rawscrobble,api=self.__apiname__,client=client)
except:
except Exception:
raise ScrobblingException()
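
The hunk above shows the dispatch pattern in APIHandler.handle(): the exception type raised by the concrete handler is looked up in a per-API errors table to pick the response status and body. A hedged sketch of that pattern, with illustrative exception classes, status codes, and fallback (not Maloja's actual table):

import sys

class InvalidMethodException(Exception): pass
class ScrobblingException(Exception): pass

class SketchAPIHandler:
    errors = {
        InvalidMethodException: (404, {"status": "error", "error": "unknown method"}),
        ScrobblingException:    (500, {"status": "error", "error": "could not scrobble"}),
    }

    def handle_inner(self, path, keys):
        raise InvalidMethodException()

    def handle(self, path, keys):
        try:
            status, result = self.handle_inner(path, keys)
        except Exception:
            exceptiontype = sys.exc_info()[0]
            if exceptiontype in self.errors:
                status, result = self.errors[exceptiontype]
            else:
                status, result = 500, {"status": "error"}
        return status, result

print(SketchAPIHandler().handle("/apis/test", {}))  # -> (404, {... 'unknown method'})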

View File

@ -76,7 +76,7 @@ class Audioscrobbler(APIHandler):
#(artists,title) = cla.fullclean(artiststr,titlestr)
try:
timestamp = int(keys["timestamp"])
except:
except Exception:
timestamp = None
#database.createScrobble(artists,title,timestamp)
self.scrobble({'track_artists':[artiststr],'track_title':titlestr,'scrobble_time':timestamp},client=client)
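
The timestamp handling above (and in the legacy Audioscrobbler and ListenBrainz handlers further down) falls back to None whenever the submitted value is missing or not an integer. A small sketch of that pattern; the helper name is illustrative:

def parse_timestamp(keys, key="timestamp"):
    try:
        return int(keys[key])
    except Exception:   # KeyError, ValueError, TypeError ...
        return None

print(parse_timestamp({"timestamp": "1650810957"}))  # -> 1650810957
print(parse_timestamp({}))                           # -> None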

View File

@ -80,7 +80,7 @@ class AudioscrobblerLegacy(APIHandler):
artiststr,titlestr = keys[artist_key], keys[track_key]
try:
timestamp = int(keys[time_key])
except:
except Exception:
timestamp = None
#database.createScrobble(artists,title,timestamp)
self.scrobble({

View File

@ -34,7 +34,7 @@ class Listenbrainz(APIHandler):
def submit(self,pathnodes,keys):
try:
token = self.get_token_from_request_keys(keys)
except:
except Exception:
raise BadAuthException()
client = apikeystore.check_and_identify_key(token)
@ -45,7 +45,7 @@ class Listenbrainz(APIHandler):
try:
listentype = keys["listen_type"]
payload = keys["payload"]
except:
except Exception:
raise MalformedJSONException()
if listentype == "playing_now":
@ -57,9 +57,9 @@ class Listenbrainz(APIHandler):
artiststr, titlestr = metadata["artist_name"], metadata["track_name"]
try:
timestamp = int(listen["listened_at"])
except:
except Exception:
timestamp = None
except:
except Exception:
raise MalformedJSONException()
self.scrobble({
@ -74,7 +74,7 @@ class Listenbrainz(APIHandler):
def validate_token(self,pathnodes,keys):
try:
token = self.get_token_from_request_keys(keys)
except:
except Exception:
raise BadAuthException()
if not apikeystore.check_key(token):
raise InvalidAuthException()
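
The ListenBrainz handler above distinguishes two failure modes: a missing or unusable token raises BadAuthException, while a request body without listen_type/payload raises MalformedJSONException. A hedged sketch of that flow; the key lookup and return value are illustrative:

class BadAuthException(Exception): pass
class MalformedJSONException(Exception): pass

def submit(keys):
    try:
        token = keys["token"]          # stands in for get_token_from_request_keys()
    except Exception:
        raise BadAuthException()
    try:
        listentype = keys["listen_type"]
        payload = keys["payload"]
    except Exception:
        raise MalformedJSONException()
    return token, listentype, len(payload)

print(submit({"token": "abc", "listen_type": "single", "payload": [{}]}))
# -> ('abc', 'single', 1)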

View File

@ -256,7 +256,7 @@ def get_top_artists(dbconn=None,**keys):
try:
res = get_charts_artists(timerange=rng,dbconn=dbconn)[0]
results.append({"range":rng,"artist":res["artist"],"scrobbles":res["scrobbles"]})
except:
except Exception:
results.append({"range":rng,"artist":None,"scrobbles":0})
return results
@ -272,7 +272,7 @@ def get_top_tracks(dbconn=None,**keys):
try:
res = get_charts_tracks(timerange=rng,dbconn=dbconn)[0]
results.append({"range":rng,"track":res["track"],"scrobbles":res["scrobbles"]})
except:
except Exception:
results.append({"range":rng,"track":None,"scrobbles":0})
return results
@ -302,7 +302,7 @@ def artist_info(dbconn=None,**keys):
},
"topweeks":len([e for e in cached.weekly_topartists if e == artist])
}
except:
except Exception:
# if the artist isnt in the charts, they are not being credited and we
# need to show information about the credited one
replaceartist = sqldb.get_credited_artists(artist)[0]
@ -370,7 +370,7 @@ def get_predefined_rulesets(dbconn=None):
else: name = rawf.split("_")[1]
desc = line2.replace("# DESC: ","") if "# DESC: " in line2 else ""
author = rawf.split("_")[0]
except:
except Exception:
continue
ruleset = {"file":rawf}

View File

@ -10,96 +10,97 @@ from doreah.logging import log
from ..globalconf import malojaconfig
HIGH_NUMBER = 1000000
CACHE_SIZE = 10000
ENTITY_CACHE_SIZE = 1000000
CACHE_ADJUST_STEP = 100
cache = lru.LRU(CACHE_SIZE)
entitycache = lru.LRU(ENTITY_CACHE_SIZE)
hits, misses = 0, 0
@runhourly
def maintenance():
if malojaconfig['USE_GLOBAL_CACHE']:
if malojaconfig['USE_GLOBAL_CACHE']:
CACHE_SIZE = 1000
ENTITY_CACHE_SIZE = 100000
cache = lru.LRU(CACHE_SIZE)
entitycache = lru.LRU(ENTITY_CACHE_SIZE)
hits, misses = 0, 0
@runhourly
def maintenance():
print_stats()
trim_cache()
def print_stats():
log(f"Cache Size: {len(cache)} [{len(entitycache)} E], System RAM Utilization: {psutil.virtual_memory().percent}%, Cache Hits: {hits}/{hits+misses}")
#print("Full rundown:")
#import sys
#for k in cache.keys():
# print(f"\t{k}\t{sys.getsizeof(cache[k])}")
def print_stats():
log(f"Cache Size: {len(cache)} [{len(entitycache)} E], System RAM Utilization: {psutil.virtual_memory().percent}%, Cache Hits: {hits}/{hits+misses}")
#print("Full rundown:")
#import sys
#for k in cache.keys():
# print(f"\t{k}\t{sys.getsizeof(cache[k])}")
def cached_wrapper(inner_func):
def cached_wrapper(inner_func):
if not malojaconfig['USE_GLOBAL_CACHE']: return inner_func
def outer_func(*args,**kwargs):
if 'dbconn' in kwargs:
conn = kwargs.pop('dbconn')
else:
conn = None
global hits, misses
key = (serialize(args),serialize(kwargs), inner_func, kwargs.get("since"), kwargs.get("to"))
def outer_func(*args,**kwargs):
if key in cache:
hits += 1
return cache.get(key)
if 'dbconn' in kwargs:
conn = kwargs.pop('dbconn')
else:
conn = None
global hits, misses
key = (serialize(args),serialize(kwargs), inner_func, kwargs.get("since"), kwargs.get("to"))
if key in cache:
hits += 1
return cache.get(key)
else:
misses += 1
result = inner_func(*args,**kwargs,dbconn=conn)
cache[key] = result
return result
return outer_func
# cache for functions that call with a whole list of entity ids
# we don't want a new cache entry for every single combination, but keep a common
# cache that's aware of what we're calling
def cached_wrapper_individual(inner_func):
def outer_func(set_arg,**kwargs):
if 'dbconn' in kwargs:
conn = kwargs.pop('dbconn')
else:
conn = None
#global hits, misses
result = {}
for id in set_arg:
if (inner_func,id) in entitycache:
result[id] = entitycache[(inner_func,id)]
#hits += 1
else:
pass
#misses += 1
remaining = inner_func(set(e for e in set_arg if e not in result),dbconn=conn)
for id in remaining:
entitycache[(inner_func,id)] = remaining[id]
result[id] = remaining[id]
else:
misses += 1
result = inner_func(*args,**kwargs,dbconn=conn)
cache[key] = result
return result
return outer_func
return outer_func
# cache for functions that call with a whole list of entity ids
# we don't want a new cache entry for every single combination, but keep a common
# cache that's aware of what we're calling
def cached_wrapper_individual(inner_func):
if not malojaconfig['USE_GLOBAL_CACHE']: return inner_func
def outer_func(set_arg,**kwargs):
if 'dbconn' in kwargs:
conn = kwargs.pop('dbconn')
else:
conn = None
#global hits, misses
result = {}
for id in set_arg:
if (inner_func,id) in entitycache:
result[id] = entitycache[(inner_func,id)]
#hits += 1
else:
pass
#misses += 1
remaining = inner_func(set(e for e in set_arg if e not in result),dbconn=conn)
for id in remaining:
entitycache[(inner_func,id)] = remaining[id]
result[id] = remaining[id]
return result
return outer_func
def invalidate_caches(scrobbletime):
if malojaconfig['USE_GLOBAL_CACHE']:
def invalidate_caches(scrobbletime=None):
cleared, kept = 0, 0
for k in cache.keys():
# VERY BIG TODO: differentiate between None as in 'unlimited timerange' and None as in 'time doesnt matter here'!
if (k[3] is None or scrobbletime >= k[3]) and (k[4] is None or scrobbletime <= k[4]):
if scrobbletime is None or (k[3] is None or scrobbletime >= k[3]) and (k[4] is None or scrobbletime <= k[4]):
cleared += 1
del cache[k]
else:
@ -107,35 +108,49 @@ def invalidate_caches(scrobbletime):
log(f"Invalidated {cleared} of {cleared+kept} DB cache entries")
def invalidate_entity_cache():
entitycache.clear()
def invalidate_entity_cache():
entitycache.clear()
def trim_cache():
ramprct = psutil.virtual_memory().percent
if ramprct > malojaconfig["DB_MAX_MEMORY"]:
log(f"{ramprct}% RAM usage, clearing cache and adjusting size!")
#ratio = 0.6
#targetsize = max(int(len(cache) * ratio),50)
#log(f"Reducing to {targetsize} entries")
#cache.set_size(targetsize)
#cache.set_size(HIGH_NUMBER)
cache.clear()
if cache.get_size() > CACHE_ADJUST_STEP:
cache.set_size(cache.get_size() - CACHE_ADJUST_STEP)
def trim_cache():
ramprct = psutil.virtual_memory().percent
if ramprct > malojaconfig["DB_MAX_MEMORY"]:
log(f"{ramprct}% RAM usage, clearing cache and adjusting size!")
#ratio = 0.6
#targetsize = max(int(len(cache) * ratio),50)
#log(f"Reducing to {targetsize} entries")
#cache.set_size(targetsize)
#cache.set_size(HIGH_NUMBER)
cache.clear()
#if cache.get_size() > CACHE_ADJUST_STEP:
# cache.set_size(cache.get_size() - CACHE_ADJUST_STEP)
#log(f"New RAM usage: {psutil.virtual_memory().percent}%")
print_stats()
#log(f"New RAM usage: {psutil.virtual_memory().percent}%")
print_stats()
else:
def cached_wrapper(func):
return func
def cached_wrapper_individual(func):
return func
def invalidate_caches(scrobbletime=None):
return None
def invalidate_entity_cache():
return None
def serialize(obj):
try:
return serialize(obj.hashable())
except:
except Exception:
try:
return json.dumps(obj)
except:
except Exception:
if isinstance(obj, (list, tuple, set)):
return "[" + ",".join(serialize(o) for o in obj) + "]"
elif isinstance(obj,dict):
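
The rewritten cache module above moves all cache machinery behind the USE_GLOBAL_CACHE setting and provides no-op replacements when it is disabled; when enabled, cached_wrapper memoizes results in an LRU keyed on the serialized call arguments plus the since/to keys, so entries can later be invalidated by scrobble time. A reduced sketch of that decorator, using a plain dict instead of lru.LRU and a simplified serialize(); apart from the key layout and the hit/miss counters, the details are illustrative:

import json

cache = {}
hits, misses = 0, 0

def serialize(obj):
    try:
        return json.dumps(obj, sort_keys=True)
    except Exception:
        return str(obj)

def cached_wrapper(inner_func):
    def outer_func(*args, **kwargs):
        global hits, misses
        conn = kwargs.pop('dbconn', None)
        # since/to stay in the key so time-based invalidation can inspect them
        key = (serialize(args), serialize(kwargs), inner_func,
               kwargs.get("since"), kwargs.get("to"))
        if key in cache:
            hits += 1
            return cache[key]
        misses += 1
        result = inner_func(*args, **kwargs, dbconn=conn)
        cache[key] = result
        return result
    return outer_func

@cached_wrapper
def expensive_lookup(x, dbconn=None):
    return x * x

print(expensive_lookup(3), expensive_lookup(3), (hits, misses))  # -> 9 9 (1, 1)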

View File

@ -28,7 +28,7 @@ def is_dir_usable(pth):
os.mknod(pthj(pth,".test"))
os.remove(pthj(pth,".test"))
return True
except:
except Exception:
return False
def get_env_vars(key,pathsuffix=[]):
@ -149,8 +149,8 @@ malojaconfig = Configuration(
"cache_expire_positive":(tp.Integer(), "Image Cache Expiration", 60, "Days until images are refetched"),
"cache_expire_negative":(tp.Integer(), "Image Cache Negative Expiration", 5, "Days until failed image fetches are reattempted"),
"db_max_memory":(tp.Integer(min=0,max=100), "RAM Percentage soft limit", 80, "RAM Usage in percent at which Maloja should no longer increase its database cache."),
"use_request_cache":(tp.Boolean(), "Use request-local DB Cache", True),
"use_global_cache":(tp.Boolean(), "Use global DB Cache", True)
"use_request_cache":(tp.Boolean(), "Use request-local DB Cache", False),
"use_global_cache":(tp.Boolean(), "Use global DB Cache", False)
},
"Fluff":{
"scrobbles_gold":(tp.Integer(), "Scrobbles for Gold", 250, "How many scrobbles a track needs to be considered 'Gold' status"),

View File

@ -94,7 +94,7 @@ def dl_image(url):
uri = datauri.DataURI.make(mime,charset='ascii',base64=True,data=data)
log(f"Downloaded {url} for local caching")
return uri
except:
except Exception:
log(f"Image {url} could not be downloaded for local caching")
return None
@ -260,7 +260,7 @@ def local_files(artist=None,artists=None,title=None):
for f in os.listdir(data_dir['images'](purename)):
if f.split(".")[-1] in ["png","jpg","jpeg","gif"]:
images.append("/images/" + purename + "/" + f)
except:
except Exception:
pass
return images
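
local_files() above only offers files whose extension marks them as images, and silently returns nothing when the artist/track folder does not exist. A small sketch of that filter with an illustrative path:

import os

def image_files(directory):
    images = []
    try:
        for f in os.listdir(directory):
            if f.split(".")[-1] in ["png", "jpg", "jpeg", "gif"]:
                images.append(os.path.join(directory, f))
    except Exception:   # e.g. no such folder for this artist or track
        pass
    return images

print(image_files("/tmp/does-not-exist"))  # -> []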

View File

@ -13,7 +13,7 @@ def find_representative(sequence,attribute_id,attribute_count):
newsequence = [e for e in newsequence if e[attribute_count] == max(el[attribute_count] for el in newsequence)]
return newsequence[0]
except:
except Exception:
return None
finally:
for e in newsequence:

View File

@ -430,7 +430,7 @@ def time_fix(t):
try:
t = [int(p) for p in t]
return MTRangeGregorian(t[:3])
except:
except Exception:
pass
if isinstance(t[1],str) and t[1].startswith("w"):
@ -438,7 +438,7 @@ def time_fix(t):
year = int(t[0])
weeknum = int(t[1][1:])
return MTRangeWeek(year=year,week=weeknum)
except:
except Exception:
raise
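
time_fix() above first tries to read the descriptor as an all-numeric year/month/day list and otherwise checks for a week form like "w17". A hedged sketch of that dispatch, with tuples standing in for the MTRangeGregorian/MTRangeWeek objects:

def time_fix(t):
    try:
        parts = [int(p) for p in t]
        return ("gregorian", parts[:3])
    except Exception:
        pass
    if isinstance(t[1], str) and t[1].startswith("w"):
        try:
            return ("week", int(t[0]), int(t[1][1:]))
        except Exception:
            raise

print(time_fix([2022, 4, 24]))    # -> ('gregorian', [2022, 4, 24])
print(time_fix(["2022", "w17"]))  # -> ('week', 2022, 17)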

View File

@ -146,7 +146,7 @@ def remove_identical(*dicts):
try: #multidicts
for v in d.getall(k):
keys.append(k,v)
except: #normaldicts
except Exception: #normaldicts
v = d.get(k)
keys.append(k,v)
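
remove_identical() above has to cope with both multidict-style objects (which can repeat a key and expose getall()) and plain dicts. A small sketch of that branch; the collect() helper is illustrative:

def collect(d):
    pairs = []
    for k in d:
        try:                      # multidicts: keep every value for the key
            for v in d.getall(k):
                pairs.append((k, v))
        except Exception:         # plain dicts: single value per key
            pairs.append((k, d.get(k)))
    return pairs

print(collect({"artist": "Example", "title": "Song"}))
# -> [('artist', 'Example'), ('title', 'Song')]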

View File

@ -11,21 +11,21 @@ try:
from simplejson import JSONEncoder
JSONEncoder._olddefault = JSONEncoder.default
JSONEncoder.default = newdefault
except:
except Exception:
pass
try:
from json import JSONEncoder
JSONEncoder._olddefault = JSONEncoder.default
JSONEncoder.default = newdefault
except:
except Exception:
pass
try:
from ujson import JSONEncoder
JSONEncoder._olddefault = JSONEncoder.default
JSONEncoder.default = newdefault
except:
except Exception:
pass
@ -51,7 +51,7 @@ class expandeddate(date):
def fromchrcalendar(cls,y,w,d):
try:
return datetime.date.fromisocalendar(y,w,d) - timedelta(days=1) #sunday instead of monday
except:
except Exception:
# pre python3.8 compatibility
firstdayofyear = datetime.date(y,1,1)
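
The first hunk above patches JSONEncoder.default on whichever JSON implementation happens to be importable, skipping the ones that are missing. A sketch of that try-import-and-patch pattern; newdefault here is a trivial illustrative stand-in:

def newdefault(self, obj):
    try:
        return obj.hashable()
    except AttributeError:
        return self._olddefault(obj)

for modname in ("simplejson", "json", "ujson"):
    try:
        module = __import__(modname)
        encoder = module.JSONEncoder
        encoder._olddefault = encoder.default
        encoder.default = newdefault
    except Exception:   # module not installed, or exposes no JSONEncoder
        pass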

View File

@ -27,14 +27,14 @@ def getInstance():
try:
output = subprocess.check_output(["pidof","Maloja"])
return int(output)
except:
except Exception:
return None
def getInstanceSupervisor():
try:
output = subprocess.check_output(["pidof","maloja_supervisor"])
return int(output)
except:
except Exception:
return None
def restart():
@ -59,7 +59,7 @@ def start():
print("\t" + col["blue"]("http://localhost:" + str(port)))
print("\t" + col["blue"]("http://localhost:" + str(port) + "/admin_setup"))
return True
except:
except Exception:
print("Error while starting Maloja.")
return False
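
getInstance()/getInstanceSupervisor() above shell out to pidof and treat any failure (pidof missing, no such process, unparsable output) as "not running". A small sketch with an illustrative process name:

import subprocess

def get_instance(process_name="Maloja"):
    try:
        output = subprocess.check_output(["pidof", process_name])
        return int(output)
    except Exception:
        return None

print(get_instance("definitely-not-running"))  # -> None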

View File

@ -33,7 +33,7 @@ def profile(func):
if FULL_PROFILE:
try:
pstats.Stats(profiler).dump_stats(os.path.join(benchmarkfolder,f"{func.__name__}.stats"))
except:
except Exception:
pass
return result

View File

@ -230,7 +230,7 @@ class MetadataInterface(GenericInterface,abstract=True):
for node in self.metadata[resp]:
try:
res = res[node]
except:
except Exception:
return None
return res
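
MetadataInterface above walks a nested API response along a list of node names and returns None as soon as a step is missing. A reduced sketch with an illustrative response layout:

def walk(response, nodes):
    res = response
    for node in nodes:
        try:
            res = res[node]
        except Exception:
            return None
    return res

resp = {"album": {"image": [{"#text": "https://example.org/cover.jpg"}]}}
print(walk(resp, ["album", "image", 0, "#text"]))  # -> the cover URL
print(walk(resp, ["album", "thumb"]))              # -> None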

View File

@ -57,7 +57,7 @@ class MusicBrainz(MetadataInterface):
if imgurl is not None: imgurl = self.postprocess_url(imgurl)
return imgurl
except:
except Exception:
return None
finally:
time.sleep(2)

View File

@ -37,7 +37,7 @@ def upgrade_apikeys():
for key,identifier in entries:
_apikeys.apikeystore[identifier] = key
os.remove(oldfile)
except:
except Exception:
pass