Mirror of https://github.com/krateng/maloja.git (synced 2023-08-10 21:12:55 +03:00)
Compare commits: b3d4cb7a15 ... 35f428ef69

6 commits:
- 35f428ef69
- 342b8867d9
- bfc83fdbb0
- de286b58b9
- 00b3e6fc57
- e1074ba259
.github/workflows/docker.yml (vendored): 11 lines changed
@@ -20,21 +20,12 @@ jobs:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_ACCESS_TOKEN }}
-
-      - name: Login to GHCR
-        if: github.event_name != 'pull_request'
-        uses: docker/login-action@dd4fa0671be5250ee6f50aedf4cb05514abda2c7
-        with:
-          registry: ghcr.io
-          username: ${{ github.repository_owner }}
-          password: ${{ secrets.GITHUB_TOKEN }}

       - name: Extract metadata (tags, labels) for Docker
         id: meta
         uses: docker/metadata-action@f2a13332ac1ce8c0a71aeac48a150dbb1838ab67
         with:
           images: |
             ${{ github.repository_owner }}/maloja
-            ghcr.io/${{ github.repository_owner }}/maloja
           # generate Docker tags based on the following events/attributes
           tags: |
             type=semver,pattern={{version}}
@@ -63,7 +54,7 @@ jobs:
           push: ${{ github.event_name != 'pull_request' }}
           tags: ${{ steps.meta.outputs.tags }}
           labels: ${{ steps.meta.outputs.labels }}
-          platforms: linux/amd64,linux/arm64
+          platforms: linux/amd64,linux/arm64,linux/arm/v7
           cache-from: type=local,src=/tmp/.buildx-cache
           cache-to: type=local,dest=/tmp/.buildx-cache-new,mode=max
@@ -34,6 +34,7 @@ minor_release_name: "Yeonhee"
     - "[Feature] Added notification system for web interface"
     - "[Bugfix] Fixed crash when encountering error in Lastfm import"
 3.0.6:
+  commit: "b3d4cb7a153845d1f5a5eef67a6508754e338f2f"
   notes:
     - "[Performance] Implemented search in database"
     - "[Bugfix] Better parsing of featuring artists"
@@ -43,7 +43,7 @@ for version in releases:
 	try:
 		prev_tag = sp.check_output(["git","show",f'v{maj}.{min}.{hot}']).decode()
 		prev_tag_commit = prev_tag.split('\n')[6].split(" ")[1]
-	except:
+	except Exception:
 		pass
 	else:
 		assert prev_tag_commit == info['commit']
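The common thread of these commits: every bare `except:` becomes `except Exception:`. A bare `except` also catches `KeyboardInterrupt` and `SystemExit`, which derive from `BaseException` rather than `Exception`, so the narrowed handlers still let a Ctrl-C or a deliberate exit propagate. A standalone sketch of the difference (not maloja code):

    # Sketch: why "except Exception:" is preferred over a bare "except:".
    # KeyboardInterrupt and SystemExit derive from BaseException, not Exception.
    try:
        raise KeyboardInterrupt  # e.g. the user pressing Ctrl-C mid-operation
    except Exception:
        print("not reached: KeyboardInterrupt is not an Exception")
    except BaseException:
        print("a bare 'except:' behaves like this clause and swallows the interrupt")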
@@ -62,7 +62,7 @@ class APIHandler:

 		try:
 			response.status,result = self.handle(path,keys)
-		except:
+		except Exception:
 			exceptiontype = sys.exc_info()[0]
 			if exceptiontype in self.errors:
 				response.status,result = self.errors[exceptiontype]
@@ -82,7 +82,7 @@ class APIHandler:
 		try:
 			methodname = self.get_method(path,keys)
 			method = self.methods[methodname]
-		except:
+		except Exception:
 			log("Could not find a handler for method " + str(methodname) + " in API " + self.__apiname__,module="debug")
 			log("Keys: " + str(keys),module="debug")
 			raise InvalidMethodException()
@@ -94,5 +94,5 @@ class APIHandler:
 		# fixing etc is handled by the main scrobble function
 		try:
 			return database.incoming_scrobble(rawscrobble,api=self.__apiname__,client=client)
-		except:
+		except Exception:
 			raise ScrobblingException()
@@ -76,7 +76,7 @@ class Audioscrobbler(APIHandler):
 		#(artists,title) = cla.fullclean(artiststr,titlestr)
 		try:
 			timestamp = int(keys["timestamp"])
-		except:
+		except Exception:
 			timestamp = None
 		#database.createScrobble(artists,title,timestamp)
 		self.scrobble({'track_artists':[artiststr],'track_title':titlestr,'scrobble_time':timestamp},client=client)
@@ -80,7 +80,7 @@ class AudioscrobblerLegacy(APIHandler):
 		artiststr,titlestr = keys[artist_key], keys[track_key]
 		try:
 			timestamp = int(keys[time_key])
-		except:
+		except Exception:
 			timestamp = None
 		#database.createScrobble(artists,title,timestamp)
 		self.scrobble({
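Both Audioscrobbler handlers use the same fallback: if the submitted timestamp is absent or not an integer, scrobble with `timestamp = None`. A hypothetical helper capturing that pattern (the function name and the narrowed exception tuple are illustrative, not part of maloja):

    # Hypothetical helper mirroring the handlers' timestamp fallback.
    def parse_timestamp(keys, field):
        try:
            return int(keys[field])
        except (KeyError, ValueError, TypeError):  # missing key or non-numeric value
            return None

    print(parse_timestamp({"timestamp": "1650000000"}, "timestamp"))  # 1650000000
    print(parse_timestamp({}, "timestamp"))                           # None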
@@ -34,7 +34,7 @@ class Listenbrainz(APIHandler):
 	def submit(self,pathnodes,keys):
 		try:
 			token = self.get_token_from_request_keys(keys)
-		except:
+		except Exception:
 			raise BadAuthException()

 		client = apikeystore.check_and_identify_key(token)
@@ -45,7 +45,7 @@ class Listenbrainz(APIHandler):
 		try:
 			listentype = keys["listen_type"]
 			payload = keys["payload"]
-		except:
+		except Exception:
 			raise MalformedJSONException()

 		if listentype == "playing_now":
@@ -57,9 +57,9 @@ class Listenbrainz(APIHandler):
 			artiststr, titlestr = metadata["artist_name"], metadata["track_name"]
 			try:
 				timestamp = int(listen["listened_at"])
-			except:
+			except Exception:
 				timestamp = None
-		except:
+		except Exception:
 			raise MalformedJSONException()

 		self.scrobble({
@@ -74,7 +74,7 @@ class Listenbrainz(APIHandler):
 	def validate_token(self,pathnodes,keys):
 		try:
 			token = self.get_token_from_request_keys(keys)
-		except:
+		except Exception:
 			raise BadAuthException()
 		if not apikeystore.check_key(token):
 			raise InvalidAuthException()
@@ -256,7 +256,7 @@ def get_top_artists(dbconn=None,**keys):
 		try:
 			res = get_charts_artists(timerange=rng,dbconn=dbconn)[0]
 			results.append({"range":rng,"artist":res["artist"],"scrobbles":res["scrobbles"]})
-		except:
+		except Exception:
 			results.append({"range":rng,"artist":None,"scrobbles":0})

 	return results
@@ -272,7 +272,7 @@ def get_top_tracks(dbconn=None,**keys):
 		try:
 			res = get_charts_tracks(timerange=rng,dbconn=dbconn)[0]
 			results.append({"range":rng,"track":res["track"],"scrobbles":res["scrobbles"]})
-		except:
+		except Exception:
 			results.append({"range":rng,"track":None,"scrobbles":0})

 	return results
@@ -302,7 +302,7 @@ def artist_info(dbconn=None,**keys):
 			},
 			"topweeks":len([e for e in cached.weekly_topartists if e == artist])
 		}
-	except:
+	except Exception:
 		# if the artist isnt in the charts, they are not being credited and we
 		# need to show information about the credited one
 		replaceartist = sqldb.get_credited_artists(artist)[0]
@@ -370,7 +370,7 @@ def get_predefined_rulesets(dbconn=None):
 			else: name = rawf.split("_")[1]
 			desc = line2.replace("# DESC: ","") if "# DESC: " in line2 else ""
 			author = rawf.split("_")[0]
-		except:
+		except Exception:
 			continue

 		ruleset = {"file":rawf}
@@ -10,96 +10,97 @@ from doreah.logging import log

 from ..globalconf import malojaconfig

-HIGH_NUMBER = 1000000
-CACHE_SIZE = 10000
-ENTITY_CACHE_SIZE = 1000000
-CACHE_ADJUST_STEP = 100
-
-cache = lru.LRU(CACHE_SIZE)
-entitycache = lru.LRU(ENTITY_CACHE_SIZE)
-
-hits, misses = 0, 0
-
-
-
-@runhourly
-def maintenance():
-	if malojaconfig['USE_GLOBAL_CACHE']:
+if malojaconfig['USE_GLOBAL_CACHE']:
+	CACHE_SIZE = 1000
+	ENTITY_CACHE_SIZE = 100000
+
+	cache = lru.LRU(CACHE_SIZE)
+	entitycache = lru.LRU(ENTITY_CACHE_SIZE)
+
+	hits, misses = 0, 0
+
+
+
+	@runhourly
+	def maintenance():
 		print_stats()
 		trim_cache()

 	def print_stats():
 		log(f"Cache Size: {len(cache)} [{len(entitycache)} E], System RAM Utilization: {psutil.virtual_memory().percent}%, Cache Hits: {hits}/{hits+misses}")
 		#print("Full rundown:")
 		#import sys
 		#for k in cache.keys():
 		#	print(f"\t{k}\t{sys.getsizeof(cache[k])}")


 	def cached_wrapper(inner_func):

-		if not malojaconfig['USE_GLOBAL_CACHE']: return inner_func
 		def outer_func(*args,**kwargs):
 			if 'dbconn' in kwargs:
 				conn = kwargs.pop('dbconn')
 			else:
 				conn = None
 			global hits, misses
 			key = (serialize(args),serialize(kwargs), inner_func, kwargs.get("since"), kwargs.get("to"))

 			if key in cache:
 				hits += 1
 				return cache.get(key)

 			else:
 				misses += 1
 				result = inner_func(*args,**kwargs,dbconn=conn)
 				cache[key] = result
 				return result

 		return outer_func


 	# cache for functions that call with a whole list of entity ids
 	# we don't want a new cache entry for every single combination, but keep a common
 	# cache that's aware of what we're calling
 	def cached_wrapper_individual(inner_func):

-		if not malojaconfig['USE_GLOBAL_CACHE']: return inner_func
 		def outer_func(set_arg,**kwargs):

 			if 'dbconn' in kwargs:
 				conn = kwargs.pop('dbconn')
 			else:
 				conn = None

 			#global hits, misses
 			result = {}
 			for id in set_arg:
 				if (inner_func,id) in entitycache:
 					result[id] = entitycache[(inner_func,id)]
 					#hits += 1
 				else:
 					pass
 					#misses += 1

 			remaining = inner_func(set(e for e in set_arg if e not in result),dbconn=conn)
 			for id in remaining:
 				entitycache[(inner_func,id)] = remaining[id]
 				result[id] = remaining[id]

 			return result

 		return outer_func


-def invalidate_caches(scrobbletime):
-	if malojaconfig['USE_GLOBAL_CACHE']:
+	def invalidate_caches(scrobbletime=None):
 		cleared, kept = 0, 0
 		for k in cache.keys():
 			# VERY BIG TODO: differentiate between None as in 'unlimited timerange' and None as in 'time doesnt matter here'!
-			if (k[3] is None or scrobbletime >= k[3]) and (k[4] is None or scrobbletime <= k[4]):
+			if scrobbletime is None or (k[3] is None or scrobbletime >= k[3]) and (k[4] is None or scrobbletime <= k[4]):
 				cleared += 1
 				del cache[k]
 			else:
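The cache key built in `cached_wrapper` deliberately ends with `kwargs.get("since")` and `kwargs.get("to")`, which is what lets `invalidate_caches` test `k[3]`/`k[4]` and drop only entries whose time window could contain the new scrobble. The same idea stripped to its core (illustrative names, not the maloja API):

    # Stripped-down sketch of time-window cache invalidation: each key carries
    # the query's (since, to) range; an event at time t only invalidates
    # entries whose range could include t.
    cache = {}

    def put(query, since, to, value):
        cache[(query, since, to)] = value

    def invalidate(t):
        for k in list(cache):
            _, since, to = k
            if (since is None or t >= since) and (to is None or t <= to):
                del cache[k]

    put("top_artists", 100, 200, ["artistA"])
    put("top_artists", 300, None, ["artistB"])
    invalidate(150)      # only the 100-200 entry can contain t=150
    print(list(cache))   # [('top_artists', 300, None)]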
@@ -107,35 +108,49 @@ def invalidate_caches(scrobbletime):
 		log(f"Invalidated {cleared} of {cleared+kept} DB cache entries")


 	def invalidate_entity_cache():
 		entitycache.clear()


 	def trim_cache():
 		ramprct = psutil.virtual_memory().percent
 		if ramprct > malojaconfig["DB_MAX_MEMORY"]:
 			log(f"{ramprct}% RAM usage, clearing cache and adjusting size!")
 			#ratio = 0.6
 			#targetsize = max(int(len(cache) * ratio),50)
 			#log(f"Reducing to {targetsize} entries")
 			#cache.set_size(targetsize)
 			#cache.set_size(HIGH_NUMBER)
 			cache.clear()
-		if cache.get_size() > CACHE_ADJUST_STEP:
-			cache.set_size(cache.get_size() - CACHE_ADJUST_STEP)
+			#if cache.get_size() > CACHE_ADJUST_STEP:
+			#	cache.set_size(cache.get_size() - CACHE_ADJUST_STEP)

 			#log(f"New RAM usage: {psutil.virtual_memory().percent}%")
 			print_stats()

+else:
+	def cached_wrapper(func):
+		return func
+	def cached_wrapper_individual(func):
+		return func
+	def invalidate_caches(scrobbletime=None):
+		return None
+	def invalidate_entity_cache():
+		return None


 def serialize(obj):
 	try:
 		return serialize(obj.hashable())
-	except:
+	except Exception:
 		try:
 			return json.dumps(obj)
-		except:
+		except Exception:
 			if isinstance(obj, (list, tuple, set)):
 				return "[" + ",".join(serialize(o) for o in obj) + "]"
 			elif isinstance(obj,dict):
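Since the whole caching module is now gated on `USE_GLOBAL_CACHE`, the new `else:` branch has to keep the public surface intact: callers still decorate their functions and still call the invalidators, so "caching off" is expressed as identity decorators and no-op functions. The pattern in miniature (standalone sketch; `CACHING_ENABLED` stands in for the config flag):

    # Miniature of the gating pattern: the module always exposes the same names,
    # but they degrade to no-ops when the feature is disabled.
    CACHING_ENABLED = False  # stand-in for malojaconfig['USE_GLOBAL_CACHE']

    if CACHING_ENABLED:
        _store = {}
        def cached(func):
            def wrapper(*args):
                if args not in _store:
                    _store[args] = func(*args)
                return _store[args]
            return wrapper
    else:
        def cached(func):  # identity decorator: callers don't change
            return func

    @cached
    def square(x):
        return x * x

    print(square(4))  # 16 either way; only the bookkeeping differs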
@@ -28,7 +28,7 @@ def is_dir_usable(pth):
 		os.mknod(pthj(pth,".test"))
 		os.remove(pthj(pth,".test"))
 		return True
-	except:
+	except Exception:
 		return False

 def get_env_vars(key,pathsuffix=[]):
@@ -149,8 +149,8 @@ malojaconfig = Configuration(
 		"cache_expire_positive":(tp.Integer(), "Image Cache Expiration", 60, "Days until images are refetched"),
 		"cache_expire_negative":(tp.Integer(), "Image Cache Negative Expiration", 5, "Days until failed image fetches are reattempted"),
 		"db_max_memory":(tp.Integer(min=0,max=100), "RAM Percentage soft limit", 80, "RAM Usage in percent at which Maloja should no longer increase its database cache."),
-		"use_request_cache":(tp.Boolean(), "Use request-local DB Cache", True),
-		"use_global_cache":(tp.Boolean(), "Use global DB Cache", True)
+		"use_request_cache":(tp.Boolean(), "Use request-local DB Cache", False),
+		"use_global_cache":(tp.Boolean(), "Use global DB Cache", False)
 	},
 	"Fluff":{
 		"scrobbles_gold":(tp.Integer(), "Scrobbles for Gold", 250, "How many scrobbles a track needs to be considered 'Gold' status"),
@@ -94,7 +94,7 @@ def dl_image(url):
 		uri = datauri.DataURI.make(mime,charset='ascii',base64=True,data=data)
 		log(f"Downloaded {url} for local caching")
 		return uri
-	except:
+	except Exception:
 		log(f"Image {url} could not be downloaded for local caching")
 		return None
@@ -260,7 +260,7 @@ def local_files(artist=None,artists=None,title=None):
 			for f in os.listdir(data_dir['images'](purename)):
 				if f.split(".")[-1] in ["png","jpg","jpeg","gif"]:
 					images.append("/images/" + purename + "/" + f)
-		except:
+		except Exception:
 			pass

 	return images
@@ -13,7 +13,7 @@ def find_representative(sequence,attribute_id,attribute_count):

 		newsequence = [e for e in newsequence if e[attribute_count] == max(el[attribute_count] for el in newsequence)]
 		return newsequence[0]
-	except:
+	except Exception:
 		return None
 	finally:
 		for e in newsequence:
@@ -430,7 +430,7 @@ def time_fix(t):
 	try:
 		t = [int(p) for p in t]
 		return MTRangeGregorian(t[:3])
-	except:
+	except Exception:
 		pass

 	if isinstance(t[1],str) and t[1].startswith("w"):
@@ -438,7 +438,7 @@ def time_fix(t):
 		year = int(t[0])
 		weeknum = int(t[1][1:])
 		return MTRangeWeek(year=year,week=weeknum)
-	except:
+	except Exception:
 		raise
@@ -146,7 +146,7 @@ def remove_identical(*dicts):
 		try: #multidicts
 			for v in d.getall(k):
 				keys.append(k,v)
-		except: #normaldicts
+		except Exception: #normaldicts
 			v = d.get(k)
 			keys.append(k,v)
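`remove_identical` distinguishes bottle-style multidicts from plain dicts by just attempting the multidict API (EAFP) rather than type-checking. The duck-typing in isolation (sketch; a plain list of pairs stands in for maloja's own `keys` container):

    # Sketch of the duck-typing in remove_identical: try the multidict API,
    # fall back to the plain-dict API when getall() doesn't exist.
    pairs = []

    def collect(d, k):
        try:                    # multidicts: one key, several values
            for v in d.getall(k):
                pairs.append((k, v))
        except AttributeError:  # normal dicts have no getall()
            pairs.append((k, d.get(k)))

    collect({"a": 1}, "a")
    print(pairs)  # [('a', 1)]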
@@ -11,21 +11,21 @@ try:
 	from simplejson import JSONEncoder
 	JSONEncoder._olddefault = JSONEncoder.default
 	JSONEncoder.default = newdefault
-except:
+except Exception:
 	pass

 try:
 	from json import JSONEncoder
 	JSONEncoder._olddefault = JSONEncoder.default
 	JSONEncoder.default = newdefault
-except:
+except Exception:
 	pass

 try:
 	from ujson import JSONEncoder
 	JSONEncoder._olddefault = JSONEncoder.default
 	JSONEncoder.default = newdefault
-except:
+except Exception:
 	pass
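All three blocks patch whichever JSON library is importable so that its encoder consults `newdefault` (defined earlier in this file, outside the hunk) before giving up on non-serializable objects, keeping the original hook reachable as `_olddefault`. The general shape of such a patch (a sketch with an invented set-handling `newdefault`, not maloja's actual one):

    # Sketch of the monkeypatch pattern: keep the original fallback hook around
    # and chain to it for anything the new hook doesn't handle itself.
    import json
    from json import JSONEncoder

    def newdefault(self, obj):        # invented example hook
        if isinstance(obj, set):      # e.g. make sets serializable
            return sorted(obj)
        return self._olddefault(obj)  # defer to the original behaviour

    JSONEncoder._olddefault = JSONEncoder.default
    JSONEncoder.default = newdefault

    print(json.dumps({"tags": {"b", "a"}}))  # {"tags": ["a", "b"]}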
@@ -51,7 +51,7 @@ class expandeddate(date):
 	def fromchrcalendar(cls,y,w,d):
 		try:
 			return datetime.date.fromisocalendar(y,w,d) - timedelta(days=1) #sunday instead of monday
-		except:
+		except Exception:
 			# pre python3.8 compatibility

 			firstdayofyear = datetime.date(y,1,1)
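`date.fromisocalendar` only exists from Python 3.8 on, so on older interpreters the attribute access itself raises `AttributeError` and control drops into the manual computation; `except Exception:` still covers that case. A feature-probe of the same kind, using the standard ISO week arithmetic (sketch; maloja's own fallback additionally shifts to Sunday-based weeks):

    # Sketch: feature-probing by exception, as fromchrcalendar does.
    import datetime
    from datetime import timedelta

    def iso_date(y, w, d):
        try:
            return datetime.date.fromisocalendar(y, w, d)
        except AttributeError:  # Python < 3.8: the method does not exist
            jan4 = datetime.date(y, 1, 4)  # January 4th is always in ISO week 1
            week1_monday = jan4 - timedelta(days=jan4.isoweekday() - 1)
            return week1_monday + timedelta(weeks=w - 1, days=d - 1)

    print(iso_date(2022, 1, 1))  # 2022-01-03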
@@ -27,14 +27,14 @@ def getInstance():
 	try:
 		output = subprocess.check_output(["pidof","Maloja"])
 		return int(output)
-	except:
+	except Exception:
 		return None

 def getInstanceSupervisor():
 	try:
 		output = subprocess.check_output(["pidof","maloja_supervisor"])
 		return int(output)
-	except:
+	except Exception:
 		return None

 def restart():
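`pidof` exits non-zero when no matching process is found, and `subprocess.check_output` turns a non-zero exit into a `CalledProcessError`, so the `except Exception:` branch here is the ordinary "not running" path rather than a rare failure. In isolation (sketch; the extra exception types are illustrative hardening):

    # Sketch: check_output raises CalledProcessError on a non-zero exit status,
    # so "no such process" surfaces as an exception, not as empty output.
    import subprocess

    def pid_of(name):
        try:
            return int(subprocess.check_output(["pidof", name]))
        except (subprocess.CalledProcessError,  # pidof found nothing
                FileNotFoundError,              # pidof itself not installed
                ValueError):                    # several PIDs: int() fails
            return None

    print(pid_of("definitely-not-a-process"))  # None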
@@ -59,7 +59,7 @@ def start():
 		print("\t" + col["blue"]("http://localhost:" + str(port)))
 		print("\t" + col["blue"]("http://localhost:" + str(port) + "/admin_setup"))
 		return True
-	except:
+	except Exception:
 		print("Error while starting Maloja.")
 		return False
@@ -33,7 +33,7 @@ def profile(func):
 	if FULL_PROFILE:
 		try:
 			pstats.Stats(profiler).dump_stats(os.path.join(benchmarkfolder,f"{func.__name__}.stats"))
-		except:
+		except Exception:
 			pass

 	return result
maloja/thirdparty/__init__.py (vendored): 2 lines changed
|
||||||
for node in self.metadata[resp]:
|
for node in self.metadata[resp]:
|
||||||
try:
|
try:
|
||||||
res = res[node]
|
res = res[node]
|
||||||
except:
|
except Exception:
|
||||||
return None
|
return None
|
||||||
return res
|
return res
|
||||||
|
|
||||||
|
|
maloja/thirdparty/musicbrainz.py (vendored): 2 lines changed
|
||||||
if imgurl is not None: imgurl = self.postprocess_url(imgurl)
|
if imgurl is not None: imgurl = self.postprocess_url(imgurl)
|
||||||
return imgurl
|
return imgurl
|
||||||
|
|
||||||
except:
|
except Exception:
|
||||||
return None
|
return None
|
||||||
finally:
|
finally:
|
||||||
time.sleep(2)
|
time.sleep(2)
|
||||||
|
|
|
@@ -37,7 +37,7 @@ def upgrade_apikeys():
 		for key,identifier in entries:
 			_apikeys.apikeystore[identifier] = key
 		os.remove(oldfile)
-	except:
+	except Exception:
 		pass