No longer catching BaseExceptions

commit f359662cf3
parent d5f5b48d85
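For context (this explanatory sketch is not part of the commit): a bare "except:" catches everything derived from BaseException, including KeyboardInterrupt and SystemExit, so a Ctrl-C or a shutdown request can be silently swallowed inside such a block. "except Exception:" still handles ordinary errors but lets those control-flow exceptions propagate. A minimal illustration:

    # Illustrative only -- not from the Maloja codebase.
    def swallow_everything():
        try:
            raise KeyboardInterrupt  # e.g. Ctrl-C arriving mid-operation
        except:                      # old style: also catches BaseException subclasses
            return "interrupt swallowed"

    def swallow_errors_only():
        try:
            raise KeyboardInterrupt
        except Exception:            # new style: KeyboardInterrupt is not an Exception
            return "never reached"

    print(swallow_everything())      # -> "interrupt swallowed"
    try:
        swallow_errors_only()
    except KeyboardInterrupt:
        print("interrupt propagated to the caller")  # -> this branch runs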
@@ -30,13 +30,13 @@ def print_header_info():
 def get_instance():
     try:
         return int(subprocess.check_output(["pidof","maloja"]))
-    except:
+    except Exception:
         return None

 def get_instance_supervisor():
     try:
         return int(subprocess.check_output(["pidof","maloja_supervisor"]))
-    except:
+    except Exception:
         return None

 def restart():

@@ -62,7 +62,7 @@ def start():
             print("\t" + col["blue"]("http://localhost:" + str(port)))
             print("\t" + col["blue"]("http://localhost:" + str(port) + "/admin_setup"))
             return True
-        except:
+        except Exception:
             print("Error while starting Maloja.")
             return False

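A note on the hunk above (an observation, not something the commit adds): when no matching process exists, subprocess.check_output raises subprocess.CalledProcessError, and unexpected output makes int() raise ValueError; both are Exception subclasses, so the narrower clause still yields None for the "not running" case, while a KeyboardInterrupt during the call now escapes instead of being misreported as "not running". A standalone sketch with an illustrative function name:

    import subprocess

    def pid_of(process_name):
        try:
            # CalledProcessError (no match) and ValueError (unexpected output)
            # are both Exception subclasses and still end up returning None
            return int(subprocess.check_output(["pidof", process_name]))
        except Exception:
            return None

    print(pid_of("some-process-that-is-not-running"))  # -> None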
@@ -62,7 +62,7 @@ class APIHandler:

         try:
             response.status,result = self.handle(path,keys)
-        except:
+        except Exception:
             exceptiontype = sys.exc_info()[0]
             if exceptiontype in self.errors:
                 response.status,result = self.errors[exceptiontype]

@@ -82,7 +82,7 @@ class APIHandler:
         try:
             methodname = self.get_method(path,keys)
             method = self.methods[methodname]
-        except:
+        except Exception:
             log("Could not find a handler for method " + str(methodname) + " in API " + self.__apiname__,module="debug")
             log("Keys: " + str(keys),module="debug")
             raise InvalidMethodException()

@@ -94,5 +94,5 @@ class APIHandler:
         # fixing etc is handled by the main scrobble function
         try:
             return database.incoming_scrobble(rawscrobble,api=self.__apiname__,client=client)
-        except:
+        except Exception:
             raise ScrobblingException()

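The three APIHandler hunks above share one pattern: catch an ordinary Exception, inspect its type, and translate known types into an HTTP status plus payload (or re-raise a more specific API exception). A rough sketch of that dispatch idea; the errors table and function names below are illustrative, not Maloja's actual API:

    import sys

    # Hypothetical mapping of exception types to (status, payload) responses.
    errors = {
        KeyError: (400, {"error": "missing parameter"}),
        ValueError: (400, {"error": "malformed value"}),
    }

    def dispatch(handler, path, keys):
        try:
            return 200, handler(path, keys)
        except Exception:
            exceptiontype = sys.exc_info()[0]
            if exceptiontype in errors:
                return errors[exceptiontype]
            return 500, {"error": "unhandled server error"}

    # A handler that trips over a missing key yields the mapped 400 response.
    print(dispatch(lambda path, keys: keys["user"], "/scrobble", {}))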
@@ -76,7 +76,7 @@ class Audioscrobbler(APIHandler):
         #(artists,title) = cla.fullclean(artiststr,titlestr)
         try:
             timestamp = int(keys["timestamp"])
-        except:
+        except Exception:
             timestamp = None
         #database.createScrobble(artists,title,timestamp)
         self.scrobble({'track_artists':[artiststr],'track_title':titlestr,'scrobble_time':timestamp},client=client)

@@ -80,7 +80,7 @@ class AudioscrobblerLegacy(APIHandler):
         artiststr,titlestr = keys[artist_key], keys[track_key]
         try:
             timestamp = int(keys[time_key])
-        except:
+        except Exception:
             timestamp = None
         #database.createScrobble(artists,title,timestamp)
         self.scrobble({

@@ -34,7 +34,7 @@ class Listenbrainz(APIHandler):
     def submit(self,pathnodes,keys):
         try:
             token = self.get_token_from_request_keys(keys)
-        except:
+        except Exception:
             raise BadAuthException()

         client = apikeystore.check_and_identify_key(token)

@@ -45,7 +45,7 @@ class Listenbrainz(APIHandler):
         try:
             listentype = keys["listen_type"]
             payload = keys["payload"]
-        except:
+        except Exception:
             raise MalformedJSONException()

         if listentype == "playing_now":

@@ -57,9 +57,9 @@ class Listenbrainz(APIHandler):
                 artiststr, titlestr = metadata["artist_name"], metadata["track_name"]
                 try:
                     timestamp = int(listen["listened_at"])
-                except:
+                except Exception:
                     timestamp = None
-            except:
+            except Exception:
                 raise MalformedJSONException()

             self.scrobble({

@@ -74,7 +74,7 @@ class Listenbrainz(APIHandler):
     def validate_token(self,pathnodes,keys):
         try:
             token = self.get_token_from_request_keys(keys)
-        except:
+        except Exception:
             raise BadAuthException()
         if not apikeystore.check_key(token):
             raise InvalidAuthException()

@@ -319,7 +319,7 @@ def get_top_artists(dbconn=None,**keys):
         try:
             res = get_charts_artists(timerange=rng,dbconn=dbconn)[0]
             results.append({"range":rng,"artist":res["artist"],"scrobbles":res["scrobbles"]})
-        except:
+        except Exception:
             results.append({"range":rng,"artist":None,"scrobbles":0})

     return results

@@ -335,7 +335,7 @@ def get_top_tracks(dbconn=None,**keys):
         try:
             res = get_charts_tracks(timerange=rng,dbconn=dbconn)[0]
             results.append({"range":rng,"track":res["track"],"scrobbles":res["scrobbles"]})
-        except:
+        except Exception:
             results.append({"range":rng,"track":None,"scrobbles":0})

     return results

@@ -367,7 +367,7 @@ def artist_info(dbconn=None,**keys):
             "topweeks":len([e for e in cached.weekly_topartists if e == artist_id]),
             "id":artist_id
         }
-    except:
+    except Exception:
         # if the artist isnt in the charts, they are not being credited and we
         # need to show information about the credited one
         replaceartist = sqldb.get_credited_artists(artist)[0]

@@ -443,7 +443,7 @@ def get_predefined_rulesets(dbconn=None):
             else: name = rawf.split("_")[1]
             desc = line2.replace("# DESC: ","") if "# DESC: " in line2 else ""
             author = rawf.split("_")[0]
-        except:
+        except Exception:
             continue

         ruleset = {"file":rawf}

@@ -147,10 +147,10 @@ else:
     def serialize(obj):
         try:
             return serialize(obj.hashable())
-        except:
+        except Exception:
             try:
                 return json.dumps(obj)
-            except:
+            except Exception:
                 if isinstance(obj, (list, tuple, set)):
                     return "[" + ",".join(serialize(o) for o in obj) + "]"
                 elif isinstance(obj,dict):

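The fallback chain in serialize above keeps working with the narrower clauses because the failures it relies on are ordinary Exceptions: an object without a hashable() method raises AttributeError, and json.dumps on an unsupported type raises TypeError. A quick check (illustrative, not part of the commit):

    import json

    class Opaque:
        pass

    try:
        json.dumps(Opaque())
    except Exception as e:
        print(type(e).__name__)  # -> TypeError

    try:
        Opaque().hashable()
    except Exception as e:
        print(type(e).__name__)  # -> AttributeError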
@@ -32,7 +32,7 @@ def profile(func):
         if FULL_PROFILE:
             try:
                 pstats.Stats(profiler).dump_stats(os.path.join(benchmarkfolder,f"{func.__name__}.stats"))
-            except:
+            except Exception:
                 pass

         return result

@@ -94,7 +94,7 @@ def dl_image(url):
         uri = datauri.DataURI.make(mime,charset='ascii',base64=True,data=data)
         log(f"Downloaded {url} for local caching")
         return uri
-    except:
+    except Exception:
         log(f"Image {url} could not be downloaded for local caching")
         return None

@@ -260,7 +260,7 @@ def local_files(artist=None,artists=None,title=None):
             for f in os.listdir(data_dir['images'](purename)):
                 if f.split(".")[-1] in ["png","jpg","jpeg","gif"]:
                     images.append("/images/" + purename + "/" + f)
-        except:
+        except Exception:
             pass

     return images

@@ -13,7 +13,7 @@ def find_representative(sequence,attribute_id,attribute_count):

         newsequence = [e for e in newsequence if e[attribute_count] == max(el[attribute_count] for el in newsequence)]
         return newsequence[0]
-    except:
+    except Exception:
         return None
     finally:
         for e in newsequence:

@@ -430,7 +430,7 @@ def time_fix(t):
     try:
         t = [int(p) for p in t]
         return MTRangeGregorian(t[:3])
-    except:
+    except Exception:
         pass

     if isinstance(t[1],str) and t[1].startswith("w"):

@@ -438,7 +438,7 @@ def time_fix(t):
             year = int(t[0])
             weeknum = int(t[1][1:])
             return MTRangeWeek(year=year,week=weeknum)
-        except:
+        except Exception:
             raise

@@ -146,7 +146,7 @@ def remove_identical(*dicts):
         try: #multidicts
             for v in d.getall(k):
                 keys.append(k,v)
-        except: #normaldicts
+        except Exception: #normaldicts
             v = d.get(k)
             keys.append(k,v)

@@ -28,7 +28,7 @@ def is_dir_usable(pth):
         os.mknod(pthj(pth,".test"))
         os.remove(pthj(pth,".test"))
         return True
-    except:
+    except Exception:
         return False

 def get_env_vars(key,pathsuffix=[]):

@@ -11,21 +11,21 @@ try:
     from simplejson import JSONEncoder
     JSONEncoder._olddefault = JSONEncoder.default
     JSONEncoder.default = newdefault
-except:
+except Exception:
     pass

 try:
     from json import JSONEncoder
     JSONEncoder._olddefault = JSONEncoder.default
     JSONEncoder.default = newdefault
-except:
+except Exception:
     pass

 try:
     from ujson import JSONEncoder
     JSONEncoder._olddefault = JSONEncoder.default
     JSONEncoder.default = newdefault
-except:
+except Exception:
     pass

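In the hunk above, each optional encoder is patched only if its import succeeds; a missing library raises ImportError, which is an Exception subclass, so "except Exception" still skips it quietly, while an interrupt during startup now propagates. A rough, self-contained sketch of the pattern with a stand-in newdefault (Maloja defines the real one elsewhere in that module):

    import json

    def newdefault(self, obj):
        # illustrative stand-in: fall back to str() for objects the encoder does not know
        return str(obj)

    try:
        from simplejson import JSONEncoder   # optional dependency; may not be installed
        JSONEncoder._olddefault = JSONEncoder.default
        JSONEncoder.default = newdefault
    except Exception:                        # ImportError is an Exception, so this still
        pass                                 # skips the patch cleanly when simplejson is absent

    try:
        from json import JSONEncoder         # stdlib encoder, always available
        JSONEncoder._olddefault = JSONEncoder.default
        JSONEncoder.default = newdefault
    except Exception:
        pass

    class Opaque:
        pass

    # Unknown objects now go through newdefault instead of raising TypeError.
    print(json.dumps({"value": Opaque()}))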
@@ -51,7 +51,7 @@ class expandeddate(date):
     def fromchrcalendar(cls,y,w,d):
         try:
             return datetime.date.fromisocalendar(y,w,d) - timedelta(days=1) #sunday instead of monday
-        except:
+        except Exception:
             # pre python3.8 compatibility

             firstdayofyear = datetime.date(y,1,1)

maloja/thirdparty/__init__.py (vendored, 2 lines changed)

@@ -230,7 +230,7 @@ class MetadataInterface(GenericInterface,abstract=True):
         for node in self.metadata[resp]:
             try:
                 res = res[node]
-            except:
+            except Exception:
                 return None
         return res

maloja/thirdparty/musicbrainz.py (vendored, 2 lines changed)

@@ -57,7 +57,7 @@ class MusicBrainz(MetadataInterface):
             if imgurl is not None: imgurl = self.postprocess_url(imgurl)
             return imgurl

-        except:
+        except Exception:
             return None
         finally:
             time.sleep(2)

@@ -37,7 +37,7 @@ def upgrade_apikeys():
         for key,identifier in entries:
             _apikeys.apikeystore[identifier] = key
         os.remove(oldfile)
-    except:
+    except Exception:
         pass