Krateng 2020-07-25 05:29:23 +02:00
parent abef221435
commit f89dcf0599
3 changed files with 9 additions and 54 deletions

View File

@@ -134,49 +134,3 @@ def api_request_track(track):
        pass
    return None

### SCROBBLING

# creates signature and returns full query string
def lfmbuild(parameters):
    m = hashlib.md5()
    keys = sorted(str(k) for k in parameters)
    m.update(utf("".join(str(k) + str(parameters[k]) for k in keys)))
    m.update(utf(get_settings("LASTFM_API_SECRET")))
    sig = m.hexdigest()
    return urllib.parse.urlencode(parameters) + "&api_sig=" + sig

def utf(st):
    return st.encode(encoding="UTF-8")
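
For reference, a worked sketch of the signing scheme lfmbuild implements, with placeholder values standing in for the API key, session key and secret that normally come from get_settings (the values below are made up, not taken from this commit):

import hashlib, urllib.parse

params = {"method": "track.scrobble", "api_key": "KEY", "sk": "SESSIONKEY"}
# concatenate key+value pairs in sorted key order, then append the shared secret
concat = "".join(k + params[k] for k in sorted(params)) + "SECRET"
api_sig = hashlib.md5(concat.encode("UTF-8")).hexdigest()
# final request body: the urlencoded parameters plus the signature
query = urllib.parse.urlencode(params) + "&api_sig=" + api_sig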
apis_scrobble = []

if get_settings("LASTFM_API_SK") not in [None,"ASK"] and get_settings("LASTFM_API_SECRET") not in [None,"ASK"] and get_settings("LASTFM_API_KEY") not in [None,"ASK"]:
    apis_scrobble.append({
        "name":"LastFM",
        "scrobbleurl":"http://ws.audioscrobbler.com/2.0/",
        "requestbody":lambda artists,title,timestamp: lfmbuild({"method":"track.scrobble","artist[0]":", ".join(artists),"track[0]":title,"timestamp":timestamp,"api_key":get_settings("LASTFM_API_KEY"),"sk":get_settings("LASTFM_API_SK")})
    })

def proxy_scrobble(artists,title,timestamp):
    for api in apis_scrobble:
        response = urllib.request.urlopen(api["scrobbleurl"],data=utf(api["requestbody"](artists,title,timestamp)))
        xml = response.read()
        data = ET.fromstring(xml)
        if data.attrib.get("status") == "ok":
            if data.find("scrobbles").attrib.get("ignored") == "0":
                log(api["name"] + ": Scrobble accepted: " + "/".join(artists) + " - " + title)
            else:
                log(api["name"] + ": Scrobble not accepted: " + "/".join(artists) + " - " + title)
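
A minimal sketch of the response check in proxy_scrobble above; the sample XML is shaped after the attributes the code inspects (status on the root element, ignored on the scrobbles element) and is illustrative rather than copied from Last.fm documentation:

import xml.etree.ElementTree as ET

sample = '<lfm status="ok"><scrobbles accepted="1" ignored="0"/></lfm>'
data = ET.fromstring(sample)
# these are the two fields proxy_scrobble checks before logging success
assert data.attrib.get("status") == "ok"
assert data.find("scrobbles").attrib.get("ignored") == "0"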

View File

@@ -53,13 +53,13 @@ class GenericInterface:
        s = cls()
        if s.active_proxyscrobble():
            services["proxyscrobble"].append(s)
            log(cls.name + "Registered as proxy scrobble target")
            log(cls.name + " registered as proxy scrobble target")
        if s.active_import():
            services["import"].append(s)
            log(cls.name + "Registered as scrobble import source")
            log(cls.name + " registered as scrobble import source")
        if s.active_metadata():
            services["metadata"].append(s)
            log(cls.name + "Registered for metadata provider")
            log(cls.name + " registered as metadata provider")

# proxy scrobbler
class ProxyScrobbleInterface(GenericInterface,abstract=True):
@@ -78,11 +78,11 @@ class ProxyScrobbleInterface(GenericInterface,abstract=True):
    def scrobble(self,artists,title,timestamp):
        response = urllib.request.urlopen(
            self.proxyscrobble["scrobbleurl"],
            data=utf(self.postdata(artists,title,timestamp)))
            data=utf(self.proxyscrobble_postdata(artists,title,timestamp)))
        responsedata = response.read()
        if self.proxyscrobble["response_type"] == "xml":
            data = ElementTree.fromstring(responsedata)
        return self.parse_response(data)
        return self.proxyscrobble_parse_response(data)
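
To illustrate the renamed hooks, a hypothetical service plugging into ProxyScrobbleInterface might look roughly like this (the class name, URL and setting name are invented for the sketch; only the dict keys and method names are taken from this diff):

class ExampleService(ProxyScrobbleInterface):
    name = "ExampleService"
    proxyscrobble = {
        "scrobbleurl": "https://scrobble.example/api",  # hypothetical endpoint
        "response_type": "xml",
        "activated_setting": "SCROBBLE_EXAMPLE"         # hypothetical setting name
    }
    def proxyscrobble_postdata(self, artists, title, timestamp):
        ...  # build the request body expected by the target API
    def proxyscrobble_parse_response(self, data):
        ...  # return True if the scrobble was accepted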
# scrobble import
class ImportInterface(GenericInterface,abstract=True):
@@ -127,4 +127,5 @@ def utf(st):
### actually create everything
from . import lastfm
__all__ = ["lastfm"] # list them for now, do this dynamically later
from . import *
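
The "do this dynamically later" comment could, for example, be addressed by deriving __all__ from the modules actually present in the package; a possible sketch (not part of this commit, assuming it lives in the package's __init__.py):

import pkgutil

# list every submodule of this package instead of hard-coding ["lastfm"]
__all__ = [mod.name for mod in pkgutil.iter_modules(__path__)]
from . import *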

View File

@@ -18,10 +18,10 @@ class LastFM(MetadataInterface, ProxyScrobbleInterface):
        "activated_setting": "SCROBBLE_LASTFM"
    }

    def parse_response(self,data):
    def proxyscrobble_parse_response(self,data):
        return data.attrib.get("status") == "ok" and data.find("scrobbles").attrib.get("ignored") == "0"

    def postdata(self,artists,title,timestamp):
    def proxyscrobble_postdata(self,artists,title,timestamp):
        return self.query_compose({
            "method":"track.scrobble",
            "artist[0]":", ".join(artists),