mirror of https://github.com/krateng/maloja.git synced 2023-08-10 21:12:55 +03:00

Simplified structure for third party services

Krateng 2020-07-25 05:09:58 +02:00
parent 471a61f788
commit abef221435
7 changed files with 177 additions and 63 deletions

View File

@@ -20,7 +20,7 @@ requires = [
 	"setproctitle>=1.1.10",
 	"wand>=0.5.4",
 	"lesscpy>=0.13",
-	"jinja2">2.11
+	"jinja2>2.11"
 ]
 resources = [
 	"web/*/*",

View File

@@ -6,7 +6,10 @@ from . import utilities
 from .malojatime import register_scrobbletime, time_stamps, ranges
 from .urihandler import uri_to_internal, internal_to_uri, compose_querystring
 from . import compliant_api
-from .external import proxy_scrobble
+#from .external import proxy_scrobble
+from .thirdparty import proxy_scrobble_all
 from .__pkginfo__ import version
 from .globalconf import datadir
 # doreah toolkit
@@ -155,7 +158,7 @@ def createScrobble(artists,title,time,album=None,duration=None,volatile=False):
 	invalidate_caches()
 	dblock.release()
-	proxy_scrobble(artists,title,time)
+	proxy_scrobble_all(artists,title,time)
 	return get_track_dict(TRACKS[obj.track])
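
The call site now fans out to every registered proxy target rather than a single hard-coded one. A minimal sketch of the call as createScrobble makes it (argument values made up):

from maloja.thirdparty import proxy_scrobble_all

# forwards the scrobble to each service in services["proxyscrobble"];
# a no-op when no proxy target is configured
proxy_scrobble_all(["Artist A", "Artist B"], "Track Title", 1595646598)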

130 maloja/thirdparty/__init__.py vendored Normal file
View File

@@ -0,0 +1,130 @@
# these different interfaces are for the different roles a third party service
# can fulfill. implementing them adds some generic functionality to attempt to
# actually perform the role, but this will have to be overwritten in most cases.
# functionality is separated into different layers to allow partial override
# also yes, we're using singleton classes for the different providers
# pls don't sue me

import xml.etree.ElementTree as ElementTree
import urllib.parse, urllib.request

from doreah.settings import get_settings
from doreah.logging import log

# registry of active service singletons, keyed by role
services = {
	"proxyscrobble":[],
	"import":[],
	"metadata":[]
}

def proxy_scrobble_all(artists,title,timestamp):
	for service in services["proxyscrobble"]:
		service.scrobble(artists,title,timestamp)

class GenericInterface:
	# each role is inactive unless the concrete subclass activates it
	def active_proxyscrobble(self):
		return False
	def active_import(self):
		return False
	def active_metadata(self):
		return False

	settings = {}

	proxyscrobble = {}
	scrobbleimport = {}
	metadata = {}

	def __init__(self):
		# populate from settings file once on creation
		# avoid constant disk access, restart on adding services is acceptable
		for key in self.settings:
			self.settings[key] = get_settings(self.settings[key])

	# defining a non-abstract subclass instantiates it and registers it
	# for every role it is configured for
	def __init_subclass__(cls,abstract=False):
		if not abstract:
			s = cls()
			if s.active_proxyscrobble():
				services["proxyscrobble"].append(s)
				log(cls.name + " registered as proxy scrobble target")
			if s.active_import():
				services["import"].append(s)
				log(cls.name + " registered as scrobble import source")
			if s.active_metadata():
				services["metadata"].append(s)
				log(cls.name + " registered as metadata provider")

# proxy scrobbler
class ProxyScrobbleInterface(GenericInterface,abstract=True):

	proxyscrobble = {
		"required_settings":[],
		"activated_setting":None
	}

	def active_proxyscrobble(self):
		return (
			all(self.settings[key] not in [None,"ASK"] for key in self.proxyscrobble["required_settings"]) and
			get_settings(self.proxyscrobble["activated_setting"])
		)

	def scrobble(self,artists,title,timestamp):
		response = urllib.request.urlopen(
			self.proxyscrobble["scrobbleurl"],
			data=utf(self.postdata(artists,title,timestamp)))
		responsedata = response.read()
		if self.proxyscrobble["response_type"] == "xml":
			data = ElementTree.fromstring(responsedata)
			return self.parse_response(data)

# scrobble import
class ImportInterface(GenericInterface,abstract=True):

	scrobbleimport = {
		"required_settings":[],
		"activated_setting":None
	}

	def active_import(self):
		return (
			all(self.settings[key] not in [None,"ASK"] for key in self.scrobbleimport["required_settings"]) and
			get_settings(self.scrobbleimport["activated_setting"])
		)

# metadata
class MetadataInterface(GenericInterface,abstract=True):

	metadata = {
		"required_settings":[],
		"activated_setting":None
	}

	def active_metadata(self):
		return (
			all(self.settings[key] not in [None,"ASK"] for key in self.metadata["required_settings"]) and
			get_settings(self.metadata["activated_setting"])
		)

### useful stuff

def utf(st):
	return st.encode(encoding="UTF-8")

### actually create everything

from . import lastfm
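
To illustrate the registration mechanics, here is a hypothetical service that is not part of this commit: defining any non-abstract subclass triggers __init_subclass__ above, which instantiates the singleton and registers it for each role whose settings are present. All names below (ExampleScrobbler, EXAMPLE_TOKEN, the endpoint URL) are made up.

import urllib.parse
from maloja.thirdparty import ProxyScrobbleInterface

class ExampleScrobbler(ProxyScrobbleInterface):
	name = "Example"
	# internal keys mapped to (hypothetical) settings-file keys
	settings = {
		"token":"EXAMPLE_TOKEN"
	}
	proxyscrobble = {
		"scrobbleurl": "https://scrobble.example.com/api",
		"response_type":"xml",
		"required_settings": ["token"],
		"activated_setting": "SCROBBLE_EXAMPLE"
	}
	def postdata(self,artists,title,timestamp):
		return urllib.parse.urlencode({
			"artist": ", ".join(artists),
			"track": title,
			"timestamp": timestamp,
			"token": self.settings["token"]
		})
	def parse_response(self,data):
		return data.attrib.get("status") == "ok"

Since __init__ reads each setting once via get_settings, adding or reconfiguring a service takes a restart, which the module comment explicitly accepts.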

View File

@@ -1,14 +0,0 @@
import hashlib
import urllib

class LastFMInterface:
	def query_compose(self,parameters):
		m = hashlib.md5()
		keys = sorted(str(k) for k in parameters)
		m.update(self.utf("".join(str(k) + str(parameters[k]) for k in keys)))
		m.update(self.utf(get_settings("LASTFM_API_SECRET")))
		sig = m.hexdigest()
		return urllib.parse.urlencode(parameters) + "&api_sig=" + sig

	def utf(self,st):
		return st.encode(encoding="UTF-8")

41 maloja/thirdparty/lastfm.py vendored Normal file
View File

@@ -0,0 +1,41 @@
from . import MetadataInterface, ProxyScrobbleInterface, utf
import hashlib
import urllib.parse, urllib.request

class LastFM(MetadataInterface, ProxyScrobbleInterface):
	name = "LastFM"

	settings = {
		"apikey":"LASTFM_API_KEY",
		"sk":"LASTFM_API_SK",
		"secret":"LASTFM_API_SECRET"
	}

	proxyscrobble = {
		"scrobbleurl": "http://ws.audioscrobbler.com/2.0/",
		"response_type":"xml",
		"required_settings": ["apikey","sk","secret"],
		"activated_setting": "SCROBBLE_LASTFM"
	}

	def parse_response(self,data):
		return data.attrib.get("status") == "ok" and data.find("scrobbles").attrib.get("ignored") == "0"

	def postdata(self,artists,title,timestamp):
		return self.query_compose({
			"method":"track.scrobble",
			"artist[0]":", ".join(artists),
			"track[0]":title,
			"timestamp":timestamp,
			"api_key":self.settings["apikey"],
			"sk":self.settings["sk"]
		})

	# creates signature and returns full query string
	def query_compose(self,parameters):
		m = hashlib.md5()
		keys = sorted(str(k) for k in parameters)
		m.update(utf("".join(str(k) + str(parameters[k]) for k in keys)))
		m.update(utf(self.settings["secret"]))
		sig = m.hexdigest()
		return urllib.parse.urlencode(parameters) + "&api_sig=" + sig
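
For reference, query_compose implements the standard Last.fm request signing: concatenate the sorted key/value pairs, append the shared secret, and take the MD5 hex digest as api_sig. A standalone sketch with dummy values:

import hashlib, urllib.parse

params = {"method":"track.scrobble", "api_key":"abc123", "sk":"def456"}   # dummy values
secret = "s3cr3t"                                                         # dummy shared secret

raw = "".join(str(k) + str(params[k]) for k in sorted(params)) + secret
api_sig = hashlib.md5(raw.encode("UTF-8")).hexdigest()
query = urllib.parse.urlencode(params) + "&api_sig=" + api_sig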

View File

@@ -1,20 +0,0 @@
import xml.etree.ElementTree as ElementTree
import urllib

class ScrobbleInterface:

	required_settings = []
	activated_setting = ""

	def active(self):
		return (
			all(get_settings(settingname) not in [None,"ASK"] for settingname in self.required_settings) and
			get_settings(self.activated_setting)
		)

	def scrobble(self,artists,title,timestamp):
		response = urllib.request.urlopen(self.scrobbleurl,data=self.postdata(artists,title,timestamp))
		responsedata = response.read()
		if self.response_type == "xml":
			data = ElementTree.fromstring(responsedata)
			return self.parse_response(data)

View File

@@ -1,26 +0,0 @@
from . import ScrobbleInterface
from doreah.settings import get_settings

class LastFMScrobbler(ScrobbleInterface,LastFMInterface):

	scrobbleurl = "http://ws.audioscrobbler.com/2.0/"

	required_settings = [
		"LASTFM_API_KEY",
		"LASTFM_API_SK",
		"LASTFM_API_SECRET"
	]
	activated_setting = "SCROBBLE_LASTFM"

	def parse_response(self,data):
		return data.attrib.get("status") == "ok" and data.find("scrobbles").attrib.get("ignored") == "0"

	def postdata(self,artists,title,timestamp):
		return self.query_compose({
			"method":"track.scrobble",
			"artist[0]":", ".join(artists),
			"track[0]":title,
			"timestamp":timestamp,
			"api_key":get_settings("LASTFM_API_KEY"),
			"sk":get_settings("LASTFM_API_SK")
		})