2018-12-19 18:11:10 +03:00
|
|
|
import json
import urllib
import urllib.parse
import urllib.request
|
|
|
|
|
|
|
|
|
2019-02-17 16:25:40 +03:00
|
|
|
def instructions(keys, dbport):
    """Build the template substitutions for the scrobble-list page.

    keys: multidict-like query parameters (must support .get / .getall,
        e.g. a bottle FormsDict) — recognized keys: artist, title,
        associated, since, to, in, max.
    dbport: port number of the local database HTTP server on [::1].

    Returns a tuple (replace, pushresources) where `replace` maps template
    placeholders (KEY_SCROBBLELIST, KEY_SCROBBLES, KEY_IMAGEURL,
    KEY_LIMITS) to strings, and `pushresources` lists local images
    suitable for HTTP/2 server push.
    """
    # Explicit submodule imports: a bare `import urllib` does not
    # guarantee urllib.request / urllib.parse are importable.
    import urllib.request
    import urllib.parse
    from utilities import getArtistInfo, getTrackInfo
    from htmlgenerators import getTimeDesc, artistLink, artistLinks, trackLink, keysToUrl, pickKeys, clean

    clean(keys)
    timekeys = pickKeys(keys, "since", "to", "in", "max")
    limitkeys = pickKeys(keys, "artist", "title", "associated")

    # Get scrobble data from the local database server.
    # `with` closes the response and avoids leaking the socket.
    url = "http://[::1]:" + str(dbport) + "/scrobbles?" + keysToUrl(limitkeys, timekeys)
    with urllib.request.urlopen(url) as response:
        db_data = json.loads(response.read())
    scrobbles = db_data["list"]

    # describe the scope
    limitstring = ""
    if keys.get("title") is not None:
        limitstring += "of " + trackLink({"title": keys.get("title"), "artists": keys.getall("artist")}) + " "
        limitstring += "by " + artistLinks(keys.getall("artist"))
    elif keys.get("artist") is not None:
        limitstring += "by " + artistLink(keys.get("artist"))
        if keys.get("associated") is not None:
            # the query included associated artists; mention them
            url = "http://[::1]:" + str(dbport) + "/artistinfo?artist=" + urllib.parse.quote(keys["artist"])
            with urllib.request.urlopen(url) as response:
                db_data = json.loads(response.read())
            moreartists = db_data["associated"]
            if moreartists:
                limitstring += " <span class='extra'>including " + artistLinks(moreartists) + "</span>"

    # get image
    if limitkeys.get("title") is not None:
        imgurl = getTrackInfo(limitkeys.getall("artist"), limitkeys.get("title")).get("image")
    elif keys.get("artist") is not None:
        imgurl = getArtistInfo(keys.get("artist")).get("image")
    elif scrobbles:
        # no explicit filter: fall back to the most recent scrobble's track
        imgurl = getTrackInfo(scrobbles[0]["artists"], scrobbles[0]["title"]).get("image")
    else:
        imgurl = ""

    # The info lookups may yield None for "image" (dict .get default);
    # normalize so imgurl.startswith below cannot raise AttributeError.
    if imgurl is None:
        imgurl = ""

    # Only locally-served images (leading "/") can be server-pushed.
    pushresources = [{"file": imgurl, "type": "image"}] if imgurl.startswith("/") else []

    # build list — collect fragments and join once instead of quadratic +=
    fragments = ["<table class='list'>"]
    for s in scrobbles:
        fragments.append("<tr>")
        fragments.append("<td class='time'>" + getTimeDesc(s["time"]) + "</td>")
        fragments.append("<td class='artists'>" + artistLinks(s["artists"]) + "</td>")
        fragments.append("<td class='title'>" + trackLink({"artists": s["artists"], "title": s["title"]}) + "</td>")
        fragments.append("</tr>")
    fragments.append("</table>")
    html = "".join(fragments)

    replace = {
        "KEY_SCROBBLELIST": html,
        "KEY_SCROBBLES": str(len(scrobbles)),
        "KEY_IMAGEURL": imgurl,
        "KEY_LIMITS": limitstring,
    }
    return (replace, pushresources)
|
2018-12-19 18:11:10 +03:00
|
|
|
|