2019-02-02 18:54:01 +03:00
|
|
|
import urllib
|
|
|
|
import json
|
2019-02-04 15:45:51 +03:00
|
|
|
from threading import Thread
|
2019-02-02 18:54:01 +03:00
|
|
|
|
2019-02-04 15:45:51 +03:00
|
|
|
|
|
|
|
def getpictures(ls, result, tracks=False):
    """Resolve image URLs for artists or tracks and append them to *result*.

    Meant to be run as a Thread target: instead of returning a value, the
    images are appended (in input order) to the caller-supplied list
    *result* so the caller can collect them after join().

    ls:     list of artist names (tracks=False) or track objects (tracks=True)
    result: output list, mutated in place
    tracks: select track lookup instead of artist lookup
    """
    from utilities import getArtistsInfo, getTracksInfo

    # pick the matching info fetcher, then take each entry's "image" field
    fetchinfo = getTracksInfo if tracks else getArtistsInfo
    result.extend(info.get("image") for info in fetchinfo(ls))
|
2019-02-02 18:54:01 +03:00
|
|
|
|
|
|
|
def replacedict(keys, dbport):
    """Build the placeholder -> value dict used to render the main page.

    keys:   request query parameters (currently unused; kept for interface
            compatibility with the other page handlers)
    dbport: port of the local database HTTP server on [::1]

    Returns a dict mapping KEY_* template placeholders to strings or lists
    of strings. Image lookups run on background threads (see getpictures)
    and are joined before returning.
    """
    import urllib.request  # `import urllib` alone does not expose .request
    from htmlgenerators import artistLink, trackLink, getTimeDesc

    max_show = 15
    # one position label per chart row: "#1" .. "#15"
    # (range end is max_show + 1 — range(1, max_show) produced only 14 labels
    # for up to 15 chart entries)
    posrange = ["#" + str(i) for i in range(1, max_show + 1)]

    def _db_get(path):
        # Fetch a JSON document from the local database server.
        response = urllib.request.urlopen("http://[::1]:" + str(dbport) + path)
        return json.loads(response.read())

    # artist charts
    charts = _db_get("/charts/artists")["list"][:max_show]
    artisttitles = [c["artist"] for c in charts]
    artistimages = []
    # resolve artist images in the background while we keep fetching data
    t1 = Thread(target=getpictures, args=(artisttitles, artistimages,))
    t1.start()
    artistlinks = [artistLink(a) for a in artisttitles]

    # track charts
    charts = _db_get("/charts/tracks")["list"][:max_show]
    trackobjects = [t["track"] for t in charts]
    tracktitles = [t["title"] for t in trackobjects]
    trackimages = []
    t2 = Thread(target=getpictures, args=(trackobjects, trackimages,),
                kwargs={"tracks": True})
    t2.start()
    tracklinks = [trackLink(t) for t in trackobjects]

    # recent scrobbles
    scrobblelist = _db_get("/scrobbles?max=50")["list"]
    # ignore the extra time attribute, the format should still work as a
    # track object for the helpers below
    scrobbletrackobjects = scrobblelist
    scrobbleartists = [", ".join([artistLink(a) for a in s["artists"]])
                       for s in scrobblelist]
    scrobbletimes = [getTimeDesc(s["time"], short=True) for s in scrobblelist]
    scrobbleimages = []
    t3 = Thread(target=getpictures, args=(scrobbletrackobjects, scrobbleimages,),
                kwargs={"tracks": True})
    t3.start()
    scrobbletracklinks = [trackLink(t) for t in scrobbletrackobjects]

    # scrobble count stats, each rendered as a link to the matching list page
    def _stats_link(query, href):
        stats = _db_get("/numscrobbles" + query)
        return "<a href='" + href + "'>" + str(stats["amount"]) + "</a>"

    scrobbles_today = _stats_link("?since=today", "/scrobbles?since=today")
    scrobbles_month = _stats_link("?since=month", "/scrobbles?since=month")
    scrobbles_year = _stats_link("?since=year", "/scrobbles?since=year")
    scrobbles_total = _stats_link("", "/scrobbles")

    # wait for the image workers before handing the lists to the template
    t1.join()
    t2.join()
    t3.join()

    return {"KEY_ARTISTIMAGE": artistimages, "KEY_ARTISTNAME": artisttitles,
            "KEY_ARTISTLINK": artistlinks, "KEY_POSITION_ARTIST": posrange,
            "KEY_TRACKIMAGE": trackimages, "KEY_TRACKNAME": tracktitles,
            "KEY_TRACKLINK": tracklinks, "KEY_POSITION_TRACK": posrange,
            "KEY_SCROBBLES_TODAY": scrobbles_today,
            "KEY_SCROBBLES_MONTH": scrobbles_month,
            "KEY_SCROBBLES_YEAR": scrobbles_year,
            "KEY_SCROBBLES_TOTAL": scrobbles_total,
            "KEY_SCROBBLE_TIME": scrobbletimes,
            "KEY_SCROBBLE_ARTISTS": scrobbleartists,
            "KEY_SCROBBLE_TITLE": scrobbletracklinks,
            "KEY_SCROBBLE_IMAGE": scrobbleimages}
|
2019-02-02 18:54:01 +03:00
|
|
|
|