1
0
mirror of https://github.com/krateng/maloja.git synced 2023-08-10 21:12:55 +03:00

Asynchronous image calls and expanded functionality of website generation

This commit is contained in:
Krateng 2019-02-02 16:17:07 +01:00
parent c158dc2d88
commit 4edc352bcc
3 changed files with 32 additions and 14 deletions

3
.gitignore vendored Normal file
View File

@ -0,0 +1,3 @@
__pycache__
*.sh
apikey

View File

@ -100,7 +100,12 @@ def static_html(name):
with open("website/" + name + ".html") as htmlfile: with open("website/" + name + ".html") as htmlfile:
html = htmlfile.read() html = htmlfile.read()
for k in txt_keys: for k in txt_keys:
html = html.replace(k,txt_keys[k]) if isinstance(txt_keys[k],list):
# if list, we replace each occurrence with the next item
for element in txt_keys[k]:
html = html.replace(k,element,1)
else:
html = html.replace(k,txt_keys[k])
return html return html

View File

@ -1,3 +1,9 @@
import re
import os
import hashlib
from threading import Thread
### TSV files ### TSV files
def parseTSV(filename,*args): def parseTSV(filename,*args):
@ -37,8 +43,6 @@ def parseTSV(filename,*args):
return result return result
def checksumTSV(folder): def checksumTSV(folder):
import hashlib
import os
sums = "" sums = ""
@ -80,7 +84,6 @@ def combineChecksums(filename,checksums):
# checks ALL files for their rule state. if they are all the same as the current loaded one, the entire database can be assumed to be consistent with the current ruleset # checks ALL files for their rule state. if they are all the same as the current loaded one, the entire database can be assumed to be consistent with the current ruleset
# in any other case, get out # in any other case, get out
def consistentRulestate(folder,checksums): def consistentRulestate(folder,checksums):
import os
result = [] result = []
for scrobblefile in os.listdir(folder + "/"): for scrobblefile in os.listdir(folder + "/"):
@ -102,7 +105,6 @@ def consistentRulestate(folder,checksums):
def parseAllTSV(path,*args): def parseAllTSV(path,*args):
import os
result = [] result = []
for f in os.listdir(path + "/"): for f in os.listdir(path + "/"):
@ -114,7 +116,6 @@ def parseAllTSV(path,*args):
return result return result
def createTSV(filename): def createTSV(filename):
import os
if not os.path.exists(filename): if not os.path.exists(filename):
open(filename,"w").close() open(filename,"w").close()
@ -211,8 +212,6 @@ cachedTracks = {}
cachedArtists = {} cachedArtists = {}
def getTrackInfo(artists,title): def getTrackInfo(artists,title):
import re
import os
obj = (frozenset(artists),title) obj = (frozenset(artists),title)
filename = "-".join([re.sub("[^a-zA-Z0-9]","",artist) for artist in artists]) + "_" + re.sub("[^a-zA-Z0-9]","",title) filename = "-".join([re.sub("[^a-zA-Z0-9]","",artist) for artist in artists]) + "_" + re.sub("[^a-zA-Z0-9]","",title)
@ -245,8 +244,6 @@ def getTrackInfo(artists,title):
return result return result
def getArtistInfo(artist): def getArtistInfo(artist):
import re
import os
obj = artist obj = artist
filename = re.sub("[^a-zA-Z0-9]","",artist) filename = re.sub("[^a-zA-Z0-9]","",artist)
@ -275,5 +272,18 @@ def getArtistInfo(artist):
cachedArtists[artist] = result["image"] cachedArtists[artist] = result["image"]
return result return result
def getArtistsInfo(artistlist):
	# Warm the artist-info cache for every artist in parallel, then collect
	# the (now cached) results in the original list order.
	workers = [Thread(target=getArtistInfo,args=(artist,)) for artist in artistlist]
	for worker in workers:
		worker.start()
	# wait until every background fetch has populated the cache
	for worker in workers:
		worker.join()
	# the threaded calls only filled the cache; fetch the actual results here
	return [getArtistInfo(a) for a in artistlist]