2021-12-20 00:15:28 +03:00
|
|
|
from ..globalconf import data_dir, malojaconfig
|
2020-09-04 03:42:01 +03:00
|
|
|
from .. import thirdparty
|
|
|
|
|
2021-12-20 00:15:28 +03:00
|
|
|
from doreah import caching
|
2020-09-04 03:42:01 +03:00
|
|
|
from doreah.logging import log
|
|
|
|
|
|
|
|
import itertools
|
2019-02-02 18:17:07 +03:00
|
|
|
import os
|
2019-03-03 23:55:35 +03:00
|
|
|
import urllib
|
2019-04-03 14:20:18 +03:00
|
|
|
import random
|
2019-11-22 01:14:53 +03:00
|
|
|
import base64
|
2020-09-04 03:42:01 +03:00
|
|
|
from threading import Thread, Timer
|
|
|
|
import re
|
2020-09-04 14:59:04 +03:00
|
|
|
import datetime
|
2019-04-03 18:16:27 +03:00
|
|
|
|
|
|
|
|
2018-12-17 17:10:10 +03:00
|
|
|
|
2019-03-28 19:40:22 +03:00
|
|
|
|
|
|
|
|
2019-04-01 17:52:42 +03:00
|
|
|
### Caches

# Expiry settings are configured in days — convert to seconds for doreah.
cacheage = malojaconfig["CACHE_EXPIRE_POSITIVE"] * 24 * 3600
cacheage_neg = malojaconfig["CACHE_EXPIRE_NEGATIVE"] * 24 * 3600

# Persistent caches for remote image lookup results.
# maxage_negative suggests failed lookups ("no image") are cached as well,
# with their own (typically shorter) expiry — confirm against doreah docs.
artist_cache = caching.Cache(name="imgcache_artists",maxage=cacheage,maxage_negative=cacheage_neg,persistent=True)
track_cache = caching.Cache(name="imgcache_tracks",maxage=cacheage,maxage_negative=cacheage_neg,persistent=True)
|
2019-03-28 19:40:22 +03:00
|
|
|
|
|
|
|
|
2019-04-03 17:03:48 +03:00
|
|
|
# removes emojis and weird shit from names
def clean(name):
	"""Return *name* reduced to its alphanumeric characters.

	str.isalnum accepts non-latin scripts (korean, kanji, ...) so those
	survive; punctuation, emoji and whitespace are dropped.

	Note: the original expression included ``or c in []`` — membership in
	an empty list is always False, so it was dead code and has been removed.
	The trailing .strip() is kept for safety, though no whitespace can
	survive the filter.
	"""
	return "".join(c for c in name if c.isalnum()).strip()
|
|
|
|
|
2019-11-22 01:14:53 +03:00
|
|
|
def get_all_possible_filenames(artist=None,artists=None,title=None):
	"""Return every plausible image path (relative, incl. superfolder) for
	a track (artists + title) or an artist.

	Variants include the cleaned names, ascii-only "safe" versions, all
	artist orderings (capped), and lowercase forms. If nothing usable
	remains, a hash-based fallback name is generated so there is always
	at least one candidate. Returns [] when neither a track nor an artist
	is specified.
	"""
	# decide whether we're dealing with a track or artist, then clean up names
	# (only remove non-alphanumeric, allow korean and stuff)
	if title is not None and artists is not None:
		is_track = True
		title = clean(title)
		artists = [clean(a) for a in artists]
	elif artist is not None:
		is_track = False
		artist = clean(artist)
	else:
		return []

	superfolder = "tracks/" if is_track else "artists/"
	candidates = []

	if is_track:
		# ascii-only fallbacks for filesystem friendliness
		safeartists = [re.sub("[^a-zA-Z0-9]","",a) for a in artists]
		safetitle = re.sub("[^a-zA-Z0-9]","",title)

		# with many artists the permutation count explodes, so fall back
		# to one canonical (sorted) ordering instead
		if len(artists) < 4:
			unsafeperms = itertools.permutations(artists)
			safeperms = itertools.permutations(safeartists)
		else:
			unsafeperms = [sorted(artists)]
			safeperms = [sorted(safeartists)]

		for ordering in unsafeperms:
			name = "-".join(ordering) + "_" + title
			if name != "":
				candidates.append(name)
				candidates.append(name.lower())
		for ordering in safeperms:
			name = "-".join(ordering) + "_" + safetitle
			if name != "":
				candidates.append(name)
				candidates.append(name.lower())

		candidates = list(set(candidates))
		if len(candidates) == 0:
			candidates.append(str(hash((frozenset(artists),title))))
	else:
		safeartist = re.sub("[^a-zA-Z0-9]","",artist)
		for name in (artist, safeartist):
			if name != "":
				candidates.append(name)
				candidates.append(name.lower())

		candidates = list(set(candidates))
		if len(candidates) == 0:
			candidates.append(str(hash(artist)))

	return [superfolder + name for name in candidates]
|
|
|
|
|
|
|
|
def local_files(artist=None,artists=None,title=None):
	"""Return web paths ("/images/...") of all locally stored images for
	the given track (artists + title) or artist.

	For every candidate name from get_all_possible_filenames, two layouts
	are checked: a direct file ("name.ext") and a folder of images
	("name/anything.ext"). Missing folders are expected and skipped.
	"""
	filenames = get_all_possible_filenames(artist,artists,title)

	images = []
	extensions = ("png","jpg","jpeg","gif")

	for purename in filenames:
		# direct files
		for ext in extensions:
			if os.path.exists(data_dir['images'](purename + "." + ext)):
				images.append("/images/" + purename + "." + ext)

		# folder containing several images for this entity
		try:
			for f in os.listdir(data_dir['images'](purename)):
				if f.split(".")[-1] in extensions:
					images.append("/images/" + purename + "/" + f)
		# fixed: was a bare `except:` which also swallowed KeyboardInterrupt
		# and genuine bugs; only the expected "no such folder" cases are
		# best-effort here
		except (FileNotFoundError, NotADirectoryError):
			pass

	return images
|
|
|
|
|
|
|
|
|
2019-03-28 19:40:22 +03:00
|
|
|
|
2019-05-05 12:56:09 +03:00
|
|
|
# these caches are there so we don't check all files every time, but return the same one
# rotation interval: how long a randomly picked local file keeps being served
local_cache_age = malojaconfig["LOCAL_IMAGE_ROTATE"]
# non-persistent, short-lived caches mapping an artist name / a
# (frozenset(artists), title) tuple to the currently chosen local file
local_artist_cache = caching.Cache(maxage=local_cache_age)
local_track_cache = caching.Cache(maxage=local_cache_age)
|
2019-02-16 18:42:45 +03:00
|
|
|
|
2019-03-12 13:39:36 +03:00
|
|
|
def getTrackImage(artists,title,fast=False):
	"""Return an image URL/path for the given track, or "" if none found.

	Lookup order:
	1. local image files (if enabled), rotated via local_track_cache
	2. cached remote result (cached non-existence redirects to the artist)
	3. fast lookups stop here and return a /image?... redirect link
	4. non-fast lookups query third-party providers and cache the result

	Falls back to the first artist with an image, then "".
	"""
	hashable_track = (frozenset(artists),title)

	# Prio 1: Local image
	if malojaconfig["USE_LOCAL_IMAGES"]:
		try:
			# cache hit keeps serving the same randomly chosen file
			return local_track_cache.get(hashable_track)
		# fixed: was a bare `except:`; a cache miss raises, which is the
		# expected signal to scan the filesystem instead
		except Exception:
			images = local_files(artists=artists,title=title)
			if len(images) != 0:
				res = random.choice(images)
				local_track_cache.add(hashable_track,res)
				return urllib.parse.quote(res)

	# Prio 2: Cached remote link
	try:
		result = track_cache.get(hashable_track)
		if result is not None: return result
		# if we have cached the nonexistence of that image, we immediately return
		# the redirect to the artist and let the resolver handle it
		# (even if we're not in a fast lookup right now)
		for a in artists:
			res = getArtistImage(artist=a,fast=True)
			if res != "": return res
		return ""
	# fixed: was a bare `except:`; no cache entry at all — fall through
	except Exception:
		pass

	# fast request will not go further than this, but now generate redirect link
	if fast:
		return ("/image?title=" + urllib.parse.quote(title) + "&" + "&".join(
			"artist=" + urllib.parse.quote(a) for a in artists))

	# Prio 3 (only non-fast lookup): actually call third parties
	result = thirdparty.get_image_track_all((artists,title))
	# cache results (even negative ones)
	track_cache.add(hashable_track,result)
	# return either result or redirect to artist
	if result is not None: return result
	for a in artists:
		res = getArtistImage(artist=a,fast=False)
		if res != "": return res
	return ""
|
2019-03-12 13:39:36 +03:00
|
|
|
|
2019-03-28 16:18:12 +03:00
|
|
|
|
|
|
|
def getArtistImage(artist,fast=False):
	"""Return an image URL/path for the given artist, or "" if none found.

	Lookup order:
	1. local image files (if enabled), rotated via local_artist_cache
	2. cached remote result (cached non-existence returns "")
	3. fast lookups stop here and return a /image?artist=... redirect link
	4. non-fast lookups query third-party providers and cache the result
	"""
	# Prio 1: Local image
	if malojaconfig["USE_LOCAL_IMAGES"]:
		try:
			# cache hit keeps serving the same randomly chosen file
			return local_artist_cache.get(artist)
		# fixed: was a bare `except:`; a cache miss raises, which is the
		# expected signal to scan the filesystem instead
		except Exception:
			images = local_files(artist=artist)
			if len(images) != 0:
				res = random.choice(images)
				local_artist_cache.add(artist,res)
				return urllib.parse.quote(res)

	# Prio 2: Cached remote link
	try:
		result = artist_cache.get(artist)
		if result is not None: return result
		# none means non-existence is cached, return empty
		else: return ""
	# fixed: was a bare `except:`; no cache entry, go on
	except Exception:
		pass

	# fast request will not go further than this, but now generate redirect link
	if fast: return "/image?artist=" + urllib.parse.quote(artist)

	# Prio 3 (only non-fast lookup): actually call third parties
	result = thirdparty.get_image_artist_all(artist)
	# cache results (even negative ones)
	artist_cache.add(artist,result)
	if result is not None: return result
	else: return ""
|
2019-02-02 20:08:30 +03:00
|
|
|
|
2019-03-12 13:39:36 +03:00
|
|
|
def getTrackImages(trackobjectlist,fast=False):
	"""Fetch images for many tracks concurrently and return them in order.

	One worker thread per track warms the caches (remote calls overlap);
	the final list comprehension then collects the now-cached results.
	"""
	workers = [
		Thread(target=getTrackImage,args=(track["artists"],track["title"],),kwargs={"fast":fast})
		for track in trackobjectlist
	]
	for w in workers:
		w.start()
	for w in workers:
		w.join()

	# every lookup is cached now; collect results without extra waiting
	return [getTrackImage(t["artists"],t["title"]) for t in trackobjectlist]
|
2019-03-28 16:18:12 +03:00
|
|
|
|
2019-03-12 13:39:36 +03:00
|
|
|
def getArtistImages(artistlist,fast=False):
	"""Fetch images for many artists concurrently and return them in order.

	One worker thread per artist warms the caches (remote calls overlap);
	the final list comprehension then collects the now-cached results.
	"""
	workers = [
		Thread(target=getArtistImage,args=(artist,),kwargs={"fast":fast})
		for artist in artistlist
	]
	for w in workers:
		w.start()
	for w in workers:
		w.join()

	# async calls only cached results, now we need to get them
	return [getArtistImage(a) for a in artistlist]
|
2019-02-03 18:52:37 +03:00
|
|
|
|
2019-02-20 23:10:58 +03:00
|
|
|
|
|
|
|
|
|
|
|
# new way of serving images
# instead always generate a link locally, but redirect that on the fly
# this way the page can load faster and images will trickle in without having to resort to XHTTP requests
def resolveImage(artist=None,track=None):
	"""Resolve a generated /image link to an actual image.

	A track (dict with "artists" and "title") takes precedence over an
	artist name; returns None when neither is given.
	"""
	if track is not None:
		return getTrackImage(track["artists"],track["title"])
	if artist is not None:
		return getArtistImage(artist)
|
2019-04-03 18:16:27 +03:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
2019-11-22 01:14:53 +03:00
|
|
|
def set_image(b64,**keys):
	# Save a web-uploaded image (data-URL base64 string) for a track or
	# artist and make it the currently served local image.
	# keys: either artists=[...] + title=... (track) or artist=... (artist).

	# a track upload carries a "title" key; an artist upload does not
	track = "title" in keys

	log("Trying to set image, b64 string: " + str(b64[:30] + "..."),module="debug")

	# data-URL format: "data:image/<type>;base64,<payload>"
	# NOTE(review): fullmatch returns None for malformed input, which would
	# raise AttributeError here — presumably acceptable for this endpoint
	regex = r"data:image/(\w+);base64,(.+)"
	type,b64 = re.fullmatch(regex,b64).groups()
	b64 = base64.b64decode(b64)
	# timestamp-based name keeps successive uploads from overwriting each other
	filename = "webupload" + str(int(datetime.datetime.now().timestamp())) + "." + type
	# prefer an already-existing image folder among all name variants
	for folder in get_all_possible_filenames(**keys):
		if os.path.exists(data_dir['images'](folder)):
			with open(data_dir['images'](folder,filename),"wb") as f:
				f.write(b64)
			break
	else:
		# no folder exists yet — create one for the first name variant
		folder = get_all_possible_filenames(**keys)[0]
		os.makedirs(data_dir['images'](folder))
		with open(data_dir['images'](folder,filename),"wb") as f:
			f.write(b64)

	log("Saved image as " + data_dir['images'](folder,filename),module="debug")

	# set as current picture in rotation
	if track: local_track_cache.add((frozenset(keys["artists"]),keys["title"]),os.path.join("/images",folder,filename))
	else: local_artist_cache.add(keys["artist"],os.path.join("/images",folder,filename))
|