Implemented caching

krateng 2022-02-15 05:20:27 +01:00
parent d9f4021342
commit cc060d650b
6 changed files with 72 additions and 25 deletions

View File

@@ -11,6 +11,7 @@ from ..globalconf import data_dir, malojaconfig, apikeystore
#db
from . import sqldb
from . import cached
from . import dbcache
# doreah toolkit
from doreah.logging import log
@@ -108,6 +109,9 @@ def incoming_scrobble(artists,title,album=None,albumartists=None,duration=None,l
sqldb.add_scrobble(scrobbledict)
proxy_scrobble_all(artists,title,time)
dbcache.invalidate_caches(time)
return {"status":"success","scrobble":scrobbledict}

View File

@@ -0,0 +1,56 @@
# the more generalized caching for DB queries
# mostly to avoid long loading times for pages that show lots of information
# that changes very infrequently or not at all

import lru
import json

from doreah.regular import runhourly
from doreah.logging import log

USE_CACHE = True

cache = lru.LRU(300000)

@runhourly
def print_stats():
	log(f"Cache Size: {len(cache)}")

def cached_wrapper(inner_func):

	def outer_func(**kwargs):
		key = (serialize(kwargs), inner_func, kwargs.get("since"), kwargs.get("to"))

		if USE_CACHE and key in cache:
			return cache.get(key)
		else:
			result = inner_func(**kwargs)
			cache[key] = result
			return result

	return outer_func

def invalidate_caches(scrobbletime):
	for k in cache.keys():
		if (k[2] is None or scrobbletime >= k[2]) and (k[3] is None or scrobbletime <= k[3]):
			del cache[k]

def serialize(obj):
	try:
		return serialize(obj.hashable())
	except:
		try:
			return json.dumps(obj)
		except:
			if isinstance(obj, (list, tuple)):
				return "[" + ",".join(serialize(o) for o in obj) + "]"
			elif isinstance(obj,dict):
				return "{" + ",".join(serialize(o) + ":" + serialize(obj[o]) for o in obj) + "}"
			return json.dumps(obj.hashable())
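
A minimal usage sketch of the pattern above, assuming the definitions in this file are in scope; toy_query and the timestamp values are illustrative stand-ins for the real sqldb calls:

# illustrative stand-in for an expensive sqldb query
@cached_wrapper
def toy_query(since=None,to=None):
	return [s for s in (100,200,300) if (since or 0) <= s <= (to if to is not None else float("inf"))]

toy_query(since=0,to=250)    # computed once, result stored in the LRU cache
toy_query(since=0,to=250)    # same kwargs, served straight from the cache
invalidate_caches(150)       # a new scrobble at t=150 lies inside [0,250] ...
toy_query(since=0,to=250)    # ... so this call gets recomputed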

View File

@@ -6,6 +6,8 @@ from datetime import datetime
from ..globalconf import data_dir
from .dbcache import cached_wrapper
##### DB Technical
@@ -258,6 +260,7 @@ def get_artist_id(artistname):
### Functions that get rows according to parameters
@cached_wrapper
def get_scrobbles_of_artist(artist,since=None,to=None):
if since is None: since=0
@@ -278,6 +281,7 @@ def get_scrobbles_of_artist(artist,since=None,to=None):
#result = [scrobble_db_to_dict(row,resolve_references=resolve_references) for row in result]
return result
@cached_wrapper
def get_scrobbles_of_track(track,since=None,to=None):
if since is None: since=0
@@ -297,6 +301,7 @@ def get_scrobbles_of_track(track,since=None,to=None):
#result = [scrobble_db_to_dict(row) for row in result]
return result
@cached_wrapper
def get_scrobbles(since=None,to=None,resolve_references=True):
if since is None: since=0
@@ -323,6 +328,7 @@ def get_artists_of_track(track_id,resolve_references=True):
artists = [get_artist(row.artist_id) if resolve_references else row.artist_id for row in result]
return artists
def get_tracks_of_artist(artist):
artist_id = get_artist_id(artist)
@@ -351,6 +357,7 @@ def get_tracks():
### functions that count rows for parameters
@cached_wrapper
def count_scrobbles_by_artist(since,to):
jointable = sql.join(
DB['scrobbles'],
@@ -387,6 +394,7 @@ def count_scrobbles_by_artist(since,to):
result = rank(result,key='scrobbles')
return result
@cached_wrapper
def count_scrobbles_by_track(since,to):
with engine.begin() as conn:
@@ -406,6 +414,7 @@ def count_scrobbles_by_track(since,to):
result = rank(result,key='scrobbles')
return result
@cached_wrapper
def count_scrobbles_by_track_of_artist(since,to,artist):
artist_id = get_artist_id(artist)
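
One consequence of the wrapper: outer_func only accepts keyword arguments, so the decorated functions above have to be called with since/to (and all other parameters) as kwargs for kwargs.get("since") and kwargs.get("to") to land in the cache key. A brief illustration, assuming the decorated count_scrobbles_by_artist from this diff is in scope; the timestamps are arbitrary example values:

# keyword call: since/to become part of the cache key and of range-based invalidation
count_scrobbles_by_artist(since=1640995200,to=1646092799)

# a positional call would raise a TypeError, because outer_func is defined as (**kwargs)
# count_scrobbles_by_artist(1640995200,1646092799)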

View File

@@ -65,6 +65,9 @@ class MTRangeGeneric:

	def active(self):
		return (self.last_stamp() > datetime.utcnow().timestamp())

	def __contains__(self,timestamp):
		return timestamp >= self.first_stamp() and timestamp <= self.last_stamp()

# Any range that has one defining base unit, whether week, year, etc.
class MTRangeSingular(MTRangeGeneric):

	def fromstr(self):
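
The added __contains__ lets a Unix timestamp be tested against a range with the in operator. A self-contained sketch of the same contract; DemoRange and the timestamps are illustrative, not maloja code:

class DemoRange:
	# mimics MTRangeGeneric's first_stamp()/last_stamp() contract
	def __init__(self,first,last):
		self._first, self._last = first, last
	def first_stamp(self):
		return self._first
	def last_stamp(self):
		return self._last
	def __contains__(self,timestamp):
		return self.first_stamp() <= timestamp <= self.last_stamp()

feb_2022 = DemoRange(1643670000,1646348399)   # illustrative Unix timestamps
print(1644899000 in feb_2022)                 # True: the timestamp falls inside the range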

View File

@@ -1,2 +1 @@
from .images import *
from .utils import *

View File

@@ -1,24 +0,0 @@
import json
#####
## SERIALIZE
#####
def serialize(obj):
try:
return serialize(obj.hashable())
except:
try:
return json.dumps(obj)
except:
if isinstance(obj, (list, tuple)):
return "[" + ",".join(serialize(o) for o in obj) + "]"
elif isinstance(obj,dict):
return "{" + ",".join(serialize(o) + ":" + serialize(obj[o]) for o in obj) + "}"
return json.dumps(obj.hashable())
#if isinstance(obj,list) or if isinstance(obj,tuple):
# return "[" + ",".join(dumps(o) for o in obj) + "]"
#if isinstance(obj,str)