mirror of
https://github.com/krateng/maloja.git
synced 2023-08-10 21:12:55 +03:00
Fixed separate caching for each combination of entity IDs
This commit is contained in:
parent
c944a3d937
commit
634cb38dec
@ -17,12 +17,7 @@ from . import dbcache
|
||||
# doreah toolkit
|
||||
from doreah.logging import log
|
||||
from doreah import tsv
|
||||
from doreah.caching import Cache, DeepCache
|
||||
from doreah.auth import authenticated_api, authenticated_api_with_alternate
|
||||
from doreah.io import ProgressBar
|
||||
try:
|
||||
from doreah.persistence import DiskDict
|
||||
except: pass
|
||||
import doreah
|
||||
|
||||
|
||||
|
@ -13,6 +13,8 @@ from ..globalconf import malojaconfig
|
||||
# Effectively-unbounded capacity for the LRU caches below.
# NOTE(review): with a bound this high the caches can grow large in RAM;
# presumably maintenance()/invalidation keeps them in check — confirm.
HIGH_NUMBER = 1000000

# Per-call result cache: one entry per (function, full argument set).
cache = lru.LRU(HIGH_NUMBER)
# Per-entity cache: one entry per (function, entity id), shared across
# calls that request different combinations of ids.
entitycache = lru.LRU(HIGH_NUMBER)

# Global hit/miss counters, reported by print_stats().
hits, misses = 0, 0
|
||||
|
||||
|
||||
@ -31,6 +33,10 @@ def maintenance():
|
||||
|
||||
def print_stats():
	"""Log current cache size, system RAM utilization and the cumulative cache hit/miss counts."""
	log(f"Cache Size: {len(cache)}, System RAM Utilization: {psutil.virtual_memory().percent}%, Cache Hits: {hits}/{hits+misses}")
	# Detailed per-key size rundown, kept around for debugging:
	#print("Full rundown:")
	#import sys
	#for k in cache.keys():
	#	print(f"\t{k}\t{sys.getsizeof(cache[k])}")
|
||||
|
||||
|
||||
def cached_wrapper(inner_func):
|
||||
@ -57,6 +63,38 @@ def cached_wrapper(inner_func):
|
||||
return outer_func
|
||||
|
||||
|
||||
# cache for functions that call with a whole list of entity ids
# we don't want a new cache entry for every single combination, but keep a common
# cache that's aware of what we're calling
def cached_wrapper_individual(inner_func):
	"""Wrap *inner_func* so each entity id is cached individually.

	*inner_func* takes a set of entity ids (plus a ``dbconn`` keyword)
	and returns a mapping {id: row}. Ids already present in
	``entitycache`` are served from there; only the missing ids are
	forwarded to *inner_func*, and its results are added to the cache.

	Returns *inner_func* unchanged when the global cache is disabled.
	"""
	if not malojaconfig['USE_GLOBAL_CACHE']: return inner_func

	def outer_func(set_arg,**kwargs):
		# the db connection must not become part of any cache key
		conn = kwargs.pop('dbconn',None)

		global hits, misses
		result = {}
		missing = set()
		for entity_id in set_arg:
			if (inner_func,entity_id) in entitycache:
				result[entity_id] = entitycache[(inner_func,entity_id)]
				hits += 1
			else:
				missing.add(entity_id)
				misses += 1

		# only hit the database when there is actually something left
		# to fetch (the original always called inner_func, even with an
		# empty set — a wasted query when everything was cached)
		if missing:
			remaining = inner_func(missing,dbconn=conn)
			for entity_id in remaining:
				entitycache[(inner_func,entity_id)] = remaining[entity_id]
				result[entity_id] = remaining[entity_id]

		return result

	return outer_func
|
||||
|
||||
def invalidate_caches(scrobbletime):
|
||||
if malojaconfig['USE_GLOBAL_CACHE']:
|
||||
|
@ -6,7 +6,7 @@ from datetime import datetime
|
||||
|
||||
from ..globalconf import data_dir
|
||||
|
||||
from .dbcache import cached_wrapper
|
||||
from .dbcache import cached_wrapper, cached_wrapper_individual
|
||||
|
||||
from doreah.logging import log
|
||||
from doreah.regular import runhourly
|
||||
@ -498,7 +498,7 @@ def count_scrobbles_by_track_of_artist(since,to,artist,dbconn=None):
|
||||
|
||||
### functions that get mappings for several entities -> rows
|
||||
|
||||
@cached_wrapper
|
||||
@cached_wrapper_individual
|
||||
@connection_provider
|
||||
def get_artists_of_tracks(track_ids,dbconn=None):
|
||||
op = sql.join(DB['trackartists'],DB['artists']).select().where(
|
||||
@ -512,7 +512,7 @@ def get_artists_of_tracks(track_ids,dbconn=None):
|
||||
return artists
|
||||
|
||||
|
||||
@cached_wrapper
|
||||
@cached_wrapper_individual
|
||||
@connection_provider
|
||||
def get_tracks_map(track_ids,dbconn=None):
|
||||
op = DB['tracks'].select().where(
|
||||
@ -527,7 +527,7 @@ def get_tracks_map(track_ids,dbconn=None):
|
||||
tracks[trackids[i]] = trackdicts[i]
|
||||
return tracks
|
||||
|
||||
@cached_wrapper
|
||||
@cached_wrapper_individual
|
||||
@connection_provider
|
||||
def get_artists_map(artist_ids,dbconn=None):
|
||||
|
||||
|
Loading…
Reference in New Issue
Block a user