mirror of https://github.com/krateng/maloja.git synced 2023-08-10 21:12:55 +03:00

Removed previous ability, but this time clean and consistent

This commit is contained in:
krateng 2022-04-24 16:14:24 +02:00
parent 7c77474feb
commit d5f5b48d85
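
The setting is now evaluated once at import time: the entire caching layer lives inside an `if malojaconfig['USE_GLOBAL_CACHE']:` block, and the `else:` branch exports no-op stand-ins under the same names. A minimal sketch of that pattern, with a plain dict standing in for malojaconfig and illustrative names throughout:

    config = {'USE_GLOBAL_CACHE': True}  # stand-in for malojaconfig

    if config['USE_GLOBAL_CACHE']:
        _cache = {}

        def cached(func):
            # real decorator: memoize by positional arguments
            def wrapper(*args):
                if args not in _cache:
                    _cache[args] = func(*args)
                return _cache[args]
            return wrapper
    else:
        def cached(func):
            # caching disabled: return the function unchanged,
            # so importers get the same API either way
            return func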

View File

@@ -10,125 +10,140 @@
 from doreah.logging import log

 from ..pkg_global.conf import malojaconfig

+if malojaconfig['USE_GLOBAL_CACHE']:
+
     CACHE_SIZE = 1000
     ENTITY_CACHE_SIZE = 100000

     cache = lru.LRU(CACHE_SIZE)
     entitycache = lru.LRU(ENTITY_CACHE_SIZE)

     hits, misses = 0, 0

     @runhourly
     def maintenance():
-        if malojaconfig['USE_GLOBAL_CACHE']:
         print_stats()
         trim_cache()

     def print_stats():
         log(f"Cache Size: {len(cache)} [{len(entitycache)} E], System RAM Utilization: {psutil.virtual_memory().percent}%, Cache Hits: {hits}/{hits+misses}")
         #print("Full rundown:")
         #import sys
         #for k in cache.keys():
         #	print(f"\t{k}\t{sys.getsizeof(cache[k])}")
     def cached_wrapper(inner_func):

         def outer_func(*args,**kwargs):
-            if not malojaconfig['USE_GLOBAL_CACHE']: inner_func(*args,**kwargs)
             if 'dbconn' in kwargs:
                 conn = kwargs.pop('dbconn')
             else:
                 conn = None
             global hits, misses
             key = (serialize(args),serialize(kwargs), inner_func, kwargs.get("since"), kwargs.get("to"))

             if key in cache:
                 hits += 1
                 return cache.get(key)
             else:
                 misses += 1
                 result = inner_func(*args,**kwargs,dbconn=conn)
                 cache[key] = result
                 return result

         return outer_func
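
The cache key bundles the serialized arguments with the function object, and repeats kwargs.get('since') and kwargs.get('to') at fixed positions so that invalidate_caches below can reason about time ranges. Hypothetical usage, with an invented get_scrobbles stand-in for a decorated DB query:

    @cached_wrapper
    def get_scrobbles(since=None, to=None, dbconn=None):  # invented example
        ...  # expensive database query

    get_scrobbles(since=1650000000)  # miss: query runs, result lands in the cache
    get_scrobbles(since=1650000000)  # hit: same key, answered from the cache
    get_scrobbles(since=1650000001)  # different key, so another miss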
     # cache for functions that call with a whole list of entity ids
     # we don't want a new cache entry for every single combination, but keep a common
     # cache that's aware of what we're calling
     def cached_wrapper_individual(inner_func):

         def outer_func(set_arg,**kwargs):
-            if not malojaconfig['USE_GLOBAL_CACHE']: return inner_func(set_arg,**kwargs)
             if 'dbconn' in kwargs:
                 conn = kwargs.pop('dbconn')
             else:
                 conn = None

             #global hits, misses
             result = {}
             for id in set_arg:
                 if (inner_func,id) in entitycache:
                     result[id] = entitycache[(inner_func,id)]
                     #hits += 1
                 else:
                     pass
                     #misses += 1

             remaining = inner_func(set(e for e in set_arg if e not in result),dbconn=conn)
             for id in remaining:
                 entitycache[(inner_func,id)] = remaining[id]
                 result[id] = remaining[id]

             return result

         return outer_func
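
Rather than one cache entry per requested set, the individual wrapper caches each entity id under the key (inner_func, id): ids already cached are answered directly, and only the remainder is fetched in a single call and back-filled. Sketched usage, with an invented get_artists/fetch_artist pair:

    @cached_wrapper_individual
    def get_artists(artist_ids, dbconn=None):  # invented example
        return {i: fetch_artist(i, dbconn) for i in artist_ids}  # fetch_artist is hypothetical

    get_artists({1, 2, 3})  # all three fetched, cached as (get_artists, 1) etc.
    get_artists({2, 3, 4})  # only 4 reaches the database; 2 and 3 come from entitycache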
     def invalidate_caches(scrobbletime=None):
         cleared, kept = 0, 0
         for k in cache.keys():
             # VERY BIG TODO: differentiate between None as in 'unlimited timerange' and None as in 'time doesnt matter here'!
             if scrobbletime is None or (k[3] is None or scrobbletime >= k[3]) and (k[4] is None or scrobbletime <= k[4]):
                 cleared += 1
                 del cache[k]
             else:
                 kept += 1
         log(f"Invalidated {cleared} of {cleared+kept} DB cache entries")
     def invalidate_entity_cache():
         entitycache.clear()

     def trim_cache():
         ramprct = psutil.virtual_memory().percent
         if ramprct > malojaconfig["DB_MAX_MEMORY"]:
             log(f"{ramprct}% RAM usage, clearing cache and adjusting size!")
             #ratio = 0.6
             #targetsize = max(int(len(cache) * ratio),50)
             #log(f"Reducing to {targetsize} entries")
             #cache.set_size(targetsize)
             #cache.set_size(HIGH_NUMBER)
             cache.clear()
             #if cache.get_size() > CACHE_ADJUST_STEP:
             #	cache.set_size(cache.get_size() - CACHE_ADJUST_STEP)
             #log(f"New RAM usage: {psutil.virtual_memory().percent}%")
             print_stats()
+else:
+    def cached_wrapper(func):
+        return func
+    def cached_wrapper_individual(func):
+        return func
+    def invalidate_caches(scrobbletime=None):
+        return None
+    def invalidate_entity_cache():
+        return None
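
Because the disabled branch exports pass-through stand-ins under the same names, call sites never have to consult USE_GLOBAL_CACHE themselves. With the invented get_scrobbles from the sketch above:

    rows = get_scrobbles(since=1650000000)  # cached or direct, transparently
    invalidate_caches(1650000000)           # harmless no-op when caching is off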
 def serialize(obj):
     try:
         return serialize(obj.hashable())