mirror of https://github.com/krateng/maloja.git synced 2023-08-10 21:12:55 +03:00

Ported cache cleanup from 3.1

krateng 2022-04-24 20:55:07 +02:00
parent bfc83fdbb0
commit 342b8867d9
2 changed files with 106 additions and 91 deletions

View File

@@ -10,96 +10,97 @@ from doreah.logging import log
 from ..globalconf import malojaconfig

-HIGH_NUMBER = 1000000
-CACHE_SIZE = 10000
-ENTITY_CACHE_SIZE = 1000000
-CACHE_ADJUST_STEP = 100
-
-cache = lru.LRU(CACHE_SIZE)
-entitycache = lru.LRU(ENTITY_CACHE_SIZE)
-
-hits, misses = 0, 0
-
-@runhourly
-def maintenance():
-    if malojaconfig['USE_GLOBAL_CACHE']:
+if malojaconfig['USE_GLOBAL_CACHE']:
+
+    CACHE_SIZE = 1000
+    ENTITY_CACHE_SIZE = 100000
+
+    cache = lru.LRU(CACHE_SIZE)
+    entitycache = lru.LRU(ENTITY_CACHE_SIZE)
+
+    hits, misses = 0, 0
+
+    @runhourly
+    def maintenance():
         print_stats()
         trim_cache()

-def print_stats():
-    log(f"Cache Size: {len(cache)} [{len(entitycache)} E], System RAM Utilization: {psutil.virtual_memory().percent}%, Cache Hits: {hits}/{hits+misses}")
-    #print("Full rundown:")
-    #import sys
-    #for k in cache.keys():
-    #    print(f"\t{k}\t{sys.getsizeof(cache[k])}")
+    def print_stats():
+        log(f"Cache Size: {len(cache)} [{len(entitycache)} E], System RAM Utilization: {psutil.virtual_memory().percent}%, Cache Hits: {hits}/{hits+misses}")
+        #print("Full rundown:")
+        #import sys
+        #for k in cache.keys():
+        #    print(f"\t{k}\t{sys.getsizeof(cache[k])}")

-def cached_wrapper(inner_func):
-    if not malojaconfig['USE_GLOBAL_CACHE']: return inner_func
-    def outer_func(*args,**kwargs):
-        if 'dbconn' in kwargs:
-            conn = kwargs.pop('dbconn')
-        else:
-            conn = None
-        global hits, misses
-        key = (serialize(args),serialize(kwargs), inner_func, kwargs.get("since"), kwargs.get("to"))
-
-        if key in cache:
-            hits += 1
-            return cache.get(key)
-        else:
-            misses += 1
-            result = inner_func(*args,**kwargs,dbconn=conn)
-            cache[key] = result
-            return result
-
-    return outer_func
+    def cached_wrapper(inner_func):
+        def outer_func(*args,**kwargs):
+            if 'dbconn' in kwargs:
+                conn = kwargs.pop('dbconn')
+            else:
+                conn = None
+            global hits, misses
+            key = (serialize(args),serialize(kwargs), inner_func, kwargs.get("since"), kwargs.get("to"))
+
+            if key in cache:
+                hits += 1
+                return cache.get(key)
+            else:
+                misses += 1
+                result = inner_func(*args,**kwargs,dbconn=conn)
+                cache[key] = result
+                return result
+
+        return outer_func

-# cache for functions that call with a whole list of entity ids
-# we don't want a new cache entry for every single combination, but keep a common
-# cache that's aware of what we're calling
-def cached_wrapper_individual(inner_func):
-    if not malojaconfig['USE_GLOBAL_CACHE']: return inner_func
-    def outer_func(set_arg,**kwargs):
-        if 'dbconn' in kwargs:
-            conn = kwargs.pop('dbconn')
-        else:
-            conn = None
-        #global hits, misses
-        result = {}
-        for id in set_arg:
-            if (inner_func,id) in entitycache:
-                result[id] = entitycache[(inner_func,id)]
-                #hits += 1
-            else:
-                pass
-                #misses += 1
-
-        remaining = inner_func(set(e for e in set_arg if e not in result),dbconn=conn)
-        for id in remaining:
-            entitycache[(inner_func,id)] = remaining[id]
-            result[id] = remaining[id]
-
-        return result
-
-    return outer_func
+    # cache for functions that call with a whole list of entity ids
+    # we don't want a new cache entry for every single combination, but keep a common
+    # cache that's aware of what we're calling
+    def cached_wrapper_individual(inner_func):
+        def outer_func(set_arg,**kwargs):
+            if 'dbconn' in kwargs:
+                conn = kwargs.pop('dbconn')
+            else:
+                conn = None
+            #global hits, misses
+            result = {}
+            for id in set_arg:
+                if (inner_func,id) in entitycache:
+                    result[id] = entitycache[(inner_func,id)]
+                    #hits += 1
+                else:
+                    pass
+                    #misses += 1
+
+            remaining = inner_func(set(e for e in set_arg if e not in result),dbconn=conn)
+            for id in remaining:
+                entitycache[(inner_func,id)] = remaining[id]
+                result[id] = remaining[id]
+
+            return result
+
+        return outer_func

-def invalidate_caches(scrobbletime):
-    if malojaconfig['USE_GLOBAL_CACHE']:
+    def invalidate_caches(scrobbletime=None):
         cleared, kept = 0, 0
         for k in cache.keys():
             # VERY BIG TODO: differentiate between None as in 'unlimited timerange' and None as in 'time doesnt matter here'!
-            if (k[3] is None or scrobbletime >= k[3]) and (k[4] is None or scrobbletime <= k[4]):
+            if scrobbletime is None or (k[3] is None or scrobbletime >= k[3]) and (k[4] is None or scrobbletime <= k[4]):
                 cleared += 1
                 del cache[k]
             else:
@@ -107,28 +108,42 @@ def invalidate_caches(scrobbletime):
         log(f"Invalidated {cleared} of {cleared+kept} DB cache entries")

-def invalidate_entity_cache():
-    entitycache.clear()
+    def invalidate_entity_cache():
+        entitycache.clear()

-def trim_cache():
-    ramprct = psutil.virtual_memory().percent
-    if ramprct > malojaconfig["DB_MAX_MEMORY"]:
-        log(f"{ramprct}% RAM usage, clearing cache and adjusting size!")
-        #ratio = 0.6
-        #targetsize = max(int(len(cache) * ratio),50)
-        #log(f"Reducing to {targetsize} entries")
-        #cache.set_size(targetsize)
-        #cache.set_size(HIGH_NUMBER)
-        cache.clear()
-        if cache.get_size() > CACHE_ADJUST_STEP:
-            cache.set_size(cache.get_size() - CACHE_ADJUST_STEP)
-        #log(f"New RAM usage: {psutil.virtual_memory().percent}%")
-        print_stats()
+    def trim_cache():
+        ramprct = psutil.virtual_memory().percent
+        if ramprct > malojaconfig["DB_MAX_MEMORY"]:
+            log(f"{ramprct}% RAM usage, clearing cache and adjusting size!")
+            #ratio = 0.6
+            #targetsize = max(int(len(cache) * ratio),50)
+            #log(f"Reducing to {targetsize} entries")
+            #cache.set_size(targetsize)
+            #cache.set_size(HIGH_NUMBER)
+            cache.clear()
+            #if cache.get_size() > CACHE_ADJUST_STEP:
+            #    cache.set_size(cache.get_size() - CACHE_ADJUST_STEP)
+            #log(f"New RAM usage: {psutil.virtual_memory().percent}%")
+            print_stats()
+
+else:
+    def cached_wrapper(func):
+        return func
+    def cached_wrapper_individual(func):
+        return func
+    def invalidate_caches(scrobbletime=None):
+        return None
+    def invalidate_entity_cache():
+        return None

 def serialize(obj):
     try:
         return serialize(obj.hashable())
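Taken together, this file now defines the whole caching layer once, behind a single check of USE_GLOBAL_CACHE, with no-op stand-ins in the else branch; previously each wrapper re-checked the flag on its own. A minimal, self-contained sketch of that gate-or-no-op decorator pattern, assuming the lru-dict package that the module imports as lru, and with a plain constant standing in for malojaconfig['USE_GLOBAL_CACHE']:

# Sketch of the gate-or-no-op pattern from the diff above.
# Assumes the lru-dict package (pip install lru-dict), imported as `lru`;
# USE_GLOBAL_CACHE is a stand-in for malojaconfig['USE_GLOBAL_CACHE'].
import lru

USE_GLOBAL_CACHE = True

if USE_GLOBAL_CACHE:
    cache = lru.LRU(1000)  # fixed-size mapping, evicts least-recently-used entries

    def cached_wrapper(inner_func):
        def outer_func(*args, **kwargs):
            # args/kwargs must be hashable for this simplified key
            key = (inner_func.__name__, args, tuple(sorted(kwargs.items())))
            if key in cache:
                return cache[key]            # hit: reuse the stored result
            result = inner_func(*args, **kwargs)
            cache[key] = result              # miss: compute and store
            return result
        return outer_func
else:
    # caching disabled: the decorator degrades to identity,
    # so call sites never check the flag themselves
    def cached_wrapper(inner_func):
        return inner_func

@cached_wrapper
def expensive_query(artist_id):
    return {"artist": artist_id}  # placeholder for a real DB lookup

print(expensive_query(1), expensive_query(1))  # second call is a cache hit

The real module additionally serializes its arguments and keeps hit/miss counters, but the control flow is the same; centralizing the flag is what lets the per-wrapper `if not malojaconfig[...]` lines disappear.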

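The invalidate_caches change is easy to miss: cache keys carry the since/to bounds of the original query, and a new scrobble only needs to evict entries whose time window could contain it (the VERY BIG TODO notes that None is overloaded as both "unbounded" and "irrelevant"). A hypothetical worked example of that rule, with scrobbletime=None clearing everything as in the new signature:

# Worked example of the invalidation rule above (hypothetical values).
def affected(scrobbletime, since, to):
    if scrobbletime is None:
        return True  # no timestamp given: invalidate everything
    return (since is None or scrobbletime >= since) and (to is None or scrobbletime <= to)

assert affected(1650000000, None, None)            # unbounded range: stale
assert affected(1650000000, 1640995200, None)      # "since 2022-01-01": stale
assert not affected(1650000000, None, 1609459200)  # "until 2021-01-01": kept
assert affected(None, 1640995200, 1672531200)      # scrobbletime=None: clear all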
View File

@@ -149,8 +149,8 @@ malojaconfig = Configuration(
         "cache_expire_positive":(tp.Integer(), "Image Cache Expiration", 60, "Days until images are refetched"),
         "cache_expire_negative":(tp.Integer(), "Image Cache Negative Expiration", 5, "Days until failed image fetches are reattempted"),
         "db_max_memory":(tp.Integer(min=0,max=100), "RAM Percentage soft limit", 80, "RAM Usage in percent at which Maloja should no longer increase its database cache."),
-        "use_request_cache":(tp.Boolean(), "Use request-local DB Cache", True),
-        "use_global_cache":(tp.Boolean(), "Use global DB Cache", True)
+        "use_request_cache":(tp.Boolean(), "Use request-local DB Cache", False),
+        "use_global_cache":(tp.Boolean(), "Use global DB Cache", False)
     },
     "Fluff":{
         "scrobbles_gold":(tp.Integer(), "Scrobbles for Gold", 250, "How many scrobbles a track needs to be considered 'Gold' status"),