1
0
mirror of https://github.com/krateng/maloja.git synced 2023-08-10 21:12:55 +03:00

More cache organization

This commit is contained in:
krateng 2022-04-25 04:28:53 +02:00
parent 62abc31930
commit ad50ee866c
6 changed files with 51 additions and 32 deletions

View File

@ -43,6 +43,7 @@ minor_release_name: "Yeonhee"
- "[Bugfix] No longer releasing database lock during scrobble creation" - "[Bugfix] No longer releasing database lock during scrobble creation"
- "[Distribution] Experimental arm64 image" - "[Distribution] Experimental arm64 image"
3.0.7: 3.0.7:
commit: "62abc319303a6cb6463f7c27b6ef09b76fc67f86"
notes: notes:
- "[Bugfix] Improved signal handling" - "[Bugfix] Improved signal handling"
- "[Bugfix] Fixed constant re-caching of all-time stats, significantly increasing page load speed" - "[Bugfix] Fixed constant re-caching of all-time stats, significantly increasing page load speed"

View File

@ -1 +1,2 @@
- "[Performance] Adjusted cache sizes"
- "[Logging] Added cache memory use information"

View File

@ -6,6 +6,7 @@ FOLDER = "dev/releases"
releases = {} releases = {}
for f in os.listdir(FOLDER): for f in os.listdir(FOLDER):
if f == "branch.yml": continue
#maj,min = (int(i) for i in f.split('.')[:2]) #maj,min = (int(i) for i in f.split('.')[:2])
with open(os.path.join(FOLDER,f)) as fd: with open(os.path.join(FOLDER,f)) as fd:

View File

@ -5,6 +5,7 @@
import lru import lru
import psutil import psutil
import json import json
import sys
from doreah.regular import runhourly from doreah.regular import runhourly
from doreah.logging import log from doreah.logging import log
@ -12,16 +13,10 @@ from ..globalconf import malojaconfig
if malojaconfig['USE_GLOBAL_CACHE']: if malojaconfig['USE_GLOBAL_CACHE']:
CACHE_SIZE = 1000
ENTITY_CACHE_SIZE = 100000
cache = lru.LRU(CACHE_SIZE) cache = lru.LRU(10000)
entitycache = lru.LRU(ENTITY_CACHE_SIZE) entitycache = lru.LRU(100000)
hits, misses = 0, 0
@ -31,11 +26,10 @@ if malojaconfig['USE_GLOBAL_CACHE']:
trim_cache() trim_cache()
def print_stats(): def print_stats():
log(f"Cache Size: {len(cache)} [{len(entitycache)} E], System RAM Utilization: {psutil.virtual_memory().percent}%, Cache Hits: {hits}/{hits+misses}") for name,c in (('Cache',cache),('Entity Cache',entitycache)):
#print("Full rundown:") hits, misses = c.get_stats()
#import sys log(f"{name}: Size: {len(c)} | Hits: {hits}/{hits+misses} | Estimated Memory: {human_readable_size(c)}")
#for k in cache.keys(): log(f"System RAM Utilization: {psutil.virtual_memory().percent}%")
# print(f"\t{k}\t{sys.getsizeof(cache[k])}")
def cached_wrapper(inner_func): def cached_wrapper(inner_func):
@ -49,12 +43,9 @@ if malojaconfig['USE_GLOBAL_CACHE']:
global hits, misses global hits, misses
key = (serialize(args),serialize(kwargs), inner_func, kwargs.get("since"), kwargs.get("to")) key = (serialize(args),serialize(kwargs), inner_func, kwargs.get("since"), kwargs.get("to"))
if key in cache: try:
hits += 1 return cache[key]
return cache.get(key) except KeyError:
else:
misses += 1
result = inner_func(*args,**kwargs,dbconn=conn) result = inner_func(*args,**kwargs,dbconn=conn)
cache[key] = result cache[key] = result
return result return result
@ -67,25 +58,18 @@ if malojaconfig['USE_GLOBAL_CACHE']:
# cache that's aware of what we're calling # cache that's aware of what we're calling
def cached_wrapper_individual(inner_func): def cached_wrapper_individual(inner_func):
def outer_func(set_arg,**kwargs): def outer_func(set_arg,**kwargs):
if 'dbconn' in kwargs: if 'dbconn' in kwargs:
conn = kwargs.pop('dbconn') conn = kwargs.pop('dbconn')
else: else:
conn = None conn = None
#global hits, misses
result = {} result = {}
for id in set_arg: for id in set_arg:
if (inner_func,id) in entitycache: try:
result[id] = entitycache[(inner_func,id)] result[id] = entitycache[(inner_func,id)]
#hits += 1 except KeyError:
else:
pass pass
#misses += 1
remaining = inner_func(set(e for e in set_arg if e not in result),dbconn=conn) remaining = inner_func(set(e for e in set_arg if e not in result),dbconn=conn)
for id in remaining: for id in remaining:
@ -115,13 +99,14 @@ if malojaconfig['USE_GLOBAL_CACHE']:
def trim_cache(): def trim_cache():
ramprct = psutil.virtual_memory().percent ramprct = psutil.virtual_memory().percent
if ramprct > malojaconfig["DB_MAX_MEMORY"]: if ramprct > malojaconfig["DB_MAX_MEMORY"]:
log(f"{ramprct}% RAM usage, clearing cache and adjusting size!") log(f"{ramprct}% RAM usage, clearing cache!")
for c in (cache,entitycache):
c.clear()
#ratio = 0.6 #ratio = 0.6
#targetsize = max(int(len(cache) * ratio),50) #targetsize = max(int(len(cache) * ratio),50)
#log(f"Reducing to {targetsize} entries") #log(f"Reducing to {targetsize} entries")
#cache.set_size(targetsize) #cache.set_size(targetsize)
#cache.set_size(HIGH_NUMBER) #cache.set_size(HIGH_NUMBER)
cache.clear()
#if cache.get_size() > CACHE_ADJUST_STEP: #if cache.get_size() > CACHE_ADJUST_STEP:
# cache.set_size(cache.get_size() - CACHE_ADJUST_STEP) # cache.set_size(cache.get_size() - CACHE_ADJUST_STEP)
@ -156,3 +141,32 @@ def serialize(obj):
elif isinstance(obj,dict): elif isinstance(obj,dict):
return "{" + ",".join(serialize(o) + ":" + serialize(obj[o]) for o in obj) + "}" return "{" + ",".join(serialize(o) + ":" + serialize(obj[o]) for o in obj) + "}"
return json.dumps(obj.hashable()) return json.dumps(obj.hashable())
def get_size_of(obj, counted=None):
	"""Recursively estimate the memory footprint of *obj* in bytes.

	Follows dict-like key/value pairs and iterable members, tracking
	visited object ids in *counted* so that shared objects are only
	counted once and reference cycles do not recurse forever.

	:param obj: any object to measure
	:param counted: set of already-counted object ids (internal, used
		by the recursive calls; leave as None at the top level)
	:return: estimated size in bytes (int)
	"""
	if counted is None:
		counted = set()
	# Each distinct object contributes to the total exactly once.
	if id(obj) in counted:
		return 0
	counted.add(id(obj))
	size = sys.getsizeof(obj)
	# getsizeof already reports the full payload of strings/bytes;
	# iterating them would recurse once per character for no benefit.
	if isinstance(obj, (str, bytes, bytearray)):
		return size
	try:
		kv_pairs = obj.items()
	except AttributeError:
		kv_pairs = None
	if kv_pairs is not None:
		# Mapping-like (dict, LRU cache, ...): count keys as well as
		# values — cache keys here are large serialized tuples.
		for k, v in kv_pairs:
			size += get_size_of(k, counted=counted)
			size += get_size_of(v, counted=counted)
	else:
		try:
			for item in obj:
				size += get_size_of(item, counted=counted)
		except TypeError:
			# Not iterable — the shallow size is all we can measure.
			pass
	return size
def human_readable_size(obj):
	"""Return the estimated memory footprint of *obj* as a human-readable string.

	Uses :func:`get_size_of` for the byte count, then scales it to the
	largest unit that keeps the value at or below 1024 (binary steps),
	e.g. ``"3.27 MB"``.

	:param obj: any object to measure
	:return: formatted size string, two decimal places
	"""
	units = ['', 'K', 'M', 'G', 'T', 'P']
	# `size` instead of the original `bytes`, which shadowed the builtin.
	size = get_size_of(obj)
	unit_index = 0
	while size > 1024 and unit_index + 1 < len(units):
		size = size / 1024
		unit_index += 1
	return f"{size:.2f} {units[unit_index]}B"

View File

@ -116,6 +116,8 @@ def connection_provider(func):
with engine.connect() as connection: with engine.connect() as connection:
kwargs['dbconn'] = connection kwargs['dbconn'] = connection
return func(*args,**kwargs) return func(*args,**kwargs)
wrapper.__innerfunc__ = func
return wrapper return wrapper
##### DB <-> Dict translations ##### DB <-> Dict translations

View File

@ -148,7 +148,7 @@ malojaconfig = Configuration(
"Technical":{ "Technical":{
"cache_expire_positive":(tp.Integer(), "Image Cache Expiration", 60, "Days until images are refetched"), "cache_expire_positive":(tp.Integer(), "Image Cache Expiration", 60, "Days until images are refetched"),
"cache_expire_negative":(tp.Integer(), "Image Cache Negative Expiration", 5, "Days until failed image fetches are reattempted"), "cache_expire_negative":(tp.Integer(), "Image Cache Negative Expiration", 5, "Days until failed image fetches are reattempted"),
"db_max_memory":(tp.Integer(min=0,max=100), "RAM Percentage soft limit", 80, "RAM Usage in percent at which Maloja should no longer increase its database cache."), "db_max_memory":(tp.Integer(min=0,max=100), "RAM Percentage soft limit", 50, "RAM Usage in percent at which Maloja should no longer increase its database cache."),
"use_request_cache":(tp.Boolean(), "Use request-local DB Cache", False), "use_request_cache":(tp.Boolean(), "Use request-local DB Cache", False),
"use_global_cache":(tp.Boolean(), "Use global DB Cache", True) "use_global_cache":(tp.Boolean(), "Use global DB Cache", True)
}, },