1
0
Mirror of https://github.com/krateng/maloja.git (synced 2023-08-10 21:12:55 +03:00)

Added settings for caching

This commit is contained in:
Krateng 2020-05-28 19:32:41 +02:00
parent b8de507a4f
commit 6aa65bf1ce
3 changed files with 38 additions and 19 deletions

View File

@@ -5,7 +5,7 @@ author = {
"email":"maloja@krateng.dev",
"github": "krateng"
}
version = 2,3,8
version = 2,4,0
versionstr = ".".join(str(n) for n in version)
links = {
"pypi":"malojaserver",

View File

@@ -25,6 +25,8 @@ TRACK_SEARCH_PROVIDER = None
[Database]
CACHE_DATABASE = true
CACHE_DATABASE_PERM = false #more permanent cache for old timeranges
DB_CACHE_SIZE = 8192 # how many MB on disk each database cache should have available.
INVALID_ARTISTS = ["[Unknown Artist]","Unknown Artist","Spotify"]
REMOVE_FROM_TITLE = ["(Original Mix)","(Radio Edit)","(Album Version)","(Explicit Version)","(Bonus Track)"]

View File

@@ -1034,17 +1034,29 @@ def sync():
import copy
cache_query = {}
if doreah.version >= (0,7,1) and settings.get_settings("EXPERIMENTAL_FEATURES"):
cache_query_permanent = DiskDict(name="dbquery",folder=datadir("cache"),maxmemory=1024*1024*500,maxstorage=1024*1024*settings.get_settings("DB_CACHE_SIZE"))
if settings.get_settings("CACHE_DATABASE"):
def db_query(**kwargs):
return db_query_cached(**kwargs)
def db_aggregate(**kwargs):
return db_aggregate_cached(**kwargs)
else:
cache_query_permanent = Cache(maxmemory=1024*1024*500)
def db_query(**kwargs):
return db_query_full(**kwargs)
def db_aggregate(**kwargs):
return db_aggregate_full(**kwargs)
cacheday = (0,0,0)
def db_query(**kwargs):
cache_query = {}
cache_query_permanent = Cache(maxmemory=1024*1024*500)
def db_query_cached(**kwargs):
check_cache_age()
global cache_query, cache_query_permanent
key = utilities.serialize(kwargs)
if "timerange" in kwargs and not kwargs["timerange"].active():
# hit permanent cache for past timeranges
if "timerange" in kwargs and not kwargs["timerange"].active() and settings.get_settings("CACHE_DATABASE_PERM"):
if key in cache_query_permanent:
#print("Hit")
return copy.copy(cache_query_permanent.get(key))
@@ -1052,31 +1064,36 @@ def db_query(**kwargs):
result = db_query_full(**kwargs)
cache_query_permanent.add(key,copy.copy(result))
#print(cache_query_permanent.cache)
# hit short term cache
else:
#print("I guess they never miss huh")
if key in cache_query: return copy.copy(cache_query[key])
result = db_query_full(**kwargs)
cache_query[key] = copy.copy(result)
if key in cache_query:
return copy.copy(cache_query[key])
else:
result = db_query_full(**kwargs)
cache_query[key] = copy.copy(result)
return result
cache_aggregate = {}
if doreah.version >= (0,7,1) and settings.get_settings("EXPERIMENTAL_FEATURES"):
cache_aggregate_permanent = DiskDict(name="dbaggregate",folder="cache",maxmemory=1024*1024*500,maxstorage=1024*1024*settings.get_settings("DB_CACHE_SIZE"))
else:
cache_aggregate_permanent = Cache(maxmemory=1024*1024*500)
def db_aggregate(**kwargs):
cache_aggregate_permanent = Cache(maxmemory=1024*1024*500)
def db_aggregate_cached(**kwargs):
check_cache_age()
global cache_aggregate, cache_aggregate_permanent
key = utilities.serialize(kwargs)
if "timerange" in kwargs and not kwargs["timerange"].active():
# hit permanent cache for past timeranges
if "timerange" in kwargs and not kwargs["timerange"].active() and settings.get_settings("CACHE_DATABASE_PERM"):
if key in cache_aggregate_permanent: return copy.copy(cache_aggregate_permanent.get(key))
result = db_aggregate_full(**kwargs)
cache_aggregate_permanent.add(key,copy.copy(result))
# hit short term cache
else:
if key in cache_aggregate: return copy.copy(cache_aggregate[key])
result = db_aggregate_full(**kwargs)
cache_aggregate[key] = copy.copy(result)
if key in cache_aggregate:
return copy.copy(cache_aggregate[key])
else:
result = db_aggregate_full(**kwargs)
cache_aggregate[key] = copy.copy(result)
return result