Mirror of https://github.com/krateng/maloja.git (synced 2023-08-10 21:12:55 +03:00)
Replaced DB caches with LRU dicts, hope this improves memory problems
commit 9b787fa3b1
parent d989134e65
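
In plain terms: the old per-query caches were unbounded (a plain dict plus a Cache(maxmemory=...) object for each of the query and aggregate paths), so a long-running server kept accumulating entries; this commit replaces them with fixed-size LRU mappings from the lru module. A minimal sketch of the behaviour this relies on, assuming the lru-dict package and not taken from the commit itself:

import lru

cache = lru.LRU(3)                 # keep at most 3 entries
for key in ("a", "b", "c", "d"):
	cache[key] = key.upper()       # storing "d" evicts the least recently used entry ("a")

print(len(cache))                  # -> 3, the size stays bounded
print("a" in cache)                # -> False, it was evicted
print(cache.get("d"))              # -> "D"
cache.clear()                      # drop everything, as the new invalidate_caches() does

Entries that fall out of a cache are simply recomputed on the next request, so the trade-off is bounded memory for occasional repeated work.
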
@@ -5,7 +5,7 @@ author = {
 	"email":"maloja@krateng.dev",
 	"github": "krateng"
 }
-version = 2,4,2
+version = 2,4,3
 versionstr = ".".join(str(n) for n in version)
 links = {
 	"pypi":"malojaserver",

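For orientation (not part of the commit): version is a plain tuple, so the bump above propagates into versionstr as follows.

version = 2,4,3                                    # the assignment above builds the tuple (2, 4, 3)
versionstr = ".".join(str(n) for n in version)     # -> "2.4.3"
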
@@ -25,9 +25,10 @@ TRACK_SEARCH_PROVIDER = None

 [Database]

-CACHE_DATABASE = true
-CACHE_DATABASE_PERM = false #more permanent cache for old timeranges
-DB_CACHE_SIZE = 8192 # how many MB on disk each database cache should have available.
+USE_DB_CACHE = yes
+CACHE_DATABASE_SHORT = true
+CACHE_DATABASE_PERM = true #more permanent cache for old timeranges
+DB_CACHE_ENTRIES = 10000 #experiment with this depending on your RAM
 INVALID_ARTISTS = ["[Unknown Artist]","Unknown Artist","Spotify"]
 REMOVE_FROM_TITLE = ["(Original Mix)","(Radio Edit)","(Album Version)","(Explicit Version)","(Bonus Track)"]
 USE_PARSE_PLUGINS = no

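For users, the practical knobs after this change are: USE_DB_CACHE switches query caching on or off, CACHE_DATABASE_SHORT and CACHE_DATABASE_PERM enable the short-term cache and the more permanent cache for finished timeranges, and DB_CACHE_ENTRIES caps how many entries each of the four LRU caches may hold (a count of entries, unlike the removed DB_CACHE_SIZE, which was specified in MB). A hypothetical override for a machine with little RAM might look like this; the value is illustrative, not a shipped default:

[Database]

USE_DB_CACHE = yes
CACHE_DATABASE_SHORT = true
CACHE_DATABASE_PERM = true
DB_CACHE_ENTRIES = 2000 #assumed example value, lower than the default 10000
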
@@ -1,5 +1,6 @@
 # server
 from bottle import request, response, FormsDict

 # rest of the project
 from .cleanup import CleanerAgent, CollectorAgent
 from . import utilities
@@ -9,6 +10,7 @@ from . import compliant_api
 from .external import proxy_scrobble
 from .__pkginfo__ import version
 from .globalconf import datadir

 # doreah toolkit
 from doreah.logging import log
 from doreah import tsv
@@ -18,9 +20,11 @@ try:
 	from doreah.persistence import DiskDict
 except: pass
 import doreah

 # nimrodel API
 from nimrodel import EAPI as API
 from nimrodel import Multi

 # technical
 import os
 import datetime
@@ -29,6 +33,8 @@ import unicodedata
 from collections import namedtuple
 from threading import Lock
 import yaml
+import lru

 # url handling
 from importlib.machinery import SourceFileLoader
 import urllib
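
The only new import here is lru, presumably the lru-dict package on PyPI (a C implementation of a fixed-size, dict-like LRU mapping), which on a manual setup would be installed with something like:

pip install lru-dict
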
@@ -1032,9 +1038,11 @@ def sync():
 ###




 import copy

-if settings.get_settings("CACHE_DATABASE"):
+if settings.get_settings("USE_DB_CACHE"):
 	def db_query(**kwargs):
 		return db_query_cached(**kwargs)
 	def db_aggregate(**kwargs):
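
This hunk keeps the existing dispatch trick: depending on the (renamed) setting, db_query and db_aggregate are bound once at import time to either the cached or the uncached implementation. A minimal sketch of the pattern with stand-in functions, not the real maloja ones:

def db_query_full(**kwargs):
	return ["result computed from the actual database"]

def db_query_cached(**kwargs):
	return ["result served from the LRU caches"]

USE_DB_CACHE = True       # stands in for settings.get_settings("USE_DB_CACHE")

if USE_DB_CACHE:
	def db_query(**kwargs):
		return db_query_cached(**kwargs)
else:
	def db_query(**kwargs):
		return db_query_full(**kwargs)
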
@@ -1045,75 +1053,108 @@ else:
 	def db_aggregate(**kwargs):
 		return db_aggregate_full(**kwargs)

-cacheday = (0,0,0)
+csz = settings.get_settings("DB_CACHE_ENTRIES")

+cache_query = lru.LRU(csz)
+cache_query_perm = lru.LRU(csz)
+cache_aggregate = lru.LRU(csz)
+cache_aggregate_perm = lru.LRU(csz)

+cachestats = {
+	"cache_query_tmp":{
+		"obj":cache_query,
+		"hits":0,
+		"misses":0
+	},
+	"cache_query_perm":{
+		"obj":cache_query_perm,
+		"hits":0,
+		"misses":0
+	},
+	"cache_aggregate_tmp":{
+		"obj":cache_aggregate,
+		"hits":0,
+		"misses":0
+	},
+	"cache_aggregate_perm":{
+		"obj":cache_aggregate_perm,
+		"hits":0,
+		"misses":0
+	},
+}

+from doreah.regular import runhourly

+@runhourly
+def log_stats():
+	log({c:{"size":len(cachestats[c]["obj"]),"hits":cachestats[c]["hits"],"misses":cachestats[c]["misses"]} for c in cachestats},module="debug")


-cache_query = {}
-cache_query_permanent = Cache(maxmemory=1024*1024*500)
 def db_query_cached(**kwargs):
-	check_cache_age()
-	global cache_query, cache_query_permanent
+	global cache_query, cache_query_perm
 	key = utilities.serialize(kwargs)

+	eligible_permanent_caching = (
+		"timerange" in kwargs and
+		not kwargs["timerange"].active() and
+		settings.get_settings("CACHE_DATABASE_PERM")
+	)
+	eligible_temporary_caching = (
+		not eligible_permanent_caching and
+		settings.get_settings("CACHE_DATABASE_SHORT")
+	)

 	# hit permanent cache for past timeranges
-	if "timerange" in kwargs and not kwargs["timerange"].active() and settings.get_settings("CACHE_DATABASE_PERM"):
-		if key in cache_query_permanent:
-			#print("Hit")
-			return copy.copy(cache_query_permanent.get(key))
-		#print("Miss")
+	if eligible_permanent_caching and key in cache_query_perm:
+		return copy.copy(cache_query_perm.get(key))
+	# hit short term cache
+	elif eligible_temporary_caching and key in cache_query:
+		return copy.copy(cache_query.get(key))

+	else:
 		result = db_query_full(**kwargs)
-		cache_query_permanent.add(key,copy.copy(result))
-		#print(cache_query_permanent.cache)
-	# hit short term cache
-	else:
-		#print("I guess they never miss huh")
-		if key in cache_query:
-			return copy.copy(cache_query[key])
-		else:
-			result = db_query_full(**kwargs)
-			cache_query[key] = copy.copy(result)
+		if eligible_permanent_caching: cache_query_perm[key] = result
+		elif eligible_temporary_caching: cache_query[key] = result
+		return result

-	return result

-cache_aggregate = {}
-cache_aggregate_permanent = Cache(maxmemory=1024*1024*500)
 def db_aggregate_cached(**kwargs):
-	check_cache_age()
-	global cache_aggregate, cache_aggregate_permanent
+	global cache_aggregate, cache_aggregate_perm
 	key = utilities.serialize(kwargs)

-	# hit permanent cache for past timeranges
-	if "timerange" in kwargs and not kwargs["timerange"].active() and settings.get_settings("CACHE_DATABASE_PERM"):
-		if key in cache_aggregate_permanent: return copy.copy(cache_aggregate_permanent.get(key))
-		result = db_aggregate_full(**kwargs)
-		cache_aggregate_permanent.add(key,copy.copy(result))
-	# hit short term cache
-	else:
-		if key in cache_aggregate:
-			return copy.copy(cache_aggregate[key])
-		else:
-			result = db_aggregate_full(**kwargs)
-			cache_aggregate[key] = copy.copy(result)
+	eligible_permanent_caching = (
+		"timerange" in kwargs and
+		not kwargs["timerange"].active() and
+		settings.get_settings("CACHE_DATABASE_PERM")
+	)
+	eligible_temporary_caching = (
+		not eligible_permanent_caching and
+		settings.get_settings("CACHE_DATABASE_SHORT")
+	)

-	return result
+	# hit permanent cache for past timeranges
+	if eligible_permanent_caching and key in cache_aggregate_perm:
+		return copy.copy(cache_aggregate_perm.get(key))

+	# hit short term cache
+	elif eligible_temporary_caching and key in cache_aggregate:
+		return copy.copy(cache_aggregate.get(key))

+	else:
+		result = db_aggregate_full(**kwargs)
+		if eligible_permanent_caching: cache_aggregate_perm[key] = result
+		elif eligible_temporary_caching: cache_aggregate[key] = result

+	return result

 def invalidate_caches():
 	global cache_query, cache_aggregate
-	cache_query = {}
-	cache_aggregate = {}
+	cache_query.clear()
+	cache_aggregate.clear()

-	now = datetime.datetime.utcnow()
-	global cacheday
-	cacheday = (now.year,now.month,now.day)

 	log("Database caches invalidated.")

-def check_cache_age():
-	now = datetime.datetime.utcnow()
-	global cacheday
-	if cacheday != (now.year,now.month,now.day): invalidate_caches()


 ####
 ## Database queries
 ####

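Taken together, the database.py changes follow one pattern: bound every cache with lru.LRU, decide per query whether the result belongs in the permanent cache (timeranges that are already over) or the short-term cache, and keep hit/miss counters for the hourly debug log. A condensed, self-contained sketch of that pattern; the settings constants, the key construction and db_query_full are stand-ins, not the real maloja code:

import copy
import lru

DB_CACHE_ENTRIES = 10000          # stands in for settings.get_settings("DB_CACHE_ENTRIES")
CACHE_DATABASE_PERM = True        # stands in for the CACHE_DATABASE_PERM setting
CACHE_DATABASE_SHORT = True       # stands in for the CACHE_DATABASE_SHORT setting

cache_query = lru.LRU(DB_CACHE_ENTRIES)        # short-term cache
cache_query_perm = lru.LRU(DB_CACHE_ENTRIES)   # cache for queries about finished timeranges
cachestats = {"hits": 0, "misses": 0}

def db_query_full(**kwargs):
	# placeholder for the real database scan
	return ["scrobbles matching", sorted(kwargs)]

def db_query_cached(**kwargs):
	key = repr(sorted(kwargs.items()))         # placeholder for utilities.serialize(kwargs)

	timerange = kwargs.get("timerange")
	eligible_permanent = timerange is not None and not timerange.active() and CACHE_DATABASE_PERM
	eligible_temporary = not eligible_permanent and CACHE_DATABASE_SHORT

	cache = cache_query_perm if eligible_permanent else cache_query if eligible_temporary else None
	if cache is not None and key in cache:
		cachestats["hits"] += 1
		return copy.copy(cache.get(key))       # shallow copy on the way out, as in the diff

	cachestats["misses"] += 1
	result = db_query_full(**kwargs)
	if cache is not None:
		cache[key] = result                    # lru.LRU evicts the least recently used entry once the cap is reached
	return result

The check_cache_age()/cacheday machinery is removed outright, while invalidate_caches() survives, now clearing the LRU objects in place instead of rebinding fresh dicts.
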
@@ -27,13 +27,16 @@ from .globalconf import datadir

 def serialize(obj):
 	try:
-		return json.dumps(obj)
+		return serialize(obj.hashable())
 	except:
-		if isinstance(obj,list) or isinstance(obj,tuple):
-			return "[" + ",".join(serialize(o) for o in obj) + "]"
-		elif isinstance(obj,dict):
-			return "{" + ",".join(serialize(o) + ":" + serialize(obj[o]) for o in obj) + "}"
-		return json.dumps(obj.hashable())
+		try:
+			return json.dumps(obj)
+		except:
+			if isinstance(obj,list) or isinstance(obj,tuple):
+				return "[" + ",".join(serialize(o) for o in obj) + "]"
+			elif isinstance(obj,dict):
+				return "{" + ",".join(serialize(o) + ":" + serialize(obj[o]) for o in obj) + "}"
+			return json.dumps(obj.hashable())


 #if isinstance(obj,list) or if isinstance(obj,tuple):

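The serialize() rework above matters because its output is used as the cache key (key = utilities.serialize(kwargs) in database.py): query kwargs typically contain objects such as timeranges that json.dumps cannot handle directly but that expose a hashable() method, so the new version tries that first and only then falls back to plain JSON and the manual list/dict handling. A rough illustration with a hypothetical stand-in object (MonthRange is invented here, not a maloja class):

import json

def serialize(obj):
	# same shape as the new utilities.serialize above
	try:
		return serialize(obj.hashable())
	except Exception:
		try:
			return json.dumps(obj)
		except Exception:
			if isinstance(obj,(list,tuple)):
				return "[" + ",".join(serialize(o) for o in obj) + "]"
			elif isinstance(obj,dict):
				return "{" + ",".join(serialize(o) + ":" + serialize(obj[o]) for o in obj) + "}"
			return json.dumps(obj.hashable())

class MonthRange:
	# hypothetical stand-in for a maloja timerange
	def __init__(self, year, month):
		self.year, self.month = year, month
	def hashable(self):
		return (self.year, self.month)

key = serialize({"timerange": MonthRange(2020, 4), "artist": "Example Artist"})
print(key)    # roughly: {"timerange":[2020, 4],"artist":"Example Artist"}

Two calls with equal kwargs produce the same string, which is what lets db_query_cached and db_aggregate_cached find earlier results in their LRU caches.
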