mirror of https://github.com/krateng/maloja.git synced 2023-08-10 21:12:55 +03:00

Smarter caching of database responses

Krateng 2019-04-12 17:57:28 +02:00
parent 7795863bf2
commit 667c567df0
4 changed files with 86 additions and 14 deletions

View File

@@ -9,13 +9,12 @@ from urihandler import uri_to_internal
 # doreah toolkit
 from doreah.logging import log
 from doreah import tsv
+from doreah.caching import Cache
 # technical
 import os
 import datetime
 import sys
 import unicodedata
-import json
-import pickle
 from collections import namedtuple
 from threading import Lock
 # url handling
@@ -896,26 +895,43 @@ def sync():
 import copy
 cache_query = {}
+cache_query_permanent = Cache(maxsize=30000)
 cacheday = (0,0,0)
 def db_query(**kwargs):
 	check_cache_age()
-	global cache_query
-	key = pickle.dumps(kwargs)
-	if key in cache_query: return copy.copy(cache_query[key])
+	global cache_query, cache_query_permanent
+	key = serialize(kwargs)
+	if "timerange" in kwargs and not kwargs["timerange"].active():
+		if key in cache_query_permanent:
+			#print("Hit")
+			return copy.copy(cache_query_permanent.get(key))
+		#print("Miss")
+		result = db_query_full(**kwargs)
+		cache_query_permanent.add(key,copy.copy(result))
+		#print(cache_query_permanent.cache)
+	else:
+		#print("I guess they never miss huh")
+		if key in cache_query: return copy.copy(cache_query[key])
+		result = db_query_full(**kwargs)
+		cache_query[key] = copy.copy(result)
-	result = db_query_full(**kwargs)
-	cache_query[key] = copy.copy(result)
 	return result
 cache_aggregate = {}
+cache_aggregate_permanent = Cache(maxsize=30000)
 def db_aggregate(**kwargs):
 	check_cache_age()
-	global cache_aggregate
-	key = pickle.dumps(kwargs)
-	if key in cache_aggregate: return copy.copy(cache_aggregate[key])
+	global cache_aggregate, cache_aggregate_permanent
+	key = serialize(kwargs)
+	if "timerange" in kwargs and not kwargs["timerange"].active():
+		if key in cache_aggregate_permanent: return copy.copy(cache_aggregate_permanent.get(key))
+		result = db_aggregate_full(**kwargs)
+		cache_aggregate_permanent.add(key,copy.copy(result))
+	else:
+		if key in cache_aggregate: return copy.copy(cache_aggregate[key])
+		result = db_aggregate_full(**kwargs)
+		cache_aggregate[key] = copy.copy(result)
-	result = db_aggregate_full(**kwargs)
-	cache_aggregate[key] = copy.copy(result)
 	return result
 def invalidate_caches():
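
The change above splits each response cache into two tiers: a doreah Cache capped at 30000 entries for queries whose time range lies entirely in the past (those results can never change, so they are kept permanently), and the existing flushable dict for ranges that still include today. Results are copied with copy.copy on both store and retrieval so callers cannot mutate cached entries. Below is a minimal, self-contained sketch of the same pattern; the stub types and plain dicts are illustrative stand-ins, not Maloja's actual API.

# Two-tier cache sketch with stand-ins for Maloja's query machinery.
import json

class StubTimeRange:
	# stands in for an MRange: active() is True while the range still
	# includes today, i.e. while its results may still change
	def __init__(self, is_active):
		self.is_active = is_active
	def active(self):
		return self.is_active
	def hashable(self):
		return ("stub", self.is_active)

cache_volatile = {}    # flushed whenever new scrobbles arrive
cache_permanent = {}   # closed time ranges: results never change

def db_query_full(**kwargs):
	print("expensive database work")
	return ["some", "result"]

def make_key(kwargs):
	# simplified stand-in for the serialize() helper added in utilities.py
	clean = {k: (v.hashable() if hasattr(v, "hashable") else v) for k, v in sorted(kwargs.items())}
	return json.dumps(clean)

def db_query(**kwargs):
	key = make_key(kwargs)
	timerange = kwargs.get("timerange")
	if timerange is not None and not timerange.active():
		# range lies entirely in the past: cache the result forever
		if key not in cache_permanent:
			cache_permanent[key] = db_query_full(**kwargs)
		return list(cache_permanent[key])
	# range can still gain scrobbles: use the flushable cache
	if key not in cache_volatile:
		cache_volatile[key] = db_query_full(**kwargs)
	return list(cache_volatile[key])

db_query(timerange=StubTimeRange(False))  # does database work
db_query(timerange=StubTimeRange(False))  # served from cache_permanent

Returning list(...) copies in the sketch mirrors the copy.copy calls in the diff, which prevent callers from mutating cached results in place.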

View File

@@ -62,8 +62,10 @@ class MRangeDescriptor:
 		if not isinstance(other,MRangeDescriptor): return False
 		return (self.first_stamp() == other.first_stamp() and self.last_stamp() == other.last_stamp())
 	def __hash__(self):
 		return hash((self.first_stamp(),self.last_stamp()))
+	# gives a hashable object that uniquely identifies this time range
+	def hashable(self):
+		return self.first_stamp(),self.last_stamp()
 	def info(self):
 		return {
@@ -105,6 +107,21 @@ class MTime(MRangeDescriptor):
 	def tostr(self):
 		return str(self)
+	# whether we currently live or will ever again live in this range
+	def active(self):
+		tod = date.today()
+		if tod.year > self.year: return False
+		if self.precision == 1: return True
+		if tod.year == self.year:
+			if tod.month > self.month: return False
+			if self.precision == 2: return True
+			if tod.month == self.month:
+				if tod.day > self.day: return False
+		return True
 	def urikeys(self):
 		return {"in":str(self)}
@@ -227,6 +244,15 @@ class MTimeWeek(MRangeDescriptor):
 	def tostr(self):
 		return str(self)
+	# whether we currently live or will ever again live in this range
+	def active(self):
+		tod = date.today()
+		if tod.year > self.year: return False
+		if tod.year == self.year:
+			if tod.chrcalendar()[1] > self.week: return False
+		return True
 	def urikeys(self):
 		return {"in":str(self)}
@@ -284,6 +310,11 @@ class MRange(MRangeDescriptor):
 	def tostr(self):
 		return str(self.to)
+	# whether we currently live or will ever again live in this range
+	def active(self):
+		if self.to is None: return True
+		return self.to.active()
 	def unlimited(self):
 		return (self.since is None and self.to is None)
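
All of the active() implementations added above answer the same question: can this time range still receive new scrobbles? A year is inactive once the current year has passed it, a month once the current month within that year has passed, a day or week likewise; an MRange delegates to its end point and treats an open end as always active. This is what lets db_query choose between the permanent and the flushable cache. A standalone sketch of the year/month/day logic, assuming precision 1/2/3 means year/month/day resolution (names are illustrative):

# MTime.active() rewritten as a free function for illustration.
from datetime import date

def active(year, month=1, day=1, precision=1):
	tod = date.today()
	if tod.year > year: return False      # the whole range lies in the past
	if precision == 1: return True        # current or future year
	if tod.year == year:
		if tod.month > month: return False
		if precision == 2: return True
		if tod.month == month:
			if tod.day > day: return False
	return True

# For any run after 2018, a 2018 range is settled and cacheable forever:
assert active(2018, precision=1) is False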

View File

@@ -3,6 +3,7 @@ import os
 import hashlib
 from threading import Thread, Timer
 import pickle
+import json
 import urllib
 import datetime
 import random
@@ -14,6 +15,25 @@ from doreah.regular import yearly, daily
+#####
+## SERIALIZE
+#####
+
+def serialize(obj):
+	try:
+		return json.dumps(obj)
+	except:
+		if isinstance(obj,list) or isinstance(obj,tuple):
+			return "[" + ",".join(serialize(o) for o in obj) + "]"
+		elif isinstance(obj,dict):
+			return "{" + ",".join(serialize(o) + ":" + serialize(obj[o]) for o in obj) + "}"
+		return json.dumps(obj.hashable())
+
+	#if isinstance(obj,list) or if isinstance(obj,tuple):
+	#	return "[" + ",".join(dumps(o) for o in obj) + "]"
+	#if isinstance(obj,str)
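
serialize() turns a query's keyword arguments into a stable string cache key: plain json.dumps where possible, otherwise recursion into lists, tuples and dicts, and finally a fallback to the object's hashable() tuple, the method added to MRangeDescriptor above. It replaces pickle.dumps as the key builder, giving readable keys that do not depend on pickle protocol details. A hypothetical usage example, with a stub standing in for a real time range and the bare except narrowed to TypeError for the sketch:

# Key building with serialize(); TimeStub is an illustrative stand-in
# for an MRangeDescriptor that provides hashable().
import json

def serialize(obj):
	try:
		return json.dumps(obj)
	except TypeError:
		if isinstance(obj, (list, tuple)):
			return "[" + ",".join(serialize(o) for o in obj) + "]"
		elif isinstance(obj, dict):
			return "{" + ",".join(serialize(k) + ":" + serialize(obj[k]) for k in obj) + "}"
		return json.dumps(obj.hashable())

class TimeStub:
	def hashable(self):
		return (1554076800, 1556668799)  # (first_stamp, last_stamp)

print(serialize({"artist": "Example Artist", "timerange": TimeStub()}))
# {"artist":"Example Artist","timerange":[1554076800, 1556668799]}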

View File

@@ -23,6 +23,11 @@ def instructions(keys):
 	elif filterkeys.get("artist") is not None:
 		#limitkey["artist"], limitkey["associated"] = keys.get("artist"), (keys.get("associated")!=None)
 		limitstring += "of " + artistLink(filterkeys.get("artist"))
+		# associated are counted by default
+		data = database.artistInfo(filterkeys["artist"])
+		moreartists = data["associated"]
+		if moreartists != []:
+			limitstring += " <span class='extra'>including " + artistLinks(moreartists) + "</span>"
 	limitstring += " " + timekeys["timerange"].desc(prefix=True)
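
Because associated artists (aliases and side projects) are counted toward the main artist by default, the artist page header now says so explicitly. A hypothetical sketch of that string construction, with stub helpers in place of Maloja's artistLink/artistLinks and database.artistInfo:

# Illustrative rebuild of the limitstring logic with stub link helpers.
def artistLink(name):
	return "<a href='/artist?artist=" + name + "'>" + name + "</a>"

def artistLinks(names):
	return ", ".join(artistLink(n) for n in names)

def artist_limitstring(artist, associated):
	limitstring = "of " + artistLink(artist)
	# associated artists are counted by default, so the header says so
	if associated != []:
		limitstring += " <span class='extra'>including " + artistLinks(associated) + "</span>"
	return limitstring

print(artist_limitstring("Main Artist", ["Side Project"]))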