Mirror of https://github.com/krateng/maloja.git, synced 2023-08-10 21:12:55 +03:00

Commit 5f29cea6ad: Merge branch 'master' into processcontrolrework
Dockerfile (18 changed lines):
@@ -1,12 +1,18 @@
-FROM python:3.6-alpine
+FROM python:3-alpine
 
 WORKDIR /usr/src/app
 
 RUN apk update
-RUN apk add gcc libxml2-dev libxslt-dev py3-pip libc-dev linux-headers
-RUN pip3 install psutil
-
-RUN pip3 install malojaserver
+RUN apk add --no-cache --virtual .build-deps \
+    gcc \
+    libxml2-dev \
+    libxslt-dev \
+    py3-pip \
+    libc-dev \
+    linux-headers \
+    && \
+    pip3 install psutil && \
+    pip3 install malojaserver && \
+    apk del .build-deps
 
 EXPOSE 42010
@@ -59,8 +59,8 @@ I can support you with issues best if you use **Alpine Linux**. In my experience
 
 5) (Recommended) Until I have a proper service implemented, I would recommend setting two cronjobs for maloja:
 
 ```
-@reboot maloja start
-42 0 * * * maloja restart
+@reboot sleep 15 && maloja start
+42 0 * * 2 maloja restart
 ```
 
@@ -1,3 +1,4 @@
 #!/usr/bin/env bash
-apk add python3 python3-dev gcc libxml2-dev libxslt-dev py3-pip libc-dev
+apk add python3 python3-dev gcc libxml2-dev libxslt-dev py3-pip libc-dev linux-headers
+pip3 install psutil
 pip3 install malojaserver
@@ -1,4 +1,5 @@
 #!/usr/bin/env bash
 apt update
 apt install python3 python3-pip
+pip3 install psutil
 pip3 install malojaserver
@@ -5,7 +5,7 @@ author = {
     "email":"maloja@krateng.dev",
     "github": "krateng"
 }
-version = 2,4,9
+version = 2,4,13
 versionstr = ".".join(str(n) for n in version)
 links = {
     "pypi":"malojaserver",
@@ -15,7 +15,7 @@ links = {
 requires = [
     "bottle>=0.12.16",
     "waitress>=1.3",
-    "doreah>=1.5.6",
+    "doreah>=1.6.3",
     "nimrodel>=0.6.3",
     "setproctitle>=1.1.10",
     "wand>=0.5.4",
@@ -68,3 +68,4 @@ USE_PYHP = no #not recommended at the moment
 USE_JINJA = no #overwrites pyhp preference
 FEDERATION = yes #does nothing yet
 SKIP_SETUP = no
+LOGGING = true
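The new LOGGING key added above is consumed later in this same commit, in the doreah configuration block, where it decides whether a log folder is configured at all. A minimal sketch of that use, reusing the get_settings and datadir helpers that appear elsewhere in this diff:

```python
from doreah.settings import get_settings

# When LOGGING is false, no logfolder is handed to doreah, so nothing is
# written to disk; datadir() is the helper defined further down in this diff.
logfolder = datadir("logs") if get_settings("LOGGING") else None
```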
@@ -933,6 +933,7 @@ def build_db():
     log("Building database...")
 
     global SCROBBLES, ARTISTS, TRACKS
+    global TRACKS_NORMALIZED_SET, TRACKS_NORMALIZED, ARTISTS_NORMALIZED_SET, ARTISTS_NORMALIZED
     global SCROBBLESDICT, STAMPS
 
     SCROBBLES = []
@@ -941,6 +942,11 @@ def build_db():
     STAMPS = []
     SCROBBLESDICT = {}
 
+    TRACKS_NORMALIZED = []
+    ARTISTS_NORMALIZED = []
+    ARTISTS_NORMALIZED_SET = set()
+    TRACKS_NORMALIZED_SET = set()
+
 
     # parse files
     db = tsv.parse_all(datadir("scrobbles"),"int","string","string",comments=False)
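The four *_NORMALIZED structures initialised above are rebuilt together with the main scrobble, track and artist lists; how they are filled and queried is not part of this diff. Purely as a hypothetical illustration of what normalized lookup structures are generally for (none of the names or the normalization rule below are taken from this commit), they let near-identical spellings collapse onto one key:

```python
# Hypothetical illustration only; maloja's actual normalization is not shown in this diff.
def normalize(name):
    # drop case, surrounding whitespace and anything non-alphanumeric
    return "".join(c for c in name.casefold().strip() if c.isalnum())

ARTISTS_NORMALIZED_SET = set()
for artist in ("The Beatles", "the beatles ", "Beatles, The"):
    ARTISTS_NORMALIZED_SET.add(normalize(artist))

print(ARTISTS_NORMALIZED_SET)  # {'thebeatles', 'beatlesthe'}: the first two spellings collapse
```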
@@ -1067,6 +1073,9 @@ cache_query_perm = lru.LRU(csz)
 cache_aggregate = lru.LRU(csz)
 cache_aggregate_perm = lru.LRU(csz)
 
+perm_caching = settings.get_settings("CACHE_DATABASE_PERM")
+temp_caching = settings.get_settings("CACHE_DATABASE_SHORT")
+
 cachestats = {
     "cache_query":{
         "hits_perm":0,
@@ -1102,11 +1111,11 @@ def db_query_cached(**kwargs):
     eligible_permanent_caching = (
         "timerange" in kwargs and
         not kwargs["timerange"].active() and
-        settings.get_settings("CACHE_DATABASE_PERM")
+        perm_caching
     )
     eligible_temporary_caching = (
         not eligible_permanent_caching and
-        settings.get_settings("CACHE_DATABASE_SHORT")
+        temp_caching
     )
 
     # hit permanent cache for past timeranges
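The hunk above swaps the per-call settings lookups for the module-level perm_caching / temp_caching flags introduced earlier in this diff, so the settings file is no longer consulted on every query. For orientation, a minimal sketch of the gate those two flags feed (same names as in the diff; serialize_key is a hypothetical stand-in for however the real code turns kwargs into a cache key, and the hit/miss statistics are omitted):

```python
def db_query_cached_sketch(**kwargs):
    # results for closed time ranges can never change, so they may be cached permanently
    eligible_permanent_caching = (
        "timerange" in kwargs and
        not kwargs["timerange"].active() and
        perm_caching
    )
    # everything else only qualifies for the short-lived cache
    eligible_temporary_caching = (
        not eligible_permanent_caching and
        temp_caching
    )

    key = serialize_key(kwargs)  # hypothetical helper, not part of this diff

    if eligible_permanent_caching and key in cache_query_perm:
        return cache_query_perm[key]
    if eligible_temporary_caching and key in cache_query:
        return cache_query[key]

    result = db_query(**kwargs)
    if eligible_permanent_caching:
        cache_query_perm[key] = result
    elif eligible_temporary_caching:
        cache_query[key] = result
    return result
```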
@@ -1138,11 +1147,11 @@ def db_aggregate_cached(**kwargs):
     eligible_permanent_caching = (
         "timerange" in kwargs and
         not kwargs["timerange"].active() and
-        settings.get_settings("CACHE_DATABASE_PERM")
+        perm_caching
    )
     eligible_temporary_caching = (
         not eligible_permanent_caching and
-        settings.get_settings("CACHE_DATABASE_SHORT")
+        temp_caching
     )
 
     # hit permanent cache for past timeranges
@@ -1173,17 +1182,18 @@ def invalidate_caches():
     log("Database caches invalidated.")
 
 def reduce_caches(to=0.75):
-    global cache_query, cache_aggregate
-    for c in cache_query, cache_aggregate:
+    global cache_query, cache_aggregate, cache_query_perm, cache_aggregate_perm
+    for c in cache_query, cache_aggregate, cache_query_perm, cache_aggregate_perm:
         currentsize = len(c)
-        targetsize = int(currentsize * to)
-        c.set_size(targetsize)
-        c.set_size(csz)
+        if currentsize > 100:
+            targetsize = max(int(currentsize * to),10)
+            c.set_size(targetsize)
+            c.set_size(csz)
 
 def reduce_caches_if_low_ram():
     ramprct = psutil.virtual_memory().percent
     if ramprct > cmp:
-        log("{prct}% RAM usage, reducing temporary caches!".format(prct=ramprct),module="debug")
+        log("{prct}% RAM usage, reducing caches!".format(prct=ramprct),module="debug")
         ratio = (cmp / ramprct) ** 3
         reduce_caches(to=ratio)
 
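The rewritten reduce_caches now also covers the two permanent caches, skips caches that are already small (at most 100 entries) and never shrinks one below 10 entries. reduce_caches_if_low_ram feeds it a cubic ratio: with a threshold cmp of, say, 75 and RAM usage at 90%, the factor is (75/90)**3 ≈ 0.58, and at 95% it drops to ≈ 0.49, so the cut deepens as memory pressure rises. A short sketch of that call path (CACHE_REDUCE_THRESHOLD stands in for the module-level cmp, whose actual value is not shown in this diff):

```python
import psutil

CACHE_REDUCE_THRESHOLD = 75  # stand-in for cmp; the real value is configured elsewhere

ramprct = psutil.virtual_memory().percent
if ramprct > CACHE_REDUCE_THRESHOLD:
    # 90% usage -> (75/90)**3 ≈ 0.58, 95% -> ≈ 0.49
    ratio = (CACHE_REDUCE_THRESHOLD / ramprct) ** 3
    reduce_caches(to=ratio)
```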
@@ -1,23 +1,34 @@
 import os
 from doreah.settings import get_settings
 from doreah.settings import config as settingsconfig
 
 
 # data folder
 # must be determined first because getting settings relies on it
 
-try:
-    DATA_DIR = os.environ["XDG_DATA_HOME"].split(":")[0]
-    assert os.path.exists(DATA_DIR)
-except:
-    DATA_DIR = os.path.join(os.environ["HOME"],".local/share/")
+# check environment variables for data directory
+# otherwise, go with defaults
+setting_datadir = get_settings("DATA_DIRECTORY",files=[],environ_prefix="MALOJA_")
+if setting_datadir is not None and os.path.exists(setting_datadir):
+    DATA_DIR = setting_datadir
+else:
+    try:
+        HOME_DIR = os.environ["XDG_DATA_HOME"].split(":")[0]
+        assert os.path.exists(HOME_DIR)
+    except:
+        HOME_DIR = os.path.join(os.environ["HOME"],".local/share/")
+
+    DATA_DIR = os.path.join(HOME_DIR,"maloja")
 
-DATA_DIR = os.path.join(DATA_DIR,"maloja")
 os.makedirs(DATA_DIR,exist_ok=True)
 
 
 def datadir(*args):
     return os.path.join(DATA_DIR,*args)
 
 
 
 ### DOREAH CONFIGURATION
 
 from doreah import config
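The data-directory lookup above now checks for an explicit DATA_DIRECTORY setting first (read with environ_prefix="MALOJA_", so it can presumably also be supplied as a MALOJA_DATA_DIRECTORY environment variable) and only falls back to the old XDG logic when that setting is absent or points nowhere; note that the maloja subfolder is appended only in the fallback branch. A minimal sketch of the same resolution order using plain os.environ instead of doreah, for illustration only:

```python
import os

def resolve_data_dir():
    # explicit override wins, if it points at an existing directory
    override = os.environ.get("MALOJA_DATA_DIRECTORY")
    if override and os.path.exists(override):
        return override
    # otherwise: first XDG_DATA_HOME entry, then ~/.local/share, plus maloja's own subfolder
    home = os.environ.get("XDG_DATA_HOME", "").split(":")[0]
    if not home or not os.path.exists(home):
        home = os.path.join(os.environ["HOME"], ".local/share/")
    return os.path.join(home, "maloja")

DATA_DIR = resolve_data_dir()
os.makedirs(DATA_DIR, exist_ok=True)
```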
@@ -26,9 +37,6 @@ config(
     pyhp={
         "version": 2
     },
-    logging={
-        "logfolder": datadir("logs")
-    },
     settings={
         "files":[
             datadir("settings/default.ini"),
@@ -44,9 +52,18 @@ config(
     }
 )
 
 # because we loaded a doreah module already before setting the config, we need to to that manually
 settingsconfig._readpreconfig()
 
+config(
+    logging={
+        "logfolder": datadir("logs") if get_settings("LOGGING") else None
+    }
+)
+
+settingsconfig._readpreconfig()
+
 
 from doreah.settings import get_settings
 
 # thumbor
 