2019-03-31 13:18:49 +03:00
|
|
|
# server
|
2022-04-25 18:03:44 +03:00
|
|
|
from bottle import request, response, FormsDict
|
2020-05-29 05:46:53 +03:00
|
|
|
|
2019-03-31 13:18:49 +03:00
|
|
|
# rest of the project
|
2022-04-08 17:08:48 +03:00
|
|
|
from ..cleanup import CleanerAgent
|
2022-03-27 21:08:41 +03:00
|
|
|
from .. import images
|
2022-01-07 23:47:55 +03:00
|
|
|
from ..malojatime import register_scrobbletime, time_stamps, ranges, alltime
|
2022-01-04 22:45:15 +03:00
|
|
|
from ..malojauri import uri_to_internal, internal_to_uri, compose_querystring
|
|
|
|
from ..thirdparty import proxy_scrobble_all
|
2022-04-09 22:39:04 +03:00
|
|
|
from ..pkg_global.conf import data_dir, malojaconfig
|
2022-03-06 03:57:46 +03:00
|
|
|
from ..apis import apikeystore
|
2022-01-03 10:01:49 +03:00
|
|
|
#db
|
2022-01-04 22:45:15 +03:00
|
|
|
from . import sqldb
|
2022-01-09 08:58:06 +03:00
|
|
|
from . import cached
|
2022-02-15 07:20:27 +03:00
|
|
|
from . import dbcache
|
2022-04-25 18:03:44 +03:00
|
|
|
from . import exceptions
|
2020-05-29 05:46:53 +03:00
|
|
|
|
2019-03-31 13:18:49 +03:00
|
|
|
# doreah toolkit
|
2019-03-29 21:44:42 +03:00
|
|
|
from doreah.logging import log
|
2020-07-29 21:11:51 +03:00
|
|
|
from doreah.auth import authenticated_api, authenticated_api_with_alternate
|
2019-05-09 17:58:25 +03:00
|
|
|
import doreah
|
2020-05-29 05:46:53 +03:00
|
|
|
|
2020-09-02 16:22:30 +03:00
|
|
|
|
2022-01-03 04:08:02 +03:00
|
|
|
|
2020-05-29 05:46:53 +03:00
|
|
|
|
2019-03-31 13:18:49 +03:00
|
|
|
# technical
|
|
|
|
import os
|
|
|
|
import datetime
|
2018-11-27 18:08:14 +03:00
|
|
|
import sys
|
2019-03-10 19:38:33 +03:00
|
|
|
import unicodedata
|
2019-04-07 15:43:36 +03:00
|
|
|
from collections import namedtuple
|
2019-04-07 16:27:24 +03:00
|
|
|
from threading import Lock
|
2022-01-03 04:08:02 +03:00
|
|
|
import yaml, json
|
2020-11-14 21:42:23 +03:00
|
|
|
import math
|
2020-05-29 05:46:53 +03:00
|
|
|
|
2019-03-31 13:18:49 +03:00
|
|
|
# url handling
|
|
|
|
import urllib
|
|
|
|
|
|
|
|
|
|
|
|
|
2020-12-02 22:09:19 +03:00
|
|
|
# Global database state flags; mutated by start_db() during startup and
# read by the waitfordb decorator to gate API access.
dbstatus = {
	"healthy":False, # we can access the db
	"rebuildinprogress":False,
	"complete":False # information is complete
}
|
2018-11-24 18:29:24 +03:00
|
|
|
|
2019-04-07 15:43:36 +03:00
|
|
|
|
2022-04-16 16:59:42 +03:00
|
|
|
|
2022-01-06 11:28:34 +03:00
|
|
|
def waitfordb(func):
	"""Decorator that refuses to run *func* until the database is marked healthy.

	Raises:
		exceptions.DatabaseNotBuilt: when called before start_db() finished
			the initial upgrade/load phase.
	"""
	from functools import wraps

	@wraps(func) # keep the wrapped function's name/docstring for introspection
	def newfunc(*args,**kwargs):
		if not dbstatus['healthy']: raise exceptions.DatabaseNotBuilt()
		return func(*args,**kwargs)

	return newfunc
|
|
|
|
|
|
|
|
|
2019-04-07 16:27:24 +03:00
|
|
|
|
2020-08-17 19:28:31 +03:00
|
|
|
# Registry of detected issues; not populated anywhere in this part of the
# file — presumably filled elsewhere, TODO confirm.
ISSUES = {}

# Shared cleanup agent used to normalize incoming artist/title strings.
cla = CleanerAgent()
|
2019-12-06 20:25:36 +03:00
|
|
|
|
2018-12-20 19:23:16 +03:00
|
|
|
|
2018-12-12 21:37:59 +03:00
|
|
|
|
2019-03-14 13:07:20 +03:00
|
|
|
|
2022-04-06 18:42:48 +03:00
|
|
|
|
|
|
|
def incoming_scrobble(rawscrobble,fix=True,client=None,api=None,dbconn=None):
	"""Validate, normalize and store a scrobble submitted by a client.

	The raw scrobble is a flat dict - all info of the scrobble should be a
	top level key, but a value can be a list. The following keys are valid:

		scrobble_duration   int
		scrobble_time       int
		track_title         str, mandatory
		track_artists       list, mandatory
		track_length        int
		album_name          str
		album_artists       list

	Args:
		rawscrobble: Flat dict as described above.
		fix: Run the cleanup agent over artists/title before storing.
		client: Name of the submitting client (recorded as origin).
		api: Name of the API used for submission (logging only).
		dbconn: Optional existing database connection to reuse.

	Returns:
		The internal scrobble dict that was stored.

	Raises:
		exceptions.MissingScrobbleParameters: if a mandatory field is
			absent or empty.
	"""

	# reject early if mandatory fields are missing or empty
	missing = [
		necessary_arg for necessary_arg in ("track_artists","track_title")
		if necessary_arg not in rawscrobble or len(rawscrobble[necessary_arg]) == 0
	]
	if missing:
		log(f"Invalid Scrobble [Client: {client} | API: {api}]: {rawscrobble} ",color='red')
		raise exceptions.MissingScrobbleParameters(missing)

	log(f"Incoming scrobble [Client: {client} | API: {api}]: {rawscrobble}")

	scrobbledict = rawscrobble_to_scrobbledict(rawscrobble, fix, client)

	sqldb.add_scrobble(scrobbledict,dbconn=dbconn)
	# forward to any configured third-party scrobbling services
	proxy_scrobble_all(scrobbledict['track']['artists'],scrobbledict['track']['title'],scrobbledict['time'])

	# cached results covering this timestamp are now stale
	dbcache.invalidate_caches(scrobbledict['time'])

	return scrobbledict
|
|
|
|
|
2022-04-21 19:28:59 +03:00
|
|
|
|
2022-04-20 16:59:33 +03:00
|
|
|
@waitfordb
def reparse_scrobble(timestamp):
	"""Re-run the cleanup rules on the stored raw scrobble at *timestamp*.

	Returns the freshly stored scrobble when the re-parse maps to a
	different track, otherwise False (also when no raw data exists)."""
	log(f"Reparsing Scrobble {timestamp}")
	existing = sqldb.get_scrobble(timestamp=timestamp, include_internal=True)

	if (not existing) or (not existing['rawscrobble']):
		return False

	reparsed = rawscrobble_to_scrobbledict(existing['rawscrobble'])

	# compare the track the new parse resolves to against the stored one
	new_track_id = sqldb.get_track_id(reparsed['track'])
	if sqldb.get_track_id(existing['track']) == new_track_id:
		return False

	sqldb.edit_scrobble(timestamp, {'track':reparsed['track']})
	dbcache.invalidate_entity_cache()
	dbcache.invalidate_caches()
	return sqldb.get_scrobble(timestamp=timestamp)
|
2022-04-20 16:59:33 +03:00
|
|
|
|
|
|
|
|
|
|
|
def rawscrobble_to_scrobbledict(rawscrobble, fix=True, client=None):
	"""Convert a flat raw scrobble dict into the internal nested scrobble dict.

	Args:
		rawscrobble: Flat dict with keys such as track_artists, track_title,
			scrobble_time, album_name, ...
		fix: Run the cleanup agent over artists/title.
		client: Client name recorded in the origin field.

	Returns:
		Nested dict with time, track (incl. album), duration, origin, any
		extra client-supplied keys, and the untouched raw scrobble."""

	# shallow copy so the caller's dict is never mutated
	info = dict(rawscrobble)

	if fix:
		cleaned_artists, cleaned_title = cla.fullclean(info['track_artists'],info['track_title'])
		info['track_artists'] = cleaned_artists
		info['track_title'] = cleaned_title
	# default to "now" (UTC) when the client supplied no timestamp
	info['scrobble_time'] = info.get('scrobble_time') or int(datetime.datetime.now(tz=datetime.timezone.utc).timestamp())

	album = {
		"name":info.get('album_name'),
		"artists":info.get('album_artists')
	}
	track = {
		"artists":info.get('track_artists'),
		"title":info.get('track_title'),
		"album":album,
		"length":info.get('track_length')
	}

	# keys we mapped above are consumed; everything else is preserved
	# verbatim (album info intentionally appears in both places)
	consumed = ('scrobble_time','track_artists','track_title','track_length','scrobble_duration')
	leftovers = {k:v for k,v in info.items() if k not in consumed}

	if client:
		origin = f"client:{client}"
	else:
		origin = "generic"

	return {
		"time":info.get('scrobble_time'),
		"track":track,
		"duration":info.get('scrobble_duration'),
		"origin":origin,
		"extra":leftovers,
		"rawscrobble":rawscrobble
	}
|
2019-06-24 16:43:38 +03:00
|
|
|
|
2019-04-07 16:55:49 +03:00
|
|
|
|
2022-04-07 21:37:46 +03:00
|
|
|
@waitfordb
def remove_scrobble(timestamp):
	"""Delete the scrobble stored at *timestamp* and drop affected caches."""
	log(f"Deleting Scrobble {timestamp}")
	deleted = sqldb.delete_scrobble(timestamp)
	# cached results covering this timestamp are stale now
	dbcache.invalidate_caches(timestamp)
	return deleted
|
|
|
|
|
2022-04-15 19:48:03 +03:00
|
|
|
@waitfordb
def edit_artist(id,artistinfo):
	"""Apply *artistinfo* edits to the artist with the given id and flush caches."""
	artist = sqldb.get_artist(id)
	log(f"Renaming {artist} to {artistinfo}")
	result = sqldb.edit_artist(id,artistinfo)
	# names changed, so cached entities and aggregates may both be stale
	for flush in (dbcache.invalidate_entity_cache, dbcache.invalidate_caches):
		flush()
	return result
|
|
|
|
|
2022-04-15 20:41:44 +03:00
|
|
|
@waitfordb
def edit_track(id,trackinfo):
	"""Apply *trackinfo* edits to the track with the given id and flush caches."""
	track = sqldb.get_track(id)
	log(f"Renaming {track['title']} to {trackinfo['title']}")
	result = sqldb.edit_track(id,trackinfo)
	# title changed, so cached entities and aggregates may both be stale
	for flush in (dbcache.invalidate_entity_cache, dbcache.invalidate_caches):
		flush()
	return result
|
|
|
|
|
2022-04-17 18:37:17 +03:00
|
|
|
@waitfordb
def merge_artists(target_id,source_ids):
	"""Merge all artists in *source_ids* into the artist *target_id*."""
	sources = [sqldb.get_artist(artist_id) for artist_id in source_ids]
	target = sqldb.get_artist(target_id)
	log(f"Merging {sources} into {target}")
	result = sqldb.merge_artists(target_id,source_ids)
	# every cached view involving the merged artists is now invalid
	for flush in (dbcache.invalidate_entity_cache, dbcache.invalidate_caches):
		flush()
	return result
|
2019-02-15 21:39:19 +03:00
|
|
|
|
2022-04-17 18:37:17 +03:00
|
|
|
@waitfordb
def merge_tracks(target_id,source_ids):
	"""Merge all tracks in *source_ids* into the track *target_id*."""
	sources = [sqldb.get_track(track_id) for track_id in source_ids]
	target = sqldb.get_track(target_id)
	log(f"Merging {sources} into {target}")
	result = sqldb.merge_tracks(target_id,source_ids)
	# every cached view involving the merged tracks is now invalid
	for flush in (dbcache.invalidate_entity_cache, dbcache.invalidate_caches):
		flush()
	return result
|
2019-02-15 21:39:19 +03:00
|
|
|
|
|
|
|
|
2019-04-07 15:43:36 +03:00
|
|
|
|
|
|
|
|
2022-01-06 11:28:34 +03:00
|
|
|
@waitfordb
def get_scrobbles(dbconn=None,**keys):
	"""Return scrobbles within keys['timerange'], newest first.

	Optional keys 'artist' or 'track' restrict the result to that entity."""
	since, to = keys.get('timerange').timestamps()

	if 'artist' in keys:
		rows = sqldb.get_scrobbles_of_artist(artist=keys['artist'],since=since,to=to,dbconn=dbconn)
	elif 'track' in keys:
		rows = sqldb.get_scrobbles_of_track(track=keys['track'],since=since,to=to,dbconn=dbconn)
	else:
		rows = sqldb.get_scrobbles(since=since,to=to,dbconn=dbconn)

	# stored oldest-first; consumers expect newest-first
	return list(reversed(rows))
|
2019-02-16 18:28:32 +03:00
|
|
|
|
2022-04-21 19:28:59 +03:00
|
|
|
|
2022-01-06 11:28:34 +03:00
|
|
|
@waitfordb
def get_scrobbles_num(dbconn=None,**keys):
	"""Return the number of scrobbles within keys['timerange'],
	optionally restricted by an 'artist' or 'track' key."""
	since, to = keys.get('timerange').timestamps()

	if 'artist' in keys:
		# references don't need resolving just to count rows
		return len(sqldb.get_scrobbles_of_artist(artist=keys['artist'],since=since,to=to,resolve_references=False,dbconn=dbconn))
	if 'track' in keys:
		return len(sqldb.get_scrobbles_of_track(track=keys['track'],since=since,to=to,resolve_references=False,dbconn=dbconn))
	return sqldb.get_scrobbles_num(since=since,to=to,dbconn=dbconn)
|
|
|
|
|
|
|
|
|
2019-02-15 21:39:19 +03:00
|
|
|
|
2022-01-06 11:28:34 +03:00
|
|
|
@waitfordb
def get_tracks(dbconn=None,**keys):
	"""Return all tracks, or only the tracks of keys['artist'] when given."""
	artist = keys.get('artist')
	if artist is None:
		return sqldb.get_tracks(dbconn=dbconn)
	return sqldb.get_tracks_of_artist(artist,dbconn=dbconn)
|
2019-03-14 13:07:20 +03:00
|
|
|
|
2022-01-06 11:28:34 +03:00
|
|
|
@waitfordb
def get_artists(dbconn=None):
	"""Return all artists in the database."""
	return sqldb.get_artists(dbconn=dbconn)
|
2019-02-15 21:39:19 +03:00
|
|
|
|
|
|
|
|
2022-01-06 22:07:55 +03:00
|
|
|
@waitfordb
def get_charts_artists(dbconn=None,**keys):
	"""Return artists ranked by scrobble count within keys['timerange']."""
	since, to = keys.get('timerange').timestamps()
	return sqldb.count_scrobbles_by_artist(since=since,to=to,dbconn=dbconn)
|
2019-02-15 21:39:19 +03:00
|
|
|
|
2022-01-07 06:07:10 +03:00
|
|
|
@waitfordb
def get_charts_tracks(dbconn=None,**keys):
	"""Return tracks ranked by scrobble count within keys['timerange'],
	optionally limited to keys['artist']."""
	since, to = keys.get('timerange').timestamps()
	if 'artist' in keys:
		return sqldb.count_scrobbles_by_track_of_artist(since=since,to=to,artist=keys['artist'],dbconn=dbconn)
	return sqldb.count_scrobbles_by_track(since=since,to=to,dbconn=dbconn)
|
2019-02-15 21:39:19 +03:00
|
|
|
|
2022-01-07 06:38:41 +03:00
|
|
|
@waitfordb
def get_pulse(dbconn=None,**keys):
	"""Return scrobble counts for every sub-range of the requested period."""
	range_keys = ("since","to","within","timerange","step","stepn","trail")
	rngs = ranges(**{k:v for k,v in keys.items() if k in range_keys})
	# forward all other filters (artist/track/...) to the counting query
	passthrough = {k:v for k,v in keys.items() if k != 'timerange'}

	return [
		{"range":rng,"scrobbles":get_scrobbles_num(timerange=rng,**passthrough,dbconn=dbconn)}
		for rng in rngs
	]
|
|
|
|
|
2022-01-07 06:53:35 +03:00
|
|
|
@waitfordb
def get_performance(dbconn=None,**keys):
	"""Return the chart rank of one entity for each sub-range of the period.

	Requires either keys['track'] or keys['artist']; yields a list of
	{"range": rng, "rank": rank-or-None} dicts.

	Raises:
		exceptions.MissingEntityParameter: if neither entity key is given.
	"""
	rngs = ranges(**{k:keys[k] for k in keys if k in ["since","to","within","timerange","step","stepn","trail"]})
	results = []

	for rng in rngs:
		if "track" in keys:
			# resolve to the canonical stored track so chart comparison works
			track = sqldb.get_track(sqldb.get_track_id(keys['track'],dbconn=dbconn),dbconn=dbconn)
			charts = get_charts_tracks(timerange=rng,dbconn=dbconn)
			# rank stays None when the entity does not chart in this range
			rank = None
			for c in charts:
				if c["track"] == track:
					rank = c["rank"]
					break
		elif "artist" in keys:
			artist = sqldb.get_artist(sqldb.get_artist_id(keys['artist'],dbconn=dbconn),dbconn=dbconn)
			# ^this is the most useless line in programming history
			# but I like consistency
			charts = get_charts_artists(timerange=rng,dbconn=dbconn)
			rank = None
			for c in charts:
				if c["artist"] == artist:
					rank = c["rank"]
					break
		else:
			raise exceptions.MissingEntityParameter()
		results.append({"range":rng,"rank":rank})

	return results
|
|
|
|
|
2022-01-07 06:53:35 +03:00
|
|
|
@waitfordb
def get_top_artists(dbconn=None,**keys):
	"""Return the #1 charting artist for each sub-range of the period.

	Ranges without scrobbles yield {"artist": None, "scrobbles": 0}.
	"""
	rngs = ranges(**{k:keys[k] for k in keys if k in ["since","to","within","timerange","step","stepn","trail"]})
	results = []

	for rng in rngs:
		try:
			res = get_charts_artists(timerange=rng,dbconn=dbconn)[0]
			results.append({"range":rng,"artist":res["artist"],"scrobbles":res["scrobbles"]})
		except IndexError:
			# empty charts for this range; catching only IndexError so real
			# database errors are no longer silently reported as "no artist"
			results.append({"range":rng,"artist":None,"scrobbles":0})

	return results
|
|
|
|
|
|
|
|
|
2022-01-07 06:53:35 +03:00
|
|
|
@waitfordb
def get_top_tracks(dbconn=None,**keys):
	"""Return the #1 charting track for each sub-range of the period.

	Ranges without scrobbles yield {"track": None, "scrobbles": 0}.
	"""
	rngs = ranges(**{k:keys[k] for k in keys if k in ["since","to","within","timerange","step","stepn","trail"]})
	results = []

	for rng in rngs:
		try:
			res = get_charts_tracks(timerange=rng,dbconn=dbconn)[0]
			results.append({"range":rng,"track":res["track"],"scrobbles":res["scrobbles"]})
		except IndexError:
			# empty charts for this range; catching only IndexError so real
			# database errors are no longer silently reported as "no track"
			results.append({"range":rng,"track":None,"scrobbles":0})

	return results
|
|
|
|
|
2022-01-07 23:47:55 +03:00
|
|
|
@waitfordb
def artist_info(dbconn=None,**keys):
	"""Return summary info for keys['artist']: scrobbles, all-time chart
	position, associated artists, medals and topweeks.

	Raises:
		exceptions.MissingEntityParameter: if no 'artist' key was given.
	"""

	artist = keys.get('artist')
	if artist is None: raise exceptions.MissingEntityParameter()

	# resolve to the canonical stored artist entry
	artist_id = sqldb.get_artist_id(artist,dbconn=dbconn)
	artist = sqldb.get_artist(artist_id,dbconn=dbconn)
	alltimecharts = get_charts_artists(timerange=alltime(),dbconn=dbconn)
	scrobbles = get_scrobbles_num(artist=artist,timerange=alltime(),dbconn=dbconn)
	#we cant take the scrobble number from the charts because that includes all countas scrobbles
	try:
		c = [e for e in alltimecharts if e["artist"] == artist][0]
		others = sqldb.get_associated_artists(artist,dbconn=dbconn)
		position = c["rank"]
		return {
			"artist":artist,
			"scrobbles":scrobbles,
			"position":position,
			"associated":others,
			"medals":{
				# years in which this artist finished 1st/2nd/3rd
				"gold": [year for year in cached.medals_artists if artist_id in cached.medals_artists[year]['gold']],
				"silver": [year for year in cached.medals_artists if artist_id in cached.medals_artists[year]['silver']],
				"bronze": [year for year in cached.medals_artists if artist_id in cached.medals_artists[year]['bronze']],
			},
			"topweeks":len([e for e in cached.weekly_topartists if e == artist_id]),
			"id":artist_id
		}
	# NOTE(review): broad except assumes only the chart lookup above can fail;
	# a narrower IndexError might be safer — confirm before changing
	except Exception:
		# if the artist isnt in the charts, they are not being credited and we
		# need to show information about the credited one
		replaceartist = sqldb.get_credited_artists(artist)[0]
		c = [e for e in alltimecharts if e["artist"] == replaceartist][0]
		position = c["rank"]
		return {
			"artist":artist,
			"replace":replaceartist,
			"scrobbles":scrobbles,
			"position":position,
			"id":artist_id
		}
|
2019-03-14 13:07:20 +03:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
2022-01-09 03:14:06 +03:00
|
|
|
@waitfordb
def track_info(dbconn=None,**keys):
	"""Return summary info for keys['track']: scrobbles, all-time chart
	position, certification, medals and topweeks.

	Raises:
		exceptions.MissingEntityParameter: if no 'track' key was given.
	"""

	track = keys.get('track')
	if track is None: raise exceptions.MissingEntityParameter()

	# resolve to the canonical stored track entry
	track_id = sqldb.get_track_id(track,dbconn=dbconn)
	track = sqldb.get_track(track_id,dbconn=dbconn)
	alltimecharts = get_charts_tracks(timerange=alltime(),dbconn=dbconn)
	#scrobbles = get_scrobbles_num(track=track,timerange=alltime())

	# take scrobble count and rank straight from the all-time charts
	c = [e for e in alltimecharts if e["track"] == track][0]
	scrobbles = c["scrobbles"]
	position = c["rank"]
	cert = None
	# certification thresholds are user-configurable
	threshold_gold, threshold_platinum, threshold_diamond = malojaconfig["SCROBBLES_GOLD","SCROBBLES_PLATINUM","SCROBBLES_DIAMOND"]
	if scrobbles >= threshold_diamond: cert = "diamond"
	elif scrobbles >= threshold_platinum: cert = "platinum"
	elif scrobbles >= threshold_gold: cert = "gold"

	return {
		"track":track,
		"scrobbles":scrobbles,
		"position":position,
		"medals":{
			# years in which this track finished 1st/2nd/3rd
			"gold": [year for year in cached.medals_tracks if track_id in cached.medals_tracks[year]['gold']],
			"silver": [year for year in cached.medals_tracks if track_id in cached.medals_tracks[year]['silver']],
			"bronze": [year for year in cached.medals_tracks if track_id in cached.medals_tracks[year]['bronze']],
		},
		"certification":cert,
		"topweeks":len([e for e in cached.weekly_toptracks if e == track_id]),
		"id":track_id
	}
|
2019-02-17 17:02:27 +03:00
|
|
|
|
|
|
|
|
|
|
|
|
2022-03-06 06:20:26 +03:00
|
|
|
def get_predefined_rulesets(dbconn=None):
	"""Scan the predefined-rules directory for .tsv rulesets and describe them.

	Returns a list of dicts with file, active, name, author and desc. Files
	with characters outside the whitelist, without an author_name pattern,
	or that cannot be read are skipped.
	"""
	validchars = "-_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
	rulesets = []

	for f in os.listdir(data_dir['rules']("predefined")):
		if not f.endswith(".tsv"): continue

		rawf = f.replace(".tsv","")
		# only allow simple filenames of the form author_name.tsv
		if not all(char in validchars for char in rawf): continue
		if "_" not in rawf: continue

		try:
			with open(data_dir['rules']("predefined",f)) as tsvfile:
				line1 = tsvfile.readline()
				line2 = tsvfile.readline()
		except Exception:
			continue

		# bugfix: strip the trailing newline readline() keeps, so names and
		# descriptions no longer carry a line break into the UI
		if "# NAME: " in line1:
			name = line1.replace("# NAME: ","").strip()
		else:
			name = rawf.split("_")[1]
		desc = line2.replace("# DESC: ","").strip() if "# DESC: " in line2 else ""
		author = rawf.split("_")[0]

		rulesets.append({
			"file":rawf,
			# a copy in the main rules folder means the ruleset is enabled
			"active":os.path.exists(data_dir['rules'](f)),
			"name":name,
			"author":author,
			"desc":desc
		})

	return rulesets
|
|
|
|
|
2019-11-22 01:14:53 +03:00
|
|
|
|
2018-12-12 21:37:59 +03:00
|
|
|
####
|
|
|
|
## Server operation
|
|
|
|
####
|
|
|
|
|
2019-03-14 13:07:20 +03:00
|
|
|
|
2022-01-03 05:04:36 +03:00
|
|
|
|
2022-01-03 04:08:02 +03:00
|
|
|
def start_db():
	"""Bring the database online: upgrade, load rules, seed caches.

	Order matters: dbstatus['healthy'] must be set before the cache
	updates below, since those go through @waitfordb-guarded functions.
	"""
	# Upgrade database
	from .. import upgrade
	upgrade.upgrade_db(sqldb.add_scrobbles)

	# Load temporary tables
	from . import associated
	associated.load_associated_rules()

	# unblock all @waitfordb-decorated API functions
	dbstatus['healthy'] = True

	# inform time module about begin of scrobbling
	try:
		firstscrobble = sqldb.get_scrobbles()[0]
		register_scrobbletime(firstscrobble['time'])
	except IndexError:
		# empty database: treat "now" as the beginning of scrobbling
		register_scrobbletime(int(datetime.datetime.now().timestamp()))

	# create cached information
	cached.update_medals()
	cached.update_weekly()

	dbstatus['complete'] = True
|
|
|
|
|
|
|
|
|
|
|
|
|
2019-03-14 13:07:20 +03:00
|
|
|
|
2019-07-09 21:27:36 +03:00
|
|
|
|
2019-03-14 13:07:20 +03:00
|
|
|
|
2018-12-12 21:37:59 +03:00
|
|
|
# Search for strings
def db_search(query,type=None):
	"""Search the database for *query*.

	type is "ARTIST" or "TRACK"; any other value returns an empty list.
	(Parameter name shadows the builtin but is part of the public API.)"""
	if type=="ARTIST":
		return sqldb.search_artist(query)
	if type=="TRACK":
		return sqldb.search_track(query)
	return []
|