mirror of https://github.com/krateng/maloja.git synced 2023-08-10 21:12:55 +03:00

Added basic scrobbling function

Krateng 2018-11-26 16:21:07 +01:00
parent 2f8720898b
commit bdf114d7fe
3 changed files with 29 additions and 1 deletion

View File

@@ -1,9 +1,21 @@
import re

def fullclean(artist,title):
    artists = cleanup(removespecial(artist))
    title = cleantitle(removespecial(title))
    (title,moreartists) = findartistsintitle(title)
    artists += moreartists
    return (artists,title)

def removespecial(s):
    return s.replace("\t","").replace("","").replace("\n","")

def cleanup(artiststr):
    if artiststr == "":
        return []
    artists = [artiststr]
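For orientation, a minimal self-contained sketch of what fullclean is meant to do once the module is complete. cleantitle and findartistsintitle are not part of this hunk, so the stubs below are assumptions for illustration only, not the project's actual cleanup rules:

import re

def cleantitle(title):
    # assumed: just trims whitespace; the real rules live elsewhere in cleanup.py
    return title.strip()

def findartistsintitle(title):
    # assumed: splits off a trailing "feat. X"; the real rules live elsewhere
    m = re.match(r"^(.*?)\s+feat\.\s+(.*)$", title)
    if m:
        return (m.group(1), [m.group(2)])
    return (title, [])

def removespecial(s):
    return s.replace("\t", "").replace("\n", "")

def cleanup(artiststr):
    if artiststr == "":
        return []
    return [artiststr]

def fullclean(artist, title):
    artists = cleanup(removespecial(artist))
    title = cleantitle(removespecial(title))
    (title, moreartists) = findartistsintitle(title)
    artists += moreartists
    return (artists, title)

print(fullclean("Artist A", "Some Song feat. Artist B"))
# -> (['Artist A', 'Artist B'], 'Some Song')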

View File

@@ -3,6 +3,7 @@ from importlib.machinery import SourceFileLoader
import waitress
import os
import datetime
import cleanup
SCROBBLES = [] # Format: tuple(track_ref,timestamp,saved)
@@ -104,6 +105,16 @@ def get_charts():
    #results = db_query(since=since,to=to)
    #return {"list":results}

@route("/newscrobble")
def post_scrobble():
    keys = request.query
    artists = keys.get("artist")
    title = keys.get("title")
    (artists,title) = cleanup.fullclean(artists,title)
    time = int(datetime.datetime.now(tz=datetime.timezone.utc).timestamp())
    createScrobble(artists,title,time)

# Starts the server
def runserver(DATABASE_PORT):
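A hypothetical client call against the new endpoint, assuming the database process is reachable locally on whatever DATABASE_PORT runserver was started with (42010 below is a made-up placeholder) and that /newscrobble reads artist and title from the query string as shown above:

import urllib.request, urllib.parse

DATABASE_PORT = 42010  # placeholder; the real value is whatever runserver() receives
query = urllib.parse.urlencode({"artist": "Artist A", "title": "Some Song feat. Artist B"})
urllib.request.urlopen("http://localhost:" + str(DATABASE_PORT) + "/newscrobble?" + query)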

View File

@@ -5,6 +5,7 @@ from importlib.machinery import SourceFileLoader
import _thread
import waitress
import urllib.request
import urllib.parse
MAIN_PORT = 12345
@@ -35,7 +36,11 @@ def mainpage():

@route("/db/<pth:path>")
def database(pth):
    contents = urllib.request.urlopen("http://localhost:" + str(DATABASE_PORT) + "/" + pth).read()
    keys = request.query
    keystring = "?"
    for k in keys:
        keystring += urllib.parse.quote(k) + "=" + urllib.parse.quote(keys[k]) + "&"
    contents = urllib.request.urlopen("http://localhost:" + str(DATABASE_PORT) + "/" + pth + keystring).read()
    response.content_type = "application/json"
    #print("Returning " + "http://localhost:" + str(DATABASE_PORT) + "/" + pth)
    return contents
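A minimal sketch of the query-string forwarding above in isolation, assuming the main process listens on MAIN_PORT (12345) and relays every query parameter of /db/<pth> to the database process; it just shows the string a scrobble request would produce:

import urllib.parse

keys = {"artist": "Artist A", "title": "Some Song"}  # stand-in for request.query
keystring = "?"
for k in keys:
    keystring += urllib.parse.quote(k) + "=" + urllib.parse.quote(keys[k]) + "&"

print(keystring)
# -> ?artist=Artist%20A&title=Some%20Song&
# so a request to http://localhost:12345/db/newscrobble?artist=Artist%20A&title=Some%20Song&
# would be relayed to http://localhost:<DATABASE_PORT>/newscrobble with the same parameters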