mirror of https://github.com/krateng/maloja.git
Compare commits
469 Commits
Author | SHA1 | Date |
---|---|---|
krateng | 39a42e915c | |
krateng | b8944b4954 | |
krateng | 9d9f3b500e | |
krateng | 72c58509a1 | |
krateng | 11a5cb7401 | |
krateng | b4c8a0d68b | |
krateng | 88403d2583 | |
krateng | 866d4ccd9b | |
FoxxMD | 3db51a94d6 | |
FoxxMD | a9c29f158e | |
krateng | ab8af32812 | |
FoxxMD | 7bc2ba0237 | |
FoxxMD | b8371347b7 | |
FoxxMD | 1e3c6597d4 | |
krateng | 37210995fa | |
Chris Newton | 94ae453133 | |
krateng | 93bbaac0e3 | |
krateng | 00a564c54d | |
krateng | 4330b0294b | |
krateng | b53141f065 | |
krateng | 3ae395f697 | |
krateng | 5466b6c37e | |
krateng | e85861fb79 | |
krateng | a611b78dbc | |
krateng | c3ed5f318d | |
krateng | 073448257a | |
krateng | d12229d8a5 | |
krateng | d8f53a56d2 | |
krateng | c8f9e9c391 | |
krateng | 185a5b3e87 | |
krateng | 95eaf0a3d6 | |
krateng | a7d286c90c | |
krateng | ddc78c5756 | |
krateng | a12253dc29 | |
krateng | 9eaeffca7e | |
krateng | db8389e6c1 | |
krateng | ef06f22622 | |
krateng | b333009684 | |
krateng | ebd78914f9 | |
krateng | 36d0e7bb8a | |
krateng | 91750db8ac | |
krateng | d5f2c254f3 | |
krateng | e3933e7dca | |
Karol Kosek | 9b10ca4a5d | |
Karol Kosek | 2ce2e2f682 | |
krateng | 9917210b66 | |
krateng | 5656f8b4c0 | |
badlandspray | 9ae14da397 | |
badlandspray | 3fd02c1675 | |
badlandspray | f7251c613c | |
badlandspray | d57bf33969 | |
krateng | a1b2261fa7 | |
krateng | 260c587248 | |
badlandspray | c1493255b7 | |
krateng | 97fc38f919 | |
krateng | 397d5e7c13 | |
krateng | 1eaba888c7 | |
krateng | 084c7d5a1e | |
krateng | 515fa69fce | |
krateng | ca30309450 | |
badlandspray | 705f4b4252 | |
krateng | ac498bde73 | |
krateng | f3a04c79b1 | |
krateng | f74d5679eb | |
krateng | 5eb838d5df | |
krateng | 96778709bd | |
krateng | a073930601 | |
krateng | 81f4e35258 | |
krateng | c16919eb1e | |
krateng | e116690640 | |
krateng | 8cb332b9fc | |
krateng | 3ede71fc79 | |
krateng | 77a0a0a41b | |
alim4r | ec02672a2e | |
alim4r | 5941123c52 | |
alim4r | 91a7aeb50d | |
krateng | 20aae955b2 | |
krateng | d83b44de6e | |
krateng | 8197548285 | |
krateng | 6171d1d2e1 | |
krateng | 0c948561a8 | |
krateng | 02c77a5e31 | |
krateng | bfa553bed0 | |
krateng | 3592571afd | |
krateng | c77b7c952f | |
krateng | 8a44d3def2 | |
krateng | cf04583122 | |
krateng | 8845f931df | |
krateng | 9c6c91f594 | |
krateng | 2c31df3c58 | |
krateng | 9c656ee90b | |
krateng | 938947d06c | |
krateng | ac3ca0b5e9 | |
krateng | 64d4036f55 | |
krateng | 6df363a763 | |
krateng | 7062c0b440 | |
krateng | ad50ee866c | |
krateng | 62abc31930 | |
krateng | c55e12dd43 | |
krateng | 3b156a73ff | |
krateng | 5b48c33a79 | |
krateng | 95f98370cf | |
krateng | e470e2e43f | |
krateng | 35f428ef69 | |
krateng | 342b8867d9 | |
krateng | bfc83fdbb0 | |
krateng | f359662cf3 | |
krateng | de286b58b9 | |
krateng | d5f5b48d85 | |
Jiri Travnicek | 00b3e6fc57 | |
Jiri Travnicek | e1074ba259 | |
krateng | 7c77474feb | |
krateng | 279499ad9f | |
krateng | dc1becd683 | |
krateng | b3d4cb7a15 | |
krateng | 4c1ba087ba | |
krateng | 0c94dc845b | |
Jiri Travnicek | 9589a6a5c9 | |
Jiri Travnicek | d54f2f8d35 | |
Jiri Travnicek | 082d11309b | |
krateng | 3cb72f46bc | |
krateng | d81f8374c9 | |
krateng | c86ae31ea9 | |
krateng | c3bb8ad322 | |
krateng | 6c5f08aa5a | |
krateng | 29a6a74c37 | |
krateng | 1bbb600481 | |
krateng | df07307730 | |
krateng | 74977b18cc | |
krateng | 029d0464b4 | |
krateng | db8bf60aef | |
krateng | 52ee456b1f | |
northys | 519c26b8d8 | |
krateng | e330678a05 | |
krateng | 0424fa7795 | |
krateng | 528d3565b7 | |
krateng | 56f7c18495 | |
krateng | 1dfda0086e | |
krateng | 7c9f6e9e2d | |
krateng | 529d0c8a5d | |
krateng | cf4b3cd68f | |
krateng | 9272c191d8 | |
krateng | d0ccf3d1ae | |
krateng | 10fef00592 | |
krateng | 1ed4af10ac | |
krateng | 11bc92ee8f | |
krateng | 98c791064d | |
krateng | d208290956 | |
krateng | 009d77a75e | |
krateng | e6992f1e90 | |
krateng | c52ad81fc2 | |
krateng | f5d1fbc576 | |
krateng | a8f8d86ec1 | |
krateng | e9189b8903 | |
krateng | 01d52d7e36 | |
krateng | 528c954de9 | |
krateng | 7c0ecda8a2 | |
krateng | 6e4e62755d | |
krateng | 646c290a37 | |
krateng | 28163348fa | |
alim4r | 495627f3f7 | |
alim4r | 6893fd745a | |
krateng | 91dae00851 | |
krateng | c0ff50b064 | |
krateng | 884e95dc58 | |
krateng | 8023c2d51c | |
krateng | df6bbebe31 | |
krateng | de625cc3c2 | |
krateng | 428d92a267 | |
krateng | 20092df02c | |
krateng | 713dbc34bb | |
krateng | 181406d339 | |
krateng | 9b5eb6f723 | |
krateng | 662923dd5e | |
krateng | ff71a9c526 | |
krateng | fbbd959295 | |
krateng | ce495176c1 | |
krateng | afc78e75b0 | |
alim4r | 85bb1f36cc | |
alim4r | c457b58ab8 | |
krateng | 62208bf668 | |
krateng | 53bc856222 | |
alim4r | b525252af1 | |
krateng | 43ec4c2c9e | |
krateng | 17be00f794 | |
krateng | fe21894c5e | |
krateng | 2bb3fa12b3 | |
krateng | 32a900cf37 | |
krateng | 397eaf668f | |
northys | 80ba4550c7 | |
krateng | b31e778d95 | |
krateng | 6e8cbe6a57 | |
krateng | 45ea7499b2 | |
krateng | 77c4dac7be | |
krateng | 61526fdc89 | |
krateng | ea6d70a650 | |
krateng | 57e66fdafd | |
krateng | 0d985ff706 | |
krateng | 27a9543da9 | |
krateng | c9d2527a98 | |
krateng | 977385a700 | |
krateng | c8522bd473 | |
krateng | 83e3157ad1 | |
krateng | 0525ff400b | |
krateng | 13856a2347 | |
krateng | e9bf65da34 | |
krateng | 5bf66ab270 | |
krateng | 206ebd58ea | |
krateng | a642c274e3 | |
ICTman1076 | 8ba973ed91 | |
krateng | ca726c774a | |
krateng | 33bbe61ece | |
krateng | 15f815ffe9 | |
krateng | fa2ce0c05f | |
krateng | b806be6e02 | |
krateng | 6601920f69 | |
krateng | f3f7dbd8ef | |
krateng | 263e7cd704 | |
krateng | 5b8e2debbc | |
krateng | bccd88acd4 | |
krateng | 371e73ac99 | |
krateng | c33fcf1dc1 | |
krateng | 98e1926613 | |
krateng | b255d424ee | |
krateng | 28d43d00cb | |
krateng | 7f9aa125af | |
krateng | 1d9247fc72 | |
krateng | c91cae9de1 | |
krateng | 1a977d9c0c | |
krateng | 62a654bfbf | |
krateng | 16d8ed0575 | |
krateng | 7c1d45f4af | |
krateng | 65fd57dceb | |
krateng | 29f722e3d3 | |
krateng | e6bb844ff9 | |
krateng | 4cffc9971d | |
krateng | bcb1d36b4a | |
krateng | 9d8752d052 | |
krateng | 741246a7c1 | |
Daniel Aleksandersen | c076518d76 | |
krateng | 4a8221f7a0 | |
krateng | 42579ad1f0 | |
krateng | ef312c6ba9 | |
krateng | dad1365627 | |
krateng | 1c2062c512 | |
krateng | 6b39ca8b19 | |
krateng | 700b81217c | |
krateng | 50cf592a75 | |
krateng | 0f39ecbf7e | |
krateng | d018a758c0 | |
krateng | f31c95228e | |
krateng | 2cf785faae | |
krateng | 189dfb58bc | |
krateng | cabfa298b9 | |
krateng | b8aa2a562e | |
krateng | cc4d40ae3f | |
krateng | 5a2856a682 | |
krateng | 2d2a7c2ee7 | |
krateng | 6635a9ac50 | |
krateng | df5eb499af | |
krateng | e52f35d65b | |
krateng | 97e1eae386 | |
krateng | e152a2edde | |
krateng | 871b3d289d | |
krateng | abde7e72c4 | |
krateng | 24dfa41ad9 | |
krateng | bceb0db09a | |
krateng | 87f1250629 | |
krateng | bb68afee12 | |
krateng | 233e49d087 | |
krateng | fe727dedee | |
krateng | 64f6836365 | |
krateng | 96933d5f18 | |
krateng | fba21b7128 | |
krateng | 1207475e4d | |
krateng | de5ae6408a | |
krateng | 45d481b1ed | |
krateng | 806f024f51 | |
krateng | 5952b8de4d | |
krateng | 3115d0372b | |
krateng | df996f7cb6 | |
krateng | 42cde8b647 | |
krateng | 9e7bbb6c20 | |
krateng | 28ba7b6ad0 | |
krateng | 0f59ffb288 | |
krateng | 7864c9f897 | |
krateng | 5524c0a70f | |
krateng | dc192d7444 | |
krateng | 4e33f808e4 | |
krateng | 700d99c5ae | |
krateng | 037f195803 | |
krateng | e9d8303763 | |
krateng | 387c40d18c | |
krateng | 40c0edb06f | |
krateng | 9f26cce34b | |
krateng | a142804bfe | |
krateng | 8d111b6de7 | |
krateng | 848f009774 | |
krateng | c9fa9956bb | |
krateng | 2deb5f0e36 | |
krateng | 2c73c81434 | |
krateng | c378c9301d | |
krateng | 74f6a931a4 | |
krateng | c982cbd1c4 | |
krateng | 6b4f2f713b | |
krateng | 4682914b88 | |
krateng | 781ed66357 | |
krateng | 2720dc1be5 | |
krateng | 34db81ccef | |
krateng | 6ca18b4471 | |
krateng | c676e0a5bf | |
krateng | 08bd352641 | |
krateng | de18ecff26 | |
krateng | 36f7ab1670 | |
krateng | 24c65d4acc | |
krateng | 1257768e33 | |
krateng | f4e42f9256 | |
krateng | a16c24281e | |
krateng | bd29c1e1ba | |
krateng | 6fc3a9cbf8 | |
krateng | abe658cc77 | |
krateng | 447d31b44e | |
krateng | ec5723d2b3 | |
krateng | 8ff7acfc38 | |
krateng | 0ae9091889 | |
krateng | e1ce80131a | |
krateng | b7781d27c3 | |
krateng | b41203bac7 | |
krateng | c647a57983 | |
krateng | 1b087e92db | |
krateng | 72b74eb27e | |
krateng | 2748d0e360 | |
krateng | e0af117805 | |
krateng | 153ab41ce7 | |
krateng | acc08693b3 | |
krateng | b510e52188 | |
krateng | ba5b0c8957 | |
krateng | c8f678b600 | |
krateng | 31c6fe6243 | |
krateng | b96f0cfc08 | |
krateng | ca2596cfc9 | |
krateng | c150a57090 | |
krateng | a833039ced | |
krateng | d8821efeeb | |
krateng | 3389d6c5f5 | |
krateng | 8ed3923851 | |
krateng | 608986b239 | |
krateng | 2a1f188e37 | |
krateng | 27cacbf658 | |
krateng | 3275e4ec5d | |
krateng | 5d582d39aa | |
krateng | 3108b368ef | |
krateng | 38f2173bde | |
krateng | e611d05c34 | |
krateng | 97aed7e73c | |
krateng | 04947cb97d | |
krateng | a598ba96de | |
krateng | 9f8e691924 | |
krateng | be4ed055ff | |
krateng | e22ef4d268 | |
krateng | c8ed894efb | |
krateng | e31c0dce57 | |
krateng | 3f098b6993 | |
krateng | c0bf8cb8ac | |
krateng | e7663138c1 | |
krateng | 66bd69b49e | |
krateng | fec6686ccc | |
krateng | dad027677e | |
krateng | fce450fac3 | |
krateng | 822895461e | |
krateng | 66d703b623 | |
krateng | 68fd6fe65f | |
krateng | cc24d48e65 | |
krateng | 1fff4eca6c | |
krateng | a443e6250e | |
krateng | c3e6dcd1eb | |
krateng | 634cb38dec | |
krateng | c944a3d937 | |
krateng | cc2b984080 | |
krateng | 34e0b0fd67 | |
krateng | 4dd7cf69a7 | |
krateng | 36b47368a3 | |
krateng | 30e973402b | |
krateng | 4e1b099547 | |
krateng | e3dc401ccf | |
krateng | 7b89d227a3 | |
krateng | c8e658af43 | |
krateng | 2930d40685 | |
krateng | 02e3f17594 | |
krateng | 57142bc327 | |
krateng | e398dd3ac1 | |
krateng | 48d88b208f | |
krateng | 827b05da8f | |
krateng | bdbb644d8e | |
krateng | aff56c9069 | |
krateng | af57103300 | |
krateng | a1ef5a7791 | |
krateng | fe0d06af7e | |
krateng | 86c4261a96 | |
krateng | a4e06413d8 | |
krateng | b83eee559f | |
krateng | 631fd941ec | |
krateng | c952fab440 | |
krateng | 65f3dac40a | |
krateng | 349e0bb7ea | |
krateng | 4cd16d73d3 | |
krateng | 2b75e1e50f | |
krateng | d38cf8d4be | |
krateng | 54a73243cc | |
krateng | d3258a7e63 | |
krateng | 0f473599a7 | |
krateng | bde06deb4f | |
krateng | a367c7c573 | |
krateng | 9e4274f209 | |
krateng | dba31867c6 | |
krateng | 6b05dde7c0 | |
krateng | 7b3e1bbaa6 | |
krateng | 900ce51af0 | |
krateng | e980efa731 | |
krateng | 42607cedb7 | |
krateng | deb35ec042 | |
krateng | ef594c2546 | |
krateng | 7e62ddebf6 | |
krateng | f645f73f1f | |
krateng | bfed3604c5 | |
krateng | cf43a9221a | |
krateng | 4c40fb0577 | |
krateng | 765ab493cb | |
krateng | a1f8e96ae4 | |
krateng | cc060d650b | |
krateng | d9f4021342 | |
krateng | b95d1e8b0c | |
krateng | 73564eccc1 | |
krateng | b53df53c40 | |
krateng | fee94a88c5 | |
krateng | 78c50d24d9 | |
krateng | 055dca4b6d | |
krateng | 8db87bdbc5 | |
krateng | a64d3610d3 | |
krateng | 034f8b32c7 | |
krateng | b325fab698 | |
krateng | eb9d29686b | |
krateng | b50afe70ea | |
krateng | eb9cd4aba4 | |
krateng | df07dd7b00 | |
krateng | 7021099e7b | |
krateng | 1df51748b6 | |
krateng | 632905a1c7 | |
krateng | 65a076c249 | |
krateng | 6611ca8705 | |
krateng | c120850d42 | |
krateng | 02ddeb4dc0 | |
krateng | f68fe04760 | |
krateng | 11bebce807 | |
krateng | 1824a8e5dc | |
krateng | 8a96a2c144 | |
krateng | 44a124e6ec | |
krateng | 80acf6275f | |
krateng | 40e733a054 | |
krateng | 9fc838e4c8 | |
krateng | 2f7f4c8567 | |
krateng | 03186bc49f | |
krateng | f88852ee6a | |
krateng | 0dd6cd9dd5 | |
krateng | 8ab42b844b | |
krateng | 03dd902e1b | |
krateng | c826b069e4 | |
krateng | 0233adedec | |
krateng | 9eb8dc0b47 |
|
@ -0,0 +1,8 @@
|
|||
*
|
||||
!maloja
|
||||
!container
|
||||
!Containerfile
|
||||
!requirements.txt
|
||||
!pyproject.toml
|
||||
!README.md
|
||||
!LICENSE
|
|
@ -0,0 +1 @@
|
|||
custom: ["https://flattr.com/@Krateng", "https://paypal.me/krateng"]
|
|
@ -13,10 +13,10 @@ jobs:
|
|||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out the repo
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v2
|
||||
uses: actions/setup-python@7f80679172b057fc5e90d70d197929d454754a5a
|
||||
with:
|
||||
python-version: '3.x'
|
||||
|
||||
|
@ -30,7 +30,7 @@ jobs:
|
|||
run: python -m build
|
||||
|
||||
- name: Publish to PyPI
|
||||
uses: pypa/gh-action-pypi-publish@release/v1
|
||||
uses: pypa/gh-action-pypi-publish@717ba43cfbb0387f6ce311b169a825772f54d295
|
||||
with:
|
||||
user: __token__
|
||||
password: ${{ secrets.PYPI_API_TOKEN }}
|
|
@ -12,7 +12,7 @@ jobs:
|
|||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Push Extension to Web Store
|
||||
uses: Klemensas/chrome-extension-upload-action@$VERSION
|
||||
uses: Klemensas/chrome-extension-upload-action@1e8ede84548583abf1a2a495f4242c4c51539337
|
||||
with:
|
||||
refresh-token: '${{ secrets.GOOGLE_REFRESHTOKEN }}'
|
||||
client-id: '${{ secrets.GOOGLE_CLIENTID }}'
|
|
@ -0,0 +1,76 @@
|
|||
name: Build and release docker image
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v*'
|
||||
|
||||
jobs:
|
||||
push_to_registry:
|
||||
name: Push Docker image to Docker Hub
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out the repo
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
|
||||
- name: Log in to Docker Hub
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: docker/login-action@dd4fa0671be5250ee6f50aedf4cb05514abda2c7
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_ACCESS_TOKEN }}
|
||||
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@f2a13332ac1ce8c0a71aeac48a150dbb1838ab67
|
||||
with:
|
||||
images: |
|
||||
${{ github.repository_owner }}/maloja
|
||||
# generate Docker tags based on the following events/attributes
|
||||
tags: |
|
||||
type=semver,pattern={{version}}
|
||||
flavor: |
|
||||
latest=true
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@27d0a4f181a40b142cce983c5393082c365d1480
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@94ab11c41e45d028884a99163086648e898eed25
|
||||
|
||||
- name: Cache Docker layers
|
||||
uses: actions/cache@48af2dc4a9e8278b89d7fa154b955c30c6aaab09
|
||||
with:
|
||||
path: /tmp/.buildx-cache
|
||||
key: ${{ runner.os }}-buildx-${{ github.sha }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-buildx-
|
||||
|
||||
- name: Build and push Docker image
|
||||
uses: docker/build-push-action@ac9327eae2b366085ac7f6a2d02df8aa8ead720a
|
||||
with:
|
||||
context: .
|
||||
file: Containerfile
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
platforms: linux/amd64,linux/arm64,linux/arm/v7
|
||||
cache-from: type=local,src=/tmp/.buildx-cache
|
||||
cache-to: type=local,dest=/tmp/.buildx-cache-new,mode=max
|
||||
|
||||
# Temp fix
|
||||
# https://github.com/docker/build-push-action/issues/252
|
||||
# https://github.com/moby/buildkit/issues/1896
|
||||
- name: Move cache
|
||||
run: |
|
||||
rm -rf /tmp/.buildx-cache
|
||||
mv /tmp/.buildx-cache-new /tmp/.buildx-cache
|
||||
|
||||
- name: Update Readme and short description
|
||||
uses: peter-evans/dockerhub-description@836d7e6aa8f6f32dce26f5a1dd46d3dc24997eae
|
||||
continue-on-error: true
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_ACCESS_TOKEN }}
|
||||
repository: krateng/maloja
|
||||
short-description: ${{ github.event.repository.description }}
|
|
@ -1,48 +0,0 @@
|
|||
name: Publish to Dockerhub
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v*'
|
||||
|
||||
jobs:
|
||||
push_to_registry:
|
||||
name: Push Docker image to Docker Hub
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out the repo
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Log in to Docker Hub
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_ACCESS_TOKEN }}
|
||||
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@v3
|
||||
with:
|
||||
images: krateng/maloja
|
||||
# generate Docker tags based on the following events/attributes
|
||||
tags: |
|
||||
type=semver,pattern={{version}}
|
||||
flavor: |
|
||||
latest=true
|
||||
|
||||
- name: Build and push Docker image
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: .
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
|
||||
- name: Update Readme and short description
|
||||
uses: peter-evans/dockerhub-description@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_PASSWORD }}
|
||||
repository: krateng/maloja
|
||||
short-description: ${{ github.event.repository.description }}
|
|
@ -11,10 +11,10 @@ jobs:
|
|||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out the repo
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v2
|
||||
uses: actions/setup-python@7f80679172b057fc5e90d70d197929d454754a5a
|
||||
with:
|
||||
python-version: '3.x'
|
||||
|
||||
|
@ -25,7 +25,7 @@ jobs:
|
|||
run: python -m build
|
||||
|
||||
- name: Publish to PyPI
|
||||
uses: pypa/gh-action-pypi-publish@release/v1
|
||||
uses: pypa/gh-action-pypi-publish@717ba43cfbb0387f6ce311b169a825772f54d295
|
||||
with:
|
||||
user: __token__
|
||||
password: ${{ secrets.PYPI_API_TOKEN }}
|
||||
|
|
|
@ -1,20 +1,14 @@
|
|||
# generic temporary / dev files
|
||||
# temporary / generated files
|
||||
*.pyc
|
||||
*.note
|
||||
*.xcf
|
||||
/nohup.out
|
||||
/*-old
|
||||
/*.yml
|
||||
/pylintrc
|
||||
.venv/*
|
||||
|
||||
# build
|
||||
# environments / builds
|
||||
.venv/*
|
||||
testdata*
|
||||
/dist
|
||||
/build
|
||||
/*.egg-info
|
||||
|
||||
# currently not using
|
||||
/screenshot*.png
|
||||
|
||||
# only for development, normally external
|
||||
/doreah
|
||||
# dev files
|
||||
*.xcf
|
||||
*.note
|
||||
*-old
|
||||
|
|
|
@ -0,0 +1,173 @@
|
|||
# Scrobbling
|
||||
|
||||
Scrobbling can be done with the native API, see [below](#submitting-a-scrobble).
|
||||
In order to scrobble from a wide selection of clients, you can also use Maloja's standard-compliant APIs with the following settings:
|
||||
|
||||
GNU FM |
|
||||
------ | ---------
|
||||
Gnukebox URL | Your Maloja URL followed by `/apis/audioscrobbler`
|
||||
Username | Doesn't matter
|
||||
Password | Any of your API keys
|
||||
|
||||
ListenBrainz |
|
||||
------ | ---------
|
||||
API URL | Your Maloja URL followed by `/apis/listenbrainz`
|
||||
Username | Doesn't matter
|
||||
Auth Token | Any of your API keys
|
||||
|
||||
Audioscrobbler v1.2 |
|
||||
------ | ---------
|
||||
Server URL | Your Maloja URL followed by `/apis/audioscrobbler_legacy`
|
||||
Username | Doesn't matter
|
||||
Password | Any of your API keys
|
||||
|
||||
| :warning: | Note that these are the base URLs - some scrobblers ask you for the full endpoint instead. |
|
||||
|---------------|:------------------------|
|
||||
|
||||
## Scrobbling Guideline
|
||||
|
||||
Maloja makes no assumptions about scrobbling behaviour. The clients should decide when and whether a play is scrobbled - the server will accept it as long as it contains all necessary data. However, a general guideline is:
|
||||
|
||||
* As soon as a track has been played for 50% of its length or 4 minutes, it should be counted as a scrobble
|
||||
* That scrobble should be submitted when the play has ended in order to know its duration
|
||||
* If the total play duration is enough to count as a scrobble, but not longer than the total track length + enough for a second scrobble, it should be submitted as a scrobble with the according duration
|
||||
* If the duration exceeds this value, the first scrobble should be submitted as a scrobble with the duration of the full track length, while the second scrobble is queued up following the above suggestions in regards to remaining time
|
||||
|
||||
|
||||
<table>
|
||||
<tr><td>:memo: Example </td><tr>
|
||||
<tr><td>
|
||||
|
||||
The user starts playing '(Fine Layers of) Slaysenflite', which is exactly 3:00 minutes long.
|
||||
* If the user ends the play after 1:22, no scrobble is submitted
|
||||
* If the user ends the play after 2:06, a scrobble with `"duration":126` is submitted
|
||||
* If the user jumps back several times and ends the play after 3:57, a scrobble with `"duration":237` is submitted
|
||||
* If the user jumps back several times and ends the play after 4:49, two scrobbles with `"duration":180` and `"duration":109` are submitted
|
||||
|
||||
</td></tr>
|
||||
<table>
|
||||
|
||||
|
||||
# API Documentation
|
||||
|
||||
The native Maloja API is reachable at `/apis/mlj_1`. Endpoints are listed on `/api_explorer`.
|
||||
|
||||
All endpoints return JSON data. POST request can be made with query string or form data arguments, but this is discouraged - JSON should be used whenever possible.
|
||||
|
||||
No application should ever rely on the non-existence of fields in the JSON data - i.e., additional fields can be added at any time without this being considered a breaking change. Existing fields should usually not be removed or changed, but it is always a good idea to add basic handling for missing fields.
|
||||
|
||||
## Submitting a Scrobble
|
||||
|
||||
The POST endpoint `/newscrobble` is used to submit new scrobbles. These use a flat JSON structure with the following fields:
|
||||
|
||||
| Key | Type | Description |
|
||||
| --- | --- | --- |
|
||||
| `artists` | List(String) | Track artists |
|
||||
| `title` | String | Track title |
|
||||
| `album` | String | Name of the album (Optional) |
|
||||
| `albumartists` | List(String) | Album artists (Optional) |
|
||||
| `duration` | Integer | How long the song was listened to in seconds (Optional) |
|
||||
| `length` | Integer | Actual length of the full song in seconds (Optional) |
|
||||
| `time` | Integer | Timestamp of the listen if it was not at the time of submitting (Optional) |
|
||||
| `nofix` | Boolean | Skip server-side metadata fixing (Optional) |
|
||||
|
||||
## General Structure
|
||||
|
||||
The API is not fully consistent in order to ensure backwards-compatibility. Refer to the individual endpoints.
|
||||
Generally, most endpoints follow this structure:
|
||||
|
||||
| Key | Type | Description |
|
||||
| --- | --- | --- |
|
||||
| `status` | String | Status of the request. Can be `success`, `ok`, `error`, `failure`, `no_operation` |
|
||||
| `error` | Mapping | Details about the error if one occured. |
|
||||
| `warnings` | List | Any warnings that did not result in failure, but should be noted. Field is omitted if there are no warnings! |
|
||||
| `desc` | String | Human-readable feedback. This can be shown directly to the user if desired. |
|
||||
| `list` | List | List of returned [entities](#entity-structure) |
|
||||
|
||||
|
||||
Both errors and warnings have the following structure:
|
||||
|
||||
| Key | Type | Description |
|
||||
| --- | --- | --- |
|
||||
| `type` | String | Name of the error or warning type |
|
||||
| `value` | varies | Specific data for this error or warning instance |
|
||||
| `desc` | String | Human-readable error or warning description. This can be shown directly to the user if desired. |
|
||||
|
||||
|
||||
## Entity Structure
|
||||
|
||||
Whenever a list of entities is returned, they have the following fields:
|
||||
|
||||
### Scrobble
|
||||
|
||||
| Key | Type | Description |
|
||||
| --- | --- | --- |
|
||||
| `time` | Integer | Timestamp of the Scrobble in UTC |
|
||||
| `track` | Mapping | The [track](#track) being scrobbled |
|
||||
| `duration` | Integer | How long the track was played for in seconds |
|
||||
| `origin` | String | Client that submitted the scrobble, or import source |
|
||||
|
||||
|
||||
<table>
|
||||
<tr><td>:memo: Example </td><tr>
|
||||
<tr><td>
|
||||
|
||||
```json
|
||||
{
|
||||
"time": 1650684324,
|
||||
"track": {
|
||||
"artists": ["Jennie Kim","HyunA","LE","SunMi"],
|
||||
"title": "Wow Thing",
|
||||
"length":200
|
||||
},
|
||||
"duration": 196,
|
||||
"origin": "client:navidrome_desktop"
|
||||
}
|
||||
```
|
||||
|
||||
</tr></td>
|
||||
</table>
|
||||
|
||||
|
||||
|
||||
### Track
|
||||
|
||||
| Key | Type | Description |
|
||||
| --- | --- | --- |
|
||||
| `artists` | List | The [artists](#artist) credited with the track |
|
||||
| `title` | String | The title of the track |
|
||||
| `length` | Integer | The full length of the track in seconds |
|
||||
|
||||
<table>
|
||||
<tr><td>:memo: Example </td><tr>
|
||||
<tr><td>
|
||||
|
||||
```json
|
||||
{
|
||||
"artists": ["Blackpink","Chou Tzuyu"],
|
||||
"title": "MORE",
|
||||
"length": 171
|
||||
}
|
||||
```
|
||||
|
||||
</tr></td>
|
||||
</table>
|
||||
|
||||
|
||||
|
||||
### Artist
|
||||
|
||||
Artists are just represented as raw Strings.
|
||||
|
||||
**Example**
|
||||
|
||||
<table>
|
||||
<tr><td>:memo: Example </td><tr>
|
||||
<tr><td>
|
||||
|
||||
```json
|
||||
"Red Velvet"
|
||||
```
|
||||
|
||||
</tr></td>
|
||||
</table>
|
|
@ -0,0 +1,36 @@
|
|||
# Contributor: Johannes Krattenmacher <maloja@dev.krateng.ch>
|
||||
# Maintainer: Johannes Krattenmacher <maloja@dev.krateng.ch>
|
||||
pkgname=maloja
|
||||
pkgver=3.0.0-dev
|
||||
pkgrel=0
|
||||
pkgdesc="Self-hosted music scrobble database"
|
||||
url="https://github.com/krateng/maloja"
|
||||
arch="noarch"
|
||||
license="GPL-3.0"
|
||||
depends="python3 tzdata"
|
||||
pkgusers=$pkgname
|
||||
pkggroups=$pkgname
|
||||
depends_dev="gcc g++ python3-dev libxml2-dev libxslt-dev libffi-dev libc-dev py3-pip linux-headers"
|
||||
makedepends="$depends_dev"
|
||||
source="
|
||||
$pkgname-$pkgver.tar.gz::https://github.com/krateng/maloja/archive/refs/tags/v$pkgver.tar.gz
|
||||
"
|
||||
builddir="$srcdir"/$pkgname-$pkgver
|
||||
|
||||
|
||||
|
||||
build() {
|
||||
cd $builddir
|
||||
python3 -m build .
|
||||
pip3 install dist/*.tar.gz
|
||||
}
|
||||
|
||||
package() {
|
||||
mkdir -p /etc/$pkgname || return 1
|
||||
mkdir -p /var/lib/$pkgname || return 1
|
||||
mkdir -p /var/cache/$pkgname || return 1
|
||||
mkdir -p /var/logs/$pkgname || return 1
|
||||
}
|
||||
|
||||
# TODO
|
||||
sha512sums="a674eaaaa248fc2b315514d79f9a7a0bac6aa1582fe29554d9176e8b551e8aa3aa75abeebdd7713e9e98cc987e7bd57dc7a5e9a2fb85af98b9c18cb54de47bf7 $pkgname-${pkgver}.tar.gz"
|
|
@ -0,0 +1,74 @@
|
|||
FROM lsiobase/alpine:3.17 as base
|
||||
|
||||
WORKDIR /usr/src/app
|
||||
|
||||
|
||||
|
||||
COPY --chown=abc:abc ./requirements.txt ./requirements.txt
|
||||
|
||||
# based on https://github.com/linuxserver/docker-pyload-ng/blob/main/Dockerfile
|
||||
# everything but the app installation is run in one command so we can purge
|
||||
# all build dependencies and cache in the same layer
|
||||
# it may be possible to decrease image size slightly by using build stage and
|
||||
# copying all site-packages to runtime stage but the image is already pretty small
|
||||
RUN \
|
||||
echo "**** install build packages ****" && \
|
||||
apk add --no-cache --virtual=build-deps \
|
||||
gcc \
|
||||
g++ \
|
||||
python3-dev \
|
||||
libxml2-dev \
|
||||
libxslt-dev \
|
||||
libffi-dev \
|
||||
libc-dev \
|
||||
py3-pip \
|
||||
linux-headers && \
|
||||
echo "**** install runtime packages ****" && \
|
||||
apk add --no-cache \
|
||||
python3 \
|
||||
py3-lxml \
|
||||
tzdata && \
|
||||
echo "**** install pip dependencies ****" && \
|
||||
python3 -m ensurepip && \
|
||||
pip3 install -U --no-cache-dir \
|
||||
pip \
|
||||
wheel && \
|
||||
echo "**** install maloja requirements ****" && \
|
||||
pip3 install --no-cache-dir -r requirements.txt && \
|
||||
echo "**** cleanup ****" && \
|
||||
apk del --purge \
|
||||
build-deps && \
|
||||
rm -rf \
|
||||
/tmp/* \
|
||||
${HOME}/.cache
|
||||
|
||||
# actual installation in extra layer so we can cache the stuff above
|
||||
|
||||
COPY --chown=abc:abc . .
|
||||
|
||||
RUN \
|
||||
echo "**** install maloja ****" && \
|
||||
apk add --no-cache --virtual=install-deps \
|
||||
py3-pip && \
|
||||
pip3 install /usr/src/app && \
|
||||
apk del --purge \
|
||||
install-deps && \
|
||||
rm -rf \
|
||||
/tmp/* \
|
||||
${HOME}/.cache
|
||||
|
||||
|
||||
|
||||
COPY container/root/ /
|
||||
|
||||
ENV \
|
||||
# Docker-specific configuration
|
||||
MALOJA_SKIP_SETUP=yes \
|
||||
PYTHONUNBUFFERED=1 \
|
||||
# Prevents breaking change for previous container that ran maloja as root
|
||||
# On linux hosts (non-podman rootless) these variables should be set to the
|
||||
# host user that should own the host folder bound to MALOJA_DATA_DIRECTORY
|
||||
PUID=0 \
|
||||
PGID=0
|
||||
|
||||
EXPOSE 42010
|
|
@ -11,10 +11,10 @@ Clone the repository and enter it.
|
|||
|
||||
To avoid cluttering your system, consider using a [virtual environment](https://docs.python.org/3/tutorial/venv.html).
|
||||
|
||||
Your system needs several packages installed. On Alpine, this can be done with
|
||||
Your system needs several packages installed. For supported distributions, this can be done with e.g.
|
||||
|
||||
```console
|
||||
sh ./install/install_dependencies.sh
|
||||
sh ./install/install_dependencies_alpine.sh
|
||||
```
|
||||
|
||||
For other distros, try to find the equivalents of the packages listed or simply check your error output.
|
||||
|
@ -41,6 +41,18 @@ You can also build the package with
|
|||
```console
|
||||
pip install .
|
||||
```
|
||||
|
||||
|
||||
## Docker
|
||||
|
||||
You can also always build and run the server with
|
||||
|
||||
```console
|
||||
sh ./dev/run_docker.sh
|
||||
```
|
||||
|
||||
This will use the directory `testdata`.
|
||||
|
||||
## Further help
|
||||
|
||||
Feel free to [ask](https://github.com/krateng/maloja/discussions) if you need some help!
|
||||
|
|
30
Dockerfile
30
Dockerfile
|
@ -1,30 +0,0 @@
|
|||
FROM python:3-alpine
|
||||
|
||||
# Based on the work of Jonathan Boeckel <jonathanboeckel1996@gmail.com>
|
||||
# https://gitlab.com/Joniator/docker-maloja
|
||||
# https://github.com/Joniator
|
||||
|
||||
WORKDIR /usr/src/app
|
||||
|
||||
|
||||
# Copy project into dir
|
||||
COPY . .
|
||||
|
||||
RUN \
|
||||
# Build dependencies (This will pipe all packages from the file)
|
||||
sed 's/#.*//' ./install/dependencies_build.txt | xargs apk add --no-cache --virtual .build-deps && \
|
||||
# Runtime dependencies (Same)
|
||||
sed 's/#.*//' ./install/dependencies_run.txt | xargs apk add --no-cache && \
|
||||
# Python dependencies
|
||||
pip3 install --no-cache-dir -r requirements.txt && \
|
||||
# Local project install
|
||||
pip3 install /usr/src/app && \
|
||||
# Remove build dependencies
|
||||
apk del .build-deps
|
||||
|
||||
# expected behavior for a default setup is for maloja to "just work"
|
||||
ENV MALOJA_SKIP_SETUP=yes
|
||||
|
||||
EXPOSE 42010
|
||||
# use exec form for better signal handling https://docs.docker.com/engine/reference/builder/#entrypoint
|
||||
ENTRYPOINT ["maloja", "run"]
|
|
@ -1,31 +0,0 @@
|
|||
FROM python:3-alpine
|
||||
|
||||
# Based on the work of Jonathan Boeckel <jonathanboeckel1996@gmail.com>
|
||||
# https://gitlab.com/Joniator/docker-maloja
|
||||
# https://github.com/Joniator
|
||||
|
||||
ARG MALOJA_RELEASE
|
||||
WORKDIR /usr/src/app
|
||||
|
||||
# Copy needed information
|
||||
COPY ./install ./install
|
||||
|
||||
RUN \
|
||||
# Build dependencies (This will pipe all packages from the file)
|
||||
sed 's/#.*//' ./install/dependencies_build.txt | xargs apk add --no-cache --virtual .build-deps && \
|
||||
# Runtime dependencies (Same)
|
||||
sed 's/#.*//' ./install/dependencies_run.txt | xargs apk add --no-cache && \
|
||||
|
||||
|
||||
# PyPI install
|
||||
pip3 install malojaserver==$MALOJA_RELEASE && \
|
||||
# Remove build dependencies
|
||||
apk del .build-deps
|
||||
|
||||
# expected behavior for a default setup is for maloja to "just work"
|
||||
ENV MALOJA_SKIP_SETUP=yes
|
||||
ENV MAGICK_HOME=/usr
|
||||
|
||||
EXPOSE 42010
|
||||
# use exec form for better signal handling https://docs.docker.com/engine/reference/builder/#entrypoint
|
||||
ENTRYPOINT ["maloja", "run"]
|
147
README.md
147
README.md
|
@ -1,36 +1,32 @@
|
|||
# Maloja
|
||||
|
||||
[![](https://img.shields.io/github/v/tag/krateng/maloja?label=GitHub&style=for-the-badge)](https://github.com/krateng/maloja)
|
||||
[![](https://img.shields.io/pypi/v/malojaserver?label=PyPI&style=for-the-badge)](https://pypi.org/project/malojaserver/)
|
||||
[![](https://img.shields.io/docker/v/krateng/maloja?label=Docker&style=for-the-badge)](https://hub.docker.com/r/krateng/maloja)
|
||||
[![](https://img.shields.io/github/v/tag/krateng/maloja?label=GitHub&style=for-the-badge&logo=github&logoColor=white)](https://github.com/krateng/maloja)
|
||||
[![](https://img.shields.io/pypi/v/malojaserver?label=PyPI&style=for-the-badge&logo=pypi&logoColor=white)](https://pypi.org/project/malojaserver/)
|
||||
[![](https://img.shields.io/docker/v/krateng/maloja?label=Dockerhub&style=for-the-badge&logo=docker&logoColor=white)](https://hub.docker.com/r/krateng/maloja)
|
||||
|
||||
[![](https://img.shields.io/pypi/l/malojaserver?style=for-the-badge)](https://github.com/krateng/maloja/blob/master/LICENSE)
|
||||
[![](https://img.shields.io/codeclimate/maintainability/krateng/maloja?style=for-the-badge)](https://codeclimate.com/github/krateng/maloja)
|
||||
|
||||
Simple self-hosted music scrobble database to create personal listening statistics. No recommendations, no social network, no nonsense.
|
||||
|
||||
You can check [my own Maloja page](https://maloja.krateng.ch) to see what it looks like (it's down fairly often because I use it as staging environment, that doesn't reflect the stability of the Maloja software!).
|
||||
![screenshot](https://raw.githubusercontent.com/krateng/maloja/master/screenshot.png)
|
||||
|
||||
You can check [my own Maloja page](https://maloja.krateng.ch) as an example instance.
|
||||
|
||||
> **IMPORTANT**: With the update 2.7, the backend has been reworked to use a password. With a normal installation, you are asked to provide a password on setup. If you use docker or skip the setup for other reasons, you need to provide the environment variable `MALOJA_FORCE_PASSWORD` on first startup.
|
||||
|
||||
> **IMPORTANT**: With the update 2.9, the API endpoints have changed. All old endpoints should be redirected properly, but I recommend updating your clients to use the new ones.
|
||||
|
||||
## Table of Contents
|
||||
* [Features](#features)
|
||||
* [How to install](#how-to-install)
|
||||
* [LXC / VM / Bare Metal](#lxc--vm--bare-metal)
|
||||
* [Docker](#docker)
|
||||
* [Requirements](#requirements)
|
||||
* [PyPI](#pypi)
|
||||
* [From Source](#from-source)
|
||||
* [Docker / Podman](#docker--podman)
|
||||
* [Extras](#extras)
|
||||
* [How to use](#how-to-use)
|
||||
* [Basic control](#basic-control)
|
||||
* [Data](#data)
|
||||
* [Customization](#customization)
|
||||
* [How to scrobble](#how-to-scrobble)
|
||||
* [Native support](#native-support)
|
||||
* [Native API](#native-api)
|
||||
* [Standard-compliant API](#standard-compliant-api)
|
||||
* [Manual](#manual)
|
||||
* [How to extend](#how-to-extend)
|
||||
|
||||
## Features
|
||||
|
@ -47,27 +43,25 @@ You can check [my own Maloja page](https://maloja.krateng.ch) to see what it loo
|
|||
|
||||
## How to install
|
||||
|
||||
### LXC / VM / Bare Metal
|
||||
### Requirements
|
||||
|
||||
Maloja should run on any x86 or ARM machine that runs Python.
|
||||
|
||||
I can support you with issues best if you use **Alpine Linux**.
|
||||
|
||||
#### From PyPI
|
||||
Your CPU should have a single core passmark score of at the very least 1500. 500 MB RAM should give you a decent experience, but performance will benefit greatly from up to 2 GB.
|
||||
|
||||
You can download the included script `install_alpine.sh` and run it with
|
||||
### PyPI
|
||||
|
||||
```console
|
||||
sh install_alpine.sh
|
||||
```
|
||||
|
||||
You can also simply call the install command
|
||||
You can install Maloja with
|
||||
|
||||
```console
|
||||
pip install malojaserver
|
||||
```
|
||||
|
||||
directly (e.g. if you're not on Alpine) - make sure you have all the system packages installed.
|
||||
To make sure all dependencies are installed, you can also use one of the included scripts in the `install` folder.
|
||||
|
||||
#### From Source
|
||||
### From Source
|
||||
|
||||
Clone this repository and enter the directory with
|
||||
|
||||
|
@ -76,38 +70,54 @@ Clone this repository and enter the directory with
|
|||
cd maloja
|
||||
```
|
||||
|
||||
Then install all the requirements and build the package:
|
||||
Then install all the requirements and build the package, e.g.:
|
||||
|
||||
```console
|
||||
sh ./install/install_dependencies.sh
|
||||
sh ./install/install_dependencies_alpine.sh
|
||||
pip install -r requirements.txt
|
||||
pip install .
|
||||
```
|
||||
|
||||
### Docker
|
||||
### Docker / Podman
|
||||
|
||||
Pull the [latest image](https://hub.docker.com/r/krateng/maloja) or check out the repository and use the included Dockerfile.
|
||||
Pull the [latest image](https://hub.docker.com/r/krateng/maloja) or check out the repository and use the included Containerfile.
|
||||
|
||||
Of note are these settings which should be passed as environmental variables to the container:
|
||||
|
||||
* `MALOJA_DATA_DIRECTORY` -- Set the directory in the container where configuration folders/files should be located
|
||||
* Mount a [volume](https://docs.docker.com/engine/reference/builder/#volume) to the specified directory to access these files outside the container (and to make them persistent)
|
||||
* `MALOJA_FORCE_PASSWORD` -- Set an admin password for maloja
|
||||
* `MALOJA_HOST` (Optional) -- Maloja uses IPv6 by default for the host. Set this to `0.0.0.0` if you cannot initially access the webserver
|
||||
|
||||
You must also publish a port on your host machine to bind to the container's web port (default 42010)
|
||||
You must publish a port on your host machine to bind to the container's web port (default 42010). The container uses IPv4 per default.
|
||||
|
||||
An example of a minimum run configuration when accessing maloja from an IPv4 address IE `localhost:42010`:
|
||||
An example of a minimum run configuration to access maloja via `localhost:42010`:
|
||||
|
||||
```console
|
||||
docker run -p 42010:42010 -e MALOJA_HOST=0.0.0.0 maloja
|
||||
docker run -p 42010:42010 -v $PWD/malojadata:/mljdata -e MALOJA_DATA_DIRECTORY=/mljdata krateng/maloja
|
||||
```
|
||||
|
||||
#### Linux Host
|
||||
|
||||
**NOTE:** If you are using [rootless containers with Podman](https://developers.redhat.com/blog/2020/09/25/rootless-containers-with-podman-the-basics#why_podman_) this DOES NOT apply to you.
|
||||
|
||||
If you are running Docker on a **Linux Host** you should specify `user:group` ids of the user who owns the folder on the host machine bound to `MALOJA_DATA_DIRECTORY` in order to avoid [docker file permission problems.](https://ikriv.com/blog/?p=4698) These can be specified using the [environmental variables **PUID** and **PGID**.](https://docs.linuxserver.io/general/understanding-puid-and-pgid)
|
||||
|
||||
To get the UID and GID for the current user run these commands from a terminal:
|
||||
|
||||
* `id -u` -- prints UID (EX `1000`)
|
||||
* `id -g` -- prints GID (EX `1001`)
|
||||
|
||||
The modified run command with these variables would look like:
|
||||
|
||||
```console
|
||||
docker run -e PUID=1000 -e PGID=1001 -p 42010:42010 -v $PWD/malojadata:/mljdata -e MALOJA_DATA_DIRECTORY=/mljdata krateng/maloja
|
||||
```
|
||||
|
||||
### Extras
|
||||
|
||||
* If you'd like to display images, you will need API keys for [Last.fm](https://www.last.fm/api/account/create) and [Spotify](https://developer.spotify.com/dashboard/applications). These are free of charge!
|
||||
|
||||
* Put your server behind a reverse proxy for SSL encryption. Make sure that you're proxying to the IPv6 address unless you changed your settings to use IPv4.
|
||||
* Put your server behind a reverse proxy for SSL encryption. Make sure that you're proxying to the IPv6 or IPv4 address according to your settings.
|
||||
|
||||
* You can set up a cronjob to start your server on system boot, and potentially restart it on a regular basis:
|
||||
|
||||
|
@ -138,11 +148,24 @@ If you need to run the server in the foreground, use
|
|||
|
||||
### Data
|
||||
|
||||
* If you would like to import all your previous last.fm scrobbles, use [benfoxall's website](https://benjaminbenben.com/lastfm-to-csv/) ([GitHub page](https://github.com/benfoxall/lastfm-to-csv)). Use the command `maloja import *filename*` to import the downloaded file into Maloja.
|
||||
If you would like to import your previous scrobbles, use the command `maloja import *filename*`. This works on:
|
||||
|
||||
* To backup your data, run `maloja backup` or, to only backup essential data (no artwork etc), `maloja backup -l minimal`.
|
||||
* a Last.fm export generated by [benfoxall's website](https://benjaminbenben.com/lastfm-to-csv/) ([GitHub page](https://github.com/benfoxall/lastfm-to-csv))
|
||||
* an official [Spotify data export file](https://www.spotify.com/us/account/privacy/)
|
||||
* an official [ListenBrainz export file](https://listenbrainz.org/profile/export/)
|
||||
* the export of another Maloja instance
|
||||
|
||||
* To fix your database (e.g. after you've added new rules), use `maloja fix`.
|
||||
⚠️ Never import your data while maloja is running. When you need to do import inside docker container start it in shell mode instead and perform import before starting the container as mentioned above.
|
||||
|
||||
```console
|
||||
docker run -it --entrypoint sh -v $PWD/malojadata:/mljdata -e MALOJA_DATA_DIRECTORY=/mljdata krateng/maloja
|
||||
cd /mljdata
|
||||
maloja import my_last_fm_export.csv
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
To backup your data, run `maloja backup`, optional with `--include_images`.
|
||||
|
||||
### Customization
|
||||
|
||||
|
@ -155,59 +178,11 @@ If you need to run the server in the foreground, use
|
|||
|
||||
## How to scrobble
|
||||
|
||||
You can set up any amount of API keys in the file `authenticated_machines.tsv` in the `/etc/maloja/clients` folder. It is recommended to define a different API key for every scrobbler you use.
|
||||
You can set up any amount of API keys in the file `apikeys.yml` in your configuration folder (or via the web interface). It is recommended to define a different API key for every scrobbler you use.
|
||||
|
||||
### Native support
|
||||
Some scrobbler clients support Maloja's native API. You can also use any scrobbler that allows you to set a custom Listenbrainz or GNUFM server. See [API.md](API.md) for details.
|
||||
|
||||
These solutions allow you to directly setup scrobbling to your Maloja server:
|
||||
* [Tauon](https://tauonmusicbox.rocks) Desktop Player
|
||||
* [Web Scrobbler](https://github.com/web-scrobbler/web-scrobbler) Browser Extension
|
||||
* [Multi Scrobbler](https://github.com/FoxxMD/multi-scrobbler) Desktop Application
|
||||
* [Cmus-maloja-scrobbler](https://git.sr.ht/~xyank/cmus-maloja-scrobbler) Script
|
||||
* [OngakuKiroku](https://github.com/Atelier-Shiori/OngakuKiroku) Desktop Application (Mac)
|
||||
* [Albula](https://github.com/krateng/albula) Music Server
|
||||
* [Maloja Scrobbler](https://chrome.google.com/webstore/detail/maloja-scrobbler/cfnbifdmgbnaalphodcbandoopgbfeeh) Chromium Extension (also included in the repository) for Plex Web, Spotify, Bandcamp, Soundcloud or Youtube Music
|
||||
|
||||
### Native API
|
||||
|
||||
If you want to implement your own method of scrobbling, it's very simple: You only need one POST request to `/apis/mlj_1/newscrobble` with the keys `artist`, `title` and `key` (and optionally `album`,`duration` (in seconds) and `time`(for cached scrobbles)) - either as form-data or json.
|
||||
|
||||
If you're the maintainer of a music player or server and would like to implement native Maloja scrobbling, feel free to reach out - I'll try my best to help. For Python applications, you can simply use the [`malojalib` package](https://pypi.org/project/maloja-lib/) for a consistent interface even with future updates.
|
||||
|
||||
### Standard-compliant API
|
||||
|
||||
You can use any third-party scrobbler that supports the audioscrobbler (GNUFM) or the ListenBrainz protocol. This is still somewhat experimental, but give it a try with these settings:
|
||||
|
||||
GNU FM |
|
||||
------ | ---------
|
||||
Gnukebox URL | Your Maloja URL followed by `/apis/audioscrobbler`
|
||||
Username | Doesn't matter
|
||||
Password | Any of your API keys
|
||||
|
||||
ListenBrainz |
|
||||
------ | ---------
|
||||
API URL | Your Maloja URL followed by `/apis/listenbrainz`
|
||||
Username | Doesn't matter
|
||||
Auth Token | Any of your API keys
|
||||
|
||||
Audioscrobbler v1.2 |
|
||||
------ | ---------
|
||||
Server URL | Your Maloja URL followed by `/apis/audioscrobbler_legacy`
|
||||
Username | Doesn't matter
|
||||
Password | Any of your API keys
|
||||
|
||||
Known working scrobblers:
|
||||
* [Pano Scrobbler](https://github.com/kawaiiDango/pScrobbler) for Android
|
||||
* [Simple Scrobbler](https://simple-last-fm-scrobbler.github.io) for Android
|
||||
* [Airsonic Advanced](https://github.com/airsonic-advanced/airsonic-advanced) (requires you to supply the full endpoint (`yoururl.tld/apis/listenbrainz/1/submit-listens`))
|
||||
* [Funkwhale](https://dev.funkwhale.audio/funkwhale/funkwhale) (use the legacy API `yoururl.tld/apis/audioscrobbler_legacy`)
|
||||
* [mpdscribble](https://github.com/MusicPlayerDaemon/mpdscribble) (use the legacy API `yoururl.tld/apis/audioscrobbler_legacy`)
|
||||
|
||||
I'm thankful for any feedback whether other scrobblers work!
|
||||
|
||||
|
||||
|
||||
### Manual
|
||||
If you're the maintainer of a music player or server and would like to implement native Maloja scrobbling, feel free to reach out!
|
||||
|
||||
If you can't automatically scrobble your music, you can always do it manually on the `/admin_manual` page of your Maloja server.
|
||||
|
||||
|
|
|
@ -6,8 +6,13 @@ chrome.runtime.onMessage.addListener(onInternalMessage);
|
|||
|
||||
tabManagers = {}
|
||||
|
||||
|
||||
const ALWAYS_SCROBBLE_SECONDS = 60*3;
|
||||
// Longer songs are always scrobbled when playing at least 2 minutes
|
||||
|
||||
pages = {
|
||||
"Plex Web":{
|
||||
"plex":{
|
||||
"name":"Plex",
|
||||
"patterns":[
|
||||
"https://app.plex.tv",
|
||||
"http://app.plex.tv",
|
||||
|
@ -16,31 +21,36 @@ pages = {
|
|||
],
|
||||
"script":"plex.js"
|
||||
},
|
||||
"YouTube Music":{
|
||||
"ytmusic":{
|
||||
"name":"YouTube Music",
|
||||
"patterns":[
|
||||
"https://music.youtube.com"
|
||||
],
|
||||
"script":"ytmusic.js"
|
||||
},
|
||||
"Spotify Web":{
|
||||
"spotify":{
|
||||
"name":"Spotify",
|
||||
"patterns":[
|
||||
"https://open.spotify.com"
|
||||
],
|
||||
"script":"spotify.js"
|
||||
},
|
||||
"Bandcamp":{
|
||||
"bandcamp":{
|
||||
"name":"Bandcamp",
|
||||
"patterns":[
|
||||
"bandcamp.com"
|
||||
],
|
||||
"script":"bandcamp.js"
|
||||
},
|
||||
"Soundcloud":{
|
||||
"soundcloud":{
|
||||
"name":"Soundcloud",
|
||||
"patterns":[
|
||||
"https://soundcloud.com"
|
||||
],
|
||||
"script":"soundcloud.js"
|
||||
},
|
||||
"Navidrome":{
|
||||
"navidrome":{
|
||||
"name":"Navidrome",
|
||||
"patterns":[
|
||||
"https://navidrome.",
|
||||
"http://navidrome."
|
||||
|
@ -73,6 +83,13 @@ function onTabUpdated(tabId, changeInfo, tab) {
|
|||
//console.log("Still on same page!")
|
||||
tabManagers[tabId].update();
|
||||
|
||||
// check if the setting for this page is still active
|
||||
chrome.storage.local.get(["service_active_" + page],function(result){
|
||||
if (!result["service_active_" + page]) {
|
||||
delete tabManagers[tabId];
|
||||
}
|
||||
});
|
||||
|
||||
return
|
||||
}
|
||||
}
|
||||
|
@ -86,13 +103,21 @@ function onTabUpdated(tabId, changeInfo, tab) {
|
|||
patterns = pages[key]["patterns"];
|
||||
for (var i=0;i<patterns.length;i++) {
|
||||
if (tab.url.includes(patterns[i])) {
|
||||
console.log("New page on tab " + tabId + " will be handled by new " + key + " manager!");
|
||||
tabManagers[tabId] = new Controller(tabId,key);
|
||||
updateTabNum();
|
||||
return
|
||||
//chrome.tabs.executeScript(tab.id,{"file":"sitescripts/" + pages[key]["script"]})
|
||||
|
||||
// check if we even like that page
|
||||
chrome.storage.local.get(["service_active_" + key],function(result){
|
||||
if (result["service_active_" + key]) {
|
||||
console.log("New page on tab " + tabId + " will be handled by new " + key + " manager!");
|
||||
tabManagers[tabId] = new Controller(tabId,key);
|
||||
updateTabNum();
|
||||
//chrome.tabs.executeScript(tab.id,{"file":"sitescripts/" + pages[key]["script"]})
|
||||
}
|
||||
else {
|
||||
console.log("New page on tab " + tabId + " is " + key + ", not enabled!");
|
||||
}
|
||||
});
|
||||
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -123,10 +148,10 @@ function onInternalMessage(request,sender) {
|
|||
for (tabId in tabManagers) {
|
||||
manager = tabManagers[tabId]
|
||||
if (manager.currentlyPlaying) {
|
||||
answer.push([manager.page,manager.currentArtist,manager.currentTitle]);
|
||||
answer.push([pages[manager.page]['name'],manager.currentArtist,manager.currentTitle]);
|
||||
}
|
||||
else {
|
||||
answer.push([manager.page,null]);
|
||||
answer.push([pages[manager.page]['name'],null]);
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -282,7 +307,7 @@ class Controller {
|
|||
|
||||
//ONLY CASE 2: Playback ended
|
||||
if (artist != this.currentArtist || title != this.currentTitle) {
|
||||
if (this.alreadyPlayed > this.currentLength / 2) {
|
||||
if ((this.alreadyPlayed > this.currentLength / 2) || (this.alreadyPlayed > ALWAYS_SCROBBLE_SECONDS)) {
|
||||
scrobble(this.currentArtist,this.currentTitle,this.alreadyPlayed)
|
||||
this.alreadyPlayed = 0
|
||||
}
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "Maloja Scrobbler",
|
||||
"version": "1.11",
|
||||
"version": "1.13",
|
||||
"description": "Scrobbles tracks from various sites to your Maloja server",
|
||||
"manifest_version": 2,
|
||||
"permissions": [
|
||||
|
|
|
@ -14,7 +14,7 @@
|
|||
color:beige;
|
||||
font-family:'Ubuntu';
|
||||
}
|
||||
input {
|
||||
input[type=text] {
|
||||
width:270px;
|
||||
font-family:'Ubuntu';
|
||||
outline:none;
|
||||
|
@ -33,10 +33,14 @@
|
|||
<br /><br />
|
||||
<span id="checkmark_key"></span> <span>API key:</span><br />
|
||||
<input type="text" id="apikey" />
|
||||
<br/><br/>
|
||||
<hr/>
|
||||
<span>Tabs:</span>
|
||||
<list id="playinglist">
|
||||
</list>
|
||||
<hr/>
|
||||
<span>Services:</span>
|
||||
<list id="sitelist">
|
||||
</list>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
|
|
|
@ -1,26 +1,71 @@
|
|||
// duplicate this info for now, don't know if there is a better way than sending messages
|
||||
var pages = {
|
||||
"plex":"Plex",
|
||||
"ytmusic":"YouTube Music",
|
||||
"spotify":"Spotify",
|
||||
"bandcamp":"Bandcamp",
|
||||
"soundcloud":"Soundcloud",
|
||||
"navidrome":"Navidrome"
|
||||
}
|
||||
|
||||
var config_defaults = {
|
||||
serverurl:"http://localhost:42010",
|
||||
apikey:"BlackPinkInYourArea"
|
||||
}
|
||||
|
||||
for (var key in pages) {
|
||||
config_defaults["service_active_" + key] = true;
|
||||
}
|
||||
|
||||
|
||||
document.addEventListener("DOMContentLoaded",function() {
|
||||
|
||||
var sitelist = document.getElementById("sitelist");
|
||||
|
||||
|
||||
for (var identifier in pages) {
|
||||
sitelist.append(document.createElement('br'));
|
||||
var checkbox = document.createElement('input');
|
||||
checkbox.type = "checkbox";
|
||||
checkbox.id = "service_active_" + identifier;
|
||||
var label = document.createElement('label');
|
||||
label.for = checkbox.id;
|
||||
label.textContent = pages[identifier];
|
||||
sitelist.appendChild(checkbox);
|
||||
sitelist.appendChild(label);
|
||||
|
||||
checkbox.addEventListener("change",toggleSite);
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
document.getElementById("serverurl").addEventListener("change",checkServer);
|
||||
document.getElementById("apikey").addEventListener("change",checkServer);
|
||||
|
||||
document.getElementById("serverurl").addEventListener("focusout",checkServer);
|
||||
document.getElementById("apikey").addEventListener("focusout",checkServer);
|
||||
|
||||
document.getElementById("serverurl").addEventListener("input",saveConfig);
|
||||
document.getElementById("apikey").addEventListener("input",saveConfig);
|
||||
document.getElementById("serverurl").addEventListener("input",saveServer);
|
||||
document.getElementById("apikey").addEventListener("input",saveServer);
|
||||
|
||||
|
||||
chrome.runtime.onMessage.addListener(onInternalMessage);
|
||||
|
||||
chrome.storage.local.get(config_defaults,function(result){
|
||||
console.log(result);
|
||||
for (var key in result) {
|
||||
document.getElementById(key).value = result[key];
|
||||
|
||||
// booleans
|
||||
if (result[key] == true || result[key] == false) {
|
||||
document.getElementById(key).checked = result[key];
|
||||
}
|
||||
|
||||
// text
|
||||
else{
|
||||
document.getElementById(key).value = result[key];
|
||||
}
|
||||
|
||||
}
|
||||
checkServer();
|
||||
})
|
||||
|
@ -31,6 +76,11 @@ document.addEventListener("DOMContentLoaded",function() {
|
|||
|
||||
});
|
||||
|
||||
function toggleSite(evt) {
|
||||
var element = evt.target;
|
||||
chrome.storage.local.set({ [element.id]: element.checked });
|
||||
}
|
||||
|
||||
|
||||
function onInternalMessage(request,sender) {
|
||||
if (request.type == "response") {
|
||||
|
@ -50,8 +100,8 @@ function onInternalMessage(request,sender) {
|
|||
|
||||
|
||||
|
||||
function saveConfig() {
|
||||
for (var key in config_defaults) {
|
||||
function saveServer() {
|
||||
for (var key of ["serverurl","apikey"]) {
|
||||
var value = document.getElementById(key).value;
|
||||
chrome.storage.local.set({ [key]: value });
|
||||
}
|
||||
|
|
|
@ -32,7 +32,7 @@ Node.prototype.xpath = getxpath;
|
|||
|
||||
bar = document.xpath(maloja_scrobbler_selector_playbar, XPathResult.FIRST_ORDERED_NODE_TYPE);
|
||||
if (bar == null) {
|
||||
console.log("Nothing playing right now!");
|
||||
console.log("[Maloja Scrobbler] Nothing playing right now!");
|
||||
chrome.runtime.sendMessage({type:"stopPlayback",time:Math.floor(Date.now()),artist:"",title:""});
|
||||
}
|
||||
else {
|
||||
|
@ -78,12 +78,12 @@ else {
|
|||
label_paused = "Play"
|
||||
}
|
||||
if (control == label_paused) {
|
||||
console.log("Not playing right now");
|
||||
console.log("[Maloja Scrobbler] Not playing right now");
|
||||
chrome.runtime.sendMessage({type:"stopPlayback",time:Math.floor(Date.now()),artist:artist,title:title});
|
||||
//stopPlayback()
|
||||
}
|
||||
else if (control == label_playing) {
|
||||
console.log("Playing " + artist + " - " + title + " (" + durationSeconds + " sec)");
|
||||
console.log("[Maloja Scrobbler] Playing " + artist + " - " + title + " (" + durationSeconds + " sec)");
|
||||
chrome.runtime.sendMessage({type:"startPlayback",time:Math.floor(Date.now()),artist:artist,title:title,duration:durationSeconds});
|
||||
//startPlayback(artist,title,durationSeconds)
|
||||
}
|
||||
|
|
|
@ -21,7 +21,6 @@ dependencies = [
|
|||
"requests"
|
||||
]
|
||||
|
||||
[project.scripts]
|
||||
|
||||
[build-system]
|
||||
requires = ["flit_core >=3.2,<4"]
|
||||
|
|
|
@ -0,0 +1,10 @@
|
|||
#!/usr/bin/with-contenv bash
|
||||
|
||||
if [ "$(s6-setuidgid abc id -u)" = "0" ]; then
|
||||
echo "-------------------------------------"
|
||||
echo "WARN: Running as root! If you meant to do this than this message can be ignored."
|
||||
echo "If you are running this container on a *linux* host and are not using podman rootless you SHOULD"
|
||||
echo "change the ENVs PUID and PGID for this container to ensure correct permissions on your config folder."
|
||||
echo -e "See: https://github.com/krateng/maloja#linux-host\n"
|
||||
echo -e "-------------------------------------\n"
|
||||
fi
|
|
@ -0,0 +1 @@
|
|||
oneshot
|
|
@ -0,0 +1 @@
|
|||
/etc/s6-overlay/s6-rc.d/init-permission-check/run
|
|
@ -0,0 +1,7 @@
|
|||
#!/usr/bin/with-contenv bash
|
||||
|
||||
# used https://github.com/linuxserver/docker-wikijs/blob/master/root/etc/s6-overlay/s6-rc.d/svc-wikijs/run as a template
|
||||
|
||||
echo -e "\nMaloja is starting!"
|
||||
exec \
|
||||
s6-setuidgid abc python -m maloja run
|
|
@ -0,0 +1 @@
|
|||
longrun
|
|
@ -0,0 +1 @@
|
|||
git tag -l '*.0' -n1 --sort=v:refname
|
|
@ -0,0 +1,3 @@
|
|||
minor_release_name: "Yura"
|
||||
'1.0':
|
||||
commit: "1fac2ca965fdbe40c85a88559d5b736f4829e7b0"
|
|
@ -0,0 +1,3 @@
|
|||
minor_release_name: "Solar"
|
||||
'1.1':
|
||||
commit: "5603ca9eb137516e604e9e3e83e273a70ef32f65"
|
|
@ -0,0 +1,3 @@
|
|||
minor_release_name: "Jeonghwa"
|
||||
'1.2':
|
||||
commit: "d46d2be2bf27ef40ddd9f0c077f86dcf0214adbb"
|
|
@ -0,0 +1,3 @@
|
|||
minor_release_name: "IU"
|
||||
'1.3':
|
||||
commit: "0bf1790a7cc0174b84f8c25dade6b221b13d65e9"
|
|
@ -0,0 +1,3 @@
|
|||
minor_release_name: "Chungha"
|
||||
'1.4':
|
||||
commit: "981c0e4ae2ad1bff5a0778b6fa34916b0c4d4f4a"
|
|
@ -0,0 +1,3 @@
|
|||
minor_release_name: "Seulgi"
|
||||
'1.5':
|
||||
commit: "e282789153ec3df133474a56e8d922a73795b72a"
|
|
@ -0,0 +1,5 @@
|
|||
minor_release_name: "Irene"
|
||||
'2.0':
|
||||
commit: "55621ef4efdf61c3092d42565e897dfbaa0244c8"
|
||||
notes:
|
||||
- "[Architecture] Refactored into Python Package"
|
|
@ -0,0 +1,5 @@
|
|||
minor_release_name: "Jennie"
|
||||
2.1.0:
|
||||
commit: "b87379ed986640788201f1ff52826413067e5ffb"
|
||||
2.1.4:
|
||||
commit: "c95ce17451cb19b4775a819f82a532d3a3a6231b"
|
|
@ -0,0 +1,17 @@
|
|||
minor_release_name: "Yeri"
|
||||
2.10.0:
|
||||
commit: "ce9d882856be8f6caca14ab7e5b9f13d6c31940b"
|
||||
2.10.1:
|
||||
commit: "f555ee9d9fc485c6a241f4a8fa88bd68527ed2e2"
|
||||
2.10.2:
|
||||
commit: "9a6617b4b1117a6e53d818aadcda886c831e16db"
|
||||
2.10.3:
|
||||
commit: "5b7d1fd8e9f70a8bf9c5bfbe4aca5b796578e114"
|
||||
2.10.4:
|
||||
commit: "3cf0dd9767fe62702b2f5c4f0267a234338a972b"
|
||||
2.10.5:
|
||||
commit: "034bd064f11ef18ebbfcb25bd8acac8eacce1324"
|
||||
2.10.6:
|
||||
commit: "f62fd254dd44deca50a860f3a966390ae9c3662c"
|
||||
2.10.7:
|
||||
commit: "212fbf368e38281f45a0d8dd63dc051dbd8cd8cf"
|
|
@ -0,0 +1,3 @@
|
|||
minor_release_name: "Akali"
|
||||
2.11.0:
|
||||
commit: "218313f80c160f90b28d99236a062ef62db7260d"
|
|
@ -0,0 +1,41 @@
|
|||
minor_release_name: "Tzuyu"
|
||||
2.12.0:
|
||||
commit: "723efcb8ba12f7bda9acc21d81d9930265881c15"
|
||||
2.12.1:
|
||||
commit: "a42ed56d2de47f88f873737f0f1374e99be895bf"
|
||||
2.12.2:
|
||||
commit: "5006ad2bf1a6d5132ed07d28a7f6f0a9a454d5a7"
|
||||
2.12.3:
|
||||
commit: "2a5d9498d1dd7bb6ac62a27d518a87542ec3f344"
|
||||
2.12.4:
|
||||
commit: "06c32e205e95df8d3d1e27876887b7da7aa2bdf4"
|
||||
2.12.5:
|
||||
commit: "c2f8ecc2dfa1ac4febde228fce150e08fb47be38"
|
||||
2.12.6:
|
||||
commit: "a652a22a96ab693a4e7c3271520e6ad79fa025af"
|
||||
2.12.7:
|
||||
commit: "5455abd0d1c9ecc8e000d04257f721babacb18e9"
|
||||
2.12.8:
|
||||
commit: "8ebd27ab76ad2dcaf1dfef0cc171900fa20d5ee5"
|
||||
2.12.9:
|
||||
commit: "49598e914f4e2d7895959bea954238db8a6cee78"
|
||||
2.12.10:
|
||||
commit: "9037403777faa089092cf103181027febf6f0340"
|
||||
2.12.12:
|
||||
commit: "eaaa0f3b53aa102ba1eb709c1803e94752017d86"
|
||||
2.12.13:
|
||||
commit: "c31770a34c96cecc87af78f861819cc49fe98dda"
|
||||
2.12.14:
|
||||
commit: "5157ce825eea9bf7b74123cb02dd28e25c6a0767"
|
||||
2.12.15:
|
||||
commit: "33af60ed2c8c980f17338827a9cb96e7f2fd2572"
|
||||
2.12.16:
|
||||
commit: "26dfdfb569d0beaf4ba8c6c67a9e2295d1362eed"
|
||||
2.12.17:
|
||||
commit: "21012234409c01fec3cb5c506f0b4ba74b735b0b"
|
||||
2.12.18:
|
||||
commit: "59eaa2264aefa6c9ed5f38e8490f77150bcae27b"
|
||||
2.12.19:
|
||||
commit: "8958eb1b547f07d5d063c46cbe59ec57e000ecae"
|
||||
2.12.20:
|
||||
commit: "7774d9a9361db986092e143e1bc397ce7a7524dd"
|
|
@ -0,0 +1,11 @@
|
|||
minor_release_name: "Aqua"
|
||||
2.13.0:
|
||||
commit: "8555b28fbc9a220577260014b7f71f433263cb9f"
|
||||
2.13.1:
|
||||
commit: "cefed03bc95dd5641b918f79b6ed14b2bfc9898d"
|
||||
2.13.2:
|
||||
commit: "0f5ccd4645ead8d1ad48a532752d401424edb236"
|
||||
2.13.3:
|
||||
commit: "40648b66f36894a297633c650e570ac77555d143"
|
||||
2.13.4:
|
||||
commit: "0ccd39ffd99b19e4cd1b1a14f97bfb4385662eeb"
|
|
@ -0,0 +1,23 @@
|
|||
minor_release_name: "Mina"
|
||||
2.14.0:
|
||||
commit: "1b0e3ffdb2389ae6ca484c78840756d0b7e5c0be"
|
||||
2.14.1:
|
||||
commit: "fb2dff8addc7eaf740c5e30cbcd6791aab882c56"
|
||||
2.14.2:
|
||||
commit: "cd8e0ff90abf7d01761b0576c4168254b9b1f7c1"
|
||||
2.14.3:
|
||||
commit: "f806fb8ed24dd1474b80e7b1a9a7637cdbd35905"
|
||||
2.14.4:
|
||||
commit: "868b8396a0a4ff0f687e651772d746af6d9dfab1"
|
||||
2.14.5:
|
||||
commit: "21d1643988e40a02531bcc708f43925789d854d1"
|
||||
2.14.6:
|
||||
commit: "ccbb3d3a807fd77a1481f9d44f311c7f8df659c7"
|
||||
2.14.7:
|
||||
commit: "634df2ffafdfa00b6caf981108d333e30bf160f8"
|
||||
2.14.8:
|
||||
commit: "ec5723d2b3122faaa5b76c5a1b156c9a915af9d6"
|
||||
2.14.9:
|
||||
commit: "2c73c81434e1a591685a4b1d267a9eb6dbd57174"
|
||||
2.14.10:
|
||||
commit: "e152a2edde836f8fb30427d13eb1e9e0d591a00b"
|
|
@ -0,0 +1,11 @@
|
|||
minor_release_name: "Rosé"
|
||||
2.2.0:
|
||||
commit: "33cea26a791e224625aa9bc523e2cf90e39c8a50"
|
||||
2.2.1:
|
||||
commit: "fbce600c4edd2b530e6673b89513b1a26b068b64"
|
||||
2.2.2:
|
||||
commit: "c518627670f5614a2b9931471337a1a6b2ee344f"
|
||||
2.2.3:
|
||||
commit: "a2cc27ddd46c7cf9959f33478eac396e18f90055"
|
||||
2.2.4:
|
||||
commit: "c6deb1543779ce8b09af6bcbdc35e7668af86010"
|
|
@ -0,0 +1,19 @@
|
|||
minor_release_name: "Nancy"
|
||||
2.3.0:
|
||||
commit: "8793b149f501fe5f3e237d7ae0fcd23c8f4e5e9d"
|
||||
2.3.1:
|
||||
commit: "7c6e2ad60f15d8c4ac85a0808a0abd07549a4a2b"
|
||||
2.3.2:
|
||||
commit: "5a08fd78c69c4047b82ff9c394ea23d25356758e"
|
||||
2.3.3:
|
||||
commit: "9cf1fb3ed83817168dfe2ac30a42dcadb080c043"
|
||||
2.3.4:
|
||||
commit: "eb82282e58259b243958e7590506bd26f8e92db0"
|
||||
2.3.5:
|
||||
commit: "a4f13f6923b7783509462944f1abb235b4a068d0"
|
||||
2.3.6:
|
||||
commit: "b611387011e4cbd274e210d0c21c83d15302281c"
|
||||
2.3.7:
|
||||
commit: "b17060184b6897b18cf8af28a3817c9989aac96f"
|
||||
2.3.8:
|
||||
commit: "afe01c8341acd4cf9f4b84fbba85aab6777fd230"
|
|
@ -0,0 +1,29 @@
|
|||
minor_release_name: "Songhee"
|
||||
2.4.0:
|
||||
commit: "6aa65bf1ce273d9fd36d44f6e24439981b2228a3"
|
||||
2.4.1:
|
||||
commit: "b117e6f7ec80afc6210314ce97bac087d5ab7b54"
|
||||
2.4.2:
|
||||
commit: "d989134e65c20ab33b0ea8e4a132655074057757"
|
||||
2.4.3:
|
||||
commit: "9b787fa3b13d77a9cfbe21061f519defac7fafd0"
|
||||
2.4.4:
|
||||
commit: "948772b1c26070d7814871824b970fb60fc6976d"
|
||||
2.4.5:
|
||||
commit: "2da5ab83b3a410b02af48e70b298069218a7e2a3"
|
||||
2.4.6:
|
||||
commit: "65f9c88da4d56df37e4a3f974d7f660502c7a310"
|
||||
2.4.7:
|
||||
commit: "c166620d5f9706e54f9cd67044d42bf8583575d8"
|
||||
2.4.8:
|
||||
commit: "98c1527f778958b1a3322a4f026cfe2c421388aa"
|
||||
2.4.9:
|
||||
commit: "b21b27bb6e230901281bb524f84e177c937b48fd"
|
||||
2.4.10:
|
||||
commit: "08fe4695f6d5ef09789688481db478d0decbd5df"
|
||||
2.4.11:
|
||||
commit: "5c6a901f5118be54ae44affbd6881b14bc30e04a"
|
||||
2.4.12:
|
||||
commit: "6658165baedeee3939084ba4500de3de06bbc045"
|
||||
2.4.13:
|
||||
commit: "57403a89ab1d679523341d6a607d0b03e495ff35"
|
|
@ -0,0 +1,9 @@
|
|||
minor_release_name: "Seungeun"
|
||||
2.5.0:
|
||||
commit: "990131f546876d1461bac745e5cab3e60c78d038"
|
||||
2.5.1:
|
||||
commit: "0918444ab6ff934ba83393e294a135b1fc25bd0c"
|
||||
2.5.2:
|
||||
commit: "0918444ab6ff934ba83393e294a135b1fc25bd0c"
|
||||
2.5.3:
|
||||
commit: "0918444ab6ff934ba83393e294a135b1fc25bd0c"
|
|
@ -0,0 +1,21 @@
|
|||
minor_release_name: "HyunA"
|
||||
2.6.0:
|
||||
commit: "b161da1c1a1632725a44e998ff0d1872b3d5d184"
|
||||
2.6.1:
|
||||
commit: "1eae55e3bba335d41da0d21dfc383b838d9f0d03"
|
||||
2.6.2:
|
||||
commit: "dd3c83920b668466f2c053434bfd6be93bf32942"
|
||||
2.6.3:
|
||||
commit: "27f3ff6d085f42bdb67385f967db904022339d1d"
|
||||
2.6.4:
|
||||
commit: "5f8e73e6c714e9ca94a66f48d1b72fe516bbb0da"
|
||||
2.6.5:
|
||||
commit: "0fdd7669cced6c2b47f657e510bda03a053ee7ae"
|
||||
2.6.6:
|
||||
commit: "87cdb9987efe08b466f99f9ccb8b808131f9fbcd"
|
||||
2.6.7:
|
||||
commit: "0bdc4654bfb0f42d838e15c3d36dab0b4472db00"
|
||||
2.6.8:
|
||||
commit: "bdfb2a4a0b48362aabda7bb735296d83a02b932d"
|
||||
2.6.9:
|
||||
commit: "cb7a6d224152048176e6187ede6d60625961ab39"
|
|
@ -0,0 +1,25 @@
|
|||
minor_release_name: "Shanshan"
|
||||
2.7.0:
|
||||
commit: "8d7fb9a2c8be3f813ee5994be1818f9f81088faa"
|
||||
2.7.1:
|
||||
commit: "6885fbdeccb8b690fa0af59d8fd341e44803798f"
|
||||
2.7.2:
|
||||
commit: "4113d1761e28a7ee3b3cdabe4404cf3876f1fc84"
|
||||
2.7.3:
|
||||
commit: "1563a15abde175022b50fa085c6b9b19a6021c31"
|
||||
2.7.4:
|
||||
commit: "3e6bcc45d55446c6607664e407768391b47c5421"
|
||||
2.7.5:
|
||||
commit: "fa05c406606e269fb4153465611caeb71c12b486"
|
||||
2.7.6:
|
||||
commit: "47087b4288cbfa6000ca019a000f27ee5846d161"
|
||||
2.7.7:
|
||||
commit: "379ee49f1c61df9720346d3d021dea040587d54d"
|
||||
2.7.8:
|
||||
commit: "75bd823ad0cc24efecd1de193436a28dfaecd4f3"
|
||||
2.7.9:
|
||||
commit: "fb04dd507cee42092b889fe72cdf9975ea48e3b1"
|
||||
2.7.10:
|
||||
commit: "7fc879f77818371721e21c13e9df98796cf632de"
|
||||
2.7.11:
|
||||
commit: "44a2739a3b6e58cb90b7f7dfca2197834cf30464"
|
|
@ -0,0 +1,13 @@
|
|||
minor_release_name: "Haeun"
|
||||
2.8.0:
|
||||
commit: "25661f82af9338a024aae429cdafec7c86692aa5"
|
||||
2.8.1:
|
||||
commit: "1321fcb45ebe0291c9fd47ff2eb8cc329035acf3"
|
||||
2.8.2:
|
||||
commit: "e27a83bdc99a06a207c67c6f0034bc0a554c89af"
|
||||
2.8.3:
|
||||
commit: "6acab324dbd3594dcfbf944bfdfb5c8fe173354b"
|
||||
2.8.4:
|
||||
commit: "f7f1b1225e64b54d8962467182bddcc1de237f51"
|
||||
2.8.5:
|
||||
commit: "1dbc0b7fca05830d654076c74a91b6b74f470d5b"
|
|
@ -0,0 +1,23 @@
|
|||
minor_release_name: "Yaorenmao"
|
||||
2.9.0:
|
||||
commit: "8b4e9609e994d74506fd91471bd5a622b75b2f08"
|
||||
2.9.1:
|
||||
commit: "52a9faae90175841b2c259dd4677697e513e12f9"
|
||||
2.9.2:
|
||||
commit: "5cf7ca2e9bbf66082c4afb76b4033ff17c9cf8c8"
|
||||
2.9.3:
|
||||
commit: "e8c316f1992c3e5f171891272f32d959bb1fa4f0"
|
||||
2.9.4:
|
||||
commit: "e8a87cb8a5e2f63850ff3c02ed5aa8ee388460ed"
|
||||
2.9.5:
|
||||
commit: "09d3f103832bb7e26949a8f2df60c25851886bdc"
|
||||
2.9.6:
|
||||
commit: "9fb352cc6fe2bc41c56304e5ba941035fc1ac82d"
|
||||
2.9.7:
|
||||
commit: "f4a563f080f7dba336034feb1c0c42057f8d8d8c"
|
||||
2.9.8:
|
||||
commit: "2da9f154be240b8648d68a7eb2a3291738cfc93c"
|
||||
2.9.9:
|
||||
commit: "f7861c44b4a44b0cdd34e9f3f62530b8bf2837e3"
|
||||
2.9.10:
|
||||
commit: "22172d8b57df2ad1282f8d835183be45843fdd6a"
|
|
@ -0,0 +1,51 @@
|
|||
minor_release_name: "Yeonhee"
|
||||
3.0.0:
|
||||
commit: "f31c95228eb2dc01e661be928ffd881c063377da"
|
||||
notes:
|
||||
- "[Architecture] Switched to SQLite for main database"
|
||||
- "[Architecture] Switched to SQLite for artwork cache"
|
||||
- "[Feature] Added scrobble deletion from web interface"
|
||||
3.0.1:
|
||||
commit: "700b81217cb585df631d6f069243c56074cd1b71"
|
||||
notes:
|
||||
- "[Bugfix] Fixed upgrading imported scrobbles"
|
||||
3.0.2:
|
||||
commit: "4a8221f7a08f679b21c1fb619f03e5f922a1dc2b"
|
||||
notes:
|
||||
- "[Logging] Cleaned up output for waitress warnings"
|
||||
- "[Bugfix] Fixed exception in native API"
|
||||
3.0.3:
|
||||
commit: "1d9247fc724d7410b6e50d2cbfaa8f375d5e70af"
|
||||
notes:
|
||||
- "[Documentation] Added descriptions for native API endpoints"
|
||||
- "[Code Health] Made arguments for native API scrobbling explicit"
|
||||
- "[Bugfix] Fixed faulty entity type recognition for artists including the string 'artists'"
|
||||
- "[Bugfix] Fixed OS return codes"
|
||||
3.0.4:
|
||||
commit: "206ebd58ea204e0008f2c9bf72d76dd9918fec53"
|
||||
notes:
|
||||
- "[Feature] Enabled dual stack for web server"
|
||||
- "[Feature] Added better feedback to native API endpoints"
|
||||
- "[Bugfix] Fixed native API receiving superfluous keywords"
|
||||
- "[Bugfix] Fixed crash when importing scrobbles with artists with similar names"
|
||||
3.0.5:
|
||||
commit: "fe21894c5ecf3a53c9c5c00453abfc7f41c6a83e"
|
||||
notes:
|
||||
- "[Feature] Added notification system for web interface"
|
||||
- "[Bugfix] Fixed crash when encountering error in Lastfm import"
|
||||
3.0.6:
|
||||
commit: "b3d4cb7a153845d1f5a5eef67a6508754e338f2f"
|
||||
notes:
|
||||
- "[Performance] Implemented search in database"
|
||||
- "[Bugfix] Better parsing of featuring artists"
|
||||
- "[Bugfix] Fixed buffered output in Docker"
|
||||
- "[Bugfix] Fixed importing a Spotify file without path"
|
||||
- "[Bugfix] No longer releasing database lock during scrobble creation"
|
||||
- "[Distribution] Experimental arm64 image"
|
||||
3.0.7:
|
||||
commit: "62abc319303a6cb6463f7c27b6ef09b76fc67f86"
|
||||
notes:
|
||||
- "[Bugix] Improved signal handling"
|
||||
- "[Bugix] Fixed constant re-caching of all-time stats, significantly increasing page load speed"
|
||||
- "[Logging] Disabled cache information when cache is not used"
|
||||
- "[Distribution] Experimental arm/v7 image"
|
|
@ -0,0 +1,46 @@
|
|||
minor_release_name: "Soyeon"
|
||||
3.1.0:
|
||||
commit: "bfa553bed05d7dba33f611a44485d6cf460ba308"
|
||||
notes:
|
||||
- "[Architecture] Cleaned up legacy process control"
|
||||
- "[Architecture] Added proper exception framework to native API"
|
||||
- "[Feature] Implemented track title and artist name editing from web interface"
|
||||
- "[Feature] Implemented track and artist merging from web interface"
|
||||
- "[Feature] Implemented scrobble reparsing from web interface"
|
||||
- "[Performance] Adjusted cache sizes"
|
||||
- "[Logging] Added cache memory use information"
|
||||
- "[Technical] Bumped Python Version and various dependencies"
|
||||
3.1.1:
|
||||
commit: "20aae955b2263be07c56bafe4794f622117116ef"
|
||||
notes:
|
||||
- "[Bugfix] Fixed inclusion of custom css files"
|
||||
- "[Bugfix] Fixed list values in configuration"
|
||||
3.1.2:
|
||||
commit: "a0739306013cd9661f028fb5b2620cfa2d298aa4"
|
||||
notes:
|
||||
- "[Feature] Added remix artist parsing"
|
||||
- "[Feature] Added API debug mode"
|
||||
- "[Bugfix] Fixed leftover whitespaces when parsing titles"
|
||||
- "[Bugfix] Fixed handling of fallthrough values in config file"
|
||||
3.1.3:
|
||||
commit: "f3a04c79b1c37597cdf3cafcd95e3c923cd6a53f"
|
||||
notes:
|
||||
- "[Bugfix] Fixed infinite recursion with capitalized featuring delimiters"
|
||||
- "[Bugfix] Fixed favicon display"
|
||||
3.1.4:
|
||||
commit: "ef06f2262205c903e7c3060e2d2d52397f8ffc9d"
|
||||
notes:
|
||||
- "[Feature] Expanded information saved from Listenbrainz API"
|
||||
- "[Feature] Added import for Listenbrainz exports"
|
||||
- "[Bugfix] Sanitized artists and tracks with html-like structure"
|
||||
3.1.5:
|
||||
commit: "4330b0294bc0a01cdb841e2e3db370108da901db"
|
||||
notes:
|
||||
- "[Feature] Made image upload part of regular API"
|
||||
- "[Bugfix] Additional entity name sanitization"
|
||||
- "[Bugfix] Fixed image display on Safari"
|
||||
- "[Bugfix] Fixed entity editing on Firefox"
|
||||
- "[Bugfix] Made compatibile with SQLAlchemy 2.0"
|
||||
upcoming:
|
||||
notes:
|
||||
- "[Bugfix] Fixed configuration of time format"
|
|
@ -0,0 +1,2 @@
|
|||
docker build -t maloja . -f Containerfile
|
||||
docker run --rm -p 42010:42010 -v $PWD/testdata:/mlj -e MALOJA_DATA_DIRECTORY=/mlj maloja
|
|
@ -0,0 +1,2 @@
|
|||
podman build -t maloja . -f Containerfile
|
||||
podman run --rm -p 42010:42010 -v $PWD/testdata:/mlj -e MALOJA_DATA_DIRECTORY=/mlj maloja
|
|
@ -0,0 +1,36 @@
|
|||
# Contributor: Johannes Krattenmacher <maloja@dev.krateng.ch>
|
||||
# Maintainer: Johannes Krattenmacher <maloja@dev.krateng.ch>
|
||||
pkgname={{ tool.flit.module.name }}
|
||||
pkgver={{ project.version }}
|
||||
pkgrel=0
|
||||
pkgdesc="{{ project.description }}"
|
||||
url="{{ project.urls.homepage }}"
|
||||
arch="noarch"
|
||||
license="GPL-3.0"
|
||||
depends="{{ tool.osreqs.alpine.run | join(' ') }}"
|
||||
pkgusers=$pkgname
|
||||
pkggroups=$pkgname
|
||||
depends_dev="{{ tool.osreqs.alpine.build | join(' ') }}"
|
||||
makedepends="$depends_dev"
|
||||
source="
|
||||
$pkgname-$pkgver.tar.gz::{{ project.urls.repository }}/archive/refs/tags/v$pkgver.tar.gz
|
||||
"
|
||||
builddir="$srcdir"/$pkgname-$pkgver
|
||||
|
||||
|
||||
|
||||
build() {
|
||||
cd $builddir
|
||||
python3 -m build .
|
||||
pip3 install dist/*.tar.gz
|
||||
}
|
||||
|
||||
package() {
|
||||
mkdir -p /etc/$pkgname || return 1
|
||||
mkdir -p /var/lib/$pkgname || return 1
|
||||
mkdir -p /var/cache/$pkgname || return 1
|
||||
mkdir -p /var/logs/$pkgname || return 1
|
||||
}
|
||||
|
||||
# TODO
|
||||
sha512sums="a674eaaaa248fc2b315514d79f9a7a0bac6aa1582fe29554d9176e8b551e8aa3aa75abeebdd7713e9e98cc987e7bd57dc7a5e9a2fb85af98b9c18cb54de47bf7 $pkgname-${pkgver}.tar.gz"
|
|
@ -0,0 +1,40 @@
|
|||
FROM alpine:3.15
|
||||
# Python image includes two Python versions, so use base Alpine
|
||||
|
||||
# Based on the work of Jonathan Boeckel <jonathanboeckel1996@gmail.com>
|
||||
|
||||
WORKDIR /usr/src/app
|
||||
|
||||
# Install run dependencies first
|
||||
RUN apk add --no-cache {{ tool.osreqs.alpine.run | join(' ') }}
|
||||
|
||||
# system pip could be removed after build, but apk then decides to also remove all its
|
||||
# python dependencies, even if they are explicitly installed as python packages
|
||||
# whut
|
||||
RUN \
|
||||
apk add py3-pip && \
|
||||
pip install wheel
|
||||
|
||||
|
||||
COPY ./requirements.txt ./requirements.txt
|
||||
|
||||
RUN \
|
||||
apk add --no-cache --virtual .build-deps {{ tool.osreqs.alpine.build | join(' ') }} && \
|
||||
pip install --no-cache-dir -r requirements.txt && \
|
||||
apk del .build-deps
|
||||
|
||||
|
||||
# no chance for caching below here
|
||||
|
||||
COPY . .
|
||||
|
||||
RUN pip install /usr/src/app
|
||||
|
||||
# Docker-specific configuration
|
||||
# defaulting to IPv4 is no longer necessary (default host is dual stack)
|
||||
ENV MALOJA_SKIP_SETUP=yes
|
||||
ENV PYTHONUNBUFFERED=1
|
||||
|
||||
EXPOSE 42010
|
||||
# use exec form for better signal handling https://docs.docker.com/engine/reference/builder/#entrypoint
|
||||
ENTRYPOINT ["maloja", "run"]
|
|
@ -0,0 +1,4 @@
|
|||
{% include 'install/install_dependencies_alpine.sh.jinja' %}
|
||||
apk add py3-pip
|
||||
pip install wheel
|
||||
pip install malojaserver
|
|
@ -0,0 +1,4 @@
|
|||
{% include 'install/install_dependencies_debian.sh.jinja' %}
|
||||
apt install python3-pip
|
||||
pip install wheel
|
||||
pip install malojaserver
|
|
@ -0,0 +1,4 @@
|
|||
#!/usr/bin/env sh
|
||||
apk update
|
||||
apk add \
|
||||
{{ (tool.osreqs.alpine.build + tool.osreqs.alpine.run + tool.osreqs.alpine.opt) | join(' \\\n\t') }}
|
|
@ -0,0 +1,4 @@
|
|||
#!/usr/bin/env sh
|
||||
apt update
|
||||
apt install \
|
||||
{{ (tool.osreqs.debian.build + tool.osreqs.debian.run + tool.osreqs.debian.opt) | join(' \\\n\t') }}
|
|
@ -0,0 +1,3 @@
|
|||
{% for dep in project.dependencies -%}
|
||||
{{ dep }}
|
||||
{% endfor %}
|
|
@ -0,0 +1,3 @@
|
|||
{% for dep in project['optional-dependencies'].full -%}
|
||||
{{ dep }}
|
||||
{% endfor %}
|
|
@ -249,7 +249,7 @@
|
|||
],
|
||||
"body": {
|
||||
"mode": "raw",
|
||||
"raw": "{\n \"key\": \"{{api_key}}\",\n \"artist\": \"{{data.artist1}}\",\n \"title\": \"{{data.artist2}}\"\n}"
|
||||
"raw": "{\n \"key\": \"{{api_key}}\",\n \"artist\": \"{{data.artist1}}\",\n \"title\": \"{{data.title1}}\"\n}"
|
||||
},
|
||||
"url": {
|
||||
"raw": "{{url}}/api/newscrobble",
|
||||
|
@ -904,4 +904,4 @@
|
|||
"value": ""
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
|
@ -0,0 +1,43 @@
|
|||
import threading
|
||||
import subprocess
|
||||
import time
|
||||
import requests
|
||||
import os
|
||||
|
||||
ACTIVE = True
|
||||
|
||||
build_cmd = ["docker","build","-t","maloja",".","-f","Containerfile"]
|
||||
subprocess.run(build_cmd)
|
||||
|
||||
common_prc = (
|
||||
["docker","run","--rm","-v",f"{os.path.abspath('./testdata')}:/mlj","-e","MALOJA_DATA_DIRECTORY=/mlj"],
|
||||
["maloja"]
|
||||
)
|
||||
|
||||
servers = [
|
||||
{'port': 42010},
|
||||
{'port': 42011, 'extraargs':["--memory=1g"]},
|
||||
{'port': 42012, 'extraargs':["--memory=500m"]}
|
||||
]
|
||||
for s in servers:
|
||||
cmd = common_prc[0] + ["-p",f"{s['port']}:42010"] + s.get('extraargs',[]) + common_prc[1]
|
||||
print(cmd)
|
||||
t = threading.Thread(target=subprocess.run,args=(cmd,))
|
||||
s['thread'] = t
|
||||
t.daemon = True
|
||||
t.start()
|
||||
time.sleep(5)
|
||||
|
||||
time.sleep(5)
|
||||
while ACTIVE:
|
||||
time.sleep(1)
|
||||
try:
|
||||
for s in servers:
|
||||
requests.get(f"http://localhost:{s['port']}")
|
||||
except KeyboardInterrupt:
|
||||
ACTIVE = False
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
for s in servers:
|
||||
s['thread'].join()
|
|
@ -0,0 +1,33 @@
|
|||
import toml
|
||||
import os
|
||||
import jinja2
|
||||
|
||||
env = jinja2.Environment(
|
||||
loader=jinja2.FileSystemLoader('dev/templates'),
|
||||
autoescape=jinja2.select_autoescape(['html', 'xml']),
|
||||
keep_trailing_newline=True
|
||||
)
|
||||
|
||||
with open("pyproject.toml") as filed:
|
||||
data = toml.load(filed)
|
||||
|
||||
templatedir = "./dev/templates"
|
||||
|
||||
for root,dirs,files in os.walk(templatedir):
|
||||
|
||||
reldirpath = os.path.relpath(root,start=templatedir)
|
||||
for f in files:
|
||||
|
||||
relfilepath = os.path.join(reldirpath,f)
|
||||
|
||||
if not f.endswith('.jinja'): continue
|
||||
|
||||
srcfile = os.path.join(root,f)
|
||||
trgfile = os.path.join(reldirpath,f.replace(".jinja",""))
|
||||
|
||||
|
||||
template = env.get_template(relfilepath)
|
||||
result = template.render(**data)
|
||||
|
||||
with open(trgfile,"w") as filed:
|
||||
filed.write(result)
|
|
@ -0,0 +1,53 @@
|
|||
import os
|
||||
import subprocess as sp
|
||||
import yaml
|
||||
|
||||
FOLDER = "dev/releases"
|
||||
|
||||
releases = {}
|
||||
for f in os.listdir(FOLDER):
|
||||
if f == "branch.yml": continue
|
||||
#maj,min = (int(i) for i in f.split('.')[:2])
|
||||
|
||||
with open(os.path.join(FOLDER,f)) as fd:
|
||||
data = yaml.safe_load(fd)
|
||||
|
||||
name = data.pop('minor_release_name')
|
||||
|
||||
for tag in data:
|
||||
tagtup = tuple(int(i) for i in tag.split('.'))
|
||||
releases[tagtup] = data[tag]
|
||||
|
||||
# this is a bit dirty, works on our data
|
||||
if len(tagtup)<3 or tagtup[2] == 0: releases[tagtup]['name'] = name
|
||||
|
||||
|
||||
for version in releases:
|
||||
|
||||
info = releases[version]
|
||||
version = '.'.join(str(v) for v in version)
|
||||
msg = [
|
||||
f"Version {version}" + (f" '{info.get('name')}'" if info.get('name') else ''),
|
||||
*([""] if info.get('notes') else []),
|
||||
*[f"* {n}" for n in info.get('notes',[])]
|
||||
]
|
||||
|
||||
|
||||
cmd = [
|
||||
'git','tag','--force',
|
||||
'-a',f'v{version}',
|
||||
'-m',
|
||||
'\n'.join(msg),
|
||||
info['commit']
|
||||
]
|
||||
|
||||
try:
|
||||
prev_tag = sp.check_output(["git","show",f'v{maj}.{min}.{hot}']).decode()
|
||||
prev_tag_commit = prev_tag.split('\n')[6].split(" ")[1]
|
||||
except Exception:
|
||||
pass
|
||||
else:
|
||||
assert prev_tag_commit == info['commit']
|
||||
|
||||
print(cmd)
|
||||
sp.run(cmd)
|
|
@ -0,0 +1,20 @@
|
|||
services:
|
||||
maloja:
|
||||
# from dockerhub
|
||||
image: "krateng/maloja:latest"
|
||||
# or built locally
|
||||
#build:
|
||||
# context: .
|
||||
# dockerfile: ./Containerfile
|
||||
ports:
|
||||
- "42010:42010"
|
||||
# different directories for configuration, state and logs
|
||||
volumes:
|
||||
- "$PWD/config:/etc/maloja"
|
||||
- "$PWD/data:/var/lib/maloja"
|
||||
- "$PWD/logs:/var/log/maloja"
|
||||
#you can also have everything together instead:
|
||||
#volumes:
|
||||
#- "$PWD/data:/data"
|
||||
#environment:
|
||||
#- "MALOJA_DATA_DIRECTORY=/data"
|
|
@ -1 +0,0 @@
|
|||
python3
|
|
@ -1,8 +0,0 @@
|
|||
gcc
|
||||
python3-dev
|
||||
libxml2-dev
|
||||
libxslt-dev
|
||||
libffi-dev
|
||||
libc-dev
|
||||
py3-pip
|
||||
linux-headers
|
|
@ -1 +0,0 @@
|
|||
tzdata
|
|
@ -1 +0,0 @@
|
|||
vips
|
|
@ -0,0 +1,20 @@
|
|||
#!/usr/bin/env sh
|
||||
apk update
|
||||
apk add \
|
||||
gcc \
|
||||
g++ \
|
||||
python3-dev \
|
||||
libxml2-dev \
|
||||
libxslt-dev \
|
||||
libffi-dev \
|
||||
libc-dev \
|
||||
py3-pip \
|
||||
linux-headers \
|
||||
python3 \
|
||||
py3-lxml \
|
||||
tzdata \
|
||||
vips
|
||||
|
||||
apk add py3-pip
|
||||
pip install wheel
|
||||
pip install malojaserver
|
|
@ -0,0 +1,9 @@
|
|||
#!/usr/bin/env sh
|
||||
apt update
|
||||
apt install \
|
||||
python3-pip \
|
||||
python3
|
||||
|
||||
apt install python3-pip
|
||||
pip install wheel
|
||||
pip install malojaserver
|
|
@ -1,4 +0,0 @@
|
|||
sed 's/#.*//' ./install/dependencies_basic.txt | xargs apk add
|
||||
sed 's/#.*//' ./install/dependencies_build.txt | xargs apk add
|
||||
sed 's/#.*//' ./install/dependencies_run.txt | xargs apk add
|
||||
sed 's/#.*//' ./install/dependencies_run_opt.txt | xargs apk add
|
|
@ -1,15 +1,16 @@
|
|||
#!/usr/bin/env bash
|
||||
#!/usr/bin/env sh
|
||||
apk update
|
||||
apk add \
|
||||
python3 \
|
||||
python3-dev \
|
||||
gcc \
|
||||
g++ \
|
||||
python3-dev \
|
||||
libxml2-dev \
|
||||
libxslt-dev \
|
||||
libffi-dev \
|
||||
libc-dev \
|
||||
py3-pip \
|
||||
linux-headers \
|
||||
python3 \
|
||||
py3-lxml \
|
||||
tzdata \
|
||||
vips
|
||||
pip3 install wheel
|
||||
pip3 install malojaserver
|
|
@ -0,0 +1,5 @@
|
|||
#!/usr/bin/env sh
|
||||
apt update
|
||||
apt install \
|
||||
python3-pip \
|
||||
python3
|
|
@ -1,4 +0,0 @@
|
|||
#!/usr/bin/env bash
|
||||
apt update
|
||||
apt install python3 python3-pip
|
||||
pip3 install malojaserver
|
|
@ -1,4 +1,4 @@
|
|||
# monkey patching
|
||||
from . import monkey
|
||||
from .pkg_global import monkey
|
||||
# configuration before all else
|
||||
from . import globalconf
|
||||
from .pkg_global import conf
|
||||
|
|
|
@ -1,4 +1,184 @@
|
|||
# make the package itself runnable with python -m maloja
|
||||
import os
|
||||
import signal
|
||||
import subprocess
|
||||
import time
|
||||
|
||||
from .proccontrol.control import main
|
||||
main()
|
||||
from setproctitle import setproctitle
|
||||
from ipaddress import ip_address
|
||||
|
||||
from doreah.control import mainfunction
|
||||
from doreah.io import col
|
||||
from doreah.logging import log
|
||||
|
||||
from . import __pkginfo__ as pkginfo
|
||||
from .pkg_global import conf
|
||||
from .proccontrol import tasks
|
||||
from .setup import setup
|
||||
from .dev import generate, apidebug
|
||||
|
||||
|
||||
|
||||
def print_header_info():
|
||||
print()
|
||||
#print("#####")
|
||||
print(col['yellow']("Maloja"),f"v{pkginfo.VERSION}")
|
||||
print(pkginfo.HOMEPAGE)
|
||||
#print("#####")
|
||||
print()
|
||||
|
||||
|
||||
|
||||
def get_instance():
|
||||
try:
|
||||
return int(subprocess.check_output(["pidof","maloja"]))
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def get_instance_supervisor():
|
||||
try:
|
||||
return int(subprocess.check_output(["pidof","maloja_supervisor"]))
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def restart():
|
||||
if stop():
|
||||
start()
|
||||
else:
|
||||
print(col["red"]("Could not stop Maloja!"))
|
||||
|
||||
def start():
|
||||
if get_instance_supervisor() is not None:
|
||||
print("Maloja is already running.")
|
||||
else:
|
||||
print_header_info()
|
||||
setup()
|
||||
try:
|
||||
#p = subprocess.Popen(["python3","-m","maloja.server"],stdout=subprocess.DEVNULL,stderr=subprocess.DEVNULL)
|
||||
sp = subprocess.Popen(["python3","-m","maloja","supervisor"],stdout=subprocess.DEVNULL,stderr=subprocess.DEVNULL)
|
||||
print(col["green"]("Maloja started!"))
|
||||
|
||||
port = conf.malojaconfig["PORT"]
|
||||
|
||||
print("Visit your server address (Port " + str(port) + ") to see your web interface. Visit /admin_setup to get started.")
|
||||
print("If you're installing this on your local machine, these links should get you there:")
|
||||
print("\t" + col["blue"]("http://localhost:" + str(port)))
|
||||
print("\t" + col["blue"]("http://localhost:" + str(port) + "/admin_setup"))
|
||||
return True
|
||||
except Exception:
|
||||
print("Error while starting Maloja.")
|
||||
return False
|
||||
|
||||
|
||||
def stop():
|
||||
|
||||
for attempt in [(signal.SIGTERM,2),(signal.SIGTERM,5),(signal.SIGKILL,3),(signal.SIGKILL,5)]:
|
||||
|
||||
pid_sv = get_instance_supervisor()
|
||||
pid = get_instance()
|
||||
|
||||
if pid is None and pid_sv is None:
|
||||
print("Maloja stopped!")
|
||||
return True
|
||||
|
||||
if pid_sv is not None:
|
||||
os.kill(pid_sv,attempt[0])
|
||||
if pid is not None:
|
||||
os.kill(pid,attempt[0])
|
||||
|
||||
time.sleep(attempt[1])
|
||||
|
||||
return False
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
print("Maloja stopped!")
|
||||
return True
|
||||
|
||||
def onlysetup():
|
||||
print_header_info()
|
||||
setup()
|
||||
print("Setup complete!")
|
||||
|
||||
def run_server():
|
||||
print_header_info()
|
||||
setup()
|
||||
setproctitle("maloja")
|
||||
from . import server
|
||||
server.run_server()
|
||||
|
||||
def run_supervisor():
|
||||
setproctitle("maloja_supervisor")
|
||||
while True:
|
||||
log("Maloja is not running, starting...",module="supervisor")
|
||||
try:
|
||||
process = subprocess.Popen(
|
||||
["python3", "-m", "maloja","run"],
|
||||
stdout=subprocess.DEVNULL,
|
||||
stderr=subprocess.DEVNULL,
|
||||
)
|
||||
except Exception as e:
|
||||
log("Error starting Maloja: " + str(e),module="supervisor")
|
||||
else:
|
||||
try:
|
||||
process.wait()
|
||||
except Exception as e:
|
||||
log("Maloja crashed: " + str(e),module="supervisor")
|
||||
|
||||
def debug():
|
||||
os.environ["MALOJA_DEV_MODE"] = 'true'
|
||||
conf.malojaconfig.load_environment()
|
||||
direct()
|
||||
|
||||
def print_info():
|
||||
print_header_info()
|
||||
print(col['lightblue']("Configuration Directory:"),conf.dir_settings['config'])
|
||||
print(col['lightblue']("Data Directory: "),conf.dir_settings['state'])
|
||||
print(col['lightblue']("Log Directory: "),conf.dir_settings['logs'])
|
||||
print(col['lightblue']("Network: "),f"Dual Stack, Port {conf.malojaconfig['port']}" if conf.malojaconfig['host'] == "*" else f"IPv{ip_address(conf.malojaconfig['host']).version}, Port {conf.malojaconfig['port']}")
|
||||
print(col['lightblue']("Timezone: "),f"UTC{conf.malojaconfig['timezone']:+d}")
|
||||
print()
|
||||
try:
|
||||
import pkg_resources
|
||||
for pkg in ("sqlalchemy","waitress","bottle","doreah","jinja2"):
|
||||
print(col['cyan'] (f"{pkg}:".ljust(13)),pkg_resources.get_distribution(pkg).version)
|
||||
except ImportError:
|
||||
print("Could not determine dependency versions.")
|
||||
print()
|
||||
|
||||
@mainfunction({"l":"level","v":"version","V":"version"},flags=['version','include_images'],shield=True)
|
||||
def main(*args,**kwargs):
|
||||
|
||||
actions = {
|
||||
# server
|
||||
"start":start,
|
||||
"restart":restart,
|
||||
"stop":stop,
|
||||
"run":run_server,
|
||||
"supervisor":run_supervisor,
|
||||
"debug":debug,
|
||||
"setup":onlysetup,
|
||||
# admin scripts
|
||||
"import":tasks.import_scrobbles, # maloja import /x/y.csv
|
||||
"backup":tasks.backup, # maloja backup --targetfolder /x/y --include_images
|
||||
"generate":generate.generate_scrobbles, # maloja generate 400
|
||||
"export":tasks.export, # maloja export
|
||||
"apidebug":apidebug.run, # maloja apidebug
|
||||
# aux
|
||||
"info":print_info
|
||||
}
|
||||
|
||||
if "version" in kwargs:
|
||||
print(info.VERSION)
|
||||
return True
|
||||
else:
|
||||
try:
|
||||
action, *args = args
|
||||
action = actions[action]
|
||||
except (ValueError, KeyError):
|
||||
print("Valid commands: " + " ".join(a for a in actions))
|
||||
return False
|
||||
|
||||
return action(*args,**kwargs)
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
# you know what f*ck it
|
||||
# this is hardcoded for now because of that damn project / package name discrepancy
|
||||
# i'll fix it one day
|
||||
VERSION = "2.14.6"
|
||||
VERSION = "3.1.5"
|
||||
HOMEPAGE = "https://github.com/krateng/maloja"
|
||||
|
||||
|
||||
|
|
|
@ -1,22 +1,28 @@
|
|||
from . import native_v1
|
||||
from .audioscrobbler import Audioscrobbler
|
||||
from .audioscrobbler_legacy import AudioscrobblerLegacy
|
||||
from .listenbrainz import Listenbrainz
|
||||
from ._apikeys import apikeystore
|
||||
|
||||
|
||||
import copy
|
||||
from bottle import redirect, request, response
|
||||
from urllib.parse import urlencode
|
||||
|
||||
native_apis = [
|
||||
native_v1.api
|
||||
]
|
||||
standardized_apis = [
|
||||
Listenbrainz(),
|
||||
Audioscrobbler(),
|
||||
AudioscrobblerLegacy()
|
||||
]
|
||||
|
||||
|
||||
def init_apis(server):
|
||||
|
||||
from . import native_v1
|
||||
from .audioscrobbler import Audioscrobbler
|
||||
from .audioscrobbler_legacy import AudioscrobblerLegacy
|
||||
from .listenbrainz import Listenbrainz
|
||||
|
||||
native_apis = [
|
||||
native_v1.api
|
||||
]
|
||||
standardized_apis = [
|
||||
Listenbrainz(),
|
||||
Audioscrobbler(),
|
||||
AudioscrobblerLegacy()
|
||||
]
|
||||
|
||||
for api in native_apis:
|
||||
api.mount(server=server,path="apis/"+api.__apipath__)
|
||||
|
||||
|
@ -41,9 +47,12 @@ def init_apis(server):
|
|||
server.get(altpath_empty_cl)(alias_api)
|
||||
server.post(altpath_empty_cl)(alias_api)
|
||||
|
||||
def invalid_api(pth):
|
||||
def invalid_api(pth=''):
|
||||
response.status = 404
|
||||
return {"error":"Invalid API"}
|
||||
|
||||
server.get("/apis/<pth:path>")(invalid_api)
|
||||
server.post("/apis/<pth:path>")(invalid_api)
|
||||
|
||||
server.get("/apis")(invalid_api)
|
||||
server.post("/apis")(invalid_api)
|
||||
|
|
|
@ -0,0 +1,29 @@
|
|||
### API KEYS
|
||||
### symmetric keys are fine since we hopefully use HTTPS
|
||||
|
||||
from doreah.keystore import KeyStore
|
||||
from doreah.logging import log
|
||||
|
||||
from ..pkg_global.conf import data_dir
|
||||
|
||||
apikeystore = KeyStore(file=data_dir['clients']("apikeys.yml"),save_endpoint="/apis/mlj_1/apikeys")
|
||||
|
||||
|
||||
from .. import upgrade
|
||||
upgrade.upgrade_apikeys()
|
||||
|
||||
|
||||
# skip regular authentication if api key is present in request
|
||||
# an api key now ONLY permits scrobbling tracks, no other admin tasks
|
||||
def api_key_correct(request,args,kwargs):
|
||||
if "key" in kwargs:
|
||||
apikey = kwargs.pop("key")
|
||||
elif "apikey" in kwargs:
|
||||
apikey = kwargs.pop("apikey")
|
||||
else: return False
|
||||
|
||||
client = apikeystore.check_and_identify_key(apikey)
|
||||
if client:
|
||||
return {'client':client}
|
||||
else:
|
||||
return False
|
|
@ -58,11 +58,11 @@ class APIHandler:
|
|||
|
||||
|
||||
def wrapper(self,path:Multi=[],**keys):
|
||||
log("API request: " + str(path))# + " | Keys: " + str({k:keys.get(k) for k in keys}))
|
||||
log(f"{self.__apiname__} API request: {path}")# + " | Keys: " + str({k:keys.get(k) for k in keys}))
|
||||
|
||||
try:
|
||||
response.status,result = self.handle(path,keys)
|
||||
except:
|
||||
except Exception:
|
||||
exceptiontype = sys.exc_info()[0]
|
||||
if exceptiontype in self.errors:
|
||||
response.status,result = self.errors[exceptiontype]
|
||||
|
@ -82,20 +82,17 @@ class APIHandler:
|
|||
try:
|
||||
methodname = self.get_method(path,keys)
|
||||
method = self.methods[methodname]
|
||||
except:
|
||||
except Exception:
|
||||
log("Could not find a handler for method " + str(methodname) + " in API " + self.__apiname__,module="debug")
|
||||
log("Keys: " + str(keys),module="debug")
|
||||
raise InvalidMethodException()
|
||||
return method(path,keys)
|
||||
|
||||
|
||||
def scrobble(self,artiststr,titlestr,time=None,duration=None,album=None):
|
||||
logmsg = "Incoming scrobble (API: {api}): ARTISTS: {artiststr}, TRACK: {titlestr}"
|
||||
log(logmsg.format(api=self.__apiname__,artiststr=artiststr,titlestr=titlestr))
|
||||
if time is None: time = int(datetime.datetime.now(tz=datetime.timezone.utc).timestamp())
|
||||
def scrobble(self,rawscrobble,client=None):
|
||||
|
||||
# fixing etc is handled by the main scrobble function
|
||||
try:
|
||||
(artists,title) = cla.fullclean(artiststr,titlestr)
|
||||
database.createScrobble(artists,title,time)
|
||||
database.sync()
|
||||
except:
|
||||
return database.incoming_scrobble(rawscrobble,api=self.__apiname__,client=client)
|
||||
except Exception:
|
||||
raise ScrobblingException()
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
from ._base import APIHandler
|
||||
from ._exceptions import *
|
||||
from .. import database
|
||||
from ._apikeys import apikeystore
|
||||
|
||||
class Audioscrobbler(APIHandler):
|
||||
__apiname__ = "Audioscrobbler"
|
||||
|
@ -14,7 +15,7 @@ class Audioscrobbler(APIHandler):
|
|||
def init(self):
|
||||
|
||||
# no need to save these on disk, clients can always request a new session
|
||||
self.mobile_sessions = []
|
||||
self.mobile_sessions = {}
|
||||
self.methods = {
|
||||
"auth.getMobileSession":self.authmobile,
|
||||
"track.scrobble":self.submit_scrobble
|
||||
|
@ -30,39 +31,55 @@ class Audioscrobbler(APIHandler):
|
|||
def get_method(self,pathnodes,keys):
|
||||
return keys.get("method")
|
||||
|
||||
def generate_key(self,client):
|
||||
key = "".join(
|
||||
str(
|
||||
random.choice(
|
||||
list(range(10)) + list("abcdefghijklmnopqrstuvwxyz") +
|
||||
list("ABCDEFGHIJKLMNOPQRSTUVWXYZ"))) for _ in range(64))
|
||||
|
||||
self.mobile_sessions[key] = client
|
||||
return key
|
||||
|
||||
def authmobile(self,pathnodes,keys):
|
||||
token = keys.get("authToken")
|
||||
user = keys.get("username")
|
||||
password = keys.get("password")
|
||||
# either username and password
|
||||
if user is not None and password is not None:
|
||||
if password in database.allAPIkeys():
|
||||
sessionkey = generate_key(self.mobile_sessions)
|
||||
client = apikeystore.check_and_identify_key(password)
|
||||
if client:
|
||||
sessionkey = self.generate_key(client)
|
||||
return 200,{"session":{"key":sessionkey}}
|
||||
else:
|
||||
raise InvalidAuthException()
|
||||
# or username and token (deprecated by lastfm)
|
||||
elif user is not None and token is not None:
|
||||
for key in database.allAPIkeys():
|
||||
for client in apikeystore:
|
||||
key = apikeystore[client]
|
||||
if md5(user + md5(key)) == token:
|
||||
sessionkey = generate_key(self.mobile_sessions)
|
||||
sessionkey = self.generate_key(client)
|
||||
return 200,{"session":{"key":sessionkey}}
|
||||
raise InvalidAuthException()
|
||||
else:
|
||||
raise BadAuthException()
|
||||
|
||||
def submit_scrobble(self,pathnodes,keys):
|
||||
if keys.get("sk") is None or keys.get("sk") not in self.mobile_sessions:
|
||||
key = keys.get("sk")
|
||||
if key is None:
|
||||
raise InvalidSessionKey()
|
||||
client = self.mobile_sessions.get(key)
|
||||
if not client:
|
||||
raise InvalidSessionKey()
|
||||
if "track" in keys and "artist" in keys:
|
||||
artiststr,titlestr = keys["artist"], keys["track"]
|
||||
#(artists,title) = cla.fullclean(artiststr,titlestr)
|
||||
try:
|
||||
timestamp = int(keys["timestamp"])
|
||||
except:
|
||||
except Exception:
|
||||
timestamp = None
|
||||
#database.createScrobble(artists,title,timestamp)
|
||||
self.scrobble(artiststr,titlestr,time=timestamp)
|
||||
self.scrobble({'track_artists':[artiststr],'track_title':titlestr,'scrobble_time':timestamp},client=client)
|
||||
else:
|
||||
for num in range(50):
|
||||
if "track[" + str(num) + "]" in keys:
|
||||
|
@ -82,13 +99,3 @@ def md5(input):
|
|||
m = hashlib.md5()
|
||||
m.update(bytes(input,encoding="utf-8"))
|
||||
return m.hexdigest()
|
||||
|
||||
def generate_key(ls):
|
||||
key = "".join(
|
||||
str(
|
||||
random.choice(
|
||||
list(range(10)) + list("abcdefghijklmnopqrstuvwxyz") +
|
||||
list("ABCDEFGHIJKLMNOPQRSTUVWXYZ"))) for _ in range(64))
|
||||
|
||||
ls.append(key)
|
||||
return key
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
from ._base import APIHandler
|
||||
from ._exceptions import *
|
||||
from .. import database
|
||||
from ._apikeys import apikeystore
|
||||
|
||||
from bottle import request
|
||||
|
||||
|
@ -15,7 +16,7 @@ class AudioscrobblerLegacy(APIHandler):
|
|||
def init(self):
|
||||
|
||||
# no need to save these on disk, clients can always request a new session
|
||||
self.mobile_sessions = []
|
||||
self.mobile_sessions = {}
|
||||
self.methods = {
|
||||
"handshake":self.handshake,
|
||||
"nowplaying":self.now_playing,
|
||||
|
@ -41,9 +42,10 @@ class AudioscrobblerLegacy(APIHandler):
|
|||
protocol = 'http' if (keys.get("u") == 'nossl') else request.urlparts.scheme
|
||||
|
||||
if auth is not None:
|
||||
for key in database.allAPIkeys():
|
||||
if check_token(auth, key, timestamp):
|
||||
sessionkey = generate_key(self.mobile_sessions)
|
||||
for client in apikeystore:
|
||||
key = apikeystore[client]
|
||||
if self.check_token(auth,key,timestamp):
|
||||
sessionkey = self.generate_key(client)
|
||||
return 200, (
|
||||
"OK\n"
|
||||
f"{sessionkey}\n"
|
||||
|
@ -65,10 +67,14 @@ class AudioscrobblerLegacy(APIHandler):
|
|||
return 200,"OK\n"
|
||||
|
||||
def submit_scrobble(self,pathnodes,keys):
|
||||
if keys.get("s") is None or keys.get("s") not in self.mobile_sessions:
|
||||
key = keys.get("s")
|
||||
if key is None or key not in self.mobile_sessions:
|
||||
raise InvalidSessionKey()
|
||||
client = self.mobile_sessions.get(key)
|
||||
for count in range(50):
|
||||
artist_key = f"a[{count}]"
|
||||
album_key = f"b[{count}]"
|
||||
length_key = f"l[{count}]"
|
||||
track_key = f"t[{count}]"
|
||||
time_key = f"i[{count}]"
|
||||
if artist_key not in keys or track_key not in keys:
|
||||
|
@ -76,13 +82,39 @@ class AudioscrobblerLegacy(APIHandler):
|
|||
artiststr,titlestr = keys[artist_key], keys[track_key]
|
||||
try:
|
||||
timestamp = int(keys[time_key])
|
||||
except:
|
||||
except Exception:
|
||||
timestamp = None
|
||||
|
||||
scrobble = {
|
||||
'track_artists':[artiststr],
|
||||
'track_title':titlestr,
|
||||
'scrobble_time':timestamp,
|
||||
}
|
||||
if album_key in keys:
|
||||
scrobble['album_name'] = keys[album_key]
|
||||
if length_key in keys:
|
||||
scrobble['track_length'] = keys[length_key]
|
||||
|
||||
#database.createScrobble(artists,title,timestamp)
|
||||
self.scrobble(artiststr,titlestr,time=timestamp)
|
||||
self.scrobble(scrobble, client=client)
|
||||
return 200,"OK\n"
|
||||
|
||||
|
||||
def check_token(self, received_token, expected_key, ts):
|
||||
expected_token = md5(md5(expected_key) + ts)
|
||||
return received_token == expected_token
|
||||
|
||||
def generate_key(self,client):
|
||||
key = "".join(
|
||||
str(
|
||||
random.choice(
|
||||
list(range(10)) + list("abcdefghijklmnopqrstuvwxyz") +
|
||||
list("ABCDEFGHIJKLMNOPQRSTUVWXYZ"))) for _ in range(64))
|
||||
|
||||
self.mobile_sessions[key] = client
|
||||
return key
|
||||
|
||||
|
||||
import hashlib
|
||||
import random
|
||||
|
||||
|
@ -90,20 +122,3 @@ def md5(input):
|
|||
m = hashlib.md5()
|
||||
m.update(bytes(input,encoding="utf-8"))
|
||||
return m.hexdigest()
|
||||
|
||||
def generate_key(ls):
|
||||
key = "".join(
|
||||
str(
|
||||
random.choice(
|
||||
list(range(10)) + list("abcdefghijklmnopqrstuvwxyz") +
|
||||
list("ABCDEFGHIJKLMNOPQRSTUVWXYZ"))) for _ in range(64))
|
||||
|
||||
ls.append(key)
|
||||
return key
|
||||
|
||||
def lastfm_token(password, ts):
|
||||
return md5(md5(password) + ts)
|
||||
|
||||
def check_token(received_token, expected_key, ts):
|
||||
expected_token = lastfm_token(expected_key, ts)
|
||||
return received_token == expected_token
|
||||
|
|
|
@ -2,8 +2,9 @@ from ._base import APIHandler
|
|||
from ._exceptions import *
|
||||
from .. import database
|
||||
import datetime
|
||||
from ._apikeys import apikeystore
|
||||
|
||||
from ..globalconf import malojaconfig
|
||||
from ..pkg_global.conf import malojaconfig
|
||||
|
||||
|
||||
class Listenbrainz(APIHandler):
|
||||
|
@ -33,16 +34,18 @@ class Listenbrainz(APIHandler):
|
|||
def submit(self,pathnodes,keys):
|
||||
try:
|
||||
token = self.get_token_from_request_keys(keys)
|
||||
except:
|
||||
except Exception:
|
||||
raise BadAuthException()
|
||||
|
||||
if token not in database.allAPIkeys():
|
||||
client = apikeystore.check_and_identify_key(token)
|
||||
|
||||
if not client:
|
||||
raise InvalidAuthException()
|
||||
|
||||
try:
|
||||
listentype = keys["listen_type"]
|
||||
payload = keys["payload"]
|
||||
except:
|
||||
except Exception:
|
||||
raise MalformedJSONException()
|
||||
|
||||
if listentype == "playing_now":
|
||||
|
@ -52,14 +55,31 @@ class Listenbrainz(APIHandler):
|
|||
try:
|
||||
metadata = listen["track_metadata"]
|
||||
artiststr, titlestr = metadata["artist_name"], metadata["track_name"]
|
||||
albumstr = metadata.get("release_name")
|
||||
additional = metadata.get("additional_info",{})
|
||||
try:
|
||||
timestamp = int(listen["listened_at"])
|
||||
except:
|
||||
except Exception:
|
||||
timestamp = None
|
||||
except:
|
||||
except Exception:
|
||||
raise MalformedJSONException()
|
||||
|
||||
self.scrobble(artiststr,titlestr,timestamp)
|
||||
extrafields = {
|
||||
# fields that will not be consumed by regular scrobbling
|
||||
# will go into 'extra'
|
||||
k:additional[k]
|
||||
for k in ['track_mbid', 'release_mbid', 'artist_mbids','recording_mbid','tags']
|
||||
if k in additional
|
||||
}
|
||||
|
||||
self.scrobble({
|
||||
'track_artists':[artiststr],
|
||||
'track_title':titlestr,
|
||||
'album_name':albumstr,
|
||||
'scrobble_time':timestamp,
|
||||
'track_length': additional.get("duration"),
|
||||
**extrafields
|
||||
},client=client)
|
||||
|
||||
return 200,{"status":"ok"}
|
||||
|
||||
|
@ -67,9 +87,9 @@ class Listenbrainz(APIHandler):
|
|||
def validate_token(self,pathnodes,keys):
|
||||
try:
|
||||
token = self.get_token_from_request_keys(keys)
|
||||
except:
|
||||
except Exception:
|
||||
raise BadAuthException()
|
||||
if token not in database.allAPIkeys():
|
||||
if not apikeystore.check_key(token):
|
||||
raise InvalidAuthException()
|
||||
else:
|
||||
return 200,{"code":200,"message":"Token valid.","valid":True,"user_name":malojaconfig["NAME"]}
|
||||
|
|
|
@ -1,50 +1,190 @@
|
|||
from ..database import *
|
||||
from ..globalconf import malojaconfig, apikeystore
|
||||
from ..__pkginfo__ import VERSION
|
||||
from ..malojauri import uri_to_internal
|
||||
from .. import utilities
|
||||
import os
|
||||
import math
|
||||
import traceback
|
||||
|
||||
from bottle import response, static_file
|
||||
from bottle import response, static_file, request, FormsDict
|
||||
|
||||
from doreah.logging import log
|
||||
from doreah.auth import authenticated_api, authenticated_api_with_alternate, authenticated_function
|
||||
|
||||
# nimrodel API
|
||||
from nimrodel import EAPI as API
|
||||
from nimrodel import Multi
|
||||
|
||||
|
||||
from .. import database
|
||||
from ..pkg_global.conf import malojaconfig, data_dir
|
||||
|
||||
|
||||
|
||||
from ..__pkginfo__ import VERSION
|
||||
from ..malojauri import uri_to_internal, compose_querystring, internal_to_uri
|
||||
from .. import images
|
||||
from ._apikeys import apikeystore, api_key_correct
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
api = API(delay=True)
|
||||
api.__apipath__ = "mlj_1"
|
||||
|
||||
|
||||
|
||||
|
||||
errors = {
|
||||
database.exceptions.MissingScrobbleParameters: lambda e: (400,{
|
||||
"status":"failure",
|
||||
"error":{
|
||||
'type':'missing_scrobble_data',
|
||||
'value':e.params,
|
||||
'desc':"The scrobble is missing needed parameters."
|
||||
}
|
||||
}),
|
||||
database.exceptions.MissingEntityParameter: lambda e: (400,{
|
||||
"status":"error",
|
||||
"error":{
|
||||
'type':'missing_entity_parameter',
|
||||
'value':None,
|
||||
'desc':"This API call is not valid without an entity (track or artist)."
|
||||
}
|
||||
}),
|
||||
database.exceptions.EntityExists: lambda e: (409,{
|
||||
"status":"failure",
|
||||
"error":{
|
||||
'type':'entity_exists',
|
||||
'value':e.entitydict,
|
||||
'desc':"This entity already exists in the database. Consider merging instead."
|
||||
}
|
||||
}),
|
||||
database.exceptions.DatabaseNotBuilt: lambda e: (503,{
|
||||
"status":"error",
|
||||
"error":{
|
||||
'type':'server_not_ready',
|
||||
'value':'db_upgrade',
|
||||
'desc':"The database is being upgraded. Please try again later."
|
||||
}
|
||||
}),
|
||||
images.MalformedB64: lambda e: (400,{
|
||||
"status":"failure",
|
||||
"error":{
|
||||
'type':'malformed_b64',
|
||||
'value':None,
|
||||
'desc':"The provided base 64 string is not valid."
|
||||
}
|
||||
}),
|
||||
# for http errors, use their status code
|
||||
Exception: lambda e: ((e.status_code if hasattr(e,'statuscode') else 500),{
|
||||
"status":"failure",
|
||||
"error":{
|
||||
'type':'unknown_error',
|
||||
'value':e.__repr__(),
|
||||
'desc':"The server has encountered an exception."
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
def catch_exceptions(func):
|
||||
def protector(*args,**kwargs):
|
||||
try:
|
||||
return func(*args,**kwargs)
|
||||
except Exception as e:
|
||||
print(traceback.format_exc())
|
||||
for etype in errors:
|
||||
if isinstance(e,etype):
|
||||
errorhandling = errors[etype](e)
|
||||
response.status = errorhandling[0]
|
||||
return errorhandling[1]
|
||||
|
||||
protector.__doc__ = func.__doc__
|
||||
protector.__annotations__ = func.__annotations__
|
||||
return protector
|
||||
|
||||
|
||||
def add_common_args_to_docstring(filterkeys=False,limitkeys=False,delimitkeys=False,amountkeys=False):
|
||||
def decorator(func):
|
||||
timeformats = "Possible formats include '2022', '2022/08', '2022/08/01', '2022/W42', 'today', 'thismonth', 'monday', 'august'"
|
||||
|
||||
if filterkeys:
|
||||
func.__doc__ += f"""
|
||||
:param string title: Track title
|
||||
:param string artist: Track artist. Can be specified multiple times.
|
||||
:param bool associated: Whether to include associated artists.
|
||||
"""
|
||||
if limitkeys:
|
||||
func.__doc__ += f"""
|
||||
:param string from: Start of the desired time range. Can also be called since or start. {timeformats}
|
||||
:param string until: End of the desired range. Can also be called to or end. {timeformats}
|
||||
:param string in: Desired range. Can also be called within or during. {timeformats}
|
||||
"""
|
||||
if delimitkeys:
|
||||
func.__doc__ += """
|
||||
:param string step: Step, e.g. month or week.
|
||||
:param int stepn: Number of base type units per step
|
||||
:param int trail: How many preceding steps should be factored in.
|
||||
:param bool cumulative: Instead of a fixed trail length, use all history up to this point.
|
||||
"""
|
||||
if amountkeys:
|
||||
func.__doc__ += """
|
||||
:param int page: Page to show
|
||||
:param int perpage: Entries per page.
|
||||
:param int max: Legacy. Show first page with this many entries.
|
||||
"""
|
||||
return func
|
||||
return decorator
|
||||
|
||||
|
||||
|
||||
@api.get("test")
|
||||
@catch_exceptions
|
||||
def test_server(key=None):
|
||||
"""Pings the server. If an API key is supplied, the server will respond with 200
|
||||
if the key is correct and 403 if it isn't. If no key is supplied, the server will
|
||||
always respond with 200.
|
||||
|
||||
:param string key: An API key to be tested. Optional.
|
||||
:return: status (String), error (String)
|
||||
:rtype: Dictionary
|
||||
"""
|
||||
response.set_header("Access-Control-Allow-Origin","*")
|
||||
if key is not None and not (checkAPIkey(key)):
|
||||
if key is not None and not apikeystore.check_key(key):
|
||||
response.status = 403
|
||||
return {"error":"Wrong API key"}
|
||||
return {
|
||||
"status":"error",
|
||||
"error":"Wrong API key"
|
||||
}
|
||||
|
||||
else:
|
||||
response.status = 200
|
||||
return {"status":"ok"}
|
||||
return {
|
||||
"status":"ok"
|
||||
}
|
||||
|
||||
|
||||
@api.get("serverinfo")
|
||||
@catch_exceptions
|
||||
def server_info():
|
||||
"""Returns basic information about the server.
|
||||
|
||||
:return: name (String), version (Tuple), versionstring (String), db_status (Mapping). Additional keys can be added at any point, but will not be removed within API version.
|
||||
:rtype: Dictionary
|
||||
"""
|
||||
|
||||
|
||||
response.set_header("Access-Control-Allow-Origin","*")
|
||||
response.set_header("Content-Type","application/json")
|
||||
|
||||
return {
|
||||
"name":malojaconfig["NAME"],
|
||||
"version":VERSION.split("."),
|
||||
"versionstring":VERSION,
|
||||
"db_status":dbstatus
|
||||
"db_status":database.dbstatus
|
||||
}
|
||||
|
||||
|
||||
|
@ -52,137 +192,240 @@ def server_info():
|
|||
|
||||
|
||||
@api.get("scrobbles")
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring(filterkeys=True,limitkeys=True,amountkeys=True)
|
||||
def get_scrobbles_external(**keys):
|
||||
"""Returns a list of scrobbles.
|
||||
|
||||
:return: list (List)
|
||||
:rtype: Dictionary
|
||||
"""
|
||||
k_filter, k_time, _, k_amount, _ = uri_to_internal(keys,api=True)
|
||||
ckeys = {**k_filter, **k_time, **k_amount}
|
||||
|
||||
result = get_scrobbles(**ckeys)
|
||||
result = database.get_scrobbles(**ckeys)
|
||||
|
||||
offset = (k_amount.get('page') * k_amount.get('perpage')) if k_amount.get('perpage') is not math.inf else 0
|
||||
result = result[offset:]
|
||||
if k_amount.get('perpage') is not math.inf: result = result[:k_amount.get('perpage')]
|
||||
|
||||
return {"list":result}
|
||||
|
||||
|
||||
# info for comparison
|
||||
@api.get("info")
|
||||
def info_external(**keys):
|
||||
|
||||
response.set_header("Access-Control-Allow-Origin","*")
|
||||
response.set_header("Content-Type","application/json")
|
||||
|
||||
return info()
|
||||
|
||||
return {
|
||||
"status":"ok",
|
||||
"list":result
|
||||
}
|
||||
|
||||
|
||||
@api.get("numscrobbles")
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring(filterkeys=True,limitkeys=True,amountkeys=True)
|
||||
def get_scrobbles_num_external(**keys):
|
||||
"""Returns amount of scrobbles.
|
||||
|
||||
:return: amount (Integer)
|
||||
:rtype: Dictionary
|
||||
"""
|
||||
k_filter, k_time, _, k_amount, _ = uri_to_internal(keys)
|
||||
ckeys = {**k_filter, **k_time, **k_amount}
|
||||
|
||||
result = get_scrobbles_num(**ckeys)
|
||||
return {"amount":result}
|
||||
result = database.get_scrobbles_num(**ckeys)
|
||||
|
||||
return {
|
||||
"status":"ok",
|
||||
"amount":result
|
||||
}
|
||||
|
||||
|
||||
|
||||
@api.get("tracks")
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring(filterkeys=True)
|
||||
def get_tracks_external(**keys):
|
||||
"""Returns all tracks (optionally of an artist).
|
||||
|
||||
:return: list (List)
|
||||
:rtype: Dictionary
|
||||
"""
|
||||
k_filter, _, _, _, _ = uri_to_internal(keys,forceArtist=True)
|
||||
ckeys = {**k_filter}
|
||||
|
||||
result = get_tracks(**ckeys)
|
||||
return {"list":result}
|
||||
result = database.get_tracks(**ckeys)
|
||||
|
||||
return {
|
||||
"status":"ok",
|
||||
"list":result
|
||||
}
|
||||
|
||||
|
||||
|
||||
@api.get("artists")
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring()
|
||||
def get_artists_external():
|
||||
result = get_artists()
|
||||
return {"list":result}
|
||||
"""Returns all artists.
|
||||
|
||||
:return: list (List)
|
||||
:rtype: Dictionary"""
|
||||
result = database.get_artists()
|
||||
|
||||
return {
|
||||
"status":"ok",
|
||||
"list":result
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
@api.get("charts/artists")
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring(limitkeys=True)
|
||||
def get_charts_artists_external(**keys):
|
||||
"""Returns artist charts
|
||||
|
||||
:return: list (List)
|
||||
:rtype: Dictionary"""
|
||||
_, k_time, _, _, _ = uri_to_internal(keys)
|
||||
ckeys = {**k_time}
|
||||
|
||||
result = get_charts_artists(**ckeys)
|
||||
return {"list":result}
|
||||
result = database.get_charts_artists(**ckeys)
|
||||
|
||||
return {
|
||||
"status":"ok",
|
||||
"list":result
|
||||
}
|
||||
|
||||
|
||||
|
||||
@api.get("charts/tracks")
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring(filterkeys=True,limitkeys=True)
|
||||
def get_charts_tracks_external(**keys):
|
||||
"""Returns track charts
|
||||
|
||||
:return: list (List)
|
||||
:rtype: Dictionary"""
|
||||
k_filter, k_time, _, _, _ = uri_to_internal(keys,forceArtist=True)
|
||||
ckeys = {**k_filter, **k_time}
|
||||
|
||||
result = get_charts_tracks(**ckeys)
|
||||
return {"list":result}
|
||||
result = database.get_charts_tracks(**ckeys)
|
||||
|
||||
return {
|
||||
"status":"ok",
|
||||
"list":result
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
@api.get("pulse")
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring(filterkeys=True,limitkeys=True,delimitkeys=True,amountkeys=True)
|
||||
def get_pulse_external(**keys):
|
||||
"""Returns amounts of scrobbles in specified time frames
|
||||
|
||||
:return: list (List)
|
||||
:rtype: Dictionary"""
|
||||
k_filter, k_time, k_internal, k_amount, _ = uri_to_internal(keys)
|
||||
ckeys = {**k_filter, **k_time, **k_internal, **k_amount}
|
||||
|
||||
results = get_pulse(**ckeys)
|
||||
return {"list":results}
|
||||
results = database.get_pulse(**ckeys)
|
||||
|
||||
return {
|
||||
"status":"ok",
|
||||
"list":results
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
@api.get("performance")
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring(filterkeys=True,limitkeys=True,delimitkeys=True,amountkeys=True)
|
||||
def get_performance_external(**keys):
|
||||
"""Returns artist's or track's rank in specified time frames
|
||||
|
||||
:return: list (List)
|
||||
:rtype: Dictionary"""
|
||||
k_filter, k_time, k_internal, k_amount, _ = uri_to_internal(keys)
|
||||
ckeys = {**k_filter, **k_time, **k_internal, **k_amount}
|
||||
|
||||
results = get_performance(**ckeys)
|
||||
return {"list":results}
|
||||
results = database.get_performance(**ckeys)
|
||||
|
||||
return {
|
||||
"status":"ok",
|
||||
"list":results
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
@api.get("top/artists")
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring(limitkeys=True,delimitkeys=True)
|
||||
def get_top_artists_external(**keys):
|
||||
"""Returns respective number 1 artists in specified time frames
|
||||
|
||||
:return: list (List)
|
||||
:rtype: Dictionary"""
|
||||
_, k_time, k_internal, _, _ = uri_to_internal(keys)
|
||||
ckeys = {**k_time, **k_internal}
|
||||
|
||||
results = get_top_artists(**ckeys)
|
||||
return {"list":results}
|
||||
results = database.get_top_artists(**ckeys)
|
||||
|
||||
return {
|
||||
"status":"ok",
|
||||
"list":results
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
@api.get("top/tracks")
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring(limitkeys=True,delimitkeys=True)
|
||||
def get_top_tracks_external(**keys):
|
||||
"""Returns respective number 1 tracks in specified time frames
|
||||
|
||||
:return: list (List)
|
||||
:rtype: Dictionary"""
|
||||
_, k_time, k_internal, _, _ = uri_to_internal(keys)
|
||||
ckeys = {**k_time, **k_internal}
|
||||
|
||||
# IMPLEMENT THIS FOR TOP TRACKS OF ARTIST AS WELL?
|
||||
|
||||
results = get_top_tracks(**ckeys)
|
||||
return {"list":results}
|
||||
results = database.get_top_tracks(**ckeys)
|
||||
|
||||
return {
|
||||
"status":"ok",
|
||||
"list":results
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
@api.get("artistinfo")
|
||||
def artistInfo_external(**keys):
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring(filterkeys=True)
|
||||
def artist_info_external(**keys):
|
||||
"""Returns information about an artist
|
||||
|
||||
:return: artist (String), scrobbles (Integer), position (Integer), associated (List), medals (Mapping), topweeks (Integer)
|
||||
:rtype: Dictionary"""
|
||||
k_filter, _, _, _, _ = uri_to_internal(keys,forceArtist=True)
|
||||
ckeys = {**k_filter}
|
||||
|
||||
return artistInfo(**ckeys)
|
||||
return database.artist_info(**ckeys)
|
||||
|
||||
|
||||
|
||||
@api.get("trackinfo")
|
||||
def trackInfo_external(artist:Multi[str],**keys):
|
||||
@catch_exceptions
|
||||
@add_common_args_to_docstring(filterkeys=True)
|
||||
def track_info_external(artist:Multi[str]=[],**keys):
|
||||
"""Returns information about a track
|
||||
|
||||
:return: track (Mapping), scrobbles (Integer), position (Integer), medals (Mapping), certification (String), topweeks (Integer)
|
||||
:rtype: Dictionary"""
|
||||
# transform into a multidict so we can use our nomral uri_to_internal function
|
||||
keys = FormsDict(keys)
|
||||
for a in artist:
|
||||
|
@ -190,57 +433,116 @@ def trackInfo_external(artist:Multi[str],**keys):
|
|||
k_filter, _, _, _, _ = uri_to_internal(keys,forceTrack=True)
|
||||
ckeys = {**k_filter}
|
||||
|
||||
return trackInfo(**ckeys)
|
||||
return database.track_info(**ckeys)
|
||||
|
||||
|
||||
@api.get("compare")
|
||||
def compare_external(**keys):
|
||||
return compare(keys["remote"])
|
||||
|
||||
|
||||
|
||||
@api.get("newscrobble")
|
||||
@authenticated_api_with_alternate(api_key_correct)
|
||||
def get_post_scrobble(artist:Multi,**keys):
|
||||
"""DEPRECATED. Use the equivalent POST method instead."""
|
||||
artists = artist
|
||||
title = keys.get("title")
|
||||
album = keys.get("album")
|
||||
duration = keys.get("seconds")
|
||||
time = keys.get("time")
|
||||
if time is not None: time = int(time)
|
||||
|
||||
return incoming_scrobble(artists,title,album=album,duration=duration,time=time)
|
||||
|
||||
@api.post("newscrobble")
|
||||
@authenticated_api_with_alternate(api_key_correct)
|
||||
def post_scrobble(artist:Multi=None,**keys):
|
||||
@authenticated_function(alternate=api_key_correct,api=True,pass_auth_result_as='auth_result')
|
||||
@catch_exceptions
|
||||
def post_scrobble(
|
||||
artist:Multi=None,
|
||||
artists:list=[],
|
||||
title:str="",
|
||||
album:str=None,
|
||||
albumartists:list=[],
|
||||
duration:int=None,
|
||||
length:int=None,
|
||||
time:int=None,
|
||||
nofix=None,
|
||||
auth_result=None,
|
||||
**extra_kwargs):
|
||||
"""Submit a new scrobble.
|
||||
|
||||
:param string artist: Artist. Can be submitted multiple times as query argument for multiple artists.
|
||||
:param string artists: List of artists. Overwritten by artist parameter.
|
||||
:param list artists: List of artists.
|
||||
:param string title: Title of the track.
|
||||
:param string album: Name of the album. Optional.
|
||||
:param list albumartists: Album artists. Optional.
|
||||
:param int duration: Actual listened duration of the scrobble in seconds. Optional.
|
||||
:param int length: Total length of the track in seconds. Optional.
|
||||
:param int time: UNIX timestamp of the scrobble. Optional, not needed if scrobble is at time of request.
|
||||
:param flag nofix: Skip server-side metadata parsing. Optional.
|
||||
|
||||
:return: status (String), track (Mapping)
|
||||
:rtype: Dictionary
|
||||
"""
|
||||
#artists = "/".join(artist)
|
||||
artists = artist if artist is not None else keys.get("artists")
|
||||
title = keys.get("title")
|
||||
album = keys.get("album")
|
||||
duration = keys.get("seconds")
|
||||
time = keys.get("time")
|
||||
nofix = keys.get("nofix") is not None
|
||||
if time is not None: time = int(time)
|
||||
|
||||
return incoming_scrobble(artists,title,album=album,duration=duration,time=time,fix=not nofix)
|
||||
rawscrobble = {
|
||||
'track_artists':(artist or []) + artists,
|
||||
'track_title':title,
|
||||
'album_name':album,
|
||||
'album_artists':albumartists,
|
||||
'scrobble_duration':duration,
|
||||
'track_length':length,
|
||||
'scrobble_time':time
|
||||
}
|
||||
|
||||
# for logging purposes, don't pass values that we didn't actually supply
|
||||
rawscrobble = {k:rawscrobble[k] for k in rawscrobble if rawscrobble[k]}
|
||||
|
||||
|
||||
result = database.incoming_scrobble(
|
||||
rawscrobble,
|
||||
client='browser' if auth_result.get('doreah_native_auth_check') else auth_result.get('client'),
|
||||
api='native/v1',
|
||||
fix=(nofix is None)
|
||||
)
|
||||
|
||||
responsedict = {
|
||||
'status': 'success',
|
||||
'track': {
|
||||
'artists':result['track']['artists'],
|
||||
'title':result['track']['title']
|
||||
},
|
||||
'desc':f"Scrobbled {result['track']['title']} by {', '.join(result['track']['artists'])}"
|
||||
}
|
||||
if extra_kwargs:
|
||||
responsedict['warnings'] = [
|
||||
{'type':'invalid_keyword_ignored','value':k,
|
||||
'desc':"This key was not recognized by the server and has been discarded."}
|
||||
for k in extra_kwargs
|
||||
]
|
||||
if artist and artists:
|
||||
responsedict['warnings'] = [
|
||||
{'type':'mixed_schema','value':['artist','artists'],
|
||||
'desc':"These two fields are meant as alternative methods to submit information. Use of both is discouraged, but works at the moment."}
|
||||
]
|
||||
return responsedict
|
||||
|
||||
|
||||
|
||||
|
||||
@api.post("addpicture")
|
||||
@authenticated_function(alternate=api_key_correct,api=True)
|
||||
@catch_exceptions
|
||||
def add_picture(b64,artist:Multi=[],title=None):
|
||||
"""Uploads a new image for an artist or track.
|
||||
|
||||
param string b64: Base 64 representation of the image
|
||||
param string artist: Artist name. Can be supplied multiple times for tracks with multiple artists.
|
||||
param string title: Title of the track. Optional.
|
||||
|
||||
"""
|
||||
keys = FormsDict()
|
||||
for a in artist:
|
||||
keys.append("artist",a)
|
||||
if title is not None: keys.append("title",title)
|
||||
k_filter, _, _, _, _ = uri_to_internal(keys)
|
||||
if "track" in k_filter: k_filter = k_filter["track"]
|
||||
url = images.set_image(b64,**k_filter)
|
||||
|
||||
return {
|
||||
'status': 'success',
|
||||
'url': url
|
||||
}
|
||||
|
||||
|
||||
|
||||
@api.post("importrules")
|
||||
@authenticated_api
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def import_rulemodule(**keys):
|
||||
"""Internal Use Only"""
|
||||
filename = keys.get("filename")
|
||||
remove = keys.get("remove") is not None
|
||||
validchars = "-_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
|
||||
|
@ -256,31 +558,34 @@ def import_rulemodule(**keys):
|
|||
|
||||
|
||||
@api.post("rebuild")
|
||||
@authenticated_api
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def rebuild(**keys):
|
||||
"""Internal Use Only"""
|
||||
log("Database rebuild initiated!")
|
||||
sync()
|
||||
database.sync()
|
||||
dbstatus['rebuildinprogress'] = True
|
||||
from ..proccontrol.tasks.fixexisting import fix
|
||||
fix()
|
||||
global cla, coa
|
||||
global cla
|
||||
cla = CleanerAgent()
|
||||
coa = CollectorAgent()
|
||||
build_db()
|
||||
invalidate_caches()
|
||||
database.build_db()
|
||||
database.invalidate_caches()
|
||||
|
||||
|
||||
|
||||
|
||||
@api.get("search")
|
||||
@catch_exceptions
|
||||
def search(**keys):
|
||||
"""Internal Use Only"""
|
||||
query = keys.get("query")
|
||||
max_ = keys.get("max")
|
||||
if max_ is not None: max_ = int(max_)
|
||||
query = query.lower()
|
||||
|
||||
artists = db_search(query,type="ARTIST")
|
||||
tracks = db_search(query,type="TRACK")
|
||||
artists = database.db_search(query,type="ARTIST")
|
||||
tracks = database.db_search(query,type="TRACK")
|
||||
|
||||
|
||||
|
||||
|
@ -293,60 +598,62 @@ def search(**keys):
|
|||
artists_result = []
|
||||
for a in artists:
|
||||
result = {
|
||||
'name': a,
|
||||
'artist': a,
|
||||
'link': "/artist?" + compose_querystring(internal_to_uri({"artist": a})),
|
||||
'image': images.get_artist_image(a)
|
||||
}
|
||||
result["image"] = "/image?" + compose_querystring(internal_to_uri({"artist":a}))
|
||||
artists_result.append(result)
|
||||
|
||||
tracks_result = []
|
||||
for t in tracks:
|
||||
result = t
|
||||
result["link"] = "/track?" + compose_querystring(internal_to_uri({"track":t}))
|
||||
result["image"] = "/image?" + compose_querystring(internal_to_uri({"track":t}))
|
||||
result = {
|
||||
'track': t,
|
||||
'link': "/track?" + compose_querystring(internal_to_uri({"track":t})),
|
||||
'image': images.get_track_image(t)
|
||||
}
|
||||
tracks_result.append(result)
|
||||
|
||||
return {"artists":artists_result[:max_],"tracks":tracks_result[:max_]}
|
||||
|
||||
|
||||
@api.post("addpicture")
|
||||
@authenticated_api
|
||||
def add_picture(b64,artist:Multi=[],title=None):
|
||||
keys = FormsDict()
|
||||
for a in artist:
|
||||
keys.append("artist",a)
|
||||
if title is not None: keys.append("title",title)
|
||||
k_filter, _, _, _, _ = uri_to_internal(keys)
|
||||
if "track" in k_filter: k_filter = k_filter["track"]
|
||||
utilities.set_image(b64,**k_filter)
|
||||
|
||||
|
||||
@api.post("newrule")
|
||||
@authenticated_api
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def newrule(**keys):
|
||||
tsv.add_entry(data_dir['rules']("webmade.tsv"),[k for k in keys])
|
||||
"""Internal Use Only"""
|
||||
pass
|
||||
# TODO after implementing new rule system
|
||||
#tsv.add_entry(data_dir['rules']("webmade.tsv"),[k for k in keys])
|
||||
#addEntry("rules/webmade.tsv",[k for k in keys])
|
||||
|
||||
|
||||
@api.post("settings")
|
||||
@authenticated_api
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def set_settings(**keys):
|
||||
"""Internal Use Only"""
|
||||
malojaconfig.update(keys)
|
||||
|
||||
@api.post("apikeys")
|
||||
@authenticated_api
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def set_apikeys(**keys):
|
||||
"""Internal Use Only"""
|
||||
apikeystore.update(keys)
|
||||
|
||||
@api.post("import")
|
||||
@authenticated_api
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def import_scrobbles(identifier):
|
||||
"""Internal Use Only"""
|
||||
from ..thirdparty import import_scrobbles
|
||||
import_scrobbles(identifier)
|
||||
|
||||
@api.get("backup")
|
||||
@authenticated_api
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def get_backup(**keys):
|
||||
"""Internal Use Only"""
|
||||
from ..proccontrol.tasks.backup import backup
|
||||
import tempfile
|
||||
|
||||
|
@ -354,3 +661,88 @@ def get_backup(**keys):
|
|||
archivefile = backup(tmpfolder)
|
||||
|
||||
return static_file(os.path.basename(archivefile),root=tmpfolder)
|
||||
|
||||
@api.get("export")
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def get_export(**keys):
|
||||
"""Internal Use Only"""
|
||||
from ..proccontrol.tasks.export import export
|
||||
import tempfile
|
||||
|
||||
tmpfolder = tempfile.gettempdir()
|
||||
resultfile = export(tmpfolder)
|
||||
|
||||
return static_file(os.path.basename(resultfile),root=tmpfolder)
|
||||
|
||||
|
||||
@api.post("delete_scrobble")
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def delete_scrobble(timestamp):
|
||||
"""Internal Use Only"""
|
||||
result = database.remove_scrobble(timestamp)
|
||||
return {
|
||||
"status":"success",
|
||||
"desc":f"Scrobble was deleted!"
|
||||
}
|
||||
|
||||
|
||||
@api.post("edit_artist")
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def edit_artist(id,name):
|
||||
"""Internal Use Only"""
|
||||
result = database.edit_artist(id,name)
|
||||
return {
|
||||
"status":"success"
|
||||
}
|
||||
|
||||
@api.post("edit_track")
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def edit_track(id,title):
|
||||
"""Internal Use Only"""
|
||||
result = database.edit_track(id,{'title':title})
|
||||
return {
|
||||
"status":"success"
|
||||
}
|
||||
|
||||
|
||||
@api.post("merge_tracks")
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def merge_tracks(target_id,source_ids):
|
||||
"""Internal Use Only"""
|
||||
result = database.merge_tracks(target_id,source_ids)
|
||||
return {
|
||||
"status":"success"
|
||||
}
|
||||
|
||||
@api.post("merge_artists")
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def merge_artists(target_id,source_ids):
|
||||
"""Internal Use Only"""
|
||||
result = database.merge_artists(target_id,source_ids)
|
||||
return {
|
||||
"status":"success"
|
||||
}
|
||||
|
||||
@api.post("reparse_scrobble")
|
||||
@authenticated_function(api=True)
|
||||
@catch_exceptions
|
||||
def reparse_scrobble(timestamp):
|
||||
"""Internal Use Only"""
|
||||
result = database.reparse_scrobble(timestamp)
|
||||
if result:
|
||||
return {
|
||||
"status":"success",
|
||||
"desc":f"Scrobble was reparsed!",
|
||||
"scrobble":result
|
||||
}
|
||||
else:
|
||||
return {
|
||||
"status":"no_operation",
|
||||
"desc":"The scrobble was not changed."
|
||||
}
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
import re
|
||||
#from . import utilities
|
||||
from doreah import tsv
|
||||
from .globalconf import data_dir, malojaconfig
|
||||
import pkg_resources
|
||||
import os
|
||||
import csv
|
||||
|
||||
from .pkg_global.conf import data_dir, malojaconfig
|
||||
|
||||
# need to do this as a class so it can retain loaded settings from file
|
||||
# apparently this is not true
|
||||
|
@ -13,19 +13,29 @@ class CleanerAgent:
|
|||
self.updateRules()
|
||||
|
||||
def updateRules(self):
|
||||
raw = tsv.parse_all(data_dir["rules"](),"string","string","string","string")
|
||||
self.rules_belongtogether = [b for [a,b,c,d] in raw if a=="belongtogether"]
|
||||
self.rules_notanartist = [b for [a,b,c,d] in raw if a=="notanartist"]
|
||||
self.rules_replacetitle = {b.lower():c for [a,b,c,d] in raw if a=="replacetitle"}
|
||||
self.rules_replaceartist = {b.lower():c for [a,b,c,d] in raw if a=="replaceartist"}
|
||||
self.rules_ignoreartist = [b.lower() for [a,b,c,d] in raw if a=="ignoreartist"]
|
||||
self.rules_addartists = {c.lower():(b.lower(),d) for [a,b,c,d] in raw if a=="addartists"}
|
||||
self.rules_fixartists = {c.lower():b for [a,b,c,d] in raw if a=="fixartists"}
|
||||
self.rules_artistintitle = {b.lower():c for [a,b,c,d] in raw if a=="artistintitle"}
|
||||
|
||||
rawrules = []
|
||||
for f in os.listdir(data_dir["rules"]()):
|
||||
if f.split('.')[-1].lower() != 'tsv': continue
|
||||
filepath = data_dir["rules"](f)
|
||||
with open(filepath,'r') as filed:
|
||||
reader = csv.reader(filed,delimiter="\t")
|
||||
rawrules += [[col for col in entry if col] for entry in reader if len(entry)>0 and not entry[0].startswith('#')]
|
||||
|
||||
|
||||
self.rules_belongtogether = [r[1] for r in rawrules if r[0]=="belongtogether"]
|
||||
self.rules_notanartist = [r[1] for r in rawrules if r[0]=="notanartist"]
|
||||
self.rules_replacetitle = {r[1].lower():r[2] for r in rawrules if r[0]=="replacetitle"}
|
||||
self.rules_replaceartist = {r[1].lower():r[2] for r in rawrules if r[0]=="replaceartist"}
|
||||
self.rules_ignoreartist = [r[1].lower() for r in rawrules if r[0]=="ignoreartist"]
|
||||
self.rules_addartists = {r[2].lower():(r[1].lower(),r[3]) for r in rawrules if r[0]=="addartists"}
|
||||
self.rules_fixartists = {r[2].lower():r[1] for r in rawrules if r[0]=="fixartists"}
|
||||
self.rules_artistintitle = {r[1].lower():r[2] for r in rawrules if r[0]=="artistintitle"}
|
||||
#self.rules_regexartist = [[b,c] for [a,b,c,d] in raw if a=="regexartist"]
|
||||
#self.rules_regextitle = [[b,c] for [a,b,c,d] in raw if a=="regextitle"]
|
||||
|
||||
|
||||
|
||||
def fullclean(self,artist,title):
|
||||
artists = self.parseArtists(self.removespecial(artist))
|
||||
title = self.parseTitle(self.removespecial(title))
|
||||
|
@ -45,7 +55,7 @@ class CleanerAgent:
|
|||
artists = list(set(artists))
|
||||
artists.sort()
|
||||
|
||||
return (artists,title)
|
||||
return (artists,title.strip())
|
||||
|
||||
def removespecial(self,s):
|
||||
if isinstance(s,list):
|
||||
|
@ -72,7 +82,7 @@ class CleanerAgent:
|
|||
|
||||
def parseArtists(self,a):
|
||||
|
||||
if isinstance(a,list):
|
||||
if isinstance(a,list) or isinstance(a,tuple):
|
||||
res = [self.parseArtists(art) for art in a]
|
||||
return [a for group in res for a in group]
|
||||
|
||||
|
@ -99,9 +109,9 @@ class CleanerAgent:
|
|||
|
||||
|
||||
for d in self.delimiters_feat:
|
||||
if re.match(r"(.*) \(" + d + " (.*)\)",a) is not None:
|
||||
return self.parseArtists(re.sub(r"(.*) \(" + d + " (.*)\)",r"\1",a)) + \
|
||||
self.parseArtists(re.sub(r"(.*) \(" + d + " (.*)\)",r"\2",a))
|
||||
if re.match(r"(.*) [\(\[]" + d + " (.*)[\)\]]",a,flags=re.IGNORECASE) is not None:
|
||||
return self.parseArtists(re.sub(r"(.*) [\(\[]" + d + " (.*)[\)\]]",r"\1",a,flags=re.IGNORECASE)) + \
|
||||
self.parseArtists(re.sub(r"(.*) [\(\[]" + d + " (.*)[\)\]]",r"\2",a,flags=re.IGNORECASE))
|
||||
|
||||
|
||||
|
||||
|
@ -131,9 +141,11 @@ class CleanerAgent:
|
|||
|
||||
t = t.replace("[","(").replace("]",")")
|
||||
|
||||
t = re.sub(r" \(as made famous by .*?\)","",t)
|
||||
t = re.sub(r" \(originally by .*?\)","",t)
|
||||
t = re.sub(r" \(.*?Remaster.*?\)","",t)
|
||||
# we'll leave these matching all bracket types so future changes
|
||||
# won't require readaption
|
||||
t = re.sub(r" [\(\[]as made famous by .*?[\)\]]","",t)
|
||||
t = re.sub(r" [\(\[]originally by .*?[\)\]]","",t)
|
||||
t = re.sub(r" [\(\[].*?Remaster.*?[\)\]]","",t)
|
||||
|
||||
for s in malojaconfig["REMOVE_FROM_TITLE"]:
|
||||
if s in t:
|
||||
|
@ -144,84 +156,37 @@ class CleanerAgent:
|
|||
# t = p(t).strip()
|
||||
return t
|
||||
|
||||
def parseTitleForArtists(self,t):
|
||||
for d in self.delimiters_feat:
|
||||
if re.match(r"(.*) \(" + d + " (.*?)\)",t) is not None:
|
||||
(title,artists) = self.parseTitleForArtists(re.sub(r"(.*) \(" + d + " (.*?)\)",r"\1",t))
|
||||
artists += self.parseArtists(re.sub(r"(.*) \(" + d + " (.*?)\).*",r"\2",t))
|
||||
return (title,artists)
|
||||
if re.match(r"(.*) - " + d + " (.*)",t) is not None:
|
||||
(title,artists) = self.parseTitleForArtists(re.sub(r"(.*) - " + d + " (.*)",r"\1",t))
|
||||
artists += self.parseArtists(re.sub(r"(.*) - " + d + " (.*).*",r"\2",t))
|
||||
return (title,artists)
|
||||
if re.match(r"(.*) " + d + " (.*)",t) is not None:
|
||||
(title,artists) = self.parseTitleForArtists(re.sub(r"(.*) " + d + " (.*)",r"\1",t))
|
||||
artists += self.parseArtists(re.sub(r"(.*) " + d + " (.*).*",r"\2",t))
|
||||
return (title,artists)
|
||||
|
||||
def parseTitleForArtists(self,title):
|
||||
artists = []
|
||||
for delimiter in malojaconfig["DELIMITERS_FEAT"]:
|
||||
for pattern in [
|
||||
r" [\(\[]" + re.escape(delimiter) + " (.*?)[\)\]]",
|
||||
r" - " + re.escape(delimiter) + " (.*)",
|
||||
r" " + re.escape(delimiter) + " (.*)"
|
||||
]:
|
||||
matches = re.finditer(pattern,title,flags=re.IGNORECASE)
|
||||
for match in matches:
|
||||
title = match.re.sub('',match.string) # Remove matched part
|
||||
artists += self.parseArtists(match.group(1)) # Parse matched artist string
|
||||
|
||||
|
||||
|
||||
if malojaconfig["PARSE_REMIX_ARTISTS"]:
|
||||
for filter in malojaconfig["FILTERS_REMIX"]:
|
||||
for pattern in [
|
||||
r" [\(\[](.*)" + re.escape(filter) + "[\)\]]", # match remix in brackets
|
||||
r" - (.*)" + re.escape(filter) # match remix split with "-"
|
||||
]:
|
||||
match = re.search(pattern,title,flags=re.IGNORECASE)
|
||||
if match:
|
||||
# title stays the same
|
||||
artists += self.parseArtists(match.group(1))
|
||||
|
||||
|
||||
|
||||
for st in self.rules_artistintitle:
|
||||
if st in t.lower(): artists += self.rules_artistintitle[st].split("␟")
|
||||
return (t,artists)
|
||||
|
||||
|
||||
|
||||
#this is for all the runtime changes (counting Trouble Maker as HyunA for charts etc)
|
||||
class CollectorAgent:
|
||||
|
||||
def __init__(self):
|
||||
self.updateRules()
|
||||
|
||||
# rules_countas dict: real artist -> credited artist
|
||||
# rules_countas_id dict: real artist ID -> credited artist ID
|
||||
# rules_include dict: credited artist -> all real artists
|
||||
|
||||
def updateRules(self):
|
||||
raw = tsv.parse_all(data_dir["rules"](),"string","string","string")
|
||||
self.rules_countas = {b:c for [a,b,c] in raw if a=="countas"}
|
||||
self.rules_countas_id = {}
|
||||
self.rules_include = {} #Twice the memory, double the performance!
|
||||
# (Yes, we're saving redundant information here, but it's not unelegant if it's within a closed object!)
|
||||
for a in self.rules_countas:
|
||||
self.rules_include[self.rules_countas[a]] = self.rules_include.setdefault(self.rules_countas[a],[]) + [a]
|
||||
|
||||
# this agent needs to be aware of the current id assignment in the main program
|
||||
# unelegant, but the best way i can think of
|
||||
def updateIDs(self,artistlist):
|
||||
self.rules_countas_id = {artistlist.index(a):artistlist.index(self.rules_countas[a]) for a in self.rules_countas if a in artistlist}
|
||||
#self.rules_include_id = {artistlist.index(a):artistlist.index(self.rules_include[a]) for a in self.rules_include}
|
||||
#this needs to take lists into account
|
||||
|
||||
|
||||
# get who is credited for this artist
|
||||
def getCredited(self,artist):
|
||||
if artist in self.rules_countas:
|
||||
return self.rules_countas[artist]
|
||||
if artist in self.rules_countas_id:
|
||||
return self.rules_countas_id[artist]
|
||||
|
||||
else:
|
||||
return artist
|
||||
|
||||
# get all credited artists for the artists given
|
||||
def getCreditedList(self,artists):
|
||||
updatedArtists = [self.getCredited(artist) for artist in artists]
|
||||
return list(set(updatedArtists))
|
||||
|
||||
# get artists who the given artist is given credit for
|
||||
def getAllAssociated(self,artist):
|
||||
return self.rules_include.get(artist,[])
|
||||
|
||||
# this function is there to check for artists that we should include in the
|
||||
# database even though they never have any scrobble.
|
||||
def getAllArtists(self):
|
||||
return list({self.rules_countas[a] for a in self.rules_countas})
|
||||
# artists that count can be nonexisting (counting HyunA as 4Minute even
|
||||
# though 4Minute has never been listened to)
|
||||
# but artists that are counted as someone else are only relevant if they
|
||||
# exist (so we can preemptively declare lots of rules just in case)
|
||||
#return list(set([a for a in self.rules_countas] + [self.rules_countas[a] for a in self.rules_countas]))
|
||||
|
||||
if st in title.lower(): artists += self.rules_artistintitle[st].split("␟")
|
||||
return (title,artists)
|
||||
|
||||
|
||||
|
||||
|
|
|
@ -8,6 +8,7 @@ countas Trouble Maker HyunA
|
|||
countas S Club 7 Tina Barrett
|
||||
countas 4Minute HyunA
|
||||
countas I.O.I Chungha
|
||||
countas TrySail Sora Amamiya
|
||||
# Group more famous than single artist
|
||||
countas RenoakRhythm Approaching Nirvana
|
||||
countas Shirley Manson Garbage
|
||||
|
@ -16,3 +17,9 @@ countas Sips The Yogscast
|
|||
countas Sjin The Yogscast
|
||||
countas Airi Suzuki ℃-ute
|
||||
countas CeeLo Green Gnarls Barkley
|
||||
countas Amelia Watson Hololive EN
|
||||
countas Gawr Gura Hololive EN
|
||||
countas Mori Calliope Hololive EN
|
||||
countas Ninomae Ina'nis Hololive EN
|
||||
countas Takanashi Kiara Hololive EN
|
||||
countas Ceres Fauna Hololive EN
|
||||
|
|
Can't render this file because it has a wrong number of fields in line 5.
|
|
@ -0,0 +1,20 @@
|
|||
# NAME: JPop
|
||||
# DESC: Fixes and romanizes various Japanese tracks and artists
|
||||
|
||||
|
||||
belongtogether Myth & Roid
|
||||
|
||||
|
||||
# Sora-chan
|
||||
replaceartist Amamiya Sora Sora Amamiya
|
||||
replacetitle エデンの旅人 Eden no Tabibito
|
||||
replacetitle 月灯り Tsukiakari
|
||||
replacetitle 火花 Hibana
|
||||
replacetitle ロンリーナイト・ディスコティック Lonely Night Discotheque
|
||||
replacetitle 羽根輪舞 Hane Rinbu
|
||||
replacetitle メリーゴーランド Merry-go-round
|
||||
replacetitle フリイジア Fressia
|
||||
replacetitle 誓い Chikai
|
||||
|
||||
# ReoNa
|
||||
replacetitle ないない nainai
|
Can't render this file because it has a wrong number of fields in line 5.
|
|
@ -21,7 +21,7 @@ addartists HyunA Change Jun Hyung
|
|||
# BLACKPINK
|
||||
countas Jennie BLACKPINK
|
||||
countas Rosé BLACKPINK
|
||||
countas Lisa BLACKPINK
|
||||
countas Lalisa BLACKPINK
|
||||
countas Jisoo BLACKPINK
|
||||
replacetitle AS IF IT'S YOUR LAST As If It's Your Last
|
||||
replacetitle BOOMBAYAH Boombayah
|
||||
|
@ -200,10 +200,13 @@ countas ACE IZ*ONE
|
|||
countas Chaewon IZ*ONE
|
||||
countas Minju IZ*ONE
|
||||
|
||||
|
||||
# ITZY
|
||||
countas Yeji ITZY
|
||||
|
||||
# IVE
|
||||
countas Wonyoung IVE
|
||||
countas Yujin IVE
|
||||
countas Gaeul IVE
|
||||
|
||||
# Popular Remixes
|
||||
artistintitle Areia Remix Areia
|
||||
|
|
Can't render this file because it has a wrong number of fields in line 5.
|
|
@ -9,3 +9,4 @@ belongtogether Case & Point
|
|||
belongtogether Selena Gomez & The Scene
|
||||
belongtogether Gerry & The Pacemakers
|
||||
belongtogether AC/DC
|
||||
belongtogether Au/Ra
|
||||
|
|
Can't render this file because it has a wrong number of fields in line 4.
|
1175
maloja/database.py
1175
maloja/database.py
File diff suppressed because it is too large
Load Diff
|
@ -0,0 +1,500 @@
|
|||
# server
|
||||
from bottle import request, response, FormsDict
|
||||
|
||||
# rest of the project
|
||||
from ..cleanup import CleanerAgent
|
||||
from .. import images
|
||||
from ..malojatime import register_scrobbletime, time_stamps, ranges, alltime
|
||||
from ..malojauri import uri_to_internal, internal_to_uri, compose_querystring
|
||||
from ..thirdparty import proxy_scrobble_all
|
||||
from ..pkg_global.conf import data_dir, malojaconfig
|
||||
from ..apis import apikeystore
|
||||
#db
|
||||
from . import sqldb
|
||||
from . import cached
|
||||
from . import dbcache
|
||||
from . import exceptions
|
||||
|
||||
# doreah toolkit
|
||||
from doreah.logging import log
|
||||
from doreah.auth import authenticated_api, authenticated_api_with_alternate
|
||||
import doreah
|
||||
|
||||
|
||||
|
||||
|
||||
# technical
|
||||
import os
|
||||
import datetime
|
||||
import sys
|
||||
import unicodedata
|
||||
from collections import namedtuple
|
||||
from threading import Lock
|
||||
import yaml, json
|
||||
import math
|
||||
|
||||
# url handling
|
||||
import urllib
|
||||
|
||||
|
||||
|
||||
dbstatus = {
|
||||
"healthy":False, # we can access the db
|
||||
"rebuildinprogress":False,
|
||||
"complete":False # information is complete
|
||||
}
|
||||
|
||||
|
||||
|
||||
def waitfordb(func):
|
||||
def newfunc(*args,**kwargs):
|
||||
if not dbstatus['healthy']: raise exceptions.DatabaseNotBuilt()
|
||||
return func(*args,**kwargs)
|
||||
return newfunc
|
||||
|
||||
|
||||
|
||||
ISSUES = {}
|
||||
|
||||
cla = CleanerAgent()
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
## this function accepts a flat dict - all info of the scrobble should be top level key
|
||||
## but can contain a list as value
|
||||
## the following keys are valid:
|
||||
## scrobble_duration int
|
||||
## scrobble_time int
|
||||
## track_title str, mandatory
|
||||
## track_artists list, mandatory
|
||||
## track_length int
|
||||
## album_name str
|
||||
## album_artists list
|
||||
##
|
||||
##
|
||||
##
|
||||
##
|
||||
##
|
||||
##
|
||||
|
||||
def incoming_scrobble(rawscrobble,fix=True,client=None,api=None,dbconn=None):
|
||||
|
||||
missing = []
|
||||
for necessary_arg in ["track_artists","track_title"]:
|
||||
if not necessary_arg in rawscrobble or len(rawscrobble[necessary_arg]) == 0:
|
||||
missing.append(necessary_arg)
|
||||
if len(missing) > 0:
|
||||
log(f"Invalid Scrobble [Client: {client} | API: {api}]: {rawscrobble} ",color='red')
|
||||
raise exceptions.MissingScrobbleParameters(missing)
|
||||
|
||||
|
||||
log(f"Incoming scrobble [Client: {client} | API: {api}]: {rawscrobble}")
|
||||
|
||||
scrobbledict = rawscrobble_to_scrobbledict(rawscrobble, fix, client)
|
||||
|
||||
sqldb.add_scrobble(scrobbledict,dbconn=dbconn)
|
||||
proxy_scrobble_all(scrobbledict['track']['artists'],scrobbledict['track']['title'],scrobbledict['time'])
|
||||
|
||||
dbcache.invalidate_caches(scrobbledict['time'])
|
||||
|
||||
#return {"status":"success","scrobble":scrobbledict}
|
||||
return scrobbledict
|
||||
|
||||
|
||||
@waitfordb
|
||||
def reparse_scrobble(timestamp):
|
||||
log(f"Reparsing Scrobble {timestamp}")
|
||||
scrobble = sqldb.get_scrobble(timestamp=timestamp, include_internal=True)
|
||||
|
||||
if not scrobble or not scrobble['rawscrobble']:
|
||||
return False
|
||||
|
||||
newscrobble = rawscrobble_to_scrobbledict(scrobble['rawscrobble'])
|
||||
|
||||
track_id = sqldb.get_track_id(newscrobble['track'])
|
||||
|
||||
# check if id changed
|
||||
if sqldb.get_track_id(scrobble['track']) != track_id:
|
||||
sqldb.edit_scrobble(timestamp, {'track':newscrobble['track']})
|
||||
dbcache.invalidate_entity_cache()
|
||||
dbcache.invalidate_caches()
|
||||
return sqldb.get_scrobble(timestamp=timestamp)
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def rawscrobble_to_scrobbledict(rawscrobble, fix=True, client=None):
|
||||
# raw scrobble to processed info
|
||||
scrobbleinfo = {**rawscrobble}
|
||||
if fix:
|
||||
scrobbleinfo['track_artists'],scrobbleinfo['track_title'] = cla.fullclean(scrobbleinfo['track_artists'],scrobbleinfo['track_title'])
|
||||
scrobbleinfo['scrobble_time'] = scrobbleinfo.get('scrobble_time') or int(datetime.datetime.now(tz=datetime.timezone.utc).timestamp())
|
||||
|
||||
# processed info to internal scrobble dict
|
||||
scrobbledict = {
|
||||
"time":scrobbleinfo.get('scrobble_time'),
|
||||
"track":{
|
||||
"artists":scrobbleinfo.get('track_artists'),
|
||||
"title":scrobbleinfo.get('track_title'),
|
||||
"album":{
|
||||
"name":scrobbleinfo.get('album_name'),
|
||||
"artists":scrobbleinfo.get('album_artists')
|
||||
},
|
||||
"length":scrobbleinfo.get('track_length')
|
||||
},
|
||||
"duration":scrobbleinfo.get('scrobble_duration'),
|
||||
"origin":f"client:{client}" if client else "generic",
|
||||
"extra":{
|
||||
k:scrobbleinfo[k] for k in scrobbleinfo if k not in
|
||||
['scrobble_time','track_artists','track_title','track_length','scrobble_duration']#,'album_name','album_artists']
|
||||
},
|
||||
"rawscrobble":rawscrobble
|
||||
}
|
||||
|
||||
return scrobbledict
|
||||
|
||||
|
||||
@waitfordb
|
||||
def remove_scrobble(timestamp):
|
||||
log(f"Deleting Scrobble {timestamp}")
|
||||
result = sqldb.delete_scrobble(timestamp)
|
||||
dbcache.invalidate_caches(timestamp)
|
||||
|
||||
return result
|
||||
|
||||
@waitfordb
|
||||
def edit_artist(id,artistinfo):
|
||||
artist = sqldb.get_artist(id)
|
||||
log(f"Renaming {artist} to {artistinfo}")
|
||||
result = sqldb.edit_artist(id,artistinfo)
|
||||
dbcache.invalidate_entity_cache()
|
||||
dbcache.invalidate_caches()
|
||||
|
||||
return result
|
||||
|
||||
@waitfordb
|
||||
def edit_track(id,trackinfo):
|
||||
track = sqldb.get_track(id)
|
||||
log(f"Renaming {track['title']} to {trackinfo['title']}")
|
||||
result = sqldb.edit_track(id,trackinfo)
|
||||
dbcache.invalidate_entity_cache()
|
||||
dbcache.invalidate_caches()
|
||||
|
||||
return result
|
||||
|
||||
@waitfordb
|
||||
def merge_artists(target_id,source_ids):
|
||||
sources = [sqldb.get_artist(id) for id in source_ids]
|
||||
target = sqldb.get_artist(target_id)
|
||||
log(f"Merging {sources} into {target}")
|
||||
result = sqldb.merge_artists(target_id,source_ids)
|
||||
dbcache.invalidate_entity_cache()
|
||||
dbcache.invalidate_caches()
|
||||
|
||||
return result
|
||||
|
||||
@waitfordb
|
||||
def merge_tracks(target_id,source_ids):
|
||||
sources = [sqldb.get_track(id) for id in source_ids]
|
||||
target = sqldb.get_track(target_id)
|
||||
log(f"Merging {sources} into {target}")
|
||||
result = sqldb.merge_tracks(target_id,source_ids)
|
||||
dbcache.invalidate_entity_cache()
|
||||
dbcache.invalidate_caches()
|
||||
|
||||
return result
|
||||
|
||||
|
||||
|
||||
|
||||
@waitfordb
|
||||
def get_scrobbles(dbconn=None,**keys):
|
||||
(since,to) = keys.get('timerange').timestamps()
|
||||
if 'artist' in keys:
|
||||
result = sqldb.get_scrobbles_of_artist(artist=keys['artist'],since=since,to=to,dbconn=dbconn)
|
||||
elif 'track' in keys:
|
||||
result = sqldb.get_scrobbles_of_track(track=keys['track'],since=since,to=to,dbconn=dbconn)
|
||||
else:
|
||||
result = sqldb.get_scrobbles(since=since,to=to,dbconn=dbconn)
|
||||
#return result[keys['page']*keys['perpage']:(keys['page']+1)*keys['perpage']]
|
||||
return list(reversed(result))
|
||||
|
||||
|
||||
@waitfordb
|
||||
def get_scrobbles_num(dbconn=None,**keys):
|
||||
(since,to) = keys.get('timerange').timestamps()
|
||||
if 'artist' in keys:
|
||||
result = len(sqldb.get_scrobbles_of_artist(artist=keys['artist'],since=since,to=to,resolve_references=False,dbconn=dbconn))
|
||||
elif 'track' in keys:
|
||||
result = len(sqldb.get_scrobbles_of_track(track=keys['track'],since=since,to=to,resolve_references=False,dbconn=dbconn))
|
||||
else:
|
||||
result = sqldb.get_scrobbles_num(since=since,to=to,dbconn=dbconn)
|
||||
return result
|
||||
|
||||
|
||||
|
||||
@waitfordb
|
||||
def get_tracks(dbconn=None,**keys):
|
||||
if keys.get('artist') is None:
|
||||
result = sqldb.get_tracks(dbconn=dbconn)
|
||||
else:
|
||||
result = sqldb.get_tracks_of_artist(keys.get('artist'),dbconn=dbconn)
|
||||
return result
|
||||
|
||||
@waitfordb
|
||||
def get_artists(dbconn=None):
|
||||
return sqldb.get_artists(dbconn=dbconn)
|
||||
|
||||
|
||||
@waitfordb
|
||||
def get_charts_artists(dbconn=None,**keys):
|
||||
(since,to) = keys.get('timerange').timestamps()
|
||||
result = sqldb.count_scrobbles_by_artist(since=since,to=to,dbconn=dbconn)
|
||||
return result
|
||||
|
||||
@waitfordb
|
||||
def get_charts_tracks(dbconn=None,**keys):
|
||||
(since,to) = keys.get('timerange').timestamps()
|
||||
if 'artist' in keys:
|
||||
result = sqldb.count_scrobbles_by_track_of_artist(since=since,to=to,artist=keys['artist'],dbconn=dbconn)
|
||||
else:
|
||||
result = sqldb.count_scrobbles_by_track(since=since,to=to,dbconn=dbconn)
|
||||
return result
|
||||
|
||||
@waitfordb
|
||||
def get_pulse(dbconn=None,**keys):
|
||||
|
||||
rngs = ranges(**{k:keys[k] for k in keys if k in ["since","to","within","timerange","step","stepn","trail"]})
|
||||
results = []
|
||||
for rng in rngs:
|
||||
res = get_scrobbles_num(timerange=rng,**{k:keys[k] for k in keys if k != 'timerange'},dbconn=dbconn)
|
||||
results.append({"range":rng,"scrobbles":res})
|
||||
|
||||
return results
|
||||
|
||||
@waitfordb
|
||||
def get_performance(dbconn=None,**keys):
|
||||
|
||||
rngs = ranges(**{k:keys[k] for k in keys if k in ["since","to","within","timerange","step","stepn","trail"]})
|
||||
results = []
|
||||
|
||||
for rng in rngs:
|
||||
if "track" in keys:
|
||||
track = sqldb.get_track(sqldb.get_track_id(keys['track'],dbconn=dbconn),dbconn=dbconn)
|
||||
charts = get_charts_tracks(timerange=rng,dbconn=dbconn)
|
||||
rank = None
|
||||
for c in charts:
|
||||
if c["track"] == track:
|
||||
rank = c["rank"]
|
||||
break
|
||||
elif "artist" in keys:
|
||||
artist = sqldb.get_artist(sqldb.get_artist_id(keys['artist'],dbconn=dbconn),dbconn=dbconn)
|
||||
# ^this is the most useless line in programming history
|
||||
# but I like consistency
|
||||
charts = get_charts_artists(timerange=rng,dbconn=dbconn)
|
||||
rank = None
|
||||
for c in charts:
|
||||
if c["artist"] == artist:
|
||||
rank = c["rank"]
|
||||
break
|
||||
else:
|
||||
raise exceptions.MissingEntityParameter()
|
||||
results.append({"range":rng,"rank":rank})
|
||||
|
||||
return results
|
||||
|
||||
@waitfordb
|
||||
def get_top_artists(dbconn=None,**keys):
|
||||
|
||||
rngs = ranges(**{k:keys[k] for k in keys if k in ["since","to","within","timerange","step","stepn","trail"]})
|
||||
results = []
|
||||
|
||||
for rng in rngs:
|
||||
try:
|
||||
res = get_charts_artists(timerange=rng,dbconn=dbconn)[0]
|
||||
results.append({"range":rng,"artist":res["artist"],"scrobbles":res["scrobbles"]})
|
||||
except Exception:
|
||||
results.append({"range":rng,"artist":None,"scrobbles":0})
|
||||
|
||||
return results
|
||||
|
||||
|
||||
@waitfordb
|
||||
def get_top_tracks(dbconn=None,**keys):
|
||||
|
||||
rngs = ranges(**{k:keys[k] for k in keys if k in ["since","to","within","timerange","step","stepn","trail"]})
|
||||
results = []
|
||||
|
||||
for rng in rngs:
|
||||
try:
|
||||
res = get_charts_tracks(timerange=rng,dbconn=dbconn)[0]
|
||||
results.append({"range":rng,"track":res["track"],"scrobbles":res["scrobbles"]})
|
||||
except Exception:
|
||||
results.append({"range":rng,"track":None,"scrobbles":0})
|
||||
|
||||
return results
|
||||
|
||||
@waitfordb
|
||||
def artist_info(dbconn=None,**keys):
|
||||
|
||||
artist = keys.get('artist')
|
||||
if artist is None: raise exceptions.MissingEntityParameter()
|
||||
|
||||
artist_id = sqldb.get_artist_id(artist,dbconn=dbconn)
|
||||
artist = sqldb.get_artist(artist_id,dbconn=dbconn)
|
||||
alltimecharts = get_charts_artists(timerange=alltime(),dbconn=dbconn)
|
||||
scrobbles = get_scrobbles_num(artist=artist,timerange=alltime(),dbconn=dbconn)
|
||||
#we cant take the scrobble number from the charts because that includes all countas scrobbles
|
||||
try:
|
||||
c = [e for e in alltimecharts if e["artist"] == artist][0]
|
||||
others = sqldb.get_associated_artists(artist,dbconn=dbconn)
|
||||
position = c["rank"]
|
||||
return {
|
||||
"artist":artist,
|
||||
"scrobbles":scrobbles,
|
||||
"position":position,
|
||||
"associated":others,
|
||||
"medals":{
|
||||
"gold": [year for year in cached.medals_artists if artist_id in cached.medals_artists[year]['gold']],
|
||||
"silver": [year for year in cached.medals_artists if artist_id in cached.medals_artists[year]['silver']],
|
||||
"bronze": [year for year in cached.medals_artists if artist_id in cached.medals_artists[year]['bronze']],
|
||||
},
|
||||
"topweeks":len([e for e in cached.weekly_topartists if e == artist_id]),
|
||||
"id":artist_id
|
||||
}
|
||||
except Exception:
|
||||
# if the artist isnt in the charts, they are not being credited and we
|
||||
# need to show information about the credited one
|
||||
replaceartist = sqldb.get_credited_artists(artist)[0]
|
||||
c = [e for e in alltimecharts if e["artist"] == replaceartist][0]
|
||||
position = c["rank"]
|
||||
return {
|
||||
"artist":artist,
|
||||
"replace":replaceartist,
|
||||
"scrobbles":scrobbles,
|
||||
"position":position,
|
||||
"id":artist_id
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
@waitfordb
|
||||
def track_info(dbconn=None,**keys):
|
||||
|
||||
track = keys.get('track')
|
||||
if track is None: raise exceptions.MissingEntityParameter()
|
||||
|
||||
track_id = sqldb.get_track_id(track,dbconn=dbconn)
|
||||
track = sqldb.get_track(track_id,dbconn=dbconn)
|
||||
alltimecharts = get_charts_tracks(timerange=alltime(),dbconn=dbconn)
|
||||
#scrobbles = get_scrobbles_num(track=track,timerange=alltime())
|
||||
|
||||
c = [e for e in alltimecharts if e["track"] == track][0]
|
||||
scrobbles = c["scrobbles"]
|
||||
position = c["rank"]
|
||||
cert = None
|
||||
threshold_gold, threshold_platinum, threshold_diamond = malojaconfig["SCROBBLES_GOLD","SCROBBLES_PLATINUM","SCROBBLES_DIAMOND"]
|
||||
if scrobbles >= threshold_diamond: cert = "diamond"
|
||||
elif scrobbles >= threshold_platinum: cert = "platinum"
|
||||
elif scrobbles >= threshold_gold: cert = "gold"
|
||||
|
||||
|
||||
return {
|
||||
"track":track,
|
||||
"scrobbles":scrobbles,
|
||||
"position":position,
|
||||
"medals":{
|
||||
"gold": [year for year in cached.medals_tracks if track_id in cached.medals_tracks[year]['gold']],
|
||||
"silver": [year for year in cached.medals_tracks if track_id in cached.medals_tracks[year]['silver']],
|
||||
"bronze": [year for year in cached.medals_tracks if track_id in cached.medals_tracks[year]['bronze']],
|
||||
},
|
||||
"certification":cert,
|
||||
"topweeks":len([e for e in cached.weekly_toptracks if e == track_id]),
|
||||
"id":track_id
|
||||
}
|
||||
|
||||
|
||||
|
||||
def get_predefined_rulesets(dbconn=None):
|
||||
validchars = "-_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
|
||||
|
||||
rulesets = []
|
||||
|
||||
for f in os.listdir(data_dir['rules']("predefined")):
|
||||
if f.endswith(".tsv"):
|
||||
|
||||
rawf = f.replace(".tsv","")
|
||||
valid = all(char in validchars for char in rawf)
|
||||
if not valid: continue
|
||||
if "_" not in rawf: continue
|
||||
|
||||
try:
|
||||
with open(data_dir['rules']("predefined",f)) as tsvfile:
|
||||
line1 = tsvfile.readline()
|
||||
line2 = tsvfile.readline()
|
||||
|
||||
if "# NAME: " in line1:
|
||||
name = line1.replace("# NAME: ","")
|
||||
else: name = rawf.split("_")[1]
|
||||
desc = line2.replace("# DESC: ","") if "# DESC: " in line2 else ""
|
||||
author = rawf.split("_")[0]
|
||||
except Exception:
|
||||
continue
|
||||
|
||||
ruleset = {"file":rawf}
|
||||
rulesets.append(ruleset)
|
||||
ruleset["active"] = bool(os.path.exists(data_dir['rules'](f)))
|
||||
ruleset["name"] = name
|
||||
ruleset["author"] = author
|
||||
ruleset["desc"] = desc
|
||||
|
||||
return rulesets
|
||||
|
||||
|
||||
####
|
||||
## Server operation
|
||||
####
|
||||
|
||||
|
||||
|
||||
def start_db():
|
||||
# Upgrade database
|
||||
from .. import upgrade
|
||||
upgrade.upgrade_db(sqldb.add_scrobbles)
|
||||
|
||||
# Load temporary tables
|
||||
from . import associated
|
||||
associated.load_associated_rules()
|
||||
|
||||
dbstatus['healthy'] = True
|
||||
|
||||
# inform time module about begin of scrobbling
|
||||
try:
|
||||
firstscrobble = sqldb.get_scrobbles()[0]
|
||||
register_scrobbletime(firstscrobble['time'])
|
||||
except IndexError:
|
||||
register_scrobbletime(int(datetime.datetime.now().timestamp()))
|
||||
|
||||
|
||||
# create cached information
|
||||
cached.update_medals()
|
||||
cached.update_weekly()
|
||||
|
||||
dbstatus['complete'] = True
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
# Search for strings
|
||||
def db_search(query,type=None):
|
||||
results = []
|
||||
if type=="ARTIST":
|
||||
results = sqldb.search_artist(query)
|
||||
if type=="TRACK":
|
||||
results = sqldb.search_track(query)
|
||||
return results
|
|
@ -0,0 +1,49 @@
|
|||
## dealing with loading the associated artists rules into a database
|
||||
## right now this is kind of absurd because we're storing it in a db while not
|
||||
## actually using its permanence, but this makes it possible to use the information
|
||||
## directly in sql
|
||||
|
||||
|
||||
import csv
|
||||
import os
|
||||
|
||||
from . import sqldb
|
||||
from ..pkg_global.conf import data_dir
|
||||
|
||||
|
||||
def load_associated_rules():
	"""Reload the 'countas' rules from the tsv rule files into the database.

	Clears the associated_artists table, then re-reads every *.tsv file in
	the rules directory and inserts one row per 'countas' rule whose source
	and target artists both resolve to existing database ids.
	"""
	# delete old
	with sqldb.engine.begin() as conn:
		op = sqldb.DB['associated_artists'].delete().where()
		conn.execute(op)

	# load from file
	# rule files are tab-separated; empty columns are dropped and lines
	# starting with '#' are treated as comments
	rawrules = []
	for f in os.listdir(data_dir["rules"]()):
		if f.split('.')[-1].lower() != 'tsv': continue
		filepath = data_dir["rules"](f)
		with open(filepath,'r') as filed:
			reader = csv.reader(filed,delimiter="\t")
			rawrules += [[col for col in entry if col] for entry in reader if len(entry)>0 and not entry[0].startswith('#')]
	# guard len(r)>=3: a malformed or all-empty row would otherwise raise
	# IndexError on r[0] / r[2]
	rules = [{'source_artist':r[1],'target_artist':r[2]} for r in rawrules if len(r)>=3 and r[0]=="countas"]

	# find ids (only artists that already exist in the database are resolved)
	rules = [{k:sqldb.get_artist_id(rule[k],create_new=False) for k in rule} for rule in rules]
	# drop rules where either artist is unknown - previously only the source
	# was checked, which could insert rows with a NULL target_artist
	rules = [r for r in rules if r['source_artist'] is not None and r['target_artist'] is not None]

	# write to db
	ops = [
		sqldb.DB['associated_artists'].insert().values(**r).prefix_with('OR IGNORE')
		for r in rules
	]

	with sqldb.engine.begin() as conn:
		for op in ops:
			conn.execute(op)
|
|
@ -0,0 +1,74 @@
|
|||
# for information that is not authorative, but should be saved anyway because it
|
||||
# changes infrequently and DB access is expensive
|
||||
|
||||
from doreah.regular import runyearly, rundaily
|
||||
from .. import database
|
||||
from . import sqldb
|
||||
from .. import malojatime as mjt
|
||||
|
||||
|
||||
|
||||
# In-memory caches, populated at runtime by update_medals() / update_weekly()
# below. Medal dicts are keyed by the year's desc() string and hold lists of
# database ids.
medals_artists = {
	# year: {'gold':[],'silver':[],'bronze':[]}
}
medals_tracks = {
	# year: {'gold':[],'silver':[],'bronze':[]}
}

# ids of artists / tracks that ranked #1 in at least one finished week
weekly_topartists = []
weekly_toptracks = []
|
||||
|
||||
@runyearly
def update_medals():
	"""Rebuild the medal caches: top 3 artists and tracks of each finished year."""

	global medals_artists, medals_tracks
	medals_artists.clear()
	medals_tracks.clear()

	rank_to_medal = {1: 'gold', 2: 'silver', 3: 'bronze'}

	with sqldb.engine.begin() as conn:
		for year in mjt.ranges(step="year"):
			# the current year is not finished yet, so it gets no medals
			if year == mjt.thisyear(): break

			artist_charts = sqldb.count_scrobbles_by_artist(since=year.first_stamp(),to=year.last_stamp(),resolve_ids=False,dbconn=conn)
			track_charts = sqldb.count_scrobbles_by_track(since=year.first_stamp(),to=year.last_stamp(),resolve_ids=False,dbconn=conn)

			year_artists = {medal: [] for medal in rank_to_medal.values()}
			year_tracks = {medal: [] for medal in rank_to_medal.values()}
			medals_artists[year.desc()] = year_artists
			medals_tracks[year.desc()] = year_tracks

			# charts are rank-ordered, so stop at the first entry below rank 3
			for chart_entry in artist_charts:
				medal = rank_to_medal.get(chart_entry['rank'])
				if medal is None: break
				year_artists[medal].append(chart_entry['artist_id'])
			for chart_entry in track_charts:
				medal = rank_to_medal.get(chart_entry['rank'])
				if medal is None: break
				year_tracks[medal].append(chart_entry['track_id'])
|
||||
|
||||
|
||||
|
||||
|
||||
@rundaily
def update_weekly():
	"""Rebuild the caches of weekly #1 artists and tracks over all finished weeks."""

	global weekly_topartists, weekly_toptracks
	weekly_topartists.clear()
	weekly_toptracks.clear()

	with sqldb.engine.begin() as conn:
		for week in mjt.ranges(step="week"):
			# the current week is still in progress, so it is not counted
			if week == mjt.thisweek(): break

			artist_charts = sqldb.count_scrobbles_by_artist(since=week.first_stamp(),to=week.last_stamp(),resolve_ids=False,dbconn=conn)
			track_charts = sqldb.count_scrobbles_by_track(since=week.first_stamp(),to=week.last_stamp(),resolve_ids=False,dbconn=conn)

			# charts are rank-ordered; only rank-1 entries (incl. ties) matter
			for chart_entry in artist_charts:
				if chart_entry['rank'] != 1: break
				weekly_topartists.append(chart_entry['artist_id'])
			for chart_entry in track_charts:
				if chart_entry['rank'] != 1: break
				weekly_toptracks.append(chart_entry['track_id'])
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue