├── .dockerignore ├── .github ├── FUNDING.yml └── workflows │ └── ci.yml ├── .gitignore ├── Dockerfile ├── LICENSE ├── MANIFEST.in ├── Makefile ├── Pipfile ├── Pipfile.lock ├── README.md ├── bandcampsync ├── __init__.py ├── bandcamp.py ├── config.py ├── download.py ├── logger.py ├── media.py └── notify.py ├── bin ├── bandcampsync └── bandcampsync-service ├── dev.env ├── docs └── cookies.jpg ├── entrypoint.sh ├── requirements.txt ├── setup.cfg └── setup.py /.dockerignore: -------------------------------------------------------------------------------- 1 | * 2 | !entrypoint.sh 3 | -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | github: [meeb] 2 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: Build a BandcampSync container 2 | 3 | env: 4 | IMAGE_NAME: bandcampsync 5 | 6 | on: 7 | push: 8 | branches: 9 | - main 10 | 11 | jobs: 12 | containerise: 13 | runs-on: ubuntu-latest 14 | steps: 15 | - name: Set up QEMU 16 | uses: docker/setup-qemu-action@v3 17 | - name: Set up Docker Buildx 18 | uses: docker/setup-buildx-action@v3 19 | - name: Log into GitHub Container Registry 20 | uses: docker/login-action@v3 21 | with: 22 | registry: ghcr.io 23 | username: ${{ github.actor }} 24 | password: ${{ secrets.REGISTRY_ACCESS_TOKEN }} 25 | 26 | - name: Lowercase github username for ghcr 27 | id: string 28 | uses: ASzc/change-string-case-action@v6 29 | with: 30 | string: ${{ github.actor }} 31 | - name: Build and push 32 | uses: docker/build-push-action@v6 33 | with: 34 | platforms: linux/amd64,linux/arm64 35 | push: true 36 | tags: ghcr.io/${{ steps.string.outputs.lowercase }}/${{ env.IMAGE_NAME }}:latest 37 | cache-from: type=registry,ref=ghcr.io/${{ steps.string.outputs.lowercase }}/${{ env.IMAGE_NAME }}:latest 38 | cache-to: type=inline 39 | build-args: | 40 | IMAGE_NAME=${{ env.IMAGE_NAME }} 41 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # poetry 98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 102 | #poetry.lock 103 | 104 | # pdm 105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 106 | #pdm.lock 107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 108 | # in version control. 109 | # https://pdm.fming.dev/#use-with-ide 110 | .pdm.toml 111 | 112 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 113 | __pypackages__/ 114 | 115 | # Celery stuff 116 | celerybeat-schedule 117 | celerybeat.pid 118 | 119 | # SageMath parsed files 120 | *.sage.py 121 | 122 | # Environments 123 | .env 124 | .venv 125 | env/ 126 | venv/ 127 | ENV/ 128 | env.bak/ 129 | venv.bak/ 130 | 131 | # Spyder project settings 132 | .spyderproject 133 | .spyproject 134 | 135 | # Rope project settings 136 | .ropeproject 137 | 138 | # mkdocs documentation 139 | /site 140 | 141 | # mypy 142 | .mypy_cache/ 143 | .dmypy.json 144 | dmypy.json 145 | 146 | # Pyre type checker 147 | .pyre/ 148 | 149 | # pytype static type analyzer 150 | .pytype/ 151 | 152 | # Cython debug symbols 153 | cython_debug/ 154 | 155 | # PyCharm 156 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 157 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 158 | # and can be added to the global gitignore or merged into this file. For a more nuclear 159 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
160 | #.idea/
161 | 
162 | cookies.txt
163 | docker-config/
164 | docker-downloads/
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
 1 | FROM debian:bookworm-slim
 2 | 
 3 | ENV DEBIAN_FRONTEND="noninteractive" \
 4 |     HOME="/root" \
 5 |     LANGUAGE="en_US.UTF-8" \
 6 |     LANG="en_US.UTF-8" \
 7 |     LC_ALL="en_US.UTF-8" \
 8 |     TERM="xterm"
 9 | 
10 | # Set up the container
11 | RUN set -x && \
12 |     apt-get update && \
13 |     # Set locale
14 |     apt-get -y --no-install-recommends install locales && \
15 |     echo "en_US.UTF-8 UTF-8" > /etc/locale.gen && \
16 |     locale-gen en_US.UTF-8 && \
17 |     # Install required distro packages
18 |     apt-get -y --no-install-recommends install \
19 |     git \
20 |     gosu \
21 |     python3 \
22 |     python3-dev \
23 |     python3-pip && \
24 |     # Create an 'app' user which the service will run as
25 |     groupadd app && \
26 |     useradd -M -d /app -s /bin/false -g app app && \
27 |     # Clean up
28 |     apt-get -y autoremove && \
29 |     apt-get -y autoclean && \
30 |     rm -rf /var/lib/apt/lists/* && \
31 |     rm -rf /var/cache/apt/* && \
32 |     rm -rf /tmp/
33 | 
34 | RUN set -x && \
35 |     # Allow root to run su without a password
36 |     echo "root ALL = NOPASSWD: /bin/su ALL" >> /etc/sudoers && \
37 |     # Install BandcampSync
38 |     python3 -m pip install --break-system-packages git+https://github.com/meeb/bandcampsync.git@v0.5.1#egg=bandcampsync
39 | 
40 | # Volumes
41 | VOLUME ["/config", "/downloads"]
42 | 
43 | # Set the 'app' user UID and GID in the entrypoint
44 | COPY entrypoint.sh /entrypoint.sh
45 | ENTRYPOINT ["/entrypoint.sh"]
46 | 
47 | # Run the service
48 | CMD ["bandcampsync-service"]
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
 1 | BSD 3-Clause License
 2 | 
 3 | Copyright (c) 2023, meeb
 4 | All rights reserved.
 5 | 
 6 | Redistribution and use in source and binary forms, with or without
 7 | modification, are permitted provided that the following conditions are met:
 8 | 
 9 | 1. Redistributions of source code must retain the above copyright notice, this
10 |    list of conditions and the following disclaimer.
11 | 
12 | 2. Redistributions in binary form must reproduce the above copyright notice,
13 |    this list of conditions and the following disclaimer in the documentation
14 |    and/or other materials provided with the distribution.
15 | 
16 | 3. Neither the name of the copyright holder nor the names of its
17 |    contributors may be used to endorse or promote products derived from
18 |    this software without specific prior written permission.
19 | 
20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include *.txt 2 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | docker=/usr/bin/docker 2 | name=bandcampsync 3 | image=$(name):latest 4 | 5 | 6 | container: 7 | $(docker) build -t $(image) . 8 | 9 | 10 | runcontainer: 11 | $(docker) run --rm --name $(name) --env-file dev.env -ti -v ./docker-config:/config -v ./docker-downloads:/downloads $(image) 12 | -------------------------------------------------------------------------------- /Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | url = "https://pypi.org/simple" 3 | verify_ssl = true 4 | name = "pypi" 5 | 6 | [packages] 7 | requests = "*" 8 | beautifulsoup4 = "*" 9 | html5lib = "*" 10 | -------------------------------------------------------------------------------- /Pipfile.lock: -------------------------------------------------------------------------------- 1 | { 2 | "_meta": { 3 | "hash": { 4 | "sha256": "8d98ddd737611126458aac3423c185ee6c4c08442fb73255a94749b6c7d51b00" 5 | }, 6 | "pipfile-spec": 6, 7 | "requires": {}, 8 | "sources": [ 9 | { 10 | "name": "pypi", 11 | "url": "https://pypi.org/simple", 12 | "verify_ssl": true 13 | } 14 | ] 15 | }, 16 | "default": { 17 | "beautifulsoup4": { 18 | "hashes": [ 19 | "sha256:9bbbb14bfde9d79f38b8cd5f8c7c85f4b8f2523190ebed90e950a8dea4cb1c4b", 20 | "sha256:dbb3c4e1ceae6aefebdaf2423247260cd062430a410e38c66f2baa50a8437195" 21 | ], 22 | "index": "pypi", 23 | "version": "==4.13.4" 24 | }, 25 | "certifi": { 26 | "hashes": [ 27 | "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6", 28 | "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3" 29 | ], 30 | "markers": "python_version >= '3.6'", 31 | "version": "==2025.4.26" 32 | }, 33 | "charset-normalizer": { 34 | "hashes": [ 35 | "sha256:005fa3432484527f9732ebd315da8da8001593e2cf46a3d817669f062c3d9ed4", 36 | "sha256:046595208aae0120559a67693ecc65dd75d46f7bf687f159127046628178dc45", 37 | "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7", 38 | "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0", 39 | "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7", 40 | "sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d", 41 | "sha256:1b1bde144d98e446b056ef98e59c256e9294f6b74d7af6846bf5ffdafd687a7d", 42 | "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0", 43 | "sha256:1cad5f45b3146325bb38d6855642f6fd609c3f7cad4dbaf75549bf3b904d3184", 44 | "sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db", 45 | "sha256:24498ba8ed6c2e0b56d4acbf83f2d989720a93b41d712ebd4f4979660db4417b", 46 | "sha256:25a23ea5c7edc53e0f29bae2c44fcb5a1aa10591aae107f2a2b2583a9c5cbc64", 47 | "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b", 48 | "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8", 49 | "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff", 50 | "sha256:36b31da18b8890a76ec181c3cf44326bf2c48e36d393ca1b72b3f484113ea344", 51 | "sha256:3c21d4fca343c805a52c0c78edc01e3477f6dd1ad7c47653241cf2a206d4fc58", 52 | "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e", 53 | 
"sha256:43e0933a0eff183ee85833f341ec567c0980dae57c464d8a508e1b2ceb336471", 54 | "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148", 55 | "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a", 56 | "sha256:50bf98d5e563b83cc29471fa114366e6806bc06bc7a25fd59641e41445327836", 57 | "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e", 58 | "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", 59 | "sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c", 60 | "sha256:6333b3aa5a12c26b2a4d4e7335a28f1475e0e5e17d69d55141ee3cab736f66d1", 61 | "sha256:65c981bdbd3f57670af8b59777cbfae75364b483fa8a9f420f08094531d54a01", 62 | "sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366", 63 | "sha256:6a0289e4589e8bdfef02a80478f1dfcb14f0ab696b5a00e1f4b8a14a307a3c58", 64 | "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5", 65 | "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c", 66 | "sha256:6fc1f5b51fa4cecaa18f2bd7a003f3dd039dd615cd69a2afd6d3b19aed6775f2", 67 | "sha256:70f7172939fdf8790425ba31915bfbe8335030f05b9913d7ae00a87d4395620a", 68 | "sha256:721c76e84fe669be19c5791da68232ca2e05ba5185575086e384352e2c309597", 69 | "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b", 70 | "sha256:75d10d37a47afee94919c4fab4c22b9bc2a8bf7d4f46f87363bcf0573f3ff4f5", 71 | "sha256:76af085e67e56c8816c3ccf256ebd136def2ed9654525348cfa744b6802b69eb", 72 | "sha256:770cab594ecf99ae64c236bc9ee3439c3f46be49796e265ce0cc8bc17b10294f", 73 | "sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0", 74 | "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941", 75 | "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0", 76 | "sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86", 77 | "sha256:8272b73e1c5603666618805fe821edba66892e2870058c94c53147602eab29c7", 78 | "sha256:82d8fd25b7f4675d0c47cf95b594d4e7b158aca33b76aa63d07186e13c0e0ab7", 79 | "sha256:844da2b5728b5ce0e32d863af26f32b5ce61bc4273a9c720a9f3aa9df73b1455", 80 | "sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6", 81 | "sha256:915f3849a011c1f593ab99092f3cecfcb4d65d8feb4a64cf1bf2d22074dc0ec4", 82 | "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0", 83 | "sha256:982bb1e8b4ffda883b3d0a521e23abcd6fd17418f6d2c4118d257a10199c0ce3", 84 | "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1", 85 | "sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6", 86 | "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981", 87 | "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c", 88 | "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980", 89 | "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645", 90 | "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7", 91 | "sha256:aaf27faa992bfee0264dc1f03f4c75e9fcdda66a519db6b957a3f826e285cf12", 92 | "sha256:b2680962a4848b3c4f155dc2ee64505a9c57186d0d56b43123b17ca3de18f0fa", 93 | "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd", 94 | "sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef", 95 | "sha256:b3daeac64d5b371dea99714f08ffc2c208522ec6b06fbc7866a450dd446f5c0f", 96 | "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2", 97 | 
"sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d", 98 | "sha256:c72fbbe68c6f32f251bdc08b8611c7b3060612236e960ef848e0a517ddbe76c5", 99 | "sha256:c9e36a97bee9b86ef9a1cf7bb96747eb7a15c2f22bdb5b516434b00f2a599f02", 100 | "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3", 101 | "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd", 102 | "sha256:d11b54acf878eef558599658b0ffca78138c8c3655cf4f3a4a673c437e67732e", 103 | "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214", 104 | "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd", 105 | "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a", 106 | "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c", 107 | "sha256:dc7039885fa1baf9be153a0626e337aa7ec8bf96b0128605fb0d77788ddc1681", 108 | "sha256:dccab8d5fa1ef9bfba0590ecf4d46df048d18ffe3eec01eeb73a42e0d9e7a8ba", 109 | "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f", 110 | "sha256:e45ba65510e2647721e35323d6ef54c7974959f6081b58d4ef5d87c60c84919a", 111 | "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28", 112 | "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691", 113 | "sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82", 114 | "sha256:e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a", 115 | "sha256:e8323a9b031aa0393768b87f04b4164a40037fb2a3c11ac06a03ffecd3618027", 116 | "sha256:e92fca20c46e9f5e1bb485887d074918b13543b1c2a1185e69bb8d17ab6236a7", 117 | "sha256:eb30abc20df9ab0814b5a2524f23d75dcf83cde762c161917a2b4b7b55b1e518", 118 | "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf", 119 | "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b", 120 | "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9", 121 | "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544", 122 | "sha256:f4074c5a429281bf056ddd4c5d3b740ebca4d43ffffe2ef4bf4d2d05114299da", 123 | "sha256:f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509", 124 | "sha256:fb707f3e15060adf5b7ada797624a6c6e0138e2a26baa089df64c68ee98e040f", 125 | "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a", 126 | "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f" 127 | ], 128 | "markers": "python_version >= '3.7'", 129 | "version": "==3.4.2" 130 | }, 131 | "html5lib": { 132 | "hashes": [ 133 | "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d", 134 | "sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f" 135 | ], 136 | "index": "pypi", 137 | "version": "==1.1" 138 | }, 139 | "idna": { 140 | "hashes": [ 141 | "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", 142 | "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3" 143 | ], 144 | "markers": "python_version >= '3.6'", 145 | "version": "==3.10" 146 | }, 147 | "requests": { 148 | "hashes": [ 149 | "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", 150 | "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6" 151 | ], 152 | "index": "pypi", 153 | "version": "==2.32.3" 154 | }, 155 | "six": { 156 | "hashes": [ 157 | "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", 158 | "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81" 159 | ], 160 | "markers": 
"python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", 161 | "version": "==1.17.0" 162 | }, 163 | "soupsieve": { 164 | "hashes": [ 165 | "sha256:6e60cc5c1ffaf1cebcc12e8188320b72071e922c2e897f737cadce79ad5d30c4", 166 | "sha256:ad282f9b6926286d2ead4750552c8a6142bc4c783fd66b0293547c8fe6ae126a" 167 | ], 168 | "markers": "python_version >= '3.8'", 169 | "version": "==2.7" 170 | }, 171 | "typing-extensions": { 172 | "hashes": [ 173 | "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c", 174 | "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef" 175 | ], 176 | "markers": "python_version >= '3.8'", 177 | "version": "==4.13.2" 178 | }, 179 | "urllib3": { 180 | "hashes": [ 181 | "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466", 182 | "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813" 183 | ], 184 | "markers": "python_version >= '3.9'", 185 | "version": "==2.4.0" 186 | }, 187 | "webencodings": { 188 | "hashes": [ 189 | "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", 190 | "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923" 191 | ], 192 | "version": "==0.5.1" 193 | } 194 | }, 195 | "develop": {} 196 | } 197 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # BandcampSync 2 | 3 | BandcampSync is a Python module and command line script (also packed in 4 | a Docker container) which synchronises media purchased on a Bandcamp 5 | (http://bandcamp.com/) account with a local directory. 6 | 7 | You may use this to download media you have purchased from Bandcamp to a 8 | local media server, such as Plex or Jellyfin. 9 | 10 | Most media items purchased on Bandcamp have high quality download options 11 | available and BandcampSync defaults to `flac`. 12 | 13 | When called, `bandcampsync` will: 14 | 15 | 1. Authenticate to bandcamp.com as you using your exported session cookies 16 | 2. Scan your local media directory for existing downloaded items 17 | 3. Index a list of all of your purchased items in your Bandcamp collection 18 | 4. Download the archive of missing items not downloaded already from your collection 19 | 5. Unzip the archive and move the contents to the local media directory 20 | 21 | The media directory will have the following format: 22 | 23 | ``` 24 | /media/ 25 | /media/Artist Name 26 | /media/Artist Name/Album Name 27 | /media/Artist Name/Album Name/bandcamp_item_id.txt 28 | /media/Artist Name/Album Name/cover.jpg 29 | /media/Artist Name/Album Name/Track Name.flac 30 | ``` 31 | 32 | The directory format of `artist_name`/`item_title` is not editable. 33 | 34 | `bandcamp_item_id.txt` is a special file created in each item directory and 35 | it contains the Bandcamp item ID as an integer. This file is used by BandcampSync 36 | to track which media items have already been downloaded. You can rename the 37 | artist or album directories, but do not delete the `bandcamp_item_id.txt` file 38 | or the media item will be redownloaded the next time `bandcampsync` is run. 39 | 40 | The `bandcamp_item_id.txt` file method of tracking what items are synchronised 41 | also means you can also use media managers such as Lidarr to rename artist, 42 | album and track names automatically without issues. 
 43 | 
 44 | 
 45 | ## Installation
 46 | 
 47 | `bandcampsync` is pure Python and only depends on the `requests`,
 48 | `beautifulsoup4` and `html5lib` libraries. You can install `bandcampsync` via pip:
 49 | 
 50 | ```bash
 51 | $ pip install bandcampsync
 52 | ```
 53 | 
 54 | Any modern version of Python 3 will be compatible.
 55 | 
 56 | Alternatively, there's a batteries-included Docker image available if you prefer.
 57 | 
 58 | 
 59 | ## Docker
 60 | 
 61 | The Docker image contains the `bandcampsync` Python module as well as a helper
 62 | script that runs `bandcampsync` on a timer. Configuration variables are also
 63 | moved to environment variables.
 64 | 
 65 | You can pull and run the image with the following commands:
 66 | 
 67 | ```bash
 68 | # Pull image
 69 | $ docker pull ghcr.io/meeb/bandcampsync:latest
 70 | # Start the container using your user ID and group ID
 71 | $ docker run \
 72 |     -d \
 73 |     --name bandcampsync \
 74 |     -e TZ=Europe/London \
 75 |     -e PUID=1000 \
 76 |     -e PGID=1000 \
 77 |     -e RUN_DAILY_AT=3 \
 78 |     -v /some/directory/bandcampsync-config:/config \
 79 |     -v /some/directory/bandcampsync-media:/downloads \
 80 |     ghcr.io/meeb/bandcampsync:latest
 81 | 
 82 | ```
 83 | 
 84 | Or an example Docker Compose entry:
 85 | 
 86 | ```yaml
 87 | version: '3.7'
 88 | services:
 89 |   bandcampsync:
 90 |     image: ghcr.io/meeb/bandcampsync:latest
 91 |     container_name: bandcampsync
 92 |     restart: unless-stopped
 93 |     volumes:
 94 |       - /some/directory/bandcampsync-config:/config
 95 |       - /some/directory/bandcampsync-media:/downloads
 96 |     environment:
 97 |       - TZ=Europe/London
 98 |       - PUID=1000
 99 |       - PGID=1000
100 |       - RUN_DAILY_AT=3
101 | ```
102 | 
103 | In the above example you would save your cookies data into a file called
104 | `cookies.txt` and save it at `/some/directory/bandcampsync-config/cookies.txt`.
105 | BandcampSync will look for this location when it starts up.
106 | 
107 | The `RUN_DAILY_AT` environment variable is the hour of the day at which the
108 | `bandcampsync` script will run, in this example 3am local time. After each run
109 | the container sleeps until the same hour the following day. A randomised delay
110 | is also added so requests do not hit bandcamp.com exactly on the hour; see the
111 | scheduling sketch after this section.
112 | 
113 | `RUN_DAILY_AT` should be a number between 0 and 23 (specifying an hour).
114 | 
115 | `PUID` and `PGID` are the user and group IDs to run the download as.
116 | This sets the UID and GID of the files that are downloaded.
117 | 
118 | The `TEMP_DIR` variable can be set to a directory in the container. If set, the
119 | directory is used as the temporary download location.
120 | 
121 | `IGNORE` can be set to ignore bands, same as the `--ignore` CLI argument.
122 | 
123 | `NOTIFY_URL` can be set to a URL to send an HTTP GET request to when new
124 | items have been loaded, same as the `--notify-url` CLI argument.
125 | 
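To make the `RUN_DAILY_AT` behaviour concrete, the following is a minimal
sketch of the scheduling arithmetic used by the bundled service script (see
`bin/bandcampsync-service` later in this repository); the values here are
illustrative:

```python
from datetime import datetime, timedelta
from random import randrange
from zoneinfo import ZoneInfo

run_daily_at = 3                # RUN_DAILY_AT, an hour between 0 and 23
tz = ZoneInfo('Europe/London')  # TZ

# The next run is tomorrow at the configured hour, plus a random offset of
# up to an hour so requests do not land exactly on the hour.
now = datetime.now(tz)
next_run = (now + timedelta(days=1)).replace(hour=run_daily_at, minute=0,
                                             second=0, microsecond=0)
sleep_seconds = int((next_run - now).total_seconds()) + randrange(0, 3600)
print(f'Sleeping for {sleep_seconds} seconds')
```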
126 | 
127 | ## Configuration
128 | 
129 | BandcampSync requires minimal configuration. First, it requires your session
130 | cookies from an authenticated Bandcamp account. The easiest way to get these is
131 | to go to https://bandcamp.com/ in your browser and log in with your account.
132 | 
133 | Next, open the developer tools in your browser (F12 button on most browsers, or
134 | select "developer tools" from the options menu).
135 | 
136 | Reload the index page and find the index page request in your network requests
137 | tab of your browser. Go to the "Request Headers" section then select and copy
138 | the string after the `Cookie` header. The string should look something like this:
139 | 
140 | ```
141 | client_id=00B1F3C8EB48E181A185CCD041E40C0E8F; session=1%0893C88%570EE405455%%8DEC37B5BC393983DB983DD%%BDFD46C3B8A0%%580DA466D5CD; identity=1%HhehuehUFEUiuebn%%2ADB72300DAE573%BEEF389A1B526EA35AC38019FA0A6F%11B4BD5FBC18B83F720; js_logged_in=1; logout=%7B%22username%22%3A%22someuser%22%7D; download_encoding=401; BACKENDID3=some-server-name
142 | ```
143 | 
144 | Save this string to a file called `cookies.txt`.
145 | 
146 | ![Getting your session cookies](https://github.com/meeb/bandcampsync/blob/main/docs/cookies.jpg?raw=true)
147 | 
148 | You need to save your session ID from cookies manually because Bandcamp has
149 | a captcha on the login form, so BandcampSync can't log in with your username
150 | and password for you.
151 | 
152 | IMPORTANT NOTE: Keep the `cookies.txt` file safe! Anyone with access to this file
153 | can log into your Bandcamp account, impersonate you, potentially make purchases
154 | and generally have total access to your Bandcamp account!
155 | 
156 | You can also use Netscape-formatted cookie export files if you have exported your
157 | cookies using a cookie exporter plugin or similar tool.
158 | 
159 | 
160 | ## CLI usage
161 | 
162 | Once you have the Python `bandcampsync` module installed you can call it with the
163 | `bandcampsync` command:
164 | 
165 | ```bash
166 | $ bandcampsync --cookies cookies.txt --directory /path/to/music
167 | ```
168 | 
169 | or in shorthand:
170 | 
171 | ```bash
172 | $ bandcampsync -c cookies.txt -d /path/to/music
173 | ```
174 | 
175 | You can also use `-t` or `--temp-dir` to set the temporary download directory used. See
176 | `-h` or `--help` for the full list of command line options.
177 | 
178 | You can also use `-i` or `--ignore` to bypass artists that have data issues that
179 | your OS cannot handle.
180 | 
181 | ```bash
182 | $ bandcampsync --cookies cookies.txt --directory /path/to/music --ignore "badband"
183 | ```
184 | 
185 | `--ignore` supports multiple space-separated strings, for example
186 | `--ignore "band1 band2 band3"`. Matching is shown in the sketch below.
187 | 
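Ignore patterns are applied as case-insensitive substring matches against the
band name (see the loop in `bandcampsync/__init__.py`). A minimal sketch of the
matching rule, with a hypothetical helper name:

```python
def is_ignored(band_name, ignore_patterns):
    # Each space-separated pattern is checked as a case-insensitive
    # substring of the band name.
    return any(p.lower() in band_name.lower() for p in ignore_patterns.split())

assert is_ignored('BadBand', 'badband band2') is True
assert is_ignored('Good Band', 'badband band2') is False
# Beware that short patterns can over-match: 'band' is a substring of both.
assert is_ignored('Good Band', 'band') is True
```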
188 | 
189 | You can notify an external HTTP server when new items have been loaded with `-n` or
190 | `--notify-url`.
191 | 
192 | ```bash
193 | $ bandcampsync ... --notify-url "http://some.service.local/some-uri"
194 | ```
195 | 
196 | You can use this to call a "refresh" hook on media servers (for example rescan a Plex
197 | or Jellyfin library). The `--notify-url` parameter, if set, simply makes an HTTP GET
198 | request to the specified URL and confirms it returns a `2XX` response.
199 | 
200 | For advanced notify URLs you can use the following `--notify-url` format:
201 | 
202 | `method url headers body`
203 | 
204 | `method` must be one of `GET` or `POST`. `url` is any URL. `headers` is a
205 | comma-separated list of `key=value` pairs and `body` is a body string to send if the `method`
206 | is `POST`. You can use `-` for the header and body values to leave them unset. Some examples:
207 | 
208 | `GET http://some.service.local/some-uri - -`
209 | 
210 | `GET http://some.service.local/some-uri header1=abc,header2 -`
211 | 
212 | `POST http://some.service.local/some-uri auth-header=abc somedata`
213 | 
214 | 
215 | ## Formats
216 | 
217 | By default, BandcampSync will download your music in the `flac` format. You can specify
218 | another format with the `--format` argument. Common Bandcamp download formats are:
219 | 
220 | | Name            | Description                                                     |
221 | | --------------- | --------------------------------------------------------------- |
222 | | `mp3-v0`        | Variable bitrate MP3. Small file sizes. OK quality.             |
223 | | `mp3-320`       | High quality MP3. Medium file sizes. Good quality.              |
224 | | `flac`          | Lossless audio. Large file sizes. Original quality.             |
225 | | `aac-hi`        | Apple variable bitrate format. Small file sizes. OK quality.    |
226 | | `aiff-lossless` | Uncompressed audio format. Biggest file size. Original quality. |
227 | | `vorbis`        | Open source lossy format. Small file sizes. OK quality.         |
228 | | `alac`          | Apple lossless format. Large file sizes. Original quality.      |
229 | | `wav`           | Uncompressed audio format. Biggest file size. Original quality. |
230 | 
231 | 
232 | ## Contributing
233 | 
234 | All properly formatted and sensible pull requests, issues and comments are welcome.
235 | 
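BandcampSync can also be driven as a library rather than via the CLI. A
minimal, hedged sketch of calling `do_sync()` directly (its signature is
defined in `bandcampsync/__init__.py` below; the paths here are placeholders):

```python
from pathlib import Path
from bandcampsync import do_sync

cookies_path = Path('/config/cookies.txt')   # placeholder path
do_sync(
    cookies_path=cookies_path,
    cookies=cookies_path.read_text().strip(),
    dir_path=Path('/downloads'),             # placeholder path
    media_format='flac',
    temp_dir_root=None,   # None uses the system default temporary directory
    ign_patterns='',      # no artists ignored
    notify_url=None,      # no notification on completion
)
```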
--------------------------------------------------------------------------------
/bandcampsync/__init__.py:
--------------------------------------------------------------------------------
  1 | import json
  2 | from pathlib import Path
  3 | from tempfile import NamedTemporaryFile, TemporaryDirectory
  4 | from .config import VERSION as version
  5 | from .logger import get_logger
  6 | from .bandcamp import Bandcamp, BandcampError
  7 | from .media import LocalMedia
  8 | from .notify import NotifyURL
  9 | from .download import (download_file, unzip_file, move_file, copy_file,
 10 |                        mask_sig, is_zip_file, DownloadInvalidContentType,
 11 |                        DownloadBadStatusCode)
 12 | 
 13 | 
 14 | log = get_logger('sync')
 15 | 
 16 | 
 17 | def do_sync(cookies_path, cookies, dir_path, media_format, temp_dir_root, ign_patterns, notify_url):
 18 | 
 19 |     local_media = LocalMedia(media_dir=dir_path)
 20 |     bandcamp = Bandcamp(cookies=cookies)
 21 |     bandcamp.verify_authentication()
 22 |     bandcamp.load_purchases()
 23 |     new_items_downloaded = False
 24 |     if notify_url:
 25 |         notifier = NotifyURL(notify_url)
 26 |     else:
 27 |         notifier = None
 28 | 
 29 |     for item in bandcamp.purchases:
 30 | 
 31 |         # Check if any ignore pattern matches the band name
 32 |         ignored = False
 33 |         for pattern in ign_patterns.split():
 34 |             if pattern.lower() in item.band_name.lower():
 35 |                 log.warning(f'Skipping item due to ignore pattern: "{pattern}" found in "{item.band_name}"')
 36 |                 ignored = True
 37 |                 break
 38 |         if ignored:
 39 |             continue
 40 | 
 41 |         local_path = local_media.get_path_for_purchase(item)
 42 |         if item.is_preorder:
 43 |             log.info(f'Item is a preorder, skipping: "{item.band_name} / {item.item_title}" '
 44 |                      f'(id:{item.item_id})')
 45 |             continue
 46 |         elif local_media.is_locally_downloaded(item, local_path):
 47 |             log.info(f'Already locally downloaded, skipping: "{item.band_name} / {item.item_title}" '
 48 |                      f'(id:{item.item_id})')
 49 |             continue
 50 |         else:
 51 |             log.info(f'New media item, will download: "{item.band_name} / {item.item_title}" '
 52 |                      f'(id:{item.item_id}) in "{media_format}"')
 53 |         try:
 54 |             local_path.mkdir(parents=True, exist_ok=True)
 55 |         except OSError as e:
 56 |             log.error(f'Failed to create directory: {local_path} ({e}), skipping purchase...')
 57 |             continue
 58 |         try:
 59 |             initial_download_url = bandcamp.get_download_file_url(item, encoding=media_format)
 60 |         except BandcampError as e:
 61 |             log.error(f'Failed to locate download URL for media item "{item.band_name} / {item.item_title}" '
 62 |                       f'(id:{item.item_id}), unable to download release ({e}), skipping')
 63 |             continue
 64 |         download_file_url = bandcamp.check_download_stat(item, initial_download_url)
 65 |         with NamedTemporaryFile(mode='w+b', delete=True, dir=temp_dir_root) as temp_file:
 66 |             log.info(f'Downloading item "{item.band_name} / {item.item_title}" (id:{item.item_id}) '
 67 |                      f'from {mask_sig(download_file_url)} to {temp_file.name}')
 68 |             try:
 69 |                 download_file(download_file_url, temp_file)
 70 |             except DownloadBadStatusCode as e:
 71 |                 log.error(f'Download attempt returned an unexpected status code ({e}), skipping')
 72 |                 continue
 73 |             except DownloadInvalidContentType as e:
 74 |                 log.error(f'Download attempt returned an unexpected content type ({e}), skipping')
 75 |                 continue
 76 |             temp_file.seek(0)
 77 |             temp_file_path = Path(temp_file.name)
 78 |             if is_zip_file(temp_file_path):
 79 |                 with TemporaryDirectory(dir=temp_dir_root) as temp_dir:
 80 |                     log.info(f'Decompressing downloaded zip "{temp_file.name}" to "{temp_dir}"')
 81 |                     unzip_file(temp_file.name, temp_dir)
 82 |                     temp_path = Path(temp_dir)
 83 |                     for file_path in temp_path.iterdir():
 84 |                         file_dest = local_media.get_path_for_file(local_path, file_path.name)
 85 |                         log.info(f'Moving extracted file: "{file_path}" to "{file_dest}"')
 86 |                         try:
 87 |                             move_file(file_path, file_dest)
 88 |                         except Exception as e:
 89 |                             log.error(f'Failed to move {file_path} to {file_dest}: {e}')
 90 |                     local_media.write_bandcamp_id(item, local_path)
 91 |                     new_items_downloaded = True
 92 |             elif item.item_type == 'track':
 93 |                 slug = item.url_hints.get('slug', item.item_title)
 94 |                 format_extension = local_media.clean_format(media_format)
 95 |                 file_dest = local_media.get_path_for_file(local_path, f'{slug}.{format_extension}')
 96 |                 log.info(f'Copying single track: "{temp_file_path}" to "{file_dest}"')
 97 |                 try:
 98 |                     copy_file(temp_file_path, file_dest)
 99 |                 except Exception as e:
100 |                     log.error(f'Failed to copy {temp_file_path} to {file_dest}: {e}')
101 |                 local_media.write_bandcamp_id(item, local_path)
102 |                 new_items_downloaded = True
103 |             else:
104 |                 log.error(f'Downloaded file for "{item.band_name} / {item.item_title}" (id:{item.item_id}) '
105 |                           f'at "{temp_file_path}" is not a zip archive or a single track, skipping')
106 | 
107 |     if new_items_downloaded:
108 |         log.info('New media items downloaded')
109 |         if notifier:
110 |             notifier.notify()
111 | 
112 |     return True
113 | 
--------------------------------------------------------------------------------
/bandcampsync/bandcamp.py:
--------------------------------------------------------------------------------
  1 | import json
  2 | import re
  3 | from time import time
  4 | from http.cookies import SimpleCookie
  5 | from html import unescape as html_unescape
  6 | from urllib.parse import urlsplit, urlunsplit
  7 | from bs4 import BeautifulSoup
  8 | import requests
  9 | from .config import USER_AGENT
 10 | from .download import mask_sig
 11 | from .logger import get_logger
 12 | 
 13 | 
 14 | log = get_logger('bandcamp')
 15 | 
 16 | 
 17 | class BandcampError(ValueError):
 18 |     pass
 19 | 
 20 | 
 21 | class Bandcamp:
 22 | 
 23 |     BASE_PROTO = 'https'
 24 |     BASE_DOMAIN = 'bandcamp.com'
 25 |     URLS = {
 26 |         'index': '/',
 27 |         'collection_items': '/api/fancollection/1/collection_items',
 28 |     }
 29 | 
 30 |     def __init__(self, cookies=''):
 31 |         self.is_authenticated = False
 32 |         self.user_id = 0
 33 |         self.user_name = ''
 34 |         self.user_url = ''
 35 |         self.user_verified = False
 36 |         self.user_private = False
 37 |         self.cookies = None
 38 |         self.purchases = []
 39 |         self.load_cookies(cookies)
 40 |         identity = self.cookies.get('identity')
 41 |         if not identity:
 42 |             raise BandcampError(f'Cookie data does not contain an identity value, make sure your '
 43 |                                 f'cookies.txt file is valid and you copied it from an '
 44 |                                 f'authenticated browser')
 45 |         identity_snip = identity.value[:20]
 46 |         log.info(f'Located Bandcamp identity in cookies: {identity_snip}...')
 47 |         # Create a requests session and map our SimpleCookie to it
 48 |         self.session = requests.Session()
 49 |         for cookie_name, morsel in self.cookies.items():
 50 |             self.session.cookies.set(cookie_name, morsel.value)
 51 | 
 52 |     def load_cookies(self, cookies_str):
 53 |         self.cookies = SimpleCookie()
 54 |         try:
 55 |             self.cookies.load(cookies_str)
 56 |         except Exception as e:
 57 |             raise BandcampError(f'Failed to parse cookies string: {e}') from e
 58 |         if len(self.cookies) == 0:
 59 |             # Failed to load any cookies, attempt to parse the cookies string as a Netscape cookies export
 60 |             lines = cookies_str.strip().split('\n')
 61 |             for line in lines:
 62 |                 if line.startswith('#'):
 63 |                     continue
 64 |                 line = line.strip()
 65 |                 if not line:
 66 |                     continue
 67 |                 parts = line.split('\t')
 68 |                 if len(parts) == 7:
 69 |                     domain, tailmatch, path, secure, expires, name, value = parts
 70 |                     cookie_string = f"{name.strip()}={value.strip()}; Domain={domain.strip()}; Path={path.strip()}"
 71 |                     if secure == 'TRUE':
 72 |                         cookie_string += "; Secure"
 73 |                     self.cookies.load(cookie_string)
 74 |         return True
 75 | 
 76 |     @property
 77 |     def cookies_str(self):
 78 |         return self.cookies.output(header='').strip().replace('\r\n', ';')
 79 | 
 80 |     def refresh_cookies(self, file_path):
 81 |         log.info(f'Refreshing cookies: {file_path}')
 82 |         with open(file_path, 'wt') as f:
 83 |             f.write(self.cookies_str)
 84 | 
 85 |     def _construct_url(self, url_name):
 86 |         if url_name not in self.URLS:
 87 |             raise BandcampError(f'URL name is unknown: {url_name}')
 88 |         return urlunsplit((self.BASE_PROTO, self.BASE_DOMAIN, self.URLS[url_name], '', ''))
 89 | 
 90 |     def _plain_cookies(self):
 91 |         cookies = {}
 92 |         for (cookie_name, cookie_value) in self.cookies.items():
 93 |             cookies[cookie_value.key] = cookie_value.value
 94 |         return cookies
 95 | 
 96 |     def _request(self, method, url, data=None, json_data=None, is_json=False, as_raw=False):
 97 |         headers = {'User-Agent': USER_AGENT}
 98 |         try:
 99 |             # The debug logs do not mask the URL which may be a security issue if you run
100 |             # with level=logging.DEBUG
101 |             log.debug(f'Making {method} request to {url}')
102 |             response = self.session.request(
103 |                 method,
104 |                 url,
105 |                 headers=headers,
106 |                 cookies=self._plain_cookies(),
107 |                 data=data,
108 |                 json=json_data
109 |             )
110 |         except Exception as e:
111 |             raise BandcampError(f'Failed to make HTTP request to {mask_sig(url)}: {e}') from e
112 |         if response.status_code != 200:
113 |             raise BandcampError(f'Failed to make HTTP request to {mask_sig(url)}: '
114 |                                 f'unknown status code response: {response.status_code}')
115 |         if as_raw:
116 |             return response.text
117 |         elif is_json:
118 |             return json.loads(response.text)
119 |         else:
120 |             return BeautifulSoup(response.text, 'html.parser')
121 | 
122 |     def _extract_pagedata_from_soup(self, soup):
123 |         pagedata_tag = soup.find('div', id='pagedata')
124 |         if not pagedata_tag:
125 |             raise BandcampError(f'Failed to locate <div id="pagedata"> in index HTML, this may '
126 |                                 f'be an authentication issue or it may be that bandcamp.com has '
127 |                                 f'updated their website and this tool needs to be updated.')
128 |         encoded_pagedata = pagedata_tag.attrs.get('data-blob')
129 |         if not encoded_pagedata:
130 |             raise BandcampError(f'Failed to extract page data, check your cookies are from an '
131 |                                 f'authenticated session')
132 |         pagedata_str = html_unescape(encoded_pagedata)
133 |         try:
134 |             return json.loads(pagedata_str)
135 |         except Exception as e:
136 |             raise BandcampError(f'Failed to parse pagedata as JSON: {e}') from e
137 | 
138 |     def _extract_pagedata_from_html(self, html):
139 |         """
140 |         Wrapper for _extract_pagedata_from_soup() that can accept HTML rather than a bs4 soup.
141 |         """
142 |         soup = BeautifulSoup(html, 'html.parser')
143 |         return self._extract_pagedata_from_soup(soup)
144 | 
145 |     def _get_js_stat_url(self, body, download_url):
146 |         """
147 |         Checks the "stat" download URL body, which is in JavaScript, for
148 |         either the OK response or a new updated download URL.
149 |         """
150 |         body = body.strip()
151 |         if body == "var _statDL_result = { result: 'ok'};":
152 |             # Download is OK, original download URL will work
153 |             return download_url
154 |         # Attempt to find the updated download_url in the JavaScript with a hacky regex
155 |         pattern = re.compile(r'"([^"]+)":"([^"]+)"')
156 |         for k, v in pattern.findall(body):
157 |             if k == 'download_url':
158 |                 return v
159 |         # Fallback to the original download URL
160 |         return download_url
161 | 
162 |     def verify_authentication(self):
163 |         """
164 |         Loads the initial account and session data from a request to the index page
165 |         of bandcamp.com. When properly authenticated an HTML data attribute is present
166 |         that contains account information in an encoded form.
167 |         """
168 |         url = self._construct_url('index')
169 |         soup = self._request('get', url)
170 |         pagedata = self._extract_pagedata_from_soup(soup)
171 |         try:
172 |             identities = pagedata['identities']
173 |         except KeyError as e:
174 |             raise BandcampError(f'Failed to parse pagedata JSON, does not contain an '
175 |                                 f'"identities" key') from e
176 |         try:
177 |             fan = identities['fan']
178 |         except KeyError as e:
179 |             raise BandcampError(f'Failed to parse pagedata JSON, does not contain an '
180 |                                 f'"identities.fan" key') from e
181 |         if not isinstance(fan, dict):
182 |             raise BandcampError(f'Failed to parse pagedata JSON, "identities.fan" is not '
183 |                                 f'a dictionary. Check your cookies.txt file is valid '
184 |                                 f'and up to date')
185 |         try:
186 |             self.user_id = fan['id']
187 |             self.user_name = fan['name']
188 |             self.user_url = fan['url']
189 |             self.user_verified = fan['verified']
190 |             self.user_private = fan['private']
191 |         except (KeyError, TypeError) as e:
192 |             raise BandcampError(f'Failed to parse pagedata JSON, "identities.fan" seems '
193 |                                 f'invalid: {fan}') from e
194 |         self.is_authenticated = self.user_id > 0
195 |         log.info(f'Loaded page data, session is authenticated for user '
196 |                  f'"{self.user_name}" (user id:{self.user_id}, url:{self.user_url})')
197 |         return True
198 | 
199 |     def load_purchases(self):
200 |         """
201 |         Loads all purchases on the authenticated account and returns a list of
202 |         purchase data. Each purchase is a dict of data.
203 |         """
204 |         if not self.is_authenticated:
205 |             raise BandcampError(f'Authentication not verified, call verify_authentication() first')
206 |         log.info(f'Loading purchases for "{self.user_name}" (user id:{self.user_id})')
207 |         self.purchases = []
208 |         now = int(time())
209 |         page_ts = 0
210 |         token = f'{now}:{page_ts}:a::'
211 |         per_page = 100
212 |         while True:
213 |             log.info(f'Requesting {per_page} purchases using token {token}')
214 |             data = {
215 |                 'fan_id': self.user_id,
216 |                 'count': per_page,
217 |                 'older_than_token': token
218 |             }
219 |             url = self._construct_url('collection_items')
220 |             data = self._request('POST', url, json_data=data, is_json=True)
221 |             try:
222 |                 items = data['items']
223 |             except KeyError as e:
224 |                 raise BandcampError(f'Failed to extract items from collection results page') from e
225 |             if not items:
226 |                 log.info(f'Reached end of items')
227 |                 break
228 |             try:
229 |                 redownload_urls = data['redownload_urls']
230 |             except KeyError as e:
231 |                 raise BandcampError(f'Failed to extract redownload_urls from collection results page') from e
232 |             for item_data in items:
233 |                 try:
234 |                     band_name = item_data['band_name']
235 |                 except KeyError:
236 |                     log.error(f'Failed to locate band name in item metadata, skipping item...')
237 |                     continue
238 |                 try:
239 |                     title = item_data['album_title']
240 |                 except KeyError:
241 |                     log.error(f'Failed to locate title in item metadata (possibly a subscription?) for "{band_name}", skipping item...')
242 |                     continue
243 |                 sale_item_type = item_data['sale_item_type']
244 |                 sale_item_id = item_data['sale_item_id']
245 |                 download_url_key = f'{sale_item_type}{sale_item_id}'
246 |                 try:
247 |                     download_url = redownload_urls[download_url_key]
248 |                 except KeyError:
249 |                     log.error(f'Failed to locate download URL for {band_name} / {title} '
250 |                               f'(key:{download_url_key}), skipping item...')
251 |                     continue
252 |                 item_data['download_url'] = download_url
253 |                 item = BandcampItem(item_data)
254 |                 token = item.token
255 |                 log.info(f'Found item: {band_name} / {title} (id:{item.item_id})')
256 |                 self.purchases.append(item)
257 |         log.info(f'Loaded {len(self.purchases)} purchases')
258 |         return True
259 | 
260 |     def get_download_file_url(self, item, encoding='flac'):
261 |         soup = self._request('get', item.download_url)
262 |         pagedata = self._extract_pagedata_from_soup(soup)
263 |         download_url = None
264 |         if not pagedata:
265 |             raise BandcampError(f'Failed to extract pagedata from the download page for item id:{item.item_id}')
266 |         try:
267 |             digital_items = pagedata['digital_items']
268 |         except KeyError as e:
269 |             raise BandcampError(f'Failed to parse pagedata JSON, does not contain a '
270 |                                 f'"digital_items" key') from e
271 |         for digital_item in digital_items:
272 |             try:
273 |                 digital_item_id = digital_item['item_id']
274 |             except KeyError as e:
275 |                 raise BandcampError(f'Failed to parse pagedata JSON, does not contain a '
276 |                                     f'"digital_items[].item_id" key') from e
277 |             if digital_item_id == item.item_id:
278 |                 try:
279 |                     downloads = digital_item['downloads']
280 |                 except KeyError as e:
281 |                     raise BandcampError(f'Failed to parse pagedata JSON, does not contain a '
282 |                                         f'"digital_items.downloads" key') from e
283 |                 try:
284 |                     download_format = downloads[encoding]
285 |                 except KeyError as e:
286 |                     encodings = downloads.keys()
287 |                     raise BandcampError(f'Download formats do not contain the requested encoding: {encoding} '
288 |                                         f'(available encodings: {encodings})') from e
289 |                 try:
290 |                     download_url = download_format['url']
291 |                 except KeyError as e:
292 |                     raise BandcampError(f'Failed to parse pagedata JSON, does not contain a '
293 |                                         f'"digital_items.downloads.[encoding].url" key') from e
294 |                 return download_url
295 |         raise BandcampError(f'Failed to locate item id:{item.item_id} in download page "digital_items" data')
296 | 
297 |     def check_download_stat(self, item, file_download_url):
298 |         """
299 |         Constructs the download "stat" URL and verifies the state of the download.
300 |         If the state is OK, return the existing URL (download is OK) otherwise wait
301 |         for the stat to complete and return the new download URL.
302 |         """
303 |         download_url_parts = urlsplit(file_download_url)
304 |         path = download_url_parts.path
305 |         path_parts = path.split('/')
306 |         if path_parts[1] == 'download':
307 |             path_parts[1] = 'statdownload'
308 |         stat_url = urlunsplit((
309 |             download_url_parts.scheme,
310 |             download_url_parts.netloc,
311 |             '/'.join(path_parts),
312 |             download_url_parts.query,
313 |             ''
314 |         ))
315 |         body = self._request('get', stat_url, as_raw=True)
316 |         return self._get_js_stat_url(body, file_download_url)
317 | 
318 | 
319 | class BandcampItem:
320 | 
321 |     def __init__(self, data):
322 |         self._data = data
323 | 
324 |     def __repr__(self):
325 |         return json.dumps(self._data, indent=4, sort_keys=True)
326 | 
327 |     def __getattr__(self, key):
328 |         try:
329 |             return self._data[key]
330 |         except KeyError as e:
331 |             raise KeyError(f'BandcampItem value "{key}" does not exist') from e
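
A hedged illustration of the URL rewrite performed by `check_download_stat()`
above: the first path segment `download` is swapped for `statdownload` while
everything else is preserved. The URL below is invented for the example:

```python
from urllib.parse import urlsplit, urlunsplit

url = 'https://example.bcbits.com/download/album/abc123?enc=flac&sig=secret'
parts = urlsplit(url)
path_parts = parts.path.split('/')
if path_parts[1] == 'download':
    path_parts[1] = 'statdownload'
stat_url = urlunsplit((parts.scheme, parts.netloc, '/'.join(path_parts),
                       parts.query, ''))
# https://example.bcbits.com/statdownload/album/abc123?enc=flac&sig=secret
print(stat_url)
```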
--------------------------------------------------------------------------------
/bandcampsync/config.py:
--------------------------------------------------------------------------------
1 | VERSION = '0.5.1'
2 | USER_AGENT = 'Mozilla/5.0 (X11; Linux x86_64; rv:130.0) Gecko/20100101 Firefox/130.0'
3 | INTERNAL_USER_AGENT = 'bandcampsync/' + VERSION
--------------------------------------------------------------------------------
/bandcampsync/download.py:
--------------------------------------------------------------------------------
 1 | import math
 2 | import shutil
 3 | from zipfile import ZipFile
 4 | import requests
 5 | from .config import USER_AGENT
 6 | from .logger import get_logger
 7 | 
 8 | 
 9 | log = get_logger('download')
10 | 
11 | 
12 | def mask_sig(url):
13 |     if '&sig=' not in url:
14 |         return url
15 |     url_parts = url.split('&')
16 |     for i, url_part in enumerate(url_parts):
17 |         if url_part[:4] == 'sig=':
18 |             url_parts[i] = 'sig=[masked]'
19 |         elif url_part[:6] == 'token=':
20 |             url_parts[i] = 'token=[masked]'
21 |     return '&'.join(url_parts)
22 | 
23 | 
24 | class DownloadBadStatusCode(ValueError):
25 |     pass
26 | 
27 | 
28 | class DownloadInvalidContentType(ValueError):
29 |     pass
30 | 
31 | 
32 | def download_file(url, target, mode='wb', chunk_size=8192, logevery=10, disallow_content_type='text/html'):
33 |     """
34 |     Attempts to stream a download to an open target file handle in chunks. If the
35 |     request returns a disallowed content type then return a failed state with the
36 |     response content.
37 | """ 38 | text = True if 't' in mode else False 39 | data_streamed = 0 40 | last_log = 0 41 | headers = {'User-Agent': USER_AGENT} 42 | with requests.get(url, stream=True, headers=headers) as r: 43 | #r.raise_for_status() 44 | if r.status_code != 200: 45 | raise DownloadBadStatusCode(f'Got non-200 status code: {r.status_code}') 46 | try: 47 | content_type = r.headers.get('Content-Type', '') 48 | except (ValueError, KeyError): 49 | content_type = '' 50 | content_type_parts = content_type.split(';') 51 | major_content_type = content_type_parts[0].strip() 52 | if major_content_type == disallow_content_type: 53 | raise DownloadInvalidContentType(f'Invalid content type: {major_content_type}') 54 | try: 55 | content_length = int(r.headers.get('Content-Length', '0')) 56 | except (ValueError, KeyError): 57 | content_length = 0 58 | for chunk in r.iter_content(chunk_size=chunk_size): 59 | data_streamed += len(chunk) 60 | if text: 61 | chunk = chunk.decode() 62 | target.write(chunk) 63 | if content_length > 0 and logevery > 0: 64 | percent_complete = math.floor((data_streamed / content_length) * 100) 65 | if percent_complete % logevery == 0 and percent_complete > last_log: 66 | log.info(f'Downloading {mask_sig(url)}: {percent_complete}%') 67 | last_log = percent_complete 68 | return True 69 | 70 | 71 | def is_zip_file(file_path): 72 | try: 73 | with ZipFile(file_path) as z: 74 | z.infolist() 75 | return True 76 | except Exception as e: 77 | return False 78 | 79 | 80 | def unzip_file(decompress_from, decompress_to): 81 | with ZipFile(decompress_from) as z: 82 | z.extractall(decompress_to) 83 | return True 84 | 85 | 86 | def move_file(src, dst): 87 | return shutil.move(src, dst) 88 | 89 | 90 | def copy_file(src, dst): 91 | return shutil.copyfile(src, dst) 92 | -------------------------------------------------------------------------------- /bandcampsync/logger.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import logging 3 | 4 | 5 | def get_logger(name, level=logging.INFO): 6 | log = logging.getLogger(name) 7 | log.setLevel(level) 8 | ch = logging.StreamHandler() 9 | ch.setLevel(level) 10 | fmt = logging.Formatter('%(asctime)s %(name)s [%(levelname)s] %(message)s') 11 | ch.setFormatter(fmt) 12 | log.addHandler(ch) 13 | return log 14 | -------------------------------------------------------------------------------- /bandcampsync/media.py: -------------------------------------------------------------------------------- 1 | from unicodedata import normalize 2 | from .logger import get_logger 3 | 4 | 5 | log = get_logger('media') 6 | 7 | 8 | class LocalMedia: 9 | """ 10 | A local media directory indexer. 
This stores media in the following format: 11 | 12 | /media_dir/ 13 | /media_dir/Artist Name 14 | /media_dir/Artist Name/Album Name 15 | /media_dir/Artist Name/Album Name/bandcamp_item_id.txt 16 | /media_dir/Artist Name/Album Name/track1.flac 17 | /media_dir/Artist Name/Album Name/track2.flac 18 | """ 19 | 20 | ITEM_INDEX_FILENAME = 'bandcamp_item_id.txt' 21 | 22 | def __init__(self, media_dir): 23 | self.media_dir = media_dir 24 | self.media = {} 25 | self.dirs = set() 26 | self.item_names = set() 27 | log.info(f'Local media directory: {self.media_dir}') 28 | self.index() 29 | 30 | def _clean_path(self, path_str): 31 | path_str = str(path_str) 32 | disallowed_punctuation = '"#%\'*/?\\`:' 33 | normalized_path = normalize('NFKD', path_str) 34 | outstr = '' 35 | for c in normalized_path: 36 | if c not in disallowed_punctuation: 37 | outstr += c 38 | return outstr 39 | 40 | def clean_format(self, format_str): 41 | if '-' not in format_str: 42 | return format_str 43 | format_parts = format_str.split('-') 44 | format_prefix = format_parts[0] 45 | return format_prefix if format_prefix else format_str 46 | 47 | def index(self): 48 | for child1 in self.media_dir.iterdir(): 49 | if child1.is_dir(): 50 | for child2 in child1.iterdir(): 51 | if child2.is_dir(): 52 | self.dirs.add(child2) 53 | for child3 in child2.iterdir(): 54 | if child3.name == self.ITEM_INDEX_FILENAME: 55 | item_id = self.read_item_id(child3) 56 | self.media[item_id] = child2 57 | self.item_names.add((child2.parent.name, child2.name)) 58 | log.info(f'Detected locally downloaded media: {item_id} = {child2}') 59 | return True 60 | 61 | def read_item_id(self, filepath): 62 | with open(filepath, 'rt') as f: 63 | item_id = f.read().strip() 64 | try: 65 | return int(item_id) 66 | except Exception as e: 67 | raise ValueError(f'Failed to cast item ID from {filepath} "{item_id}" as an int: {e}') from e 68 | 69 | def is_locally_downloaded(self, item, local_path): 70 | if item.item_id in self.media: 71 | return True 72 | item_name = (local_path.parent.name, local_path.name) 73 | if item_name in self.item_names: 74 | log.info(f'Detected album at "{local_path}" but with an item ID mismatch ' 75 | f'({self.ITEM_INDEX_FILENAME} file does not contain {item.item_id}), ' 76 | f'you may want to check this item is correctly downloaded') 77 | return True 78 | return False 79 | 80 | def is_dir(self, path): 81 | return path in self.dirs 82 | 83 | def get_path_for_purchase(self, item): 84 | return self.media_dir / self._clean_path(item.band_name) / self._clean_path(item.item_title) 85 | 86 | def get_path_for_file(self, local_path, file_name): 87 | return local_path / self._clean_path(file_name) 88 | 89 | def write_bandcamp_id(self, item, dirpath): 90 | outfile = dirpath / self.ITEM_INDEX_FILENAME 91 | log.info(f'Writing bandcamp item id:{item.item_id} to: {outfile}') 92 | with open(outfile, 'wt') as f: 93 | f.write(f'{item.item_id}\n') 94 | return True 95 | -------------------------------------------------------------------------------- /bandcampsync/notify.py: -------------------------------------------------------------------------------- 1 | import requests 2 | from .config import INTERNAL_USER_AGENT 3 | from .logger import get_logger 4 | 5 | 6 | log = get_logger('notify') 7 | 8 | 9 | class NotifyURL: 10 | 11 | def __init__(self, notify_str): 12 | self.valid = False 13 | self.notify_str = notify_str 14 | self.method = 'GET' 15 | self.url = '' 16 | self.headers = {} 17 | self.body = '' 18 | self.parse_notify_str() 19 | if self.valid: 20 | 
log.info(f'Notify created: {self.method} to {self.url} with headers: {list(self.headers.keys())}')
21 | 
22 |     def parse_notify_str(self):
23 |         if not self.notify_str:
24 |             return
25 |         parts = self.notify_str.split()
26 |         if len(parts) == 1:
27 |             self.url = parts[0]
28 |             self.valid = True
29 |             return
30 |         elif len(parts) == 4:
31 |             method = parts[0].upper()
32 |             if method not in ('GET', 'POST'):
33 |                 log.error(f'Invalid notify method (must be GET or POST): {method}')
34 |                 return
35 |             self.method = method
36 |             self.url = parts[1]
37 |             headers = parts[2]
38 |             if headers != '-':
39 |                 for header in headers.split(','):
40 |                     key, _, value = header.partition('=')
41 |                     self.headers[key] = value
42 |             body = parts[3]
43 |             if body != '-':
44 |                 self.body = body
45 |             self.valid = True
46 |             return
47 |         else:
48 |             log.error(f'Invalid notify target: {self.notify_str}')
49 |             return
50 | 
51 |     def notify(self):
52 |         if not self.valid:
53 |             log.error('No valid notify target set')
54 |             return False
55 |         log.info(f'Notifying with {self.method} request to: {self.url}')
56 |         headers = {'User-Agent': INTERNAL_USER_AGENT}
57 |         for key, value in self.headers.items():
58 |             if key not in headers:
59 |                 headers[key] = value
60 |         if self.method == 'GET':
61 |             response = requests.get(self.url, headers=headers)
62 |         elif self.method == 'POST':
63 |             response = requests.post(self.url, headers=headers, data=self.body)
64 |         # Check the response status code is between 200 and 299
65 |         if 200 <= response.status_code < 300:
66 |             log.info(f'Notified {self.url} successfully: HTTP/{response.status_code}')
67 |             return True
68 |         else:
69 |             log.error(f'Failed {self.method} to {self.url} - got response code: HTTP/{response.status_code}')
70 |             return False
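
A quick illustration of the two notify string forms accepted by the
`NotifyURL` class above; the URLs are placeholders:

```python
from bandcampsync.notify import NotifyURL

# Bare URL form: implies a GET request with no extra headers or body.
simple = NotifyURL('http://some.service.local/refresh')
print(simple.method, simple.url)  # GET http://some.service.local/refresh

# 'method url headers body' form, with '-' leaving a field unset.
advanced = NotifyURL('POST http://some.service.local/refresh auth-header=abc somedata')
print(advanced.method, advanced.headers, advanced.body)
```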
/bin/bandcampsync:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | 
3 | 
4 | import argparse
5 | from pathlib import Path
6 | from bandcampsync import version, logger, do_sync
7 | 
8 | 
9 | log = logger.get_logger('run')
10 | 
11 | 
12 | if __name__ == '__main__':
13 |     parser = argparse.ArgumentParser(
14 |         prog='bandcampsync',
15 |         description='Syncs media purchased on bandcamp.com with a local directory',
16 |     )
17 |     # action='version' prints and exits before required arguments are enforced
18 |     parser.add_argument('-v', '--version', action='version',
19 |                         version=f'BandcampSync version: {version}',
20 |                         help='Displays the bandcampsync version and exits')
21 |     parser.add_argument('-c', '--cookies', required=True,
22 |                         help='Path to the cookies file')
23 |     parser.add_argument('-d', '--directory', required=True,
24 |                         help='Path to the directory to download media to')
25 |     parser.add_argument('-i', '--ignore', default='',
26 |                         help='A space-delimited list of patterns matching artists to bypass')
27 |     parser.add_argument('-f', '--format', default='flac',
28 |                         help='Media format to download, defaults to "flac"')
29 |     parser.add_argument('-t', '--temp-dir', default='',
30 |                         help='Path to use for temporary downloads')
31 |     parser.add_argument('-n', '--notify-url', default='',
32 |                         help='URL to notify with a GET request when any new downloads have completed')
33 |     args = parser.parse_args()
34 |     cookies_path = Path(args.cookies).resolve()
35 |     dir_path = Path(args.directory).resolve()
36 |     ign_patterns = args.ignore
37 |     media_format = args.format
38 |     if not cookies_path.is_file():
39 |         raise ValueError(f'Cookies file does not exist: {cookies_path}')
40 |     if not dir_path.is_dir():
41 |         raise ValueError(f'Directory does not exist: {dir_path}')
42 |     if ign_patterns:
43 |         log.warning(f'BandcampSync is bypassing: {ign_patterns}')
44 |     if args.temp_dir:
45 |         temp_dir = Path(args.temp_dir).resolve()
46 |         if not temp_dir.is_dir():
47 |             raise ValueError(f'Temporary directory does not exist: {temp_dir}')
48 |     else:
49 |         temp_dir = None
50 |     if args.notify_url:
51 |         notify_url = args.notify_url
52 |         log.info(f'BandcampSync will notify: {notify_url}')
53 |     else:
54 |         notify_url = None
55 |     log.info(f'BandcampSync v{version} starting')
56 |     with open(cookies_path, 'rt') as f:
57 |         cookies = f.read().strip()
58 |     log.info(f'Loaded cookies from "{cookies_path}"')
59 |     do_sync(cookies_path, cookies, dir_path, media_format, temp_dir, ign_patterns, notify_url)
60 |     log.info('Done')
61 | 
--------------------------------------------------------------------------------
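A typical one-off invocation of the script above might look like this (the
paths are illustrative):

    bandcampsync -c ~/.config/bandcampsync/cookies.txt -d /music/bandcamp -f flac

The cookies file must contain a logged-in bandcamp.com session cookie string;
both paths are validated before the sync starts.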
/bin/bandcampsync-service:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | 
3 | 
4 | import os
5 | import signal
6 | from time import sleep
7 | from random import randrange
8 | from datetime import datetime, timedelta
9 | from zoneinfo import ZoneInfo
10 | from pathlib import Path
11 | from bandcampsync import version, logger, do_sync
12 | 
13 | 
14 | log = logger.get_logger('service')
15 | 
16 | 
17 | class CatchShutdownSignal:
18 | 
19 |     def __init__(self):
20 |         self.shutdown = False
21 |         signal.signal(signal.SIGINT, self.got_exit_signal)
22 |         signal.signal(signal.SIGTERM, self.got_exit_signal)
23 | 
24 |     def got_exit_signal(self, *args, **kwargs):
25 |         self.shutdown = True
26 | 
27 | 
28 | if __name__ == '__main__':
29 |     tz_name = os.getenv('TZ', 'UTC')
30 |     cookies_path_env = os.getenv('COOKIES_FILE', '/config/cookies.txt')
31 |     dir_path_env = os.getenv('DIRECTORY', '/downloads')
32 |     media_format_env = os.getenv('FORMAT', 'flac')
33 |     ign_patterns = os.getenv('IGNORE', '')
34 |     run_daily_at_env = os.getenv('RUN_DAILY_AT', '3')
35 |     exit_after_run_env = os.getenv('EXIT_AFTER_RUN', '0')
36 |     temp_dir_env = os.getenv('TEMP_DIR', '')
37 |     notify_url_env = os.getenv('NOTIFY_URL', '')
38 |     cookies_path = Path(cookies_path_env).resolve()
39 |     dir_path = Path(dir_path_env).resolve()
40 |     if not cookies_path.is_file():
41 |         raise ValueError(f'Cookies file does not exist: {cookies_path}')
42 |     if not dir_path.is_dir():
43 |         raise ValueError(f'Directory does not exist: {dir_path}')
44 |     if temp_dir_env:
45 |         temp_dir = Path(temp_dir_env).resolve()
46 |         if not temp_dir.is_dir():
47 |             raise ValueError(f'Temporary directory does not exist: {temp_dir}')
48 |     else:
49 |         temp_dir = None
50 |     if notify_url_env:
51 |         notify_url = notify_url_env.strip()
52 |         log.info(f'BandcampSync will notify: {notify_url}')
53 |     else:
54 |         notify_url = None
55 |     log.info(f'BandcampSync v{version} starting')
56 |     with open(cookies_path, 'rt') as f:
57 |         cookies = f.read().strip()
58 |     log.info(f'Loaded cookies from "{cookies_path}"')
59 |     try:
60 |         tz = ZoneInfo(tz_name)
61 |     except Exception as e:
62 |         raise ValueError(f'Not a valid timezone name: {tz_name}') from e
63 |     try:
64 |         run_daily_at = int(run_daily_at_env)
65 |     except (ValueError, TypeError) as e:
66 |         raise ValueError(f'Invalid RUN_DAILY_AT, got: {run_daily_at_env}') from e
67 |     if not 0 <= run_daily_at <= 23:
68 |         raise ValueError(f'Invalid RUN_DAILY_AT, must be between 0 and 23, got: {run_daily_at}')
69 |     try:
70 |         exit_after_run = bool(int(exit_after_run_env))
71 |     except (ValueError, TypeError):
72 |         exit_after_run = False
73 |     time_now = datetime.now(tz).replace(microsecond=0)
74 |     log.info(f'Time now in {tz}: {time_now}')
75 |     log.info('Running an initial one-off synchronisation immediately')
76 |     catch_shutdown = CatchShutdownSignal()
77 |     try:
78 |         while not catch_shutdown.shutdown:
79 |             log.info('Starting synchronisation')
80 |             do_sync(cookies_path, cookies, dir_path, media_format_env, temp_dir, ign_patterns, notify_url)
81 |             if exit_after_run:
82 |                 log.info('Exiting after run')
83 |                 break
84 |             # Schedule the next run for the configured hour tomorrow, plus up to
85 |             # an hour of random jitter so runs do not land on the exact hour
86 |             random_delay = randrange(0, 3600)
87 |             time_now = datetime.now(tz).replace(microsecond=0)
88 |             time_tomorrow = time_now + timedelta(days=1)
89 |             time_tomorrow = time_tomorrow.replace(hour=run_daily_at, minute=0, second=0, microsecond=0)
90 |             next_sleep = int((time_tomorrow - time_now).total_seconds() + random_delay)
91 |             log.info(f'Scheduling next run for {time_tomorrow} + {random_delay} second random offset')
92 |             log.info(f'Sleeping for {next_sleep} seconds')
93 |             # Sleep in short increments so SIGINT/SIGTERM are honoured promptly
94 |             slept = 0
95 |             while slept < next_sleep and not catch_shutdown.shutdown:
96 |                 step = min(30, next_sleep - slept)
97 |                 sleep(step)
98 |                 slept += step
99 |     except KeyboardInterrupt:
100 |         log.error('Caught keyboard interrupt, stopping')
101 |     log.info('Done')
102 | 
--------------------------------------------------------------------------------
/dev.env:
--------------------------------------------------------------------------------
1 | TZ=Europe/London
2 | PUID=1000
3 | PGID=1000
4 | RUN_DAILY_AT=3
5 | COOKIES_FILE=/config/cookies.txt
6 | 
--------------------------------------------------------------------------------
/docs/cookies.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/meeb/bandcampsync/3162ec395a318f7179d360e9084f93ed5ef65330/docs/cookies.jpg
--------------------------------------------------------------------------------
/entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | # Set the 'app' user UID and GID from the ENV vars
4 | PUID="${PUID:-1000}"
5 | PGID="${PGID:-1000}"
6 | groupmod -o -g "$PGID" app
7 | usermod -o -u "$PUID" app
8 | echo "Set service UID:GID to ${PUID}:${PGID}"
9 | 
10 | # Execute whatever is set in CMD as the 'app' user
11 | exec gosu app "$@"
12 | 
--------------------------------------------------------------------------------
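Tying the service script, environment file and entrypoint together, a container
run might look like the following. The image tag matches what ci.yml publishes
for this repository, the host paths are illustrative, and /config/cookies.txt
and /downloads are the service's default COOKIES_FILE and DIRECTORY values:

    docker run -d \
      -e TZ=Europe/London \
      -e PUID=1000 -e PGID=1000 \
      -e RUN_DAILY_AT=3 \
      -v /opt/bandcampsync/config:/config \
      -v /music/bandcamp:/downloads \
      ghcr.io/meeb/bandcampsync:latest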
/requirements.txt:
--------------------------------------------------------------------------------
1 | requests
2 | beautifulsoup4
3 | html5lib
4 | 
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [metadata]
2 | description_file = README.md
3 | 
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup, find_packages
2 | 
3 | 
4 | version = '0.5.1'
5 | 
6 | 
7 | with open('README.md', 'rt') as f:
8 |     long_description = f.read()
9 | 
10 | 
11 | with open('requirements.txt', 'rt') as f:
12 |     requirements = tuple(f.read().split())
13 | 
14 | 
15 | setup(
16 |     name = 'bandcampsync',
17 |     version = version,
18 |     url = 'https://github.com/meeb/bandcampsync',
19 |     author = 'https://github.com/meeb',
20 |     author_email = 'meeb@meeb.org',
21 |     description = 'A Python module and script to synchronise media purchased on bandcamp.com with a local directory.',
22 |     long_description = long_description,
23 |     long_description_content_type = 'text/markdown',
24 |     license = 'BSD',
25 |     include_package_data = True,
26 |     install_requires = requirements,
27 |     packages = find_packages(),
28 |     scripts = [
29 |         'bin/bandcampsync',
30 |         'bin/bandcampsync-service',
31 |     ],
32 |     classifiers = [
33 |         'Development Status :: 5 - Production/Stable',
34 |         'License :: OSI Approved :: BSD License',
35 |         'Operating System :: OS Independent',
36 |         'Programming Language :: Python',
37 |         'Programming Language :: Python :: 3',
38 |         'Topic :: Software Development :: Libraries :: Python Modules',
39 |     ],
40 |     keywords = ['bandcampsync', 'bandcamp', 'media', 'sync']
41 | )
42 | 
--------------------------------------------------------------------------------
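Given the packaging metadata above, installation follows the usual setuptools
flow (assuming a release is published to PyPI under the same name; otherwise
install from a checkout of the repository):

    pip install bandcampsync
    # or, from a clone of the repository:
    pip install .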