├── .github
└── workflows
│ └── ci.yml
├── .gitignore
├── LICENCE
├── MANIFEST.in
├── README.md
├── images
└── demo-f.gif
├── manual_setup.md
├── poetry.lock
├── pyheadspace
├── __init__.py
├── __main__.py
└── auth.py
├── pyproject.toml
└── tests
└── test_pyheadspace.py
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: pyheadspace
2 |
3 | on:
4 | push:
5 | branches: [ main ]
6 | pull_request:
7 | branches: [ main ]
8 |
9 | jobs:
10 | build:
11 |
12 | runs-on: ubuntu-latest
13 | strategy:
14 | matrix:
15 | python-version: [ "3.10" ]
16 |
17 | steps:
18 | - uses: actions/checkout@v2
19 | - name: Set up Python ${{ matrix.python-version }}
20 | uses: actions/setup-python@v2
21 | with:
22 | python-version: ${{ matrix.python-version }}
23 | - name: Install dependencies
24 | run: |
25 | python -m pip install --upgrade pip
26 | pip install poetry
27 | poetry install
28 | - name: Check code style with black
29 | run: |
30 | poetry run black --check .
31 | - name: Run tests
32 | run: |
33 | poetry run pytest
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | /pyheadspace/__pycache__
2 | /headspace_dl.egg-info
3 | /debug.log
4 | /dist
5 | /build
6 | /bearer_id.txt
7 | /pyheadspace/bearer_id.txt
8 | /pyheadspace/debug.log
9 | /test.py
10 |
11 | /pyheadspace/group_ids.py
12 | /pyheadspace.egg-info
13 |
14 | /pyheadspace/test.html
15 | /venv/
16 | .idea
17 |
18 | .env
19 | launch.json
--------------------------------------------------------------------------------
/LICENCE:
--------------------------------------------------------------------------------
1 | Copyright 2021 Yash Rathi
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
4 |
5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
6 |
7 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
   1 | include README.md LICENCE
2 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # headspace-cli
2 | [](https://badge.fury.io/py/pyheadspace)
3 |
4 | Command line script to download headspace packs, singles or everyday meditation.
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 | - [👶 Dependencies](#-dependencies)
13 | - [🛠️ Installation](#️-installation)
14 | - [⚙️ Setup](#️-setup)
15 | - [🚀 Usage](#-usage)
16 |
17 |
18 |
19 | ## 👶 Dependencies
20 | * [Python 3.7 or higher](https://www.python.org/downloads/)
21 |
22 | ## 🛠️ Installation
23 | ```sh
24 | pip install --upgrade pyheadspace
25 | ```
26 | * If installing using `pip install --user`, you must add the user-level bin directory to your PATH environment variable in order to use pyheadspace. If you are using a Unix derivative (FreeBSD, GNU / Linux, OS X), you can achieve this by using `export PATH="$HOME/.local/bin:$PATH"` command.
27 |
28 |
29 | **OR install with [pipx](https://github.com/pypa/pipx)**
30 |
31 |
32 | ```sh
33 | pipx install pyheadspace
34 | ```
35 |
36 | ### This tool is only meant for personal use. Do not use this for piracy!
37 | ## ⚙️ Setup
38 |
39 | Run and enter login credentials.
40 | ```sh
41 | headspace login
42 | ```
  43 | If you use another form of authentication, like Google (i.e. you do not have a username and password), you can follow
44 | [these steps](https://github.com/yashrathi-git/pyHeadspace/blob/main/manual_setup.md)
45 |
46 |
47 |
48 | ## 🚀 Usage
49 |
50 | ## Download all packs at once
51 | ```sh
52 | # Download all packs with each session of duration 15 minutes
53 | headspace pack --all --duration 15
54 |
  55 | # Download all packs with session durations of 10 & 15 minutes
56 | headspace pack --all --duration 10 --duration 15
57 | ```
58 | **Exclude specific packs from downloading:**
59 |
60 |
61 | To exclude specific packs from downloading use `--exclude` option.
62 |
63 | It expects location of text file for links of packs to exclude downloading. Every link should be on separate line.
64 | **links.txt**:
65 | ```
66 | https://my.headspace.com/modes/meditate/content/154
67 | https://my.headspace.com/modes/meditate/content/150
68 | ```
69 | **command**
70 | ```sh
  71 | headspace pack --all --exclude links.txt
72 | ```
73 | This would download all packs except the ones in `links.txt` file
74 |
75 | ## Downloading specific pack
76 | ```sh
77 | headspace pack [Options]
78 | ```
79 |
80 |
81 |
82 | **BASIC USAGE**
83 | ```sh
84 | # Download with all session of duration 15 minutes
85 | headspace pack https://my.headspace.com/modes/meditate/content/151 --duration 15
86 |
87 | # Download sessions of multiple duration
88 | headspace pack https://my.headspace.com/modes/meditate/content/151 -d 20 -d 15
89 |
90 | ```
91 | **Options:**
92 | ```sh
93 | --id INTEGER ID of video.
94 | -d, --duration TEXT Duration or list of duration
  95 | -a, --author INTEGER The author ID that you'd like to get the audio from.
96 | You can get the author ID from a few places, including
97 | input label you find when inspecting element on the pack
98 | page.
99 | --no_meditation Only download meditation session without techniques
100 | videos.
101 | --no_techniques Only download techniques and not meditation sessions.
102 | --out TEXT Download directory
103 | --all Downloads all headspace packs.
104 | -e, --exclude TEXT Use with `--all` flag. Location of text file with links
105 | of packs to exclude downloading. Every link should be
106 | on separate line.
107 | --help Show this message and exit.
108 |
109 | ```
110 |
111 | ## Download single session
112 | ```sh
113 | headspace download [options]
114 | ```
115 |
116 |
117 |
118 |
119 | **BASIC USAGE**
120 | ```sh
 121 | $ headspace download "https://my.headspace.com/player/204?authorId=1&contentId=151&contentType=COURSE&mode=meditate&trackingName=Course&startIndex=1" --duration 15
122 | ```
123 | **Options:**
124 | ```sh
125 | --out TEXT Download directory.
126 | --id INTEGER ID of the video. Not required if URL is provided.
127 | -d, --duration Duration or list of duration
128 | --help Show this message and exit.
129 | ```
130 |
131 |
132 | ## Download everyday meditations
133 | ```sh
134 | headspace everyday [OPTIONS]
135 | ```
136 |
137 |
138 | **BASIC USAGE**
139 | ```sh
140 | # Downloads today's meditation
141 | headspace everyday
142 |
143 | # Download everyday meditation of specific time period.
144 | # DATE FORMAT: yyyy-mm-dd
145 | headspace everyday --from 2021-03-01 --to 2021-03-20
146 | ```
147 | **Options**
148 | ```
149 | --from TEXT Start download from specific date. DATE-FORMAT=>yyyy-
150 | mm-dd
151 | --to TEXT Download till a specific date. DATE-FORMAT=>yyyy-mm-dd
152 | -d, --duration TEXT Duration or list of duration
153 | --out TEXT Download directory
154 | --help Show this message and exit.
155 | ```
156 |
157 | ## Changing Language Preference
 158 | By default the language is set to English. You can change it to any other language supported by Headspace.
159 | Other Languages:
160 | - de-DE
161 | - es-ES
162 | - fr-FR
163 | - pt-BR
164 |
 165 | To change the language, modify the environment variable `HEADSPACE_LANG` and set the value to the language code.
166 |
167 | - For fish/bash shell `export HEADSPACE_LANG="fr-FR"`
 168 | - PowerShell `$env:HEADSPACE_LANG="fr-FR"`
169 |
170 |
171 |
172 |
173 |
174 |
175 |
176 | **If you encounter any issue or bug, open a new issue on [github](https://github.com/yashrathi-git/pyHeadspace)**
177 |
178 |
179 |
180 |
--------------------------------------------------------------------------------
/images/demo-f.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/yashrathi-git/headspace-cli/20987a50dc85e4a480299b17bedab083336d1e2f/images/demo-f.gif
--------------------------------------------------------------------------------
/manual_setup.md:
--------------------------------------------------------------------------------
1 | ## Setup Instructions
2 |
3 |
4 |
5 |
   6 | After we have installed `pyheadspace`, this is an important step to set it up:
7 |
8 | 1. Go to https://my.headspace.com/ and login to your account.
9 | 2. Press `Ctrl + Shift + I` or `Command + Shift + C` to open dev tools
  10 | 3. Go to the Network tab and **reload the website**
11 | 4. Now look for GET request to https://api.prod.headspace.com
12 | 5. In **request header** copy the value of authorization parameter **including the `Bearer` prefix**. **Make sure you copy it from request headers not response headers**. It would look like this:
13 | ```
14 | bearer eyJhbGciOi...
15 | ```
16 |
17 | 6. Run `headspace file` to get the location of the file. Paste the bearer token from the above step in this file.
18 |
19 | **NOTE**:
20 | `authorization` token could invalidate in the future. So if you get an authentication(Unauthorized) error, please repeat the above steps.
21 |
--------------------------------------------------------------------------------
/poetry.lock:
--------------------------------------------------------------------------------
1 | [[package]]
2 | name = "appdirs"
3 | version = "1.4.4"
4 | description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
5 | category = "main"
6 | optional = false
7 | python-versions = "*"
8 |
9 | [[package]]
10 | name = "atomicwrites"
11 | version = "1.4.0"
12 | description = "Atomic file writes."
13 | category = "dev"
14 | optional = false
15 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
16 |
17 | [[package]]
18 | name = "attrs"
19 | version = "21.4.0"
20 | description = "Classes Without Boilerplate"
21 | category = "dev"
22 | optional = false
23 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
24 |
25 | [package.extras]
26 | dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"]
27 | docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"]
28 | tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"]
29 | tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"]
30 |
31 | [[package]]
32 | name = "black"
33 | version = "22.3.0"
34 | description = "The uncompromising code formatter."
35 | category = "dev"
36 | optional = false
37 | python-versions = ">=3.6.2"
38 |
39 | [package.dependencies]
40 | click = ">=8.0.0"
41 | mypy-extensions = ">=0.4.3"
42 | pathspec = ">=0.9.0"
43 | platformdirs = ">=2"
44 | tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
45 | typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""}
46 | typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}
47 |
48 | [package.extras]
49 | colorama = ["colorama (>=0.4.3)"]
50 | d = ["aiohttp (>=3.7.4)"]
51 | jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
52 | uvloop = ["uvloop (>=0.15.2)"]
53 |
54 | [[package]]
55 | name = "certifi"
56 | version = "2021.10.8"
57 | description = "Python package for providing Mozilla's CA Bundle."
58 | category = "main"
59 | optional = false
60 | python-versions = "*"
61 |
62 | [[package]]
63 | name = "charset-normalizer"
64 | version = "2.0.12"
65 | description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
66 | category = "main"
67 | optional = false
68 | python-versions = ">=3.5.0"
69 |
70 | [package.extras]
71 | unicode_backport = ["unicodedata2"]
72 |
73 | [[package]]
74 | name = "click"
75 | version = "8.1.3"
76 | description = "Composable command line interface toolkit"
77 | category = "main"
78 | optional = false
79 | python-versions = ">=3.7"
80 |
81 | [package.dependencies]
82 | colorama = {version = "*", markers = "platform_system == \"Windows\""}
83 | importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
84 |
85 | [[package]]
86 | name = "colorama"
87 | version = "0.4.4"
88 | description = "Cross-platform colored terminal text."
89 | category = "main"
90 | optional = false
91 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
92 |
93 | [[package]]
94 | name = "commonmark"
95 | version = "0.9.1"
96 | description = "Python parser for the CommonMark Markdown spec"
97 | category = "main"
98 | optional = false
99 | python-versions = "*"
100 |
101 | [package.extras]
102 | test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"]
103 |
104 | [[package]]
105 | name = "idna"
106 | version = "3.3"
107 | description = "Internationalized Domain Names in Applications (IDNA)"
108 | category = "main"
109 | optional = false
110 | python-versions = ">=3.5"
111 |
112 | [[package]]
113 | name = "importlib-metadata"
114 | version = "4.11.3"
115 | description = "Read metadata from Python packages"
116 | category = "main"
117 | optional = false
118 | python-versions = ">=3.7"
119 |
120 | [package.dependencies]
121 | typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""}
122 | zipp = ">=0.5"
123 |
124 | [package.extras]
125 | docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"]
126 | perf = ["ipython"]
127 | testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"]
128 |
129 | [[package]]
130 | name = "iniconfig"
131 | version = "1.1.1"
132 | description = "iniconfig: brain-dead simple config-ini parsing"
133 | category = "dev"
134 | optional = false
135 | python-versions = "*"
136 |
137 | [[package]]
138 | name = "mypy-extensions"
139 | version = "0.4.3"
140 | description = "Experimental type system extensions for programs checked with the mypy typechecker."
141 | category = "dev"
142 | optional = false
143 | python-versions = "*"
144 |
145 | [[package]]
146 | name = "packaging"
147 | version = "21.3"
148 | description = "Core utilities for Python packages"
149 | category = "dev"
150 | optional = false
151 | python-versions = ">=3.6"
152 |
153 | [package.dependencies]
154 | pyparsing = ">=2.0.2,<3.0.5 || >3.0.5"
155 |
156 | [[package]]
157 | name = "pathspec"
158 | version = "0.9.0"
159 | description = "Utility library for gitignore style pattern matching of file paths."
160 | category = "dev"
161 | optional = false
162 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
163 |
164 | [[package]]
165 | name = "platformdirs"
166 | version = "2.5.2"
167 | description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
168 | category = "dev"
169 | optional = false
170 | python-versions = ">=3.7"
171 |
172 | [package.extras]
173 | docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"]
174 | test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"]
175 |
176 | [[package]]
177 | name = "pluggy"
178 | version = "1.0.0"
179 | description = "plugin and hook calling mechanisms for python"
180 | category = "dev"
181 | optional = false
182 | python-versions = ">=3.6"
183 |
184 | [package.dependencies]
185 | importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""}
186 |
187 | [package.extras]
188 | dev = ["pre-commit", "tox"]
189 | testing = ["pytest", "pytest-benchmark"]
190 |
191 | [[package]]
192 | name = "py"
193 | version = "1.11.0"
194 | description = "library with cross-python path, ini-parsing, io, code, log facilities"
195 | category = "dev"
196 | optional = false
197 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
198 |
199 | [[package]]
200 | name = "pygments"
201 | version = "2.12.0"
202 | description = "Pygments is a syntax highlighting package written in Python."
203 | category = "main"
204 | optional = false
205 | python-versions = ">=3.6"
206 |
207 | [[package]]
208 | name = "pyjwt"
209 | version = "2.3.0"
210 | description = "JSON Web Token implementation in Python"
211 | category = "main"
212 | optional = false
213 | python-versions = ">=3.6"
214 |
215 | [package.extras]
216 | crypto = ["cryptography (>=3.3.1)"]
217 | dev = ["sphinx", "sphinx-rtd-theme", "zope.interface", "cryptography (>=3.3.1)", "pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)", "mypy", "pre-commit"]
218 | docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"]
219 | tests = ["pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)"]
220 |
221 | [[package]]
222 | name = "pyparsing"
223 | version = "3.0.8"
224 | description = "pyparsing module - Classes and methods to define and execute parsing grammars"
225 | category = "dev"
226 | optional = false
227 | python-versions = ">=3.6.8"
228 |
229 | [package.extras]
230 | diagrams = ["railroad-diagrams", "jinja2"]
231 |
232 | [[package]]
233 | name = "pytest"
234 | version = "7.1.2"
235 | description = "pytest: simple powerful testing with Python"
236 | category = "dev"
237 | optional = false
238 | python-versions = ">=3.7"
239 |
240 | [package.dependencies]
241 | atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""}
242 | attrs = ">=19.2.0"
243 | colorama = {version = "*", markers = "sys_platform == \"win32\""}
244 | importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""}
245 | iniconfig = "*"
246 | packaging = "*"
247 | pluggy = ">=0.12,<2.0"
248 | py = ">=1.8.2"
249 | tomli = ">=1.0.0"
250 |
251 | [package.extras]
252 | testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"]
253 |
254 | [[package]]
255 | name = "requests"
256 | version = "2.27.1"
257 | description = "Python HTTP for Humans."
258 | category = "main"
259 | optional = false
260 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
261 |
262 | [package.dependencies]
263 | certifi = ">=2017.4.17"
264 | charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""}
265 | idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""}
266 | urllib3 = ">=1.21.1,<1.27"
267 |
268 | [package.extras]
269 | socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"]
270 | use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"]
271 |
272 | [[package]]
273 | name = "rich"
274 | version = "12.4.1"
275 | description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
276 | category = "main"
277 | optional = false
278 | python-versions = ">=3.6.3,<4.0.0"
279 |
280 | [package.dependencies]
281 | commonmark = ">=0.9.0,<0.10.0"
282 | pygments = ">=2.6.0,<3.0.0"
283 | typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9\""}
284 |
285 | [package.extras]
286 | jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"]
287 |
288 | [[package]]
289 | name = "tomli"
290 | version = "2.0.1"
291 | description = "A lil' TOML parser"
292 | category = "dev"
293 | optional = false
294 | python-versions = ">=3.7"
295 |
296 | [[package]]
297 | name = "typed-ast"
298 | version = "1.5.3"
299 | description = "a fork of Python 2 and 3 ast modules with type comment support"
300 | category = "dev"
301 | optional = false
302 | python-versions = ">=3.6"
303 |
304 | [[package]]
305 | name = "typing-extensions"
306 | version = "4.2.0"
307 | description = "Backported and Experimental Type Hints for Python 3.7+"
308 | category = "main"
309 | optional = false
310 | python-versions = ">=3.7"
311 |
312 | [[package]]
313 | name = "urllib3"
314 | version = "1.26.9"
315 | description = "HTTP library with thread-safe connection pooling, file post, and more."
316 | category = "main"
317 | optional = false
318 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
319 |
320 | [package.extras]
321 | brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"]
322 | secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"]
323 | socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
324 |
325 | [[package]]
326 | name = "zipp"
327 | version = "3.8.0"
328 | description = "Backport of pathlib-compatible object wrapper for zip files"
329 | category = "main"
330 | optional = false
331 | python-versions = ">=3.7"
332 |
333 | [package.extras]
334 | docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"]
335 | testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"]
336 |
337 | [metadata]
338 | lock-version = "1.1"
339 | python-versions = ">=3.7,<4.0.0"
340 | content-hash = "f31f646c3c27081b261945080f76be67b37001c8194dc783edda4dea192122bf"
341 |
342 | [metadata.files]
343 | appdirs = [
344 | {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"},
345 | {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"},
346 | ]
347 | atomicwrites = [
348 | {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"},
349 | {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"},
350 | ]
351 | attrs = [
352 | {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"},
353 | {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"},
354 | ]
355 | black = [
356 | {file = "black-22.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2497f9c2386572e28921fa8bec7be3e51de6801f7459dffd6e62492531c47e09"},
357 | {file = "black-22.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5795a0375eb87bfe902e80e0c8cfaedf8af4d49694d69161e5bd3206c18618bb"},
358 | {file = "black-22.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3556168e2e5c49629f7b0f377070240bd5511e45e25a4497bb0073d9dda776a"},
359 | {file = "black-22.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67c8301ec94e3bcc8906740fe071391bce40a862b7be0b86fb5382beefecd968"},
360 | {file = "black-22.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:fd57160949179ec517d32ac2ac898b5f20d68ed1a9c977346efbac9c2f1e779d"},
361 | {file = "black-22.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cc1e1de68c8e5444e8f94c3670bb48a2beef0e91dddfd4fcc29595ebd90bb9ce"},
362 | {file = "black-22.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2fc92002d44746d3e7db7cf9313cf4452f43e9ea77a2c939defce3b10b5c82"},
363 | {file = "black-22.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:a6342964b43a99dbc72f72812bf88cad8f0217ae9acb47c0d4f141a6416d2d7b"},
364 | {file = "black-22.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:328efc0cc70ccb23429d6be184a15ce613f676bdfc85e5fe8ea2a9354b4e9015"},
365 | {file = "black-22.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06f9d8846f2340dfac80ceb20200ea5d1b3f181dd0556b47af4e8e0b24fa0a6b"},
366 | {file = "black-22.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4efa5fad66b903b4a5f96d91461d90b9507a812b3c5de657d544215bb7877a"},
367 | {file = "black-22.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8477ec6bbfe0312c128e74644ac8a02ca06bcdb8982d4ee06f209be28cdf163"},
368 | {file = "black-22.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:637a4014c63fbf42a692d22b55d8ad6968a946b4a6ebc385c5505d9625b6a464"},
369 | {file = "black-22.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:863714200ada56cbc366dc9ae5291ceb936573155f8bf8e9de92aef51f3ad0f0"},
370 | {file = "black-22.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10dbe6e6d2988049b4655b2b739f98785a884d4d6b85bc35133a8fb9a2233176"},
371 | {file = "black-22.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:cee3e11161dde1b2a33a904b850b0899e0424cc331b7295f2a9698e79f9a69a0"},
372 | {file = "black-22.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5891ef8abc06576985de8fa88e95ab70641de6c1fca97e2a15820a9b69e51b20"},
373 | {file = "black-22.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:30d78ba6bf080eeaf0b7b875d924b15cd46fec5fd044ddfbad38c8ea9171043a"},
374 | {file = "black-22.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ee8f1f7228cce7dffc2b464f07ce769f478968bfb3dd1254a4c2eeed84928aad"},
375 | {file = "black-22.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ee227b696ca60dd1c507be80a6bc849a5a6ab57ac7352aad1ffec9e8b805f21"},
376 | {file = "black-22.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:9b542ced1ec0ceeff5b37d69838106a6348e60db7b8fdd245294dc1d26136265"},
377 | {file = "black-22.3.0-py3-none-any.whl", hash = "sha256:bc58025940a896d7e5356952228b68f793cf5fcb342be703c3a2669a1488cb72"},
378 | {file = "black-22.3.0.tar.gz", hash = "sha256:35020b8886c022ced9282b51b5a875b6d1ab0c387b31a065b84db7c33085ca79"},
379 | ]
380 | certifi = [
381 | {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"},
382 | {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"},
383 | ]
384 | charset-normalizer = [
385 | {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"},
386 | {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"},
387 | ]
388 | click = [
389 | {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"},
390 | {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"},
391 | ]
392 | colorama = [
393 | {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"},
394 | {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"},
395 | ]
396 | commonmark = [
397 | {file = "commonmark-0.9.1-py2.py3-none-any.whl", hash = "sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9"},
398 | {file = "commonmark-0.9.1.tar.gz", hash = "sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60"},
399 | ]
400 | idna = [
401 | {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"},
402 | {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"},
403 | ]
404 | importlib-metadata = [
405 | {file = "importlib_metadata-4.11.3-py3-none-any.whl", hash = "sha256:1208431ca90a8cca1a6b8af391bb53c1a2db74e5d1cef6ddced95d4b2062edc6"},
406 | {file = "importlib_metadata-4.11.3.tar.gz", hash = "sha256:ea4c597ebf37142f827b8f39299579e31685c31d3a438b59f469406afd0f2539"},
407 | ]
408 | iniconfig = [
409 | {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"},
410 | {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"},
411 | ]
412 | mypy-extensions = [
413 | {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},
414 | {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
415 | ]
416 | packaging = [
417 | {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
418 | {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
419 | ]
420 | pathspec = [
421 | {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"},
422 | {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"},
423 | ]
424 | platformdirs = [
425 | {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"},
426 | {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"},
427 | ]
428 | pluggy = [
429 | {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"},
430 | {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"},
431 | ]
432 | py = [
433 | {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
434 | {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
435 | ]
436 | pygments = [
437 | {file = "Pygments-2.12.0-py3-none-any.whl", hash = "sha256:dc9c10fb40944260f6ed4c688ece0cd2048414940f1cea51b8b226318411c519"},
438 | {file = "Pygments-2.12.0.tar.gz", hash = "sha256:5eb116118f9612ff1ee89ac96437bb6b49e8f04d8a13b514ba26f620208e26eb"},
439 | ]
440 | pyjwt = [
441 | {file = "PyJWT-2.3.0-py3-none-any.whl", hash = "sha256:e0c4bb8d9f0af0c7f5b1ec4c5036309617d03d56932877f2f7a0beeb5318322f"},
442 | {file = "PyJWT-2.3.0.tar.gz", hash = "sha256:b888b4d56f06f6dcd777210c334e69c737be74755d3e5e9ee3fe67dc18a0ee41"},
443 | ]
444 | pyparsing = [
445 | {file = "pyparsing-3.0.8-py3-none-any.whl", hash = "sha256:ef7b523f6356f763771559412c0d7134753f037822dad1b16945b7b846f7ad06"},
446 | {file = "pyparsing-3.0.8.tar.gz", hash = "sha256:7bf433498c016c4314268d95df76c81b842a4cb2b276fa3312cfb1e1d85f6954"},
447 | ]
448 | pytest = [
449 | {file = "pytest-7.1.2-py3-none-any.whl", hash = "sha256:13d0e3ccfc2b6e26be000cb6568c832ba67ba32e719443bfe725814d3c42433c"},
450 | {file = "pytest-7.1.2.tar.gz", hash = "sha256:a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45"},
451 | ]
452 | requests = [
453 | {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"},
454 | {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"},
455 | ]
456 | rich = [
457 | {file = "rich-12.4.1-py3-none-any.whl", hash = "sha256:d13c6c90c42e24eb7ce660db397e8c398edd58acb7f92a2a88a95572b838aaa4"},
458 | {file = "rich-12.4.1.tar.gz", hash = "sha256:d239001c0fb7de985e21ec9a4bb542b5150350330bbc1849f835b9cbc8923b91"},
459 | ]
460 | tomli = [
461 | {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
462 | {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
463 | ]
464 | typed-ast = [
465 | {file = "typed_ast-1.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ad3b48cf2b487be140072fb86feff36801487d4abb7382bb1929aaac80638ea"},
466 | {file = "typed_ast-1.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:542cd732351ba8235f20faa0fc7398946fe1a57f2cdb289e5497e1e7f48cfedb"},
467 | {file = "typed_ast-1.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dc2c11ae59003d4a26dda637222d9ae924387f96acae9492df663843aefad55"},
468 | {file = "typed_ast-1.5.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fd5df1313915dbd70eaaa88c19030b441742e8b05e6103c631c83b75e0435ccc"},
469 | {file = "typed_ast-1.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:e34f9b9e61333ecb0f7d79c21c28aa5cd63bec15cb7e1310d7d3da6ce886bc9b"},
470 | {file = "typed_ast-1.5.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f818c5b81966d4728fec14caa338e30a70dfc3da577984d38f97816c4b3071ec"},
471 | {file = "typed_ast-1.5.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3042bfc9ca118712c9809201f55355479cfcdc17449f9f8db5e744e9625c6805"},
472 | {file = "typed_ast-1.5.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4fff9fdcce59dc61ec1b317bdb319f8f4e6b69ebbe61193ae0a60c5f9333dc49"},
473 | {file = "typed_ast-1.5.3-cp36-cp36m-win_amd64.whl", hash = "sha256:8e0b8528838ffd426fea8d18bde4c73bcb4167218998cc8b9ee0a0f2bfe678a6"},
474 | {file = "typed_ast-1.5.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8ef1d96ad05a291f5c36895d86d1375c0ee70595b90f6bb5f5fdbee749b146db"},
475 | {file = "typed_ast-1.5.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed44e81517364cb5ba367e4f68fca01fba42a7a4690d40c07886586ac267d9b9"},
476 | {file = "typed_ast-1.5.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f60d9de0d087454c91b3999a296d0c4558c1666771e3460621875021bf899af9"},
477 | {file = "typed_ast-1.5.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9e237e74fd321a55c90eee9bc5d44be976979ad38a29bbd734148295c1ce7617"},
478 | {file = "typed_ast-1.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ee852185964744987609b40aee1d2eb81502ae63ee8eef614558f96a56c1902d"},
479 | {file = "typed_ast-1.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:27e46cdd01d6c3a0dd8f728b6a938a6751f7bd324817501c15fb056307f918c6"},
480 | {file = "typed_ast-1.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d64dabc6336ddc10373922a146fa2256043b3b43e61f28961caec2a5207c56d5"},
481 | {file = "typed_ast-1.5.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8cdf91b0c466a6c43f36c1964772918a2c04cfa83df8001ff32a89e357f8eb06"},
482 | {file = "typed_ast-1.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:9cc9e1457e1feb06b075c8ef8aeb046a28ec351b1958b42c7c31c989c841403a"},
483 | {file = "typed_ast-1.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e20d196815eeffb3d76b75223e8ffed124e65ee62097e4e73afb5fec6b993e7a"},
484 | {file = "typed_ast-1.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:37e5349d1d5de2f4763d534ccb26809d1c24b180a477659a12c4bde9dd677d74"},
485 | {file = "typed_ast-1.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9f1a27592fac87daa4e3f16538713d705599b0a27dfe25518b80b6b017f0a6d"},
486 | {file = "typed_ast-1.5.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8831479695eadc8b5ffed06fdfb3e424adc37962a75925668deeb503f446c0a3"},
487 | {file = "typed_ast-1.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:20d5118e494478ef2d3a2702d964dae830aedd7b4d3b626d003eea526be18718"},
488 | {file = "typed_ast-1.5.3.tar.gz", hash = "sha256:27f25232e2dd0edfe1f019d6bfaaf11e86e657d9bdb7b0956db95f560cceb2b3"},
489 | ]
490 | typing-extensions = [
491 | {file = "typing_extensions-4.2.0-py3-none-any.whl", hash = "sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708"},
492 | {file = "typing_extensions-4.2.0.tar.gz", hash = "sha256:f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376"},
493 | ]
494 | urllib3 = [
495 | {file = "urllib3-1.26.9-py2.py3-none-any.whl", hash = "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14"},
496 | {file = "urllib3-1.26.9.tar.gz", hash = "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e"},
497 | ]
498 | zipp = [
499 | {file = "zipp-3.8.0-py3-none-any.whl", hash = "sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099"},
500 | {file = "zipp-3.8.0.tar.gz", hash = "sha256:56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad"},
501 | ]
502 |
--------------------------------------------------------------------------------
/pyheadspace/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/yashrathi-git/headspace-cli/20987a50dc85e4a480299b17bedab083336d1e2f/pyheadspace/__init__.py
--------------------------------------------------------------------------------
/pyheadspace/__main__.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import os
3 | import re
4 | from datetime import date, datetime, timedelta
5 | from typing import List, Optional, Union
6 |
7 | import jwt
8 | from appdirs import user_data_dir
9 | import click
10 | import requests
11 | from rich.console import Console
12 | from rich.progress import track
13 | from urllib.parse import urlparse, parse_qs
14 | from rich.traceback import install
15 |
16 | from pyheadspace.auth import authenticate, prompt
17 |
18 | # For better tracebacks
19 | install()
20 |
# Per-user data directory (e.g. ~/.local/share/pyheadspace on Linux) used to
# persist the bearer token between runs.
BASEDIR = user_data_dir("pyheadspace")
if not os.path.exists(BASEDIR):
    os.makedirs(BASEDIR)
# Full path of the file that stores the "bearer ..." authorization header.
BEARER = os.path.abspath(os.path.join(BASEDIR, "bearer_id.txt"))

# Headspace content API endpoints; "{}" placeholders are filled by request_url().
AUDIO_URL = "https://api.prod.headspace.com/content/activities/{}"
PACK_URL = "https://api.prod.headspace.com/content/activity-groups/{}"
SIGN_URL = "https://api.prod.headspace.com/content/media-items/{}/make-signed-url"
TECHNIQUE_URL = "https://api.prod.headspace.com/content/techniques/{}"
EVERYDAY_URL = (
    "https://api.prod.headspace.com/content/view-models/everyday-headspace-banner"
)
GROUP_COLLECTION = "https://api.prod.headspace.com/content/group-collections"
# Content language, overridable via the HEADSPACE_LANG environment variable.
DESIRED_LANGUAGE = os.getenv("HEADSPACE_LANG", "en-US")

# Make sure the token file exists so the read below never fails.
if not os.path.exists(BEARER):
    with open(BEARER, "w") as file:
        file.write("")

with open(BEARER, "r") as file:
    BEARER_ID = file.read().strip()

if BEARER_ID:
    try:
        # The user ID is carried as a custom claim inside the JWT access
        # token. The signature is deliberately not verified — we only need
        # to read the claim locally, not trust it.
        USER_ID = jwt.decode(
            BEARER_ID.split(" ")[-1], options={"verify_signature": False}
        )["https://api.prod.headspace.com/hsId"]
    except Exception as e:
        # Malformed token: fall back to an empty user ID; API calls will
        # then fail with 401, which request_url() reports to the user.
        USER_ID = ""
else:
    USER_ID = ""

# Browser-like headers; "authorization" carries the stored bearer token.
headers = {
    "authority": "api.prod.headspace.com",
    "accept": "application/vnd.api+json",
    "user-agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.72 Safari/537.36",
    "authorization": BEARER_ID,
    "hs-languagepreference": DESIRED_LANGUAGE,
    "sec-gpc": "1",
    "origin": "https://my.headspace.com",
    "sec-fetch-site": "same-site",
    "sec-fetch-mode": "cors",
    "referer": "https://my.headspace.com/",
    "accept-language": "en-US,en;q=0.9",
}

console = Console()
logger = logging.getLogger("pyHeadspace")


# Shared session so every API call reuses the headers (and connections) above.
session = requests.Session()
session.headers.update(headers)


# click option/argument sets shared by commands that accept an ID or a URL.
URL_GROUP_CMD = [
    click.option("--id", type=int, default=0, help="ID of video."),
    click.argument("url", type=str, default="", required=False),
]

# click options shared by every download command (duration filter + out dir).
COMMON_CMD = [
    click.option(
        "-d",
        "--duration",
        help="Duration or list of duration",
        type=int,
        default=[15],
        multiple=True,
    ),
    click.option("--out", default="", help="Download directory"),
]
91 |
92 |
def shared_cmd(cmd):
    """Build a decorator that applies every click option in ``cmd``.

    Options are applied in reverse so that they show up in the declared
    order in ``--help`` output.
    """

    def apply_options(func):
        for option in cmd[::-1]:
            func = option(func)
        return func

    return apply_options
100 |
101 |
def check_bearer_id(bearer_id):
    """Return False when the token looks truncated (browsers shorten long
    header values with an ellipsis when copied from the dev tools)."""
    return "…" not in bearer_id
106 |
107 |
def get_group_ids():
    """Fetch the IDs of every meditation pack group, sorted ascending."""
    query = {"category": "PACK_GROUP", "limit": "-1"}
    response = request_url(GROUP_COLLECTION, params=query)
    pack_ids = []
    for entry in response["included"]:
        try:
            group_id = entry["relationships"]["activityGroup"]["data"]["id"]
        except KeyError:
            # Entries without an activity group are not packs; ignore them.
            continue
        pack_ids.append(int(group_id))
    return sorted(pack_ids)
120 |
121 |
def request_url(
    url: str, *, id: Union[str, int] = None, mute: bool = False, params=None
):
    """GET ``url`` (with ``id`` substituted into its "{}" placeholder) through
    the shared authenticated session and return the decoded JSON body.

    :param url: endpoint template, may contain one "{}" placeholder
    :param id: value substituted into the placeholder (unused if absent)
    :param mute: suppress the request log line when True
    :param params: optional query-string parameters
    :raises click.Abort: when the response body is not valid JSON
    :raises click.UsageError: for any non-2xx HTTP status
    """
    if params is None:
        params = {}
    url = url.format(id)
    if not mute:
        logger.info("Sending GET request to {}".format(url))

    response = session.get(url, params=params)
    try:
        response_js: dict = response.json()
    except Exception as e:
        # Body was not JSON at all (e.g. an HTML error page): report and abort.
        logger.critical(f"status code {response.status_code}")
        logger.critical(f"error: {e}")
        console.print(f"status code {response.status_code}")
        raise click.Abort()
    if not response.ok:
        # The API reports failures as an "errors" list in the JSON body.
        if "errors" in response_js.keys():
            errors = response_js["errors"]
            logger.error(errors)
            # 401 almost always means a missing/expired bearer token.
            if response.status_code == 401:
                console.print(
                    "\n[red]Unautorized : Unable to login to headspace account[/red]"
                )
                console.print("Run [green]headspace login[/green] first.")
            else:
                console.print(errors)
        else:
            console.print(response_js)
            logger.error(response_js)
        raise click.UsageError(f"HTTP error: status-code = {response.status_code}")
    return response_js
155 |
156 |
def round_off(time: Union[int, float]):
    """Round a duration in milliseconds down to a standard session length.

    Durations are floored to the nearest lower multiple of 5 minutes
    (e.g. 16 min -> 15). Short sessions are special-cased: anything
    under 2 minutes becomes 1, [2, 3) becomes 2, [3, 4) becomes 3 and
    [4, 5] becomes 5.
    """
    exact_minutes = time / 60000

    minutes = time // 60000
    last_digit = minutes % 10

    if 0 < last_digit < 5:
        minutes -= last_digit
    elif last_digit > 5:
        minutes -= last_digit - 5

    if minutes == 0:
        # Sub-5-minute sessions keep a coarse bucket of their own.
        if 2 <= exact_minutes < 3:
            minutes = 2
        elif 3 <= exact_minutes < 4:
            minutes = 3
        elif 4 <= exact_minutes <= 5:
            minutes = 5
        else:
            minutes = 1
    return minutes
177 |
178 |
def get_pack_attributes(
    *,
    pack_id: Union[str, int],
    duration: List[int],
    out: str,
    no_techniques: bool,
    no_meditation: bool,
    all_: bool = False,
    author: Optional[int] = None,
):
    """Fetch a pack's metadata and download its sessions and/or techniques.

    :param pack_id: legacy activity-group ID of the pack
    :param duration: session durations (minutes) to download
    :param out: download directory ("" = current directory)
    :param no_techniques: skip technique videos when True
    :param no_meditation: skip meditation sessions when True
    :param all_: set by the --all flow; skips packs already on disk
    :param author: optional narrator/author ID forwarded to the API
    """
    response = request_url(PACK_URL, id=pack_id)
    attributes: dict = response["data"]["attributes"]
    _pack_name: str = attributes["name"]
    # Because it's only used for filenames, and | is mostly not allowed in filenames
    _pack_name = _pack_name.replace("|", "-")

    if all_:
        # When bulk-downloading, a pack directory that already exists is
        # treated as already downloaded and skipped.
        exists = os.path.exists(os.path.join(out, _pack_name))
        if exists:
            console.print(f"{_pack_name} already exists [red]skipping... [/red]")
            return
    # Logging
    logger.info(f"Downloading pack, name: {_pack_name}")

    # Printing
    console.print("Pack metadata: ")
    console.print(f'[green]Name: [/green] {attributes["name"]}')
    console.print(f'[green]Description: [/green] {attributes["description"]}')

    # "included" lists the pack's items in order; meditation sessions and
    # technique videos are distinguished by their "type" field.
    data = response["included"]
    for item in data:
        if item["type"] == "orderedActivities":
            if not no_meditation:
                id = item["relationships"]["activity"]["data"]["id"]
                download_pack_session(id, duration, _pack_name, out=out, author=author)
        elif item["type"] == "orderedTechniques":
            if not no_techniques:
                id = item["relationships"]["technique"]["data"]["id"]
                download_pack_techniques(
                    id, pack_name=_pack_name, out=out, author=author
                )
220 |
221 |
def get_signed_url(response: dict, duration: List[int]) -> dict:
    """Resolve signed download URLs for the media items in ``response``.

    :param response: JSON body of a content-API request whose "included"
        list carries "mediaItems" entries
    :param duration: durations (minutes) the user asked for; items with a
        different (rounded) duration are skipped
    :return: mapping of display name -> signed URL (empty when nothing
        matched; a hint listing the available durations is printed then)
    """
    data = response["included"]
    attributes = response["data"]["attributes"]
    # "name" is present for regular content; everyday-headspace banners use
    # "titleText" instead. Resolved once up-front (it is loop-invariant) so
    # the "nothing matched" message below can no longer hit an unbound
    # variable when "included" is empty — previously a NameError.
    try:
        base_name = attributes["name"]
    except KeyError:
        base_name = attributes["titleText"]

    signed_links = {}
    av_duration = []
    for item in data:
        if item["type"] != "mediaItems":
            continue
        try:
            duration_in_min = round_off(int(item["attributes"]["durationInMs"]))
        except KeyError:
            continue
        av_duration.append(duration_in_min)
        if duration_in_min not in duration:
            continue

        name = base_name
        sign_id = item["id"]
        # Getting signed URL
        direct_url = request_url(SIGN_URL, id=sign_id)["url"]
        # Disambiguate filenames when several durations were requested.
        if len(duration) > 1:
            name += f"({duration_in_min} minutes)"

        signed_links[name] = direct_url
    if len(signed_links) == 0:
        msg = (
            f"Cannot download {base_name}. This could be"
            " because this session might not be available in "
            f"{', '.join(str(d) for d in duration)} min duration."
        )
        console.print(f"[yellow]{msg}[yellow]")
        console.print(
            "This session is available with duration of "
            f"{'/'.join(str(d) for d in av_duration)} minutes. "
            "Use [green]--duration[/green] option to modify required duration."
            "\n[red]([bold]Ctrl+C[/bold] to terminate)[/red]"
        )
        logger.warning(msg)
    return signed_links
263 |
264 |
def download_pack_session(
    id: Union[int, str],
    duration: List[int],
    pack_name: Optional[str],
    out: str,
    filename_suffix=None,
    author: Optional[int] = None,
):
    """Download one meditation session, in every requested duration.

    ``filename_suffix`` (when given) is appended to each filename;
    ``author`` selects an alternative narrator.
    """
    # Only send authorId when a narrator was explicitly chosen.
    query = {"authorId": author} if author else {}
    response = request_url(AUDIO_URL, id=id, params=query)

    for name, direct_url in get_signed_url(response, duration=duration).items():
        target = name + filename_suffix if filename_suffix else name
        download(direct_url, target, filename=target, pack_name=pack_name, out=out)
281 |
282 |
def download_pack_techniques(
    technique_id: Union[int, str],
    *,
    pack_name: Optional[str] = None,
    out: str,
    filename_suffix=None,
    author: Optional[int] = None,
):
    """Download the mp4 technique video for ``technique_id``.

    :param pack_name: parent pack directory (None = download loose)
    :param filename_suffix: appended to the filename when given
    :param author: optional narrator/author ID forwarded to the API
    """
    params = dict(authorId=author) if author else dict()
    response = request_url(TECHNIQUE_URL, id=technique_id, params=params)
    name = response["data"]["attributes"]["name"]
    if filename_suffix:
        name += filename_suffix
    # Find the mp4 media item among the included entries.
    sign_id = None
    for item in response["included"]:
        if not item["type"] == "mediaItems":
            continue
        if item["attributes"]["mimeType"] == "video/mp4":
            sign_id = item["id"]
            break
    if sign_id is None:
        # Previously this fell through to an unbound-variable NameError;
        # skip the technique with a warning instead so bulk downloads keep going.
        msg = f"No video found for technique {name}. Skipping..."
        console.print(f"[yellow]{msg}[/yellow]")
        logger.warning(msg)
        return
    direct_url = request_url(SIGN_URL, id=sign_id)["url"]
    download(
        direct_url, name, filename=name, pack_name=pack_name, out=out, is_technique=True
    )
306 |
307 |
def download(
    direct_url: str,
    name: str,
    *,
    filename: str,
    pack_name: Optional[str] = None,
    out: str,
    is_technique: bool = False,
):
    """Stream ``direct_url`` to disk with a progress bar.

    Files go into ``out``; pack content is nested as
    ``out/<pack_name>[/<Level N>][/Techniques]/<filename>.<ext>``.
    Truncated downloads are retried up to 5 times, after which the
    partial file is removed.

    :param name: display name used in console/log messages
    :param filename: basename on disk (extension added from content-type)
    :raises click.UsageError: on a non-2xx response or an invalid out path
    """
    console.print(f"[green]Downloading {name}[/green]")
    logger.info(f"Sending GET request to {direct_url}")
    media = requests.get(direct_url, stream=True)

    if not media.ok:
        media_json = media.json()
        console.print(media_json)
        logger.error(media_json)
        raise click.UsageError(f"HTTP error: status-code = {media.status_code}")

    # Derive the file extension from the MIME type; fall back to a generic
    # binary type when the header is missing (previously AttributeError on None).
    content_type = media.headers.get("content-type") or "application/octet-stream"
    media_type = content_type.split("/")[-1]
    filename += f".{media_type}"
    # Missing content-length previously crashed int(None); 0 means "unknown",
    # which disables the truncation check below.
    total_length = int(media.headers.get("content-length") or 0)
    chunk_size = 1024

    # NOTE(review): this condition can never be true (a non-existent path is
    # never a directory); kept as-is to avoid changing behaviour — verify intent.
    if not os.path.exists(out) and os.path.isdir(out):
        raise click.BadOptionUsage("--out", f"'{out}' path not valid")

    if pack_name:
        dir_path = os.path.join(out, pack_name)
        # Multi-level packs get one sub-directory per level.
        pattern = r"Session \d+ of (Level \d+)"
        level = re.findall(pattern, filename)
        if level:
            dir_path = os.path.join(dir_path, level[0])

        if is_technique:
            dir_path = os.path.join(dir_path, "Techniques")
        os.makedirs(dir_path, exist_ok=True)
        filepath = os.path.join(dir_path, filename)
    else:
        if not os.path.exists(out) and out != "":
            raise click.UsageError(message=f"'{out}' path does not exists.")
        filepath = os.path.join(out, filename)

    if os.path.exists(filepath):
        # Message previously printed a fixed "(unknown)" placeholder.
        console.print(f"'{filename}' already exists [red]skipping...[/red]")
        return

    failed_tries = 0
    max_tries = 5
    while failed_tries <= max_tries:
        downloaded_length = 0
        with open(filepath, "wb") as file:
            for chunk in track(
                media.iter_content(chunk_size=chunk_size),
                description=f"[red]Downloading...[/red]",
                total=total_length // chunk_size,
            ):
                downloaded_length += len(chunk)
                file.write(chunk)
                file.flush()

        # Retry when the stream was cut short (only possible to detect when
        # the expected length is known).
        if total_length and downloaded_length != total_length:
            failed_tries += 1
            console.print(
                f"[red]Download failed. Retrying {failed_tries} out of {max_tries}...[/red]",
            )
            media.close()
            media = requests.get(direct_url, stream=True)
        else:
            break

    if failed_tries > max_tries:
        console.print(f"[red]Failed to download {name}[/red]\n")
        logger.error(f"Failed to download {name}")
        os.remove(filepath)
386 |
387 |
def find_id(pattern: str, url: str):
    """Extract the last integer captured by ``pattern`` in ``url``.

    :raises click.UsageError: when nothing matches or the match is not numeric
    """
    matches = re.findall(pattern, url)
    try:
        return int(matches[-1])
    except (ValueError, IndexError):
        raise click.UsageError("Cannot find the ID. Use --id option to provide the ID.")
396 |
397 |
# Root click group: every subcommand hangs off this; it only configures logging.
@click.group()
@click.version_option()
@click.option(
    "--verbose", "-v", is_flag=True, help="Enable verbose mode.", default=False
)
def cli(verbose):
    """
    Download headspace packs or individual meditation and techniques.
    """
    # CRITICAL silences our own logger unless --verbose was given.
    logging.basicConfig(level=logging.DEBUG if verbose else logging.CRITICAL)
    # We don't want log messages from requests and urllib3 unless they are atleast warning
    logging.getLogger("requests").setLevel(logging.WARNING)
    logging.getLogger("urllib3").setLevel(logging.WARNING)
    if verbose:
        console.print("[bold]Verbose mode enabled[/bold]")
413 |
414 |
@cli.command("help")
@click.argument("command", required=False)
@click.pass_context
def help_(ctx, command):
    """
    Display help information
    """
    # No argument: show the top-level group help (parent context is the group).
    if not command:
        click.echo(ctx.parent.get_help())
        return

    # Look the subcommand up on the group and print its own help page.
    cmd = cli.get_command(ctx, command)

    if not cmd:
        raise click.ClickException("No such command: {}".format(command))

    click.echo(cmd.get_help(ctx))
432 |
433 |
def get_legacy_id(new_id):
    """Translate a new-style content ID (as found in my.headspace.com URLs)
    into the legacy entity ID used by the content API."""
    logger.info("Getting entity ID")
    skeleton_url = (
        "https://api.prod.headspace.com/content-aggregation/v2/content/"
        "view-models/content-info/skeleton"
    )
    query = {"contentId": new_id, "userId": USER_ID}
    return request_url(skeleton_url, params=query)["entityId"]
439 |
440 |
@cli.command("pack")
@click.option(
    "--no_meditation",
    is_flag=True,
    help="Only download meditation session without techniques videos.",
    default=False,
)
@click.option(
    "--no_techniques",
    is_flag=True,
    help="Only download techniques and not meditation sessions.",
    default=False,
)
@click.option(
    "--all", "all_", default=False, is_flag=True, help="Downloads all headspace packs."
)
@click.option(
    "--exclude",
    "-e",
    default="",
    help=(
        "Use with `--all` flag. Location of text file for"
        " links of packs to exclude downloading. Every link should be on separate line."
    ),
)
@click.option(
    "--author",
    "-a",
    type=int,
    default=0,
    help=(
        "Use to choose the author/narrator that you'd like to download the files of."
        "NOTE: If the author ID is not found, the default will download."
    ),
)
@shared_cmd(COMMON_CMD)
@shared_cmd(URL_GROUP_CMD)
def pack(
    id: int,
    duration: Union[list, tuple],
    out: str,
    no_techniques: bool,
    no_meditation: bool,
    url: str,
    all_: bool,
    exclude: str,
    author: int,
):
    """
    Download headspace packs with techniques videos.
    """

    duration = list(set(duration))  # collapse duplicate -d values
    # New-style pack URLs on my.headspace.com carry the content ID last.
    pattern = r"my.headspace.com/modes/(?:meditate|focus)/content/([0-9]+)"

    if not all_:
        # Single-pack mode: resolve the legacy entity ID from URL or --id.
        if url == "" and id <= 0:
            raise click.BadParameter("Please provide ID or URL.")
        if url:
            id = find_id(pattern, url)
            id = get_legacy_id(id)
        else:
            # --id values are new-style content IDs too; translate them as well.
            id = get_legacy_id(id)
        get_pack_attributes(
            pack_id=id,
            duration=duration,
            out=out,
            no_meditation=no_meditation,
            no_techniques=no_techniques,
            author=author,
        )
    else:
        # --all mode: build the exclusion list from the file, then walk packs.
        excluded = []
        if exclude:
            try:
                with open(exclude, "r") as file:
                    links = file.readlines()
            except FileNotFoundError:
                raise click.BadOptionUsage("exclude", "Exclude file not found.")
            for link in links:
                exclude_id = re.findall(pattern, link)
                if exclude_id:
                    # Store legacy IDs so they compare against get_group_ids().
                    excluded.append(int(get_legacy_id(int(exclude_id[0]))))
                else:
                    console.print(f"[yellow]Unable to parse: {link}[/yellow]")

        console.print("[red]Downloading all packs[/red]")
        logger.info("Downloading all packs")

        group_ids = get_group_ids()

        for pack_id in group_ids:
            if pack_id not in excluded:
                # NOTE(review): --author is not forwarded here, unlike the
                # single-pack branch above — confirm whether that is intentional.
                get_pack_attributes(
                    pack_id=pack_id,
                    duration=duration,
                    out=out,
                    no_meditation=no_meditation,
                    no_techniques=no_techniques,
                    all_=True,
                )
            else:
                logger.info(f"Skipping ID: {pack_id} as it is excluded")
544 |
545 |
@cli.command("download")
@shared_cmd(COMMON_CMD)
@click.argument("url", type=str)
def download_single(url: str, out: str, duration: Union[list, tuple]):
    """
    Download single headspace session.
    """

    # Player URLs look like my.headspace.com/player/<pack-id>?startIndex=<n>
    pattern = r"my.headspace.com/player/([0-9]+)"
    try:
        pack_id = find_id(pattern, url)
    except click.UsageError:
        raise click.UsageError("Unable to parse URL.")

    # startIndex selects which item of the pack to download (0-based; default 0).
    try:
        index = int(parse_qs(urlparse(url).query)["startIndex"][0])
    except KeyError:
        index = 0
    except ValueError:
        raise click.Abort("Unable to parse startIndex.")

    response = request_url(PACK_URL, id=pack_id)
    attributes: dict = response["data"]["attributes"]
    pack_name: str = attributes["name"]

    # NOTE(review): an out-of-range startIndex raises IndexError here —
    # confirm whether a friendlier error is wanted.
    data = response["included"]
    data = data[index]
    if data["type"] == "orderedActivities":
        id = data["relationships"]["activity"]["data"]["id"]
        download_pack_session(
            id, duration, None, out=out, filename_suffix=" - {}".format(pack_name)
        )
    elif data["type"] == "orderedTechniques":
        id = data["relationships"]["technique"]["data"]["id"]
        download_pack_techniques(
            id, pack_name=None, out=out, filename_suffix=" - {}".format(pack_name)
        )
583 |
584 |
@cli.command("file")
def display_file_location():
    """
    Display `bearer_id.txt` file location.
    """
    # BEARER is the absolute path of the stored token file.
    console.print('bearer_id.txt file is located at "{}"'.format(BEARER))
591 |
592 |
def write_bearer(bearer_id):
    """
    Setup `bearer id`
    """
    # Persist only tokens that pass the truncation check; otherwise explain
    # how to copy the full header value and abort.
    if check_bearer_id(bearer_id):
        with open(BEARER, "w") as file:
            file.write(bearer_id)
        return

    console.print(
        "\n[red]The bearer ID is invalid. It "
        "is incomplete as it contains '…' in it[/red]. \n[green]Please copy the"
        " ID by right click on the attribute 'authorization' and "
        "then 'copy value' to copy full value.[/green]"
    )
    raise click.UsageError("Bearer ID not complete")
609 |
610 |
@cli.command("everyday")
@click.option(
    "--from",
    "_from",
    type=str,
    # NOTE: defaults are evaluated once at import time; fine for a
    # short-lived CLI process.
    default=date.today().strftime("%Y-%m-%d"),
    help="Start download from specific date. DATE-FORMAT=>yyyy-mm-dd",
)
@click.option(
    "--to",
    type=str,
    default=date.today().strftime("%Y-%m-%d"),
    help="Download till a specific date. DATE-FORMAT=>yyyy-mm-dd",
)
@shared_cmd(COMMON_CMD)
def everyday(_from: str, to: str, duration: Union[list, tuple], out: str):
    """
    Download everyday headspace.
    """
    userid = USER_ID
    date_format = "%Y-%m-%d"
    _from = datetime.strptime(_from, date_format).date()
    to = datetime.strptime(to, date_format).date()

    # One request (and one download pass) per calendar day, inclusive range.
    while _from <= to:
        params = {
            "date": _from.strftime(date_format),
            "userId": userid,
        }
        response = request_url(EVERYDAY_URL, params=params)

        signed_url = get_signed_url(response, duration=duration)

        for name, direct_url in signed_url.items():
            download(direct_url, name, filename=name, out=out)
        _from += timedelta(days=1)
647 |
648 |
@cli.command("login")
def login():
    # Prompt for credentials, exchange them for a bearer token, persist it.
    email, password = prompt()
    token = authenticate(email, password)
    if not token:
        # authenticate() already printed the error details.
        raise click.Abort()
    write_bearer(token)
    console.print("[green]:heavy_check_mark:[/green] Logged in successfully!")
657 |
658 |
# NOTE(review): this runs at import time, before cli() ever executes, and so
# closes the shared session's pools up-front — presumably requests re-opens
# connections on demand, since downloads still work; confirm intent (a
# finally-block around cli() may be what was meant).
session.close()

if __name__ == "__main__":
    cli()
663 |
--------------------------------------------------------------------------------
/pyheadspace/auth.py:
--------------------------------------------------------------------------------
1 | import json
2 | import re
3 |
4 | import requests
5 | from rich.console import Console
6 |
# Headspace web login page (its inline JS carries the Auth0 client ID) and the
# Auth0 endpoints used for the password-realm login flow.
LOGIN_URL = "https://www.headspace.com/login"
AUTH_URL = "https://auth.headspace.com/co/authenticate"
BEARER_TOKEN_URL = "https://auth.headspace.com/authorize"

session = requests.Session()
console = Console()

# Browser-like headers sent with every auth request.
headers = {
    "User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:88.0) Gecko/20100101 Firefox/88.0",
    "Accept": "*/*",
    "Accept-Language": "en-US,en;q=0.5",
    "Content-Type": "application/json",
    "Origin": "https://www.headspace.com",
    "Connection": "keep-alive",
    "TE": "Trailers",
}

session.headers.update(headers)
25 |
26 |
def get_client_id():
    """Scrape the Auth0 client ID out of the login page's inline JS config."""
    login_page = session.get(LOGIN_URL)
    return re.findall(r'"clientId":"(.+?)",', login_page.text)[0]
31 |
32 |
def prompt():
    """Interactively ask for Headspace credentials; password input is masked."""
    credentials = (
        console.input("[bold red]?[/] Email: "),
        console.input("[bold red]?[/] Password: ", password=True),
    )
    return credentials
38 |
39 |
def get_bearer_token(client_id, login_ticket):
    """Exchange a login ticket for an access token via the Auth0 /authorize
    endpoint and scrape the token out of the web_message response page."""
    query = {
        "client_id": client_id,
        "response_type": "token",
        "response_mode": "web_message",
        "redirect_uri": "https://www.headspace.com/auth",
        "scope": "openid email",
        "audience": "https://api.prod.headspace.com",
        "realm": "User-Password-Headspace",
        "login_ticket": login_ticket,
        "prompt": "none",
    }
    page = session.get(BEARER_TOKEN_URL, params=query).text
    return re.findall(r'"access_token":"(.+?)"', page)[0]
56 |
57 |
def authenticate(email, password):
    """Log in with the Auth0 password-realm flow.

    :return: the value for the "authorization" header ("bearer <token>"),
        or False when login failed (details are printed to the console).
    """
    data = {
        "client_id": get_client_id(),
        "username": email,
        "password": password,
        "realm": "User-Password-Headspace",
        "credential_type": "http://auth0.com/oauth/grant-type/password-realm",
    }
    response = session.post(
        AUTH_URL,
        headers=headers,
        data=json.dumps(data),
    )
    resp_json: dict = response.json()
    try:
        login_ticket = resp_json["login_ticket"]
    except KeyError:
        # No ticket: surface whatever error Auth0 returned, then bail out.
        if "error" in resp_json.keys():
            console.print(resp_json["error"], style="red")
            if "error_description" in resp_json.keys():
                console.print(resp_json["error_description"])
        else:
            console.print(resp_json)
        return False
    bearer_token = get_bearer_token(data["client_id"], login_ticket)
    # The content API expects the lowercase "bearer " scheme prefix.
    bearer_token = "bearer " + bearer_token
    return bearer_token
85 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "pyheadspace"
3 | version = "3.1.5"
4 | description = "Command line script to download packs and singles from Headspace."
5 | authors = ["Yash Rathi "]
6 | readme = "README.md"
7 | homepage = "https://github.com/yashrathi-git/pyHeadspace"
8 | documentation = "https://github.com/yashrathi-git/pyHeadspace#pyheadspace"
9 |
10 | [tool.poetry.urls]
11 | "Bug Tracker" = "https://github.com/yashrathi-git/pyHeadspace/issues"
12 |
13 | [tool.poetry.dependencies]
14 | python = ">=3.7,<4.0.0"
15 | click = "^8.0.3"
16 | rich = "^12.2.0"
17 | requests = "^2.26.0"
18 | appdirs = "^1.4.4"
19 | PyJWT = "^2.3.0"
20 |
21 | [tool.poetry.dev-dependencies]
22 | black = "^22.3.0"
23 | pytest = "^7.1.2"
24 |
25 | [build-system]
26 | requires = ["poetry-core>=1.0.0"]
27 | build-backend = "poetry.core.masonry.api"
28 |
29 | [tool.poetry.scripts]
30 | headspace = 'pyheadspace.__main__:cli'
--------------------------------------------------------------------------------
/tests/test_pyheadspace.py:
--------------------------------------------------------------------------------
1 | """
2 | Right now this only contains basic tests. Because the library depends heavily on
3 | headspace, it makes it difficult to write automated tests.
4 | """
5 |
6 | from pyheadspace.__main__ import round_off
7 |
8 |
def test_round_off_duration():
    # (minutes-as-float, expected) pairs covering the 1-minute floor, the
    # 2/3-minute special cases, the [4, 5] -> 5 bump and plain
    # round-down-to-multiple-of-5 behaviour.
    cases = [
        (1.1, 1),
        (1.2, 1),
        (1.9, 1),
        (2, 2),
        (2.5, 2),
        (2.9, 2),
        (3, 3),
        (3.1, 3),
        (3.9, 3),
        (4, 5),
        (5, 5),
        (5.2, 5),
        (6, 5),
        (7, 5),
        (10.2, 10),
        (16, 15),
    ]
    for minutes, expected in cases:
        assert round_off(minutes * 60_000) == expected
27 |
--------------------------------------------------------------------------------