├── .editorconfig
├── .github
├── actions
│ └── setup
│ │ └── action.yml
├── release.yml
└── workflows
│ ├── ci.yml
│ └── publish-docs.yml
├── .gitignore
├── .pre-commit-config.yaml
├── LICENSE
├── Makefile
├── README.md
├── ROADMAP.md
├── docs
├── poly.py
├── sphinx
│ ├── .gitignore
│ ├── _static
│ │ └── css
│ │ │ ├── api-docs.css
│ │ │ ├── ext-links.css
│ │ │ └── version-selector.css
│ ├── _templates
│ │ ├── components
│ │ │ └── edit-this-page.html
│ │ ├── custom-module-template.rst
│ │ ├── sidebar
│ │ │ └── brand.html
│ │ └── versioning.html
│ ├── alternatives.rst
│ ├── api
│ │ └── sphinx_polyversion.rst
│ ├── conf.py
│ ├── development
│ │ └── roadmap.rst
│ ├── guide
│ │ ├── getting-started.rst
│ │ └── installation.rst
│ ├── index.rst
│ └── license.rst
├── static
│ └── .nojekyll
└── templates
│ └── index.html
├── poetry.lock
├── pyproject.toml
├── sphinx_polyversion
├── __init__.py
├── __main__.py
├── api.py
├── builder.py
├── driver.py
├── environment.py
├── git.py
├── json.py
├── log.py
├── main.py
├── pyvenv.py
├── sphinx.py
├── utils.py
└── vcs.py
├── swp.json
└── tests
├── __init__.py
├── test_api.py
├── test_git.py
├── test_json.py
├── test_pyvenv.py
├── test_sphinx.py
└── test_utils.py
/.editorconfig:
--------------------------------------------------------------------------------
1 | # http://editorconfig.org
2 |
3 | root = true
4 |
5 | [*]
6 | charset = utf-8
7 | indent_style = space
8 | indent_size = 4
9 | end_of_line = lf
10 | insert_final_newline = true
11 | trim_trailing_whitespace = true
12 |
13 | [Makefile]
14 | indent_style = tab
15 |
16 | [**.md]
17 | indent_size = 2
18 |
19 | [**.{yml,yaml}]
20 | indent_size = 2
21 |
--------------------------------------------------------------------------------
/.github/actions/setup/action.yml:
--------------------------------------------------------------------------------
1 | name: Setup
2 | description: Sets up python and poetry
3 |
4 | # note:
5 | # this is a local composite action
6 | # documentation: https://docs.github.com/en/actions/creating-actions/creating-a-composite-action
7 | # code example: https://github.com/GuillaumeFalourd/poc-github-actions/blob/main/.github/actions/local-action/action.yaml
8 |
9 | inputs:
10 | setup-pre-commit:
11 | description: Whether pre-commit shall be setup, too
12 | required: false
13 | default: "false"
14 | install-options:
15 | description: Additional arguments to pass to `poetry install`.
16 | required: false
17 | default: ""
18 | python-version:
19 | description: "The python version to install"
20 | required: false
21 | default: "3.10"
22 | cache-python:
23 | description: Whether to cache poetry venvs
24 | required: false
25 | default: "false"
26 |
27 | runs:
28 | using: "composite"
29 | steps:
30 | # Python + Poetry + Caching
31 | # See https://github.com/actions/setup-python/blob/main/docs/advanced-usage.md#caching-
32 | - name: Install poetry
33 | run: pipx install poetry
34 | shell: bash
35 |
36 | - name: setup python
37 | uses: actions/setup-python@v5
38 | if: inputs.cache-python == 'true'
39 | with:
40 | python-version: ${{ inputs.python-version }}
41 | cache: "pip"
42 |
43 | - name: setup python
44 | uses: actions/setup-python@v5
45 | if: inputs.cache-python != 'true'
46 | with:
47 | python-version: ${{ inputs.python-version }}
48 |
49 | # list cached envs
50 | # - name: Cached envs
51 | # shell: bash
52 | # run: ls $(poetry config virtualenvs.path)
53 | # if: inputs.cache-python == 'true'
54 |
55 | # Install deps
56 | - name: Install project dependencies
57 | run: |
58 | poetry install ${{ inputs.install-options }}
59 | shell: bash
60 |
61 | # Pre-commit
62 | - name: Install pre-commit
63 | if: ${{ inputs.setup-pre-commit == 'true' }}
64 | run: pipx install pre-commit
65 | shell: bash
66 | - name: Set PY variable
67 | if: ${{ inputs.setup-pre-commit == 'true' }}
68 | run: echo "PY=$(python -VV | sha256sum | cut -d' ' -f1)" >> $GITHUB_ENV
69 | shell: bash
70 | - name: Configure Caching for Pre-Commit
71 | if: ${{ inputs.setup-pre-commit == 'true' }}
72 | uses: actions/cache@v4
73 | with:
74 | path: ~/.cache/pre-commit
75 | key: pre-commit|${{ env.PY }}|${{ hashFiles('.pre-commit-config.yaml') }}
76 |
--------------------------------------------------------------------------------
/.github/release.yml:
--------------------------------------------------------------------------------
1 | changelog:
2 | categories:
3 | - title: Breaking Changes 📢
4 | labels:
5 | - major
6 | - title: New Features 💡
7 | labels:
8 | - minor
9 | - title: Patches 🩹
10 | labels:
11 | - patch
12 | - title: Other Changes
13 | labels:
14 | - "*"
15 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: CI
2 |
3 | on:
4 | push:
5 | pull_request:
6 | workflow_call:
7 |
8 | jobs:
9 | lint:
10 | runs-on: ubuntu-latest
11 | steps:
12 | - name: Checkout
13 | uses: actions/checkout@v4
14 |
15 | - name: Setup
16 | id: setup
17 | uses: ./.github/actions/setup
18 | with:
19 | setup-pre-commit: true
20 | cache-python: true
21 | install-options: --only lint --all-extras --sync
22 |
23 | # for some reason poetry won't install the extras
24 | - name: Install extras
25 | run: poetry install --all-extras
26 |
27 | # Run pre-commit hooks
28 | - name: Run pre-commit hooks
29 | run: pre-commit run --all-files
30 |
31 | # Run mypy since it cannot check imports in pre-commit hooks
32 | - name: Run linters
33 | run: make lint
34 |
35 | # Determine files changed by pre-commit hooks
36 | - name: Determine files changed by pre-commit hooks
37 | id: changed-files
38 | if: ${{ failure() }}
39 | run: echo files=$(git diff --name-only --diff-filter=AM) >> "$GITHUB_OUTPUT"
40 |
41 | # Upload changed files on pre-commit error
42 | - name: Upload changed files
43 | if: ${{ failure() && steps.changed-files.outputs.files }}
44 | uses: actions/upload-artifact@v4
45 | with:
46 | name: Files modified by pre-commit
47 | path: ${{ steps.changed-files.outputs.files }}
48 |
49 | generate_test_matrix:
50 | runs-on: ubuntu-latest
51 | outputs:
52 | matrix: ${{ steps.set-matrix.outputs.matrix }}
53 | steps:
54 | - name: Checkout
55 | uses: actions/checkout@v4
56 |
57 | - name: setup python
58 | uses: actions/setup-python@v5
59 | with:
60 | python-version: "3.11"
61 |
62 | - name: Extract extras from `pyproject.toml`
63 | id: set-matrix
64 | shell: python
65 | run: |
66 | import tomllib
67 | import os
68 | import json
69 | with open('pyproject.toml', 'rb') as f:
70 | manifest = tomllib.load(f)
71 | yaml = { 'include' : [{ 'extras' : extra} for extra in [''] + list(manifest['tool']['poetry']['extras'])]}
72 | yaml['include'].append({'extras': 'all'})
73 | out = json.dumps(yaml)
74 | print(out)
75 | with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
76 | f.write('matrix=' + out)
77 |
78 | test:
79 | name: test ${{ matrix.extras && 'with' || '' }} ${{ matrix.extras }}
80 | runs-on: ubuntu-latest
81 | needs: generate_test_matrix
82 | strategy:
83 | matrix: ${{ fromJson(needs.generate_test_matrix.outputs.matrix) }}
84 | fail-fast: false
85 | steps:
86 | - name: Checkout
87 | uses: actions/checkout@v4
88 |
89 | - name: Setup
90 | id: setup
91 | uses: ./.github/actions/setup
92 | with:
93 | install-options: --only test --sync ${{ matrix.extras && ( matrix.extras == 'all' && '--all-extras' || format('--extras "{0}"', matrix.extras)) || '' }}
94 |
95 | - name: Run Tests
96 | run: make test
97 |
98 | # Upload summary of test results
99 | - uses: test-summary/action@31493c76ec9e7aa675f1585d3ed6f1da69269a86 # v2.4
100 | if: failure() || success()
101 | with:
102 | paths: report.xml
103 |
104 | # Upload coverage to job summary
105 | - name: Summarize coverage
106 | uses: livewing/lcov-job-summary@28126fb20073f4624d0f8e2c6f4afbe1c0670cbb # v1.2.0
107 | if: matrix.extras == 'all' && (failure() || success())
108 | with:
109 | lcov: coverage.lcov
110 |
111 | docs:
112 | runs-on: ubuntu-latest
113 | steps:
114 | - name: Checkout
115 | uses: actions/checkout@v4
116 |
117 | - name: Setup
118 | id: setup
119 | uses: ./.github/actions/setup
120 | with:
121 | cache-python: true
122 | install-options: --only docs --sync
123 |
124 | - name: Build Docs
125 | run: make docs
126 |
127 | # Upload build
128 | - name: Upload build
129 | uses: actions/upload-artifact@v4
130 | with:
131 | name: Docs
132 | path: docs/build
133 |
--------------------------------------------------------------------------------
/.github/workflows/publish-docs.yml:
--------------------------------------------------------------------------------
1 | # Builds multiple versions of the Docs using the polyversion script
2 | name: Publish Docs
3 |
4 | on:
5 | push:
6 | branches:
7 | - "main"
8 |
9 | workflow_dispatch:
10 |
11 | # Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
12 | permissions:
13 | contents: read
14 | pages: write
15 | id-token: write
16 |
17 | # Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued.
18 | # We may cancel in-progress runs as their results would be overridden anyway.
19 | concurrency:
20 | group: "pages"
21 | cancel-in-progress: true
22 |
23 | jobs:
24 | publish-docs:
25 | runs-on: ubuntu-latest
26 | environment:
27 | name: github-pages
28 | url: ${{ steps.deployment.outputs.page_url }}
29 | steps:
30 | # Checkout current ref to get the current version of the build script
31 | - name: Checkout
32 | uses: actions/checkout@v4
33 |
34 | # Install poetry
35 | - name: Install poetry
36 | run: pipx install poetry
37 | shell: bash
38 |
39 | # Load cache
40 | - name: Determine poetry venv location
41 | run: echo venv_path="$(poetry config virtualenvs.path)" >> $GITHUB_ENV
42 |
43 | - name: Retrieve cache
44 | uses: actions/cache/restore@v4
45 | id: cache-restore
46 | with:
47 | path: ${{ env.venv_path }}
48 | key: "nocache"
49 | restore-keys: publish-docs|poetry|
50 |
51 | # Setup python + poetry
52 | - name: Setup build deps
53 | uses: ./.github/actions/setup
54 | with:
55 | install-options: --sync -E jinja --without docs,lint,test
56 | cache-python: false
57 |
58 | # Configure pages provides deployment URL
59 | - name: Setup Pages
60 | uses: actions/configure-pages@v5
61 |
62 | # Prepare
63 | - name: Fetch tags and branches
64 | run: git fetch -t origin
65 |
66 | - name: Retrieve CPU core count
67 | shell: python
68 | continue-on-error: true
69 | run: |
70 | import os
71 | with open(os.getenv('GITHUB_ENV'), 'a') as f:
72 | f.write('cpu_cores=' + str(len(os.sched_getaffinity(0))))
73 |
74 | # Build in a reproducible location (allows caching)
75 | - name: Build using `sphinx_polyversion`
76 | run: >
77 | poetry run sphinx-polyversion -vvv docs/poly.py build
78 |
79 | # Upload cache
80 | - name: Hash venv dir
81 | shell: bash
82 | run: |
83 | echo 'cache_key=publish-docs|poetry|'"$(find ${{ env.venv_path }} -type f -exec sha256sum {} \; | sha256sum | cut -d' ' -f1)" >> $GITHUB_ENV
84 |
85 | - name: Upload new cache
86 | uses: actions/cache/save@v4
87 | continue-on-error: true
88 | if: steps.cache-restore.outputs.cache-matched-key != env.cache_key
89 | with:
90 | path: ${{ env.venv_path }}
91 | key: ${{ env.cache_key }}
92 |
93 | # Upload built docs
94 | - name: Upload build artifact
95 | uses: actions/upload-pages-artifact@v3
96 | with:
97 | path: "build"
98 | retention-days: 7
99 |
100 | # Deploy uploaded artifact
101 | - name: Deploy to GitHub Pages
102 | id: deployment
103 | uses: actions/deploy-pages@v4
104 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | share/python-wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 | MANIFEST
28 |
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | *.spec
34 |
35 | # Installer logs
36 | pip-log.txt
37 | pip-delete-this-directory.txt
38 |
39 | # Unit test / coverage reports
40 | htmlcov/
41 | .tox/
42 | .nox/
43 | .coverage
44 | .coverage.*
45 | .cache
46 | nosetests.xml
47 | coverage.xml
48 | coverage.lcov
49 | *.cover
50 | *.py,cover
51 | .hypothesis/
52 | .pytest_cache/
53 | cover/
54 |
55 | # Translations
56 | *.mo
57 | *.pot
58 |
59 | # Django stuff:
60 | *.log
61 | local_settings.py
62 | db.sqlite3
63 | db.sqlite3-journal
64 |
65 | # Flask stuff:
66 | instance/
67 | .webassets-cache
68 |
69 | # Scrapy stuff:
70 | .scrapy
71 |
72 | # Sphinx documentation
73 | docs/_build/
74 |
75 | # PyBuilder
76 | .pybuilder/
77 | target/
78 |
79 | # Jupyter Notebook
80 | .ipynb_checkpoints
81 |
82 | # IPython
83 | profile_default/
84 | ipython_config.py
85 |
86 | # pyenv
87 | # For a library or package, you might want to ignore these files since the code is
88 | # intended to run in multiple environments; otherwise, check them in:
89 | # .python-version
90 |
91 | # pipenv
92 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
93 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
94 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
95 | # install all needed dependencies.
96 | #Pipfile.lock
97 |
98 | # poetry
99 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
100 | # This is especially recommended for binary packages to ensure reproducibility, and is more
101 | # commonly ignored for libraries.
102 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
103 | #poetry.lock
104 |
105 | # pdm
106 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
107 | #pdm.lock
108 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
109 | # in version control.
110 | # https://pdm.fming.dev/#use-with-ide
111 | .pdm.toml
112 |
113 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
114 | __pypackages__/
115 |
116 | # Celery stuff
117 | celerybeat-schedule
118 | celerybeat.pid
119 |
120 | # SageMath parsed files
121 | *.sage.py
122 |
123 | # Environments
124 | .env
125 | .venv
126 | env/
127 | venv/
128 | ENV/
129 | env.bak/
130 | venv.bak/
131 |
132 | # Spyder project settings
133 | .spyderproject
134 | .spyproject
135 |
136 | # Rope project settings
137 | .ropeproject
138 |
139 | # mkdocs documentation
140 | /site
141 |
142 | # mypy
143 | .mypy_cache/
144 | .dmypy.json
145 | dmypy.json
146 |
147 | # Pyre type checker
148 | .pyre/
149 |
150 | # pytype static type analyzer
151 | .pytype/
152 |
153 | # Cython debug symbols
154 | cython_debug/
155 |
156 | # PyCharm
157 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
158 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
159 | # and can be added to the global gitignore or merged into this file. For a more nuclear
160 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
161 | #.idea/
162 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: https://github.com/pre-commit/pre-commit-hooks
3 | rev: 2c9f875913ee60ca25ce70243dc24d5b6415598c # frozen: v4.6.0
4 | hooks:
5 | # check file system problems
6 | - id: check-case-conflict
7 | - id: check-symlinks
8 | - id: destroyed-symlinks
9 |
10 | # unify whitespace and line ending
11 | - id: trailing-whitespace
12 | args: [--markdown-linebreak-ext=md]
13 | - id: end-of-file-fixer
14 | - id: mixed-line-ending
15 |
16 | # sort requirements.txt files
17 | - id: requirements-txt-fixer
18 |
19 | # check more
20 | - id: check-yaml
21 | - id: check-toml
22 | - id: check-xml
23 | - id: check-executables-have-shebangs
24 | - id: check-merge-conflict
25 |
26 | - repo: https://github.com/python-poetry/poetry
27 | rev: "c85477da8a610a87133299f996f8d8a593aa7bff" # frozen: 1.8.0
28 | hooks:
29 | - id: poetry-check
30 | - id: poetry-lock
31 | args: ["--no-update"]
32 | files: ^pyproject\.toml|poetry\.lock$
33 |
34 | - repo: https://github.com/floatingpurr/sync_with_poetry
35 | rev: 41b6bf233c52f08f344e09107bdda253e6d8b57a # frozen: 1.1.0
36 | hooks:
37 | - id: sync_with_poetry
38 | files: ^\.pre-commit-config\.yaml|poetry\.lock$
39 | pass_filenames: false
40 | args:
41 | - "poetry.lock"
42 | - "--allow-frozen"
43 | - "--db"
44 | - "swp.json"
45 |
46 | # Enforce frozen revisions in `.pre-commit-config.yaml`
47 | - repo: https://github.com/real-yfprojects/check-pre-commit-config
48 | rev: ec697f7fda57bd81b86556212539e22f03aa86d9 # frozen: v1.0.0-alpha4
49 | hooks:
50 | - id: check-frozen
51 | args:
52 | - "--rules"
53 | - "ycamfte"
54 | - "--fix-all"
55 |
56 | # Lint python code with ruff (also sorts imports)
57 | - repo: https://github.com/charliermarsh/ruff-pre-commit
58 | rev: "f0b5944bef86f50d875305821a0ab0d8c601e465" # frozen: v0.8.4
59 | hooks:
60 | - id: ruff
61 | args: [--fix, --exit-non-zero-on-fix]
62 |
63 | # Format python code with black
64 | - repo: https://github.com/psf/black
65 | rev: ec91a2be3c44d88e1a3960a4937ad6ed3b63464e # frozen: 23.12.1
66 | hooks:
67 | - id: black
68 |
69 | # Static type checking
70 | - repo: https://github.com/pre-commit/mirrors-mypy
71 | rev: bbc3dc1f890007061f18f17e2334f216ea9e5df7 # frozen: v1.14.1
72 | hooks:
73 | - id: mypy
74 | exclude: ^(tests|docs)
75 | args: [--ignore-missing-imports]
76 |
77 | # format many other files with prettier
78 | - repo: "https://github.com/pycontribs/mirrors-prettier"
79 | rev: b28ab1718b516827e9f6cdfbd3c4c9546489d53e # frozen: v3.3.2
80 | hooks:
81 | - id: prettier
82 | exclude: ^docs/(sphinx/_)templates/
83 |
84 | - repo: https://github.com/abravalheri/validate-pyproject
85 | rev: bea368871c59605bf2471441d0c6214bd3b80c44 # frozen: v0.18
86 | hooks:
87 | - id: validate-pyproject
88 | files: pyproject.toml$
89 |
90 | # configuration for the pre-commit.ci bot
91 | # only relevant when actually using the bot
92 | ci:
93 | autofix_commit_msg: |
94 | [pre-commit.ci] auto fixes from pre-commit hooks
95 |
96 | for more information, see https://pre-commit.ci, https://pre-commit.com and
97 | the `.pre-commit-config.yaml` file in this repository.
98 |
99 | autofix_prs: true # default
100 | autoupdate_branch: "develop"
101 | autoupdate_commit_msg: |
102 | [pre-commit.ci] Autoupdate pre-commit hook versions.
103 |
104 | for more information, see https://pre-commit.ci, https://pre-commit.com and
105 | the `.pre-commit-config.yaml` file in this repository.
106 |
107 | submodules: false # default
108 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2023 real-yfprojects (github.com user) and contributors
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | .PHONY: lint docs test
2 |
3 | lint:
4 | poetry run ruff check .
5 | poetry run mypy
6 |
7 | docs:
8 | poetry run sphinx-polyversion docs/poly.py -l -vvv
9 |
10 | test:
11 | poetry run pytest --junit-xml=report.xml --cov=sphinx_polyversion/ --cov-report=term-missing --cov-report=lcov --numprocesses=auto
12 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # sphinx-polyversion
2 |
3 | [](https://real-yfprojects.github.io/sphinx-polyversion/)
4 | [](https://pypi.org/project/sphinx-polyversion/)
5 | [](https://pypi.org/project/sphinx-polyversion/)
6 | [](https://github.com/real-yfprojects/sphinx-polyversion/blob/main/LICENSE)
7 | [](https://pypi.org/project/sphinx-polyversion/)
8 | [](https://python-poetry.org/)
9 | [](https://github.com/charliermarsh/ruff)
10 | [](https://pre-commit.com)
11 | [](https://github.com/psf/black)
12 |
13 |
14 |
15 | Build multiple versions of your sphinx docs and merge them into one website.
16 |
17 | - Isolated builds using venv, virtualenv or poetry
18 | - git support
19 | - Build with `sphinx-build` or custom commands
20 | - Access and modify all versioning data inside `conf.py`
21 | - Concurrent builds
22 | - Override build configuration from commandline easily
23 | - Render templates to the root directory containing the docs for each version
24 | - Build from local working tree easily while mocking version data
25 | - Not a sphinx extension -> standalone tool
26 | - Configuration in a python script
27 | - Highly customizable and scriptable through OOP
28 | - Implement subclasses in your configuration script to add support for other VCS, Project/dependency management tools, build tools and whatever you require
29 | - IDE integration and autocompletion
30 |
31 |
32 |
33 | Have a look at the [roadmap](./ROADMAP.md) to find out about upcoming features.
34 |
35 | ## Installation
36 |
37 | ```
38 | pip install sphinx-polyversion
39 | ```
40 |
41 | ```
42 | poetry add --group docs sphinx-polyversion
43 | ```
44 |
45 | ## Usage
46 |
47 | ### Example
48 |
49 | Setup your sphinx docs in `docs/source/sphinx`. Add a `conf.py` file
50 | with the following content:
51 |
52 | ```py
53 | from sphinx_polyversion.api import load
54 |
55 | load(globals())
56 | # This adds the following to the global scope
57 | # html_context = {
58 | # "revisions": [GitRef('main', ...), GitRef('v6.8.9', ...), ...],
59 | # "current": GitRef('v1.4.6', ...),
60 | # }
61 |
62 | # process the loaded version information as you wish
63 | html_context["latest"] = max(html_context["revisions"]) # latest by date
64 |
65 | # sphinx config
66 | project = "foo"
67 | # ...
68 | ```
69 |
70 | Configure `sphinx-polyversion` in the file `docs/poly.py`.
71 |
72 | ```py
73 | from datetime import datetime
74 | from pathlib import Path
75 |
76 | from sphinx_polyversion.api import apply_overrides
77 | from sphinx_polyversion.driver import DefaultDriver
78 | from sphinx_polyversion.git import Git, GitRef, GitRefType, file_predicate
79 | from sphinx_polyversion.pyvenv import Poetry
80 | from sphinx_polyversion.sphinx import SphinxBuilder
80 |
81 | #: Regex matching the branches to build docs for
82 | BRANCH_REGEX = r".*"
83 |
84 | #: Regex matching the tags to build docs for
85 | TAG_REGEX = r".*"
86 |
87 | #: Output dir relative to project root
88 | OUTPUT_DIR = "docs/build"
89 |
90 | #: Source directory
91 | SOURCE_DIR = "docs/source"
92 |
93 | #: Arguments to pass to `poetry install`
94 | POETRY_ARGS = "--only sphinx --sync".split()
95 |
96 | #: Arguments to pass to `sphinx-build`
97 | SPHINX_ARGS = "-a -v".split()
98 |
99 | #: Mock data used for building local version
100 | MOCK_DATA = {
101 | "revisions": [
102 | GitRef("v1.8.0", "", "", GitRefType.TAG, datetime.fromtimestamp(0)),
103 | GitRef("v1.9.3", "", "", GitRefType.TAG, datetime.fromtimestamp(1)),
104 | GitRef("v1.10.5", "", "", GitRefType.TAG, datetime.fromtimestamp(2)),
105 | GitRef("master", "", "", GitRefType.BRANCH, datetime.fromtimestamp(3)),
106 | GitRef("dev", "", "", GitRefType.BRANCH, datetime.fromtimestamp(4)),
107 | GitRef("some-feature", "", "", GitRefType.BRANCH, datetime.fromtimestamp(5)),
108 | ],
109 | "current": GitRef("local", "", "", GitRefType.BRANCH, datetime.fromtimestamp(6)),
110 | }
111 | #: Whether to build using only local files and mock data
112 | MOCK = False
113 |
114 | #: Whether to run the builds in sequence or in parallel
115 | SEQUENTIAL = False
116 |
117 | # Load overrides read from commandline to global scope
118 | apply_overrides(globals())
119 | # Determine repository root directory
120 | root = Git.root(Path(__file__).parent)
121 |
122 | # Setup driver and run it
123 | src = Path(SOURCE_DIR)
124 | DefaultDriver(
125 | root,
126 | OUTPUT_DIR,
127 | vcs=Git(
128 | branch_regex=BRANCH_REGEX,
129 | tag_regex=TAG_REGEX,
130 | buffer_size=1 * 10**9, # 1 GB
131 | predicate=file_predicate([src]), # exclude refs without source dir
132 | ),
133 | builder=SphinxBuilder(src / "sphinx", args=SPHINX_ARGS),
134 | env=Poetry.factory(args=POETRY_ARGS),
135 | template_dir=root / src / "templates",
136 | static_dir=root / src / "static",
137 | mock=MOCK_DATA,
138 | ).run(MOCK, SEQUENTIAL)
139 | ```
140 |
141 | Build your docs by running
142 |
143 | ```console
144 | $ sphinx-polyversion docs/poly.py
145 | ```
146 |
147 | ### Commandline Options
148 |
149 | ```
150 | usage: sphinx-polyversion [-h] [-o [OVERRIDE [OVERRIDE...]]] [-v] [-l] [--sequential] conf [out]
151 |
152 | Build multiple versions of your sphinx docs and merge them into one site.
153 |
154 | positional arguments:
155 | conf Polyversion config file to load. This must be a python file that can be evaluated.
156 | out Output directory to build the merged docs to.
157 |
158 | optional arguments:
159 | -h, --help show this help message and exit
160 | -o [OVERRIDE [OVERRIDE ...]], --override [OVERRIDE [OVERRIDE ...]]
161 | Override config options. Pass them as `key=value` pairs.
162 | -v, --verbosity Increase output verbosity (decreases minimum log level). The default log level is ERROR.
163 | -l, --local, --mock Build the local version of your docs.
164 | --sequential Build the revisions sequentially.
165 | ```
166 |
167 | ### How To Build Versions Differently
168 |
169 | ```py
170 | #: Mapping of revisions to changes in build parameters
171 | BUILDER = {
172 | None: SphinxBuilder(Path("docs")), # default
173 | "v1.5.7": SphinxBuilder(Path("docs/source")),
174 | "v2.0.0": CommandBuilder(
175 | Path("docs/source"),
176 | ["sphinx-autodoc", Placeholder.SOURCE_DIR, Placeholder.OUTPUT_DIR],
177 | ),
178 | "v2.4.0": CommandBuilder(
179 | Path("docs/source/sphinx"),
180 | ["sphinx-autodoc", Placeholder.SOURCE_DIR, Placeholder.OUTPUT_DIR],
181 | ),
182 | }
183 |
184 | #: Mapping of revisions to changes in environment parameters
185 | ENVIRONMENT = {
186 | None: Poetry.factory(args="--sync".split()), # first version
187 | "v1.5.7": Poetry.factory(args="--only sphinx --sync".split()),
188 | "v1.8.2": Poetry.factory(args="--only dev --sync".split(), env={"MY_VAR": "value"}),
189 | # use a pre-existing environment at the location ./.venv
190 | "v3.0.0": Pip.factory(venv=Path(".venv"), args="-e . -r requirements.txt".split()),
191 | # dynamically create an environment in the temporary build directory
192 | "v4.*.*": Pip.factory(venv=Path(".venv"), args="-e . -r requirements.txt".split(), creator=VenvWrapper(), temporary=True),
193 | }
194 |
195 | # ...
196 |
197 | DefaultDriver(
198 | # ...
199 | builder=BUILDER,
200 | env=ENVIRONMENT,
201 | selector=partial(closest_tag, root),
202 | # ...
203 | ).run()
204 | ```
205 |
206 | ### Data Passed to Sphinx
207 |
208 | ```py
209 | {"revisions": [GitRef(...), GitRef(...)], "current": GitRef(...)}
210 | ```
211 |
212 | You can change the format by passing your own factory.
213 |
214 | ```py
215 | def data(driver: DefaultDriver, rev: GitRef, env: Environment):
216 | return {
217 | "tags": list(filter(lambda r: r.type_ == GitRefType.TAG, driver.targets)),
218 | "branches": list(filter(lambda r: r.type_ == GitRefType.BRANCH, driver.targets)),
219 | "current": rev,
220 | }
221 |
222 | # ...
223 |
224 | DefaultDriver(
225 | # ...
226 | data_factory=data,
227 | # ...
228 | ).run()
229 | ```
230 |
231 | ## Contributing
232 |
233 | Contributions of all kinds are welcome. That explicitly includes suggestions for enhancing the API, the architecture or the documentation of the project.
234 | PRs are greatly appreciated as well. But please make sure that your change is wanted by opening an issue about it first before you waste your time with a PR
235 | that isn't merged in the end.
236 |
237 | By contributing you affirm the [Developer Certificate of Origin](https://developercertificate.org/) and license your work under the terms of this repository.
238 |
239 | New top-level modules must be added to `docs/sphinx/api/sphinx_polyversion.rst`.
240 |
241 | ## License
242 |
243 | MIT License
244 | See the [LICENSE](./LICENSE) file which should be located in this directory.
245 |
--------------------------------------------------------------------------------
/ROADMAP.md:
--------------------------------------------------------------------------------
1 | # 0.1.0
2 |
3 | - [x] Abstract Architecture
4 | - [x] Default Driver
5 | - [x] Command Builder
6 | - [x] Sphinx Builder
7 | - [x] NoneEnvironment (doesn't do anything)
8 | - [x] PIP Environment
9 | - [x] POETRY Environment
10 | - [x] GIT Provider
11 | - [x] Implement API for conf.py
12 | - [x] Provide data class for use in conf.py
13 | - [x] Load values
14 |
15 | # 0.2.0
16 |
17 | - [x] Docstrings
18 | - [x] Venv support
19 | - [x] Virtualenv support
20 | - [x] Implement root render
21 | - [x] Implement API for polyconf.py
22 | - [x] Override conf values
23 | - [x] Basic configuration
24 | - [x] Entry point to run from terminal
25 |
26 | # 0.3.0
27 |
28 | - [x] Fix async file_predicate
29 | - [x] Fix Poetry env location
30 | - [x] Register hooks with Encoder
31 |
32 | # 0.4.0
33 |
34 | - [x] Helpers for dynamic build paths and etc.
35 | - [x] Sort tags
36 | - [x] by date
37 | - [x] by name
38 | - [x] Allow str in addition to Path in commonly used API
39 | - [x] Extend API for conf.py with quality of life improvements
40 | - [x] Make VCS provide a namer
41 | - [x] README
42 |
43 | # 0.5.0
44 |
45 | - [x] Custom data format - depending on version
46 | - [x] Verbosity flag
47 | - [x] Pre and post build commands
48 | - [x] Easily build local version and mocking
49 | - [x] PyPi package
50 |
51 | # 1.0.0
52 |
53 | - [x] 70% Test coverage
54 | - [x] Unittests
55 | - [x] Integration tests
56 | - [ ] Enhance README
57 | - [ ] Comparison to alternatives
58 | - [ ] CI
59 | - [ ] Coverage badge
60 | - [ ] Test badge
61 | - [ ] publish to pypi on release
62 | - [x] Tests
63 | - [x] Doc preview
64 | - [x] Linting
65 | - [x] Docs on Github Pages
66 |
67 | # 1.0.1
68 |
69 | - [ ] Extensive Documentation
70 | - [ ] User guide
71 | - [ ] Use different setups depending on the version
72 | - [ ] Updating docs from old versions using cherry-pick
73 | - [ ] Subclassing guide
74 | - [ ] Reference
75 | - [ ] Further explanation accompanying the reference (for new contributors) and users who want to extend inside `poly.py`
76 | - [ ] Command line syntax
77 | - [x] API
78 | - [x] Abstract classes
79 | - [x] Implementations
80 | - [ ] Contributing Standards
81 | - [ ] Contributing
82 | - [ ] Workflows, Policies
83 | - [ ] Maintaining
84 | - [ ] CI
85 | - [ ] Change in coverage
86 | - [ ] Highlight linting problems in PR
87 | - [ ] Highlight uncovered code in PR
88 |
89 | # 1.1.0
90 |
91 | - [ ] Caching (e.g. of poetry envs)
92 | - [ ] Only rebuild changed versions
93 | - [ ] Easy integration with ci
94 | - [ ] Github Action
95 | - [ ] Read conf file location from pyproject.toml?
96 |
97 | # Wishlist
98 |
99 | - typed overrides
100 |
--------------------------------------------------------------------------------
/docs/poly.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | from pathlib import Path
3 |
4 | from sphinx_polyversion.api import apply_overrides
5 | from sphinx_polyversion.driver import DefaultDriver
6 | from sphinx_polyversion.git import Git, GitRef, GitRefType, file_predicate, refs_by_type
7 | from sphinx_polyversion.pyvenv import Poetry
8 | from sphinx_polyversion.sphinx import SphinxBuilder
9 |
#: Regex matching the branches to build docs for
BRANCH_REGEX = r".*"

#: Regex matching the tags to build docs for
TAG_REGEX = r".*"

#: Output dir relative to project root
#: (this exact name is required so the commandline can override it)
OUTPUT_DIR = "docs/build"

#: Source directory relative to project root
SOURCE_DIR = "docs/"

#: Arguments to pass to `poetry install`
POETRY_ARGS = "--only docs --sync"

#: Arguments to pass to `sphinx-build`
SPHINX_ARGS = "-a -v"

#: Mock data used for building local version
#: (fake revisions so a build can run without querying git)
MOCK_DATA = {
    "revisions": [
        GitRef("v1.8.0", "", "", GitRefType.TAG, datetime.fromtimestamp(0)),
        GitRef("v1.9.3", "", "", GitRefType.TAG, datetime.fromtimestamp(1)),
        GitRef("v1.10.5", "", "", GitRefType.TAG, datetime.fromtimestamp(2)),
        GitRef("master", "", "", GitRefType.BRANCH, datetime.fromtimestamp(3)),
        GitRef("dev", "", "", GitRefType.BRANCH, datetime.fromtimestamp(4)),
        GitRef("some-feature", "", "", GitRefType.BRANCH, datetime.fromtimestamp(5)),
    ],
    "current": GitRef("local", "", "", GitRefType.TAG, datetime.fromtimestamp(6)),
}

#: Whether to build using only local files and mock data
#: (this exact name is required so the commandline can override it)
MOCK = False

#: Whether to run the builds in sequence or in parallel
#: (this exact name is required so the commandline can override it)
SEQUENTIAL = False
46 |
47 |
#: Data passed to templates
def data(driver, rev, env):
    """Assemble the context dict handed to the per-revision templates."""
    all_refs = driver.targets
    branches, tags = refs_by_type(all_refs)
    return {
        "current": rev,
        "tags": tags,
        "branches": branches,
        "revisions": all_refs,
        # prefer the newest tag; fall back to branches when no tags exist
        "latest": max(tags or branches),
    }
60 |
61 |
def root_data(driver):
    """Assemble the context dict for rendering the docs' root templates."""
    built = driver.builds
    branches, tags = refs_by_type(built)
    # prefer the newest tag; fall back to branches when no tags exist
    return {"revisions": built, "latest": max(tags or branches)}
67 |
68 |
# Load overrides read from commandline to global scope
apply_overrides(globals())
# Determine repository root directory
# (poly.py may be invoked from an arbitrary working directory)
root = Git.root(Path(__file__).parent)

# Setup driver and run it
src = Path(SOURCE_DIR)  # convert from string
DefaultDriver(
    root,
    OUTPUT_DIR,
    # the VCS provider decides which revisions get built
    vcs=Git(
        branch_regex=BRANCH_REGEX,
        tag_regex=TAG_REGEX,
        buffer_size=1 * 10**9,  # 1 GB
        predicate=file_predicate([src]),  # exclude refs without source dir
    ),
    # run sphinx-build on the `docs/sphinx` folder of each revision
    builder=SphinxBuilder(src / "sphinx", args=SPHINX_ARGS.split()),
    # build every revision in its own poetry-created virtual env
    env=Poetry.factory(args=POETRY_ARGS.split()),
    template_dir=root / src / "templates",
    static_dir=root / src / "static",
    data_factory=data,
    root_data_factory=root_data,
    mock=MOCK_DATA,
).run(MOCK, SEQUENTIAL)
93 |
--------------------------------------------------------------------------------
/docs/sphinx/.gitignore:
--------------------------------------------------------------------------------
1 | # document trees generated by sphinx
2 | doctrees
3 |
--------------------------------------------------------------------------------
/docs/sphinx/_static/css/api-docs.css:
--------------------------------------------------------------------------------
1 | /* align tables left in API docs (default was center) */
2 | table.autosummary.longtable {
3 | margin-left: 0;
4 | }
5 |
--------------------------------------------------------------------------------
/docs/sphinx/_static/css/ext-links.css:
--------------------------------------------------------------------------------
1 | /* Add small icon appended to external links */
2 |
3 | .reference.external:after {
4 | color: var(--color-sidebar-link-text);
5 | content: url("data:image/svg+xml;charset=utf-8,%3Csvg width='12' height='12' xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24' stroke-width='1.5' stroke='%23607D8B' fill='none' stroke-linecap='round' stroke-linejoin='round'%3E%3Cpath d='M0 0h24v24H0z' stroke='none'/%3E%3Cpath d='M11 7H6a2 2 0 0 0-2 2v9a2 2 0 0 0 2 2h9a2 2 0 0 0 2-2v-5M10 14 20 4M15 4h5v5'/%3E%3C/svg%3E");
6 | margin: 0 0.25rem;
7 | vertical-align: middle;
8 | }
9 |
--------------------------------------------------------------------------------
/docs/sphinx/_static/css/version-selector.css:
--------------------------------------------------------------------------------
1 | /* Style our version picker
2 |
3 | The version picker is defined in `_templates/versioning.html` and uses the same classes
4 | and ids as the one provided by the theme for use with readthedocs.io
5 | This allows us to load the styles by readthedocs as a basis
6 | and adjust them to our likings.
7 | */
8 |
9 | /* Import RTD styles */
10 | @import url("https://assets.readthedocs.org/static/css/readthedocs-doc-embed.css");
11 | @import url("https://assets.readthedocs.org/static/css/badge_only.css");
12 |
13 | /* remove border around version picker */
14 | #furo-readthedocs-versions:focus-within,
15 | #furo-readthedocs-versions:hover {
16 | box-shadow: none;
17 | }
18 |
19 | /* adjust the element showing the selected version */
20 | .rst-versions .rst-current-version {
21 | padding: var(--sidebar-item-spacing-vertical)
22 | var(--sidebar-item-spacing-horizontal);
23 | border-top: 1px solid var(--color-sidebar-search-border);
24 | color: var(--color-foreground-primary);
25 | }
26 |
27 | /* .rst-versions .rst-current-version.rst-out-of-date {
28 | color: #c64334;
29 | }
30 |
31 | .rst-versions .rst-current-version.rst-active-old-version {
32 | color: #634f00;
33 | } */
34 |
35 | /* adjust the element listing all available versions */
36 | #furo-readthedocs-versions > .rst-other-versions {
37 | padding: var(--sidebar-item-spacing-vertical)
38 | var(--sidebar-item-spacing-horizontal);
39 | border-style: none;
40 | border-top: 1px solid var(--color-sidebar-search-border);
41 | }
42 |
43 | /* adjust list headings */
44 | .rst-versions .rst-other-versions dt {
45 | color: var(--color-foreground-secondary);
46 | }
47 |
48 | /* adjust selectable version items */
49 | .rst-versions .rst-other-versions dd a {
50 | color: var(--color-sidebar-link-text--top-level);
51 | padding-left: 0px;
52 | padding-right: 12px;
53 | }
54 |
55 | /* adjust icons for the list headings */
56 | .bi.version-header {
57 | margin-right: 1ch;
58 | }
59 |
60 | /* adjust icon for the version picker */
61 | .rst-current-version .bi-git {
62 | float: left;
63 | color: var(--color-foreground-primary);
64 | left: var(--sidebar-item-spacing-horizontal);
65 | }
66 |
--------------------------------------------------------------------------------
/docs/sphinx/_templates/components/edit-this-page.html:
--------------------------------------------------------------------------------
1 | {# Adjust link of `edit source` button The furo theme adds an `edit source`
2 | button to the top of the page that opens the page viewed on github in edit mode.
3 | However we prefer opening the file in the standards view mode. The furo theme is
4 | based on the `basic-ng` theme which defines a view-this-page button. We reuse
5 | its code to determine the page link but extend it to use the meta field
6 | `edit_path` that can be set in every .rst file to change the path the edit
7 | button links to. See
8 | https://www.sphinx-doc.org/en/master/usage/restructuredtext/field-lists.html#file-wide-metadata
9 | #} {% extends "furo/components/edit-this-page.html" %} {% from
10 | "furo/components/edit-this-page.html" import furo_edit_button with context %} {%
11 | from "basic-ng/components/edit-this-page.html" import sanitise_trailing_slash
12 | with context %} {#- Modified from
13 | https://github.com/pradyunsg/sphinx-basic-ng/blob/main/src/sphinx_basic_ng/theme/basic-ng/components/view-this-page.html#L5
14 | #} {%- macro determine_page_view_link() -%} {%- if theme_source_view_link -%} {{
15 | theme_source_view_link.format(filename=pagename+page_source_suffix) }} {%- elif
16 | theme_source_repository -%} {#- First, sanitise the trailing slashes. -#} {%-
17 | set repo = sanitise_trailing_slash(theme_source_repository) -%} {%- set branch =
18 | theme_source_branch -%} {%- set subdirectory =
19 | sanitise_trailing_slash(theme_source_directory) -%} {#- Figure out the
20 | document's source file path. -#} {% if meta.edit_path %} {# Modify path based on
21 | the meta field `edit_path` #} {% if meta.edit_path.startswith("/") %} {%- set
22 | relative_path = meta.edit_path[1:] -%} {%- set subdirectory = "" -%} {%- else
23 | -%} {%- set relative_path = meta.edit_path -%} {%- endif -%} {%- else -%} {%-
24 | set relative_path = pagename + page_source_suffix -%} {%- endif -%} {%- if not
25 | subdirectory -%} {%- set document_path = relative_path -%} {%- else -%} {%- set
26 | document_path = subdirectory + "/" + relative_path -%} {%- endif -%} {#- Don't
27 | allow http:// URLs -#} {%- if repo.startswith( ( "http://github.com/",
28 | "http://gitlab.com/", "http://bitbucket.org/", ) ) -%} {{ warning("Could not use
29 | `source_repository` provided. Please use https:// links in your `conf.py` file's
30 | `html_theme_options`.") }} {#- Handle the relevant cases -#} {%- elif
31 | repo.startswith("https://github.com/") -%} {{ repo }}/blob/{{ branch }}/{{
32 | document_path }} {%- elif repo.startswith("https://gitlab.com/") -%} {{ repo
33 | }}/blob/{{ branch }}/{{ document_path }} {%- elif
34 | repo.startswith("https://bitbucket.org/") -%} {{ repo }}/src/{{ branch }}/{{
35 | document_path }} {#- Fail with a warning -#} {%- else -%} {{ warning( "Could not
36 | understand `source_repository` provided: " + repo + "\n" + "You should set
37 | `source_view_link`, so that the view link is presented." ) }} {%- endif -%} {%-
38 | elif show_source and has_source -%} {{ pathto('_sources/' + sourcename, true) }}
39 | {%- endif -%} {%- endmacro -%} {# use the edit button code by furo but use above
40 | macro to determine URL #} {% block link_available -%} {{
41 | furo_edit_button(determine_page_view_link()) }} {%- endblock %}
42 |
--------------------------------------------------------------------------------
/docs/sphinx/_templates/custom-module-template.rst:
--------------------------------------------------------------------------------
1 | {{ fullname | escape | underline}}
2 |
3 | .. automodule:: {{ fullname }}
4 |
5 | {% block attributes %}
6 | {% if attributes %}
7 | .. rubric:: Module Attributes
8 |
9 | .. autosummary::
10 | {% for item in attributes %}
11 | {{ item }}
12 | {%- endfor %}
13 | {% endif %}
14 | {% endblock %}
15 |
16 | {% block functions %}
17 | {% if functions %}
18 | .. rubric:: {{ _('Functions') }}
19 |
20 | .. autosummary::
21 | {% for item in functions %}
22 | {{ item }}
23 | {%- endfor %}
24 | {% endif %}
25 | {% endblock %}
26 |
27 | {% block classes %}
28 | {% if classes %}
29 | .. rubric:: {{ _('Classes') }}
30 |
31 | .. autosummary::
32 | {% for item in classes %}
33 | {{ item }}
34 | {%- endfor %}
35 | {% endif %}
36 | {% endblock %}
37 |
38 | {% block exceptions %}
39 | {% if exceptions %}
40 | .. rubric:: {{ _('Exceptions') }}
41 |
42 | .. autosummary::
43 | {% for item in exceptions %}
44 | {{ item }}
45 | {%- endfor %}
46 | {% endif %}
47 | {% endblock %}
48 |
49 | {% block modules %}
50 | {% if modules %}
51 | .. rubric:: Modules
52 |
53 | .. autosummary::
54 | :toctree:
55 | :template: custom-module-template.rst
56 | :recursive:
57 | {% for item in modules %}
58 | {{ item }}
59 | {%- endfor %}
60 |
61 | {% endif %}
62 | {% endblock %}
63 |
--------------------------------------------------------------------------------
/docs/sphinx/_templates/sidebar/brand.html:
--------------------------------------------------------------------------------
1 | {# Style darglint logo in the sidebar This adds the version number as a
2 | superscript. #} {% extends "!sidebar/brand.html" %} {% block brand_content %}
3 | {{ project }}{{ release }}
6 | {% endblock brand_content %}
7 |
--------------------------------------------------------------------------------
/docs/sphinx/_templates/versioning.html:
--------------------------------------------------------------------------------
1 | {# Add version selector This generates a version selector similar to the rtd
2 | version selector using the data exposed by `sphinx-multiversion` through
3 | current, latest_version and versions. It uses the same classes and ids as the
4 | version picker provided by the theme for use with readthedocs.io The css styling
5 | can be found in `css/version-selector.css`. The template doesn't fail when the
6 | needed data isn't provided but the result doesn't work as expected. #}
7 |
8 |
16 | {# this element shows the current version and is visible by default It hides
17 | on hover while the element below becomes appears in its place. #}
18 |
19 | {# git icon indicating the version selector #}
20 |
21 | {# show current version; prepend `v` in case of branches #} {% if not
22 | current or not current.name.startswith("v") %} v: {% endif %} {{
23 | current.name if current else "undefined" }}
24 |
25 | {% if revisions %} {# This item lists the available versions grouped into
26 | branches and tags. The item is hidden by default but appears when the user
27 | hovers over the version selector. #}
28 |
29 | {% if tags %} {# List of tags #}
30 |
31 |
32 | {{ _('Tags') }}
33 |
34 | {% for item in tags %}
35 | {{ item.name }}
36 | {% endfor %}
37 |
38 | {% endif %} {% if branches %} {# List of branches #}
39 |
40 | {{ _('Branches') }}
41 | {% for item in branches %}
42 | {{ item.name }}
43 | {% endfor %}
44 |
45 | {% endif %}
46 |
47 | {% endif %}
48 |
49 |
--------------------------------------------------------------------------------
/docs/sphinx/alternatives.rst:
--------------------------------------------------------------------------------
1 | ------------
2 | Alternatives
3 | ------------
4 |
5 | - `sphinx-multiversion `_
6 | - `sphinxcontrib-versioning `_ (unmaintained)
7 | - `sphinx_simpleversion `_
8 | - `sphinx_versioning `_
9 |
--------------------------------------------------------------------------------
/docs/sphinx/api/sphinx_polyversion.rst:
--------------------------------------------------------------------------------
1 | .. currentmodule:: sphinx_polyversion
2 |
3 | Public API
4 | ==========
5 |
6 | .. autosummary::
7 | :toctree: .
8 | :template: custom-module-template.rst
9 | :recursive:
10 |
11 | api
12 | builder
13 | driver
14 | environment
15 | git
16 | json
17 | log
18 | main
19 | pyvenv
20 | sphinx
21 | utils
22 | vcs
23 |
--------------------------------------------------------------------------------
/docs/sphinx/conf.py:
--------------------------------------------------------------------------------
1 | import glob
2 | import importlib
3 | import inspect
4 | import time
5 |
6 | from sphinx_polyversion import load
7 | from sphinx_polyversion.git import GitRef
8 |
# -- Load versioning data ----------------------------------------------------

data = load(globals()) # adds variables `current` and `revisions`
current: GitRef = data["current"]

# -- Dynamic fields ----------------------------------------------------------

# current year, used in the copyright notice below
year = time.strftime("%Y")

# -- Project information -----------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information

project = "sphinx-polyversion"
author = "real-yfprojects (github user)"
copyright = f"2023-{year} {author} and contributors"  # noqa: A001
release = version = current.name
repository = f"https://github.com/real-yfprojects/{project}/"

# -- General configuration ---------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration

# some extensions are bundled with sphinx,
# the others can be found in the `docs` dependencies group
extensions = [
    # nice read the docs theme
    "sphinx_rtd_theme",
    # custom 404 not found page
    "notfound.extension",
    # copy code button
    "sphinx_copybutton",
    # include markdown files in docs
    "myst_parser",
    # tabs, badges and more html widgets
    "sphinx_design",
    # automatically generated api docs
    "sphinx.ext.autodoc",
    "sphinx.ext.autosummary",
    "sphinx.ext.napoleon",
    # in code blocks automatically link to the autodoc reference
    "sphinx_codeautolink",
    # link references to source code
    "sphinx.ext.linkcode",
    # link shortcuts
    "sphinx.ext.extlinks",
    # automatically generated argparse commandline docs
    "sphinxarg.ext",
    # show previews on external websites
    "sphinxext.opengraph",
    # GitHub Pages support — possibly redundant here, TODO verify
    "sphinx.ext.githubpages",
]

autosummary_ignore_module_all = False
autodoc_default_options = {
    "members": True,
    "show-inheritance": True,
    "special-members": "__call__, __aenter__, __aexit__",
}

exclude_patterns = []

myst_enable_extensions = [
    "amsmath",
    "attrs_inline",
    "colon_fence",
    "deflist",
    "dollarmath",
    "fieldlist",
    "html_admonition",
    "html_image",
    # "linkify", # needs additional dep
    "replacements",
    "smartquotes",
    "strikethrough",
    "tasklist",
]

# -- Options for HTML output -------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output

html_theme = "furo"
html_static_path = ["_static"]

# the html title is used for constructing the title shown in the browser tab
html_title = f"{project} {release}"

html_css_files = [
    # add markers to external links
    # "css/ext-links.css",
    #
    # load the open source bootstrap icon set
    "https://cdn.jsdelivr.net/npm/bootstrap-icons@1.10.5/font/bootstrap-icons.css",
    #
    # style the version selector
    "css/version-selector.css",
    "css/api-docs.css",
]

templates_path = [
    "_templates",
]

# override sidebar contents to add `versioning.html`
# The template uses data exposed by `sphinx-multiversion` to generate
# a version selector that is shown in the sidebar
html_sidebars = {
    "**": [
        "sidebar/brand.html", # override `furo/sidebar/brand.html`
        "sidebar/search.html",
        "sidebar/scroll-start.html",
        "sidebar/navigation.html",
        "sidebar/scroll-end.html",
        "versioning.html",
    ],
}

# configure theme options exposed by the furo theme
html_theme_options = {
    # configure 'edit source' button
    "source_repository": repository,
    "source_branch": current.obj,
    "source_directory": "docs/sphinx/",
    # Add link to repository
    "footer_icons": [
        {
            "name": "GitHub",
            "url": repository,
            "class": "bi bi-github bi-2x",
        },
    ],
}

# -- Extension Options -------------------------------------------------------

# sphinx_copybutton
copybutton_exclude = ".linenos, .gp" # exclude these elements from being copied

# sphinx.ext.extlinks
extlinks_detect_hardcoded_links = True
extlinks = {"issue": (f"{repository}/issues/%s", "issue %s")}
149 |
150 |
# sphinx.ext.linkcode
def linkcode_resolve(domain, info):
    """
    Provide an URL for a given python source object.

    The function should return None if no link is to be added.
    The argument domain specifies the language domain the object is in.
    `info` is a dictionary with the following keys
    guaranteed to be present (in the case of `py` domain):

    - module (name of the module)
    - fullname (name of the object)

    Arguments:
    ---------
    domain : str
        directive domain
    info : dict
        The objects data.

    Returns:
    -------
    str
        The github URL of the corresponding source

    .. seealso::

        https://www.sphinx-doc.org/en/master/usage/extensions/linkcode.html#confval-linkcode_resolve

    """
    if domain != "py":
        return None
    if not info["module"]:
        return None

    # map the dotted module path to a file path inside the repository
    filename = info["module"].replace(".", "/")

    # import module to determine lineno
    module_str = info["module"]
    object_str = info["fullname"]
    module = importlib.import_module(module_str)

    members = dict(inspect.getmembers(module))
    if object_str not in members:
        return None
    try:
        _lines, lineno = inspect.getsourcelines(members[object_str])
    except (TypeError, OSError):
        # e.g. module-level constants have no retrievable source
        return None

    # BUGFIX: the URL now uses the computed `filename` (it was previously
    # an unused variable and the link pointed at a hardcoded bogus path)
    return f"{repository}/blob/{current.obj}/{filename}.py#L{lineno}"
199 |
200 |
201 | # ---- Remove link to blog of the furo creator --------------------------
202 | # the furo sphinx theme contains attribution to sphinx and furo in the page footer.
203 | # However this attribution includes a link to the blog of the creator of furo.
204 | # The following hook runs over the build directory after sphinx has finished building.
205 |
206 |
def edit_html(app, exception):
    """
    Post-process the generated HTML once the build has finished.

    Removes the link to the blog of the furo creator from every page.
    Re-raises *exception* unchanged if the build failed.
    """
    if exception:
        raise exception

    pattern = f"{app.outdir}/**/*.html"
    for path in glob.glob(pattern, recursive=True):
        # read, strip the attribution snippet, write back in place
        with open(path, "r", errors="surrogateescape") as stream:
            contents = stream.read()
        cleaned = contents.replace('@pradyunsg \'s', "")
        with open(path, "w", errors="surrogateescape") as stream:
            stream.write(cleaned)
225 |
226 |
def setup(app):
    """Sphinx extension entry point: run ``edit_html`` after every build."""
    app.connect("build-finished", edit_html)
229 |
--------------------------------------------------------------------------------
/docs/sphinx/development/roadmap.rst:
--------------------------------------------------------------------------------
1 | -------
2 | Roadmap
3 | -------
4 |
5 |
6 | .. include:: ../../../ROADMAP.md
7 | :parser: myst_parser.sphinx_
8 |
--------------------------------------------------------------------------------
/docs/sphinx/guide/getting-started.rst:
--------------------------------------------------------------------------------
1 | ---------------
2 | Getting Started
3 | ---------------
4 |
5 | This guide will show you how to create a minimal setup for your project.
6 | At the same time it will link resources where you can find more information
7 | for specific customizations and use cases.
8 | This guide assumes that you installed `sphinx-polyversion` into the development
9 | environment of your project and have written some documentation that builds
10 | with `sphinx-build`.
11 |
12 | For this tool it doesn't matter which branch you are working on as it will
13 | query the whole git repository for branches and tags to build documentation for.
14 | However you will need to fetch all the references that should be visible to the tool.
15 | However the tool will always use the configuration currently checked out.
16 |
17 | .. TODO: link sphinx docs / sphinx build
18 |
19 | Configuring `sphinx-polyversion`
20 | --------------------------------
21 |
22 | With `sphinx-polyversion` everything revolves around its configuration file
23 | which is a python script conventionally named `poly.py`.
24 | This configuration file will be executed when calling `sphinx-polyversion`.
25 | This tool is designed in such a way that `poly.py` does all the heavy lifting.
26 | In fact there is no need to run :code:`sphinx-polyversion` from the commandline.
27 | Instead you can execute `poly.py` directly to build your documentation.
28 | However the `sphinx_polyversion`
29 | python package provides the underlying logic as well as helpful utilities.
30 | This design makes sphinx-polyversion highly customizable and extendable allowing
31 | it to be used in all kinds of applications - even those that the developers
32 | of `sphinx-polyversion` didn't even think about.
33 |
34 | By convention the `poly.py` file follows a specific structure that provides
35 | some benefits in combination with this tool. This section will walk you through
36 | this structure.
37 |
38 | Start off with defining the config options to be used for building
39 | your versioned documentation. This is done by initializing variables
40 | in the global scope. You can find a reasonable example below.
41 | Since `poly.py` is a self contained python script you decide every detail
42 | of the build process including all configuration options. There are
43 | no specifications you have to conform to when deciding on the config options
44 | you define and how you name them.
45 |
46 | .. warning::
47 |
48 |     To be able to override configuration options using the `sphinx-polyversion`
49 | command, you have to use the following naming specifications for config
50 | variables:
51 | The output directory must always be called :code:`OUTPUT_DIR`.
52 | The flag to use the local version and mock data must be called :code:`MOCK`.
53 | The flag to use sequential builds must be called :code:`SEQUENTIAL`.
54 | Finally, you have to pass :code:`MOCK` and :code:`SEQUENTIAL` to :code:`DefaultDriver.run`.
55 |
56 | .. TODO link reference
57 |
58 | .. note::
59 |
60 | Config options will be passed to the build logic later.
61 |     This requires types like :code:`tuple` or :code:`Path` and fortunately
62 | any type can be used for config options.
63 | However it makes sense to stick to :code:`string` where possible
64 | since the overrides will always be a string entered from the commandline.
65 |     Currently there is no system to convert these strings to other python
66 | types. If you have an idea how to design a system
67 | that follows the philosophy of this project please open a discussion on github.
68 |
69 | .. TODO: link override section
70 | .. TODO link philosophy and discussions
71 |
72 | Defining the options as variables at the beginning not only makes
73 | the configuration file easier to understand but also allows those variables to
74 | be overridden from the commandline before being used to build the documentation.
75 | This is a major feature of `sphinx-polyversion` which will be explained in this
76 | section and :ref:`further down this guide`.
77 |
78 | .. autolink-concat:: section
79 |
80 | .. code-block:: py
81 | :caption: `docs/poly.py` - imports and config variables
82 | :linenos:
83 |
84 | from pathlib import Path
85 | from datetime import datetime
86 | from sphinx_polyversion import *
87 | from sphinx_polyversion.git import *
88 | from sphinx_polyversion.pyvenv import Poetry
89 | from sphinx_polyversion.sphinx import SphinxBuilder
90 |
91 | #: Regex matching the branches to build docs for
92 | BRANCH_REGEX = r".*"
93 |
94 | #: Regex matching the tags to build docs for
95 | TAG_REGEX = r".*"
96 |
97 | #: Output dir relative to project root
98 |     #: !!! This name has to be chosen !!!
99 | OUTPUT_DIR = "docs/build"
100 |
101 | #: Source directory
102 | SOURCE_DIR = "docs/"
103 |
104 | #: Arguments to pass to `poetry install`
105 | POETRY_ARGS = "--only docs --sync"
106 |
107 | #: Arguments to pass to `sphinx-build`
108 | SPHINX_ARGS = "-a -v"
109 |
110 | #: Mock data used for building local version
111 | MOCK_DATA = {
112 | "revisions": [
113 | GitRef("v1.8.0", "", "", GitRefType.TAG, datetime.fromtimestamp(0)),
114 | GitRef("v1.9.3", "", "", GitRefType.TAG, datetime.fromtimestamp(1)),
115 | GitRef("v1.10.5", "", "", GitRefType.TAG, datetime.fromtimestamp(2)),
116 | GitRef("master", "", "", GitRefType.BRANCH, datetime.fromtimestamp(3)),
117 | GitRef("dev", "", "", GitRefType.BRANCH, datetime.fromtimestamp(4)),
118 | GitRef("some-feature", "", "", GitRefType.BRANCH, datetime.fromtimestamp(5)),
119 | ],
120 | "current": GitRef("local", "", "", GitRefType.BRANCH, datetime.fromtimestamp(6)),
121 | }
122 |
123 | #: Whether to build using only local files and mock data
124 | MOCK = False
125 |
126 | #: Whether to run the builds in sequence or in parallel
127 | SEQUENTIAL = False
128 |
129 | Next you add the code handling the overrides read from the commandline.
130 | This is straightforward since `sphinx-polyversion` provides the function :code:`apply_overrides` that
131 | takes care of that. It parses the commandline arguments and overrides
132 | the config variables with the given values. For that you need to pass
133 | the :code:`globals()` dictionary to the function.
134 |
135 | .. TODO link function
136 |
137 |
138 | .. code-block:: py
139 | :caption: `docs/poly.py` - overrides
140 | :linenos:
141 | :lineno-start: 38
142 |
143 | # Load overrides read from commandline to global scope
144 | apply_overrides(globals())
145 |
146 | The `poly.py` file is finished with adding the code that actually builds
147 | the different versions of the documentation.
148 |
149 | First you determine the root folder of the repository.
150 | It makes sense to use the method provided since
151 | you might call the script from arbitrary locations. The root will be used
152 | for determining the locations of the template, source and static directories.
153 |
154 | After that you initialize the :code:`DefaultDriver` class using the config options
155 | you defined earlier. The driver uses the passed :code:`vcs` object to determine which
156 | versions to build. It will proceed with running the :code:`builder` object
157 | in the :code:`env` environment. In this case :code:`sphinx-build` is run in a python
158 | virtual environment created with *poetry* for each version. This means that each
159 | version is built in an isolated environment with the dependencies defined
160 | in its revision.
161 |
162 | .. TODO link reference
163 | .. TODO link poetry
164 |
165 | .. code-block:: py
166 | :caption: `docs/poly.py` - building the docs
167 | :linenos:
168 | :lineno-start: 41
169 |
170 | # Determine repository root directory
171 | root = Git.root(Path(__file__).parent)
172 |
173 | # Setup driver and run it
174 | src = Path(SOURCE_DIR) # convert from string
175 | DefaultDriver(
176 | root,
177 | OUTPUT_DIR,
178 | vcs=Git(
179 | branch_regex=BRANCH_REGEX,
180 | tag_regex=TAG_REGEX,
181 | buffer_size=1 * 10**9, # 1 GB
182 | predicate=file_predicate([src]), # exclude refs without source dir
183 | ),
184 | builder=SphinxBuilder(src / "sphinx", args=SPHINX_ARGS.split()),
185 | env=Poetry.factory(args=POETRY_ARGS.split()),
186 | template_dir=root / src / "templates",
187 | static_dir=root / src / "static",
188 | mock=MOCK_DATA,
189 | ).run(MOCK, SEQUENTIAL)
190 |
191 | Using versioning data in :code:`conf.py`
192 | ----------------------------------------
193 |
194 | When using sphinx the versioning data (current revision, list of all revisions,
195 | ...)
196 | can be accessed inside the `conf.py` file and inside the jinja templates used
197 | to render the docs. For that the version data is serialized to json and
198 | exposed through an environment variable to sphinx. The data can then be
199 | read in `conf.py` and written to `html_context`. This sphinx configuration
200 | variable holds a dictionary with fields available in jinja templates.
201 |
202 | Luckily you don't have to worry about that, the :code:`load` function takes
203 | care of everything for you. After calling this function the following data
204 | is merged into `html_context`. You can customize what data is passed to sphinx
205 | though.
206 |
207 | .. TODO: link docs for data format
208 |
209 | .. autolink-preface::
210 |
211 | from sphinx_polyversion.git import GitRef
212 |
213 | .. code-block:: py
214 | :caption: default data exposed to sphinx docs
215 |
216 | {
217 | # All revisions to be built
218 | "revisions": Tuple[GitRef, ...],
219 | # The revision sphinx is currently building
220 | "current": GitRef,
221 | }
222 |
223 | .. code-block:: py
224 | :caption: `docs/conf.py` - loading versioning data
225 | :linenos:
226 | :lineno-start: 6
227 |
228 | # -- Load versioning data ----------------------------------------------------
229 |
230 | from sphinx_polyversion import load
231 | from sphinx_polyversion.git import GitRef
232 |
233 | data = load(globals()) # adds variables `current` and `revisions`
234 | current: GitRef = data['current']
235 |
236 | Populating the root of the merged docs
237 | --------------------------------------
238 |
239 | The docs for each revision will be build into a subfolder of the `docs/build`:
240 |
241 | .. code-block::
242 |
243 | docs/build
244 | ├───dev
245 | ├───master
246 | ├───v2.3
247 | ├───v2.4
248 | └───v3.7
249 |
250 | You can add global pages to the root of the documentation. That is `docs/build/`.
251 | Those can either be static files that are copied or templates that are rendered
252 | using `jinja2`. In this example static files will be located in `docs/static`
253 | and templates in `docs/templates`. This results in the following layout:
254 |
255 | .. TODO link jinja2
256 |
257 | .. code-block::
258 |
259 | docs
260 | ├───build
261 | ├───sphinx
262 | │ ├───_static
263 | │ ├───_templates
264 | │ └───conf.py
265 | ├───static
266 | ├───templates
267 | │ └───index.html
268 | └───poly.py
269 |
270 | The :code:`index.html` file is optional but makes sense since it will be the page
271 | shown when entering the url to your documentation. In most cases you will want
272 | it to redirect to the latest revision of the sphinx docs. The following jinja
273 | template generates the corresponding html.
274 |
275 | .. code-block:: html+jinja
276 | :linenos:
277 | :caption: `docs/templates/index.html`
278 |
279 |
280 |
281 |
282 |
283 | Redirecting to master branch
284 |
285 |
289 |
290 |
291 |
292 |
293 | You will have to add some lines to `poly.py` since the template requires
294 | a `latest` field that isn't provided by default since `sphinx-polyversion` can't
295 | know which tag represents the latest revision. First you have to implement
296 | :code:`root_data` (see below) and then pass :code:`root_data_factory=root_data`
297 | to :code:`DefaultDriver`.
298 |
299 | .. TODO mention that max sorts by creation date
300 | .. TODO link reference
301 |
302 | .. autolink-preface::
303 |
304 | from sphinx_polyversion import *
305 |
306 | .. code-block:: py
307 | :caption: `docs/poly.py` - calculate and expose latest revision
308 | :linenos:
309 | :lineno-start: 40
310 |
311 | from sphinx_polyversion.git import refs_by_type
312 |
313 | def root_data(driver: DefaultDriver):
314 | revisions = driver.builds
315 | branches, tags = refs_by_type(revisions)
316 | latest = max(tags or branches)
317 | return {"revisions": revisions, "latest": latest}
318 |
319 |
320 |
321 | Building with `sphinx-polyversion`
322 | ----------------------------------
323 |
324 | Now that everything is setup you can actually run `sphinx-polyversion` and
325 | build your versioned documentation. All versions configured in `poly.py` will
326 | be built. However if you want to test local changes you can use the :code:`-l`
327 | flag to build a documentation from the files in the local filesystem. When passing
328 | this flag all other versions are not built.
329 |
330 | .. argparse::
331 | :ref: sphinx_polyversion.main.get_parser
332 | :prog: sphinx_polyversion
333 | :nodescription:
334 |
335 | .. _Overriding config options:
336 |
337 | Overriding config options
338 | -------------------------
339 |
340 | You can override the defaults set in `poly.py` by specifying values on the
341 | commandline. Specifying an output location will override :code:`OUTPUT_DIR` while
342 | specifying :code:`--local` will set :code:`MOCK` to :code:`True`.
343 | Specifying :code:`--sequential` will set :code:`SEQUENTIAL` to :code:`True`.
344 | All other variables can be overridden through the :code:`-o` flag. You can
345 | override the arguments passed to `sphinx-build` by entering the following:
346 |
347 | .. code-block:: bash
348 |
349 | sphinx-polyversion docs/poly.py -o SPHINX_BUILD='-T -v'
350 |
351 |
352 | Adding a version picker to the docs
353 | -----------------------------------
354 |
355 | There are plenty of ways how to add a widget to your rendered documentation that allows
356 | the user to select the version to view. Some themes might come with a built-in version picker
357 | while for the others you have to add one yourself. Usually you can leverage sphinx's template
358 | system for that. For a reference you can have a look at how this documentation implemented
359 | the version picker.
360 |
361 | .. TODO link relevant code
362 |
--------------------------------------------------------------------------------
/docs/sphinx/guide/installation.rst:
--------------------------------------------------------------------------------
1 | ------------
2 | Installation
3 | ------------
4 |
5 | This tool can be installed as an OS-independent python package. You will therefore
6 | need a Python installation and a python package manager like *pip*.
7 | Sphinx-polyversion is available on pypi.org. But you can also install the package
8 | directly from its git repository.
9 |
10 | .. TODO: link pypi
11 |
12 | Sphinx-polyversion provides integration with virtualenv and jinja. By specifying
13 | the respective dependency groups you can install them alongside the tool.
14 |
15 | .. TODO: link to pages explaining jinja and virtualenv integration
16 | .. TODO: link article about dependency groups
17 |
18 | .. tab-set::
19 |
20 | .. tab-item:: Pip
21 | :sync: pip
22 |
23 | .. code-block:: bash
24 |
25 | pip install sphinx-polyversion[jinja,virtualenv]
26 |
27 | .. tab-item:: Poetry
28 | :sync: poetry
29 | :selected:
30 |
31 | .. code-block:: bash
32 |
33 | poetry add --group docs sphinx-polyversion[jinja,virtualenv]
34 |
35 | .. tab-item:: Pipenv
36 | :sync: pipenv
37 |
38 | .. code-block:: bash
39 |
40 | pipenv install --dev sphinx-polyversion[jinja,virtualenv]
41 |
42 | .. note:: The minimum supported Python version is **3.8**.
43 |
44 | Installing from Source
45 | ----------------------
46 |
47 | If you want to setup a development environment refer to this guide.
48 |
49 | .. TODO: Link dev guide
50 |
51 | .. tab-set::
52 |
53 | .. tab-item:: Pip
54 | :sync: pip
55 |
56 | .. code-block:: bash
57 |
58 | pip install git+https://github.com/real-yfprojects/sphinx-polyversion[jinja,virtualenv]
59 |
60 | .. tab-item:: Poetry
61 | :sync: poetry
62 | :selected:
63 |
64 | .. code-block:: bash
65 |
66 | poetry add --group docs git+https://github.com/real-yfprojects/sphinx-polyversion[jinja,virtualenv]
67 |
68 | .. tab-item:: Pipenv
69 | :sync: pipenv
70 |
71 | .. code-block:: bash
72 |
73 | pipenv install --dev git+https://github.com/real-yfprojects/sphinx-polyversion[jinja,virtualenv]
74 |
--------------------------------------------------------------------------------
/docs/sphinx/index.rst:
--------------------------------------------------------------------------------
1 | :hide-toc:
2 |
3 | ==================
4 | Sphinx Polyversion
5 | ==================
6 |
7 | .. include:: ../../README.md
8 | :parser: myst_parser.sphinx_
9 | :start-after:
10 | :end-before:
11 |
12 |
13 | Table of Contents
14 | ~~~~~~~~~~~~~~~~~
15 |
16 | .. toctree::
17 | :maxdepth: 2
18 |
19 | guide/installation
20 | guide/getting-started
21 | alternatives
22 |
23 | .. toctree::
24 | :maxdepth: 2
25 | :caption: Reference
26 |
27 | api/sphinx_polyversion
28 |
29 | .. toctree::
30 | :maxdepth: 1
31 | :caption: Development
32 |
33 | development/roadmap
34 | license
35 |
36 | .. toctree::
37 | :caption: Links
38 |
39 | Github
40 | PyPI
41 |
--------------------------------------------------------------------------------
/docs/sphinx/license.rst:
--------------------------------------------------------------------------------
1 | -------
2 | License
3 | -------
4 |
5 | .. include:: ../../LICENSE
6 |
--------------------------------------------------------------------------------
/docs/static/.nojekyll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/real-yfprojects/sphinx-polyversion/b077a71af36606a6d542b4016a84461537d30ea6/docs/static/.nojekyll
--------------------------------------------------------------------------------
/docs/templates/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | Redirecting to latest version
6 |
7 |
11 |
12 |
13 |
14 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = ["poetry-core>=1.0.0"]
3 | build-backend = "poetry.core.masonry.api"
4 |
5 | [tool.poetry]
6 | name = "sphinx_polyversion"
7 | version = "1.1.0"
8 | description = "Build multiple versions of your sphinx docs and merge them into one website."
9 | readme = "README.md"
10 | authors = ["yfprojects "]
11 | license = "MIT"
12 | repository = "https://github.com/real-yfprojects/sphinx-polyversion"
13 | documentation = "https://real-yfprojects.github.io/sphinx-polyversion/"
14 | keywords = ["utils", "documentation", "sphinx"]
15 | classifiers = [
16 | "Development Status :: 2 - Pre-Alpha",
17 | "License :: OSI Approved :: MIT License",
18 | "Intended Audience :: Developers",
19 | "Typing :: Typed",
20 | "Topic :: Documentation",
21 | "Topic :: Documentation :: Sphinx",
22 | "Topic :: Software Development :: Documentation",
23 | "Topic :: Utilities",
24 | ]
25 |
26 |
27 | [tool.poetry.dependencies]
28 | python = ">=3.8"
29 | virtualenv = { version = ">=20", optional = true }
30 | jinja2 = { version = ">=3", optional = true }
31 |
32 | [tool.poetry.extras]
33 | virtualenv = ["virtualenv"]
34 | jinja = ["jinja2"]
35 |
36 | [tool.poetry.scripts]
37 | sphinx-polyversion = "sphinx_polyversion.main:main"
38 |
39 | [tool.poetry.group.lint.dependencies]
40 | mypy = "^1.7.1"
41 | ruff = "^0.8.4"
42 | black = "^23.11.0"
43 |
44 | [tool.poetry.group.test.dependencies]
45 | pytest = "^8.3.4"
46 | pytest-asyncio = { version = "^0.25.0", python = ">=3.9" }
47 | pytest-cov = { version = "^6.0.0", python = ">=3.9" }
48 | pytest-xdist = "^3.6.1"
49 |
50 |
51 | [tool.poetry.group.docs.dependencies]
52 | sphinx = { version = "^8.1.3", python = ">=3.10" }
53 | sphinx-argparse = { version = "^0.5.2", python = ">=3.10" }
54 | sphinx-rtd-theme-github-versions = { version = "^1.1", python = ">=3.9" }
55 | sphinx-rtd-theme = { version = "^3.0.2", python = ">=3.9" }
56 | myst-parser = { version = "^4.0.0", python = ">=3.10" }
57 | sphinx-notfound-page = { version = "^1.0.2", python = ">=3.9" }
58 | sphinx-copybutton = { version = "^0.5.2", python = ">=3.9" }
59 | furo = { version = "^2024.5.6", python = ">=3.9" }
60 | sphinxext-opengraph = { version = "^0.9.1", python = ">=3.9" }
61 | sphinx-autobuild = { version = "^2024.4.16", python = ">=3.9" }
62 | sphinx-design = { version = "^0.6", python = ">=3.9" }
63 | jinja2 = { version = "^3.1.4", python = ">=3.9" }
64 | sphinx-codeautolink = { version = "^0.17.4", python = ">=3.10" }
65 |
66 |
67 | [tool.black]
68 | target-version = ['py38']
69 |
70 | [tool.mypy]
71 | files = "sphinx_polyversion/**.py"
72 | python_version = "3.8"
73 | strict = true
74 | allow_redefinition = true
75 | exclude = ["^(tests|docs)"]
76 |
77 | [[tool.mypy.overrides]]
78 | module = "virtualenv.*"
79 | ignore_missing_imports = true
80 |
81 | [tool.ruff]
82 | target-version = "py38"
83 | line-length = 88 # Same as Black.
84 |
85 | [tool.ruff.lint]
86 | select = [
87 | "T", # flake8-print
88 | "I", # isort
89 | "F", # pyflakes
90 | "D", # pydocstyle
91 | "E", # pycodestyle
92 | "W",
93 | "FLY", # flynt
94 | "RUF", # ruff
95 | "PL", # pylint
96 | "TCH", # flake8-type-checking
97 | "ASYNC", # flake8-async
98 | "A", # flake8-builtins
99 | "C", # flake8-comprehensions
100 | "FA", # flake8-future-annotations
101 | "ISC", # flake8-implicit-str-concat
102 | "G", # flake8-implicit-str-concat
103 | "INP", # flake8-no-pep420
104 | "PT", # flake8-pytest-style
105 | "RSE", # flake8-raise
106 | "RET", # flake8-return
107 | "SIM", # flake8-simplify
108 | "PTH", # flake8-use-pathlib
109 | "TD", # flake8-todos
110 | "PYI", # flake8-pyi - lints .pyi type stubs
111 | "ERA", # ERA
112 | ]
113 | unfixable = ['ERA']
114 |
115 | ignore = [
116 | # pydocstyle
117 | # "D202", # no-blank-line-after-function
118 | "D203", # one-blank-line-before-class
119 | # "D205", # blank-line-after-summary
120 | "D212", # multi-line-summary-first-line
121 | # "D401", # non-imperative-mood
122 | # pycodestyle
123 | "E501", # line-too-long
124 | # pylint
125 | "PLR0913", # too-many-arguments
126 | # "PLR0912", # too-many-branches
127 | # "PLR0915", # too-many-statements
128 | # flake8-todos
129 | "TD002", # missing author
130 | ]
131 |
132 | # Allow unused variables when underscore-prefixed or of form `dummyN`.
133 | dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?)|dummy\\d*)$"
134 |
135 | # Less strict rules for docs/* and tests/*.
136 | [tool.ruff.lint.per-file-ignores]
137 | "docs/*" = ["PLR", "D1", "PTH", "INP"]
138 | "tests/*" = ["PLR2004"]
139 |
140 | [tool.ruff.lint.pylint]
141 | # Allow more arguments for complexity check
142 | max-args = 8
143 |
144 | [tool.pytest.ini_options]
145 | addopts = [
146 | "--import-mode=importlib",
147 | # "--cov=sphinx_polyversion",
148 | # "--cov-report=lcov",
149 | # "--cov-report=term-missing",
150 | # "--numprocesses=auto",
151 | ]
152 | testpaths = ["tests"]
153 |
--------------------------------------------------------------------------------
/sphinx_polyversion/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Build multiple versions of your sphinx docs and merge them into one website.
3 |
4 | Attributes
5 | ----------
6 | logger: Logger
7 | The root logger used by this package.
8 |
9 |
10 | """
11 | from logging import DEBUG, NullHandler, getLogger
12 |
13 | from sphinx_polyversion.api import apply_overrides, load, order_versions
14 | from sphinx_polyversion.driver import DefaultDriver
15 | from sphinx_polyversion.json import GLOBAL_DECODER, GLOBAL_ENCODER
16 |
# Public names re-exported at package level for convenient access in
# `poly.py` config files (e.g. ``from sphinx_polyversion import load``).
__all__ = (
    "GLOBAL_DECODER",
    "GLOBAL_ENCODER",
    "DefaultDriver",
    "apply_overrides",
    "load",
    "order_versions",
)

# Package root logger: pass everything through (DEBUG) but attach a
# NullHandler so the library stays silent unless the application
# configures its own logging handlers.
logger = getLogger(__name__)
logger.setLevel(DEBUG)
logger.addHandler(NullHandler())
29 |
--------------------------------------------------------------------------------
/sphinx_polyversion/__main__.py:
--------------------------------------------------------------------------------
"""Build multiple versions of your sphinx docs and merge them into one website."""

from sphinx_polyversion.main import main

# Guard the entry point so that importing this module (e.g. by tooling or
# autodoc) does not trigger a full CLI run; `python -m sphinx_polyversion`
# still executes `main()` as before.
if __name__ == "__main__":
    main()
6 |
--------------------------------------------------------------------------------
/sphinx_polyversion/api.py:
--------------------------------------------------------------------------------
1 | """API to use in config files like `conf.py`."""
2 |
3 | from __future__ import annotations
4 |
5 | import os
6 | import re
7 | from typing import Any
8 |
9 | from sphinx_polyversion.json import GLOBAL_DECODER
10 | from sphinx_polyversion.main import get_parser
11 |
12 | __all__ = ["apply_overrides", "load"]
13 |
14 |
class LoadError(RuntimeError):
    """An error occurred during loading of the metadata (see :func:`load`)."""
17 |
18 |
def load(namespace: dict[str, Any] | None = None) -> Any:
    """
    Load metadata and sphinx config vars.

    Reads the polyversion metadata (current revision, list of revisions, ...)
    that the driver serialized into the `POLYVERSION_DATA` environment
    variable. Pass ``globals()`` from your `conf.py` to have the relevant
    sphinx config vars (``html_context``) populated in place.

    Parameters
    ----------
    namespace : dict[str, Any] | None, optional
        The dictionary to load the data into, by default None

    Returns
    -------
    Any
        The data loaded from the env var.

    Raises
    ------
    LoadError
        The environment variable isn't set.

    """
    ns = namespace or {}

    env_key = "POLYVERSION_DATA"
    raw = os.environ.get(env_key)
    if not raw:
        raise LoadError(f"Env var {env_key} isn't set.")

    decoded = GLOBAL_DECODER.decode(raw)

    # merge into the `html_context` sphinx config var so the values are
    # available inside jinja templates
    context: dict[str, Any] = ns.setdefault("html_context", {})
    if isinstance(decoded, dict):
        context.update(decoded)
    else:
        context["data"] = decoded

    return decoded
59 |
60 |
def apply_overrides(namespace: dict[str, Any]) -> dict[str, Any]:
    """
    Override global config vars with values provided from the cmd line.

    You will usually want to pass `globals()`.

    Parameters
    ----------
    namespace : dict[str, Any]
        The dictionary to alter.

    Returns
    -------
    dict[str, Any]
        The values that were applied to the namespace.

    """
    parsed = get_parser(expect_config=False).parse_args()

    # start from the explicit `-o NAME=VALUE` overrides
    applied: dict[str, Any] = parsed.override
    if parsed.out:
        applied["OUTPUT_DIR"] = parsed.out

    # the boolean flags always produce a value (defaulting to False)
    applied.setdefault("MOCK", False)
    applied.setdefault("SEQUENTIAL", False)
    if parsed.local:
        applied["MOCK"] = True
    if parsed.sequential:
        applied["SEQUENTIAL"] = True

    namespace.update(applied)
    return applied
90 |
91 |
def order_versions(name: str, form_regex: str) -> tuple[str, ...]:
    """
    Derive a sort key for a version name from a regex.

    Matches `form_regex` against the whole of `name` and returns the
    captured groups as a tuple, suitable for use as a sort key when
    ordering versions.

    Parameters
    ----------
    name : str
        The tag or branch name to parse.
    form_regex : str
        A regex with capture groups that `name` must match completely.

    Returns
    -------
    tuple[str, ...]
        The captured groups, in order.

    Raises
    ------
    ValueError
        `name` doesn't fully match `form_regex`.

    """
    match = re.fullmatch(form_regex, name)
    if not match:
        raise ValueError(f"Tag {name} doesn't match supplied format.")
    return match.groups()
97 |
--------------------------------------------------------------------------------
/sphinx_polyversion/builder.py:
--------------------------------------------------------------------------------
1 | """Abstract Building framework."""
2 |
3 | from abc import ABCMeta, abstractmethod
4 | from pathlib import Path
5 | from typing import Generic, TypeVar
6 |
7 | from sphinx_polyversion.environment import Environment
8 | from sphinx_polyversion.json import JSONable
9 |
10 | __all__ = ["BuildError", "Builder"]
11 |
12 |
class BuildError(Exception):
    """Building a specific version failed (raised by :class:`Builder` implementations)."""
15 |
16 |
17 | ENV = TypeVar("ENV", bound=Environment)
18 | R = TypeVar("R")
19 |
20 |
class Builder(Generic[ENV, R], metaclass=ABCMeta):
    """
    Base class for builders creating a documentation from source files.

    Generic over the :class:`Environment` subtype the builder runs in
    and over the result type returned by :meth:`build`.
    """

    @abstractmethod
    async def build(self, environment: ENV, output_dir: Path, data: JSONable) -> R:
        """
        Build and render a documentation.

        This method should actually carry out the work of building and rendering
        a documentation.

        Parameters
        ----------
        environment : Environment
            The environment to use for building.
        output_dir : Path
            The output directory to build to.
        data : JSONable
            The metadata to use for building.

        Returns
        -------
        R
            Arbitrary data that results from building.
            This data can be used by custom :class:`Driver` implementations.

        """
        ...
49 |
--------------------------------------------------------------------------------
/sphinx_polyversion/environment.py:
--------------------------------------------------------------------------------
1 | """Build Environment Base API."""
2 |
3 | from __future__ import annotations
4 |
5 | import asyncio
6 | from functools import partial
7 | from logging import getLogger
8 | from typing import (
9 | TYPE_CHECKING,
10 | Any,
11 | Callable,
12 | Tuple,
13 | Type,
14 | TypeVar,
15 | cast,
16 | )
17 |
18 | from sphinx_polyversion.log import ContextAdapter
19 |
20 | if TYPE_CHECKING:
21 | from pathlib import Path
22 |
23 | __all__ = ["Environment"]
24 |
25 |
class Environment:
    """
    A build environment and contextmanager to run commands in.

    This base class may also be instantiated directly to obtain an
    environment that performs no setup or teardown at all.

    Parameters
    ----------
    path : Path
        The location of the current revision.
    name : str
        The name of the environment (usually the name of the current revision).

    Methods
    -------
    run(*cmd: str, decode: bool = True, **kwargs: Any)
        Run a OS process in the environment.

    """

    path: Path

    def __init__(self, path: Path, name: str):
        """
        Init the build environment and contextmanager to run commands in.

        Parameters
        ----------
        path : Path
            The location of the current revision.
        name : str
            The name of the environment (usually the name of the current revision).

        """
        self.path = path.resolve()
        # log records carry the environment name as context
        self.logger = ContextAdapter(getLogger(__name__), {"context": name})

    async def __aenter__(self: ENV) -> ENV:
        """Set the environment up."""
        return self

    async def __aexit__(self, *exc_info) -> bool | None:  # type: ignore[no-untyped-def]
        """Clean the environment up."""
        return None

    async def run(
        self, *cmd: str, decode: bool = True, **kwargs: Any
    ) -> Tuple[str | bytes | None, str | bytes | None, int]:
        """
        Run a OS process in the environment.

        This implementation passes the arguments to
        :func:`asyncio.create_subprocess_exec`.

        Returns
        -------
        stdout : str | None
            The output of the command,
        stderr : str | None
            The error output of the command
        returncode : int | None
            The returncode of the command

        """
        proc = await asyncio.create_subprocess_exec(*cmd, **kwargs)
        raw_out, raw_err = await proc.communicate()
        rc = cast(int, proc.returncode)
        if not decode:
            return raw_out, raw_err, rc
        # streams are None unless the caller requested pipes via kwargs
        out = raw_out.decode(errors="ignore") if raw_out is not None else None
        err = raw_err.decode(errors="ignore") if raw_err is not None else None
        return out, err, rc

    @classmethod
    def factory(cls: Type[ENV], **kwargs: Any) -> Callable[[Path, str], ENV]:
        """
        Create a factory function for this environment class.

        This returns a factory that can be used with :class:`DefaultDriver`.
        This method works similiar to :func:`functools.partial`. The arguments
        passed to this function will be used by the factory to instantiate
        the actual environment class.

        Parameters
        ----------
        **kwargs
            Arguments to use when creating the instance.

        Returns
        -------
        Callable[[Path, str], ENV]
            The factory function.

        """
        return partial(cls, **kwargs)
120 |
121 |
122 | ENV = TypeVar("ENV", bound=Environment)
123 |
--------------------------------------------------------------------------------
/sphinx_polyversion/git.py:
--------------------------------------------------------------------------------
1 | """Git VCS support."""
2 |
3 | from __future__ import annotations
4 |
5 | import asyncio
6 | import enum
7 | import re
8 | import subprocess
9 | import tarfile
10 | import tempfile
11 | from asyncio.subprocess import PIPE
12 | from datetime import datetime
13 | from functools import total_ordering
14 | from inspect import isawaitable
15 | from logging import getLogger
16 | from pathlib import Path, PurePath
17 | from subprocess import DEVNULL, CalledProcessError
18 | from typing import (
19 | Any,
20 | AsyncGenerator,
21 | Awaitable,
22 | Callable,
23 | Coroutine,
24 | Iterable,
25 | Iterator,
26 | NamedTuple,
27 | Tuple,
28 | TypeVar,
29 | cast,
30 | )
31 |
32 | from sphinx_polyversion.json import GLOBAL_DECODER
33 | from sphinx_polyversion.utils import async_all
34 | from sphinx_polyversion.vcs import VersionProvider
35 |
36 | __all__ = ["Git", "GitRef", "GitRefType", "file_predicate", "refs_by_type"]
37 |
38 | logger = getLogger(__name__)
39 |
40 | # -- Low level Git functions -------------------------------------------------
41 |
42 |
async def _get_git_root(directory: Path) -> Path:
    """
    Determine the root folder of the current git repo.

    Parameters
    ----------
    directory : Path
        Any directory in the repo.

    Returns
    -------
    Path
        The root folder.

    """
    proc = await asyncio.create_subprocess_exec(
        "git",
        "rev-parse",
        "--show-toplevel",
        cwd=directory,
        stdout=PIPE,
    )
    stdout, _ = await proc.communicate()
    # git prints the absolute path followed by a newline
    return Path(stdout.decode().rstrip("\n"))
66 |
67 |
68 | regex_ref = r"refs/(?P\w+|remotes/(?P[^/]+))/(?P\S+)"
69 | pattern_ref = re.compile(regex_ref)
70 |
71 | GIT_FORMAT_STRING = "%(objectname)\t%(refname)\t%(creatordate:iso)"
72 |
73 |
def _parse_ref(line: str) -> GitRef | None:
    """Parse one line of `git for-each-ref` output into a :class:`GitRef`.

    Returns None (after logging) for lines that don't represent a
    local branch, tag or remote-tracking branch.
    """
    obj_hash, full_ref, raw_date = line.split("\t")
    created = datetime.strptime(raw_date, "%Y-%m-%d %H:%M:%S %z")

    m = pattern_ref.fullmatch(full_ref)
    if m is None:
        logger.warning("Invalid ref %s", full_ref)
        return None

    remote = None
    kind = m["type"]
    if kind == "heads":
        ref_type = GitRefType.BRANCH
    elif kind == "tags":
        ref_type = GitRefType.TAG
    elif m["remote"]:
        # remote-tracking branch: refs/remotes/<remote>/<name>
        ref_type = GitRefType.BRANCH
        remote = m["remote"]
    else:
        logger.info("Ignoring ref %s", full_ref)
        return None

    return GitRef(m["name"], obj_hash, full_ref, ref_type, created, remote)
97 |
98 |
def get_current_commit(repo: Path) -> str:
    """
    Determine the hash of the currently checked-out commit.

    Parameters
    ----------
    repo : Path
        The git repository.

    Returns
    -------
    str
        The hex obj hash of the commit.

    Raises
    ------
    CalledProcessError
        The git process exited with an error.

    """
    result = subprocess.run(
        ("git", "rev-parse", "HEAD"), stdout=PIPE, cwd=repo, check=True
    )
    return result.stdout.decode().rstrip("\n")
122 |
123 |
async def _get_all_refs(
    repo: Path, pattern: str = "refs"
) -> AsyncGenerator[GitRef, None]:
    """
    Get a list of refs (tags/branches) for a git repo.

    Parameters
    ----------
    repo : Path
        The repo to return the refs for
    pattern : str
        The pattern of refs to retrieve. Passed to `git for-each-ref`.

    Yields
    ------
    GitRef: The refs

    """
    proc = await asyncio.create_subprocess_exec(
        "git",
        "for-each-ref",
        "--format",
        GIT_FORMAT_STRING,
        pattern,
        stdout=PIPE,
        cwd=repo,
    )
    stdout, _ = await proc.communicate()
    # one ref per line; `_parse_ref` returns None for refs to skip
    for raw_line in stdout.decode().splitlines():
        parsed = _parse_ref(raw_line)
        if parsed is not None:
            yield parsed
161 |
162 |
async def _is_ancestor(repo: Path, ancestor: str, descendant: str) -> bool:
    """Return whether `ancestor` is an ancestor commit of `descendant`."""
    cmd = ("git", "merge-base", "--is-ancestor", ancestor, descendant)
    proc = await asyncio.create_subprocess_exec(*cmd, stdout=PIPE, cwd=repo)
    _, err = await proc.communicate()
    # exit status: 0 = is ancestor, 1 = is not, anything else = git error
    status = cast(int, proc.returncode)
    if status > 1:
        raise CalledProcessError(status, " ".join(cmd), stderr=err)
    return status == 0
177 |
178 |
async def _copy_tree(
    repo: Path, ref: str, dest: str | Path, buffer_size: int = 0
) -> None:
    """
    Copy the contents of a ref into a location in the file system.

    The ref's tree is exported with ``git archive`` into a spooled
    temporary file (kept in memory up to `buffer_size` bytes) and then
    unpacked into `dest`.

    Parameters
    ----------
    repo : Path
        The repo of the ref
    ref : str
        The ref
    dest : Union[str, Path]
        The destination to copy the contents to
    buffer_size : int
        The buffer size in memory which is filled before
        a temporary file is used for retrieving the contents. Defaults to 0.

    Raises
    ------
    CalledProcessError
        The git process exited with an error.

    """
    archive_cmd = ("git", "archive", "--format", "tar", ref)
    with tempfile.SpooledTemporaryFile(max_size=buffer_size) as spool:
        proc = await asyncio.create_subprocess_exec(
            *archive_cmd, cwd=repo, stdout=spool, stderr=PIPE
        )
        _, err = await proc.communicate()
        if proc.returncode:
            raise CalledProcessError(proc.returncode, " ".join(archive_cmd), stderr=err)

        # unpack the archive into the destination directory
        spool.seek(0)
        with tarfile.open(fileobj=spool) as archive:
            # NOTE(review): `extractall` is called without a `filter=`; the
            # archive comes from the local repository itself, so this is
            # treated as trusted input.
            archive.extractall(str(dest))
216 |
217 |
async def file_exists(repo: Path, ref: GitRef, file: PurePath) -> bool:
    """
    Check whether a file exists in a given ref.

    Parameters
    ----------
    repo : Path
        The repo of the ref
    ref : GitRef
        The ref
    file : PurePath
        The file to check for

    Returns
    -------
    bool
        Whether the file was found in the contents of the ref

    """
    # `git cat-file -e <commit>:<path>` exits 0 iff the object exists
    proc = await asyncio.create_subprocess_exec(
        "git",
        "cat-file",
        "-e",
        f"{ref.obj}:{file.as_posix()}",
        cwd=repo,
        stdout=DEVNULL,
        stderr=DEVNULL,
    )
    return await proc.wait() == 0
251 |
252 |
253 | # -- VersionProvider API -----------------------------------------------------
254 |
255 |
async def closest_tag(root: Path, ref: GitRef, tags: tuple[str, ...]) -> str | None:
    """
    Find the closest ancestor to a given ref.

    Parameters
    ----------
    root : Path
        The repository root.
    ref : GitRef
        The ref to find the ancestor for.
    tags : tuple[str, ...]
        A list of git references to map to.

    Returns
    -------
    str | None
        The closest ancestor or None if no ancestor was found.

    """
    # Iterate newest-first and return the first tag that is an ancestor of
    # `ref`. NOTE(review): this assumes `tags` is ordered oldest -> newest --
    # confirm with callers.
    for tag in reversed(tags):
        if await _is_ancestor(root, tag, ref.obj):
            return tag
    return None
279 |
280 |
281 | S = TypeVar("S")
282 |
283 |
@GLOBAL_DECODER.register
class GitRefType(enum.Enum):
    """Types of git refs."""

    TAG = enum.auto()
    BRANCH = enum.auto()

    def _json_fields(self) -> str:
        # serialize as the member name (e.g. "TAG") for the JSON encoder
        return self.name

    @classmethod
    def _from_json_fields(cls, o: str) -> GitRefType:
        # inverse of `_json_fields`: look the member up by name
        return cls[o]
297 |
298 |
@GLOBAL_DECODER.register
class GitRef(NamedTuple):
    """
    A git ref representing a possible doc version.

    Instances order by creation date (`date`) under all four comparison
    operators, so e.g. ``max(refs)`` yields the most recently created ref.
    Equality and hashing remain field-wise (NamedTuple semantics).
    """

    name: str  # tag or branch name
    obj: str  # hash
    ref: str  # git ref
    type_: GitRefType
    date: datetime  # creation
    remote: str | None = None  # if remote ref: name of the remote

    def _json_fields(self) -> tuple[Any, ...]:
        # serialize as a plain tuple of the fields
        return tuple(self)

    @classmethod
    def _from_json_fields(cls, o: Any) -> GitRef:
        # rebuild from the field tuple produced by `_json_fields`
        return cls(*o)

    # NOTE: `functools.total_ordering` cannot fill in the missing comparison
    # operators here because `tuple` already defines all of them, so they are
    # spelled out explicitly. Previously only `<` compared by date while
    # `>`/`<=`/`>=` fell back to element-wise tuple comparison (name first),
    # making e.g. `max()` pick by name instead of by creation date.
    def __lt__(self, other: GitRef) -> bool:  # type: ignore[override]
        """Lower than."""
        return self.date < other.date

    def __le__(self, other: GitRef) -> bool:  # type: ignore[override]
        """Lower than or equal."""
        return self.date <= other.date

    def __gt__(self, other: GitRef) -> bool:  # type: ignore[override]
        """Greater than."""
        return self.date > other.date

    def __ge__(self, other: GitRef) -> bool:  # type: ignore[override]
        """Greater than or equal."""
        return self.date >= other.date
321 |
322 |
def refs_by_type(refs: Iterator[GitRef]) -> Tuple[list[GitRef], list[GitRef]]:
    """
    Group refs by type.

    Parameters
    ----------
    refs : Iterator[GitRef]
        The refs to group.

    Returns
    -------
    branches : list[GitRef]
    tags : list[GitRef]

    """
    # A single pass is required: `refs` may be a genuine iterator, and the
    # previous implementation's two `filter(...)` passes consumed it on the
    # first pass, always leaving the tags list empty.
    branches: list[GitRef] = []
    tags: list[GitRef] = []
    for ref in refs:
        if ref.type_ == GitRefType.BRANCH:
            branches.append(ref)
        elif ref.type_ == GitRefType.TAG:
            tags.append(ref)
    return branches, tags
342 |
343 |
def file_predicate(
    files: Iterable[str | PurePath],
) -> Callable[[Path, GitRef], Coroutine[None, None, bool]]:
    """
    Return a predicate that checks for files in a git revision.

    The produced coroutine function delegates to :func:`file_exists`
    once per file and reports whether every file is present in the
    revision in question.

    Parameters
    ----------
    files : Iterable[str | PurePath]
        The files to check for.

    Returns
    -------
    Callable[[Path, GitRef], Coroutine[None, None, bool]]
        The predicate.

    """
    paths = [PurePath(entry) for entry in files]

    async def predicate(repo: Path, ref: GitRef) -> bool:
        checks = (file_exists(repo, ref, p) for p in paths)
        return await async_all(checks)  # type: ignore[arg-type]

    return predicate
370 |
371 |
class Git(VersionProvider[GitRef]):
    """
    Provide versions from git repository.

    Parameters
    ----------
    branch_regex : str | re.Pattern
        Regex branches must match completely
    tag_regex : str | re.Pattern
        Regex tags must match completely
    remote : str | None, optional
        Limit to this remote or to local refs if not specified, by default None
    predicate : Callable[[Path, GitRef], bool | Awaitable[bool]] | None, optional
        Additional (possibly async) filter applied to each ref, by default None
    buffer_size : int, optional
        Buffer size forwarded to the checkout copy routine, by default 0

    """

    def __init__(
        self,
        branch_regex: str | re.Pattern[Any],
        tag_regex: str | re.Pattern[Any],
        remote: str | None = None,
        *,
        predicate: Callable[[Path, GitRef], bool | Awaitable[bool]] | None = None,
        buffer_size: int = 0,
    ) -> None:
        """Init."""
        super().__init__()
        self.remote = remote
        self.buffer_size = buffer_size

        # accept both plain strings and precompiled patterns
        if isinstance(branch_regex, str):
            branch_regex = re.compile(branch_regex)
        if isinstance(tag_regex, str):
            tag_regex = re.compile(tag_regex)
        self.branch_regex = branch_regex
        self.tag_regex = tag_regex

        self._predicate = predicate

    def name(self, revision: GitRef) -> str:
        """
        Get the (unique) name of a revision.

        This name will usually be used for creating the subdirectories
        of the revision.

        Parameters
        ----------
        revision : Any
            The revision whose name is requested.

        Returns
        -------
        str
            The name of the revision.

        """
        return revision.name

    @staticmethod
    async def aroot(path: str | Path) -> Path:
        """
        Determine the root of the current git repository (async).

        Parameters
        ----------
        path : Path
            A path inside the repo. (Usually the current working directory)

        Returns
        -------
        Path
            The root path of the repo.

        """
        return await _get_git_root(Path(path))

    @classmethod
    def root(cls, path: str | Path) -> Path:
        """
        Determine the root of the current git repository.

        Synchronous convenience wrapper around :meth:`aroot`.

        Parameters
        ----------
        path : Path
            A path inside the repo. (Usually the current working directory)

        Returns
        -------
        Path
            The root path of the repo.

        """
        return asyncio.run(cls.aroot(path))

    async def checkout(self, root: Path, dest: Path, revision: GitRef) -> None:
        """
        Extract a specific revision to the given path.

        Parameters
        ----------
        root : Path
            The root path of the git repository.
        dest : Path
            The destination to copy the revision to.
        revision : Any
            The revision to extract.

        """
        await _copy_tree(root, revision.obj, dest, self.buffer_size)

    async def predicate(self, root: Path, ref: GitRef) -> bool:
        """
        Check whether a revision should be build.

        This predicate is used by :meth:`retrieve` to filter the
        git references retrieved.

        Parameters
        ----------
        root : Path
            The root path of the git repo.
        ref : GitRef
            The git reference to check.

        Returns
        -------
        bool
            Whether to build the revision referenced.

        """
        # the regex matching the ref's type must match its full name
        match = True
        if ref.type_ == GitRefType.TAG:
            match = bool(self.tag_regex.fullmatch(ref.name))
        if ref.type_ == GitRefType.BRANCH:
            match = bool(self.branch_regex.fullmatch(ref.name))

        # the ref must also come from the configured remote
        # (remote=None selects local refs)
        if not (ref.remote == self.remote and match):
            return False

        # finally apply the user-supplied predicate, which may be async
        if self._predicate:
            r = self._predicate(root, ref)
            if isawaitable(r):
                r = await r
            if not r:
                return False
        return True

    async def retrieve(self, root: Path) -> Iterable[GitRef]:
        """
        List all build targets.

        This retrieves all references from git and filters them using the
        options this instance was initialized with.

        Parameters
        ----------
        root : Path
            The root path of the project.

        Returns
        -------
        tuple[GitRef]
            The revisions/git references to build.

        """

        async def handle(ref: GitRef) -> GitRef | None:
            if await self.predicate(root, ref):
                return ref
            return None

        # evaluate the predicates for all refs concurrently
        tasks = []
        async for ref in _get_all_refs(root):
            tasks.append(handle(ref))

        return tuple(filter(None, await asyncio.gather(*tasks)))
550 |
--------------------------------------------------------------------------------
/sphinx_polyversion/json.py:
--------------------------------------------------------------------------------
1 | """(De-)Serialize python objects to/from json."""
2 |
3 | from __future__ import annotations
4 |
5 | import json
6 | import warnings
7 | from datetime import datetime
8 | from inspect import getmodule
9 | from typing import (
10 | Any,
11 | Callable,
12 | Dict,
13 | Iterable,
14 | Iterator,
15 | List,
16 | Protocol,
17 | Tuple,
18 | Type,
19 | TypeVar,
20 | Union,
21 | cast,
22 | overload,
23 | runtime_checkable,
24 | )
25 |
26 | __all__ = ["Decoder", "Encoder", "RecursionWarning", "std_hook"]
27 |
28 |
#: Python types representing a key in JSON mapping
JSON_TYPE_KEY = Union[None, bool, int, float, str]

#: Python types representing a JSON object (as returned by json.load)
JSON_TYPE = Union[
    JSON_TYPE_KEY,
    List["JSON_TYPE"],
    Tuple["JSON_TYPE", ...],
    Dict[str, "JSON_TYPE"],
]

#: Python types supported by the built-in json module (json.dump)
JSON_TYPE_DUMP = Union[
    JSON_TYPE_KEY,
    List["JSON_TYPE_DUMP"],
    Tuple["JSON_TYPE_DUMP", ...],
    Dict[JSON_TYPE_KEY, "JSON_TYPE_DUMP"],
]
47 |
48 |
class RecursionWarning(UserWarning):
    """
    Warns that a transformable object serializes to its own type.

    `_json_fields` would then be invoked on its own output over and
    over, which usually ends in infinite recursion.
    """
56 |
57 |
class Transformable(Protocol):
    """Protocol for classes whose instances can be converted from and to json."""

    def _json_fields(self) -> JSONable:
        """
        Return a json-serializable representation of this object's state.

        The returned value must itself be encodable by this module; it is
        later handed back to :meth:`_from_json_fields`.
        """

    @classmethod
    def _from_json_fields(cls: Type[T], o: Any) -> T:
        """
        Recreate an instance from previously serialized fields.

        Parameters
        ----------
        o : Any
            The deserialized fields, exactly as :meth:`_json_fields`
            produced them.

        Returns
        -------
        An instance of this class.

        """
84 |
85 |
#: Python types that this module can encode (without using hooks);
#: i.e. plain JSON values plus anything implementing `Transformable`
JSONable = Union[
    JSON_TYPE_KEY,
    List["JSONable"],
    Tuple["JSONable", ...],
    Dict[JSON_TYPE_KEY, "JSONable"],
    Transformable,
]
94 |
95 |
class Encoder(json.JSONEncoder):
    """
    JSON Encoder supporting all kinds of python objects.

    This Encoder supports types/instances implementing the `Transformable`
    protocol. You can also pass hooks to the Encoder for supporting types
    not implementing set protocol.

    Parameters
    ----------
    hooks : Iterable[JSONHook] | JSONHook, optional
        The object hooks to use, by default ()
    **kwargs
        Keyword arguments passed to :class:`json.JSONEncoder`

    """

    @overload
    def __init__(self, hook: JSONHook, /, **kwargs: Any) -> None:
        ...

    @overload
    def __init__(self, hooks: Iterable[JSONHook] = (), /, **kwargs: Any) -> None:
        ...

    def __init__(
        self, hooks: Iterable[JSONHook] | JSONHook = (), /, **kwargs: Any
    ) -> None:
        """
        Init the JSON Encoder.

        Parameters
        ----------
        hooks : Iterable[JSONHook] | JSONHook, optional
            The object hooks to use, by default ()
        **kwargs
            Keyword arguments passed to :class:`json.JSONEncoder`

        """
        super().__init__(**kwargs)
        # accept a single hook as well as any iterable of hooks
        self.hooks = {hooks} if isinstance(hooks, JSONHook) else set(hooks)

    @overload
    def register(
        self,
        t1: Type[JSONHook],
        t2: Type[JSONHook],
        /,
        *types: Type[JSONHook],
    ) -> None:
        ...

    @overload
    def register(self, hook: Type[JSONHook], /) -> Type[JSONHook]:
        ...

    def register(self, *ts: Type[JSONHook]) -> Type[JSONHook] | None:
        """
        Register one or more hooks.

        An encoder can only serialize objects of types that aren't natively
        supported by JSON if a corresponding hook is registered (or the
        type implements the `Transformable` protocol).

        This method can be used as a decorator for hook classes: when
        called with exactly one argument, that argument is returned so the
        decorated class remains bound to its name.
        """
        for t in ts:
            self.hooks.add(t)
        return ts[0] if len(ts) == 1 else None

    @staticmethod
    def determine_classname(o: object | type, instance: bool = True) -> str:
        """
        Determine a unique identifier for a python class or instance.

        This method is put in the produced json to encode classes that aren't
        natively supported by JSON.

        Parameters
        ----------
        o : object | type
            The object to identify
        instance : bool, optional
            Whether the object is a class/type or an instance, by default True

        Returns
        -------
        str
            The identifier

        """
        module = getmodule(o)
        mod_name = module.__name__ if module else ""
        c = type(o) if instance else cast(type, o)
        cls_name = c.__qualname__
        return f"{mod_name}.{cls_name}"

    def transform(self, o: JSONable) -> JSON_TYPE_DUMP:
        """
        Replace custom types by an encodable dictionary.

        Parameters
        ----------
        o : JSONable
            The json object to iterate over.

        Returns
        -------
        JSON_TYPE_DUMP
            The resulting json object.

        """
        # hooks take precedence over the Transformable protocol
        for hook in self.hooks:
            if (fields := hook.fields(o)) is not None:
                return {
                    "__jsonhook__": (
                        self.determine_classname(hook, instance=False),
                        self.determine_classname(o),
                        self.transform(fields),
                    )
                }

        if hasattr(o, "_json_fields"):
            # type JSONable
            fields = o._json_fields()  # type: ignore
            # BUGFIX: bind the *type* with the walrus, not the result of
            # the `is` comparison; otherwise the warning reads "Class True"
            if (t := type(o)) is type(fields):
                warnings.warn(
                    f"Class {t} returns itself as json field container",
                    RecursionWarning,
                )
            return {
                "__jsonclass__": (
                    self.determine_classname(o),
                    self.transform(fields),
                )
            }

        # containers are transformed recursively
        if isinstance(o, dict):
            return {k: self.transform(v) for k, v in o.items()}
        if isinstance(o, (list, tuple)):
            return [self.transform(v) for v in o]

        # do not use cast for performance reasons
        return o

    def __call__(self, o: JSONable) -> JSON_TYPE_DUMP:
        """
        Replace custom types by an encodable dictionary.

        Parameters
        ----------
        o : JSONable
            The json object to iterate over.

        Returns
        -------
        JSON_TYPE_DUMP
            The resulting json object.

        Notes
        -----
        Calls :meth:`transform` internally.

        """
        return self.transform(o)

    def iterencode(self, o: JSONable, _one_shot: bool = False) -> Iterator[str]:
        """Encode an object."""
        # called for every top level object to encode
        return super().iterencode(self.transform(o), _one_shot)
274 |
275 |
#: Type variable bound to Transformable, used by register()/decorator return types.
T = TypeVar("T", bound=Transformable)
277 |
278 |
class Decoder(json.JSONDecoder):
    """
    A json decoder supporting all kinds of python objects.

    To decode such an object, three requirements have to be fullfilled:
    1. The object has to implement the :class:`Transformable`
    protocol or a :class:`JSONHook` has to be implemented for the type.
    2. The object has to be encoded in the correct format as done by :class:`Encoder`.
    3. The hook or class has to be registered with this decoder. You can use
    :meth:`register` for that. This method can also be used as a class decorator.

    Parameters
    ----------
    decoder : Decoder | None, optional
        A decoder to inherit properties from, by default None
    parse_float : Callable[[str], Any] | None, optional
        Float parser, by default None
    parse_int : Callable[[str], Any] | None, optional
        Int parser, by default None
    parse_constant : Callable[[str], Any] | None, optional
        Constant parser, by default None
    strict : bool | None, optional
        Whether to disallow control characters; by default inherited from
        `decoder` or True if no decoder is given.

    Attributes
    ----------
    registered_types : List[type]
        The transformable types registered for decoding.
    hooks : List[Type[JSONHook]]
        hooks registered for decoding.

    Methods
    -------
    register(*t)
        Register a hook or a transformable type.
    register_from(decoder)
        Register all types registered by another decoder.

    """

    def __init__(
        self,
        decoder: Decoder | None = None,
        *,
        parse_float: Callable[[str], Any] | None = None,
        parse_int: Callable[[str], Any] | None = None,
        parse_constant: Callable[[str], Any] | None = None,
        strict: bool | None = None,
    ) -> None:
        """
        Init the json decoder.

        Parameters
        ----------
        decoder : Decoder | None, optional
            A decoder to inherit properties from, by default None
        parse_float : Callable[[str], Any] | None, optional
            Float parser, by default None
        parse_int : Callable[[str], Any] | None, optional
            Int parser, by default None
        parse_constant : Callable[[str], Any] | None, optional
            Constant parser, by default None
        strict : bool | None, optional
            Whether to disallow control characters; by default inherited
            from `decoder` or True if no decoder is given.

        """
        # inherit any option that wasn't explicitly overridden
        if decoder:
            parse_float = parse_float or decoder.parse_float
            parse_int = parse_int or decoder.parse_int
            parse_constant = parse_constant or decoder.parse_constant
            # BUGFIX: a sentinel (None) is required here — with the old
            # `strict or decoder.strict` the flag could neither be
            # inherited nor explicitly set to False alongside a decoder
            if strict is None:
                strict = decoder.strict
        if strict is None:
            strict = True
        super().__init__(
            parse_float=parse_float,
            parse_int=parse_int,
            parse_constant=parse_constant,
            strict=strict,
            object_hook=self.object_hook,
        )
        # registries keyed by the identifier from `determine_classname`
        self.type_dict: Dict[str, Type[Transformable]] = {}
        self.type_hooks: Dict[str, Type[JSONHook]] = {}
        if decoder:
            self.register_from(decoder)

    @property
    def registered_types(self) -> List[type]:
        """List of transformable types registered for decoding."""
        return list(self.type_dict.values())

    @property
    def hooks(self) -> List[Type[JSONHook]]:
        """List of hooks registered for decoding."""
        return list(self.type_hooks.values())

    @staticmethod
    def determine_classname(t: type) -> str:
        """
        Determine a unique identifier for a class/type.

        This identifier is used to store hooks and types but also
        to select the correct one when its identifier is found in the
        json to decode.

        Parameters
        ----------
        t : type
            The class/type to identify.

        Returns
        -------
        str
            The identifier.

        """
        mod_name = getmodule(t).__name__  # type: ignore
        cls_name = t.__qualname__
        return f"{mod_name}.{cls_name}"

    def register_from(self, decoder: Decoder) -> None:
        """Register all types registered by another decoder."""
        self.register(*decoder.registered_types)
        self.register(*decoder.hooks)

    @overload
    def register(self, t: Type[T], /) -> Type[T]:
        ...

    @overload
    def register(
        self,
        t1: Type[JSONHook] | Type[T],
        t2: Type[JSONHook] | Type[T],
        /,
        *types: Type[JSONHook] | Type[T],
    ) -> None:
        ...

    @overload
    def register(self, hook: Type[JSONHook], /) -> Type[JSONHook]:
        ...

    def register(
        self, *ts: Type[JSONHook] | Type[T]
    ) -> Type[T] | Type[JSONHook] | None:
        """
        Register a hook or a transformable type.

        A decoder can only decode serialized objects if their type or a
        corresponding hook was registered with the decoder.

        This method can be used as decorator for :class:`Transformable`
        classes or hook classes.

        Raises
        ------
        ValueError
            Hook or class already registered
        TypeError
            Invalid type that doesn't implement :class:`JSONHook` or :class:`Transformable`.

        """
        for t in ts:
            key = self.determine_classname(t)
            if isinstance(t, JSONHook):
                # register hook
                if key in self.type_hooks:
                    raise ValueError(f"Hook {key} already registered")
                self.type_hooks[key] = t
            else:
                # register Transformable class
                if not hasattr(t, "_from_json_fields"):
                    raise TypeError(
                        "Registered Type must implement `_from_json_fields`"
                    )
                if key in self.type_dict:
                    raise ValueError(f"Class {key} already registered")
                self.type_dict[key] = t

        # return the argument when used as a decorator
        return ts[0] if len(ts) == 1 else None

    def object_hook(self, o: Dict[str, JSON_TYPE]) -> Any:
        """Alter objects after deserialization."""
        classname: str
        fields: JSON_TYPE
        if "__jsonclass__" in o:
            # deserializable object
            classname, fields = o["__jsonclass__"]  # type: ignore

            if classname in self.type_dict:
                return self.type_dict[classname]._from_json_fields(fields)
        elif "__jsonhook__" in o:
            # object that can be deserialized through a hook
            hookname: str
            hookname, classname, fields = o["__jsonhook__"]  # type: ignore
            if hookname in self.type_hooks:
                return self.type_hooks[hookname].from_json(classname, fields)
        # unknown markers fall through unchanged
        return o
475 |
476 |
@runtime_checkable
class JSONHook(Protocol):
    """Base for hooks for arbitrary python objects."""

    @staticmethod
    def fields(o: Any) -> None | JSONable:
        """
        Return a serializable representation of an instance's state.

        Unsupported objects must yield None; supported objects yield a
        representation of their fields that this module can encode.

        Parameters
        ----------
        o : Any
            Any object that should be encoded.

        Returns
        -------
        None | JSONable
            The fields of the object if it is supported, else None.

        """

    @staticmethod
    def from_json(cls: str, o: JSON_TYPE) -> Any:  # noqa: PLW0211
        """
        Instantiate an object from its decoded fields.

        This is only invoked for supported instances that were previously
        encoded with the help of :meth:`fields`.

        Parameters
        ----------
        cls : str
            The identifier of the class to create an instance of.
        o : JSON_TYPE
            The decoded fields.

        Returns
        -------
        Any
            The deserialized object.

        """
523 |
524 |
525 | #: Constant and global convenience decoder instance
526 | #: that has all types and hooks in this package registered
527 | #: (if the corresponding types were loaded through an import of the containing module)
528 | GLOBAL_DECODER = Decoder()
529 |
530 | #: Constant and global convenience encoder instance
531 | #: that has all hooks in this package registered
532 | #: (if they were loaded through an import of the containing module)
533 | GLOBAL_ENCODER = Encoder()
534 |
535 |
@GLOBAL_ENCODER.register
@GLOBAL_DECODER.register
class std_hook(JSONHook):
    """
    A set of standard hooks implemented by this module.

    Currently only the `datetime` class is supported.
    """

    @staticmethod
    def fields(o: Any) -> str | None:
        """Serialize `datetime` instances to their ISO format string."""
        return o.isoformat() if isinstance(o, datetime) else None

    @staticmethod
    def from_json(cls: str, o: JSON_TYPE) -> Any:  # noqa: PLW0211
        """Recreate a `datetime` from its ISO format string."""
        return datetime.fromisoformat(cast(str, o))
557 |
--------------------------------------------------------------------------------
/sphinx_polyversion/log.py:
--------------------------------------------------------------------------------
1 | """Utilities for logging."""
2 |
3 | from __future__ import annotations
4 |
5 | import logging
6 | from typing import Any, MutableMapping
7 |
8 |
class ContextAdapter(logging.LoggerAdapter):  # type: ignore[type-arg]
    """A adapter adding arbitrary context information to a log message."""

    def process(
        self, msg: str, kwargs: MutableMapping[str, Any]
    ) -> tuple[str, MutableMapping[str, Any]]:
        """
        Process the message of a logging call.

        Prefix every message with the `context` entry of the adapter's
        `extra` mapping; the keyword arguments pass through untouched.
        """
        context = self.extra["context"]
        return "[%s] %s" % (context, msg), kwargs
25 |
--------------------------------------------------------------------------------
/sphinx_polyversion/main.py:
--------------------------------------------------------------------------------
1 | """The entry point of the module."""
2 |
3 | from __future__ import annotations
4 |
5 | import argparse
6 | from collections.abc import Iterable
7 | from logging import StreamHandler
8 | from pathlib import Path
9 | from typing import Sequence
10 |
11 | from sphinx_polyversion.utils import import_file
12 |
13 |
class ParseKwargs(argparse.Action):
    """Action for keyword, value pairs separated by equality signs."""

    def __call__(
        self,
        parser: argparse.ArgumentParser,
        namespace: argparse.Namespace,
        arguments: str | Sequence[str] | None,
        option_string: str | None = None,
    ) -> None:
        """
        Parse `key=value` arguments into a dict on the namespace.

        Parameters
        ----------
        parser : argparse.ArgumentParser
            The parser invoking this action.
        namespace : argparse.Namespace
            The namespace the resulting dict is stored on (at `self.dest`).
        arguments : str | Sequence[str] | None
            The raw `key=value` strings.
        option_string : str | None, optional
            The option string that triggered this action.

        """
        if not isinstance(arguments, Iterable):
            raise TypeError(
                "Expected iterable of arguments. Use this type with nargs='*'"
            )
        kwargs = getattr(namespace, self.dest, {})
        for option in arguments:
            if "=" not in option:
                parser.error(f"Not a key=value pair: {option}")
            # BUGFIX: split on the first '=' only so values may themselves
            # contain '=' (e.g. `-o url=https://x?a=b`)
            key, value = option.split("=", 1)
            kwargs[key] = value
        setattr(namespace, self.dest, kwargs)
36 |
37 |
def get_parser(expect_config: bool = True) -> argparse.ArgumentParser:
    """Define cmd line signature."""
    cli = argparse.ArgumentParser(
        description="Build multiple versions of your sphinx docs and merge them into one site."
    )

    # positional: the polyversion config file
    conf_argument = cli.add_argument(
        "conf",
        type=Path,
        help="Polyversion config file to load. This must be a python file that can be evaluated.",
    )
    if not expect_config:
        # the config file becomes optional
        conf_argument.nargs = "?"

    # positional: output location (optional)
    cli.add_argument(
        "out",
        type=Path,
        help="Output directory to build the merged docs to.",
        nargs="?",
        default=False,
    )
    # key=value overrides of config options
    cli.add_argument(
        "-o",
        "--override",
        nargs="*",
        action=ParseKwargs,
        help="Override config options. Pass them as `key=value` pairs.",
        default={},
    )
    cli.add_argument(
        "-v",
        "--verbosity",
        action="count",
        default=0,
        help="Increase output verbosity (decreases minimum log level). The default log level is ERROR.",
    )
    cli.add_argument(
        "-l",
        "--local",
        "--mock",
        action="store_true",
        help="Build the local version of your docs.",
    )
    cli.add_argument(
        "--sequential",
        action="store_true",
        help="Build the revisions sequentially.",
    )
    return cli
89 |
90 |
def main() -> None:
    """Run the `poly.py` config file."""
    parser = get_parser()
    args, _ = parser.parse_known_args()
    config_file: Path = args.conf

    # attach a stream handler whose level reflects the -v count:
    # ERROR (40) by default, 10 lower per -v, never below DEBUG (10)
    from sphinx_polyversion import logger

    handler = StreamHandler()
    logger.addHandler(handler)
    handler.setLevel(max(10, 40 - 10 * args.verbosity))

    # evaluating the config file drives the actual build
    if not config_file.is_file():
        parser.error("Config file doesn't exist.")

    import_file(config_file)
110 |
--------------------------------------------------------------------------------
/sphinx_polyversion/pyvenv.py:
--------------------------------------------------------------------------------
1 | """Subclasses of :class:`Environment` supporting python virtual environments."""
2 |
3 | from __future__ import annotations
4 |
5 | import asyncio
6 | import os
7 | from asyncio.subprocess import PIPE
8 | from inspect import isawaitable
9 | from logging import getLogger
10 | from pathlib import Path
11 | from subprocess import CalledProcessError
12 | from typing import (
13 | TYPE_CHECKING,
14 | Any,
15 | Callable,
16 | Iterable,
17 | Sequence,
18 | Tuple,
19 | cast,
20 | )
21 | from venv import EnvBuilder
22 |
23 | from sphinx_polyversion.builder import BuildError
24 | from sphinx_polyversion.environment import Environment
25 | from sphinx_polyversion.utils import to_thread
26 |
27 | if TYPE_CHECKING:
28 | from typing_extensions import Self
29 |
30 | logger = getLogger(__name__)
31 |
32 |
class VenvWrapper(EnvBuilder):
    """
    Build your virtual environments using the built-in venv module.

    Instances are async callables: call one with a path to build a
    venv at that location.

    Parameters
    ----------
    system_site_packages : bool, optional
        If True, the system (global) site-packages dir is available to
        created environments.
    clear : bool, optional
        If True, delete the contents of the environment directory if
        it already exists, before environment creation.
    symlinks : bool, optional
        If True, attempt to symlink rather than copy files into
        virtual environment.
    upgrade : bool, optional
        If True, upgrade an existing virtual environment.
    with_pip : bool, optional
        If True, ensure pip is installed in the virtual environment
    prompt : str | None, optional
        Alternative terminal prefix for the environment.
    kwargs : Any
        Additional keyword arguments passed to EnvBuilder.__init__

    """

    def __init__(
        self,
        system_site_packages: bool = False,
        clear: bool = False,
        symlinks: bool = False,
        upgrade: bool = False,
        with_pip: bool = True,
        prompt: str | None = None,
        **kwargs: Any,
    ) -> None:
        """Init the builder; see the class docstring for the parameters."""
        super().__init__(
            system_site_packages=system_site_packages,
            clear=clear,
            symlinks=symlinks,
            upgrade=upgrade,
            with_pip=with_pip,
            prompt=prompt,
            **kwargs,
        )

    async def __call__(self, path: Path) -> None:
        """
        Create a virtual environment at the given location.

        `EnvBuilder.create` runs in a separate thread so that it can be
        awaited without blocking the event loop.

        Parameters
        ----------
        path : Path
            directory for the created venv

        """
        await to_thread(self.create, path)
116 |
117 |
class VirtualenvWrapper:
    """
    Build your virtual environments using the virtualenv package.

    The package can be found on pypi.
    Call instances of this class with a path to create a venv at the given location.
    """

    def __init__(self, args: Sequence[str]) -> None:
        """
        Init the wrapper and verify that `virtualenv` is importable.

        Parameters
        ----------
        args : Sequence[str]
            Commandline arguments to pass to `virtualenv`.

        """
        # fail early when the optional dependency is missing
        import virtualenv  # noqa: F401

        self.args = args

    async def __call__(self, path: Path) -> None:
        """Build the venv at the given location in a separate thread."""
        from virtualenv import cli_run

        await to_thread(cli_run, [*self.args, str(path)])
146 |
147 |
class VirtualPythonEnvironment(Environment):
    """
    An environment for running build commands in a python virtual environment.

    If you want to create the venv when this environment is entered you can
    provide a `creator` which will be called with the `venv` location to create
    the environment. You can use the :class:`VenvWrapper` and
    :class:`VirtualenvWrapper` classes for that.
    If `creator` isn't provided it is expected that a python venv already exists
    at the given location.

    Parameters
    ----------
    path : Path
        The location of the current revision.
    name : str
        The name of the environment (usually the name of the current revision).
    venv : Path
        The path of the python venv.
    creator : Callable[[Path], Any], optional
        A callable for creating the venv, by default None
    env : dict[str, str], optional
        A dictionary of environment variables which are overridden in the
        virtual environment, by default None

    Attributes
    ----------
    path : Path
        The location of the current revision.
    name : str
        The name of the environment.
    venv : Path
        The path of the python venv.
    env : dict
        The user-specified environment variables for the virtual environment.

    """

    def __init__(
        self,
        path: Path,
        name: str,
        venv: str | Path,
        *,
        creator: Callable[[Path], Any] | None = None,
        env: dict[str, str] | None = None,
    ):
        """
        Environment for building inside a python virtual environment.

        Parameters
        ----------
        path : Path
            The location of the current revision.
        name : str
            The name of the environment (usually the name of the current revision).
        venv : Path
            The path of the python venv.
        creator : Callable[[Path], Any], optional
            A callable for creating the venv, by default None
        env : dict[str, str], optional
            A dictionary of environment variables which are overridden in the
            virtual environment, by default None

        """
        super().__init__(path, name)
        # resolve so the absolute path can be exported as VIRTUAL_ENV later
        self.venv = Path(venv).resolve()
        self._creator = creator
        self.env = env or {}

    async def create_venv(self) -> None:
        """
        Create the virtual python environment.

        This calls `creator` if provided otherwise it does nothing.

        Override this to customize how and when the venv is created.
        """
        if self._creator:
            logger.info("Creating venv...")
            # creator may be sync or async; await only if it returned an awaitable
            result = self._creator(self.venv)
            if isawaitable(result):
                await result

    async def __aenter__(self: Self) -> Self:
        """Set the build environment up."""
        await super().__aenter__()
        # create the virtualenv if creator is specified
        await self.create_venv()
        return self

    def activate(self, env: dict[str, str]) -> dict[str, str]:
        """
        Activate a python venv in a dictionary of environment variables.

        Sets `VIRTUAL_ENV` and prepends the venv's `bin` directory to `PATH`,
        mirroring what a venv's `activate` script does.

        .. warning:: This modifies the given dictionary in-place.

        Parameters
        ----------
        env : dict[str, str]
            The environment variable mapping to update.

        Returns
        -------
        dict[str, str]
            The dictionary that was passed with `env`.

        Raises
        ------
        FileNotFoundError
            If no environment is located at the location `venv`.

        """
        if not self.venv.exists():
            raise FileNotFoundError(
                f"""There is no virtual environment at the path {self.venv}.
Please ensure that the path points to an existing virtual environment, or
supply a creator to automatically create the environment."""
            )
        env["VIRTUAL_ENV"] = str(self.venv)
        # NOTE(review): uses the POSIX `bin` layout; on Windows a venv uses
        # `Scripts` instead - confirm whether Windows is a supported target
        env["PATH"] = str(self.venv / "bin") + ":" + env["PATH"]
        return env

    def apply_overrides(self, env: dict[str, str]) -> dict[str, str]:
        """
        Prepare the environment for the build.

        This method is used to modify the environment before running a
        build command. It overrides those environment variables that were
        passed to the :py:class:`constructor <VirtualPythonEnvironment>`.
        `PATH` is never replaced but extended instead.

        .. warning:: This modifies the given dictionary in-place.

        Parameters
        ----------
        env : dict[str, str]
            The environment to modify.

        Returns
        -------
        dict[str, str]
            The updated environment.

        """
        # add user-supplied values to env
        for key, value in self.env.items():
            if key == "PATH":
                # extend PATH instead of replacing
                env["PATH"] = value + ":" + env["PATH"]
                continue
            if key in env:
                logger.info(
                    "Overwriting environment variable %s=%s with user-specified value '%s'.",
                    key,
                    env[key],
                    value,
                )
            env[key] = value
        return env

    async def run(
        self, *cmd: str, **kwargs: Any
    ) -> Tuple[str | bytes | None, str | bytes | None, int]:
        """
        Run a OS process in the environment.

        This implementation passes the arguments to
        :func:`asyncio.create_subprocess_exec`. But it alters `env`:
        the user-specified vars are applied with :py:meth:`apply_overrides`
        and the venv is activated with :py:meth:`activate`.
        If a python venv is already activated this activation is overridden.

        Returns
        -------
        stdout : str | None
            The output of the command,
        stderr : str | None
            The error output of the command
        returncode : int | None
            The returncode of the command

        """
        # activate venv
        kwargs["env"] = self.activate(
            self.apply_overrides(kwargs.get("env", os.environ).copy())
        )
        return await super().run(*cmd, **kwargs)
337 |
338 |
class Poetry(VirtualPythonEnvironment):
    """
    Build Environment for isolated builds with poetry.

    Use this to use poetry to create an isolated python venv for each
    build and to install specific poetry dependency groups.

    Parameters
    ----------
    path : Path
        The path of the current revision.
    name : str
        The name of the environment (usually the name of the revision).
    args : Iterable[str]
        The cmd arguments to pass to `poetry install`.
    env : dict[str, str], optional
        A dictionary of environment variables which are overridden in the
        virtual environment, by default None

    """

    def __init__(
        self,
        path: Path,
        name: str,
        *,
        args: Iterable[str],
        env: dict[str, str] | None = None,
    ):
        """
        Build Environment for isolated builds using poetry.

        Parameters
        ----------
        path : Path
            The path of the current revision.
        name : str
            The name of the environment (usually the name of the revision).
        args : Iterable[str]
            The cmd arguments to pass to `poetry install`.
        env : dict[str, str], optional
            A dictionary of environment variables which are overridden in the
            virtual environment, by default None

        """
        super().__init__(
            path,
            name,
            path / ".venv",  # placeholder, the real location is determined in __aenter__
            env=env,
        )
        self.args = args

    async def __aenter__(self) -> Self:
        """
        Set the poetry venv up.

        Runs `poetry install` with the configured arguments and then asks
        poetry for the location of the venv it created.

        Raises
        ------
        BuildError
            Running `poetry install` failed.

        """
        # NOTE(review): intentionally does not call super().__aenter__() -
        # poetry creates the venv itself, so `create_venv`/`creator` are
        # bypassed here; confirm Environment.__aenter__ has no other setup
        # that is needed.
        # create venv and install deps
        self.logger.info("`poetry install`")

        cmd: list[str] = ["poetry", "install"]
        cmd += self.args

        env = os.environ.copy()
        self.apply_overrides(env)

        env.pop("VIRTUAL_ENV", None)  # unset poetry env
        # force poetry to create the venv in a directory we control
        env["POETRY_VIRTUALENVS_IN_PROJECT"] = "False"
        venv_path = self.path / ".venv"
        i = 0
        # pick a directory name that doesn't exist yet (.venv, .venv-0, ...)
        while venv_path.exists():
            venv_path = self.path / f".venv-{i}"
            i += 1
        env["POETRY_VIRTUALENVS_PATH"] = str(venv_path)

        process = await asyncio.create_subprocess_exec(
            *cmd,
            cwd=self.path,
            env=env,
            stdout=PIPE,
            stderr=PIPE,
        )
        out, err = await process.communicate()
        out = out.decode(errors="ignore")
        err = err.decode(errors="ignore")

        self.logger.debug("Installation output:\n %s", out)
        if process.returncode != 0:
            self.logger.error("Installation error:\n %s", err)
            raise BuildError from CalledProcessError(
                cast(int, process.returncode), " ".join(cmd), out, err
            )

        # ---- locate venv
        # In the previous process poetry will have created or
        # ensured the existence of a venv in `venv_path` path folder.
        # However the venv itself constitutes a subdirectory with
        # an arbitrary name generated by poetry.
        # Thus we ask poetry to give us the name of the venv folder.
        cmd: list[str] = ["poetry", "env", "info", "--path"]
        process = await asyncio.create_subprocess_exec(
            *cmd, cwd=self.path, env=env, stdout=PIPE, stderr=PIPE
        )
        out, err = await process.communicate()
        # strip the trailing newline so the path can be used directly
        out = out.decode().rstrip("\n")
        err = err.decode(errors="ignore")

        self.logger.debug("Venv location: %s", out)
        if process.returncode != 0:
            self.logger.error("Error locating venv:\n %s", err)
            raise BuildError from CalledProcessError(
                cast(int, process.returncode), " ".join(cmd), out, err
            )
        self.venv = Path(out)  # actual venv location

        return self
461 |
462 |
class Pip(VirtualPythonEnvironment):
    """
    Build Environment for using a venv and installing deps with pip.

    Use this to run the build commands in a python virtual environment
    and install dependencies with pip into the venv before the build.

    Parameters
    ----------
    path : Path
        The path of the current revision.
    name : str
        The name of the environment (usually the name of the revision).
    venv : Path
        The path of the python venv.
    args : Iterable[str]
        The cmd arguments to pass to `pip install`.
    creator : Callable[[Path], Any] | None, optional
        A callable for creating the venv, by default None
    temporary : bool, optional
        A flag to specify whether the environment should be created in the
        temporary directory, by default False. If this is True, `creator`
        must not be None and `venv` will be treated relative to `path`.
    env : dict[str, str], optional
        A dictionary of environment variables which are overridden in the
        virtual environment, by default None

    """

    def __init__(
        self,
        path: Path,
        name: str,
        venv: str | Path,
        *,
        args: Iterable[str],
        creator: Callable[[Path], Any] | None = None,
        temporary: bool = False,
        env: dict[str, str] | None = None,
    ):
        """
        Build Environment for using a venv and pip.

        Parameters
        ----------
        path : Path
            The path of the current revision.
        name : str
            The name of the environment (usually the name of the revision).
        venv : Path
            The path of the python venv.
        args : Iterable[str]
            The cmd arguments to pass to `pip install`.
        creator : Callable[[Path], Any], optional
            A callable for creating the venv, by default None
        temporary : bool, optional
            A flag to specify whether the environment should be created in the
            temporary directory, by default False. If this is True, `creator`
            must not be None and `venv` will be treated relative to `path`.
        env : dict[str, str], optional
            A dictionary of environment variables which are overridden in the
            virtual environment, by default None

        Raises
        ------
        ValueError
            If `temporary` is enabled but no valid creator is provided.

        """
        if temporary:
            # a temporary venv cannot pre-exist, so it must be creatable
            if creator is None:
                raise ValueError(
                    "Cannot create temporary virtual environment when creator is None.\n"
                    "Please set creator to enable temporary virtual environments, or "
                    "set temporary to False to use a pre-existing local environment "
                    f"at path '{venv}'."
                )
            # anchor the venv inside the (temporary) revision checkout
            venv = path / venv
        super().__init__(path, name, venv, creator=creator, env=env)
        self.args = args

    async def __aenter__(self) -> Self:
        """
        Set the venv up.

        Creates the venv (via the base class) and then runs `pip install`
        inside it with the configured arguments.

        Raises
        ------
        BuildError
            Running `pip install` failed.

        """
        # base class creates the venv (if a creator was given)
        await super().__aenter__()

        # use the instance logger for consistency with the rest of this
        # method (and with Poetry.__aenter__)
        self.logger.info("Running `pip install`...")

        cmd: list[str] = ["pip", "install"]
        cmd += self.args

        process = await asyncio.create_subprocess_exec(
            *cmd,
            cwd=self.path,
            # activate the venv so `pip` resolves to the venv's pip
            env=self.activate(self.apply_overrides(os.environ.copy())),
            stdout=PIPE,
            stderr=PIPE,
        )
        out, err = await process.communicate()
        out = out.decode(errors="ignore")
        err = err.decode(errors="ignore")

        self.logger.debug("Installation output:\n %s", out)
        if process.returncode != 0:
            self.logger.error("Installation error:\n %s", err)
            raise BuildError from CalledProcessError(
                cast(int, process.returncode), " ".join(cmd), out, err
            )
        return self
579 |
--------------------------------------------------------------------------------
/sphinx_polyversion/sphinx.py:
--------------------------------------------------------------------------------
1 | """Builder Implementations for running sphinx."""
2 |
3 | from __future__ import annotations
4 |
5 | import enum
6 | import os
7 | from logging import getLogger
8 | from pathlib import Path, PurePath
9 | from subprocess import CalledProcessError
10 | from typing import TYPE_CHECKING, Any, Iterable
11 |
12 | from sphinx_polyversion.builder import Builder, BuildError
13 | from sphinx_polyversion.environment import Environment
14 | from sphinx_polyversion.json import GLOBAL_ENCODER, JSONable
15 |
16 | if TYPE_CHECKING:
17 | import json
18 |
19 | logger = getLogger(__name__)
20 |
21 |
class Placeholder(enum.Enum):
    """
    Placeholders that can be used in commands.

    Occurrences in a command are substituted with their actual values
    by :class:`CommandBuilder` right before the subprocess is run.
    """

    #: represents the location of the source files to render the docs from
    SOURCE_DIR = enum.auto()
    #: represents the output location to render the docs to
    OUTPUT_DIR = enum.auto()
29 |
30 |
class CommandBuilder(Builder[Environment, None]):
    """
    A builder that starts another process.

    This allows you to run any command for building your docs.
    You can use the placeholders from the :class:`Placeholder` enum in the
    command provided. These placeholders will be replaced with their actual
    values before the subprocess is run.

    Parameters
    ----------
    source : PurePath
        The relative source location to pass to the command.
    cmd : Iterable[str | Placeholder]
        The command to run.
    encoder : json.JSONEncoder | None, optional
        The encoder to use for serializing the metadata, by default None
    pre_cmd : Iterable[str | Placeholder], optional
        Additional command to run before `cmd`.
    post_cmd : Iterable[str | Placeholder], optional
        Additional command to run after `cmd`.

    """

    def __init__(
        self,
        source: str | PurePath,
        cmd: Iterable[str | Placeholder],
        encoder: json.JSONEncoder | None = None,
        pre_cmd: Iterable[str | Placeholder] | None = None,
        post_cmd: Iterable[str | Placeholder] | None = None,
    ) -> None:
        """
        Init the builder.

        Parameters
        ----------
        source : PurePath
            The relative source location to pass to the command.
        cmd : Iterable[str | Placeholder]
            The command to run.
        encoder : json.JSONEncoder | None, optional
            The encoder to use for serializing the metadata, by default None
        pre_cmd : Iterable[str | Placeholder], optional
            Additional command to run before `cmd`.
        post_cmd : Iterable[str | Placeholder], optional
            Additional command to run after `cmd`.

        """
        super().__init__()
        self.cmd = cmd
        self.source = PurePath(source)
        self.logger = logger
        self.encoder = encoder or GLOBAL_ENCODER
        self.pre_cmd = pre_cmd
        self.post_cmd = post_cmd

    async def build(
        self, environment: Environment, output_dir: Path, data: JSONable
    ) -> None:
        """
        Build and render a documentation.

        This method runs the command the instance was created with.
        The metadata will be passed to the subprocess encoded as json
        using the `POLYVERSION_DATA` environment variable.

        Parameters
        ----------
        environment : Environment
            The environment to use for building.
        output_dir : Path
            The output directory to build to.
        data : JSONable
            The metadata to use for building.

        Raises
        ------
        BuildError
            If any of the pre, build or post commands fails.

        """
        self.logger.info("Building...")
        source_dir = str(environment.path.absolute() / self.source)

        def replace(v: Any) -> str:
            # substitute Placeholder members with their computed values
            if v == Placeholder.OUTPUT_DIR:
                return str(output_dir)
            if v == Placeholder.SOURCE_DIR:
                return source_dir
            return str(v)

        env = os.environ.copy()
        # expose the metadata to the subprocess (read e.g. in conf.py)
        env["POLYVERSION_DATA"] = self.encoder.encode(data)

        cmd = tuple(map(replace, self.cmd))

        # create output directory
        output_dir.mkdir(exist_ok=True, parents=True)

        # pre hook
        if self.pre_cmd:
            # resolve placeholders once so the error can report the actual
            # command that was run (previously the error wrongly named `cmd`)
            pre_cmd = tuple(map(replace, self.pre_cmd))
            out, err, rc = await environment.run(*pre_cmd, env=env)
            if rc:
                raise BuildError from CalledProcessError(
                    rc, " ".join(pre_cmd), out, err
                )

        # build command
        out, err, rc = await environment.run(*cmd, env=env)

        # fixed message: this is the build output, not an installation
        self.logger.debug("Build output:\n %s", out)
        if rc:
            raise BuildError from CalledProcessError(rc, " ".join(cmd), out, err)

        # post hook
        if self.post_cmd:
            post_cmd = tuple(map(replace, self.post_cmd))
            out, err, rc = await environment.run(*post_cmd, env=env)
            if rc:
                raise BuildError from CalledProcessError(
                    rc, " ".join(post_cmd), out, err
                )
144 |
145 |
class SphinxBuilder(CommandBuilder):
    """
    A CommandBuilder running `sphinx-build`.

    Parameters
    ----------
    source : PurePath
        The relative source location to pass to the command.
    args : Iterable[str], optional
        The arguments to pass to `sphinx-build`, by default []
    encoder : json.JSONEncoder | None, optional
        The encoder to use for serializing the metadata, by default None
    pre_cmd : Iterable[str | Placeholder], optional
        Additional command to run before `cmd`.
    post_cmd : Iterable[str | Placeholder], optional
        Additional command to run after `cmd`.

    """

    def __init__(
        self,
        source: str | PurePath,
        *,
        args: Iterable[str] = [],
        encoder: json.JSONEncoder | None = None,
        pre_cmd: Iterable[str | Placeholder] | None = None,
        post_cmd: Iterable[str | Placeholder] | None = None,
    ) -> None:
        """
        Init the builder.

        Parameters
        ----------
        source : PurePath
            The relative source location to pass to the command.
        args : Iterable[str], optional
            The arguments to pass to `sphinx-build`, by default []
        encoder : json.JSONEncoder | None, optional
            The encoder to use for serializing the metadata, by default None
        pre_cmd : Iterable[str | Placeholder], optional
            Additional command to run before `cmd`.
        post_cmd : Iterable[str | Placeholder], optional
            Additional command to run after `cmd`.

        """
        # assemble the sphinx-build invocation; the placeholders are
        # resolved to the actual directories at build time
        build_cmd: list[str | Placeholder] = ["sphinx-build", "--color"]
        build_cmd.extend(args)
        build_cmd.append(Placeholder.SOURCE_DIR)
        build_cmd.append(Placeholder.OUTPUT_DIR)
        super().__init__(
            source,
            build_cmd,
            encoder=encoder,
            pre_cmd=pre_cmd,
            post_cmd=post_cmd,
        )
        self.args = args
206 |
--------------------------------------------------------------------------------
/sphinx_polyversion/utils.py:
--------------------------------------------------------------------------------
1 | """Helpers for the other modules."""
2 |
3 | from __future__ import annotations
4 |
5 | import asyncio
6 | import importlib
7 | import importlib.util
8 | import sys
9 | from functools import partial
10 | from pathlib import PurePath
11 | from typing import (
12 | TYPE_CHECKING,
13 | Any,
14 | Awaitable,
15 | Callable,
16 | Iterable,
17 | Set,
18 | TypeVar,
19 | cast,
20 | )
21 |
22 | if TYPE_CHECKING:
23 | from pathlib import Path
24 |
25 | from typing_extensions import ParamSpec
26 |
27 | P = ParamSpec("P")
28 | R = TypeVar("R")
29 |
if sys.version_info >= (3, 9):
    from asyncio import to_thread
else:

    async def to_thread(
        func: Callable[P, R], /, *args: P.args, **kwargs: P.kwargs
    ) -> R:
        """
        Run a synchronous function asynchronously in a new thread.

        Backport of :func:`asyncio.to_thread` for python < 3.9.

        Parameters
        ----------
        func : Callable[P, R]
            The function to call.
        *args
            The arguments to call `func` with.
        **kwargs
            The keyword arguments to call `func` with.

        Returns
        -------
        The return value of the called function.

        """
        # run_in_executor only accepts a plain callable -> bind the args
        bound = partial(func, *args, **kwargs)
        return await asyncio.get_running_loop().run_in_executor(None, bound)
57 |
58 |
PA = TypeVar("PA", bound=PurePath)


def shift_path(src_anchor: PA, dst_anchor: PA, src: PA) -> PA:
    """
    Shift a path from one anchor (root) directory to another.

    The part of `src` below `src_anchor` is re-rooted under `dst_anchor`.

    Parameters
    ----------
    src_anchor : Path
        The anchor
    dst_anchor : Path
        The destination
    src : Path
        The path to shift

    Returns
    -------
    Path
        The shifted path

    """
    relative = src.relative_to(src_anchor)
    return dst_anchor / relative
82 |
83 |
def import_file(path: Path) -> Any:
    """
    Import a module from its location in the file system.

    The module is registered in :data:`sys.modules` under its file stem.

    Parameters
    ----------
    path : Path
        The location of the python file to import.

    Returns
    -------
    Any
        The imported module.

    Raises
    ------
    OSError
        The module spec couldn't be created.
    ImportError
        No loader was found for the module.

    """
    module_name = path.stem
    spec = importlib.util.spec_from_file_location(module_name, path)
    if not spec:
        raise OSError(f"Failed to load {path}")
    # check the loader *before* touching sys.modules so a failed import
    # doesn't leave a half-initialized module registered globally
    if not spec.loader:
        raise ImportError(f"Failed to load {path}")
    module = importlib.util.module_from_spec(spec)
    # register before executing so self-referencing imports work
    # (recommended recipe from the importlib documentation)
    sys.modules[module_name] = module
    try:
        spec.loader.exec_module(module)
    except BaseException:
        # don't keep a broken module in the global registry
        del sys.modules[module_name]
        raise

    return module
117 |
118 |
async def async_all(awaitables: Iterable[Awaitable[Any]]) -> bool:
    """
    Return True if all awaitables return True.

    If the iterator is empty, True is returned. The awaitables may
    return any value. These values are converted to boolean.

    As soon as one awaitable yields a falsy value, the remaining
    tasks are cancelled and False is returned.

    Parameters
    ----------
    awaitables : Iterator[Awaitable[Any]]
        The awaitables to check.

    Returns
    -------
    bool
        Whether all awaitables returned True.

    """
    # wrap bare coroutines in tasks; futures/tasks are passed through
    pending = cast(
        Set["asyncio.Task[Any]"],
        {
            asyncio.create_task(aw) if asyncio.iscoroutine(aw) else aw
            for aw in awaitables
        },
    )
    while pending:
        done, pending = await asyncio.wait(
            pending, return_when=asyncio.FIRST_COMPLETED
        )
        if any(not task.result() for task in done):
            # short-circuit: one falsy result decides the outcome,
            # so cancel and drain everything still running
            for task in pending:
                task.cancel()
            if pending:
                await asyncio.wait(pending)
            return False
    return True
158 |
--------------------------------------------------------------------------------
/sphinx_polyversion/vcs.py:
--------------------------------------------------------------------------------
1 | """Abstract version provider API."""
2 |
3 | from abc import abstractmethod
4 | from pathlib import Path
5 | from typing import Iterable, Protocol, TypeVar, runtime_checkable
6 |
7 | RT = TypeVar("RT")
8 |
9 |
@runtime_checkable
class VersionProvider(Protocol[RT]):
    """Base for classes providing target revisions of the docs to build."""

    def name(self, revision: RT) -> str:
        """
        Get the (unique) name of a revision.

        This name will usually be used for creating the subdirectories
        of the revision.

        Parameters
        ----------
        revision : Any
            The revision whose name is requested.

        Returns
        -------
        str
            The name of the revision.

        """

    @abstractmethod
    async def checkout(self, root: Path, dest: Path, revision: RT) -> None:
        """
        Create copy of a specific revision at the given path.

        Parameters
        ----------
        root : Path
            The root path of the project.
        dest : Path
            The destination to extract the revision to.
        revision : Any
            The revision to extract.

        """

    @abstractmethod
    async def retrieve(self, root: Path) -> Iterable[RT]:
        """
        List all build targets.

        The build targets comprise all revisions that should be build.

        Parameters
        ----------
        root : Path
            The root path of the project.

        Returns
        -------
        Iterable[RT]
            The build targets.

        """
69 |
--------------------------------------------------------------------------------
/swp.json:
--------------------------------------------------------------------------------
1 | {
2 | "ruff": {
3 | "repo": "https://github.com/charliermarsh/ruff-pre-commit",
4 | "rev": "v${rev}"
5 | },
6 | "black": {
7 | "repo": "https://github.com/psf/black",
8 | "rev": "${rev}"
9 | },
10 | "mypy": {
11 | "repo": "https://github.com/pre-commit/mirrors-mypy",
12 | "rev": "v${rev}"
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
1 | """Unittests for this tool."""
2 |
--------------------------------------------------------------------------------
/tests/test_api.py:
--------------------------------------------------------------------------------
1 | """Test the api exposed to conf.py and poly.py files."""
2 |
3 |
4 | import json
5 | import os
6 | import sys
7 |
8 | import pytest
9 |
10 | from sphinx_polyversion.api import LoadError, apply_overrides, load
11 |
12 |
def test_load():
    """Test the `load` function."""
    # the variable may be inherited from the outer environment
    # (e.g. when the test suite itself runs inside a polyversion build)
    os.environ.pop("POLYVERSION_DATA", None)

    # test that the function raises an error if the env var isn't set
    with pytest.raises(LoadError):
        load()

    # test that the function returns the data from the env var
    data = {"a": 1, "b": 2}
    os.environ["POLYVERSION_DATA"] = json.dumps(data)
    try:
        assert load() == data
    finally:
        # don't leak the variable into other tests
        os.environ.pop("POLYVERSION_DATA", None)
23 |
24 |
def test_apply_overrides():
    """Test the `apply_overrides` function."""
    # override sys.argv to add overrides; keep the original so other
    # tests aren't affected by the mutation
    saved_argv = sys.argv
    sys.argv = ["poly.py", "--override", "a=1", "--override", "b=2"]
    try:
        # test that the function applies the overrides
        namespace = {}
        apply_overrides(namespace)
    finally:
        sys.argv = saved_argv

    assert namespace == {"a": "1", "b": "2", "MOCK": False, "SEQUENTIAL": False}
35 |
--------------------------------------------------------------------------------
/tests/test_git.py:
--------------------------------------------------------------------------------
1 | """Test the `Git` class."""
2 |
3 | from __future__ import annotations
4 |
5 | import os
6 | import subprocess
7 | from datetime import datetime
8 | from pathlib import Path
9 | from typing import List, Mapping, Tuple
10 |
11 | import pytest
12 |
13 | from sphinx_polyversion.git import (
14 | Git,
15 | GitRef,
16 | GitRefType,
17 | closest_tag,
18 | file_exists,
19 | file_predicate,
20 | )
21 |
22 | # Fragments of the following git logic are sourced from
23 | # https://github.com/pre-commit/pre-commit/blob/main/pre_commit/git.py
24 | #
25 | # Original Copyright (c) 2014 pre-commit dev team: Anthony Sottile, Ken Struys
26 | # MIT License
27 |
28 | # prevents errors on windows
29 | NO_FS_MONITOR = ("-c", "core.useBuiltinFSMonitor=false")
30 | PARTIAL_CLONE = ("-c", "extensions.partialClone=true")
31 |
32 |
def no_git_env(_env: Mapping[str, str] | None = None) -> dict[str, str]:
    """
    Clear problematic git env vars.

    Git sometimes sets some environment variables that alter its behaviour.
    Pass `os.environ` (or nothing) to this function and use the returned
    mapping as the environment of a `subprocess.run` call.

    Parameters
    ----------
    _env : Mapping[str, str] | None, optional
        A dictionary of env vars, by default None

    Returns
    -------
    dict[str, str]
        The same dictionary but without the problematic vars

    """
    # Rationale (see https://github.com/pre-commit/pre-commit/issues/300):
    # some git versions export GIT_WORK_TREE, GIT_DIR or GIT_INDEX_FILE
    # while running hooks, which breaks clone/commit operations.
    source = os.environ if _env is None else _env

    # GIT_* variables that must survive the filtering
    allowed = {
        "GIT_EXEC_PATH",
        "GIT_SSH",
        "GIT_SSH_COMMAND",
        "GIT_SSL_CAINFO",
        "GIT_SSL_NO_VERIFY",
        "GIT_CONFIG_COUNT",
        "GIT_HTTP_PROXY_AUTHMETHOD",
        "GIT_ALLOW_PROTOCOL",
        "GIT_ASKPASS",
    }

    filtered: dict[str, str] = {}
    for key, value in source.items():
        keep = (
            not key.startswith("GIT_")
            or key.startswith(("GIT_CONFIG_KEY_", "GIT_CONFIG_VALUE_"))
            or key in allowed
        )
        if keep:
            filtered[key] = value
    return filtered
79 |
80 |
@pytest.fixture
def git_testrepo(tmp_path: Path) -> Tuple[Path, List[GitRef]]:
    """
    Create a git repository for testing.

    Builds a repo with three branches (main, dev, feature) and two tags
    (1.0, 2.0) and returns the repo path together with the expected refs.
    """
    # disable the fs monitor to prevent errors on windows
    git = ("git", *NO_FS_MONITOR)
    env = no_git_env()

    def run_git(*args: str) -> None:
        # helper: run a git subcommand inside the temporary repo
        subprocess.run(git + args, cwd=tmp_path, env=env, check=False)

    # init repo
    run_git("init")
    run_git("config", "user.email", "example@example.com")
    run_git("config", "user.name", "example")
    run_git("config", "commit.gpgsign", "false")
    run_git("config", "init.defaultBranch", "main")

    # create some files and directories to commit
    tmp_path.joinpath("test.txt").write_text("test")
    tmp_path.joinpath("dir1").mkdir()
    tmp_path.joinpath("dir2").mkdir()
    tmp_path.joinpath("dir1", "file1.txt").write_text("file1")
    tmp_path.joinpath("dir2", "file3.txt").write_text("file3")

    run_git("add", ".")
    run_git("commit", "-m", "test")

    # create a branch (stays at the first commit)
    run_git("branch", "dev")

    # create changes to commit
    tmp_path.joinpath("test.txt").write_text("test2")
    tmp_path.joinpath("dir1", "file2.txt").write_text("file2")
    tmp_path.joinpath("dir1", "file1.txt").write_text("file1a")
    tmp_path.joinpath("dir2", "file3.txt").write_text("file3a")

    run_git("add", ".")
    run_git("commit", "-m", "test2")

    # create a tag
    run_git("tag", "1.0", "-m", "")

    # commit some more changes
    tmp_path.joinpath("test.txt").write_text("test3")
    tmp_path.joinpath("dir1", "file2.txt").write_text("file2a")
    tmp_path.joinpath("dir1", "file1.txt").write_text("file1b")

    run_git("add", ".")
    run_git("commit", "-m", "test3")

    # create another branch
    run_git("branch", "feature")

    # tag the latest commit
    run_git("tag", "2.0", "-m", "")

    # list all refs (branches first, then tags - for-each-ref sorts by refname)
    p = subprocess.run(
        ["git", "for-each-ref", "--format=%(objectname) %(refname)"],
        cwd=tmp_path,
        stdout=subprocess.PIPE,
        check=False,
    )

    # expected types in for-each-ref order: dev, feature, main, 1.0, 2.0
    types = [
        GitRefType.BRANCH,
        GitRefType.BRANCH,
        GitRefType.BRANCH,
        GitRefType.TAG,
        GitRefType.TAG,
    ]
    # NOTE(review): these dates appear to be placeholders - compare_refs
    # deliberately ignores the date field when comparing refs
    dates = [
        datetime(2023, 8, 17, 17, 16, 34),
        datetime(2023, 8, 26, 19, 45, 9),
        datetime(2023, 8, 29, 19, 45, 9),
        datetime(2023, 6, 29, 11, 43, 11),
        datetime(2023, 8, 29, 19, 45, 9),
    ]
    lines = [line.split() for line in p.stdout.decode().splitlines()]
    # build GitRef objects: name is the last path segment of the refname
    refs = [
        GitRef(r.split("/")[-1], h, r, t, d)
        for t, (h, r), d in zip(types, lines, dates)
    ]
    return tmp_path, refs
163 |
164 |
@pytest.fixture
def git() -> Git:
    """Create a `Git` instance for testing."""
    # regexes match everything -> all branches and tags are considered
    return Git(branch_regex=".*", tag_regex=".*")
169 |
170 |
@pytest.fixture
def git_with_predicate() -> Git:
    """Create a `Git` instance with a predicate for testing."""

    async def accept(root: Path, ref: GitRef) -> bool:
        # only these refs are accepted by the predicate
        return ref.name in ("test", "feature", "1.0")

    return Git(
        branch_regex=".*",
        tag_regex=".*",
        predicate=accept,
    )
183 |
184 |
@pytest.fixture
def git_with_buffer_size() -> Git:
    """Create a `Git` instance with a buffer size for testing."""
    # small custom buffer size to exercise the chunked copy path
    return Git(branch_regex=".*", tag_regex=".*", buffer_size=1024)
189 |
190 |
@pytest.mark.asyncio
async def test_aroot(git: Git, git_testrepo: Tuple[Path, List[GitRef]]):
    """Test the `aroot` method."""
    repo, _refs = git_testrepo

    # the root is found when asked from the repository root itself...
    assert await git.aroot(repo) == repo
    # ...and from any subdirectory inside the repository
    assert await git.aroot(repo / "dir1") == repo
200 |
201 |
@pytest.mark.asyncio
async def test_checkout(
    git: Git,
    git_testrepo: Tuple[Path, List[GitRef]],
    tmp_path: Path,
):
    """Test the `checkout` method."""
    repo_path, refs = git_testrepo
    first_ref = refs[0]

    await git.checkout(repo_path, tmp_path, first_ref)

    # the checked-out tree must contain the file content of that ref
    content = (tmp_path / "test.txt").read_text()
    assert content == "test"
212 |
213 |
@pytest.mark.asyncio
async def test_checkout_with_buffer(
    git_with_buffer_size: Git,
    git_testrepo: Tuple[Path, List[GitRef]],
    tmp_path: Path,
):
    """Test the `checkout` method with a custom buffer size configured."""
    repo_path, refs = git_testrepo

    await git_with_buffer_size.checkout(repo_path, tmp_path, refs[0])

    # the buffered copy must produce the same result as the default one
    assert (tmp_path / "test.txt").read_text() == "test"
224 |
225 |
@pytest.mark.asyncio
async def test_predicate(git_with_predicate: Git):
    """Test the `predicate` method."""
    root = "."
    matching = GitRef("test", "", "", None, None)
    non_matching = GitRef("test2", "", "", None, None)

    # "test" is in the fixture's allow-list, "test2" is not
    assert await git_with_predicate.predicate(root, matching)
    assert not await git_with_predicate.predicate(root, non_matching)
234 |
235 |
def compare_refs(ref1: GitRef, ref2: GitRef) -> bool:
    """
    Determine equality of two `GitRef` instances.

    The `date` field is deliberately ignored since the expected
    value is not known in advance. This is used with `assert` later on.
    """
    compared_fields = ("name", "obj", "ref", "type_")
    return all(
        getattr(ref1, field) == getattr(ref2, field) for field in compared_fields
    )
249 |
250 |
@pytest.mark.asyncio
async def test_retrieve(git: Git, git_testrepo: Tuple[Path, List[GitRef]]):
    """Test the `retrieve` method."""
    root, expected_refs = git_testrepo

    found_refs = await git.retrieve(root)

    # all five refs of the test repository must be returned, in order
    assert len(found_refs) == 5
    for found, expected in zip(found_refs, expected_refs):
        assert compare_refs(found, expected)
260 |
261 |
@pytest.mark.asyncio
async def test_retrieve_with_predicate(
    git_with_predicate: Git, git_testrepo: Tuple[Path, List[GitRef]]
):
    """Test the `retrieve` method with a predicate."""
    root, expected_refs = git_testrepo

    found_refs = await git_with_predicate.retrieve(root)

    # only two of the repository's refs pass the fixture's name predicate
    assert len(found_refs) == 2
    assert compare_refs(found_refs[0], expected_refs[1])
    assert compare_refs(found_refs[1], expected_refs[3])
272 |
273 |
@pytest.mark.asyncio
async def test_closest_tag(git_testrepo: Tuple[Path, List[GitRef]]):
    """Test the `closest_tag` method."""
    root, git_refs = git_testrepo
    # main branch
    assert (
        await closest_tag(root, git_refs[2], [git_refs[0].obj, "1.0", "2.0"]) == "2.0"
    )
    # 1.0 tag should map to HEAD of dev branch
    assert (
        await closest_tag(root, git_refs[3], [git_refs[0].obj, "2.0"])
        == git_refs[0].obj
    )
    # 1.0 tag should map to itself
    assert (
        await closest_tag(root, git_refs[3], [git_refs[0].obj, "1.0", "2.0"]) == "1.0"
    )
    # 2.0 tag should map to itself
    assert (
        await closest_tag(root, git_refs[4], [git_refs[0].obj, "1.0", "2.0"]) == "2.0"
    )
    # if there is no ancestor None should be returned
    assert await closest_tag(root, git_refs[0], ["1.0", "2.0"]) is None
297 |
298 |
@pytest.mark.asyncio
async def test_file_exists(git_testrepo: Tuple[Path, List[GitRef]]):
    """Test the `file_exists` helper on files and directories of different refs."""
    root, git_refs = git_testrepo

    # dev branch
    assert await file_exists(root, git_refs[0], Path("test.txt"))
    assert await file_exists(root, git_refs[0], Path("dir1"))
    assert await file_exists(root, git_refs[0], Path("dir2/file3.txt"))
    assert not await file_exists(root, git_refs[0], Path("dir1/file2.txt"))
    # feature branch
    assert await file_exists(root, git_refs[1], Path("test.txt"))
    assert await file_exists(root, git_refs[1], Path("dir2"))
    assert await file_exists(root, git_refs[1], Path("dir1/file2.txt"))
    assert not await file_exists(root, git_refs[1], Path("dir3"))
314 |
315 |
@pytest.mark.asyncio
async def test_file_predicate(git_testrepo: Tuple[Path, List[GitRef]]):
    """Test `file_predicate` used as a `Git` retrieval predicate."""
    root, git_refs = git_testrepo
    # only refs containing both paths should be retrieved
    git = Git(
        branch_regex=".*",
        tag_regex=".*",
        predicate=file_predicate([Path("dir1/file2.txt"), Path("dir2")]),
    )

    refs = await git.retrieve(root)
    # the first repo ref lacks dir1/file2.txt and is filtered out
    assert len(refs) == 4
    for i in range(4):
        assert compare_refs(refs[i], git_refs[i + 1])
330 |
--------------------------------------------------------------------------------
/tests/test_json.py:
--------------------------------------------------------------------------------
1 | """Test encoding and deconding of python types and objects to the json format."""
2 |
3 | from datetime import datetime
4 |
5 | from sphinx_polyversion.git import GitRef, GitRefType
6 | from sphinx_polyversion.json import Decoder, Encoder, std_hook
7 |
8 |
class TestEncoder:
    """Unittests for the `Encoder` class."""

    def test_register_hook(self):
        """Test that register() adds a hook to the encoder's hook set."""
        encoder = Encoder()

        # a fresh encoder starts without any hooks
        assert encoder.hooks == set()

        result = encoder.register(std_hook)

        # register() returns the object that was passed in
        assert result == std_hook
        assert encoder.hooks == {std_hook}

        # several hooks (here a class and an instance) can be added in one call
        encoder.register(std_hook, std_hook())

        assert len(encoder.hooks) == 2

    def test_register_type(self):
        """Test that register() adds a type to the encoder's hook set."""
        encoder = Encoder()

        assert encoder.hooks == set()

        result = encoder.register(GitRefType)

        # register() returns the object that was passed in
        assert result == GitRefType
        assert encoder.hooks == {GitRefType}

        # several types can be registered with a single call
        encoder.register(GitRef, GitRefType)

        assert len(encoder.hooks) == 2

    def test_determine_classname(self):
        """Test that determine_classname() returns the expected class name for a given object."""
        encoder = Encoder()

        # classes are identified by their fully qualified dotted path
        assert (
            encoder.determine_classname(GitRef, instance=False)
            == "sphinx_polyversion.git.GitRef"
        )
        # instances yield a leading dot followed by the bare type name
        assert encoder.determine_classname(3) == ".int"

        from pathlib import Path

        assert encoder.determine_classname(Path, instance=False) == "pathlib.Path"

        assert (
            encoder.determine_classname(datetime, instance=False) == "datetime.datetime"
        )
        assert encoder.determine_classname(datetime(2023, 1, 1)) == ".datetime"

    def test_transform_hook(self):
        """Test transforming an object that a registered hook handles."""
        encoder = Encoder()
        encoder.register(std_hook)

        # hook output is wrapped in a `__jsonhook__` marker triple:
        # (hook path, class name, serialized fields)
        assert encoder.transform(datetime(2023, 12, 2)) == {
            "__jsonhook__": (
                "sphinx_polyversion.json.std_hook",
                ".datetime",
                "2023-12-02T00:00:00",
            )
        }

    def test_transform_class(self) -> None:
        """Test transforming an instance of a `Transformable` class."""
        encoder = Encoder()
        # transformable objects are wrapped in a `__jsonclass__` marker pair
        assert encoder.transform(GitRef("master", "3434", "", None, None)) == {
            "__jsonclass__": (
                "sphinx_polyversion.git.GitRef",
                ["master", "3434", "", None, None, None],
            )
        }

    def test_transform_dict(self):
        """Test that transform() returns the expected dictionary for a given dictionary with nested objects."""
        encoder = Encoder()
        assert encoder.transform({"ref": GitRef("master", "3434", "", None, None)}) == {
            "ref": {
                "__jsonclass__": (
                    "sphinx_polyversion.git.GitRef",
                    ["master", "3434", "", None, None, None],
                )
            }
        }

    def test_transform_list(self):
        """Test that transform() returns the expected list for a given list with nested objects."""
        encoder = Encoder()
        assert encoder.transform([GitRef("master", "3434", "", None, None)]) == [
            {
                "__jsonclass__": (
                    "sphinx_polyversion.git.GitRef",
                    ["master", "3434", "", None, None, None],
                )
            }
        ]

    def test_transform_any(self):
        """Test that transform() returns the input object for an unknown object."""
        encoder = Encoder()
        o = object()
        assert encoder.transform(o) == o

    def test_encode(self):
        """
        Test that encode() returns the expected JSON string for a given object.

        This uses dict, list, transformable, hook and some standard datatypes
        """
        encoder = Encoder()
        encoder.register(std_hook)
        obj = {
            "ref": GitRef(
                "master",
                "3434",
                "refs/tags/v1.0.0",
                GitRefType.TAG,
                # year 200 exercises zero-padded ISO dates ("0200-...")
                datetime(200, 2, 6, 6, 3, 6),
            ),
            "date": datetime(2023, 12, 2),
            "list": [1, 2, 3],
            "dict": {"a": 1, "b": 2},
        }
        assert (
            encoder.encode(obj)
            == '{"ref": {"__jsonclass__": ["sphinx_polyversion.git.GitRef", ["master", "3434", "refs/tags/v1.0.0", {"__jsonclass__": ["sphinx_polyversion.git.GitRefType", "TAG"]}, {"__jsonhook__": ["sphinx_polyversion.json.std_hook", ".datetime", "0200-02-06T06:03:06"]}, null]]}, "date": {"__jsonhook__": ["sphinx_polyversion.json.std_hook", ".datetime", "2023-12-02T00:00:00"]}, "list": [1, 2, 3], "dict": {"a": 1, "b": 2}}'
        )
138 |
139 |
class TestDecoder:
    """Unittests for the `Decoder` class."""

    def test_register_hook(self):
        """Test that register() adds a hook to the decoder's hook collection."""
        decoder = Decoder()
        decoder.register(std_hook)
        assert std_hook in decoder.hooks

    def test_register_type(self):
        """Test that register() adds a type to the decoder's registered_types list."""
        decoder = Decoder()
        decoder.register(GitRefType)
        assert GitRefType in decoder.registered_types

    def test_register_from(self):
        """Test that register_from() copies the registrations of another decoder."""
        decoder_1 = Decoder()
        decoder_1.register(GitRef, GitRefType)
        assert decoder_1.registered_types == [GitRef, GitRefType]

        decoder_2 = Decoder()
        decoder_2.register_from(decoder_1)
        # everything registered on decoder_1 must now be known to decoder_2
        assert GitRef in decoder_2.registered_types
        assert GitRefType in decoder_2.registered_types

    def test_determine_classname(self):
        """Test that determine_classname() returns the expected class name for a given object."""
        decoder = Decoder()

        assert decoder.determine_classname(GitRef) == "sphinx_polyversion.git.GitRef"

        from pathlib import Path

        assert decoder.determine_classname(Path) == "pathlib.Path"

    def test_decode(self):
        """Test that decode() returns the expected object for a given JSON string."""
        # expected result; mirrors the string produced by TestEncoder.test_encode
        obj = {
            "ref": GitRef(
                "master",
                "3434",
                "refs/tags/v1.0.0",
                GitRefType.TAG,
                datetime(200, 2, 6, 6, 3, 6),
            ),
            "date": datetime(2023, 12, 2),
            "list": [1, 2, 3],
            "dict": {"a": 1, "b": 2},
        }
        encoded = '{"ref": {"__jsonclass__": ["sphinx_polyversion.git.GitRef", ["master", "3434", "refs/tags/v1.0.0", {"__jsonclass__": ["sphinx_polyversion.git.GitRefType", "TAG"]}, {"__jsonhook__": ["sphinx_polyversion.json.std_hook", ".datetime", "0200-02-06T06:03:06"]}, null]]}, "date": {"__jsonhook__": ["sphinx_polyversion.json.std_hook", ".datetime", "2023-12-02T00:00:00"]}, "list": [1, 2, 3], "dict": {"a": 1, "b": 2}}'

        decoder = Decoder()
        # both the hook and the involved types must be registered for decoding
        decoder.register(std_hook)
        decoder.register(GitRef, GitRefType)
        assert decoder.decode(encoded) == obj
196 |
197 |
class TestStd_Hook:
    """Ensure the hooks provided by this module work."""

    def test_encode_datetime(self):
        """Test the returned fields for `datetime`."""
        moment = datetime(2023, 8, 6, 6, 3, 6)
        serialized = std_hook.fields(moment)
        assert serialized == "2023-08-06T06:03:06"

    def test_decode_datetime(self):
        """Test calling `from_json` for a `datetime`."""
        restored = std_hook.from_json("datetime.datetime", "2023-08-06T06:03:06")
        expected = datetime(2023, 8, 6, 6, 3, 6)
        assert expected == restored
210 |
--------------------------------------------------------------------------------
/tests/test_pyvenv.py:
--------------------------------------------------------------------------------
1 | """Test the python environments in the `pyvenv` module."""
2 |
3 | import asyncio
4 | from pathlib import Path
5 |
6 | import pytest
7 |
8 | from sphinx_polyversion.pyvenv import (
9 | Pip,
10 | Poetry,
11 | VenvWrapper,
12 | VirtualenvWrapper,
13 | VirtualPythonEnvironment,
14 | )
15 |
16 |
@pytest.mark.asyncio
async def test_venv_creation(tmp_path: Path):
    """Test the creation of a python virtual environment."""
    target = tmp_path / "venv"

    # call the wrapper directly to create the environment
    await VenvWrapper()(target)

    # the env directory and its interpreter must exist afterwards
    assert target.exists()
    assert (target / "bin" / "python").exists()
24 |
25 |
@pytest.mark.asyncio
async def test_virtualvenv_creation(tmp_path: Path):
    """Test the creation of a python virtual environment."""
    # skip when the optional `virtualenv` package is not installed
    pytest.importorskip("virtualenv")

    target = tmp_path / "venv"
    wrapper = VirtualenvWrapper([])
    await wrapper(target)

    assert target.exists()
    assert (target / "bin" / "python").exists()
35 |
36 |
class TestVirtualPythonEnvionment:  # NOTE(review): name misspells "Environment"
    """Test the `VirtualPythonEnvironment` class."""

    @pytest.mark.asyncio
    async def test_creation_with_venv(self, tmp_path: Path):
        """Test the `create_venv` method with a `VenvWrapper`."""
        location = tmp_path / "venv"
        env = VirtualPythonEnvironment(
            tmp_path, "main", location, creator=VenvWrapper()
        )

        await env.create_venv()
        # the creator must have produced an interpreter at the location
        assert (location / "bin" / "python").exists()

    @pytest.mark.asyncio
    async def test_creation_without_creator(self, tmp_path: Path):
        """Test the `create_venv` method without any creator."""
        location = tmp_path / "venv"
        env = VirtualPythonEnvironment(tmp_path, "main", location)

        await env.create_venv()
        # without a creator, create_venv must not create anything
        assert not (location / "bin" / "python").exists()

    @pytest.mark.asyncio
    async def test_run_without_creator_no_existing(self, tmp_path: Path):
        """Test running a command without an existing venv and without creator."""
        location = tmp_path / "novenv"

        # entering the env must fail since nothing exists at `location`
        async with VirtualPythonEnvironment(tmp_path, "main", location) as env:
            with pytest.raises(FileNotFoundError, match="There is no virtual"):
                out, err, rc = await env.run(
                    "python",
                    "-c",
                    "import sys; print(sys.prefix)",
                    stdout=asyncio.subprocess.PIPE,
                )

    @pytest.mark.asyncio
    async def test_run_without_creator(self, tmp_path: Path):
        """Test running a command in an existing venv."""
        location = tmp_path / "venv"

        # create env
        await VenvWrapper([])(location)

        async with VirtualPythonEnvironment(tmp_path, "main", location) as env:
            out, err, rc = await env.run(
                "python",
                "-c",
                "import sys; print(sys.prefix)",
                stdout=asyncio.subprocess.PIPE,
            )
            # sys.prefix inside the env must point at the venv location
            assert rc == 0
            assert str(location) == out.strip()

    @pytest.mark.asyncio
    async def test_run_with_creator(self, tmp_path: Path):
        """Test running a command in a new venv."""
        location = tmp_path / "venv"

        async with VirtualPythonEnvironment(
            tmp_path, "main", location, creator=VenvWrapper()
        ) as env:
            out, err, rc = await env.run(
                "python",
                "-c",
                "import sys; print(sys.prefix)",
                stdout=asyncio.subprocess.PIPE,
            )
            assert rc == 0
            assert str(location) == out.strip()

    @pytest.mark.asyncio
    async def test_run_with_env_variables(self, tmp_path: Path):
        """Test passing an environment variable to a venv."""
        location = tmp_path / "venv"

        async with VirtualPythonEnvironment(
            tmp_path, "main", location, creator=VenvWrapper(), env={"TESTVAR": "1"}
        ) as env:
            out, err, rc = await env.run(
                "python",
                "-c",
                "import os; print(os.environ['TESTVAR'])",
                stdout=asyncio.subprocess.PIPE,
            )
            # the custom variable must be visible to the subprocess
            assert rc == 0
            assert out.strip() == "1"
125 |
126 |
class TestPip:
    """Test the `Pip` class."""

    # fix: the decorator was duplicated (`@pytest.mark.asyncio` twice)
    @pytest.mark.asyncio
    async def test_creation_with_venv(self, tmp_path: Path):
        """Test the `create_venv` method with a `VenvWrapper`."""
        location = tmp_path / "venv"
        env = Pip(
            tmp_path,
            "main",
            location,
            args=["tomli"],
            creator=VenvWrapper(),
            temporary=False,
        )

        await env.create_venv()
        # the creator must have produced an interpreter at the location
        assert (location / "bin" / "python").exists()

    @pytest.mark.asyncio
    async def test_creation_without_creator(self, tmp_path: Path):
        """Test the `create_venv` method without any creator."""
        location = tmp_path / "venv"
        env = Pip(tmp_path, "main", location, args=["tomli"], temporary=False)

        await env.create_venv()
        # without a creator, create_venv must not create anything
        assert not (location / "bin" / "python").exists()

    @pytest.mark.asyncio
    async def test_run_without_creator(self, tmp_path: Path):
        """Test running a command in an existing venv."""
        location = tmp_path / "venv"

        # create env
        await VenvWrapper([])(location)

        async with Pip(
            tmp_path, "main", location, args=["tomli"], temporary=False
        ) as env:
            out, err, rc = await env.run(
                "python",
                "-c",
                "import sys; print(sys.prefix)",
                stdout=asyncio.subprocess.PIPE,
            )
            # sys.prefix inside the env must point at the venv location
            assert rc == 0
            assert str(location) == out.strip()

    @pytest.mark.asyncio
    async def test_run_with_creator(self, tmp_path: Path):
        """Test running a command in a new venv."""
        location = tmp_path / "venv"

        async with Pip(
            tmp_path,
            "main",
            location,
            args=["tomli"],
            creator=VenvWrapper(),
            temporary=False,
        ) as env:
            out, err, rc = await env.run(
                "python",
                "-c",
                "import sys; print(sys.prefix)",
                stdout=asyncio.subprocess.PIPE,
            )
            assert rc == 0
            assert str(location) == out.strip()

    @pytest.mark.asyncio
    async def test_creation_with_venv_temporary(self, tmp_path: Path):
        """Test the `create_venv` method with a creator and a temporary location."""
        location = "tmpvenv"
        env = Pip(
            tmp_path,
            "main",
            location,
            args=["tomli"],
            creator=VenvWrapper(),
            temporary=True,
        )

        await env.create_venv()
        # the relative location is resolved against the project path
        assert (tmp_path / location / "bin" / "python").exists()

    @pytest.mark.asyncio
    async def test_creation_without_creator_temporary(self, tmp_path: Path):
        """Test that requesting a temporary venv without a creator fails."""
        location = "tmpvenv"
        with pytest.raises(
            ValueError,
            match="Cannot create temporary virtual environment when creator is None",
        ):
            Pip(tmp_path, "main", location, args=["tomli"], temporary=True)

    @pytest.mark.asyncio
    async def test_run_with_creator_temporary(self, tmp_path: Path):
        """Test running a command in a new temporary venv."""
        location = "tmpvenv"

        async with Pip(
            tmp_path,
            "main",
            location,
            args=["tomli"],
            creator=VenvWrapper(),
            temporary=True,
        ) as env:
            out, err, rc = await env.run(
                "python",
                "-c",
                "import sys; print(sys.prefix)",
                stdout=asyncio.subprocess.PIPE,
            )
            assert rc == 0
            assert str(tmp_path / location) == out.strip()

    @pytest.mark.asyncio
    async def test_install_into_existing_venv(self, tmp_path: Path):
        """Test installing a package into an existing venv."""
        location = tmp_path / "venv"

        # create env
        await VenvWrapper(with_pip=True)(location)

        # test that tomli is not installed
        proc = await asyncio.create_subprocess_exec(
            str(location / "bin/python"),
            "-c",
            "import tomli",
            stdout=asyncio.subprocess.PIPE,
        )
        rc = await proc.wait()
        assert rc == 1

        # init env with tomli
        async with Pip(tmp_path, "main", location, args=["tomli"]) as env:
            # test that tomli is installed
            out, err, rc = await env.run(
                "python",
                "-c",
                "import tomli",
                stdout=asyncio.subprocess.PIPE,
            )
            assert rc == 0

    @pytest.mark.asyncio
    async def test_run_with_env_variables(self, tmp_path: Path):
        """Test passing an environment variable to a venv."""
        location = tmp_path / "venv"

        # create env
        await VenvWrapper(with_pip=True)(location)

        # test that tomli is not installed
        proc = await asyncio.create_subprocess_exec(
            str(location / "bin/python"),
            "-c",
            "import tomli",
            stdout=asyncio.subprocess.PIPE,
        )
        rc = await proc.wait()
        assert rc == 1

        # init env with tomli
        async with Pip(
            tmp_path, "main", location, args=["tomli"], env={"TESTVAR": "1"}
        ) as env:
            # the custom variable must be visible to the subprocess
            out, err, rc = await env.run(
                "python",
                "-c",
                "import os; print(os.environ['TESTVAR'])",
                stdout=asyncio.subprocess.PIPE,
            )
            assert rc == 0
            assert out.strip() == "1"
306 |
307 |
class TestPoetry:
    """Test the `Poetry` environment."""

    @pytest.mark.asyncio
    async def test_simple_project(self, tmp_path: Path):
        """Test installing a simple project with poetry."""
        # create source files
        src_location = tmp_path / "test"
        src_location.mkdir()
        src_location.joinpath("__init__.py").touch()

        # create config
        config_location = tmp_path / "pyproject.toml"
        config_location.write_text(
            """
[tool.poetry]
name = "test"
version = "0.1.0"
description = ""
authors = ["Author "]
license = "MIT"

[tool.poetry.dependencies]
python = "^3.8"
tomli = "*"
"""
        )

        # create poetry env
        async with Poetry(tmp_path, "main", args=[], env={"TESTVAR": "1"}) as env:
            # check sourcing works
            out, err, rc = await env.run(
                "python",
                "-c",
                "import sys; print(sys.prefix)",
                stdout=asyncio.subprocess.PIPE,
            )
            assert rc == 0
            assert str(tmp_path) in out.strip()

            # test that project is installed
            out, err, rc = await env.run(
                "python",
                "-c",
                "import test",
                stdout=asyncio.subprocess.PIPE,
            )
            assert rc == 0

            # test that tomli is installed
            out, err, rc = await env.run(
                "python",
                "-c",
                "import tomli",
                stdout=asyncio.subprocess.PIPE,
            )
            assert rc == 0

            # test that custom environment variables are passed correctly
            out, err, rc = await env.run(
                "python",
                "-c",
                "import os; print(os.environ['TESTVAR'])",
                stdout=asyncio.subprocess.PIPE,
            )
            assert rc == 0
            assert out.strip() == "1"

    @pytest.mark.asyncio
    async def test_simple_project_with_optional_deps(self, tmp_path: Path):
        """Test installing a simple project with poetry and an optional group."""
        # create source files
        src_location = tmp_path / "test"
        src_location.mkdir()
        src_location.joinpath("__init__.py").touch()

        # create config: `tomli` lives in the optional `dev` group
        config_location = tmp_path / "pyproject.toml"
        config_location.write_text(
            """
[tool.poetry]
name = "test"
version = "0.1.0"
description = ""
authors = ["Author "]
license = "MIT"

[tool.poetry.dependencies]
python = "^3.8"

[tool.poetry.group.dev]
optional = true

[tool.poetry.group.dev.dependencies]
tomli = "*"
"""
        )

        # create poetry env (dev group NOT selected)
        async with Poetry(tmp_path, "main", args=[]) as env:
            # check sourcing works
            out, err, rc = await env.run(
                "python",
                "-c",
                "import sys; print(sys.prefix)",
                stdout=asyncio.subprocess.PIPE,
            )
            assert rc == 0
            assert str(tmp_path) in out.strip()

            # test that project is installed
            out, err, rc = await env.run(
                "python",
                "-c",
                "import test",
                stdout=asyncio.subprocess.PIPE,
            )
            assert rc == 0

            # test that tomli is not installed
            out, err, rc = await env.run(
                "python",
                "-c",
                "import tomli",
                stdout=asyncio.subprocess.PIPE,
            )
            assert rc == 1

        # create poetry env (dev group selected via --with)
        async with Poetry(tmp_path, "main", args=["--with=dev"]) as env:
            # check sourcing works
            out, err, rc = await env.run(
                "python",
                "-c",
                "import sys; print(sys.prefix)",
                stdout=asyncio.subprocess.PIPE,
            )
            assert rc == 0
            assert str(tmp_path) in out.strip()

            # test that project is installed
            out, err, rc = await env.run(
                "python",
                "-c",
                "import test",
                stdout=asyncio.subprocess.PIPE,
            )
            assert rc == 0

            # test that tomli is installed
            out, err, rc = await env.run(
                "python",
                "-c",
                "import tomli",
                stdout=asyncio.subprocess.PIPE,
            )
            assert rc == 0

    @pytest.mark.asyncio
    async def test_create_two_concurrently(self, tmp_path: Path):
        """Test creating two environments concurrently."""
        # NOTE(review): the second env is created while the first is still
        # open (nested), not in parallel tasks
        # create source files
        src_location = tmp_path / "test"
        src_location.mkdir()
        src_location.joinpath("__init__.py").touch()

        # create config
        config_location = tmp_path / "pyproject.toml"
        config_location.write_text(
            """
[tool.poetry]
name = "test"
version = "0.1.0"
description = ""
authors = ["Author "]
license = "MIT"

[tool.poetry.dependencies]
python = "^3.8"

[tool.poetry.group.dev]
optional = true

[tool.poetry.group.dev.dependencies]
tomli = "*"
"""
        )

        # create poetry env
        async with Poetry(tmp_path, "main", args=[]) as env:
            # check sourcing works
            first_path, err, rc = await env.run(
                "python",
                "-c",
                "import sys; print(sys.prefix)",
                stdout=asyncio.subprocess.PIPE,
            )
            first_path = first_path.strip()
            assert rc == 0
            assert str(tmp_path) in first_path

            # test that project is installed
            out, err, rc = await env.run(
                "python",
                "-c",
                "import test",
                stdout=asyncio.subprocess.PIPE,
            )
            assert rc == 0

            # test that tomli is not installed
            out, err, rc = await env.run(
                "python",
                "-c",
                "import tomli",
                stdout=asyncio.subprocess.PIPE,
            )
            assert rc == 1

            # create second poetry env
            async with Poetry(tmp_path, "main", args=["--with=dev"]) as env2:
                # check sourcing works
                out, err, rc = await env2.run(
                    "python",
                    "-c",
                    "import sys; print(sys.prefix)",
                    stdout=asyncio.subprocess.PIPE,
                )
                assert rc == 0
                assert str(tmp_path) in out.strip()
                # the two envs must live at distinct locations
                assert out.strip() != first_path

                # check that old env still works
                out, err, rc = await env.run(
                    "python",
                    "-c",
                    "import sys; print(sys.prefix)",
                    stdout=asyncio.subprocess.PIPE,
                )
                assert rc == 0
                assert first_path == out.strip()

                # test that project is installed
                out, err, rc = await env2.run(
                    "python",
                    "-c",
                    "import test",
                    stdout=asyncio.subprocess.PIPE,
                )
                assert rc == 0

                # test that tomli is installed
                out, err, rc = await env2.run(
                    "python",
                    "-c",
                    "import tomli",
                    stdout=asyncio.subprocess.PIPE,
                )
                assert rc == 0
567 |
--------------------------------------------------------------------------------
/tests/test_sphinx.py:
--------------------------------------------------------------------------------
1 | """Test the sphinx integration."""
2 |
--------------------------------------------------------------------------------
/tests/test_utils.py:
--------------------------------------------------------------------------------
1 | """Test the `utils` submodule."""
2 |
3 | import asyncio
4 | from pathlib import Path, PurePath
5 | from typing import TypeVar
6 |
7 | import pytest
8 |
9 | from sphinx_polyversion.utils import async_all, import_file, shift_path
10 |
11 |
@pytest.mark.parametrize(
    ("anchor1", "anchor2", "path", "solution"),
    [
        ("a", "b", "a/c", "b/c"),
        ("a", "b", "a/b/c", "b/b/c"),
        ("a/b", "a", "a/b/c", "a/c"),
        ("a/b", "b", "a/b/c/d", "b/c/d"),
    ],
)
def test_shift_path(anchor1, anchor2, path, solution):
    """Test the shift_path function."""
    # rebase `path` from `anchor1` onto `anchor2` and compare
    shifted = shift_path(PurePath(anchor1), PurePath(anchor2), PurePath(path))
    assert shifted == PurePath(solution)
26 |
27 |
# generic type variable used by the coroutine helper in `test_async_all`
T = TypeVar("T")
29 |
30 |
def test_async_all():
    """Test the `async_all` implementation."""

    async def wrap(value: T) -> T:
        # trivial coroutine resolving to the given value
        return value

    # an empty iterable is vacuously true
    assert asyncio.run(async_all([]))

    # homogeneous inputs
    assert asyncio.run(async_all([wrap(True) for _ in range(8)]))
    assert not asyncio.run(async_all([wrap(False) for _ in range(8)]))

    # a falsy element at the start or the end makes the whole result falsy
    assert not asyncio.run(async_all([wrap(False)] + [wrap(True) for _ in range(8)]))
    assert not asyncio.run(async_all([wrap(True) for _ in range(8)] + [wrap(False)]))

    # falsy elements scattered between truthy ones
    mixed = (
        [wrap(True) for _ in range(5)]
        + [wrap(False)]
        + [wrap(True) for _ in range(5)]
        + [wrap(False)]
        + [wrap(True) for _ in range(5)]
    )
    assert not asyncio.run(async_all(mixed))
59 |
60 |
def test_import_file(tmp_path: Path):
    """Test the `import_file` function."""
    # write a trivial python module to disk
    source = tmp_path / "module.py"
    source.write_text("a = 1")

    # import it and check its attributes are accessible
    module = import_file(source)

    assert module is not None
    assert hasattr(module, "a")
    assert module.a == 1
73 |
--------------------------------------------------------------------------------