├── .github
├── CODEOWNERS
├── codeql-config.yml
├── dependabot.yml
├── PULL_REQUEST_TEMPLATE.md
└── workflows
│ ├── publish_docs.yml
│ ├── static_analysis.yml
│ ├── codeql-analysis.yml
│ ├── tests.yml
│ └── release.yml
├── docs
├── tasks.md
└── gen_ref_pages.py
├── requirements.txt
├── .gitattributes
├── MANIFEST.in
├── prefect_cubejs
├── __init__.py
├── exceptions.py
├── utils.py
├── tasks.py
└── _version.py
├── requirements-dev.txt
├── .pre-commit-config.yaml
├── CHANGELOG.md
├── CONTRIBUTORS.md
├── setup.cfg
├── mkdocs.yml
├── setup.py
├── README.md
├── .gitignore
├── tests
├── test_tasks.py
└── test_jobs.py
├── MAINTAINERS.md
├── LICENSE
└── versioneer.py
/.github/CODEOWNERS:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/docs/tasks.md:
--------------------------------------------------------------------------------
1 | ::: prefect_cubejs.tasks
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | PyJWT>=2.3.0
2 | prefect>=2.0a13
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | prefect_cubejs/_version.py export-subst
2 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include versioneer.py
2 | include prefect_cubejs/_version.py
3 |
--------------------------------------------------------------------------------
/prefect_cubejs/__init__.py:
--------------------------------------------------------------------------------
"""Expose the versioneer-generated package version as ``__version__``."""
from ._version import get_versions

# Resolve the version string once at import time.
__version__ = get_versions()["version"]
4 |
--------------------------------------------------------------------------------
/.github/codeql-config.yml:
--------------------------------------------------------------------------------
1 | paths-ignore:
2 | - tests/**/test_*.py
3 | - versioneer.py
4 | - prefect_cubejs/_version.py
--------------------------------------------------------------------------------
/requirements-dev.txt:
--------------------------------------------------------------------------------
1 | pytest
2 | black
3 | flake8
4 | mypy
5 | mkdocs
6 | mkdocs-material
7 | mkdocstrings[python]
8 | isort
9 | pre-commit
10 | pytest-asyncio
11 | mock; python_version < '3.8'
12 | mkdocs-gen-files
13 | interrogate
14 | coverage
15 | responses
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 |
4 | - package-ecosystem: "pip"
5 | directory: "/"
6 | schedule:
7 | interval: "daily"
8 |
9 | - package-ecosystem: "github-actions"
10 | directory: "/"
11 | schedule:
12 | interval: "daily"
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | ## Summary
4 |
5 |
6 | ## Relevant Issue(s)
7 |
8 |
9 | ## Checklist
10 | - [ ] Summarized PR's changes in [CHANGELOG.md](https://github.com/AlessandroLollo/prefect-cubejs/blob/main/CHANGELOG.md)
11 |
--------------------------------------------------------------------------------
/prefect_cubejs/exceptions.py:
--------------------------------------------------------------------------------
1 | """
2 | Exceptions to be used when interacting with Cube.js
3 | """
4 |
5 |
class CubeJSConfigurationException(Exception):
    """Raised when a Cube.js task is misconfigured."""
12 |
13 |
class CubeJSAPIFailureException(Exception):
    """Raised when a Cube.js task fails to execute."""
20 |
--------------------------------------------------------------------------------
/docs/gen_ref_pages.py:
--------------------------------------------------------------------------------
"Copies README.md to index.md."

from pathlib import Path

import mkdocs_gen_files

readme_path = Path("README.md")
docs_index_path = Path("index.md")

# Mirror the repository README into the generated docs index page.
with mkdocs_gen_files.open(docs_index_path, "w") as generated_file:
    with open(readme_path, "r") as readme:
        generated_file.write(readme.read())

# Make the "edit this page" link for index.md point at README.md.
mkdocs_gen_files.set_edit_path(Path(docs_index_path), readme_path)
16 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: https://github.com/pycqa/isort
3 | rev: 5.12.0
4 | hooks:
5 | - id: isort
6 | language_version: python3
7 | - repo: https://github.com/psf/black
8 | rev: 22.3.0
9 | hooks:
10 | - id: black
11 | language_version: python3
12 | - repo: https://github.com/pycqa/flake8
13 | rev: 4.0.1
14 | hooks:
15 | - id: flake8
16 | - repo: https://github.com/econchick/interrogate
17 | rev: 1.5.0
18 | hooks:
19 | - id: interrogate
20 | args: [-vv]
21 | pass_filenames: false
22 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 |
3 | All notable changes to this project will be documented in this file.
4 |
5 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
6 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
7 |
8 | ## Unreleased
9 |
10 | ### Added
11 |
12 | ### Changed
13 |
14 | ### Deprecated
15 |
16 | ### Removed
17 |
18 | ### Fixed
19 |
20 | ### Security
21 |
22 | ## 0.1.0
23 |
24 | Released on ????? ?th, 20??.
25 |
26 | ### Added
27 |
28 | - `task_name` task - [#1](https://github.com/AlessandroLollo/prefect-cubejs/pull/1)
29 |
--------------------------------------------------------------------------------
/.github/workflows/publish_docs.yml:
--------------------------------------------------------------------------------
1 | name: Publish docs
2 |
3 | on:
4 | workflow_dispatch
5 |
6 | jobs:
7 | build-and-publish-docs:
8 | name: Build and publish docs
9 | runs-on: ubuntu-latest
10 |
11 | steps:
12 | - uses: actions/checkout@v3
13 |
14 | - name: Build docs
15 | run: |
16 | python -m pip install --upgrade pip
17 | python -m pip install --upgrade --upgrade-strategy eager -e .[dev]
18 | mkdocs build
19 | - name: Publish docs
20 | uses: JamesIves/github-pages-deploy-action@v4.4.3
21 | with:
22 | branch: docs
23 | folder: site
--------------------------------------------------------------------------------
/CONTRIBUTORS.md:
--------------------------------------------------------------------------------
1 | # Contributors
2 |
3 | ## Special thanks for all the people who had helped this project so far:
4 |
5 | * [name](https://link)
6 |
7 | ## I would like to join this list. How can I help the project?
8 |
9 | We're currently looking for contributions for the following:
10 |
11 | - [ ] Bug fixes
12 | - [ ] Additional tasks
13 | - [ ] Improved documentation
14 | - [ ] Any open issues
15 |
16 | And anything else you can think of!
17 |
18 | Uncertain about something? Let us help guide you by [submitting an issue](https://github.com/AlessandroLollo/prefect-cubejs/issues).
19 |
20 | For more information, please refer to our [MAINTAINERS](MAINTAINERS.md) guide.
21 |
--------------------------------------------------------------------------------
/.github/workflows/static_analysis.yml:
--------------------------------------------------------------------------------
1 | name: Static analysis
2 |
3 | on: [pull_request]
4 |
5 | jobs:
6 | pre-commit-checks:
7 | name: Pre-commit checks
8 | runs-on: ubuntu-latest
9 |
10 | steps:
11 | - uses: actions/checkout@v3
12 | with:
13 | persist-credentials: false
14 |
15 | - name: Set up Python
16 | uses: actions/setup-python@v4
17 | with:
18 | python-version: 3.9
19 |
20 | - name: Install pre-commit
21 | run: |
22 | python -m pip install --upgrade pip
23 | pip install pre-commit
24 |
25 | - name: Run pre-commit
26 | run: |
27 | pre-commit run --show-diff-on-failure --color=always --all-files
28 |
--------------------------------------------------------------------------------
/.github/workflows/codeql-analysis.yml:
--------------------------------------------------------------------------------
1 | name: CodeQL
2 |
3 | on:
4 | push:
5 | branches:
6 | - main
7 |
8 | jobs:
9 | analyze:
10 | name: Analyze
11 | runs-on: ubuntu-latest
12 | permissions:
13 | actions: read
14 | contents: read
15 | security-events: write
16 |
17 | strategy:
18 | fail-fast: false
19 | matrix:
20 | language:
21 | - python
22 |
23 | steps:
24 | - name: Checkout repository
25 | uses: actions/checkout@v3
26 |
27 | - name: Initialize CodeQL
28 | uses: github/codeql-action/init@v2
29 | with:
30 | languages: ${{ matrix.language }}
31 | config-file: ./.github/codeql-config.yml
32 | queries: security-and-quality
33 |
34 | - name: Perform CodeQL Analysis
35 | uses: github/codeql-action/analyze@v2
36 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [flake8]
2 | exclude = .git,__pycache__,build,dist
3 | per-file-ignores =
4 | setup.py:E501
5 | # Match black line-length
6 | max-line-length = 88
7 | extend-ignore =
8 | E203,
9 |
10 | [isort]
11 | skip = __init__.py
12 | profile = black
13 | skip_gitignore = True
14 | multi_line_output = 3
15 |
16 | [versioneer]
17 | VCS = git
18 | style = pep440
19 | versionfile_source = prefect_cubejs/_version.py
20 | versionfile_build = prefect_cubejs/_version.py
21 | tag_prefix = v
22 | parentdir_prefix =
23 |
24 | [tool:interrogate]
25 | ignore-init-module = True
26 | ignore_init_method = True
27 | exclude = prefect_cubejs/_version.py, tests, setup.py, versioneer.py, docs, site
28 | fail-under = 95
29 | omit-covered-files = True
30 |
31 | [coverage:run]
32 | omit = tests/*, prefect_cubejs/_version.py
33 |
34 | [coverage:report]
35 | fail_under = 80
36 | show_missing = True
37 |
38 | [tool:pytest]
39 | asyncio_mode = auto
40 |
--------------------------------------------------------------------------------
/.github/workflows/tests.yml:
--------------------------------------------------------------------------------
1 | name: Tests
2 |
3 | on: [pull_request]
4 |
5 | jobs:
6 | run-tests:
7 | name: Run Tests
8 | runs-on: ubuntu-latest
9 | strategy:
10 | matrix:
11 | python-version:
12 | - "3.7"
13 | - "3.8"
14 | - "3.9"
15 | - "3.10"
16 | fail-fast: false
17 | steps:
18 | - uses: actions/checkout@v3
19 |
20 | - name: Set up Python ${{ matrix.python-version }}
21 | uses: actions/setup-python@v4
22 | with:
23 | python-version: ${{ matrix.python-version }}
24 | cache: pip
25 | cache-dependency-path: requirements*.txt
26 |
27 | - name: Install dependencies
28 | run: |
29 | python -m pip install --upgrade pip
30 | python -m pip install --upgrade --upgrade-strategy eager -e ".[dev]"
31 |
32 | - name: Run tests
33 | env:
34 | PREFECT_ORION_DATABASE_CONNECTION_URL: "sqlite+aiosqlite:///./orion-tests.db"
35 | run: |
36 | coverage run --branch -m pytest tests -vv
37 | coverage report
38 |
--------------------------------------------------------------------------------
/mkdocs.yml:
--------------------------------------------------------------------------------
1 | site_name: prefect-cubejs
2 | site_url: https://AlessandroLollo.github.io/prefect-cubejs
3 | repo_url: https://github.com/AlessandroLollo/prefect-cubejs
4 | edit_uri: /edit/main/docs/
5 | theme:
6 | name: material
7 | favicon: img/favicon.ico
8 | palette:
9 | primary: blue
10 | accent: blue
11 | icon:
12 | repo: fontawesome/brands/github
13 | logo:
14 | img/prefect-logo-white.png
15 | font:
16 | text: Inter
17 | code: Source Code Pro
18 | extra_css:
19 | - stylesheets/extra.css
20 | markdown_extensions:
21 | - admonition
22 | - attr_list
23 | - codehilite
24 | - md_in_html
25 | - meta
26 | - pymdownx.highlight:
27 | use_pygments: true
28 | - pymdownx.superfences
29 | - pymdownx.tabbed
30 | plugins:
31 | - search
32 | - gen-files:
33 | scripts:
34 | - docs/gen_ref_pages.py
35 | - mkdocstrings:
36 | handlers:
37 | python:
38 | rendering:
39 | show_root_heading: True
40 | show_object_full_path: False
41 | show_category_heading: False
42 | show_bases: False
43 | show_signature: False
44 | heading_level: 1
45 | watch:
46 | - prefect_cubejs/
47 | - README.md
48 |
49 | nav:
50 | - Home: index.md
51 | - Tasks: tasks.md
52 |
53 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
"""Build script for the ``prefect-cubejs`` collection (versioneer-managed)."""
from setuptools import find_packages, setup

import versioneer


def _read_requirements(path):
    """Return the non-empty requirement lines from *path*."""
    # Explicit encoding keeps the build reproducible regardless of the
    # platform's default locale (e.g. cp1252 on Windows).
    with open(path, encoding="utf-8") as req_file:
        return [line.strip() for line in req_file if line.strip()]


install_requires = _read_requirements("requirements.txt")
dev_requires = _read_requirements("requirements-dev.txt")

# The README doubles as the PyPI long description.
with open("README.md", encoding="utf-8") as readme_file:
    readme = readme_file.read()

setup(
    name="prefect-cubejs",
    description="Prefect collection of tasks to interact with Cube.js",
    license="Apache License 2.0",
    author="Alessandro Lollo",
    author_email="alessandro.lollo@gmail.com",
    keywords="prefect",
    url="https://github.com/AlessandroLollo/prefect-cubejs",
    long_description=readme,
    long_description_content_type="text/markdown",
    version=versioneer.get_version(),
    cmdclass=versioneer.get_cmdclass(),
    packages=find_packages(exclude=("tests", "docs")),
    python_requires=">=3.7",
    install_requires=install_requires,
    extras_require={"dev": dev_requires},
    classifiers=[
        "Natural Language :: English",
        "Intended Audience :: Developers",
        "Intended Audience :: System Administrators",
        "License :: OSI Approved :: Apache Software License",
        "Programming Language :: Python :: 3 :: Only",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Topic :: Software Development :: Libraries",
    ],
)
43 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # prefect-cubejs
2 |
3 | ## Welcome!
4 |
5 | Prefect collection of tasks to interact with Cube.js
6 |
7 | ## Getting Started
8 |
9 | ### Python setup
10 |
11 | Requires an installation of Python 3.7+.
12 |
13 | We recommend using a Python virtual environment manager such as pipenv, conda or virtualenv.
14 |
15 | These tasks are designed to work with Prefect 2.0. For more information about how to use Prefect, please refer to the [Prefect documentation](https://orion-docs.prefect.io/).
16 |
17 | ### Installation
18 |
19 | Install `prefect-cubejs` with `pip`:
20 |
21 | ```bash
22 | pip install prefect-cubejs
23 | ```
24 |
25 | ### Write and run a flow
26 |
27 | ```python
28 | from prefect import flow
29 | from prefect_cubejs.tasks import (
30 | run_query
31 | )
32 |
33 |
34 | @flow
35 | def example_flow():
36 | run_query(
37 | subdomain="",
38 | api_secret="",
39 | query=""
40 | )
41 |
42 | example_flow()
43 | ```
44 |
45 | ## Resources
46 |
47 | If you encounter any bugs while using `prefect-cubejs`, feel free to open an issue in the [prefect-cubejs](https://github.com/AlessandroLollo/prefect-cubejs) repository.
48 |
49 | If you have any questions or issues while using `prefect-cubejs`, you can find help in either the [Prefect Discourse forum](https://discourse.prefect.io/) or the [Prefect Slack community](https://prefect.io/slack).
50 |
51 | ## Development
52 |
53 | If you'd like to install a version of `prefect-cubejs` for development, clone the repository and perform an editable install with `pip`:
54 |
55 | ```bash
56 | git clone https://github.com/AlessandroLollo/prefect-cubejs.git
57 |
58 | cd prefect-cubejs/
59 |
60 | pip install -e ".[dev]"
61 |
62 | # Install linting pre-commit hooks
63 | pre-commit install
64 | ```
65 |
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
1 | name: Build & Release
2 |
3 | on:
4 | push:
5 | tags:
6 | - "v*"
7 |
8 | jobs:
9 | build-release:
10 | name: Build Release
11 | runs-on: ubuntu-latest
12 | steps:
13 | - uses: actions/checkout@v3
14 |
15 | - name: Set up Python
16 | uses: actions/setup-python@v4
17 | with:
18 | python-version: 3.7
19 |
20 | - name: Install packages
21 | run: |
22 | python -m pip install --upgrade pip build
23 | python -m pip install --upgrade --upgrade-strategy eager -e .[dev]
24 |
25 | - name: Build a binary wheel and a source tarball
26 | run: |
27 | python -m build --sdist --wheel --outdir dist/
28 |
29 | - name: Publish build artifacts
30 | uses: actions/upload-artifact@v3.1.2
31 | with:
32 | name: built-package
33 | path: "./dist"
34 |
35 | publish-release:
36 | name: Publish release to PyPI
37 | needs: [build-release]
38 | environment: "prod"
39 | runs-on: ubuntu-latest
40 |
41 | steps:
42 | - name: Download build artifacts
43 | uses: actions/download-artifact@v3
44 | with:
45 | name: built-package
46 | path: "./dist"
47 |
48 | - name: Publish distribution to PyPI
49 | uses: pypa/gh-action-pypi-publish@release/v1
50 | with:
51 | password: ${{ secrets.PYPI_API_TOKEN }}
52 | verbose: true
53 |
54 | build-and-publish-docs:
55 | name: Build and publish docs
56 | needs: [build-release, publish-release]
57 | runs-on: ubuntu-latest
58 |
59 | steps:
60 | - uses: actions/checkout@v3
61 |
62 | - name: Build docs
63 | run: |
64 | python -m pip install --upgrade pip
65 | python -m pip install --upgrade --upgrade-strategy eager -e .[dev]
66 | mkdocs build
67 |
68 | - name: Publish docs
69 | uses: JamesIves/github-pages-deploy-action@v4.4.3
70 | with:
71 | branch: docs
72 | folder: site
73 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | *.py,cover
51 | .hypothesis/
52 | .pytest_cache/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | target/
76 |
77 | # Jupyter Notebook
78 | .ipynb_checkpoints
79 |
80 | # IPython
81 | profile_default/
82 | ipython_config.py
83 |
84 | # pyenv
85 | .python-version
86 |
87 | # pipenv
88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
91 | # install all needed dependencies.
92 | #Pipfile.lock
93 |
94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
95 | __pypackages__/
96 |
97 | # Celery stuff
98 | celerybeat-schedule
99 | celerybeat.pid
100 |
101 | # SageMath parsed files
102 | *.sage.py
103 |
104 | # Environments
105 | .env
106 | .venv
107 | env/
108 | venv/
109 | ENV/
110 | env.bak/
111 | venv.bak/
112 |
113 | # Spyder project settings
114 | .spyderproject
115 | .spyproject
116 |
117 | # Rope project settings
118 | .ropeproject
119 |
120 | # mkdocs documentation
121 | /site
122 |
123 | # mypy
124 | .mypy_cache/
125 | .dmypy.json
126 | dmypy.json
127 |
128 | # Pyre type checker
129 | .pyre/
130 |
131 | # OS files
132 | .DS_Store
133 |
134 | # VS Code
135 | .vscode
136 |
137 | # Jupyter notebook
138 | *.ipynb
139 |
140 | # Local E2E test scripts
141 | tests/e2e_no_wait_flow.py
142 | tests/e2e_wait_flow.py
--------------------------------------------------------------------------------
/tests/test_tasks.py:
--------------------------------------------------------------------------------
1 | from urllib.parse import quote_plus
2 |
3 | import jwt
4 | import pytest
5 | import responses
6 | from prefect import flow
7 |
8 | from prefect_cubejs.exceptions import (
9 | CubeJSAPIFailureException,
10 | CubeJSConfigurationException,
11 | )
12 | from prefect_cubejs.tasks import run_query
13 |
14 |
def test_run_with_no_values_raises():
    """run_query must fail fast when neither `subdomain` nor `url` is given."""

    @flow(name="test_flow_1")
    def test_flow():
        return run_query(query={"measure": "count"})

    expected_msg = "Missing both `subdomain` and `url`."
    with pytest.raises(CubeJSConfigurationException, match=expected_msg):
        test_flow()
23 |
24 |
def test_run_without_api_secret_api_secret_env_var():
    """run_query must fail when no API secret is supplied in any form.

    Neither the `api_secret` argument nor the `api_secret_env_var`
    environment variable is provided, so configuration validation should
    raise before any API call is attempted.
    """

    @flow(name="test_flow_2")
    def test_flow():
        return run_query(subdomain="xyz", query={"measure": "count"})

    msg_match = "Missing `api_secret` and `api_secret_env_var` not found."
    with pytest.raises(CubeJSConfigurationException, match=msg_match):
        # Call the flow directly, consistent with every other test in this
        # module; the double `.result().result()` unwrapping was a leftover
        # from an earlier Prefect 2 alpha state-returning API.
        test_flow()
33 |
34 |
def test_run_without_query_raises():
    """run_query must reject a missing (None) query."""

    @flow(name="test_flow_3")
    def test_flow():
        return run_query(subdomain="xyz", api_secret="secret", query=None)

    expected_msg = "Missing `query`."
    with pytest.raises(CubeJSConfigurationException, match=expected_msg):
        test_flow()
43 |
44 |
@responses.activate
def test_run_with_failing_api_raises():
    """A non-success HTTP status from the load API surfaces as an API failure."""
    # Stub the Cube Cloud load endpoint with an arbitrary non-2xx status.
    responses.add(
        responses.GET, "https://test.cubecloud.dev/cubejs-api/v1/load", status=123
    )

    @flow(name="test_flow_4")
    def test_flow():
        return run_query(
            subdomain="test", api_secret="foo", query={"measures": "count"}
        )

    with pytest.raises(CubeJSAPIFailureException, match="Cube.js load API failed!"):
        test_flow()
60 |
61 |
@responses.activate
def test_run_with_continue_waiting():
    """The task retries while the API answers "Continue wait", then returns data."""
    api_url = "https://test.cubecloud.dev/cubejs-api/v1/load"

    # First call: Cube.js signals that the result is not ready yet.
    responses.add(responses.GET, api_url, status=200, json={"error": "Continue wait"})
    # Second call: the actual payload.
    responses.add(responses.GET, api_url, status=200, json={"data": "result"})

    @flow(name="test_flow_5")
    def test_flow():
        return run_query(
            subdomain="test", api_secret="foo", query={"measures": "count"}
        )

    data = test_flow()

    # The query is URL-encoded into the request's `query` parameter, and the
    # endpoint must have been hit exactly twice (initial call + one retry).
    expected_url = api_url + "?query=" + quote_plus('{"measures": "count"}')
    assert responses.assert_call_count(expected_url, 2) is True
    assert isinstance(data, dict)
92 |
93 |
@responses.activate
def test_run_with_security_context():
    """A provided security context is JWT-encoded into the Authorization header."""
    responses.add(
        responses.GET,
        "https://test.cubecloud.dev/cubejs-api/v1/load",
        status=200,
        json={"data": "result"},
    )

    @flow(name="test_flow_6")
    def test_flow():
        return run_query(
            subdomain="test",
            api_secret="foo",
            query={"measures": "count"},
            security_context={"foo": "bar"},
        )

    test_flow()

    # The task is expected to add `expiresIn` to the context before signing
    # with the API secret (HS256).
    expected_token = jwt.encode(
        payload={"foo": "bar", "expiresIn": "7d"}, key="foo", algorithm="HS256"
    )
    assert responses.calls[0].request.headers["Authorization"] == expected_token
119 |
120 |
@responses.activate
def test_run_with_max_wait_time_raises():
    """Perpetual "Continue wait" responses must time out after `max_wait_time`."""
    responses.add(
        responses.GET,
        "https://test.cubecloud.dev/cubejs-api/v1/load",
        status=200,
        json={"error": "Continue wait"},
    )

    @flow(name="test_flow_7")
    def test_flow():
        return run_query(
            subdomain="test",
            api_secret="foo",
            query={"measures": "count"},
            security_context={"foo": "bar"},
            wait_time_between_api_calls=1,
            max_wait_time=3,
        )

    expected_msg = "Cube.js API took longer than 3 seconds to provide a response."
    with pytest.raises(CubeJSAPIFailureException, match=expected_msg):
        test_flow()
145 |
146 |
@responses.activate
def test_run_with_include_generated_sql():
    """With `include_generated_sql=True` the result merges load data and SQL."""
    # Stub both the load endpoint and the sql endpoint.
    responses.add(
        responses.GET,
        "https://test.cubecloud.dev/cubejs-api/v1/load",
        status=200,
        json={"data": "result"},
    )
    responses.add(
        responses.GET,
        "https://test.cubecloud.dev/cubejs-api/v1/sql",
        status=200,
        json={"sql": "sql"},
    )

    @flow(name="test_flow_8")
    def test_flow():
        return run_query(
            subdomain="test",
            api_secret="foo",
            query={"measures": "count"},
            include_generated_sql=True,
        )

    data = test_flow()

    assert isinstance(data, dict)
    assert "data" in data
    assert "sql" in data
--------------------------------------------------------------------------------
/MAINTAINERS.md:
--------------------------------------------------------------------------------
1 | # prefect-cubejs
2 |
3 | ## Getting Started
4 |
5 | ### Python setup
6 |
7 | Requires an installation of Python 3.7+
8 |
9 | We recommend using a Python virtual environment manager such as pipenv, conda or virtualenv.
10 |
11 | ### GitHub setup
12 |
13 | Generate a Prefect Collection project in the terminal:
14 |
15 | ```bash
16 | cookiecutter https://github.com/PrefectHQ/prefect-collection-template
17 | ```
18 |
19 | Create a Git repository for the newly generated collection and create the first commit:
20 |
21 | ```bash
22 | git init
23 | git add .
24 | git commit -m "Initial commit: project generated by prefect-collection-template"
25 | ```
26 |
27 | Then, create a new repo following the prompts at:
28 | https://github.com/organizations/AlessandroLollo/repositories/new
29 |
30 | Upon creation, push the repository to GitHub:
31 | ```bash
32 | git remote add origin https://github.com/AlessandroLollo/prefect-cubejs.git
33 | git branch -M main
34 | git push -u origin main
35 | ```
36 |
37 | It's recommended to setup some protection rules for main at:
38 | https://github.com/AlessandroLollo/prefect-cubejs/settings/branches
39 |
40 | - Require a pull request before merging
41 | - Require approvals
42 |
43 | Lastly, [code owners](https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners) for the repository can be set, like this [example here](https://github.com/PrefectHQ/prefect/blob/master/.github/CODEOWNERS).
44 |
45 | ### Project setup
46 |
47 | To set up your project, run the following:
48 |
49 | ```bash
50 | # Create an editable install of your project
51 | pip install -e ".[dev]"
52 |
53 | # Configure pre-commit hooks
54 | pre-commit install
55 | ```
56 |
57 | To verify the set up was successful you can run the following:
58 |
59 | - Run the tests for tasks and flows in the collection:
60 | ```bash
61 | pytest tests
62 | ```
63 | - Serve the docs with `mkdocs`:
64 | ```bash
65 | mkdocs serve
66 | ```
67 |
68 | ## Developing tasks and flows
69 |
70 | For information about the use and development of tasks and flow, check out the [flows](https://orion-docs.prefect.io/concepts/flows/) and [tasks](https://orion-docs.prefect.io/concepts/tasks/) concepts docs in the Prefect docs.
71 |
72 | ## Writing documentation
73 |
74 | This collection has been set up with [mkdocs](https://www.mkdocs.org/) for automatically generated documentation. The signatures and docstrings of your tasks and flow will be used to generate documentation for the users of this collection. You can make changes to the structure of the generated documentation by editing the `mkdocs.yml` file in this project.
75 |
76 | To add a new page for a module in your collection, create a new markdown file in the `docs` directory and add that file to the `nav` section of `mkdocs.yml`. If you want to automatically generate documentation based on the docstrings and signatures of the contents of the module with `mkdocstrings`, add a line to the new markdown file in the following format:
77 |
78 | ```markdown
79 | ::: prefect_cubejs.{module_name}
80 | ```
81 |
82 | You can also refer to the `flows.md` and `tasks.md` files included in your generated project as examples.
83 |
84 | ## Development lifecycle
85 |
86 | ### CI Pipeline
87 |
88 | This collection comes with [GitHub Actions](https://docs.github.com/en/actions) for testing and linting. To add additional actions, you can add jobs in the `.github/workflows` folder. On pull request, the pipeline will run linting via [`black`](https://black.readthedocs.io/en/stable/), [`flake8`](https://flake8.pycqa.org/en/latest/), [`interrogate`](https://interrogate.readthedocs.io/en/latest/), and unit tests via `pytest` alongside `coverage`.
89 |
90 | `interrogate` will tell you which methods, functions, classes, and modules have docstrings, and which do not--the job has a fail threshold of 95%, meaning that it will fail if more than 5% of the codebase is undocumented. We recommend following the [Google Python Style Guide](https://google.github.io/styleguide/pyguide.html#38-comments-and-docstrings) for docstring format.
91 |
92 | Similarly, `coverage` ensures that the codebase includes tests--the job has a fail threshold of 80%, meaning that it will fail if more than 20% of the codebase is missing tests.
93 |
94 | ### Package and Publish
95 |
96 | GitHub actions will handle packaging and publishing of your collection to [PyPI](https://pypi.org/) so other Prefect users can use your collection in their flows.
97 |
98 | In order to publish to PyPI, you'll need a PyPI account and generate an API token to authenticate with PyPI when publishing new versions of your collection. The [PyPI documentation](https://pypi.org/help/#apitoken) outlines the steps needed to get an API token.
99 |
100 | Once you've obtained a PyPI API token, [create a GitHub secret](https://docs.github.com/en/actions/security-guides/encrypted-secrets#creating-encrypted-secrets-for-a-repository) named `PYPI_API_TOKEN`.
101 |
102 | To publish a new version of your collection, [create a new GitHub release](https://docs.github.com/en/repositories/releasing-projects-on-github/managing-releases-in-a-repository#creating-a-release) and tag it with the version that you want to deploy (e.g. v0.3.2). This will trigger a workflow to publish the new version on PyPI and deploy the updated docs to GitHub pages.
103 |
104 | Upon publishing, a `docs` branch is automatically created. To hook this up to GitHub Pages, simply head over to https://github.com/AlessandroLollo/prefect-cubejs/settings/pages, select `docs` under the dropdown menu, keep the default `/root` folder, `Save`, and upon refresh, you should see a prompt stating "Your site is published at https://AlessandroLollo.github.io/prefect-cubejs".
105 |
106 | ## Further guidance
107 |
108 | If you run into any issues during the bootstrapping process, feel free to open an issue in the [prefect-collection-template](https://github.com/PrefectHQ/prefect-collection-template) repository.
109 |
110 | If you have any questions or issues while developing your collection, you can find help in either the [Prefect Discourse forum](https://discourse.prefect.io/) or the [Prefect Slack community](https://prefect.io/slack).
111 |
--------------------------------------------------------------------------------
/prefect_cubejs/utils.py:
--------------------------------------------------------------------------------
1 | """
2 | Cube.js utils classes
3 | """
4 | import time
5 | from typing import Dict, Union
6 |
7 | import jwt
8 | from requests import Session
9 |
10 | from prefect_cubejs.exceptions import CubeJSAPIFailureException
11 |
12 |
class CubeJSClient:
    """
    Class that represents a Cube.js client that can be used
    to interact with Cube.js APIs, either on Cube Cloud (identified by
    `subdomain`) or on a self-hosted deployment (identified by `url`).
    """

    # Cube Cloud base URL template; `subdomain` is interpolated per client.
    __CUBEJS_CLOUD_BASE_URL = "https://{subdomain}.cubecloud.dev"

    def __init__(
        self,
        subdomain: str,
        url: str,
        security_context: Union[str, Dict],
        secret: str,
        wait_api_call_secs: int,
        max_wait_time: int,
    ):
        """
        Initialize a `CubeJSClient`.
        The client can be used to interact with Cube.js APIs.

        Args:
            - subdomain (str): Cube Cloud subdomain.
            - url (str): Cube.js URL (likely to be used in self-hosted Cube.js
                deployments).
            - security_context (str, dict): The security context to be used
                when interacting with Cube.js APIs.
            - secret (str): The secret string to be used, together with the
                `security_context`, to generate the API token to pass in the
                authorization header.
            - wait_api_call_secs (int): Number of seconds to wait
                between API calls.
            - max_wait_time (int): The maximum amount of seconds to wait for
                an API call to respond.
        """
        self.subdomain = subdomain
        self.url = url
        self.security_context = security_context
        self.secret = secret
        # Derived values: resolve the base URL first, then the API token and
        # the endpoint URLs built from the base URL.
        self.cube_base_url = self._get_cube_base_url()
        self.api_token = self.get_api_token()
        self.query_api_url = self._get_query_api_url()
        self.generated_sql_api_url = self._get_generated_sql_api_url()
        self.pre_aggregations_jobs_api_url = self._get_pre_aggregations_jobs_api_url()
        self.wait_api_call_secs = wait_api_call_secs
        self.max_wait_time = max_wait_time

    def _get_cube_base_url(self) -> str:
        """
        Get Cube.js base URL.
        `subdomain` (Cube Cloud) takes precedence over `url` (self-hosted).

        Returns:
            - Cube.js API base url.
        """
        cube_base_url = self.__CUBEJS_CLOUD_BASE_URL
        if self.subdomain:
            cube_base_url = (
                f"{cube_base_url.format(subdomain=self.subdomain)}/cubejs-api"
            )
        else:
            cube_base_url = self.url
        return cube_base_url

    def _get_query_api_url(self) -> str:
        """
        Get Cube.js Query API URL.

        Returns:
            - Cube.js Query API URL.
        """
        return f"{self.cube_base_url}/v1/load"

    def _get_generated_sql_api_url(self) -> str:
        """
        Get Cube.js Query SQL API URL.

        Returns:
            - Cube.js Query SQL API URL.
        """

        return f"{self.cube_base_url}/v1/sql"

    def _get_pre_aggregations_jobs_api_url(self) -> str:
        """
        Get Cube Pre-Aggregations Jobs API URL.

        Returns:
            - Cube Pre-Aggregations Jobs API URL.
        """
        return f"{self.cube_base_url}/v1/pre-aggregations/jobs"

    def get_api_token(self) -> str:
        """
        Build API Token given the security context and the secret.

        Returns:
            - The API Token to include in the authorization headers
                when calling Cube.js APIs.
        """
        # Default token: empty payload signed with the secret.
        api_token = jwt.encode(payload={}, key=self.secret)
        if self.security_context:

            # Copy the context (when it is a dict) before extending it, so
            # the caller-supplied object is not mutated as a side effect.
            extended_context = (
                dict(self.security_context)
                if isinstance(self.security_context, dict)
                else self.security_context
            )
            if (
                "exp" not in self.security_context
                and "expiresIn" not in self.security_context
            ):
                # Add a default 7-day expiration when none was provided.
                extended_context["expiresIn"] = "7d"
            api_token = jwt.encode(
                payload=extended_context, key=self.secret, algorithm="HS256"
            )

        return api_token

    def _get_data_from_url(self, api_url: str, params: Dict) -> Dict:
        """
        Retrieve data from a Cube.js API, polling while the API replies with
        the "Continue wait" marker.

        Args:
            - api_url (str): The URL of the Cube API to call.
            - params (dict): Parameters to be passed to the API call.

        Raises:
            - `CubeJSAPIFailureException` if the response has `status_code != 200`.
            - `CubeJSAPIFailureException` if the REST APIs takes too long to respond,
                with regards to `max_wait_time`.

        Returns:
            - Cube.js REST API JSON response
        """
        session = Session()
        session.headers = {
            "Content-type": "application/json",
            "Authorization": self.api_token,
        }
        elapsed_wait_time = 0
        # A falsy `max_wait_time` (None/0) means "poll indefinitely".
        while not self.max_wait_time or elapsed_wait_time <= self.max_wait_time:

            with session.get(url=api_url, params=params) as response:
                if response.status_code == 200:
                    data = response.json()

                    # "Continue wait" means the query is still running:
                    # sleep and retry.
                    if "error" in data and "Continue wait" in data["error"]:
                        time.sleep(self.wait_api_call_secs)
                        elapsed_wait_time += self.wait_api_call_secs
                        continue

                    else:
                        return data

                else:
                    msg = f"Cube.js load API failed! Error is: {response.reason}"
                    raise CubeJSAPIFailureException(msg)
        msg = f"""
        Cube.js API took longer than {self.max_wait_time} seconds to provide a response.
        """
        raise CubeJSAPIFailureException(msg)

    def get_data(
        self,
        params: Dict,
        include_generated_sql: bool,
    ) -> Dict:
        """
        Retrieve data from Cube.js `/load` REST API.

        Args:
            - params (dict): Parameters to pass to the `/load` REST API.
            - include_generated_sql (bool): Whether to include the
                corresponding generated SQL or not.

        Returns:
            - Cube.js `/load` API JSON response, augmented with SQL
                information if `include_generated_sql` is `True`.
        """
        data = self._get_data_from_url(api_url=self.query_api_url, params=params)

        if include_generated_sql:
            # Issue the same query against the SQL endpoint and attach the
            # generated SQL to the result.
            data["sql"] = self._get_data_from_url(
                api_url=self.generated_sql_api_url, params=params
            )["sql"]

        return data

    def pre_aggregations_jobs(
        self,
        query: Dict,
    ) -> Dict:
        """
        Call Cube `pre-aggregations/jobs` REST API enpoint and return list of
        added jobs ids as a JSON object.

        Args:
            - query (dict): Parameters to pass to the `pre-aggregations/jobs` REST API.

        Raises:
            - `CubeJSAPIFailureException` if the response has `status_code != 200`.

        Returns:
            - Cube `pre-aggregations/jobs` API JSON response.
        """

        session = Session()
        session.headers = {
            "Content-type": "application/json",
            "Authorization": self.api_token,
        }

        # `query` is expected to already be a JSON-encoded string, matching
        # the declared Content-type.
        with session.post(
            url=self.pre_aggregations_jobs_api_url, data=query
        ) as response:
            if response.status_code == 200:
                res = response.json()
                return res
            else:
                msg = f"""
                Cube `pre-aggregations/jobs` API failed: {response.reason}
                """
                raise CubeJSAPIFailureException(msg)
230 |
--------------------------------------------------------------------------------
/prefect_cubejs/tasks.py:
--------------------------------------------------------------------------------
1 | """
2 | Collection of tasks to interact with Cube.js
3 | """
4 | import json
5 | import os
6 | import time
7 | from typing import Dict, List, Optional, Union
8 |
9 | from prefect import get_run_logger, task
10 |
11 | from prefect_cubejs.exceptions import (
12 | CubeJSAPIFailureException,
13 | CubeJSConfigurationException,
14 | )
15 | from prefect_cubejs.utils import CubeJSClient
16 |
17 |
@task
def run_query(
    query: Union[Dict, List[Dict]],
    subdomain: Optional[str] = None,
    url: Optional[str] = None,
    api_secret: Optional[str] = None,
    api_secret_env_var: Optional[str] = "CUBEJS_API_SECRET",
    include_generated_sql: Optional[bool] = False,
    security_context: Optional[Union[str, Dict]] = None,
    wait_time_between_api_calls: Optional[int] = 10,
    max_wait_time: Optional[int] = None,
) -> Dict:
    """
    This task calls Cube.js load API and returns the result
    as a JSON object.
    More info about Cube.js load API at
    https://cube.dev/docs/rest-api#api-reference-v-1-load.

    Args:
        query: `dict` or `list` representing
            valid Cube.js queries.
            If you pass multiple queries, then be aware of Cube.js Data Blending.
            More info at https://cube.dev/docs/rest-api#api-reference-v-1-load
            and at https://cube.dev/docs/schema/advanced/data-blending.
            Query format can be found at: https://cube.dev/docs/query-format.
        subdomain: The subdomain to use to get the data.
            If provided, `subdomain` takes precedence over `url`.
            This is likely to be useful to Cube Cloud users.
        url: The URL to use to get the data.
            This is likely to be useful to users of self-hosted Cube.js.
        api_secret: The API secret used to generate an
            API token for authentication.
            If provided, it takes precedence over `api_secret_env_var`.
        api_secret_env_var: The name of the env var that contains
            the API secret to use to generate an API token for authentication.
            Defaults to `CUBEJS_API_SECRET`.
        include_generated_sql: Whether the return object should
            include SQL info or not.
            Default to `False`.
        security_context: The security context to use
            during authentication.
            If the security context does not contain an expiration period,
            then a 7-day expiration period is added automatically.
            More info at: https://cube.dev/docs/security/context.
        wait_time_between_api_calls: The number of seconds to
            wait between API calls.
            Default to 10.
        max_wait_time: The number of seconds to wait for the
            Cube.js load API to return a response.

    Raises:
        CubeJSConfigurationException: If both `subdomain` and `url` are missing.
        CubeJSConfigurationException: If `api_secret` is missing
            and `api_secret_env_var` cannot be found.
        CubeJSConfigurationException: If `query` is missing.
        CubeJSAPIFailureException: If the Cube.js load API fails.
        CubeJSAPIFailureException: If the Cube.js load API takes more than
            `max_wait_time` seconds to respond.

    Returns:
        The Cube.js JSON response, augmented with SQL
        information if `include_generated_sql` is `True`.
    """

    # Validate configuration before touching the network.
    if not subdomain and not url:
        msg = "Missing both `subdomain` and `url`."
        raise CubeJSConfigurationException(msg)

    if not api_secret and api_secret_env_var not in os.environ:
        msg = "Missing `api_secret` and `api_secret_env_var` not found."
        raise CubeJSConfigurationException(msg)

    if not query:
        msg = "Missing `query`."
        raise CubeJSConfigurationException(msg)

    # The explicit `api_secret` wins over the environment variable.
    secret = api_secret if api_secret else os.environ[api_secret_env_var]

    # Fall back to the 10s default when the interval is None or non-positive.
    wait_api_call_secs = (
        wait_time_between_api_calls
        if wait_time_between_api_calls and wait_time_between_api_calls > 0
        else 10
    )

    cubejs_client = CubeJSClient(
        subdomain=subdomain,
        url=url,
        security_context=security_context,
        secret=secret,
        wait_api_call_secs=wait_api_call_secs,
        max_wait_time=max_wait_time,
    )

    # The load API expects the query as a JSON-encoded `query` parameter.
    params = {"query": json.dumps(query)}

    # Retrieve data from Cube.js
    data = cubejs_client.get_data(
        params=params, include_generated_sql=include_generated_sql
    )

    return data
117 |
118 |
@task
def build_pre_aggregations(
    subdomain: Optional[str] = None,
    url: Optional[str] = None,
    api_secret: Optional[str] = None,
    api_secret_env_var: Optional[str] = "CUBEJS_API_SECRET",
    security_context: Optional[Union[str, Dict]] = None,
    selector: Dict = None,
    wait_for_job_run_completion: bool = False,
    wait_time_between_api_calls: Optional[int] = 10,
):
    """
    This task triggers a Cube pre-aggregations build and, optionally,
    waits for its completion.

    Args:
        subdomain: The subdomain to use to get the data.
            If provided, `subdomain` takes precedence over `url`.
            This is likely to be useful to Cube Cloud users.
        url: The URL to use to get the data.
            This is likely the preferred method for self-hosted Cube
            deployments.
            For Cube Cloud deployments, the URL should be in the form
            `https://<subdomain>.cubecloud.dev/cubejs-api`.
        api_secret: The API secret used to generate an
            API token for authentication.
            If provided, it takes precedence over `api_secret_env_var`.
        api_secret_env_var: The name of the env var that contains
            the API secret to use to generate an API token for authentication.
            Defaults to `CUBEJS_API_SECRET`.
        security_context: The security context to use
            during authentication.
            If the security context does not contain an expiration period,
            then a 7-day expiration period is added automatically.
            More info at https://cube.dev/docs/security/context.
        selector: `dict` representing valid Cube `pre-aggregations/jobs`
            API `selector` object.
        wait_for_job_run_completion: Whether the task should wait
            for the job run completion or not.
            Default to False.
        wait_time_between_api_calls: The number of seconds to
            wait between API calls.
            Default to 10.

    Raises:
        CubeJSConfigurationException: If both `subdomain` and `url` are missing.
        CubeJSConfigurationException: If `api_secret` is missing and
            `api_secret_env_var` cannot be found.
        CubeJSConfigurationException: If `selector` is missing.
        CubeJSAPIFailureException: If the Cube `pre-aggregations/jobs` API fails.
        CubeJSAPIFailureException: If any pre-aggregations were not built.

    Returns:
        The Cube `pre-aggregations/jobs` API trigger run result if
        `wait_for_job_run_completion` is `False`; `True` if
        `wait_for_job_run_completion` is `True` and pre-aggregations were
        successfully built (raises otherwise).
    """

    logger = get_run_logger()

    # Validate configuration before touching the network.
    if not subdomain and not url:
        raise CubeJSConfigurationException("Missing both `subdomain` and `url`.")

    if not api_secret and api_secret_env_var not in os.environ:
        raise CubeJSConfigurationException(
            "Missing `api_secret` and `api_secret_env_var` not found."
        )

    if not selector:
        raise CubeJSConfigurationException("Missing `selector`.")

    # Build the client used both to trigger the build and to poll job status.
    # Wait/timeout settings are unused here because polling is driven locally.
    secret = api_secret if api_secret else os.environ[api_secret_env_var]
    cubejs_client = CubeJSClient(
        subdomain=subdomain,
        url=url,
        security_context=security_context,
        secret=secret,
        wait_api_call_secs=None,
        max_wait_time=None,
    )

    # Trigger the build and collect the job tokens it scheduled.
    query = json.dumps(
        {
            "action": "post",
            "selector": selector,
        }
    )
    tokens = cubejs_client.pre_aggregations_jobs(query=query)
    if not wait_for_job_run_completion:
        return tokens

    # Poll the jobs API until every job has left the in-process state.
    iterate = len(tokens) > 0
    while iterate:

        # Sleep first: the jobs were only just scheduled.
        logger.info(
            f"waiting {wait_time_between_api_calls}sec for the job completion..."
        )
        time.sleep(wait_time_between_api_calls)
        query = json.dumps(
            {
                "action": "get",
                "resType": "object",
                "tokens": tokens,
            }
        )
        statuses = cubejs_client.pre_aggregations_jobs(query=query)

        # Inspect every job: fail fast on failures, track jobs still in
        # process, and detect the everything-missing case.
        missing_only = True
        in_process = []
        for token, job in statuses.items():
            status = job["status"]
            if "failure" in status:
                msg = f"""
                Cube pre-aggregations build failed: {status}.
                """
                raise CubeJSAPIFailureException(msg)
            if status != "missing_partition":
                missing_only = False
            if status != "done":
                in_process.append(token)

        # Every partition missing means nothing could be built at all.
        if missing_only:
            msg = """
            Cube pre-aggregations build failed: missing partitions.
            """
            raise CubeJSAPIFailureException(msg)

        iterate = len(in_process) > 0

    # All jobs reported "done".
    return True
257 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 |
2 | Apache License
3 | Version 2.0, January 2004
4 | http://www.apache.org/licenses/
5 |
6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7 |
8 | 1. Definitions.
9 |
10 | "License" shall mean the terms and conditions for use, reproduction,
11 | and distribution as defined by Sections 1 through 9 of this document.
12 |
13 | "Licensor" shall mean the copyright owner or entity authorized by
14 | the copyright owner that is granting the License.
15 |
16 | "Legal Entity" shall mean the union of the acting entity and all
17 | other entities that control, are controlled by, or are under common
18 | control with that entity. For the purposes of this definition,
19 | "control" means (i) the power, direct or indirect, to cause the
20 | direction or management of such entity, whether by contract or
21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
22 | outstanding shares, or (iii) beneficial ownership of such entity.
23 |
24 | "You" (or "Your") shall mean an individual or Legal Entity
25 | exercising permissions granted by this License.
26 |
27 | "Source" form shall mean the preferred form for making modifications,
28 | including but not limited to software source code, documentation
29 | source, and configuration files.
30 |
31 | "Object" form shall mean any form resulting from mechanical
32 | transformation or translation of a Source form, including but
33 | not limited to compiled object code, generated documentation,
34 | and conversions to other media types.
35 |
36 | "Work" shall mean the work of authorship, whether in Source or
37 | Object form, made available under the License, as indicated by a
38 | copyright notice that is included in or attached to the work
39 | (an example is provided in the Appendix below).
40 |
41 | "Derivative Works" shall mean any work, whether in Source or Object
42 | form, that is based on (or derived from) the Work and for which the
43 | editorial revisions, annotations, elaborations, or other modifications
44 | represent, as a whole, an original work of authorship. For the purposes
45 | of this License, Derivative Works shall not include works that remain
46 | separable from, or merely link (or bind by name) to the interfaces of,
47 | the Work and Derivative Works thereof.
48 |
49 | "Contribution" shall mean any work of authorship, including
50 | the original version of the Work and any modifications or additions
51 | to that Work or Derivative Works thereof, that is intentionally
52 | submitted to Licensor for inclusion in the Work by the copyright owner
53 | or by an individual or Legal Entity authorized to submit on behalf of
54 | the copyright owner. For the purposes of this definition, "submitted"
55 | means any form of electronic, verbal, or written communication sent
56 | to the Licensor or its representatives, including but not limited to
57 | communication on electronic mailing lists, source code control systems,
58 | and issue tracking systems that are managed by, or on behalf of, the
59 | Licensor for the purpose of discussing and improving the Work, but
60 | excluding communication that is conspicuously marked or otherwise
61 | designated in writing by the copyright owner as "Not a Contribution."
62 |
63 | "Contributor" shall mean Licensor and any individual or Legal Entity
64 | on behalf of whom a Contribution has been received by Licensor and
65 | subsequently incorporated within the Work.
66 |
67 | 2. Grant of Copyright License. Subject to the terms and conditions of
68 | this License, each Contributor hereby grants to You a perpetual,
69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70 | copyright license to reproduce, prepare Derivative Works of,
71 | publicly display, publicly perform, sublicense, and distribute the
72 | Work and such Derivative Works in Source or Object form.
73 |
74 | 3. Grant of Patent License. Subject to the terms and conditions of
75 | this License, each Contributor hereby grants to You a perpetual,
76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77 | (except as stated in this section) patent license to make, have made,
78 | use, offer to sell, sell, import, and otherwise transfer the Work,
79 | where such license applies only to those patent claims licensable
80 | by such Contributor that are necessarily infringed by their
81 | Contribution(s) alone or by combination of their Contribution(s)
82 | with the Work to which such Contribution(s) was submitted. If You
83 | institute patent litigation against any entity (including a
84 | cross-claim or counterclaim in a lawsuit) alleging that the Work
85 | or a Contribution incorporated within the Work constitutes direct
86 | or contributory patent infringement, then any patent licenses
87 | granted to You under this License for that Work shall terminate
88 | as of the date such litigation is filed.
89 |
90 | 4. Redistribution. You may reproduce and distribute copies of the
91 | Work or Derivative Works thereof in any medium, with or without
92 | modifications, and in Source or Object form, provided that You
93 | meet the following conditions:
94 |
95 | (a) You must give any other recipients of the Work or
96 | Derivative Works a copy of this License; and
97 |
98 | (b) You must cause any modified files to carry prominent notices
99 | stating that You changed the files; and
100 |
101 | (c) You must retain, in the Source form of any Derivative Works
102 | that You distribute, all copyright, patent, trademark, and
103 | attribution notices from the Source form of the Work,
104 | excluding those notices that do not pertain to any part of
105 | the Derivative Works; and
106 |
107 | (d) If the Work includes a "NOTICE" text file as part of its
108 | distribution, then any Derivative Works that You distribute must
109 | include a readable copy of the attribution notices contained
110 | within such NOTICE file, excluding those notices that do not
111 | pertain to any part of the Derivative Works, in at least one
112 | of the following places: within a NOTICE text file distributed
113 | as part of the Derivative Works; within the Source form or
114 | documentation, if provided along with the Derivative Works; or,
115 | within a display generated by the Derivative Works, if and
116 | wherever such third-party notices normally appear. The contents
117 | of the NOTICE file are for informational purposes only and
118 | do not modify the License. You may add Your own attribution
119 | notices within Derivative Works that You distribute, alongside
120 | or as an addendum to the NOTICE text from the Work, provided
121 | that such additional attribution notices cannot be construed
122 | as modifying the License.
123 |
124 | You may add Your own copyright statement to Your modifications and
125 | may provide additional or different license terms and conditions
126 | for use, reproduction, or distribution of Your modifications, or
127 | for any such Derivative Works as a whole, provided Your use,
128 | reproduction, and distribution of the Work otherwise complies with
129 | the conditions stated in this License.
130 |
131 | 5. Submission of Contributions. Unless You explicitly state otherwise,
132 | any Contribution intentionally submitted for inclusion in the Work
133 | by You to the Licensor shall be under the terms and conditions of
134 | this License, without any additional terms or conditions.
135 | Notwithstanding the above, nothing herein shall supersede or modify
136 | the terms of any separate license agreement you may have executed
137 | with Licensor regarding such Contributions.
138 |
139 | 6. Trademarks. This License does not grant permission to use the trade
140 | names, trademarks, service marks, or product names of the Licensor,
141 | except as required for reasonable and customary use in describing the
142 | origin of the Work and reproducing the content of the NOTICE file.
143 |
144 | 7. Disclaimer of Warranty. Unless required by applicable law or
145 | agreed to in writing, Licensor provides the Work (and each
146 | Contributor provides its Contributions) on an "AS IS" BASIS,
147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148 | implied, including, without limitation, any warranties or conditions
149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150 | PARTICULAR PURPOSE. You are solely responsible for determining the
151 | appropriateness of using or redistributing the Work and assume any
152 | risks associated with Your exercise of permissions under this License.
153 |
154 | 8. Limitation of Liability. In no event and under no legal theory,
155 | whether in tort (including negligence), contract, or otherwise,
156 | unless required by applicable law (such as deliberate and grossly
157 | negligent acts) or agreed to in writing, shall any Contributor be
158 | liable to You for damages, including any direct, indirect, special,
159 | incidental, or consequential damages of any character arising as a
160 | result of this License or out of the use or inability to use the
161 | Work (including but not limited to damages for loss of goodwill,
162 | work stoppage, computer failure or malfunction, or any and all
163 | other commercial damages or losses), even if such Contributor
164 | has been advised of the possibility of such damages.
165 |
166 | 9. Accepting Warranty or Additional Liability. While redistributing
167 | the Work or Derivative Works thereof, You may choose to offer,
168 | and charge a fee for, acceptance of support, warranty, indemnity,
169 | or other liability obligations and/or rights consistent with this
170 | License. However, in accepting such obligations, You may act only
171 | on Your own behalf and on Your sole responsibility, not on behalf
172 | of any other Contributor, and only if You agree to indemnify,
173 | defend, and hold each Contributor harmless for any liability
174 | incurred by, or claims asserted against, such Contributor by reason
175 | of your accepting any such warranty or additional liability.
176 |
177 | END OF TERMS AND CONDITIONS
178 |
179 | APPENDIX: How to apply the Apache License to your work.
180 |
181 | To apply the Apache License to your work, attach the following
182 | boilerplate notice, with the fields enclosed by brackets "[]"
183 | replaced with your own identifying information. (Don't include
184 | the brackets!) The text should be enclosed in the appropriate
185 | comment syntax for the file format. We also recommend that a
186 | file or class name and description of purpose be included on the
187 | same "printed page" as the copyright notice for easier
188 | identification within third-party archives.
189 |
190 | Copyright 2021 Prefect Technologies, Inc.
191 |
192 | Licensed under the Apache License, Version 2.0 (the "License");
193 | you may not use this file except in compliance with the License.
194 | You may obtain a copy of the License at
195 |
196 | http://www.apache.org/licenses/LICENSE-2.0
197 |
198 | Unless required by applicable law or agreed to in writing, software
199 | distributed under the License is distributed on an "AS IS" BASIS,
200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
201 | See the License for the specific language governing permissions and
202 | limitations under the License.
--------------------------------------------------------------------------------
/tests/test_jobs.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import responses
3 | from prefect import flow
4 |
5 | from prefect_cubejs.exceptions import (
6 | CubeJSAPIFailureException,
7 | CubeJSConfigurationException,
8 | )
9 | from prefect_cubejs.tasks import build_pre_aggregations
10 |
11 | # run: pytest -s tests/test_jobs.py
12 |
# Security context used when generating the test API token; `expiresIn`
# is deliberately short-lived, `foo` is an arbitrary custom claim.
security_context = {
    "expiresIn": 1,
    "foo": "bar",
}

# Example `pre-aggregations/jobs` trigger payload covering two tenant
# security contexts and two timezones.
selector = {
    "action": "post",
    "selector": {
        "contexts": [
            {"securityContext": {"tenant": "t1"}},
            {"securityContext": {"tenant": "t2"}},
        ],
        "timezones": ["UTC", "America/Los_Angeles"],
    },
}

# Job tokens the mocked "post" trigger call returns; the status fixtures
# below are keyed by these tokens.
response_tokens = [
    "be598e318484848cbb06291baa59ca3a",
    "d4bb22530aa9905219b2f0e6a214c39f",
    "e1578a60514a7c55689016adf0863965",
]
34 |
# Mocked "get" status response where every job is `missing_partition`;
# the task should raise "missing partitions" for this case.
response_status_missing_partition = {
    "e1578a60514a7c55689016adf0863965": {
        "table": "preaggs.e_commerce__manual_updates20201201_kuggpskn_alfb3s4u_1hmrdkc",
        "status": "missing_partition",
        "selector": {
            "cubes": ["ECommerce"],
            "preAggregations": ["ECommerce.ManualUpdates"],
            "contexts": [{"securityContext": {"tenant": "t2"}}],
            "timezones": ["America/Los_Angeles"],
            "dataSources": ["default"],
        },
    },
    "d4bb22530aa9905219b2f0e6a214c39f": {
        "table": "preaggs.e_commerce__manual_updates20201101_rvfrwirb_ucnfhp2g_1hmrdkc",
        "status": "missing_partition",
        "selector": {
            "cubes": ["ECommerce"],
            "preAggregations": ["ECommerce.ManualUpdates"],
            "contexts": [{"securityContext": {"tenant": "t2"}}],
            "timezones": ["America/Los_Angeles"],
            "dataSources": ["default"],
        },
    },
    "be598e318484848cbb06291baa59ca3a": {
        "table": "preaggs.e_commerce__manual_updates20201201_kbn0y0iy_fvfip33o_1hmrdkc",
        "status": "missing_partition",
        "selector": {
            "cubes": ["ECommerce"],
            "preAggregations": ["ECommerce.ManualUpdates"],
            "contexts": [{"securityContext": {"tenant": "t2"}}],
            "timezones": ["UTC"],
            "dataSources": ["default"],
        },
    },
}

# Mocked "get" status response where one job reports a `failure: ...`
# status; the task should raise a build-failed exception.
response_status_failure = {
    "e1578a60514a7c55689016adf0863965": {
        "table": "preaggs.e_commerce__manual_updates20201201_kuggpskn_alfb3s4u_1hmrdkc",
        "status": "done",
        "selector": {
            "cubes": ["ECommerce"],
            "preAggregations": ["ECommerce.ManualUpdates"],
            "contexts": [{"securityContext": {"tenant": "t2"}}],
            "timezones": ["America/Los_Angeles"],
            "dataSources": ["default"],
        },
    },
    "d4bb22530aa9905219b2f0e6a214c39f": {
        "table": "preaggs.e_commerce__manual_updates20201101_rvfrwirb_ucnfhp2g_1hmrdkc",
        "status": "done",
        "selector": {
            "cubes": ["ECommerce"],
            "preAggregations": ["ECommerce.ManualUpdates"],
            "contexts": [{"securityContext": {"tenant": "t2"}}],
            "timezones": ["America/Los_Angeles"],
            "dataSources": ["default"],
        },
    },
    "be598e318484848cbb06291baa59ca3a": {
        "table": "preaggs.e_commerce__manual_updates20201201_kbn0y0iy_fvfip33o_1hmrdkc",
        "status": "failure: returned error",
        "selector": {
            "cubes": ["ECommerce"],
            "preAggregations": ["ECommerce.ManualUpdates"],
            "contexts": [{"securityContext": {"tenant": "t2"}}],
            "timezones": ["UTC"],
            "dataSources": ["default"],
        },
    },
}

# Mocked "get" status response with jobs still `processing`/`scheduled`;
# the task should keep polling when it receives this payload.
response_status_processing = {
    "e1578a60514a7c55689016adf0863965": {
        "table": "preaggs.e_commerce__manual_updates20201201_kuggpskn_alfb3s4u_1hmrdkc",
        "status": "done",
        "selector": {
            "cubes": ["ECommerce"],
            "preAggregations": ["ECommerce.ManualUpdates"],
            "contexts": [{"securityContext": {"tenant": "t2"}}],
            "timezones": ["America/Los_Angeles"],
            "dataSources": ["default"],
        },
    },
    "d4bb22530aa9905219b2f0e6a214c39f": {
        "table": "preaggs.e_commerce__manual_updates20201101_rvfrwirb_ucnfhp2g_1hmrdkc",
        "status": "processing",
        "selector": {
            "cubes": ["ECommerce"],
            "preAggregations": ["ECommerce.ManualUpdates"],
            "contexts": [{"securityContext": {"tenant": "t2"}}],
            "timezones": ["America/Los_Angeles"],
            "dataSources": ["default"],
        },
    },
    "be598e318484848cbb06291baa59ca3a": {
        "table": "preaggs.e_commerce__manual_updates20201201_kbn0y0iy_fvfip33o_1hmrdkc",
        "status": "scheduled",
        "selector": {
            "cubes": ["ECommerce"],
            "preAggregations": ["ECommerce.ManualUpdates"],
            "contexts": [{"securityContext": {"tenant": "t2"}}],
            "timezones": ["UTC"],
            "dataSources": ["default"],
        },
    },
}
142 |
143 | response_status_done = {
144 | "e1578a60514a7c55689016adf0863965": {
145 | "table": "preaggs.e_commerce__manual_updates20201201_kuggpskn_alfb3s4u_1hmrdkc",
146 | "status": "done",
147 | "selector": {
148 | "cubes": ["ECommerce"],
149 | "preAggregations": ["ECommerce.ManualUpdates"],
150 | "contexts": [{"securityContext": {"tenant": "t2"}}],
151 | "timezones": ["America/Los_Angeles"],
152 | "dataSources": ["default"],
153 | },
154 | },
155 | "d4bb22530aa9905219b2f0e6a214c39f": {
156 | "table": "preaggs.e_commerce__manual_updates20201101_rvfrwirb_ucnfhp2g_1hmrdkc",
157 | "status": "done",
158 | "selector": {
159 | "cubes": ["ECommerce"],
160 | "preAggregations": ["ECommerce.ManualUpdates"],
161 | "contexts": [{"securityContext": {"tenant": "t2"}}],
162 | "timezones": ["America/Los_Angeles"],
163 | "dataSources": ["default"],
164 | },
165 | },
166 | "be598e318484848cbb06291baa59ca3a": {
167 | "table": "preaggs.e_commerce__manual_updates20201201_kbn0y0iy_fvfip33o_1hmrdkc",
168 | "status": "done",
169 | "selector": {
170 | "cubes": ["ECommerce"],
171 | "preAggregations": ["ECommerce.ManualUpdates"],
172 | "contexts": [{"securityContext": {"tenant": "t2"}}],
173 | "timezones": ["UTC"],
174 | "dataSources": ["default"],
175 | },
176 | },
177 | }
178 |
179 |
def test_no_params():
    """Calling the task with neither `subdomain` nor `url` must raise."""

    @flow(name="test_no_params")
    def run_flow():
        return build_pre_aggregations()

    with pytest.raises(
        CubeJSConfigurationException, match="Missing both `subdomain` and `url`."
    ):
        run_flow()
189 |
190 |
def test_no_secret():
    """A URL without any API secret must raise a configuration error."""

    @flow(name="test_no_secret")
    def run_flow():
        return build_pre_aggregations(
            url="http://localhost:4000/cubejs-system",
        )

    with pytest.raises(
        CubeJSConfigurationException,
        match="Missing `api_secret` and `api_secret_env_var` not found.",
    ):
        run_flow()
202 |
203 |
def test_no_selector():
    """Omitting the pre-aggregation `selector` must raise."""

    @flow(name="test_no_selector")
    def run_flow():
        return build_pre_aggregations(
            url="http://localhost:4000/cubejs-system",
            api_secret="23dff8b29cf20df38a4c78dfaf689fa55916add4d27ee3dd9ba75d1",
        )

    with pytest.raises(CubeJSConfigurationException, match="Missing `selector`."):
        run_flow()
216 |
217 |
@responses.activate
def test_internal_error():
    """An HTTP 500 from the jobs API surfaces as CubeJSAPIFailureException."""
    responses.add(
        responses.POST,
        "http://localhost:4000/cubejs-system/v1/pre-aggregations/jobs",
        json={"error": "500"},
        status=500,
    )

    @flow(name="test_internal_error")
    def run_flow():
        return build_pre_aggregations(
            url="http://localhost:4000/cubejs-system",
            api_secret="23dff8b29cf20df38a4c78dfaf689fa55916add4d27ee3dd9ba75d1",
            selector=selector,
        )

    with pytest.raises(
        CubeJSAPIFailureException,
        match="Cube `pre-aggregations/jobs` API failed: Internal Server Error",
    ):
        run_flow()
239 |
240 |
@responses.activate
def test_no_wait_completion():
    """Without waiting for completion, only the trigger call is made."""
    responses.add(
        responses.POST,
        "http://localhost:4000/cubejs-system/v1/pre-aggregations/jobs",
        json=response_tokens,
        status=200,
    )

    @flow(name="test_no_wait_completion")
    def run_flow():
        return build_pre_aggregations(
            url="http://localhost:4000/cubejs-system",
            api_secret="23dff8b29cf20df38a4c78dfaf689fa55916add4d27ee3dd9ba75d1",
            selector=selector,
        )

    run_flow()
    assert len(responses.calls) == 1
260 |
261 |
@responses.activate
def test_wait_completion_one_step():
    """Trigger then an immediately 'done' status -> exactly two API calls."""
    for payload in (response_tokens, response_status_done):
        responses.add(
            responses.POST,
            "http://localhost:4000/cubejs-system/v1/pre-aggregations/jobs",
            json=payload,
            status=200,
        )

    @flow(name="test_wait_completion_one_step")
    def run_flow():
        return build_pre_aggregations(
            url="http://localhost:4000/cubejs-system",
            api_secret="23dff8b29cf20df38a4c78dfaf689fa55916add4d27ee3dd9ba75d1",
            selector=selector,
            wait_for_job_run_completion=True,
            wait_time_between_api_calls=0,
        )

    run_flow()
    assert len(responses.calls) == 2
289 |
290 |
@responses.activate
def test_wait_completion_two_step():
    """One 'processing' poll before 'done' -> exactly three API calls."""
    for payload in (
        response_tokens,
        response_status_processing,
        response_status_done,
    ):
        responses.add(
            responses.POST,
            "http://localhost:4000/cubejs-system/v1/pre-aggregations/jobs",
            json=payload,
            status=200,
        )

    @flow(name="test_wait_completion_two_step")
    def run_flow():
        return build_pre_aggregations(
            url="http://localhost:4000/cubejs-system",
            api_secret="23dff8b29cf20df38a4c78dfaf689fa55916add4d27ee3dd9ba75d1",
            selector=selector,
            wait_for_job_run_completion=True,
            wait_time_between_api_calls=0,
        )

    run_flow()
    assert len(responses.calls) == 3
324 |
325 |
@responses.activate
def test_wait_completion_three_step():
    """Two 'processing' polls before 'done' -> exactly four API calls."""
    for payload in (
        response_tokens,
        response_status_processing,
        response_status_processing,
        response_status_done,
    ):
        responses.add(
            responses.POST,
            "http://localhost:4000/cubejs-system/v1/pre-aggregations/jobs",
            json=payload,
            status=200,
        )

    @flow(name="test_wait_completion_three_step")
    def run_flow():
        return build_pre_aggregations(
            url="http://localhost:4000/cubejs-system",
            api_secret="23dff8b29cf20df38a4c78dfaf689fa55916add4d27ee3dd9ba75d1",
            selector=selector,
            wait_for_job_run_completion=True,
            wait_time_between_api_calls=0,
        )

    run_flow()
    assert len(responses.calls) == 4
366 |
367 |
@responses.activate
def test_missing_partitions():
    """A 'missing_partition' status must abort the build with an error."""
    for payload in (response_tokens, response_status_missing_partition):
        responses.add(
            responses.POST,
            "http://localhost:4000/cubejs-system/v1/pre-aggregations/jobs",
            json=payload,
            status=200,
        )

    @flow(name="test_missing_partitions")
    def run_flow():
        return build_pre_aggregations(
            url="http://localhost:4000/cubejs-system",
            api_secret="23dff8b29cf20df38a4c78dfaf689fa55916add4d27ee3dd9ba75d1",
            selector=selector,
            wait_for_job_run_completion=True,
            wait_time_between_api_calls=0,
        )

    with pytest.raises(
        CubeJSAPIFailureException,
        match="Cube pre-aggregations build failed: missing partitions.",
    ):
        run_flow()
    assert len(responses.calls) == 2
397 |
398 |
@responses.activate
def test_failure():
    """A failed pre-aggregation build must raise after the status poll."""
    for payload in (response_tokens, response_status_failure):
        responses.add(
            responses.POST,
            "http://localhost:4000/cubejs-system/v1/pre-aggregations/jobs",
            json=payload,
            status=200,
        )

    @flow(name="test_failure")
    def run_flow():
        return build_pre_aggregations(
            url="http://localhost:4000/cubejs-system",
            api_secret="23dff8b29cf20df38a4c78dfaf689fa55916add4d27ee3dd9ba75d1",
            selector=selector,
            wait_for_job_run_completion=True,
            wait_time_between_api_calls=0,
        )

    with pytest.raises(
        CubeJSAPIFailureException,
        match="Cube pre-aggregations build failed: failure: returned error.",
    ):
        run_flow()
    assert len(responses.calls) == 2
428 |
--------------------------------------------------------------------------------
/prefect_cubejs/_version.py:
--------------------------------------------------------------------------------
1 | # This file helps to compute a version number in source trees obtained from
2 | # git-archive tarball (such as those provided by githubs download-from-tag
3 | # feature). Distribution tarballs (built by setup.py sdist) and build
4 | # directories (produced by setup.py build) will contain a much shorter file
5 | # that just contains the computed version number.
6 |
7 | # This file is released into the public domain. Generated by
8 | # versioneer-0.21 (https://github.com/python-versioneer/python-versioneer)
9 |
10 | """Git implementation of _version.py."""
11 |
12 | import errno
13 | import os
14 | import re
15 | import subprocess
16 | import sys
17 | from typing import Callable, Dict
18 |
19 |
def get_keywords():
    """Get the keywords needed to look up the version information."""
    # these strings will be replaced by git during git-archive.
    # setup.py/versioneer.py will grep for the variable names, so they must
    # each be defined on a line of their own. _version.py will just call
    # get_keywords().
    # NOTE: do not rename or reformat these three assignments — the
    # surrounding tooling matches them textually (see git_get_keywords).
    git_refnames = " (HEAD -> main)"
    git_full = "290e9d1052ea5256a003c588e63650d7a4e1fa15"
    git_date = "2023-11-17 19:23:58 +0100"
    keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
    return keywords
31 |
32 |
class VersioneerConfig:
    """Container for Versioneer configuration parameters.

    Attributes (VCS, style, tag_prefix, parentdir_prefix,
    versionfile_source, verbose) are assigned dynamically by get_config().
    """
35 |
36 |
def get_config():
    """Create, populate and return the VersioneerConfig() object."""
    # these strings are filled in when 'setup.py versioneer' creates
    # _version.py
    cfg = VersioneerConfig()
    settings = {
        "VCS": "git",
        "style": "pep440",
        "tag_prefix": "",
        "parentdir_prefix": "",
        "versionfile_source": "prefect_cubejs/_version.py",
        "verbose": False,
    }
    for attr, value in settings.items():
        setattr(cfg, attr, value)
    return cfg
49 |
50 |
class NotThisMethod(Exception):
    """Exception raised if a method is not valid for the current scenario.

    Used as internal control flow: get_versions() catches it and falls
    through to the next version-discovery strategy.
    """
53 |
54 |
# Registry of VCS handler callables, populated via @register_vcs_handler.
LONG_VERSION_PY: Dict[str, str] = {}
HANDLERS: Dict[str, Dict[str, Callable]] = {}


def register_vcs_handler(vcs, method):  # decorator
    """Create decorator to mark a method as the handler of a VCS."""

    def decorate(f):
        """Store f in HANDLERS[vcs][method]."""
        HANDLERS.setdefault(vcs, {})[method] = f
        return f

    return decorate
70 |
71 |
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None):
    """Call the given command(s).

    Tries each executable name in `commands` in turn and returns a
    `(stdout, returncode)` tuple; `(None, None)` when no executable could
    be launched, `(None, returncode)` when the process exited non-zero.
    """
    assert isinstance(commands, list)
    process = None
    for command in commands:
        dispcmd = str([command] + args)
        try:
            # remember shell=False, so use git.cmd on windows, not just git
            process = subprocess.Popen(
                [command] + args,
                cwd=cwd,
                env=env,
                stdout=subprocess.PIPE,
                stderr=(subprocess.PIPE if hide_stderr else None),
            )
            break
        except OSError as exc:
            if exc.errno == errno.ENOENT:
                # this name isn't installed; try the next candidate
                continue
            if verbose:
                print("unable to run %s" % dispcmd)
                print(exc)
            return None, None
    else:
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None, None
    stdout = process.communicate()[0].strip().decode()
    if process.returncode != 0:
        if verbose:
            print("unable to run %s (error)" % dispcmd)
            print("stdout was %s" % stdout)
        return None, process.returncode
    return stdout, process.returncode
107 |
108 |
def versions_from_parentdir(parentdir_prefix, root, verbose):
    """Try to determine the version from the parent directory name.

    Source tarballs conventionally unpack into a directory that includes both
    the project name and a version string. We will also support searching up
    two directory levels for an appropriately named parent directory
    """
    tried = []
    candidate = root
    for _ in range(3):
        name = os.path.basename(candidate)
        if name.startswith(parentdir_prefix):
            # everything after the prefix is the version string
            return {
                "version": name[len(parentdir_prefix):],
                "full-revisionid": None,
                "dirty": False,
                "error": None,
                "date": None,
            }
        tried.append(candidate)
        candidate = os.path.dirname(candidate)  # up a level

    if verbose:
        print(
            "Tried directories %s but none started with prefix %s"
            % (str(tried), parentdir_prefix)
        )
    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
137 |
138 |
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
    """Extract version information from the given file.

    The code embedded in _version.py can just fetch the value of these
    keywords directly; when used from setup.py we must not import
    _version.py, so the assignments are matched textually instead.
    """
    # maps the textual assignment prefix to the keyword name we store
    targets = {
        "git_refnames =": "refnames",
        "git_full =": "full",
        "git_date =": "date",
    }
    keywords = {}
    try:
        with open(versionfile_abs, "r") as fobj:
            for line in fobj:
                stripped = line.strip()
                for prefix, key in targets.items():
                    if stripped.startswith(prefix):
                        mo = re.search(r'=\s*"(.*)"', line)
                        if mo:
                            keywords[key] = mo.group(1)
    except OSError:
        # missing/unreadable file: return whatever was collected (nothing)
        pass
    return keywords
165 |
166 |
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
    """Get version information from git keywords."""
    if "refnames" not in keywords:
        raise NotThisMethod("Short version file found")
    date = keywords.get("date")
    if date is not None:
        # Use only the last line. Previous lines may contain GPG signature
        # information.
        date = date.splitlines()[-1]
        # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
        # datestamp. However we prefer "%ci" (which expands to an "ISO-8601
        # -like" string, which we must then edit to make compliant), because
        # it's been around since git-1.5.3, and it's too difficult to
        # discover which version we're using, or to work around using an
        # older one.
        date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
    refnames = keywords["refnames"].strip()
    if refnames.startswith("$Format"):
        if verbose:
            print("keywords are unexpanded, not using")
        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
    refs = {ref.strip() for ref in refnames.strip("()").split(",")}
    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
    TAG = "tag: "
    tags = {ref[len(TAG):] for ref in refs if ref.startswith(TAG)}
    if not tags:
        # Either we're using git < 1.8.3, or there really are no tags. We use
        # a heuristic: assume all version tags have a digit. The old git %d
        # expansion behaves like git log --decorate=short and strips out the
        # refs/heads/ and refs/tags/ prefixes that would let us distinguish
        # between branches and tags. By ignoring refnames without digits, we
        # filter out many common branch names like "release" and
        # "stabilization", as well as "HEAD" and "master".
        tags = {ref for ref in refs if re.search(r"\d", ref)}
        if verbose:
            print("discarding '%s', no digits" % ",".join(refs - tags))
    if verbose:
        print("likely tags: %s" % ",".join(sorted(tags)))
    # sorting will prefer e.g. "2.0" over "2.0rc1"
    for ref in sorted(tags):
        if not ref.startswith(tag_prefix):
            continue
        candidate = ref[len(tag_prefix):]
        # Filter out refs that exactly match prefix or that don't start
        # with a number once the prefix is stripped (mostly a concern
        # when prefix is '')
        if not re.match(r"\d", candidate):
            continue
        if verbose:
            print("picking %s" % candidate)
        return {
            "version": candidate,
            "full-revisionid": keywords["full"].strip(),
            "dirty": False,
            "error": None,
            "date": date,
        }
    # no suitable tags, so version is "0+unknown", but full hex is still there
    if verbose:
        print("no suitable tags, using unknown + full revision id")
    return {
        "version": "0+unknown",
        "full-revisionid": keywords["full"].strip(),
        "dirty": False,
        "error": "no suitable tags",
        "date": None,
    }
236 |
237 |
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command):
    """Get version from 'git describe' in the root of the source tree.

    This only gets called if the git-archive 'subst' keywords were *not*
    expanded, and _version.py hasn't already been rewritten with a short
    version string, meaning we're inside a checked out source tree.

    Returns a "pieces" dict with keys "long", "short", "branch", "dirty",
    "closest-tag", "distance", "date" and "error"; raises NotThisMethod
    when the tree is not under git control or git cannot be invoked.
    """
    GITS = ["git"]
    TAG_PREFIX_REGEX = "*"
    if sys.platform == "win32":
        # run_command uses shell=False, so the .cmd/.exe wrappers must be
        # named explicitly on Windows
        GITS = ["git.cmd", "git.exe"]
        TAG_PREFIX_REGEX = r"\*"

    _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True)
    if rc != 0:
        if verbose:
            print("Directory %s not under git control" % root)
        raise NotThisMethod("'git rev-parse --git-dir' returned error")

    # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
    # if there isn't one, this yields HEX[-dirty] (no NUM)
    describe_out, rc = runner(
        GITS,
        [
            "describe",
            "--tags",
            "--dirty",
            "--always",
            "--long",
            "--match",
            "%s%s" % (tag_prefix, TAG_PREFIX_REGEX),
        ],
        cwd=root,
    )
    # --long was added in git-1.5.5
    if describe_out is None:
        raise NotThisMethod("'git describe' failed")
    describe_out = describe_out.strip()
    full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root)
    if full_out is None:
        raise NotThisMethod("'git rev-parse' failed")
    full_out = full_out.strip()

    pieces = {}
    pieces["long"] = full_out
    pieces["short"] = full_out[:7]  # maybe improved later
    pieces["error"] = None

    branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], cwd=root)
    # --abbrev-ref was added in git-1.6.3
    if rc != 0 or branch_name is None:
        raise NotThisMethod("'git rev-parse --abbrev-ref' returned error")
    branch_name = branch_name.strip()

    if branch_name == "HEAD":
        # If we aren't exactly on a branch, pick a branch which represents
        # the current commit. If all else fails, we are on a branchless
        # commit.
        branches, rc = runner(GITS, ["branch", "--contains"], cwd=root)
        # --contains was added in git-1.5.4
        if rc != 0 or branches is None:
            raise NotThisMethod("'git branch --contains' returned error")
        branches = branches.split("\n")

        # Remove the first line if we're running detached
        if "(" in branches[0]:
            branches.pop(0)

        # Strip off the leading "* " from the list of branches.
        branches = [branch[2:] for branch in branches]
        if "master" in branches:
            branch_name = "master"
        elif not branches:
            branch_name = None
        else:
            # Pick the first branch that is returned. Good or bad.
            branch_name = branches[0]

    pieces["branch"] = branch_name

    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
    # TAG might have hyphens.
    git_describe = describe_out

    # look for -dirty suffix
    dirty = git_describe.endswith("-dirty")
    pieces["dirty"] = dirty
    if dirty:
        git_describe = git_describe[: git_describe.rindex("-dirty")]

    # now we have TAG-NUM-gHEX or HEX

    if "-" in git_describe:
        # TAG-NUM-gHEX
        mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe)
        if not mo:
            # unparsable. Maybe git-describe is misbehaving?
            pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out
            return pieces

        # tag
        full_tag = mo.group(1)
        if not full_tag.startswith(tag_prefix):
            if verbose:
                fmt = "tag '%s' doesn't start with prefix '%s'"
                print(fmt % (full_tag, tag_prefix))
            pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % (
                full_tag,
                tag_prefix,
            )
            return pieces
        pieces["closest-tag"] = full_tag[len(tag_prefix) :]

        # distance: number of commits since tag
        pieces["distance"] = int(mo.group(2))

        # commit: short hex revision ID
        pieces["short"] = mo.group(3)

    else:
        # HEX: no tags
        pieces["closest-tag"] = None
        count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root)
        pieces["distance"] = int(count_out)  # total number of commits

    # commit date: see ISO-8601 comment in git_versions_from_keywords()
    date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip()
    # Use only the last line. Previous lines may contain GPG signature
    # information.
    date = date.splitlines()[-1]
    pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)

    return pieces
372 |
373 |
def plus_or_dot(pieces):
    """Return a "+" if no local-version separator is present yet, else a ".".

    PEP 440 allows only one "+" in a version string; when the closest tag
    already contains one (e.g. "1.0+abc"), further local segments must be
    appended with "." instead.
    """
    # "closest-tag" may be present but None (no tags found); `or ""` keeps
    # the membership test from raising TypeError on None — same fix as
    # upstream python-versioneer.
    if "+" in (pieces.get("closest-tag") or ""):
        return "."
    return "+"
379 |
380 |
def render_pep440(pieces):
    """Build up version string, with post-release "local version identifier".

    Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
    get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty

    Exceptions:
    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1: no tags at all
        rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"])
        if pieces["dirty"]:
            rendered += ".dirty"
        return rendered

    rendered = tag
    if pieces["distance"] or pieces["dirty"]:
        rendered += plus_or_dot(pieces)
        rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
        if pieces["dirty"]:
            rendered += ".dirty"
    return rendered
403 |
404 |
def render_pep440_branch(pieces):
    """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] .

    The ".dev0" means not master branch. Note that .dev0 sorts backwards
    (a feature branch will appear "older" than the master branch).

    Exceptions:
    1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty]
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1: no tags at all
        rendered = "0"
        if pieces["branch"] != "master":
            rendered += ".dev0"
        rendered += "+untagged.%d.g%s" % (pieces["distance"], pieces["short"])
        if pieces["dirty"]:
            rendered += ".dirty"
        return rendered

    rendered = tag
    if pieces["distance"] or pieces["dirty"]:
        if pieces["branch"] != "master":
            rendered += ".dev0"
        rendered += plus_or_dot(pieces)
        rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
        if pieces["dirty"]:
            rendered += ".dirty"
    return rendered
432 |
433 |
def pep440_split_post(ver):
    """Split pep440 version string at the post-release segment.

    Returns the release segments before the post-release and the
    post-release version number (or -1 if no post-release segment is present).
    """
    parts = ver.split(".post")
    if len(parts) == 2:
        # ".post" with no trailing number counts as post-release 0
        return parts[0], int(parts[1] or 0)
    return parts[0], None
442 |
443 |
def render_pep440_pre(pieces):
    """TAG[.postN.devDISTANCE] -- No -dirty.

    Exceptions:
    1: no tags. 0.post0.devDISTANCE
    """
    if not pieces["closest-tag"]:
        # exception #1: no tags at all
        return "0.post0.dev%d" % pieces["distance"]
    if not pieces["distance"]:
        # sitting exactly on a tag: the tag is the version
        return pieces["closest-tag"]
    # update (or create) the post-release segment
    tag_version, post_version = pep440_split_post(pieces["closest-tag"])
    if post_version is None:
        return tag_version + ".post0.dev%d" % pieces["distance"]
    return tag_version + ".post%d.dev%d" % (post_version + 1, pieces["distance"])
466 |
467 |
def render_pep440_post(pieces):
    """TAG[.postDISTANCE[.dev0]+gHEX] .

    The ".dev0" means dirty. Note that .dev0 sorts backwards
    (a dirty tree will appear "older" than the corresponding clean one),
    but you shouldn't be releasing software with -dirty anyways.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    if not pieces["closest-tag"]:
        # exception #1: no tags at all
        rendered = "0.post%d" % pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
        return rendered + "+g%s" % pieces["short"]

    rendered = pieces["closest-tag"]
    if pieces["distance"] or pieces["dirty"]:
        rendered += ".post%d" % pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
        rendered += plus_or_dot(pieces)
        rendered += "g%s" % pieces["short"]
    return rendered
493 |
494 |
def render_pep440_post_branch(pieces):
    """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] .

    The ".dev0" means not master branch.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty]
    """
    if not pieces["closest-tag"]:
        # exception #1: no tags at all
        rendered = "0.post%d" % pieces["distance"]
        if pieces["branch"] != "master":
            rendered += ".dev0"
        rendered += "+g%s" % pieces["short"]
        if pieces["dirty"]:
            rendered += ".dirty"
        return rendered

    rendered = pieces["closest-tag"]
    if pieces["distance"] or pieces["dirty"]:
        rendered += ".post%d" % pieces["distance"]
        if pieces["branch"] != "master":
            rendered += ".dev0"
        rendered += plus_or_dot(pieces)
        rendered += "g%s" % pieces["short"]
        if pieces["dirty"]:
            rendered += ".dirty"
    return rendered
522 |
523 |
def render_pep440_old(pieces):
    """TAG[.postDISTANCE[.dev0]] .

    The ".dev0" means dirty.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += ".post%d" % pieces["distance"]
            if pieces["dirty"]:
                rendered += ".dev0"
        return rendered
    # exception #1: no tags at all
    rendered = "0.post%d" % pieces["distance"]
    if pieces["dirty"]:
        rendered += ".dev0"
    return rendered
544 |
545 |
def render_git_describe(pieces):
    """TAG[-DISTANCE-gHEX][-dirty].

    Like 'git describe --tags --dirty --always'.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"]:
            rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
    else:
        # exception #1: no tags at all
        rendered = pieces["short"]
    return rendered + ("-dirty" if pieces["dirty"] else "")
564 |
565 |
def render_git_describe_long(pieces):
    """TAG-DISTANCE-gHEX[-dirty].

    Like 'git describe --tags --dirty --always -long'.
    The distance/hash is unconditional.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    if pieces["closest-tag"]:
        rendered = "%s-%d-g%s" % (
            pieces["closest-tag"],
            pieces["distance"],
            pieces["short"],
        )
    else:
        # exception #1: no tags at all
        rendered = pieces["short"]
    return rendered + ("-dirty" if pieces["dirty"] else "")
584 |
585 |
def render(pieces, style):
    """Render the given version pieces into the requested style."""
    if pieces["error"]:
        # upstream discovery failed; report an unknown version with the error
        return {
            "version": "unknown",
            "full-revisionid": pieces.get("long"),
            "dirty": None,
            "error": pieces["error"],
            "date": None,
        }

    if not style or style == "default":
        style = "pep440"  # the default

    renderers = {
        "pep440": render_pep440,
        "pep440-branch": render_pep440_branch,
        "pep440-pre": render_pep440_pre,
        "pep440-post": render_pep440_post,
        "pep440-post-branch": render_pep440_post_branch,
        "pep440-old": render_pep440_old,
        "git-describe": render_git_describe,
        "git-describe-long": render_git_describe_long,
    }
    if style not in renderers:
        raise ValueError("unknown style '%s'" % style)
    rendered = renderers[style](pieces)

    return {
        "version": rendered,
        "full-revisionid": pieces["long"],
        "dirty": pieces["dirty"],
        "error": None,
        "date": pieces.get("date"),
    }
626 |
627 |
def get_versions():
    """Get version information or return default if unable to do so.

    Tries, in order: expanded git-archive keywords, `git describe` on a
    checked-out tree, and finally the parent-directory name; each strategy
    signals "not applicable" by raising NotThisMethod.
    """
    # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
    # __file__, we can work backwards from there to the root. Some
    # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
    # case we can only use expanded keywords.

    cfg = get_config()
    verbose = cfg.verbose

    # Strategy 1: git-archive keywords (works inside exported tarballs).
    try:
        return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose)
    except NotThisMethod:
        pass

    try:
        root = os.path.realpath(__file__)
        # versionfile_source is the relative path from the top of the source
        # tree (where the .git directory might live) to this file. Invert
        # this to find the root from __file__.
        for _ in cfg.versionfile_source.split("/"):
            root = os.path.dirname(root)
    except NameError:
        # no __file__ (frozen interpreters): cannot locate the source root
        return {
            "version": "0+unknown",
            "full-revisionid": None,
            "dirty": None,
            "error": "unable to find root of source tree",
            "date": None,
        }

    # Strategy 2: ask git directly (works in a checked-out source tree).
    try:
        pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
        return render(pieces, cfg.style)
    except NotThisMethod:
        pass

    # Strategy 3: parse the version out of the parent directory name.
    try:
        if cfg.parentdir_prefix:
            return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
    except NotThisMethod:
        pass

    return {
        "version": "0+unknown",
        "full-revisionid": None,
        "dirty": None,
        "error": "unable to compute version",
        "date": None,
    }
678 |
--------------------------------------------------------------------------------
/versioneer.py:
--------------------------------------------------------------------------------
1 | # Version: 0.21
2 |
3 | """The Versioneer - like a rocketeer, but for versions.
4 |
5 | The Versioneer
6 | ==============
7 |
8 | * like a rocketeer, but for versions!
9 | * https://github.com/python-versioneer/python-versioneer
10 | * Brian Warner
11 | * License: Public Domain
12 | * Compatible with: Python 3.6, 3.7, 3.8, 3.9 and pypy3
13 | * [![Latest Version][pypi-image]][pypi-url]
14 | * [![Build Status][travis-image]][travis-url]
15 |
16 | This is a tool for managing a recorded version number in distutils-based
17 | python projects. The goal is to remove the tedious and error-prone "update
18 | the embedded version string" step from your release process. Making a new
19 | release should be as easy as recording a new tag in your version-control
20 | system, and maybe making new tarballs.
21 |
22 |
23 | ## Quick Install
24 |
25 | * `pip install versioneer` to somewhere in your $PATH
26 | * add a `[versioneer]` section to your setup.cfg (see [Install](INSTALL.md))
27 | * run `versioneer install` in your source tree, commit the results
28 | * Verify version information with `python setup.py version`
29 |
30 | ## Version Identifiers
31 |
32 | Source trees come from a variety of places:
33 |
34 | * a version-control system checkout (mostly used by developers)
35 | * a nightly tarball, produced by build automation
36 | * a snapshot tarball, produced by a web-based VCS browser, like github's
37 | "tarball from tag" feature
38 | * a release tarball, produced by "setup.py sdist", distributed through PyPI
39 |
40 | Within each source tree, the version identifier (either a string or a number,
41 | this tool is format-agnostic) can come from a variety of places:
42 |
43 | * ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows
44 | about recent "tags" and an absolute revision-id
45 | * the name of the directory into which the tarball was unpacked
46 | * an expanded VCS keyword ($Id$, etc)
47 | * a `_version.py` created by some earlier build step
48 |
49 | For released software, the version identifier is closely related to a VCS
50 | tag. Some projects use tag names that include more than just the version
51 | string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool
52 | needs to strip the tag prefix to extract the version identifier. For
53 | unreleased software (between tags), the version identifier should provide
54 | enough information to help developers recreate the same tree, while also
55 | giving them an idea of roughly how old the tree is (after version 1.2, before
56 | version 1.3). Many VCS systems can report a description that captures this,
57 | for example `git describe --tags --dirty --always` reports things like
58 | "0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the
59 | 0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has
60 | uncommitted changes).
61 |
62 | The version identifier is used for multiple purposes:
63 |
64 | * to allow the module to self-identify its version: `myproject.__version__`
65 | * to choose a name and prefix for a 'setup.py sdist' tarball
66 |
67 | ## Theory of Operation
68 |
69 | Versioneer works by adding a special `_version.py` file into your source
70 | tree, where your `__init__.py` can import it. This `_version.py` knows how to
71 | dynamically ask the VCS tool for version information at import time.
72 |
73 | `_version.py` also contains `$Revision$` markers, and the installation
74 | process marks `_version.py` to have this marker rewritten with a tag name
75 | during the `git archive` command. As a result, generated tarballs will
76 | contain enough information to get the proper version.
77 |
78 | To allow `setup.py` to compute a version too, a `versioneer.py` is added to
79 | the top level of your source tree, next to `setup.py` and the `setup.cfg`
80 | that configures it. This overrides several distutils/setuptools commands to
81 | compute the version when invoked, and changes `setup.py build` and `setup.py
82 | sdist` to replace `_version.py` with a small static file that contains just
83 | the generated version data.
84 |
85 | ## Installation
86 |
87 | See [INSTALL.md](./INSTALL.md) for detailed installation instructions.
88 |
89 | ## Version-String Flavors
90 |
91 | Code which uses Versioneer can learn about its version string at runtime by
92 | importing `_version` from your main `__init__.py` file and running the
93 | `get_versions()` function. From the "outside" (e.g. in `setup.py`), you can
94 | import the top-level `versioneer.py` and run `get_versions()`.
95 |
96 | Both functions return a dictionary with different flavors of version
97 | information:
98 |
99 | * `['version']`: A condensed version string, rendered using the selected
100 | style. This is the most commonly used value for the project's version
101 | string. The default "pep440" style yields strings like `0.11`,
102 | `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section
103 | below for alternative styles.
104 |
105 | * `['full-revisionid']`: detailed revision identifier. For Git, this is the
106 | full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac".
107 |
108 | * `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the
109 | commit date in ISO 8601 format. This will be None if the date is not
110 | available.
111 |
112 | * `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that
113 | this is only accurate if run in a VCS checkout, otherwise it is likely to
114 | be False or None
115 |
116 | * `['error']`: if the version string could not be computed, this will be set
117 | to a string describing the problem, otherwise it will be None. It may be
118 | useful to throw an exception in setup.py if this is set, to avoid e.g.
119 | creating tarballs with a version string of "unknown".
120 |
121 | Some variants are more useful than others. Including `full-revisionid` in a
122 | bug report should allow developers to reconstruct the exact code being tested
123 | (or indicate the presence of local changes that should be shared with the
124 | developers). `version` is suitable for display in an "about" box or a CLI
125 | `--version` output: it can be easily compared against release notes and lists
126 | of bugs fixed in various releases.
127 |
128 | The installer adds the following text to your `__init__.py` to place a basic
129 | version in `YOURPROJECT.__version__`:
130 |
131 | from ._version import get_versions
132 | __version__ = get_versions()['version']
133 | del get_versions
134 |
135 | ## Styles
136 |
137 | The setup.cfg `style=` configuration controls how the VCS information is
138 | rendered into a version string.
139 |
140 | The default style, "pep440", produces a PEP440-compliant string, equal to the
141 | un-prefixed tag name for actual releases, and containing an additional "local
142 | version" section with more detail for in-between builds. For Git, this is
143 | TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags
144 | --dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the
145 | tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and
146 | that this commit is two revisions ("+2") beyond the "0.11" tag. For released
147 | software (exactly equal to a known tag), the identifier will only contain the
148 | stripped tag, e.g. "0.11".
149 |
150 | Other styles are available. See [details.md](details.md) in the Versioneer
151 | source tree for descriptions.
152 |
153 | ## Debugging
154 |
155 | Versioneer tries to avoid fatal errors: if something goes wrong, it will tend
156 | to return a version of "0+unknown". To investigate the problem, run `setup.py
157 | version`, which will run the version-lookup code in a verbose mode, and will
158 | display the full contents of `get_versions()` (including the `error` string,
159 | which may help identify what went wrong).
160 |
161 | ## Known Limitations
162 |
163 | Some situations are known to cause problems for Versioneer. This details the
164 | most significant ones. More can be found on Github
165 | [issues page](https://github.com/python-versioneer/python-versioneer/issues).
166 |
167 | ### Subprojects
168 |
169 | Versioneer has limited support for source trees in which `setup.py` is not in
the root directory (e.g. `setup.py` and `.git/` are *not* siblings). There are
171 | two common reasons why `setup.py` might not be in the root:
172 |
173 | * Source trees which contain multiple subprojects, such as
174 | [Buildbot](https://github.com/buildbot/buildbot), which contains both
175 | "master" and "slave" subprojects, each with their own `setup.py`,
176 | `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI
177 | distributions (and upload multiple independently-installable tarballs).
178 | * Source trees whose main purpose is to contain a C library, but which also
179 | provide bindings to Python (and perhaps other languages) in subdirectories.
180 |
181 | Versioneer will look for `.git` in parent directories, and most operations
182 | should get the right version string. However `pip` and `setuptools` have bugs
183 | and implementation details which frequently cause `pip install .` from a
184 | subproject directory to fail to find a correct version string (so it usually
185 | defaults to `0+unknown`).
186 |
187 | `pip install --editable .` should work correctly. `setup.py install` might
188 | work too.
189 |
190 | Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in
191 | some later version.
192 |
193 | [Bug #38](https://github.com/python-versioneer/python-versioneer/issues/38) is tracking
194 | this issue. The discussion in
195 | [PR #61](https://github.com/python-versioneer/python-versioneer/pull/61) describes the
196 | issue from the Versioneer side in more detail.
197 | [pip PR#3176](https://github.com/pypa/pip/pull/3176) and
198 | [pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve
199 | pip to let Versioneer work correctly.
200 |
201 | Versioneer-0.16 and earlier only looked for a `.git` directory next to the
202 | `setup.cfg`, so subprojects were completely unsupported with those releases.
203 |
204 | ### Editable installs with setuptools <= 18.5
205 |
206 | `setup.py develop` and `pip install --editable .` allow you to install a
207 | project into a virtualenv once, then continue editing the source code (and
208 | test) without re-installing after every change.
209 |
210 | "Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a
211 | convenient way to specify executable scripts that should be installed along
212 | with the python package.
213 |
214 | These both work as expected when using modern setuptools. When using
215 | setuptools-18.5 or earlier, however, certain operations will cause
216 | `pkg_resources.DistributionNotFound` errors when running the entrypoint
217 | script, which must be resolved by re-installing the package. This happens
218 | when the install happens with one version, then the egg_info data is
219 | regenerated while a different version is checked out. Many setup.py commands
220 | cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into
221 | a different virtualenv), so this can be surprising.
222 |
223 | [Bug #83](https://github.com/python-versioneer/python-versioneer/issues/83) describes
224 | this one, but upgrading to a newer version of setuptools should probably
225 | resolve it.
226 |
227 |
228 | ## Updating Versioneer
229 |
230 | To upgrade your project to a new release of Versioneer, do the following:
231 |
232 | * install the new Versioneer (`pip install -U versioneer` or equivalent)
233 | * edit `setup.cfg`, if necessary, to include any new configuration settings
234 | indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details.
235 | * re-run `versioneer install` in your source tree, to replace
236 | `SRC/_version.py`
237 | * commit any changed files
238 |
239 | ## Future Directions
240 |
This tool is designed to be easily extended to other version-control
242 | systems: all VCS-specific components are in separate directories like
243 | src/git/ . The top-level `versioneer.py` script is assembled from these
244 | components by running make-versioneer.py . In the future, make-versioneer.py
245 | will take a VCS name as an argument, and will construct a version of
246 | `versioneer.py` that is specific to the given VCS. It might also take the
247 | configuration arguments that are currently provided manually during
248 | installation by editing setup.py . Alternatively, it might go the other
249 | direction and include code from all supported VCS systems, reducing the
250 | number of intermediate scripts.
251 |
252 | ## Similar projects
253 |
254 | * [setuptools_scm](https://github.com/pypa/setuptools_scm/) - a non-vendored build-time
255 | dependency
* [miniver](https://github.com/jbweston/miniver) - a lightweight reimplementation of
257 | versioneer
258 | * [versioningit](https://github.com/jwodder/versioningit) - a PEP 518-based setuptools
259 | plugin
260 |
261 | ## License
262 |
263 | To make Versioneer easier to embed, all its code is dedicated to the public
264 | domain. The `_version.py` that it creates is also in the public domain.
265 | Specifically, both are released under the Creative Commons "Public Domain
266 | Dedication" license (CC0-1.0), as described in
267 | https://creativecommons.org/publicdomain/zero/1.0/ .
268 |
269 | [pypi-image]: https://img.shields.io/pypi/v/versioneer.svg
270 | [pypi-url]: https://pypi.python.org/pypi/versioneer/
271 | [travis-image]:
272 | https://img.shields.io/travis/com/python-versioneer/python-versioneer.svg
273 | [travis-url]: https://travis-ci.com/github/python-versioneer/python-versioneer
274 |
275 | """
276 | # pylint:disable=invalid-name,import-outside-toplevel,missing-function-docstring
277 | # pylint:disable=missing-class-docstring,too-many-branches,too-many-statements
278 | # pylint:disable=raise-missing-from,too-many-lines,too-many-locals,import-error
279 | # pylint:disable=too-few-public-methods,redefined-outer-name,consider-using-with
280 | # pylint:disable=attribute-defined-outside-init,too-many-arguments
281 |
282 | import configparser
283 | import errno
284 | import json
285 | import os
286 | import re
287 | import subprocess
288 | import sys
289 | from typing import Callable, Dict
290 |
291 |
class VersioneerConfig:
    """Container for Versioneer configuration parameters.

    Attributes (``VCS``, ``style``, ``versionfile_source``,
    ``versionfile_build``, ``tag_prefix``, ``parentdir_prefix``,
    ``verbose``) are assigned dynamically by ``get_config_from_root``.
    """
294 |
295 |
def get_root():
    """Get the project root directory.

    We require that all commands are run from the project root, i.e. the
    directory that contains setup.py, setup.cfg, and versioneer.py .

    Returns:
        The absolute path of the project root directory.

    Raises:
        VersioneerBadRootError: if neither the current working directory
            nor the directory containing ``sys.argv[0]`` contains a
            ``setup.py`` or ``versioneer.py``.
    """
    root = os.path.realpath(os.path.abspath(os.getcwd()))
    setup_py = os.path.join(root, "setup.py")
    versioneer_py = os.path.join(root, "versioneer.py")
    if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
        # allow 'python path/to/setup.py COMMAND'
        root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0])))
        setup_py = os.path.join(root, "setup.py")
        versioneer_py = os.path.join(root, "versioneer.py")
    if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
        # Fixed wording: the message previously read "unable to run the
        # project root directory", which was nonsensical.
        err = (
            "Versioneer was unable to find the project root directory. "
            "Versioneer requires setup.py to be executed from "
            "its immediate directory (like 'python setup.py COMMAND'), "
            "or in a way that lets it use sys.argv[0] to find the root "
            "(like 'python path/to/setup.py COMMAND')."
        )
        raise VersioneerBadRootError(err)
    try:
        # Certain runtime workflows (setup.py install/develop in a setuptools
        # tree) execute all dependencies in a single python process, so
        # "versioneer" may be imported multiple times, and python's shared
        # module-import table will cache the first one. So we can't use
        # os.path.dirname(__file__), as that will find whichever
        # versioneer.py was first imported, even in later projects.
        my_path = os.path.realpath(os.path.abspath(__file__))
        me_dir = os.path.normcase(os.path.splitext(my_path)[0])
        vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0])
        if me_dir != vsr_dir:
            print(
                "Warning: build in %s is using versioneer.py from %s"
                % (os.path.dirname(my_path), versioneer_py)
            )
    except NameError:
        # __file__ is undefined in some embedded interpreters; skip the check.
        pass
    return root
337 |
338 |
def get_config_from_root(root):
    """Read the project setup.cfg file to determine Versioneer config."""
    # This might raise OSError (if setup.cfg is missing), or
    # configparser.NoSectionError (if it lacks a [versioneer] section), or
    # configparser.NoOptionError (if it lacks "VCS="). See the docstring at
    # the top of versioneer.py for instructions on writing your setup.cfg .
    parser = configparser.ConfigParser()
    with open(os.path.join(root, "setup.cfg"), "r") as handle:
        parser.read_file(handle)
    vcs = parser.get("versioneer", "VCS")  # mandatory

    # Dict-like interface for non-mandatory entries
    options = parser["versioneer"]

    cfg = VersioneerConfig()
    cfg.VCS = vcs
    cfg.style = options.get("style", "")
    cfg.versionfile_source = options.get("versionfile_source")
    cfg.versionfile_build = options.get("versionfile_build")
    cfg.tag_prefix = options.get("tag_prefix")
    # A quoted empty string in setup.cfg means "no tag prefix".
    if cfg.tag_prefix in ("''", '""'):
        cfg.tag_prefix = ""
    cfg.parentdir_prefix = options.get("parentdir_prefix")
    cfg.verbose = options.get("verbose")
    return cfg
365 |
366 |
class NotThisMethod(Exception):
    """Exception raised if a method is not valid for the current scenario.

    Raised by the individual version-discovery strategies (keyword
    expansion, VCS query, parent-directory name) to signal that the next
    strategy should be tried.
    """
369 |
370 |
# These dictionaries contain VCS-specific tools:
# LONG_VERSION_PY maps a VCS name to the _version.py template defined below;
# HANDLERS maps VCS name -> method name -> handler function, populated via
# the register_vcs_handler() decorator.
LONG_VERSION_PY: Dict[str, str] = {}
HANDLERS: Dict[str, Dict[str, Callable]] = {}
374 |
375 |
def register_vcs_handler(vcs, method):  # decorator
    """Create decorator to mark a method as the handler of a VCS."""

    def decorate(func):
        """Store func in HANDLERS[vcs][method]."""
        if vcs not in HANDLERS:
            HANDLERS[vcs] = {}
        HANDLERS[vcs][method] = func
        return func

    return decorate
385 |
386 |
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None):
    """Call the given command(s), returning a (stdout, returncode) pair."""
    assert isinstance(commands, list)
    process = None
    for command in commands:
        dispcmd = str([command] + args)
        try:
            # remember shell=False, so use git.cmd on windows, not just git
            process = subprocess.Popen(
                [command] + args,
                cwd=cwd,
                env=env,
                stdout=subprocess.PIPE,
                stderr=(subprocess.PIPE if hide_stderr else None),
            )
        except OSError as exc:
            if exc.errno == errno.ENOENT:
                # This candidate executable is not installed; try the next.
                continue
            if verbose:
                print("unable to run %s" % dispcmd)
                print(exc)
            return None, None
        break
    else:
        # Every candidate raised ENOENT.
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None, None
    stdout = process.communicate()[0].strip().decode()
    if process.returncode != 0:
        if verbose:
            print("unable to run %s (error)" % dispcmd)
            print("stdout was %s" % stdout)
        return None, process.returncode
    return stdout, process.returncode
422 |
423 |
424 | LONG_VERSION_PY[
425 | "git"
426 | ] = r'''
427 | # This file helps to compute a version number in source trees obtained from
428 | # git-archive tarball (such as those provided by githubs download-from-tag
429 | # feature). Distribution tarballs (built by setup.py sdist) and build
430 | # directories (produced by setup.py build) will contain a much shorter file
431 | # that just contains the computed version number.
432 |
433 | # This file is released into the public domain. Generated by
434 | # versioneer-0.21 (https://github.com/python-versioneer/python-versioneer)
435 |
436 | """Git implementation of _version.py."""
437 |
438 | import errno
439 | import os
440 | import re
441 | import subprocess
442 | import sys
443 | from typing import Callable, Dict
444 |
445 |
446 | def get_keywords():
447 | """Get the keywords needed to look up the version information."""
448 | # these strings will be replaced by git during git-archive.
449 | # setup.py/versioneer.py will grep for the variable names, so they must
450 | # each be defined on a line of their own. _version.py will just call
451 | # get_keywords().
452 | git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s"
453 | git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s"
454 | git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s"
455 | keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
456 | return keywords
457 |
458 |
459 | class VersioneerConfig:
460 | """Container for Versioneer configuration parameters."""
461 |
462 |
463 | def get_config():
464 | """Create, populate and return the VersioneerConfig() object."""
465 | # these strings are filled in when 'setup.py versioneer' creates
466 | # _version.py
467 | cfg = VersioneerConfig()
468 | cfg.VCS = "git"
469 | cfg.style = "%(STYLE)s"
470 | cfg.tag_prefix = "%(TAG_PREFIX)s"
471 | cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s"
472 | cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s"
473 | cfg.verbose = False
474 | return cfg
475 |
476 |
477 | class NotThisMethod(Exception):
478 | """Exception raised if a method is not valid for the current scenario."""
479 |
480 |
481 | LONG_VERSION_PY: Dict[str, str] = {}
482 | HANDLERS: Dict[str, Dict[str, Callable]] = {}
483 |
484 |
485 | def register_vcs_handler(vcs, method): # decorator
486 | """Create decorator to mark a method as the handler of a VCS."""
487 | def decorate(f):
488 | """Store f in HANDLERS[vcs][method]."""
489 | if vcs not in HANDLERS:
490 | HANDLERS[vcs] = {}
491 | HANDLERS[vcs][method] = f
492 | return f
493 | return decorate
494 |
495 |
496 | def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
497 | env=None):
498 | """Call the given command(s)."""
499 | assert isinstance(commands, list)
500 | process = None
501 | for command in commands:
502 | try:
503 | dispcmd = str([command] + args)
504 | # remember shell=False, so use git.cmd on windows, not just git
505 | process = subprocess.Popen([command] + args, cwd=cwd, env=env,
506 | stdout=subprocess.PIPE,
507 | stderr=(subprocess.PIPE if hide_stderr
508 | else None))
509 | break
510 | except OSError:
511 | e = sys.exc_info()[1]
512 | if e.errno == errno.ENOENT:
513 | continue
514 | if verbose:
515 | print("unable to run %%s" %% dispcmd)
516 | print(e)
517 | return None, None
518 | else:
519 | if verbose:
520 | print("unable to find command, tried %%s" %% (commands,))
521 | return None, None
522 | stdout = process.communicate()[0].strip().decode()
523 | if process.returncode != 0:
524 | if verbose:
525 | print("unable to run %%s (error)" %% dispcmd)
526 | print("stdout was %%s" %% stdout)
527 | return None, process.returncode
528 | return stdout, process.returncode
529 |
530 |
531 | def versions_from_parentdir(parentdir_prefix, root, verbose):
532 | """Try to determine the version from the parent directory name.
533 |
534 | Source tarballs conventionally unpack into a directory that includes both
535 | the project name and a version string. We will also support searching up
536 | two directory levels for an appropriately named parent directory
537 | """
538 | rootdirs = []
539 |
540 | for _ in range(3):
541 | dirname = os.path.basename(root)
542 | if dirname.startswith(parentdir_prefix):
543 | return {"version": dirname[len(parentdir_prefix):],
544 | "full-revisionid": None,
545 | "dirty": False, "error": None, "date": None}
546 | rootdirs.append(root)
547 | root = os.path.dirname(root) # up a level
548 |
549 | if verbose:
550 | print("Tried directories %%s but none started with prefix %%s" %%
551 | (str(rootdirs), parentdir_prefix))
552 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
553 |
554 |
555 | @register_vcs_handler("git", "get_keywords")
556 | def git_get_keywords(versionfile_abs):
557 | """Extract version information from the given file."""
558 | # the code embedded in _version.py can just fetch the value of these
559 | # keywords. When used from setup.py, we don't want to import _version.py,
560 | # so we do it with a regexp instead. This function is not used from
561 | # _version.py.
562 | keywords = {}
563 | try:
564 | with open(versionfile_abs, "r") as fobj:
565 | for line in fobj:
566 | if line.strip().startswith("git_refnames ="):
567 | mo = re.search(r'=\s*"(.*)"', line)
568 | if mo:
569 | keywords["refnames"] = mo.group(1)
570 | if line.strip().startswith("git_full ="):
571 | mo = re.search(r'=\s*"(.*)"', line)
572 | if mo:
573 | keywords["full"] = mo.group(1)
574 | if line.strip().startswith("git_date ="):
575 | mo = re.search(r'=\s*"(.*)"', line)
576 | if mo:
577 | keywords["date"] = mo.group(1)
578 | except OSError:
579 | pass
580 | return keywords
581 |
582 |
583 | @register_vcs_handler("git", "keywords")
584 | def git_versions_from_keywords(keywords, tag_prefix, verbose):
585 | """Get version information from git keywords."""
586 | if "refnames" not in keywords:
587 | raise NotThisMethod("Short version file found")
588 | date = keywords.get("date")
589 | if date is not None:
590 | # Use only the last line. Previous lines may contain GPG signature
591 | # information.
592 | date = date.splitlines()[-1]
593 |
594 | # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant
595 | # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601
596 | # -like" string, which we must then edit to make compliant), because
597 | # it's been around since git-1.5.3, and it's too difficult to
598 | # discover which version we're using, or to work around using an
599 | # older one.
600 | date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
601 | refnames = keywords["refnames"].strip()
602 | if refnames.startswith("$Format"):
603 | if verbose:
604 | print("keywords are unexpanded, not using")
605 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
606 | refs = {r.strip() for r in refnames.strip("()").split(",")}
607 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
608 | # just "foo-1.0". If we see a "tag: " prefix, prefer those.
609 | TAG = "tag: "
610 | tags = {r[len(TAG):] for r in refs if r.startswith(TAG)}
611 | if not tags:
612 | # Either we're using git < 1.8.3, or there really are no tags. We use
613 | # a heuristic: assume all version tags have a digit. The old git %%d
614 | # expansion behaves like git log --decorate=short and strips out the
615 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish
616 | # between branches and tags. By ignoring refnames without digits, we
617 | # filter out many common branch names like "release" and
618 | # "stabilization", as well as "HEAD" and "master".
619 | tags = {r for r in refs if re.search(r'\d', r)}
620 | if verbose:
621 | print("discarding '%%s', no digits" %% ",".join(refs - tags))
622 | if verbose:
623 | print("likely tags: %%s" %% ",".join(sorted(tags)))
624 | for ref in sorted(tags):
625 | # sorting will prefer e.g. "2.0" over "2.0rc1"
626 | if ref.startswith(tag_prefix):
627 | r = ref[len(tag_prefix):]
628 | # Filter out refs that exactly match prefix or that don't start
629 | # with a number once the prefix is stripped (mostly a concern
630 | # when prefix is '')
631 | if not re.match(r'\d', r):
632 | continue
633 | if verbose:
634 | print("picking %%s" %% r)
635 | return {"version": r,
636 | "full-revisionid": keywords["full"].strip(),
637 | "dirty": False, "error": None,
638 | "date": date}
639 | # no suitable tags, so version is "0+unknown", but full hex is still there
640 | if verbose:
641 | print("no suitable tags, using unknown + full revision id")
642 | return {"version": "0+unknown",
643 | "full-revisionid": keywords["full"].strip(),
644 | "dirty": False, "error": "no suitable tags", "date": None}
645 |
646 |
647 | @register_vcs_handler("git", "pieces_from_vcs")
648 | def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command):
649 | """Get version from 'git describe' in the root of the source tree.
650 |
651 | This only gets called if the git-archive 'subst' keywords were *not*
652 | expanded, and _version.py hasn't already been rewritten with a short
653 | version string, meaning we're inside a checked out source tree.
654 | """
655 | GITS = ["git"]
656 | TAG_PREFIX_REGEX = "*"
657 | if sys.platform == "win32":
658 | GITS = ["git.cmd", "git.exe"]
659 | TAG_PREFIX_REGEX = r"\*"
660 |
661 | _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root,
662 | hide_stderr=True)
663 | if rc != 0:
664 | if verbose:
665 | print("Directory %%s not under git control" %% root)
666 | raise NotThisMethod("'git rev-parse --git-dir' returned error")
667 |
668 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
669 | # if there isn't one, this yields HEX[-dirty] (no NUM)
670 | describe_out, rc = runner(GITS, ["describe", "--tags", "--dirty",
671 | "--always", "--long",
672 | "--match",
673 | "%%s%%s" %% (tag_prefix, TAG_PREFIX_REGEX)],
674 | cwd=root)
675 | # --long was added in git-1.5.5
676 | if describe_out is None:
677 | raise NotThisMethod("'git describe' failed")
678 | describe_out = describe_out.strip()
679 | full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root)
680 | if full_out is None:
681 | raise NotThisMethod("'git rev-parse' failed")
682 | full_out = full_out.strip()
683 |
684 | pieces = {}
685 | pieces["long"] = full_out
686 | pieces["short"] = full_out[:7] # maybe improved later
687 | pieces["error"] = None
688 |
689 | branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"],
690 | cwd=root)
691 | # --abbrev-ref was added in git-1.6.3
692 | if rc != 0 or branch_name is None:
693 | raise NotThisMethod("'git rev-parse --abbrev-ref' returned error")
694 | branch_name = branch_name.strip()
695 |
696 | if branch_name == "HEAD":
697 | # If we aren't exactly on a branch, pick a branch which represents
698 | # the current commit. If all else fails, we are on a branchless
699 | # commit.
700 | branches, rc = runner(GITS, ["branch", "--contains"], cwd=root)
701 | # --contains was added in git-1.5.4
702 | if rc != 0 or branches is None:
703 | raise NotThisMethod("'git branch --contains' returned error")
704 | branches = branches.split("\n")
705 |
706 | # Remove the first line if we're running detached
707 | if "(" in branches[0]:
708 | branches.pop(0)
709 |
710 | # Strip off the leading "* " from the list of branches.
711 | branches = [branch[2:] for branch in branches]
712 | if "master" in branches:
713 | branch_name = "master"
714 | elif not branches:
715 | branch_name = None
716 | else:
717 | # Pick the first branch that is returned. Good or bad.
718 | branch_name = branches[0]
719 |
720 | pieces["branch"] = branch_name
721 |
722 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
723 | # TAG might have hyphens.
724 | git_describe = describe_out
725 |
726 | # look for -dirty suffix
727 | dirty = git_describe.endswith("-dirty")
728 | pieces["dirty"] = dirty
729 | if dirty:
730 | git_describe = git_describe[:git_describe.rindex("-dirty")]
731 |
732 | # now we have TAG-NUM-gHEX or HEX
733 |
734 | if "-" in git_describe:
735 | # TAG-NUM-gHEX
736 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
737 | if not mo:
738 | # unparsable. Maybe git-describe is misbehaving?
739 | pieces["error"] = ("unable to parse git-describe output: '%%s'"
740 | %% describe_out)
741 | return pieces
742 |
743 | # tag
744 | full_tag = mo.group(1)
745 | if not full_tag.startswith(tag_prefix):
746 | if verbose:
747 | fmt = "tag '%%s' doesn't start with prefix '%%s'"
748 | print(fmt %% (full_tag, tag_prefix))
749 | pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'"
750 | %% (full_tag, tag_prefix))
751 | return pieces
752 | pieces["closest-tag"] = full_tag[len(tag_prefix):]
753 |
754 | # distance: number of commits since tag
755 | pieces["distance"] = int(mo.group(2))
756 |
757 | # commit: short hex revision ID
758 | pieces["short"] = mo.group(3)
759 |
760 | else:
761 | # HEX: no tags
762 | pieces["closest-tag"] = None
763 | count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root)
764 | pieces["distance"] = int(count_out) # total number of commits
765 |
766 | # commit date: see ISO-8601 comment in git_versions_from_keywords()
767 | date = runner(GITS, ["show", "-s", "--format=%%ci", "HEAD"], cwd=root)[0].strip()
768 | # Use only the last line. Previous lines may contain GPG signature
769 | # information.
770 | date = date.splitlines()[-1]
771 | pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
772 |
773 | return pieces
774 |
775 |
776 | def plus_or_dot(pieces):
777 | """Return a + if we don't already have one, else return a ."""
778 | if "+" in pieces.get("closest-tag", ""):
779 | return "."
780 | return "+"
781 |
782 |
783 | def render_pep440(pieces):
784 | """Build up version string, with post-release "local version identifier".
785 |
786 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
787 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
788 |
789 | Exceptions:
790 | 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
791 | """
792 | if pieces["closest-tag"]:
793 | rendered = pieces["closest-tag"]
794 | if pieces["distance"] or pieces["dirty"]:
795 | rendered += plus_or_dot(pieces)
796 | rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"])
797 | if pieces["dirty"]:
798 | rendered += ".dirty"
799 | else:
800 | # exception #1
801 | rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"],
802 | pieces["short"])
803 | if pieces["dirty"]:
804 | rendered += ".dirty"
805 | return rendered
806 |
807 |
808 | def render_pep440_branch(pieces):
809 | """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] .
810 |
811 | The ".dev0" means not master branch. Note that .dev0 sorts backwards
812 | (a feature branch will appear "older" than the master branch).
813 |
814 | Exceptions:
815 | 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty]
816 | """
817 | if pieces["closest-tag"]:
818 | rendered = pieces["closest-tag"]
819 | if pieces["distance"] or pieces["dirty"]:
820 | if pieces["branch"] != "master":
821 | rendered += ".dev0"
822 | rendered += plus_or_dot(pieces)
823 | rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"])
824 | if pieces["dirty"]:
825 | rendered += ".dirty"
826 | else:
827 | # exception #1
828 | rendered = "0"
829 | if pieces["branch"] != "master":
830 | rendered += ".dev0"
831 | rendered += "+untagged.%%d.g%%s" %% (pieces["distance"],
832 | pieces["short"])
833 | if pieces["dirty"]:
834 | rendered += ".dirty"
835 | return rendered
836 |
837 |
838 | def pep440_split_post(ver):
839 | """Split pep440 version string at the post-release segment.
840 |
841 | Returns the release segments before the post-release and the
842 | post-release version number (or -1 if no post-release segment is present).
843 | """
844 | vc = str.split(ver, ".post")
845 | return vc[0], int(vc[1] or 0) if len(vc) == 2 else None
846 |
847 |
848 | def render_pep440_pre(pieces):
849 | """TAG[.postN.devDISTANCE] -- No -dirty.
850 |
851 | Exceptions:
852 | 1: no tags. 0.post0.devDISTANCE
853 | """
854 | if pieces["closest-tag"]:
855 | if pieces["distance"]:
856 | # update the post release segment
857 | tag_version, post_version = pep440_split_post(pieces["closest-tag"])
858 | rendered = tag_version
859 | if post_version is not None:
860 | rendered += ".post%%d.dev%%d" %% (post_version+1, pieces["distance"])
861 | else:
862 | rendered += ".post0.dev%%d" %% (pieces["distance"])
863 | else:
864 | # no commits, use the tag as the version
865 | rendered = pieces["closest-tag"]
866 | else:
867 | # exception #1
868 | rendered = "0.post0.dev%%d" %% pieces["distance"]
869 | return rendered
870 |
871 |
872 | def render_pep440_post(pieces):
873 | """TAG[.postDISTANCE[.dev0]+gHEX] .
874 |
875 | The ".dev0" means dirty. Note that .dev0 sorts backwards
876 | (a dirty tree will appear "older" than the corresponding clean one),
877 | but you shouldn't be releasing software with -dirty anyways.
878 |
879 | Exceptions:
880 | 1: no tags. 0.postDISTANCE[.dev0]
881 | """
882 | if pieces["closest-tag"]:
883 | rendered = pieces["closest-tag"]
884 | if pieces["distance"] or pieces["dirty"]:
885 | rendered += ".post%%d" %% pieces["distance"]
886 | if pieces["dirty"]:
887 | rendered += ".dev0"
888 | rendered += plus_or_dot(pieces)
889 | rendered += "g%%s" %% pieces["short"]
890 | else:
891 | # exception #1
892 | rendered = "0.post%%d" %% pieces["distance"]
893 | if pieces["dirty"]:
894 | rendered += ".dev0"
895 | rendered += "+g%%s" %% pieces["short"]
896 | return rendered
897 |
898 |
899 | def render_pep440_post_branch(pieces):
900 | """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] .
901 |
902 | The ".dev0" means not master branch.
903 |
904 | Exceptions:
905 | 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty]
906 | """
907 | if pieces["closest-tag"]:
908 | rendered = pieces["closest-tag"]
909 | if pieces["distance"] or pieces["dirty"]:
910 | rendered += ".post%%d" %% pieces["distance"]
911 | if pieces["branch"] != "master":
912 | rendered += ".dev0"
913 | rendered += plus_or_dot(pieces)
914 | rendered += "g%%s" %% pieces["short"]
915 | if pieces["dirty"]:
916 | rendered += ".dirty"
917 | else:
918 | # exception #1
919 | rendered = "0.post%%d" %% pieces["distance"]
920 | if pieces["branch"] != "master":
921 | rendered += ".dev0"
922 | rendered += "+g%%s" %% pieces["short"]
923 | if pieces["dirty"]:
924 | rendered += ".dirty"
925 | return rendered
926 |
927 |
928 | def render_pep440_old(pieces):
929 | """TAG[.postDISTANCE[.dev0]] .
930 |
931 | The ".dev0" means dirty.
932 |
933 | Exceptions:
934 | 1: no tags. 0.postDISTANCE[.dev0]
935 | """
936 | if pieces["closest-tag"]:
937 | rendered = pieces["closest-tag"]
938 | if pieces["distance"] or pieces["dirty"]:
939 | rendered += ".post%%d" %% pieces["distance"]
940 | if pieces["dirty"]:
941 | rendered += ".dev0"
942 | else:
943 | # exception #1
944 | rendered = "0.post%%d" %% pieces["distance"]
945 | if pieces["dirty"]:
946 | rendered += ".dev0"
947 | return rendered
948 |
949 |
950 | def render_git_describe(pieces):
951 | """TAG[-DISTANCE-gHEX][-dirty].
952 |
953 | Like 'git describe --tags --dirty --always'.
954 |
955 | Exceptions:
956 | 1: no tags. HEX[-dirty] (note: no 'g' prefix)
957 | """
958 | if pieces["closest-tag"]:
959 | rendered = pieces["closest-tag"]
960 | if pieces["distance"]:
961 | rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
962 | else:
963 | # exception #1
964 | rendered = pieces["short"]
965 | if pieces["dirty"]:
966 | rendered += "-dirty"
967 | return rendered
968 |
969 |
970 | def render_git_describe_long(pieces):
971 | """TAG-DISTANCE-gHEX[-dirty].
972 |
973 | Like 'git describe --tags --dirty --always -long'.
974 | The distance/hash is unconditional.
975 |
976 | Exceptions:
977 | 1: no tags. HEX[-dirty] (note: no 'g' prefix)
978 | """
979 | if pieces["closest-tag"]:
980 | rendered = pieces["closest-tag"]
981 | rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
982 | else:
983 | # exception #1
984 | rendered = pieces["short"]
985 | if pieces["dirty"]:
986 | rendered += "-dirty"
987 | return rendered
988 |
989 |
990 | def render(pieces, style):
991 | """Render the given version pieces into the requested style."""
992 | if pieces["error"]:
993 | return {"version": "unknown",
994 | "full-revisionid": pieces.get("long"),
995 | "dirty": None,
996 | "error": pieces["error"],
997 | "date": None}
998 |
999 | if not style or style == "default":
1000 | style = "pep440" # the default
1001 |
1002 | if style == "pep440":
1003 | rendered = render_pep440(pieces)
1004 | elif style == "pep440-branch":
1005 | rendered = render_pep440_branch(pieces)
1006 | elif style == "pep440-pre":
1007 | rendered = render_pep440_pre(pieces)
1008 | elif style == "pep440-post":
1009 | rendered = render_pep440_post(pieces)
1010 | elif style == "pep440-post-branch":
1011 | rendered = render_pep440_post_branch(pieces)
1012 | elif style == "pep440-old":
1013 | rendered = render_pep440_old(pieces)
1014 | elif style == "git-describe":
1015 | rendered = render_git_describe(pieces)
1016 | elif style == "git-describe-long":
1017 | rendered = render_git_describe_long(pieces)
1018 | else:
1019 | raise ValueError("unknown style '%%s'" %% style)
1020 |
1021 | return {"version": rendered, "full-revisionid": pieces["long"],
1022 | "dirty": pieces["dirty"], "error": None,
1023 | "date": pieces.get("date")}
1024 |
1025 |
1026 | def get_versions():
1027 | """Get version information or return default if unable to do so."""
1028 | # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
1029 | # __file__, we can work backwards from there to the root. Some
1030 | # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
1031 | # case we can only use expanded keywords.
1032 |
1033 | cfg = get_config()
1034 | verbose = cfg.verbose
1035 |
1036 | try:
1037 | return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
1038 | verbose)
1039 | except NotThisMethod:
1040 | pass
1041 |
1042 | try:
1043 | root = os.path.realpath(__file__)
1044 | # versionfile_source is the relative path from the top of the source
1045 | # tree (where the .git directory might live) to this file. Invert
1046 | # this to find the root from __file__.
1047 | for _ in cfg.versionfile_source.split('/'):
1048 | root = os.path.dirname(root)
1049 | except NameError:
1050 | return {"version": "0+unknown", "full-revisionid": None,
1051 | "dirty": None,
1052 | "error": "unable to find root of source tree",
1053 | "date": None}
1054 |
1055 | try:
1056 | pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
1057 | return render(pieces, cfg.style)
1058 | except NotThisMethod:
1059 | pass
1060 |
1061 | try:
1062 | if cfg.parentdir_prefix:
1063 | return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
1064 | except NotThisMethod:
1065 | pass
1066 |
1067 | return {"version": "0+unknown", "full-revisionid": None,
1068 | "dirty": None,
1069 | "error": "unable to compute version", "date": None}
1070 | '''
1071 |
1072 |
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
    """Extract version information from the given file.

    Scans *versionfile_abs* line by line for the expanded git keyword
    assignments (git_refnames, git_full, git_date) and collects them under
    the keys "refnames", "full" and "date".  Keywords that are absent are
    simply missing from the result; an unreadable file yields {}.
    """
    # the code embedded in _version.py can just fetch the value of these
    # keywords. When used from setup.py, we don't want to import _version.py,
    # so we do it with a regexp instead. This function is not used from
    # _version.py.
    # Map the assignment prefix in the file to the key we store it under.
    wanted = {
        "git_refnames =": "refnames",
        "git_full =": "full",
        "git_date =": "date",
    }
    keywords = {}
    try:
        with open(versionfile_abs, "r") as fobj:
            for line in fobj:
                stripped = line.strip()
                for prefix, key in wanted.items():
                    if not stripped.startswith(prefix):
                        continue
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords[key] = mo.group(1)
    except OSError:
        pass
    return keywords
1099 |
1100 |
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
    """Get version information from git keywords."""
    if "refnames" not in keywords:
        raise NotThisMethod("Short version file found")
    date = keywords.get("date")
    if date is not None:
        # Keep only the final line; earlier lines may carry GPG signature
        # information.
        date = date.splitlines()[-1]

        # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
        # datestamp. However we prefer "%ci" (which expands to an "ISO-8601
        # -like" string, which we must then edit to make compliant), because
        # it's been around since git-1.5.3, and it's too difficult to
        # discover which version we're using, or to work around using an
        # older one.
        date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
    refnames = keywords["refnames"].strip()
    if refnames.startswith("$Format"):
        # The export-subst substitution never ran: not a git-archive tarball.
        if verbose:
            print("keywords are unexpanded, not using")
        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
    refs = {ref.strip() for ref in refnames.strip("()").split(",")}
    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
    TAG = "tag: "
    tags = {ref[len(TAG):] for ref in refs if ref.startswith(TAG)}
    if not tags:
        # Either we're using git < 1.8.3, or there really are no tags. We use
        # a heuristic: assume all version tags have a digit. The old git %d
        # expansion behaves like git log --decorate=short and strips out the
        # refs/heads/ and refs/tags/ prefixes that would let us distinguish
        # between branches and tags. By ignoring refnames without digits, we
        # filter out many common branch names like "release" and
        # "stabilization", as well as "HEAD" and "master".
        tags = {ref for ref in refs if re.search(r"\d", ref)}
        if verbose:
            print("discarding '%s', no digits" % ",".join(refs - tags))
    if verbose:
        print("likely tags: %s" % ",".join(sorted(tags)))
    for ref in sorted(tags):
        # sorting will prefer e.g. "2.0" over "2.0rc1"
        if not ref.startswith(tag_prefix):
            continue
        candidate = ref[len(tag_prefix):]
        # Filter out refs that exactly match prefix or that don't start
        # with a number once the prefix is stripped (mostly a concern
        # when prefix is '')
        if not re.match(r"\d", candidate):
            continue
        if verbose:
            print("picking %s" % candidate)
        return {
            "version": candidate,
            "full-revisionid": keywords["full"].strip(),
            "dirty": False,
            "error": None,
            "date": date,
        }
    # no suitable tags, so version is "0+unknown", but full hex is still there
    if verbose:
        print("no suitable tags, using unknown + full revision id")
    return {
        "version": "0+unknown",
        "full-revisionid": keywords["full"].strip(),
        "dirty": False,
        "error": "no suitable tags",
        "date": None,
    }
1170 |
1171 |
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command):
    """Get version from 'git describe' in the root of the source tree.

    This only gets called if the git-archive 'subst' keywords were *not*
    expanded, and _version.py hasn't already been rewritten with a short
    version string, meaning we're inside a checked out source tree.

    Returns a "pieces" dict with keys "long", "short", "branch",
    "closest-tag", "distance", "dirty", "error" and "date", as assembled
    below.  Raises NotThisMethod when *root* is not under git control or
    when the individual git commands fail.
    """
    GITS = ["git"]
    # Glob handed to "describe --match" below to restrict it to our tags;
    # escaped differently on Windows.
    TAG_PREFIX_REGEX = "*"
    if sys.platform == "win32":
        GITS = ["git.cmd", "git.exe"]
        TAG_PREFIX_REGEX = r"\*"

    # Cheap probe first: is this directory a git checkout at all?
    _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True)
    if rc != 0:
        if verbose:
            print("Directory %s not under git control" % root)
        raise NotThisMethod("'git rev-parse --git-dir' returned error")

    # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
    # if there isn't one, this yields HEX[-dirty] (no NUM)
    describe_out, rc = runner(
        GITS,
        [
            "describe",
            "--tags",
            "--dirty",
            "--always",
            "--long",
            "--match",
            "%s%s" % (tag_prefix, TAG_PREFIX_REGEX),
        ],
        cwd=root,
    )
    # --long was added in git-1.5.5
    if describe_out is None:
        raise NotThisMethod("'git describe' failed")
    describe_out = describe_out.strip()
    full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root)
    if full_out is None:
        raise NotThisMethod("'git rev-parse' failed")
    full_out = full_out.strip()

    pieces = {}
    pieces["long"] = full_out
    pieces["short"] = full_out[:7] # maybe improved later
    pieces["error"] = None

    branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], cwd=root)
    # --abbrev-ref was added in git-1.6.3
    if rc != 0 or branch_name is None:
        raise NotThisMethod("'git rev-parse --abbrev-ref' returned error")
    branch_name = branch_name.strip()

    if branch_name == "HEAD":
        # If we aren't exactly on a branch, pick a branch which represents
        # the current commit. If all else fails, we are on a branchless
        # commit.
        branches, rc = runner(GITS, ["branch", "--contains"], cwd=root)
        # --contains was added in git-1.5.4
        if rc != 0 or branches is None:
            raise NotThisMethod("'git branch --contains' returned error")
        branches = branches.split("\n")

        # Remove the first line if we're running detached
        if "(" in branches[0]:
            branches.pop(0)

        # Strip off the leading "* " from the list of branches.
        branches = [branch[2:] for branch in branches]
        if "master" in branches:
            branch_name = "master"
        elif not branches:
            branch_name = None
        else:
            # Pick the first branch that is returned. Good or bad.
            branch_name = branches[0]

    pieces["branch"] = branch_name

    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
    # TAG might have hyphens.
    git_describe = describe_out

    # look for -dirty suffix
    dirty = git_describe.endswith("-dirty")
    pieces["dirty"] = dirty
    if dirty:
        git_describe = git_describe[: git_describe.rindex("-dirty")]

    # now we have TAG-NUM-gHEX or HEX

    if "-" in git_describe:
        # TAG-NUM-gHEX
        mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe)
        if not mo:
            # unparsable. Maybe git-describe is misbehaving?
            pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out
            return pieces

        # tag
        full_tag = mo.group(1)
        if not full_tag.startswith(tag_prefix):
            if verbose:
                fmt = "tag '%s' doesn't start with prefix '%s'"
                print(fmt % (full_tag, tag_prefix))
            pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % (
                full_tag,
                tag_prefix,
            )
            return pieces
        pieces["closest-tag"] = full_tag[len(tag_prefix) :]

        # distance: number of commits since tag
        pieces["distance"] = int(mo.group(2))

        # commit: short hex revision ID
        pieces["short"] = mo.group(3)

    else:
        # HEX: no tags
        pieces["closest-tag"] = None
        count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root)
        pieces["distance"] = int(count_out) # total number of commits

    # commit date: see ISO-8601 comment in git_versions_from_keywords()
    date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip()
    # Use only the last line. Previous lines may contain GPG signature
    # information.
    date = date.splitlines()[-1]
    pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)

    return pieces
1306 |
1307 |
def do_vcs_install(manifest_in, versionfile_source, ipy):
    """Git-specific installation logic for Versioneer.

    Makes sure .gitattributes marks the version file for export-subst
    keyword substitution, then stages all versioneer-related files with
    "git add" so the user only needs to commit.
    """
    git_cmds = ["git.cmd", "git.exe"] if sys.platform == "win32" else ["git"]
    tracked = [manifest_in, versionfile_source]
    if ipy:
        tracked.append(ipy)
    # Also stage this very script; normalize compiled-file paths back to .py.
    try:
        self_path = __file__
        if self_path.endswith(".pyc") or self_path.endswith(".pyo"):
            self_path = os.path.splitext(self_path)[0] + ".py"
        tracked.append(os.path.relpath(self_path))
    except NameError:
        tracked.append("versioneer.py")
    # Check whether export-subst is already configured for the version file.
    already_marked = False
    try:
        with open(".gitattributes", "r") as attrs:
            for line in attrs:
                stripped = line.strip()
                if stripped.startswith(versionfile_source):
                    if "export-subst" in stripped.split()[1:]:
                        already_marked = True
                        break
    except OSError:
        pass
    if not already_marked:
        with open(".gitattributes", "a+") as attrs:
            attrs.write(f"{versionfile_source} export-subst\n")
        tracked.append(".gitattributes")
    run_command(git_cmds, ["add", "--"] + tracked)
1343 |
1344 |
def versions_from_parentdir(parentdir_prefix, root, verbose):
    """Try to determine the version from the parent directory name.

    Source tarballs conventionally unpack into a directory that includes both
    the project name and a version string.  Starting at *root* and walking up
    to two levels, look for a directory whose basename starts with
    *parentdir_prefix* and take the remainder as the version.  Raises
    NotThisMethod when nothing matches.
    """
    tried = []
    probe = root
    for _ in range(3):
        basename = os.path.basename(probe)
        if basename.startswith(parentdir_prefix):
            return {
                "version": basename[len(parentdir_prefix):],
                "full-revisionid": None,
                "dirty": False,
                "error": None,
                "date": None,
            }
        tried.append(probe)
        probe = os.path.dirname(probe)  # up a level
    if verbose:
        print(
            "Tried directories %s but none started with prefix %s"
            % (str(tried), parentdir_prefix)
        )
    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
1373 |
1374 |
# Template for the short _version.py that write_to_version_file() renders
# into the project: the lone %s receives the json.dumps() of the computed
# version dict, so installed/sdist copies can report their version without
# any VCS access at runtime.
SHORT_VERSION_PY = """
# This file was generated by 'versioneer.py' (0.21) from
# revision-control system data, or from the parent directory name of an
# unpacked source archive. Distribution tarballs contain a pre-generated copy
# of this file.

import json

version_json = '''
%s
''' # END VERSION_JSON


def get_versions():
    return json.loads(version_json)
"""
1391 |
1392 |
def versions_from_file(filename):
    """Try to determine the version from _version.py if present.

    Reads the JSON blob that write_to_version_file() embedded between the
    version_json markers and returns it as a dict.  Raises NotThisMethod
    when the file is unreadable or carries no embedded version.
    """
    try:
        with open(filename) as fh:
            contents = fh.read()
    except OSError:
        raise NotThisMethod("unable to read _version.py")
    # The file may have been written with Unix or DOS line endings; accept
    # either form of the delimiter.
    patterns = (
        r"version_json = '''\n(.*)''' # END VERSION_JSON",
        r"version_json = '''\r\n(.*)''' # END VERSION_JSON",
    )
    for pattern in patterns:
        mo = re.search(pattern, contents, re.M | re.S)
        if mo:
            return json.loads(mo.group(1))
    raise NotThisMethod("no version_json in _version.py")
1410 |
1411 |
def write_to_version_file(filename, versions):
    """Write the given version number to the given _version.py file.

    Renders *versions* (a dict) as JSON into the SHORT_VERSION_PY template
    and replaces *filename* with the result.

    The old file is unlinked first (rather than truncated in place) so any
    hard links are broken; a missing file is tolerated, which makes the
    very first write work instead of raising FileNotFoundError.
    """
    try:
        os.unlink(filename)
    except FileNotFoundError:
        # First-ever write: nothing to remove.
        pass
    contents = json.dumps(versions, sort_keys=True, indent=1, separators=(",", ": "))
    with open(filename, "w") as f:
        f.write(SHORT_VERSION_PY % contents)

    print("set %s to '%s'" % (filename, versions["version"]))
1420 |
1421 |
def plus_or_dot(pieces):
    """Return a + if we don't already have one, else return a .

    Chooses the separator for appending local-version segments: PEP 440
    allows only one "+" in a local version identifier, so once the closest
    tag already contains one, further segments are joined with ".".
    """
    # "or ''" also guards against an explicit None value for "closest-tag",
    # which dict.get's default alone would not catch.
    if "+" in (pieces.get("closest-tag") or ""):
        return "."
    return "+"
1427 |
1428 |
def render_pep440(pieces):
    """Build up version string, with post-release "local version identifier".

    Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
    get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty

    Exceptions:
    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1: nothing tagged anywhere in history
        version = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"])
        if pieces["dirty"]:
            version += ".dirty"
        return version
    version = tag
    if pieces["distance"] or pieces["dirty"]:
        version += plus_or_dot(pieces)
        version += "%d.g%s" % (pieces["distance"], pieces["short"])
        if pieces["dirty"]:
            version += ".dirty"
    return version
1451 |
1452 |
def render_pep440_branch(pieces):
    """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] .

    The ".dev0" means not master branch. Note that .dev0 sorts backwards
    (a feature branch will appear "older" than the master branch).

    Exceptions:
    1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty]
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1
        version = "0"
        if pieces["branch"] != "master":
            version += ".dev0"
        version += "+untagged.%d.g%s" % (pieces["distance"], pieces["short"])
        if pieces["dirty"]:
            version += ".dirty"
        return version
    version = tag
    if pieces["distance"] or pieces["dirty"]:
        if pieces["branch"] != "master":
            version += ".dev0"
        version += plus_or_dot(pieces)
        version += "%d.g%s" % (pieces["distance"], pieces["short"])
        if pieces["dirty"]:
            version += ".dirty"
    return version
1480 |
1481 |
def pep440_split_post(ver):
    """Split pep440 version string at the post-release segment.

    Returns a (release, post) tuple: the version up to (and excluding) any
    ".postN" suffix, plus the post-release number as an int, or None when
    no post-release segment is present.  A bare ".post" counts as post 0.

    (The previous docstring claimed -1 for the missing case, but the
    implementation has always returned None, which callers rely on.)
    """
    parts = ver.split(".post")
    if len(parts) != 2:
        # No ".post" at all -- or a malformed version containing several.
        return parts[0], None
    return parts[0], int(parts[1] or 0)
1490 |
1491 |
def render_pep440_pre(pieces):
    """TAG[.postN.devDISTANCE] -- No -dirty.

    Exceptions:
    1: no tags. 0.post0.devDISTANCE
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1
        return "0.post0.dev%d" % pieces["distance"]
    if not pieces["distance"]:
        # sitting exactly on the tag: the tag is the version
        return tag
    # Bump (or introduce) the post-release segment and record the distance
    # as a dev release.
    tag_version, post_version = pep440_split_post(tag)
    if post_version is None:
        return tag_version + ".post0.dev%d" % (pieces["distance"])
    return tag_version + ".post%d.dev%d" % (post_version + 1, pieces["distance"])
1514 |
1515 |
def render_pep440_post(pieces):
    """TAG[.postDISTANCE[.dev0]+gHEX] .

    The ".dev0" means dirty. Note that .dev0 sorts backwards
    (a dirty tree will appear "older" than the corresponding clean one),
    but you shouldn't be releasing software with -dirty anyways.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1
        version = "0.post%d" % pieces["distance"]
        if pieces["dirty"]:
            version += ".dev0"
        return version + "+g%s" % pieces["short"]
    version = tag
    if pieces["distance"] or pieces["dirty"]:
        version += ".post%d" % pieces["distance"]
        if pieces["dirty"]:
            version += ".dev0"
        version += plus_or_dot(pieces)
        version += "g%s" % pieces["short"]
    return version
1541 |
1542 |
def render_pep440_post_branch(pieces):
    """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] .

    The ".dev0" means not master branch.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty]
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1
        version = "0.post%d" % pieces["distance"]
        if pieces["branch"] != "master":
            version += ".dev0"
        version += "+g%s" % pieces["short"]
        if pieces["dirty"]:
            version += ".dirty"
        return version
    version = tag
    if pieces["distance"] or pieces["dirty"]:
        version += ".post%d" % pieces["distance"]
        if pieces["branch"] != "master":
            version += ".dev0"
        version += plus_or_dot(pieces)
        version += "g%s" % pieces["short"]
        if pieces["dirty"]:
            version += ".dirty"
    return version
1570 |
1571 |
def render_pep440_old(pieces):
    """TAG[.postDISTANCE[.dev0]] .

    The ".dev0" means dirty.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    tag = pieces["closest-tag"]
    if tag:
        version = tag
        if pieces["distance"] or pieces["dirty"]:
            version += ".post%d" % pieces["distance"]
            if pieces["dirty"]:
                version += ".dev0"
        return version
    # exception #1
    version = "0.post%d" % pieces["distance"]
    if pieces["dirty"]:
        version += ".dev0"
    return version
1592 |
1593 |
def render_git_describe(pieces):
    """TAG[-DISTANCE-gHEX][-dirty].

    Like 'git describe --tags --dirty --always'.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    tag = pieces["closest-tag"]
    if tag:
        version = tag
        if pieces["distance"]:
            version += "-%d-g%s" % (pieces["distance"], pieces["short"])
    else:
        # exception #1: fall back to the bare short hash
        version = pieces["short"]
    return (version + "-dirty") if pieces["dirty"] else version
1612 |
1613 |
def render_git_describe_long(pieces):
    """TAG-DISTANCE-gHEX[-dirty].

    Like 'git describe --tags --dirty --always -long'.
    The distance/hash is unconditional.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    tag = pieces["closest-tag"]
    if tag:
        # distance and hash are appended even when sitting on the tag
        version = "%s-%d-g%s" % (tag, pieces["distance"], pieces["short"])
    else:
        # exception #1
        version = pieces["short"]
    if pieces["dirty"]:
        version += "-dirty"
    return version
1632 |
1633 |
def render(pieces, style):
    """Render the given version pieces into the requested style."""
    if pieces["error"]:
        # Upstream parsing failed; surface the error instead of a version.
        return {
            "version": "unknown",
            "full-revisionid": pieces.get("long"),
            "dirty": None,
            "error": pieces["error"],
            "date": None,
        }

    if not style or style == "default":
        style = "pep440"  # the default

    # Dispatch table replaces the long if/elif chain; looked up lazily so
    # the error path above never touches the renderer functions.
    renderers = {
        "pep440": render_pep440,
        "pep440-branch": render_pep440_branch,
        "pep440-pre": render_pep440_pre,
        "pep440-post": render_pep440_post,
        "pep440-post-branch": render_pep440_post_branch,
        "pep440-old": render_pep440_old,
        "git-describe": render_git_describe,
        "git-describe-long": render_git_describe_long,
    }
    if style not in renderers:
        raise ValueError("unknown style '%s'" % style)
    rendered = renderers[style](pieces)

    return {
        "version": rendered,
        "full-revisionid": pieces["long"],
        "dirty": pieces["dirty"],
        "error": None,
        "date": pieces.get("date"),
    }
1674 |
1675 |
class VersioneerBadRootError(Exception):
    """Raised when the project root directory is unknown or missing key files."""
1678 |
1679 |
def get_versions(verbose=False):
    """Get the project version from whatever source is available.

    Returns dict with two keys: 'version' and 'full'.
    """
    if "versioneer" in sys.modules:
        # see the discussion in cmdclass.py:get_cmdclass()
        del sys.modules["versioneer"]

    root = get_root()
    cfg = get_config_from_root(root)

    assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg"
    handlers = HANDLERS.get(cfg.VCS)
    assert handlers, "unrecognized VCS '%s'" % cfg.VCS
    verbose = verbose or cfg.verbose
    assert cfg.versionfile_source is not None, "please set versioneer.versionfile_source"
    assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix"

    versionfile_abs = os.path.join(root, cfg.versionfile_source)

    # Extraction strategies, tried in order: expanded VCS keywords inside
    # _version.py, the committed _version.py itself, a live VCS query
    # (e.g. 'git describe'), and finally the parent-directory naming
    # convention.  This covers developer checkouts, sdist tarballs, and
    # 'git archive' / download-from-tag tarballs.  Each strategy signals
    # "not applicable" by raising NotThisMethod.

    keywords_getter = handlers.get("get_keywords")
    keywords_renderer = handlers.get("keywords")
    if keywords_getter and keywords_renderer:
        try:
            result = keywords_renderer(
                keywords_getter(versionfile_abs), cfg.tag_prefix, verbose
            )
            if verbose:
                print("got version from expanded keyword %s" % result)
            return result
        except NotThisMethod:
            pass

    try:
        result = versions_from_file(versionfile_abs)
        if verbose:
            print("got version from file %s %s" % (versionfile_abs, result))
        return result
    except NotThisMethod:
        pass

    pieces_getter = handlers.get("pieces_from_vcs")
    if pieces_getter:
        try:
            result = render(pieces_getter(cfg.tag_prefix, root, verbose), cfg.style)
            if verbose:
                print("got version from VCS %s" % result)
            return result
        except NotThisMethod:
            pass

    if cfg.parentdir_prefix:
        try:
            result = versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
            if verbose:
                print("got version from parentdir %s" % result)
            return result
        except NotThisMethod:
            pass

    if verbose:
        print("unable to compute version")

    # Every strategy failed; return a sentinel rather than raising.
    return {
        "version": "0+unknown",
        "full-revisionid": None,
        "dirty": None,
        "error": "unable to compute version",
        "date": None,
    }
1759 |
1760 |
def get_version():
    """Get the short version string for this project."""
    versions = get_versions()
    return versions["version"]
1764 |
1765 |
def get_cmdclass(cmdclass=None):
    """Get the custom setuptools/distutils subclasses used by Versioneer.

    If the package uses a different cmdclass (e.g. one from numpy), it
    should be provided as an argument.  Returns a dict mapping command
    names to command classes, suitable for ``setup(cmdclass=...)``.
    """
    if "versioneer" in sys.modules:
        del sys.modules["versioneer"]
    # this fixes the "python setup.py develop" case (also 'install' and
    # 'easy_install .'), in which subdependencies of the main project are
    # built (using setup.py bdist_egg) in the same python process. Assume
    # a main project A and a dependency B, which use different versions
    # of Versioneer. A's setup.py imports A's Versioneer, leaving it in
    # sys.modules by the time B's setup.py is executed, causing B to run
    # with the wrong versioneer. Setuptools wraps the sub-dep builds in a
    # sandbox that restores sys.modules to it's pre-build state, so the
    # parent is protected against the child's "import versioneer". By
    # removing ourselves from sys.modules here, before the child build
    # happens, we protect the child from the parent's versioneer too.
    # Also see https://github.com/python-versioneer/python-versioneer/issues/52

    # Start from a copy of the caller's cmdclass so we never mutate it.
    cmds = {} if cmdclass is None else cmdclass.copy()

    # we add "version" to both distutils and setuptools
    from distutils.core import Command

    class cmd_version(Command):
        """Report the Versioneer-computed version ('setup.py version')."""

        description = "report generated version string"
        user_options = []
        boolean_options = []

        def initialize_options(self):
            pass

        def finalize_options(self):
            pass

        def run(self):
            # verbose=True so the chosen extraction strategy is printed too.
            vers = get_versions(verbose=True)
            print("Version: %s" % vers["version"])
            print(" full-revisionid: %s" % vers.get("full-revisionid"))
            print(" dirty: %s" % vers.get("dirty"))
            print(" date: %s" % vers.get("date"))
            if vers["error"]:
                print(" error: %s" % vers["error"])

    cmds["version"] = cmd_version

    # we override "build_py" in both distutils and setuptools
    #
    # most invocation pathways end up running build_py:
    #  distutils/build -> build_py
    #  distutils/install -> distutils/build ->..
    #  setuptools/bdist_wheel -> distutils/install ->..
    #  setuptools/bdist_egg -> distutils/install_lib -> build_py
    #  setuptools/install -> bdist_egg ->..
    #  setuptools/develop -> ?
    #  pip install:
    #   copies source tree to a tempdir before running egg_info/etc
    #   if .git isn't copied too, 'git describe' will fail
    #   then does setup.py bdist_wheel, or sometimes setup.py install
    #  setup.py egg_info -> ?

    # we override different "build_py" commands for both environments
    if "build_py" in cmds:
        _build_py = cmds["build_py"]
    elif "setuptools" in sys.modules:
        from setuptools.command.build_py import build_py as _build_py
    else:
        from distutils.command.build_py import build_py as _build_py

    class cmd_build_py(_build_py):
        """build_py that rewrites the built _version.py with a static version."""

        def run(self):
            root = get_root()
            cfg = get_config_from_root(root)
            versions = get_versions()
            _build_py.run(self)
            # now locate _version.py in the new build/ directory and replace
            # it with an updated value
            if cfg.versionfile_build:
                target_versionfile = os.path.join(self.build_lib, cfg.versionfile_build)
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)

    cmds["build_py"] = cmd_build_py

    if "build_ext" in cmds:
        _build_ext = cmds["build_ext"]
    elif "setuptools" in sys.modules:
        from setuptools.command.build_ext import build_ext as _build_ext
    else:
        from distutils.command.build_ext import build_ext as _build_ext

    class cmd_build_ext(_build_ext):
        """build_ext that rewrites the built _version.py with a static version."""

        def run(self):
            root = get_root()
            cfg = get_config_from_root(root)
            versions = get_versions()
            _build_ext.run(self)
            if self.inplace:
                # build_ext --inplace will only build extensions in
                # build/lib<..> dir with no _version.py to write to.
                # As in place builds will already have a _version.py
                # in the module dir, we do not need to write one.
                return
            # now locate _version.py in the new build/ directory and replace
            # it with an updated value
            target_versionfile = os.path.join(self.build_lib, cfg.versionfile_build)
            print("UPDATING %s" % target_versionfile)
            write_to_version_file(target_versionfile, versions)

    cmds["build_ext"] = cmd_build_ext

    if "cx_Freeze" in sys.modules:  # cx_freeze enabled?
        from cx_Freeze.dist import build_exe as _build_exe

        # nczeczulin reports that py2exe won't like the pep440-style string
        # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g.
        # setup(console=[{
        # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION
        # "product_version": versioneer.get_version(),
        # ...

        class cmd_build_exe(_build_exe):
            """build_exe that freezes a static _version.py into the build."""

            def run(self):
                root = get_root()
                cfg = get_config_from_root(root)
                versions = get_versions()
                # Temporarily replace the source _version.py with a static
                # version so the frozen executable carries it.
                target_versionfile = cfg.versionfile_source
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)

                _build_exe.run(self)
                # Restore the dynamic VCS-aware template afterwards.
                os.unlink(target_versionfile)
                with open(cfg.versionfile_source, "w") as f:
                    LONG = LONG_VERSION_PY[cfg.VCS]
                    f.write(
                        LONG
                        % {
                            "DOLLAR": "$",
                            "STYLE": cfg.style,
                            "TAG_PREFIX": cfg.tag_prefix,
                            "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                            "VERSIONFILE_SOURCE": cfg.versionfile_source,
                        }
                    )

        cmds["build_exe"] = cmd_build_exe
        # build_exe rewrites the source _version.py itself, so the build_py
        # override is dropped under cx_Freeze.
        del cmds["build_py"]

    if "py2exe" in sys.modules:  # py2exe enabled?
        from py2exe.distutils_buildexe import py2exe as _py2exe

        class cmd_py2exe(_py2exe):
            """py2exe command that freezes a static _version.py into the build."""

            def run(self):
                root = get_root()
                cfg = get_config_from_root(root)
                versions = get_versions()
                # Same temporary-replace/restore dance as cmd_build_exe above.
                target_versionfile = cfg.versionfile_source
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)

                _py2exe.run(self)
                os.unlink(target_versionfile)
                with open(cfg.versionfile_source, "w") as f:
                    LONG = LONG_VERSION_PY[cfg.VCS]
                    f.write(
                        LONG
                        % {
                            "DOLLAR": "$",
                            "STYLE": cfg.style,
                            "TAG_PREFIX": cfg.tag_prefix,
                            "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                            "VERSIONFILE_SOURCE": cfg.versionfile_source,
                        }
                    )

        cmds["py2exe"] = cmd_py2exe

    # we override different "sdist" commands for both environments
    if "sdist" in cmds:
        _sdist = cmds["sdist"]
    elif "setuptools" in sys.modules:
        from setuptools.command.sdist import sdist as _sdist
    else:
        from distutils.command.sdist import sdist as _sdist

    class cmd_sdist(_sdist):
        """sdist that ships a static _version.py inside the tarball."""

        def run(self):
            versions = get_versions()
            self._versioneer_generated_versions = versions
            # unless we update this, the command will keep using the old
            # version
            self.distribution.metadata.version = versions["version"]
            return _sdist.run(self)

        def make_release_tree(self, base_dir, files):
            root = get_root()
            cfg = get_config_from_root(root)
            _sdist.make_release_tree(self, base_dir, files)
            # now locate _version.py in the new base_dir directory
            # (remembering that it may be a hardlink) and replace it with an
            # updated value
            target_versionfile = os.path.join(base_dir, cfg.versionfile_source)
            print("UPDATING %s" % target_versionfile)
            write_to_version_file(
                target_versionfile, self._versioneer_generated_versions
            )

    cmds["sdist"] = cmd_sdist

    return cmds
1978 |
1979 |
# Error text printed by do_setup() when setup.cfg lacks a usable
# [versioneer] section.
CONFIG_ERROR = """
setup.cfg is missing the necessary Versioneer configuration. You need
a section like:

 [versioneer]
 VCS = git
 style = pep440
 versionfile_source = src/myproject/_version.py
 versionfile_build = myproject/_version.py
 tag_prefix =
 parentdir_prefix = myproject-

 You will also need to edit your setup.py to use the results:

 import versioneer
 setup(version=versioneer.get_version(),
 cmdclass=versioneer.get_cmdclass(), ...)

Please read the docstring in ./versioneer.py for configuration instructions,
edit setup.cfg, and re-run the installer or 'python versioneer.py setup'.
"""

# Commented-out sample section appended to setup.cfg by do_setup() when no
# [versioneer] configuration is found at all.
SAMPLE_CONFIG = """
# See the docstring in versioneer.py for instructions. Note that you must
# re-run 'versioneer.py setup' after changing this section, and commit the
# resulting files.

[versioneer]
#VCS = git
#style = pep440
#versionfile_source =
#versionfile_build =
#tag_prefix =
#parentdir_prefix =

"""

# __init__.py boilerplate written by older Versioneer releases; do_setup()
# replaces it with INIT_PY_SNIPPET when found.
OLD_SNIPPET = """
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
"""

# Current __init__.py boilerplate; {0} is the version-file module name
# (normally "_version").
INIT_PY_SNIPPET = """
from . import {0}
__version__ = {0}.get_versions()['version']
"""
2027 |
2028 |
def do_setup():
    """Do main VCS-independent setup function for installing Versioneer.

    Writes the generated _version.py, patches the package __init__.py,
    ensures MANIFEST.in includes the needed files, and performs the
    VCS-specific install step.  Returns 0 on success, 1 on bad config.
    """
    root = get_root()
    try:
        cfg = get_config_from_root(root)
    except (OSError, configparser.NoSectionError, configparser.NoOptionError) as e:
        # Config is missing or incomplete: seed setup.cfg with a sample
        # [versioneer] section (unless only a single option was missing),
        # then tell the user what to fill in.
        if isinstance(e, (OSError, configparser.NoSectionError)):
            print("Adding sample versioneer config to setup.cfg", file=sys.stderr)
            with open(os.path.join(root, "setup.cfg"), "a") as f:
                f.write(SAMPLE_CONFIG)
        print(CONFIG_ERROR, file=sys.stderr)
        return 1

    # Generate PKG/_version.py from the VCS-specific template.
    print(" creating %s" % cfg.versionfile_source)
    with open(cfg.versionfile_source, "w") as f:
        LONG = LONG_VERSION_PY[cfg.VCS]
        f.write(
            LONG
            % {
                "DOLLAR": "$",
                "STYLE": cfg.style,
                "TAG_PREFIX": cfg.tag_prefix,
                "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                "VERSIONFILE_SOURCE": cfg.versionfile_source,
            }
        )

    # Ensure the package __init__.py exposes __version__ via _version.py,
    # upgrading the boilerplate left behind by older Versioneer releases.
    ipy = os.path.join(os.path.dirname(cfg.versionfile_source), "__init__.py")
    if os.path.exists(ipy):
        try:
            with open(ipy, "r") as f:
                old = f.read()
        except OSError:
            old = ""
        module = os.path.splitext(os.path.basename(cfg.versionfile_source))[0]
        snippet = INIT_PY_SNIPPET.format(module)
        if OLD_SNIPPET in old:
            print(" replacing boilerplate in %s" % ipy)
            with open(ipy, "w") as f:
                f.write(old.replace(OLD_SNIPPET, snippet))
        elif snippet not in old:
            print(" appending to %s" % ipy)
            with open(ipy, "a") as f:
                f.write(snippet)
        else:
            print(" %s unmodified" % ipy)
    else:
        print(" %s doesn't exist, ok" % ipy)
        ipy = None

    # Make sure both the top-level "versioneer.py" and versionfile_source
    # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so
    # they'll be copied into source distributions. Pip won't be able to
    # install the package without this.
    manifest_in = os.path.join(root, "MANIFEST.in")
    simple_includes = set()
    try:
        with open(manifest_in, "r") as f:
            for line in f:
                if line.startswith("include "):
                    for include in line.split()[1:]:
                        simple_includes.add(include)
    except OSError:
        pass
    # That doesn't cover everything MANIFEST.in can do
    # (http://docs.python.org/2/distutils/sourcedist.html#commands), so
    # it might give some false negatives. Appending redundant 'include'
    # lines is safe, though.
    if "versioneer.py" not in simple_includes:
        print(" appending 'versioneer.py' to MANIFEST.in")
        with open(manifest_in, "a") as f:
            f.write("include versioneer.py\n")
    else:
        print(" 'versioneer.py' already in MANIFEST.in")
    if cfg.versionfile_source not in simple_includes:
        print(
            " appending versionfile_source ('%s') to MANIFEST.in"
            % cfg.versionfile_source
        )
        with open(manifest_in, "a") as f:
            f.write("include %s\n" % cfg.versionfile_source)
    else:
        print(" versionfile_source already in MANIFEST.in")

    # Make VCS-specific changes. For git, this means creating/changing
    # .gitattributes to mark _version.py for export-subst keyword
    # substitution.
    do_vcs_install(manifest_in, cfg.versionfile_source, ipy)
    return 0
2118 |
2119 |
def scan_setup_py():
    """Validate the contents of setup.py against Versioneer's expectations.

    Returns the number of problems found (0 means setup.py looks good).
    """
    # Markers that must appear somewhere in setup.py, with the label we
    # record when each one is seen.
    required = {
        "import versioneer": "import",
        "versioneer.get_cmdclass()": "cmdclass",
        "versioneer.get_version()": "get_version",
    }
    # Obsolete module-level setters that now belong in setup.cfg.
    obsolete = ("versioneer.VCS", "versioneer.versionfile_source")

    found = set()
    setters = False
    errors = 0
    with open("setup.py", "r") as setup_file:
        for line in setup_file:
            for marker, label in required.items():
                if marker in line:
                    found.add(label)
            if any(marker in line for marker in obsolete):
                setters = True

    if len(found) != len(required):
        print("")
        print("Your setup.py appears to be missing some important items")
        print("(but I might be wrong). Please make sure it has something")
        print("roughly like the following:")
        print("")
        print(" import versioneer")
        print(" setup( version=versioneer.get_version(),")
        print(" cmdclass=versioneer.get_cmdclass(), ...)")
        print("")
        errors += 1
    if setters:
        print("You should remove lines like 'versioneer.VCS = ' and")
        print("'versioneer.versionfile_source = ' . This configuration")
        print("now lives in setup.cfg, and should be removed from setup.py")
        print("")
        errors += 1
    return errors
2155 |
2156 |
if __name__ == "__main__":
    # Dispatch on the sub-command given on the command line.  Running the
    # script with no arguments used to crash with IndexError on
    # sys.argv[1]; print a usage hint instead.  Unrecognized sub-commands
    # are still silently ignored, as before.
    cmd = sys.argv[1] if len(sys.argv) > 1 else None
    if cmd is None:
        print("usage: versioneer.py setup", file=sys.stderr)
        sys.exit(1)
    if cmd == "setup":
        errors = do_setup()
        errors += scan_setup_py()
        if errors:
            sys.exit(1)
2164 |
--------------------------------------------------------------------------------