├── .github
│   ├── dependabot.yml
│   ├── pull_request_template.md
│   └── workflows
│       ├── ci.yml
│       ├── documentation.yaml
│       ├── pip_install.yml
│       ├── poetry_build.yml
│       ├── poetry_update.yml
│       ├── precommit.yml
│       ├── project-management.yml
│       └── publish_pypi.yml
├── .gitignore
├── .pre-commit-config.yaml
├── CHANGELOG.md
├── LICENSE
├── README.md
├── docs
│   ├── .gitignore
│   ├── changelog.md
│   ├── development.md
│   ├── doc-requirements.txt
│   ├── extra.css
│   ├── gen_ref_pages.py
│   ├── index.md
│   ├── install.md
│   ├── overrides
│   │   └── 404.html
│   ├── parser.py
│   └── versions.md
├── fractal_client
│   ├── __init__.py
│   ├── authclient.py
│   ├── client.py
│   ├── cmd
│   │   ├── __init__.py
│   │   ├── _aux_task_caching.py
│   │   ├── _dataset.py
│   │   ├── _group.py
│   │   ├── _job.py
│   │   ├── _project.py
│   │   ├── _task.py
│   │   ├── _task_collection.py
│   │   ├── _user.py
│   │   └── _workflow.py
│   ├── config.py
│   ├── interface.py
│   ├── parser.py
│   └── response.py
├── mkdocs.yml
├── poetry.lock
├── pyproject.toml
└── tests
    ├── .gitignore
    ├── __init__.py
    ├── conftest.py
    ├── data
    │   ├── .gitignore
    │   ├── import-export
    │   │   ├── wf3.json
    │   │   ├── workflow.json
    │   │   └── workflow_2.json
    │   └── task_edit_json
    │       ├── default_args.json
    │       └── meta.json
    ├── fixtures_testserver.py
    ├── test_client.py
    ├── test_dataset.py
    ├── test_fixtures.py
    ├── test_group.py
    ├── test_invalid_commands.py
    ├── test_job.py
    ├── test_main.py
    ├── test_project.py
    ├── test_task.py
    ├── test_task_collection.py
    ├── test_unit_response.py
    ├── test_unit_task_cache.py
    ├── test_unit_versions.py
    ├── test_user.py
    └── test_workflow.py
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | # To get started with Dependabot version updates, you'll need to specify which
2 | # package ecosystems to update and where the package manifests are located.
3 | # Please see the documentation for all configuration options:
4 | # https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
5 |
6 | version: 2
7 | updates:
8 | - package-ecosystem: "pip"
9 | directory: "/"
10 | versioning-strategy: "lockfile-only"
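# With "lockfile-only", Dependabot only opens PRs that update the lock
# file, and never changes the version constraints in pyproject.toml.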
11 | schedule:
12 | interval: "weekly"
13 |
--------------------------------------------------------------------------------
/.github/pull_request_template.md:
--------------------------------------------------------------------------------
1 | ## Checklist before merging
2 | - [ ] I added an appropriate entry to `CHANGELOG.md`
3 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: ci
2 |
3 | on:
4 | push:
5 | branches: ["main"]
6 | pull_request:
7 | branches: ["main"]
8 |
9 | jobs:
10 |
11 | tests:
12 | name: "Python ${{ matrix.python-version }}"
13 | runs-on: ubuntu-22.04
14 | timeout-minutes: 30
15 |
16 | strategy:
17 | matrix:
18 | python-version: ["3.11", "3.12"]
19 |
20 | steps:
21 |
22 | - name: Start Postgres
23 | run: |
24 | sudo systemctl start postgresql.service
25 | sudo --login -u postgres psql -c "ALTER USER postgres WITH PASSWORD 'postgres';"
26 |
27 | - uses: actions/checkout@v4
28 | with:
29 | persist-credentials: false
30 |
31 | - name: Install poetry
32 | run: pipx install poetry==2.1.3
33 |
34 | - name: Set up Python ${{ matrix.python-version }}
35 | uses: actions/setup-python@v5
36 | with:
37 | python-version: ${{ matrix.python-version }}
38 | cache: "poetry"
39 |
40 | - name: Install dependencies
41 | run: poetry install --with dev --no-interaction
42 |
43 | - name: Test with pytest
44 | env:
45 | COVERAGE_FILE: coverage-data-${{ matrix.python-version }}
46 | GHA_FRACTAL_SERVER_LOG: /tmp
47 | run: poetry run coverage run -m pytest
48 |
49 | - name: Log server STDOUT if pytest failed
50 | if: failure()
51 | run: cat /tmp/server_out
52 |
53 | - name: Log server STDERR if pytest failed
54 | if: failure()
55 | run: cat /tmp/server_err
56 |
57 | - name: Upload coverage data
58 | uses: actions/upload-artifact@v4
59 | with:
60 | name: coverage-data-${{ matrix.python-version }}
61 | path: coverage-data-${{ matrix.python-version }}*
62 |
63 | coverage:
64 | name: Coverage
65 | runs-on: ubuntu-22.04
66 | needs: tests
67 | steps:
68 | - uses: actions/checkout@v4
69 |
70 | - run: pipx install poetry==2.1.3
71 |
72 | - uses: actions/setup-python@v5
73 | with:
74 | python-version: "3.11"
75 | cache: "poetry"
76 |
77 | - name: Install dependencies
78 | run: poetry install --only dev
79 |
80 | - name: Download data
81 | uses: actions/download-artifact@v4
82 | with:
83 | pattern: coverage-data-*
84 | merge-multiple: true
85 |
86 | - name: Combine coverage
87 | # Combines all the downloaded coverage artifacts in a single `.coverage` file,
88 | # which will then be used by `py-cov-action/python-coverage-comment-action`.
89 | # We added this step to replace the variable `MERGE_COVERAGE_FILES: true`
90 | # in the next step, which had started to raise errors
91 | # (https://github.com/fractal-analytics-platform/fractal-server/pull/1725).
92 | run: poetry run coverage combine coverage-data-*
93 |
94 | - name: Add coverage comment to Pull Requests
95 | id: coverage_comment
96 | uses: py-cov-action/python-coverage-comment-action@v3
97 | with:
98 | GITHUB_TOKEN: ${{ github.token }}
99 | MINIMUM_GREEN: 90
100 | MINIMUM_ORANGE: 60
101 | ANNOTATE_MISSING_LINES: true
102 | ANNOTATION_TYPE: notice
103 |
--------------------------------------------------------------------------------
/.github/workflows/documentation.yaml:
--------------------------------------------------------------------------------
1 | name: docs
2 |
3 | on:
4 | push:
5 | branches: ["main"]
6 | pull_request:
7 | branches: ["main"]
8 |
9 | jobs:
10 |
11 | docs:
12 | name: "Deploy docs"
13 | runs-on: ubuntu-22.04
14 | if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }}
15 |
16 | steps:
17 | - uses: actions/checkout@v4
18 | with:
19 | persist-credentials: false
20 |
21 | - name: Deploy docs
22 | uses: mhausenblas/mkdocs-deploy-gh-pages@master
23 | env:
24 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
25 | REQUIREMENTS: docs/doc-requirements.txt
26 |
27 | docs-build:
28 | name: "Test docs build"
29 | runs-on: ubuntu-22.04
30 | if: ${{ github.event_name != 'push' || github.ref != 'refs/heads/main' }}
31 |
32 | steps:
33 | - uses: actions/checkout@v4
34 | with:
35 | persist-credentials: false
36 |
37 |
38 | - uses: actions/setup-python@v5
39 | with:
40 | python-version: '3.11'
41 | cache: pip
42 |
43 | - name: Install current fractal
44 | run: python -m pip install -e .
45 |
46 | - name: Install dependencies
47 | run: python -m pip install -r docs/doc-requirements.txt
48 |
49 | - name: MKDocs build
50 | run: mkdocs build --config-file mkdocs.yml --strict
51 |
--------------------------------------------------------------------------------
/.github/workflows/pip_install.yml:
--------------------------------------------------------------------------------
1 | name: pip_install
2 |
3 | on:
4 | push:
5 | branches: ["main"]
6 | pull_request:
7 | branches: ["main"]
8 |
9 | jobs:
10 |
11 | tests:
12 | name: "Python ${{ matrix.python-version }}"
13 | runs-on: ubuntu-22.04
14 | timeout-minutes: 30
15 |
16 | strategy:
17 | matrix:
18 | python-version: ["3.11", "3.12"]
19 |
20 | steps:
21 |
22 | - uses: actions/checkout@v4
23 | with:
24 | persist-credentials: false
25 |
26 | - name: Set up Python ${{ matrix.python-version }}
27 | uses: actions/setup-python@v5
28 | with:
29 | python-version: ${{ matrix.python-version }}
30 | cache: "pip"
31 |
32 | - name: Install package
33 | run: pip install -e .
34 |
35 | - name: Call fractal
36 | run: fractal -h
37 |
--------------------------------------------------------------------------------
/.github/workflows/poetry_build.yml:
--------------------------------------------------------------------------------
1 | name: Build package
2 |
3 | on:
4 | push:
5 | branches: ["main"]
6 | pull_request:
7 | branches: ["main"]
8 |
9 | jobs:
10 | build:
11 | runs-on: ubuntu-22.04
12 | steps:
13 |
14 | - uses: actions/checkout@v4
15 | with:
16 | persist-credentials: false
17 |
18 | - name: "Set up Python"
19 | uses: actions/setup-python@v5
20 | with:
21 | python-version: "3.11"
22 |
23 | - name: "Install poetry"
24 | run: curl -sSL https://install.python-poetry.org | python3 - --version 2.0.0
25 |
26 | - name: Build package
27 | run: poetry build
28 |
--------------------------------------------------------------------------------
/.github/workflows/poetry_update.yml:
--------------------------------------------------------------------------------
1 | # NOTE: in order for this action to trigger other GitHub workflows (typically
2 | # the CI ones), the "Create Pull Request" step must use a Personal Access Token
3 | # (PAT) rather than the standard GITHUB_TOKEN. This PAT must be generated
4 | # elsewhere by one of the developers, and set as a secret for the current
5 | # repository.
6 |
7 |
8 | name: poetry-update
9 |
10 | on:
11 | schedule:
12 | - cron: "0 9 * * 2" # Tuesdays at 9AM
13 | workflow_dispatch:
14 |
15 | jobs:
16 | poetry_update:
17 | runs-on: ubuntu-22.04
18 | steps:
19 | - uses: actions/checkout@v3
20 | with:
21 | persist-credentials: false
22 | - run: pipx install poetry==2.1.3
23 |
24 | - uses: actions/setup-python@v3
25 | with:
26 | python-version: "3.11"
27 |
28 | - uses: fredrikaverpil/poetry-update@v1.2
29 |
30 | - name: Create Pull Request
31 | uses: peter-evans/create-pull-request@v6
32 | with:
33 | token: ${{ secrets.PAT }}
34 | delete-branch: true
35 | branch: poetry-update
36 | add-paths: poetry.lock
37 | commit-message: "Update dependencies in `poetry.lock`"
38 | title: "[auto] Update dependencies in `poetry.lock`"
39 | draft: true
40 | body: |
41 |
42 | ### Updated dependencies:
43 |
44 | ```bash
45 | ${{ env.POETRY_UPDATED }}
46 | ```
47 |
48 | ### Outdated dependencies _before_ PR:
49 |
50 | ```bash
51 | ${{ env.POETRY_OUTDATED_BEFORE }}
52 | ```
53 |
54 | ### Outdated dependencies _after_ PR:
55 |
56 | ```bash
57 | ${{ env.POETRY_OUTDATED_AFTER }}
58 | ```
59 |
60 | _Note: there may be dependencies in the table above which were not updated as part of this PR.
61 | The reason is that they require manual updating, due to the way they are pinned._
62 |
--------------------------------------------------------------------------------
/.github/workflows/precommit.yml:
--------------------------------------------------------------------------------
1 | name: precommit
2 |
3 | on:
4 | push:
5 | branches: ["main"]
6 | pull_request:
7 | branches: ["main"]
8 |
9 | jobs:
10 | precommit:
11 | runs-on: ubuntu-22.04
12 | timeout-minutes: 3
13 | steps:
14 | - uses: actions/checkout@v4
15 | with:
16 | persist-credentials: false
17 | - uses: actions/setup-python@v5
18 | with:
19 | python-version: "3.11"
20 | cache: "pip"
21 | - uses: pre-commit/action@v3.0.0
22 |
--------------------------------------------------------------------------------
/.github/workflows/project-management.yml:
--------------------------------------------------------------------------------
1 | name: Project automation
2 |
3 | on:
4 | issues:
5 | types:
6 | - opened
7 |
8 | jobs:
9 | add-to-project:
10 | name: Add issue to project board
11 | runs-on: ubuntu-latest
12 | steps:
13 | - uses: actions/add-to-project@v0.4.0
14 | with:
15 | project-url: https://github.com/orgs/fractal-analytics-platform/projects/1
16 | github-token: ${{ secrets.MY_GITHUB_TOKEN }}
17 |
--------------------------------------------------------------------------------
/.github/workflows/publish_pypi.yml:
--------------------------------------------------------------------------------
1 | name: Publish package to PyPI
2 | on:
3 | push:
4 | tags:
5 | # See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#filter-pattern-cheat-sheet
6 | - '[0-9]+.[0-9]+.[0-9]+'
7 | - '[0-9]+.[0-9]+.[0-9]+[a-c][0-9]+'
8 | - '[0-9]+.[0-9]+.[0-9]+alpha[0-9]+'
9 | - '[0-9]+.[0-9]+.[0-9]+beta[0-9]+'
10 | - '[0-9]+.[0-9]+.[0-9]+rc[0-9]+'
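# The patterns above match tags such as "1.2.3", "1.2.3a0",
# "1.2.3alpha1", "1.2.3beta2" or "1.2.3rc1".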
11 |
12 | jobs:
13 | build_and_publish:
14 | runs-on: ubuntu-22.04
15 | environment: pypi
16 | steps:
17 |
18 | - name: Check that PYPI_TOKEN secret exists
19 | env:
20 | CURRENT_PYPI_TOKEN: ${{ secrets.PYPI_TOKEN}}
21 | if: ${{ env.CURRENT_PYPI_TOKEN == '' }}
22 | run: echo "Secret PYPI_TOKEN is not defined." && exit 1
23 |
24 | - uses: actions/checkout@v4
25 | with:
26 | persist-credentials: false
27 |
28 | - name: "Set up Python"
29 | uses: actions/setup-python@v5
30 | with:
31 | python-version: "3.11"
32 |
33 | - name: "Install poetry"
34 | run: curl -sSL https://install.python-poetry.org | python3 - --version 2.1.3
35 |
36 | - name: Build package
37 | run: poetry build
38 |
39 | - name: Publish package to PyPI
40 | run: poetry publish --username "__token__" --password ${{ secrets.PYPI_TOKEN}}
41 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .pytest_cache/
2 | artifacts
3 | __pycache__
4 | *.cpython-*.pyc
5 | *.env
6 | .coverage
7 |
8 | dist/*
9 | .mypy_cache/*
10 | .python-version
11 | file:cachedb
12 | monitoring.db
13 | runinfo
14 | .*.swp
15 | parsl_executors.log
16 | fractal.log
17 | site
18 | reference
19 | venv
20 | .DS_Store
21 | token
22 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: https://github.com/pre-commit/pre-commit-hooks
3 | rev: v5.0.0
4 | hooks:
5 | - id: check-yaml
6 | - id: end-of-file-fixer
7 | - id: trailing-whitespace
8 | - repo: https://github.com/asottile/reorder_python_imports
9 | rev: v3.1.0
10 | hooks:
11 | - id: reorder-python-imports
12 | - repo: https://github.com/psf/black
13 | rev: 22.3.0
14 | hooks:
15 | - id: black
16 | args: [--line-length=79]
17 | - repo: https://github.com/PyCQA/flake8
18 | rev: 7.1.1
19 | hooks:
20 | - id: flake8
21 | args: ["--exclude", "examples/*"]
22 | - repo: https://github.com/PyCQA/bandit
23 | rev: '1.7.4'
24 | hooks:
25 | - id: bandit
26 | args: ["--exclude", "tests/*"]
27 | - repo: local
28 | hooks:
29 | - id: grep-for-devtools
30 | name: Check for devtools in fractal_client files
31 | entry: bash -c 'for file in "$@"; do if grep -n "devtools" "$file"; then echo "$file" && exit 1; fi; done' --
32 | language: system
33 | files: fractal_client/
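# The hook above makes the commit fail whenever the string "devtools"
# occurs in a staged file under fractal_client/.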
34 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | **Note**: Numbers like (\#123) point to closed Pull Requests on the fractal repository.
2 |
3 | # 2.9.1
4 |
5 | * Bump versions of `python-dotenv` and `packaging` (\#803).
6 |
7 | # 2.9.0
8 |
9 | * Add `--fractal-server` CLI option (\#801).
10 | * Add `--token-path` CLI option and `FRACTAL_TOKEN_PATH` env variable (\#801).
11 | * Stop caching token, in favor of making a call to the login API for each CLI command if needed (\#801).
12 | * Drop `AuthToken` class (\#801).
13 | * Expose `--task-type` in task-creation command (\#801).
14 | * Remove default `FRACTAL_SERVER="http://localhost:8000"` configuration (\#801).
15 |
16 | # 2.8.1
17 |
18 | This version deprecates Python 3.10.
19 |
20 | # 2.8.0
21 |
22 | This version is aligned with [fractal-server 2.14.0](https://github.com/fractal-analytics-platform/fractal-server/blob/main/CHANGELOG.md#2140).
23 |
24 | * Dependencies:
25 | * Bump `httpx` to `0.28.*` (\#783).
26 |
27 | # 2.7.1
28 |
29 | * Testing:
30 | * Adapt to fractal-server switch to Pydantic V2 (\#781).
31 |
32 | # 2.7.0
33 |
34 | This version is aligned with [fractal-server 2.11.0](https://github.com/fractal-analytics-platform/fractal-server/blob/main/CHANGELOG.md#2110).
35 |
36 | * Commands:
37 | * Align with fractal-server 2.11 changes of data structure and API for filters (\#776).
38 | * Package:
39 | * Remove `exclude` field, and move `packages` field to `tool.poetry` section, in pyproject.toml (\#771).
40 | * Testing:
41 | * Improve configuration for coverage GitHub Action step (\#772).
42 | * Add `persist-credentials: false` to all `actions/checkout@v4` GitHub Action steps (\#773).
43 |
44 | # 2.6.1
45 |
46 | * Package:
47 | * Move to `poetry` v2 (\#770).
48 | * Require `Python<3.13` (\#770).
49 | * Testing:
50 | * Use default Postgres service in GitHub CI (\#761).
51 |
52 | # 2.6.0
53 |
54 | * Align with new task-collection endpoint in `fractal-server` 2.10.0 (\#760).
55 | * Update versions of pre-commit hooks and add precommit GitHub Action (\#757).
56 |
57 | # 2.5.1
58 |
59 | * Deprecate user `cache_dir`, to align with [fractal-server 2.9.2](https://github.com/fractal-analytics-platform/fractal-server/blob/main/CHANGELOG.md#292) (\#758).
60 |
61 | # 2.5.0
62 |
63 | * Update task-collection commands, to align with [fractal-server 2.9.0](https://github.com/fractal-analytics-platform/fractal-server/blob/main/CHANGELOG.md#290) (\#738).
64 | * Remove (internal) obsolete `do_not_separate_logs` argument (\#738).
65 | * Add `group {add|remove}-user` commands, and deprecate `--new-user-ids` argument from `group update` (\#748).
66 | * Update `user whoami --viewer-paths` to call the new dedicated [server endpoint](https://github.com/fractal-analytics-platform/fractal-server/pull/2096) (\#748).
67 | * Add `user set-groups` commands (\#753).
68 | * Testing:
69 | * Align with fractal-server 2.9.0 removal of `DB_ENGINE` variable (\#743).
70 |
71 | # 2.4.0
72 |
73 | > WARNING: This release has a breaking change in the `project add-dataset` command.
74 |
75 | * Move from the positional `zarr_dir` argument to the optional `--zarr-dir` argument, for `project add-dataset` (\#736).
76 | * Add support for user-settings `project_dir`, introduced in fractal-server 2.8.0 (\#736).
77 | * Internal:
78 | * Update effect of `include_logs` for task-collection check command (\#730).
79 |
80 | # 2.3.0
81 |
82 | > WARNING: Starting from this release, Python3.9 is not supported any more.
83 |
84 | * Align with [`fractal-server` 2.7.0](https://fractal-analytics-platform.github.io/fractal-server/changelog/#270) (\#712).
85 | * Align with `fractal-server` 2.7.1 and drop use of pip extras (\#727).
86 | * Remove `--new-name` and `--new-version` options from `task edit` command (\#712).
87 | * Rename `source` into `label`, for `task collect-custom` command (\#712).
88 | * Do not refer to obsolete task attributes `source` or `owner` (\#712, \#717).
89 | * Add `--new-ssh-settings-json` option to `fractal user edit` (\#715).
90 | * Add `--ssh-settings-json` option to `fractal user register` (\#722).
91 | * Add `--private` option to task-creating commands (\#717).
92 | * Drop `task delete` command (\#717).
93 | * Handle missing server in `fractal version` (\#724).
94 | * Testing:
95 | * Run all tests against a single `fractal-server` instance (\#717).
96 | * Run tests in random module order, based on `pytest-randomly` (\#717).
97 | * Include Python3.12 in GitHub CI (\#717).
98 |
99 | # 2.2.1
100 |
101 | * Support new `viewer-paths` commands in `fractal group` commands (\#709).
102 | * Make `--new-user-ids` optional in the `fractal group` command (\#709).
103 | * Add `--viewer-paths` argument in `fractal user whoami` (\#709).
104 |
105 | # 2.2.0
106 |
107 | * Align with [`fractal-server` 2.6.0](https://fractal-analytics-platform.github.io/fractal-server/changelog/#260) (\#705).
108 |
109 | # 2.1.0
110 |
111 | * Align with [`fractal-server` 2.4.0](https://fractal-analytics-platform.github.io/fractal-server/changelog/#240) (\#695).
112 | * Add `fractal group` command (\#695).
113 | * Testing
114 | * Update GitHub actions for upload/download/coverage (\#690, \#691).
115 | * Switch from SQLite to Postgres in CI (\#702).
116 |
117 | # 2.0.3
118 |
119 | * Improve `workflow import` command (\#686).
120 |
121 | # 2.0.2
122 |
123 | * Improve error handling in `task collect-custom` command (\#680).
124 | * Documentation
125 | * Bump `mkdocstrings[python]` to 0.25.2 (\#682).
126 |
127 | # 2.0.1
128 |
129 | * Add new command `task collect-custom` (\#667).
130 | * Update `poetry` version for development to 1.8.2.
131 | * Testing:
132 | * Update CI for fractal-server 2.1.0 compatibility (\#655).
133 | * Remove obsolete folders from `tests/data` (\#656).
134 |
135 | # 2.0.0
136 |
137 | Major version to align with `fractal-server` API v2.
138 |
139 | # 1.4.4
140 |
141 | * Require user's verification to be specified when editing user's email (\#620).
142 |
143 | # 1.4.3
144 |
145 | * Make `fractal-client` a fully synchronous client, by removing all `async`/`await` (\#592).
146 | * Improve handling of `AuthenticationError` and `ConnectionError`, and handle uncaught `Exception` (\#587).
147 | * Deprecate environment variable `FRACTAL_LOGGING_LEVEL`, remove flag `--verbose` and replace it with `--debug`, improve debugging of http requests (\#597).
148 | * Testing:
149 | * Adapt `job_factory` and tests to the presence of new `timestamp_created` attributes in `fractal-server` (\#589).
150 | * Align with `fractal-server` 1.4.3a2 (\#598).
151 | * Documentation:
152 | * Add info about server/client version compatibility (\#591).
153 | * Dependencies:
154 | * Update python-dotenv to `^0.21.0` (\#589).
155 | * Introduce automatic updates of `poetry.lock` (\#609 and commits to `main`).
156 |
157 | # 1.4.2
158 |
159 | * Always make new users verified, within `user register` command (\#580).
160 | * Expose verification-related features in `user edit` command (\#580).
161 | * Update expected status code of stop-job to 202 (\#585).
162 | * Testing:
163 | * Adapt `job_factory` to new strict response-validation models in `fractal-server` (\#580).
164 | * Adapt `job_factory` by including the `timestamp_created` attribute (\#582).
165 |
166 | # 1.4.1
167 |
168 | We are skipping this patch release, to remain aligned with `fractal-server` version.
169 |
170 | # 1.4.0
171 |
172 | * Align with [`fractal-server` 1.4.0](https://fractal-analytics-platform.github.io/fractal-server/changelog/#140) (\#573).
173 | * Testing:
174 | * Use ubuntu-22 for GitHub CI (commit e1c8bd3da670c24a0ac48b1163cd1c7833746aaf).
175 | * Development:
176 | * Do not include `sqlmodel` dependency explicitly (\#577).
177 | * Use poetry 1.7.1 (\#577).
178 |
179 | # 1.3.7
180 |
181 | This release is up-to-date with `fractal-server` 1.3.12.
182 |
183 | * Remove `project new --dataset` argument (\#566).
184 | * Add `project new --make-read-only` argument (\#566).
185 | * Do not use deprecated fractal-server `deployment_type` variable (\#566).
186 | * Align tests with `fractal-server` 1.3.12, which deprecates the legacy history version (\#569).
187 | * Automate procedure for publishing package to PyPI (\#561).
188 |
189 | # 1.3.6
190 |
191 | * Main features:
192 | * Remove client-side validation of API request/response bodies (\#551).
193 | * Make error messages more readable for request-body validation errors (\#551).
194 | * Include `--batch` option for workflow apply command (commit 06c7ff0e92602f08a98097d3597a8ce39c6ae1a8).
195 | * Revamp `config.py`, making `Settings` a standard Python class (\#559).
196 | * Package and repository:
197 | * Rename repository from `fractal` to `fractal-client`.
198 | * Change package name from `fractal` to `fractal-client` (\#555).
199 | * Remove `fractal-common` submodule (\#551).
200 | * Remove `pydantic` dependency (\#559).
201 | * Tests:
202 | * Review tests: make them stateless, improve handling of cache, remove obsolete fixtures (\#559).
203 |
204 | # 1.3.5
205 |
206 | * Implement more robust sorting of versions (e.g. in the presence of pre-releases), via `packaging.version` (\#550).
207 |
208 | # 1.3.4
209 |
210 | * Add new commands `dataset status` and `dataset history` (\#544).
211 | * Align with fractal-server/fractal-common new `TaskUpdate` model, that accepts `version=None` (\#540).
212 | * Align with fractal-server/fractal-common new attributes in the Task model (\#534).
213 | * Align with fractal-common new `ApplyWorkflowRead` model, with optional `workflow_dump` (\#547).
214 | * Move documentation from sphinx to mkdocs (\#532).
215 |
216 | # 1.3.3
217 |
218 | * Support `workflow apply --start/--end` arguments for submitting a workflow subset (\#527).
219 | * Exclude `common/tests` and other files from build (\#523).
220 | * Remove obsolete folders from `tests/data` (\#526).
221 |
222 | # 1.3.2
223 |
224 | * Fix wrong build in 1.3.1 (include missing `fractal.common` submodule).
225 |
226 | # 1.3.1
227 |
228 | WARNING: wrong build, do not use
229 |
230 | * Pin Pydantic to V1 (\#520).
231 |
232 | # 1.3.0
233 |
234 | * Align with [fractal-server 1.3.0](https://fractal-analytics-platform.github.io/fractal-server/changelog/#130), by updating all relevant endpoint path/query/body parameters (\#479).
235 | * Add `fractal job stop` command (\#485).
236 | * Add `fractal task delete` command (\#510).
237 | * Add task ID/name/version disambiguation to `task edit` and `workflow add-task` (\#499).
238 | * Specific changes to existing commands:
239 | * Make `project_id` a required positional argument of `fractal {workflow,dataset,job}` commands (\#479).
240 | * For `edit` commands, always prepend the new arguments with `new`, e.g. as in `task edit ... --new-version` (\#498).
241 | * Add `--type` optional argument to `fractal dataset new` command (\#479).
242 | * For `fractal workflow apply`:
243 | * Transform `project_id` from option to positional argument (\#479).
244 | * Make `output_dataset_id` a required positional argument (\#483).
245 | * Add `--username/--new-username` to `fractal user` subcommands (\#493).
246 | * Remove `--private` option for `fractal task collect` (\#493).
247 | * Add `--version` to `fractal task {new,edit}` subcommands (\#493).
248 | * Split `task-id-or-name` argument of `task edit` and `workflow add-task` into two arguments (\#504).
249 | * Add `--pinned-dependency` argument to `task collect` (\#508).
250 | * Add `--args-schema` and `--args-schema-version` arguments to `task new` command (\#511).
251 | * Add `--new-args-schema` and `--new-args-schema-version` arguments to `task edit` command (\#511).
252 | * Raise warning when importing/exporting workflows with custom tasks (\#513).
253 | * Package and repository:
254 | * Fix a bug in tests, by starting the fractal-server FastAPI app in a more standard way (\#481).
255 | * Require pydantic version to be `>=1.10.8` (\#486, \#490).
256 | * Make `sqlmodel` a development dependency (\#493).
257 | * Improve handling of a `ConnectError` in the CI (\#502).
258 | * Remove arbitrary `kwargs` from internal functions (\#503).
259 | * Align with latest fractal-server version and update tests (\#517).
260 |
261 | # 1.2.0
262 |
263 | * Align with [fractal-server 1.2.0](https://fractal-analytics-platform.github.io/fractal-server/changelog/#120) (\#472).
264 |
265 | # 1.1.0
266 |
267 | * Align with [fractal-server 1.1.0](https://fractal-analytics-platform.github.io/fractal-server/changelog/#110) (\#446).
268 | * Improve validation of API request payloads (\#447).
269 | * Drop support for python 3.8 (\#438).
270 | * Update `_TaskBase` schema from `fractal-common` (\#431).
271 | * Update `DatasetUpdate` schema (\#461).
272 | * Update `fractal task edit` command (\#439 and \#461).
273 | * Add `fractal project edit` command (\#465).
274 | * Improve task-collection log formatting (\#443).
275 | * Disable `argparse` abbreviation for CLI commands (\#441).
276 |
277 | # 1.0.5
278 |
279 | * Minor updates to `fractal workflow export` (\#429).
280 |
281 | # 1.0.4
282 |
283 | * Add `fractal workflow {import,export}` commands (\#426).
284 | * Remove `--project-id` argument from `fractal workflow edit` commands (\#426).
285 |
286 | # 1.0.3
287 |
288 | * Add `fractal task new` command (\#421).
289 | * Remove obsolete `-j/--json` argument from `fractal` command (\#421).
290 |
291 | # 1.0.2
292 |
293 | * Remove obsolete references to SLURM user, either as a CLI argument or an environment variable (\#419).
294 |
295 | # 1.0.1
296 |
297 | * Make `FRACTAL_USER/FRACTAL_PASSWORD` env variables optional (\#416).
298 |
299 | # 1.0.0
300 |
301 | * First version in this CHANGELOG.
302 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | BSD 3-Clause License
2 |
3 | Copyright 2022 (C) Friedrich Miescher Institute for Biomedical Research and University of Zurich
4 | All rights reserved.
5 |
6 | Redistribution and use in source and binary forms, with or without
7 | modification, are permitted provided that the following conditions are met:
8 |
9 | * Redistributions of source code must retain the above copyright notice, this
10 | list of conditions and the following disclaimer.
11 |
12 | * Redistributions in binary form must reproduce the above copyright notice,
13 | this list of conditions and the following disclaimer in the documentation
14 | and/or other materials provided with the distribution.
15 |
16 | * Neither the name of the copyright holder nor the names of its
17 | contributors may be used to endorse or promote products derived from
18 | this software without specific prior written permission.
19 |
20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Fractal Client
2 |
3 |
4 |
5 |
6 |
7 | [](https://pypi.org/project/fractal-client/)
8 | [](https://opensource.org/licenses/BSD-3-Clause)
9 | [](https://github.com/fractal-analytics-platform/fractal-client/actions/workflows/ci.yml)
10 | [](https://htmlpreview.github.io/?https://github.com/fractal-analytics-platform/fractal-client/blob/python-coverage-comment-action-data/htmlcov/index.html)
11 | [](https://fractal-analytics-platform.github.io/fractal-client)
12 |
13 | [Fractal](https://fractal-analytics-platform.github.io/) is a framework developed at the [BioVisionCenter](https://www.biovisioncenter.uzh.ch/en.html) to process bioimaging data at scale in the OME-Zarr format and prepare the images for interactive visualization.
14 |
15 | 
16 |
17 | This is the repository that contains the **Fractal client**. Find more information about Fractal in general and the other repositories on the [Fractal home page](https://fractal-analytics-platform.github.io).
18 |
19 | ## Documentation
20 |
21 | See https://fractal-analytics-platform.github.io/fractal-client.
22 |
23 | ## Contributors and license
24 |
25 | Fractal was conceived in the Liberali Lab at the Friedrich Miescher Institute for Biomedical Research and in the Pelkmans Lab at the University of Zurich by [@jluethi](https://github.com/jluethi) and [@gusqgm](https://github.com/gusqgm). The Fractal project is now developed at the [BioVisionCenter](https://www.biovisioncenter.uzh.ch/en.html) at the University of Zurich and the project lead is with [@jluethi](https://github.com/jluethi). The core development is done under contract by [eXact lab S.r.l.](https://www.exact-lab.it).
26 |
27 | Unless otherwise specified, Fractal components are released under the BSD 3-Clause License, and copyright is with the BioVisionCenter at the University of Zurich.
28 |
--------------------------------------------------------------------------------
/docs/.gitignore:
--------------------------------------------------------------------------------
1 | build
2 | api_files
3 |
--------------------------------------------------------------------------------
/docs/changelog.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 |
3 | {%
4 | include-markdown "../CHANGELOG.md"
5 | %}
6 |
--------------------------------------------------------------------------------
/docs/development.md:
--------------------------------------------------------------------------------
1 | # Contribute to Fractal Client development
2 |
3 | The development of Fractal Client takes place on the [fractal-client GitHub
4 | repository](https://github.com/fractal-analytics-platform/fractal-client). To
5 | ask questions or to inform us of a bug or unexpected behavior, please feel free
6 | to [open an issue](https://github.com/fractal-analytics-platform/fractal-client/issues/new).
7 |
8 |
9 | ## Set up the development environment
10 |
11 | ### Clone repository
12 |
13 | First, you should clone the repository
14 | ```
15 | git clone https://github.com/fractal-analytics-platform/fractal-client.git
16 | cd fractal-client
17 | ```
18 |
19 | ### Install package
20 |
21 | We use [poetry](https://python-poetry.org/docs) to manage the development environment and the dependencies. A simple way to install it is `pipx install poetry==2.1.3`, or you can look at the installation section [here](https://python-poetry.org/docs#installation).
22 | Running
23 | ```console
24 | poetry install [--with dev] [--with docs]
25 | ```
26 | will take care of installing all the dependencies in a separate environment, optionally also installing the dependencies for development and for building the documentation.
27 |
28 |
29 | ## Build and release
30 |
31 | We also use `poetry` to build the package and publish it to PyPI.
32 |
33 | Preliminary check-list:
34 |
35 | * The `main` branch is checked out.
36 | * You reviewed dependencies, and the lock file is up to date with `pyproject.toml`.
37 | * The current HEAD of the `main` branch passes all the tests (note: make sure
38 | that you are using `poetry run pytest`, and not simply `pytest`).
39 | * You updated the `CHANGELOG.md` file.
40 | * You updated `docs/versions.md` with the constraints for the new version.
41 |
42 | Actual **release instructions**:
43 |
44 | 1. Use one of the following
45 | ```
46 | poetry run bumpver update --tag-num --tag-commit --commit --dry
47 | poetry run bumpver update --patch --tag-commit --commit --dry
48 | poetry run bumpver update --minor --tag-commit --commit --dry
49 | poetry run bumpver update --set-version X.Y.Z --tag-commit --commit --dry
50 | ```
51 | to perform a dry run of the version bump.
52 |
53 | 2. If the previous step looks good, remove `--dry` and re-run to actually bump the
54 | version. This will trigger a dedicated GitHub action to build the new package
55 | and publish it to PyPI.
56 |
57 |
58 | ## Run tests
59 |
60 | Unit and integration testing of Fractal Client uses the
61 | [pytest](https://docs.pytest.org/en/7.1.x/) testing framework.
62 |
63 | If you installed the development dependencies, you may run
64 | the test suite by invoking
65 | ```
66 | poetry run pytest
67 | ```
68 | from the main directory of the `fractal-client` repository. It is sometimes
69 | useful to specify additional arguments, e.g.
70 | ```
71 | poetry run pytest -s -vvv --log-cli-level info --full-trace
72 | ```
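
You can also restrict the run to a subset of the suite by passing a path,
e.g.
```
poetry run pytest tests/test_user.py
```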
73 |
74 | Tests are also run as part of [GitHub Actions Continuous
75 | Integration](https://github.com/fractal-analytics-platform/fractal-client/actions/workflows/ci.yml)
76 | for the `fractal-client` repository.
77 |
78 |
79 | ## Documentation
80 |
81 | The documentation is built with mkdocs, and we bundle a module from the
82 | [sphinx-argparse plugin](https://sphinx-argparse.readthedocs.io), customized to
83 | our needs.
84 |
85 | To build the documentation locally, setup a development python environment (e.g. with `poetry install --with docs`) and then run one of these commands:
86 | ```
87 | poetry run mkdocs serve --config-file mkdocs.yml # serves the docs at http://127.0.0.1:8000
88 | poetry run mkdocs build --config-file mkdocs.yml # creates a build in the `site` folder
89 | ```
90 |
--------------------------------------------------------------------------------
/docs/doc-requirements.txt:
--------------------------------------------------------------------------------
1 | mkdocs==1.5.2
2 | mkdocs-material==9.1.21
3 | mkdocs-literate-nav==0.5.0
4 | mkdocs-autorefs==0.5.0
5 | mkdocs-gen-files==0.4.0
6 | mkdocs-section-index==0.3.5
7 | mkdocstrings[python]==0.25.2
8 | mkdocs-include-markdown-plugin==4.0.4
9 |
--------------------------------------------------------------------------------
/docs/extra.css:
--------------------------------------------------------------------------------
1 | /* Custom style for blockquotes */
2 | blockquote {
3 | background-color: #e7e3e3d8; /* Light gray background */
4 | border: 1px solid #000000; /* Black border */
5 | padding: 10px;
6 | margin: 20px 0;
7 | border-radius: 4px;
8 | font-size: 16px;
9 | line-height: 1.6;
10 | box-shadow: 2px 2px 5px rgba(0, 0, 0, 0.1); /* Optional: Add a subtle shadow */
11 | }
12 |
13 | /* Style the text inside blockquotes */
14 | blockquote p {
15 | margin: 0;
16 | color: #333; /* Dark text color */
17 | }
18 |
--------------------------------------------------------------------------------
/docs/gen_ref_pages.py:
--------------------------------------------------------------------------------
1 | import sys
2 | from pathlib import Path
3 | from textwrap import fill
4 | from typing import Any
5 |
6 | import mkdocs_gen_files # type: ignore[import]
7 | from mkdocs_gen_files import Nav
8 |
9 | sys.path.append(Path(__file__).parent.as_posix()) # noqa: E402
10 | from parser import parse_parser # noqa: E402
11 |
12 | sys.path.append(Path(__file__).parents[1].as_posix()) # noqa: E402
13 | from fractal_client.parser import parser_main # noqa
14 |
15 |
16 | def to_markdown(
17 | data: dict[str, Any],
18 | level: int,
19 | parent_cmd: str | None = None,
20 | ) -> str:
21 | """
22 | Given a `data` object with keys `name`, `description` and `usage`, produce
23 | a markdown string.
24 | """
25 |
26 | # Create MarkDown string for title
27 | name = data["name"]
28 | if parent_cmd:
29 | title_str = "#" * (level + 2) + f" {parent_cmd} {name}\n"
30 | else:
31 | title_str = "#" * (level + 1) + f" {name}\n"
32 |
33 | # Create MarkDown string for description
34 | description = data["description"]
35 | description_str = f"{description}\n"
36 |
37 | # Create MarkDown string for usage code block
38 | usage = data["bare_usage"].replace(Path(__file__).name, "fractal")
39 | while "  " in usage:
40 | usage = usage.replace("  ", " ")
41 | usage = fill(
42 | usage,
43 | width=80,
44 | initial_indent="",
45 | subsequent_indent=(" " * 8),
46 | break_on_hyphens=False,
47 | )
48 | usage_str = f"```\n{usage}\n```\n"
49 |
50 | # Create MarkDown string for action groups
51 | action_groups_strings = []
52 | if "action_groups" in data.keys():
53 | for group in data["action_groups"]:
54 | title = group["title"]
55 |
56 | if title == "Commands":
57 | continue
58 | elif title == "Valid sub-commands":
59 | action_groups_strings.append(
60 | "#" * (level + 2) + " Sub-commands"
61 | )
62 | elif title in [
63 | "Named Arguments",
64 | "Positional Arguments",
65 | ]:
66 | options = group["options"]
67 | action_groups_strings.append("#" * (level + 3) + f" {title}\n")
68 | for opt in options:
69 | opt_name = ",".join(opt["name"])
70 | opt_help = opt["help"]
71 | default = str(opt["default"])
72 | if (default == "None") or ("==SUPPRESS==" in default):
73 | default = ""
74 | else:
75 | default = f" *Default*: `{default}`."
76 | action_groups_strings.append(
77 | f"- **`{opt_name}`**: {opt_help}{default}\n"
78 | )
79 | else:
80 | raise NotImplementedError(title)
81 |
82 | action_groups_str = "\n".join(action_groups_strings)
83 |
84 | # Combine strings together
85 | md_string = (
86 | "\n".join(
87 | (
88 | title_str,
89 | description_str,
90 | usage_str,
91 | action_groups_str,
92 | )
93 | )
94 | + "\n"
95 | )
96 |
97 | return md_string
98 |
99 |
100 | nav = Nav()
101 |
102 | # Parse main parser
103 | main = parse_parser(parser_main)
104 |
105 | # Parser level 0
106 | nav[["fractal"]] = "fractal/index.md"
107 | main["name"] = "fractal"
108 | with mkdocs_gen_files.open("reference/fractal/index.md", "w") as f:
109 | f.write(to_markdown(main, level=0))
110 |
111 | # Parser levels 1 and 2 (commands and subcommands)
112 | for child in main["children"]:
113 | # Level 1
114 | name = child["name"]
115 |
116 | nav[["fractal", name]] = f"fractal/{name}/index.md"
117 | with mkdocs_gen_files.open(f"reference/fractal/{name}/index.md", "w") as f:
118 | f.write(to_markdown(child, level=0))
119 | if "children" not in child.keys():
120 | continue
121 | # Level 2
122 | for grandchild in child["children"]:
123 | f.write(to_markdown(grandchild, level=1, parent_cmd=name))
124 |
125 | with mkdocs_gen_files.open("reference/SUMMARY.md", "w") as nav_file:
126 | nav_file.writelines(nav.build_literate_nav())
127 |
--------------------------------------------------------------------------------
/docs/index.md:
--------------------------------------------------------------------------------
1 | ---
2 | hide:
3 | - toc
4 | ---
5 |
6 | # Welcome to Fractal Command-line Client's documentation!
7 |
8 |
9 |
10 |
11 |
12 | Fractal is a framework developed at the [BioVisionCenter](https://www.biovisioncenter.uzh.ch/en.html) to process bioimaging data at scale in the OME-Zarr format and prepare the images for interactive visualization.
13 |
14 | > This project is under active development 🔨. If you need help or found a bug, **open an issue [here](https://github.com/fractal-analytics-platform/fractal/issues/new)**.
15 |
16 | This documentation concerns the **Fractal Command-line Client**. Find more information about Fractal in general and the other repositories at the [Fractal home page](https://fractal-analytics-platform.github.io).
17 |
18 | ## Licence and Copyright
19 |
20 | Fractal was conceived in the Liberali Lab at the Friedrich Miescher Institute for Biomedical Research and in the Pelkmans Lab at the University of Zurich by [@jluethi](https://github.com/jluethi) and [@gusqgm](https://github.com/gusqgm). The Fractal project is now developed at the [BioVisionCenter](https://www.biovisioncenter.uzh.ch/en.html) at the University of Zurich and the project lead is with [@jluethi](https://github.com/jluethi). The core development is done under contract by [eXact lab S.r.l.](https://www.exact-lab.it).
21 |
22 | Unless otherwise specified, Fractal components are released under the BSD 3-Clause License, and copyright is with the BioVisionCenter at the University of Zurich.
23 |
--------------------------------------------------------------------------------
/docs/install.md:
--------------------------------------------------------------------------------
1 | # Installation and usage
2 |
3 | ## Installation
4 |
5 | Fractal Client is hosted on [the PyPI
6 | index](https://pypi.org/project/fractal-client), and it can be installed with
7 | `pip` via
8 | ```
9 | pip install fractal-client
10 | ```
11 |
12 | ## Usage
13 |
14 | You may invoke the Fractal Client via the custom command `fractal`, from the
15 | command line (see its documentation [here](/reference/fractal/)).
16 |
17 | You must set the `FRACTAL_SERVER` variable, which is a fully qualified URL to
18 | the Fractal server installation (e.g. http://localhost:8000). This can be an
19 | environment variable, or it can be stored in an environment file
20 | `.fractal.env` as in
21 | ```
22 | FRACTAL_SERVER=http://localhost:8010
23 | ```
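
As a quick check of this configuration, you may run a command which does not
require authentication, e.g.
```
fractal version
```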
24 |
25 | ### Credentials
26 |
27 | Most `fractal` commands are restricted to authenticated users, and user
28 | credentials can be specified in multiple ways:
29 | * Set `FRACTAL_USER` and `FRACTAL_PASSWORD` as environment variables;
30 | * Add `FRACTAL_USER` and `FRACTAL_PASSWORD` variables in `.fractal.env`;
31 | * Explicitly provide `--user` and `--password` arguments for `fractal` commands, see [here](/reference/fractal/) and the example below.
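
For instance, a minimal sketch of the third option (with placeholder
credentials) may look like
```
fractal --user alice@example.org --password my-secret user whoami
```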
32 |
33 | ### Cache
34 |
35 | By default, `fractal` caches some information (namely a list of available
36 | tasks) in `~/.cache/fractal`.
37 | This destination can be customized by setting the `FRACTAL_CACHE_PATH`
38 | variable.
39 |
40 | ### Full example
41 |
42 | Here is an example of a valid `.fractal.env` file:
43 | ```
44 | FRACTAL_USER=user@something.com
45 | FRACTAL_PASSWORD=myuser
46 | FRACTAL_SERVER=http://localhost:8010
47 | FRACTAL_CACHE_PATH=/some/path/fractal-cache
48 | ```
49 |
--------------------------------------------------------------------------------
/docs/overrides/404.html:
--------------------------------------------------------------------------------
1 |
6 | {% block content %}
7 | 404 - Not found
8 | {% endblock %}
9 |
--------------------------------------------------------------------------------
/docs/parser.py:
--------------------------------------------------------------------------------
1 | """
2 | The current `parser.py` module belongs to the sphinx-argparse
3 | (https://github.com/ashb/sphinx-argparse, v0.4.0).
4 | Original Copyright 2023 Ash Berlin-Taylor
5 | License: MIT
6 |
7 | The first version of the same module is from sphinx-argparse
8 | (https://github.com/alex-rudakov/sphinx-argparse).
9 | Original Copyright 2013 Aleksandr Rudakov
10 | License: MIT
11 | """
12 | import re
13 | from argparse import _HelpAction
14 | from argparse import _StoreConstAction
15 | from argparse import _SubParsersAction
16 |
17 |
18 | class NavigationException(Exception): # noqa: N818
19 | pass
20 |
21 |
22 | def parser_navigate(parser_result, path, current_path=None):
23 | if isinstance(path, str):
24 | if path == "":
25 | return parser_result
26 | path = re.split(r"\s+", path)
27 | current_path = current_path or []
28 | if len(path) == 0:
29 | return parser_result
30 | if "children" not in parser_result:
31 | raise NavigationException(
32 | "Current parser has no child elements. "
33 | f"(path: {' '.join(current_path)})"
34 | )
35 | next_hop = path.pop(0)
36 | for child in parser_result["children"]:
37 | # identifier is only used for aliased subcommands
38 | identifier = (
39 | child["identifier"] if "identifier" in child else child["name"]
40 | )
41 | if identifier == next_hop:
42 | current_path.append(next_hop)
43 | return parser_navigate(child, path, current_path)
44 | raise NavigationException(
45 | f"Current parser has no child element with name: {next_hop} "
46 | f"(path: {' '.join(current_path)})"
47 | )
48 |
49 |
50 | def _try_add_parser_attribute(data, parser, attribname):
51 | attribval = getattr(parser, attribname, None)
52 | if attribval is None:
53 | return
54 | if not isinstance(attribval, str):
55 | return
56 | if len(attribval) > 0:
57 | data[attribname] = attribval
58 |
59 |
60 | def _format_usage_without_prefix(parser):
61 | """
62 | Use private argparse APIs to get the usage string without
63 | the 'usage: ' prefix.
64 | """
65 | fmt = parser._get_formatter()
66 | fmt.add_usage(
67 | parser.usage,
68 | parser._actions,
69 | parser._mutually_exclusive_groups,
70 | prefix="",
71 | )
72 | return fmt.format_help().strip()
73 |
74 |
75 | def parse_parser(parser, data=None, **kwargs):
76 | if data is None:
77 | data = {
78 | "name": "",
79 | "usage": parser.format_usage().strip(),
80 | "bare_usage": _format_usage_without_prefix(parser),
81 | "prog": parser.prog,
82 | }
83 | _try_add_parser_attribute(data, parser, "description")
84 | _try_add_parser_attribute(data, parser, "epilog")
85 | for action in parser._get_positional_actions():
86 | if not isinstance(action, _SubParsersAction):
87 | continue
88 | helps = {}
89 | for item in action._choices_actions:
90 | helps[item.dest] = item.help
91 |
92 | # commands which share an existing parser are an alias,
93 | # don't duplicate docs
94 | subsection_alias = {}
95 | subsection_alias_names = set()
96 | for name, subaction in action._name_parser_map.items():
97 | if subaction not in subsection_alias:
98 | subsection_alias[subaction] = []
99 | else:
100 | subsection_alias[subaction].append(name)
101 | subsection_alias_names.add(name)
102 |
103 | for name, subaction in action._name_parser_map.items():
104 | if name in subsection_alias_names:
105 | continue
106 | subalias = subsection_alias[subaction]
107 | subaction.prog = f"{parser.prog} {name}"
108 | subdata = {
109 | "name": name
110 | if not subalias
111 | else f"{name} ({', '.join(subalias)})",
112 | "help": helps.get(name, ""),
113 | "usage": subaction.format_usage().strip(),
114 | "bare_usage": _format_usage_without_prefix(subaction),
115 | }
116 | if subalias:
117 | subdata["identifier"] = name
118 | parse_parser(subaction, subdata, **kwargs)
119 | data.setdefault("children", []).append(subdata)
120 |
121 | show_defaults = True
122 | if kwargs.get("skip_default_values", False) is True:
123 | show_defaults = False
124 | show_defaults_const = show_defaults
125 | if kwargs.get("skip_default_const_values", False) is True:
126 | show_defaults_const = False
127 |
128 | # argparse stores the different groups as a list in parser._action_groups
129 | # the first element of the list holds the positional arguments, the
130 | # second the option arguments not in groups, and subsequent elements
131 | # argument groups with positional and optional parameters
132 | action_groups = []
133 | for action_group in parser._action_groups:
134 | options_list = []
135 | for action in action_group._group_actions:
136 | if isinstance(action, _HelpAction):
137 | continue
138 |
139 | # Quote default values for string/None types
140 | default = action.default
141 | if (
142 | action.default not in ["", None, True, False]
143 | and action.type in [None, str]
144 | and isinstance(action.default, str)
145 | ):
146 | default = f'"{default}"'
147 |
148 | # fill in any formatters, like %(default)s
149 | format_dict = dict(
150 | vars(action), prog=data.get("prog", ""), default=default
151 | )
152 | format_dict["default"] = default
153 | help_str = action.help or "" # Ensure we don't print None
154 | try:
155 | help_str = help_str % format_dict
156 | except Exception: # nosec
157 | pass
158 |
159 | # Options have the option_strings set, positional arguments don't
160 | name = action.option_strings
161 | if name == []:
162 | if action.metavar is None:
163 | name = [action.dest]
164 | else:
165 | name = [action.metavar]
166 | # Skip lines for subcommands
167 | if name == ["==SUPPRESS=="]:
168 | continue
169 |
170 | if isinstance(action, _StoreConstAction):
171 | option = {
172 | "name": name,
173 | "default": default
174 | if show_defaults_const
175 | else "==SUPPRESS==",
176 | "help": help_str,
177 | }
178 | else:
179 | option = {
180 | "name": name,
181 | "default": default if show_defaults else "==SUPPRESS==",
182 | "help": help_str,
183 | }
184 | if action.choices:
185 | option["choices"] = action.choices
186 | if "==SUPPRESS==" not in option["help"]:
187 | options_list.append(option)
188 |
189 | if len(options_list) == 0:
190 | continue
191 |
192 | # Upper case "Positional Arguments" and "Optional Arguments" titles
193 | # Since python-3.10 'optional arguments' changed to 'options'
194 | # more info: https://github.com/python/cpython/pull/23858
195 | if (
196 | action_group.title == "optional arguments"
197 | or action_group.title == "options"
198 | ):
199 | action_group.title = "Named Arguments"
200 | if action_group.title == "positional arguments":
201 | action_group.title = "Positional Arguments"
202 |
203 | group = {
204 | "title": action_group.title,
205 | "description": action_group.description,
206 | "options": options_list,
207 | }
208 |
209 | action_groups.append(group)
210 |
211 | if len(action_groups) > 0:
212 | data["action_groups"] = action_groups
213 |
214 | return data
215 |
--------------------------------------------------------------------------------
/docs/versions.md:
--------------------------------------------------------------------------------
1 | # Version Compatibility
2 |
3 | During the current development phase, the latest stable releases of
4 | `fractal-server` and `fractal-client` are always considered as compatible.
5 |
6 | The following table shows some additional compatibility constraints for each
7 | `fractal-client` version, which may be useful for working with a
8 | `fractal-server` instance which is not fully up-to-date.
9 |
10 | > **WARNING**: This table only shows the `fractal-server`-version lower bounds
11 | > for each `fractal-client` version, but not the corresponding upper bounds.
12 |
13 |
14 | | fractal-client | fractal-server |
15 | |----------------|----------------|
16 | | 2.0.0 | >=2.0.0 |
17 | | 1.4.3 | >=1.4.2 |
18 | | 1.4.2 | >=1.4.2 |
19 | | 1.4.1 | >=1.4.0 |
20 | | 1.4.0 | >=1.4.0 |
21 |
--------------------------------------------------------------------------------
/fractal_client/__init__.py:
--------------------------------------------------------------------------------
1 | __VERSION__ = "2.9.1"
2 |
--------------------------------------------------------------------------------
/fractal_client/authclient.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | from httpx import Client
4 |
5 |
6 | logging.getLogger("httpx").setLevel(logging.WARNING)
7 |
8 |
9 | def debug_request(verb: str, url: str, **kwargs):
10 | body = kwargs.get("json")
11 | log = f"\nFractal Client sending HTTP request to:\n {verb} {url}"
12 | if body is not None:
13 | log += "\nRequest body:\n" + "\n".join(
14 | [f" {k}: {v}" for k, v in body.items()]
15 | )
16 | logging.debug(log)
17 |
18 |
19 | class AuthenticationError(ValueError):
20 | pass
21 |
22 |
23 | class AuthClient:
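"""
Wrapper around `httpx.Client` which attaches a Bearer token to each request.

Meant to be used as a context manager: on entry, if no token was provided,
a fresh one is fetched from the `auth/token/login/` endpoint.
"""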
24 | def __init__(
25 | self,
26 | *,
27 | fractal_server: str,
28 | username: str | None,
29 | password: str | None,
30 | token: str | None,
31 | ):
32 | self.fractal_server = fractal_server.rstrip("/")
33 | self.auth = None
34 | self.client = None
35 | self.username = username
36 | self.password = password
37 | self.token = token
38 |
39 | def __enter__(self):
40 | self.client = Client()
41 | if self.token is None:
42 | self.token = self._get_fresh_token()
43 |
44 | return self
45 |
46 | def __exit__(self, *args):
47 | self.client.close()
48 |
49 | def _get_fresh_token(self) -> str:
50 | res = self.client.post(
51 | f"{self.fractal_server}/auth/token/login/",
52 | data=dict(
53 | username=self.username,
54 | password=self.password,
55 | ),
56 | )
57 | if res.status_code != 200:
58 | data = res.text
59 | raise AuthenticationError(
60 | f"Error at {res.request.url}.\n"
61 | f"Status code: {res.status_code}.\n"
62 | f"Response data: {data}.\n"
63 | )
64 | raw_token = res.json()
65 | return raw_token["access_token"]
66 |
67 | @property
68 | def auth_headers(self) -> dict[str, str]:
69 | return {"Authorization": f"Bearer {self.token}"}
70 |
71 | def _get_url(self, relative_url: str) -> str:
72 | relative_url_no_leading_slash = relative_url.lstrip("/")
73 | return f"{self.fractal_server}/{relative_url_no_leading_slash}"
74 |
75 | def get(self, relative_url: str):
76 | url = self._get_url(relative_url)
77 | debug_request("GET", url)
78 | return self.client.get(url=url, headers=self.auth_headers)
79 |
80 | def post(self, relative_url: str, **kwargs):
81 | url = self._get_url(relative_url)
82 | debug_request("POST", url, **kwargs)
83 | return self.client.post(url=url, headers=self.auth_headers, **kwargs)
84 |
85 | def patch(self, relative_url: str, **kwargs):
86 | url = self._get_url(relative_url)
87 | debug_request("PATCH", url, **kwargs)
88 | return self.client.patch(url=url, headers=self.auth_headers, **kwargs)
89 |
90 | def delete(self, relative_url: str):
91 | url = self._get_url(relative_url)
92 | debug_request("DELETE", url)
93 | return self.client.delete(url=url, headers=self.auth_headers)
94 |
--------------------------------------------------------------------------------
/fractal_client/client.py:
--------------------------------------------------------------------------------
1 | """
2 | Copyright 2022 (C) Friedrich Miescher Institute for Biomedical Research and
3 | University of Zurich
4 |
5 | Original authors:
6 | Jacopo Nespolo
7 | Marco Franzon
8 | Tommaso Comparin
9 |
10 | This file is part of Fractal and was originally developed by eXact lab S.r.l.
11 | under contract with Liberali Lab from the Friedrich Miescher
12 | Institute for Biomedical Research and Pelkmans Lab from the University of
13 | Zurich.
14 | """
15 | import logging
16 | import sys
17 |
18 | from httpx import ConnectError
19 |
20 | from . import cmd
21 | from .authclient import AuthClient
22 | from .authclient import AuthenticationError
23 | from .config import settings
24 | from .interface import Interface
25 | from .parser import parser_main
26 |
27 |
28 | def _verify_authentication_branch(
29 | *,
30 | username: str | None,
31 | password: str | None,
32 | token_path: str | None,
33 | ) -> None:
34 | """
35 |     Fail unless the credentials are either username&password or a token
36 |
37 | Arguments:
38 | username: Username
39 | password: Password
40 | token_path: Path of token
41 | """
42 | which_parameters_are_set = (
43 | bool(username),
44 | bool(password),
45 | bool(token_path),
46 | )
47 | valid_cases = (
48 | (True, True, False),
49 | (False, False, True),
50 | )
51 | if which_parameters_are_set not in valid_cases:
52 | msg = (
53 | "Invalid authentication credentials. "
54 | "You should either set username&password or the token path.\n\n"
55 | "You can set these variables in multiple ways "
56 | "(see `fractal --help`):\n"
57 | " 1. Through command-line arguments.\n"
58 | " 2. Through environment variables.\n"
59 | " 3. Through environment variables in a `.fractal.env` file.\n"
60 | )
61 | raise ValueError(msg)
62 |
63 |
64 | def handle(cli_args: list[str] = sys.argv) -> Interface:
65 |
66 | args = parser_main.parse_args(cli_args[1:])
67 |
68 | # Set logging level
69 | if args.debug is True:
70 | logging.basicConfig(level=logging.DEBUG)
71 | else:
72 | logging.basicConfig(level=logging.INFO)
73 |
74 | show_args = "\n".join(
75 | [
76 | f" {k}: {v}"
77 | if not (k == "password" and v is not None)
78 | else " password: ***"
79 | for k, v in args.__dict__.items()
80 | ]
81 | )
82 | logging.debug(f"\nArguments:\n{show_args}")
83 |
84 | if args.cmd is not None:
85 | handler = getattr(cmd, args.cmd)
86 | else:
87 | parser_main.print_help()
88 | sys.exit(1)
89 |
90 | try:
91 | # Make a copy of vars(args), and remove cmd (which is not a relevant
92 | # argument for functions called with **kwargs)
93 | kwargs = vars(args).copy()
94 | kwargs.pop("cmd")
95 | fractal_server = (
96 | kwargs.pop("fractal_server") or settings.FRACTAL_SERVER
97 | )
98 | logging.debug(f"Fractal server URL: {fractal_server}")
99 | if fractal_server is None:
100 | return Interface(
101 | data=(
102 | "Missing argument: You should set the "
103 | "fractal-server URL (see `fractal --help`)."
104 | ),
105 | retcode=1,
106 | )
107 |
108 | if args.cmd == "version":
109 | interface = handler(fractal_server, **kwargs)
110 | else:
111 | # Extract (and remove) credentials-related variables from kwargs
112 | username = kwargs.pop("user") or settings.FRACTAL_USER
113 | password = kwargs.pop("password") or settings.FRACTAL_PASSWORD
114 | token_path = (
115 | kwargs.pop("token_path") or settings.FRACTAL_TOKEN_PATH
116 | )
117 | try:
118 | _verify_authentication_branch(
119 | username=username,
120 | password=password,
121 | token_path=token_path,
122 | )
123 | except ValueError as e:
124 | return Interface(data=str(e), retcode=1)
125 | # Read token from file
126 | if token_path is not None:
127 | try:
128 | with open(token_path) as f:
129 | token = f.read().strip("\n")
130 | except Exception as e:
131 | msg = (
132 | f"Could not read token from {token_path=}.\n"
133 | f"Original error:\n{str(e)}"
134 | )
135 | return Interface(data=msg, retcode=1)
136 |
137 | else:
138 | token = None
139 |
140 | with AuthClient(
141 | fractal_server=fractal_server,
142 | username=username,
143 | password=password,
144 | token=token,
145 | ) as client:
146 | interface = handler(client, **kwargs)
147 | except AuthenticationError as e:
148 | return Interface(retcode=1, data=e.args[0])
149 | except ConnectError as e:
150 | return Interface(
151 | retcode=1,
152 | data=(
153 | f"ConnectError at {e.request.url}\n"
154 | f"Original error: '{e.args[0]}'\n"
155 |                 f"Hint: is {fractal_server} alive?"
156 | ),
157 | )
158 | except Exception as e:
159 | return Interface(retcode=1, data=str(e))
160 |
161 | return interface
162 |
163 |
164 | def main():
165 | interface = handle()
166 | interface.show()
167 | sys.exit(interface.retcode)
168 |
169 |
170 | if __name__ == "__main__":
171 | main()
172 |
--------------------------------------------------------------------------------
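Since handle() takes an argv-style list, the CLI can also be driven
programmatically. A sketch, assuming the main parser exposes a
--fractal-server option (suggested by the "fractal_server" entry popped from
kwargs above):

    from fractal_client.client import handle

    # The first element plays the role of the program name and is skipped.
    interface = handle(
        ["fractal", "--fractal-server", "http://localhost:8000", "version"]
    )
    interface.show()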
/fractal_client/cmd/__init__.py:
--------------------------------------------------------------------------------
1 | from httpx import Client
2 | from httpx import ConnectError
3 |
4 | from ..authclient import AuthClient
5 | from ..interface import Interface
6 | from ._dataset import delete_dataset
7 | from ._dataset import get_dataset
8 | from ._dataset import patch_dataset
9 | from ._dataset import post_dataset
10 | from ._group import group_add_user
11 | from ._group import group_get
12 | from ._group import group_list
13 | from ._group import group_new
14 | from ._group import group_remove_user
15 | from ._group import group_update
16 | from ._job import get_job
17 | from ._job import get_job_list
18 | from ._job import get_job_logs
19 | from ._job import job_submit
20 | from ._job import stop_job
21 | from ._project import delete_project
22 | from ._project import get_project
23 | from ._project import get_project_list
24 | from ._project import patch_project
25 | from ._project import post_project
26 | from ._task import get_task_list
27 | from ._task import patch_task
28 | from ._task import post_task
29 | from ._task_collection import show_task_group_activity
30 | from ._task_collection import task_collect_custom
31 | from ._task_collection import task_collect_pip
32 | from ._user import user_edit
33 | from ._user import user_list
34 | from ._user import user_register
35 | from ._user import user_set_groups
36 | from ._user import user_show
37 | from ._user import user_whoami
38 | from ._workflow import delete_workflow
39 | from ._workflow import delete_workflowtask
40 | from ._workflow import get_workflow
41 | from ._workflow import get_workflow_list
42 | from ._workflow import patch_workflow
43 | from ._workflow import patch_workflowtask
44 | from ._workflow import post_workflow
45 | from ._workflow import post_workflowtask
46 | from ._workflow import workflow_export
47 | from ._workflow import workflow_import
48 | from fractal_client import __VERSION__
49 |
50 |
51 | class NoCommandError(ValueError):
52 | pass
53 |
54 |
55 | def get_kwargs(_parameters, _kwargs):
56 | return {k: _kwargs.get(k) for k in _parameters if k in _kwargs}
57 |
58 |
59 | def project(
60 | client: AuthClient,
61 | subcmd: str,
62 | batch: bool = False,
63 | **kwargs,
64 | ) -> Interface:
65 | if subcmd == "new":
66 | parameters = ["name"]
67 | function_kwargs = get_kwargs(parameters, kwargs)
68 | iface = post_project(client, batch=batch, **function_kwargs)
69 | elif subcmd == "show":
70 | parameters = ["project_id"]
71 | function_kwargs = get_kwargs(parameters, kwargs)
72 | iface = get_project(client, **function_kwargs)
73 | elif subcmd == "list":
74 | iface = get_project_list(client)
75 | elif subcmd == "edit":
76 | parameters = [
77 | "project_id",
78 | "new_name",
79 | ]
80 | function_kwargs = get_kwargs(parameters, kwargs)
81 | iface = patch_project(client, **function_kwargs)
82 | elif subcmd == "add-dataset":
83 | parameters = [
84 | "project_id",
85 | "dataset_name",
86 | "zarr_dir",
87 | ]
88 | function_kwargs = get_kwargs(parameters, kwargs)
89 | iface = post_dataset(client, batch=batch, **function_kwargs)
90 | elif subcmd == "delete":
91 | parameters = ["project_id"]
92 | function_kwargs = get_kwargs(parameters, kwargs)
93 | iface = delete_project(client, **function_kwargs)
94 | else:
95 | raise NoCommandError(f"Command 'project {subcmd}' not found")
96 |
97 | return iface
98 |
99 |
100 | def dataset(
101 | client: AuthClient,
102 | subcmd: str,
103 | batch: bool = False,
104 | **kwargs,
105 | ) -> Interface:
106 | if subcmd == "show":
107 | parameters = ["project_id", "dataset_id"]
108 | function_kwargs = get_kwargs(parameters, kwargs)
109 | iface = get_dataset(client, **function_kwargs)
110 | elif subcmd == "edit":
111 | parameters = [
112 | "project_id",
113 | "dataset_id",
114 | "new_name",
115 | ]
116 | function_kwargs = get_kwargs(parameters, kwargs)
117 | iface = patch_dataset(client, **function_kwargs)
118 | elif subcmd == "delete":
119 | parameters = ["project_id", "dataset_id"]
120 | function_kwargs = get_kwargs(parameters, kwargs)
121 | iface = delete_dataset(client, **function_kwargs)
122 | else:
123 | raise NoCommandError(f"Command 'dataset {subcmd}' not found")
124 | return iface
125 |
126 |
127 | def task(
128 | client: AuthClient,
129 | subcmd: str,
130 | batch: bool = False,
131 | **kwargs,
132 | ) -> Interface:
133 | if subcmd == "list":
134 | iface = get_task_list(client)
135 | elif subcmd == "collect":
136 | parameters = [
137 | "package",
138 | "package_version",
139 | "python_version",
140 | "package_extras",
141 | "pinned_dependency",
142 | "private",
143 | ]
144 | function_kwargs = get_kwargs(parameters, kwargs)
145 | iface = task_collect_pip(client, batch=batch, **function_kwargs)
146 | elif subcmd == "collect-custom":
147 | parameters = [
148 | "label",
149 | "python_interpreter",
150 | "manifest",
151 | "version",
152 | "package_name",
153 | "package_root",
154 | "private",
155 | ]
156 | function_kwargs = get_kwargs(parameters, kwargs)
157 | iface = task_collect_custom(client, batch=batch, **function_kwargs)
158 | elif subcmd == "check-collection":
159 | parameters = [
160 | "task_group_activity_id",
161 | "include_logs",
162 | ]
163 | function_kwargs = get_kwargs(parameters, kwargs)
164 | iface = show_task_group_activity(client, **function_kwargs)
165 | elif subcmd == "new":
166 | parameters = [
167 | "name",
168 | "task_type",
169 | "version",
170 | "command_non_parallel",
171 | "command_parallel",
172 | "meta_non_parallel",
173 | "meta_parallel",
174 | "args_schema_non_parallel",
175 | "args_schema_parallel",
176 | "args_schema_version",
177 | "private",
178 | ]
179 | function_kwargs = get_kwargs(parameters, kwargs)
180 | iface = post_task(client, batch=batch, **function_kwargs)
181 | elif subcmd == "edit":
182 | parameters = [
183 | "id",
184 | "name",
185 | "version",
186 | "command_non_parallel",
187 | "command_parallel",
188 | "input_types",
189 | "output_types",
190 | ]
191 | function_kwargs = get_kwargs(parameters, kwargs)
192 | iface = patch_task(client, **function_kwargs)
193 | else:
194 | raise NoCommandError(f"Command 'task {subcmd}' not found")
195 | return iface
196 |
197 |
198 | def workflow(
199 | client: AuthClient,
200 | subcmd: str,
201 | batch: bool = False,
202 | **kwargs,
203 | ) -> Interface:
204 | if subcmd == "show":
205 | parameters = ["project_id", "workflow_id"]
206 | function_kwargs = get_kwargs(parameters, kwargs)
207 | iface = get_workflow(client, **function_kwargs)
208 | elif subcmd == "new":
209 | parameters = ["name", "project_id"]
210 | function_kwargs = get_kwargs(parameters, kwargs)
211 | iface = post_workflow(client, batch=batch, **function_kwargs)
212 | elif subcmd == "list":
213 | parameters = ["project_id"]
214 | function_kwargs = get_kwargs(parameters, kwargs)
215 | iface = get_workflow_list(client, batch=batch, **function_kwargs)
216 | elif subcmd == "edit":
217 | parameters = ["project_id", "workflow_id", "new_name"]
218 | function_kwargs = get_kwargs(parameters, kwargs)
219 | iface = patch_workflow(client, **function_kwargs)
220 | elif subcmd == "delete":
221 | parameters = ["project_id", "workflow_id"]
222 | function_kwargs = get_kwargs(parameters, kwargs)
223 | iface = delete_workflow(client, **function_kwargs)
224 | elif subcmd == "add-task":
225 | parameters = [
226 | "project_id",
227 | "workflow_id",
228 | "task_id",
229 | "task_name",
230 | "task_version",
231 | "order",
232 | "args_non_parallel",
233 | "args_parallel",
234 | "type_filters",
235 | "meta_parallel",
236 | "meta_non_parallel",
237 | ]
238 | function_kwargs = get_kwargs(parameters, kwargs)
239 | iface = post_workflowtask(client, batch=batch, **function_kwargs)
240 | elif subcmd == "edit-task":
241 | parameters = [
242 | "project_id",
243 | "workflow_id",
244 | "workflow_task_id",
245 | "type_filters",
246 | "args_non_parallel",
247 | "args_parallel",
248 | "meta_parallel",
249 | "meta_non_parallel",
250 | ]
251 | function_kwargs = get_kwargs(parameters, kwargs)
252 | iface = patch_workflowtask(client, **function_kwargs)
253 | elif subcmd == "rm-task":
254 | parameters = ["project_id", "workflow_id", "workflow_task_id"]
255 | function_kwargs = get_kwargs(parameters, kwargs)
256 | iface = delete_workflowtask(client, **function_kwargs)
257 | elif subcmd == "import":
258 | parameters = ["project_id", "json_file", "workflow_name"]
259 | function_kwargs = get_kwargs(parameters, kwargs)
260 | iface = workflow_import(client, batch=batch, **function_kwargs)
261 | elif subcmd == "export":
262 | parameters = ["project_id", "workflow_id", "json_file"]
263 | function_kwargs = get_kwargs(parameters, kwargs)
264 | iface = workflow_export(client, **function_kwargs)
265 | else:
266 | raise NoCommandError(f"Command 'workflow {subcmd}' not found")
267 | return iface
268 |
269 |
270 | def job(
271 | client: AuthClient,
272 | subcmd: str,
273 | batch: bool = False,
274 | **kwargs,
275 | ) -> Interface:
276 | if subcmd == "list":
277 | parameters = ["project_id"]
278 | function_kwargs = get_kwargs(parameters, kwargs)
279 | iface = get_job_list(client, batch=batch, **function_kwargs)
280 | elif subcmd == "show":
281 | parameters = ["project_id", "job_id"]
282 | function_kwargs = get_kwargs(parameters, kwargs)
283 | iface = get_job(client, batch=batch, **function_kwargs)
284 | elif subcmd == "download-logs":
285 | parameters = ["project_id", "job_id", "output_folder"]
286 | function_kwargs = get_kwargs(parameters, kwargs)
287 | iface = get_job_logs(client, **function_kwargs)
288 | elif subcmd == "stop":
289 | parameters = ["project_id", "job_id"]
290 | function_kwargs = get_kwargs(parameters, kwargs)
291 | iface = stop_job(client, **function_kwargs)
292 | elif subcmd == "submit":
293 | parameters = [
294 | "project_id",
295 | "workflow_id",
296 | "dataset_id",
297 | "worker_init",
298 | "first_task_index",
299 | "last_task_index",
300 | "attribute_filters_json",
301 | "type_filters_json",
302 | ]
303 | function_kwargs = get_kwargs(parameters, kwargs)
304 | iface = job_submit(client, batch=batch, **function_kwargs)
305 | else:
306 | raise NoCommandError(f"Command 'job {subcmd}' not found")
307 | return iface
308 |
309 |
310 | def version(fractal_server: str, **kwargs) -> Interface:
311 | with Client() as client:
312 | try:
313 | res = client.get(f"{fractal_server}/api/alive/")
314 | server_version = res.json()["version"]
315 | server_str = f" url: {fractal_server} version: {server_version}"
316 | except ConnectError:
317 | server_str = f" Connection to '{fractal_server}' refused"
318 |
319 | return Interface(
320 | retcode=0,
321 | data=(
322 | f"Fractal client\n version: {__VERSION__}\n"
323 | f"Fractal server:\n{server_str}"
324 | ),
325 | )
326 |
327 |
328 | def user(
329 | client: AuthClient, subcmd: str, batch: bool = False, **kwargs
330 | ) -> Interface:
331 | if subcmd == "register":
332 | parameters = [
333 | "new_email",
334 | "new_password",
335 | "slurm_user",
336 | "project_dir",
337 | "username",
338 | "ssh_settings_json",
339 | "superuser",
340 | ]
341 | function_kwargs = get_kwargs(parameters, kwargs)
342 | iface = user_register(client, batch=batch, **function_kwargs)
343 | elif subcmd == "list":
344 | iface = user_list(client)
345 | elif subcmd == "show":
346 | parameters = ["user_id"]
347 | function_kwargs = get_kwargs(parameters, kwargs)
348 | iface = user_show(client, **function_kwargs)
349 | elif subcmd == "edit":
350 | parameters = [
351 | "user_id",
352 | "new_email",
353 | "new_password",
354 | "new_username",
355 | "new_slurm_user",
356 | "new_project_dir",
357 | "new_ssh_settings_json",
358 | "make_superuser",
359 | "remove_superuser",
360 | "make_verified",
361 | "remove_verified",
362 | ]
363 | function_kwargs = get_kwargs(parameters, kwargs)
364 | iface = user_edit(client, **function_kwargs)
365 | elif subcmd == "set-groups":
366 | parameters = [
367 | "user_id",
368 | "group_ids",
369 | ]
370 | function_kwargs = get_kwargs(parameters, kwargs)
371 | iface = user_set_groups(client, **function_kwargs)
372 | elif subcmd == "whoami":
373 | parameters = ["viewer_paths"]
374 | function_kwargs = get_kwargs(parameters, kwargs)
375 | iface = user_whoami(client, **function_kwargs, batch=batch)
376 | else:
377 | raise NoCommandError(f"Command 'user {subcmd}' not found")
378 |
379 | return iface
380 |
381 |
382 | def group(
383 | client: AuthClient, subcmd: str, batch: bool = False, **kwargs
384 | ) -> Interface:
385 | if subcmd == "list":
386 | parameters = ["user_ids"]
387 | function_kwargs = get_kwargs(parameters, kwargs)
388 | iface = group_list(client, batch=batch, **function_kwargs)
389 | elif subcmd == "get":
390 | parameters = ["group_id"]
391 | function_kwargs = get_kwargs(parameters, kwargs)
392 | iface = group_get(client, **function_kwargs)
393 | elif subcmd == "new":
394 | parameters = ["name", "viewer_paths"]
395 | function_kwargs = get_kwargs(parameters, kwargs)
396 | iface = group_new(client, batch=batch, **function_kwargs)
397 | elif subcmd == "update":
398 | parameters = ["group_id", "new_viewer_paths"]
399 | function_kwargs = get_kwargs(parameters, kwargs)
400 | iface = group_update(client, **function_kwargs)
401 | elif subcmd == "add-user":
402 | parameters = ["group_id", "user_id"]
403 | function_kwargs = get_kwargs(parameters, kwargs)
404 | iface = group_add_user(client, **function_kwargs)
405 | elif subcmd == "remove-user":
406 | parameters = ["group_id", "user_id"]
407 | function_kwargs = get_kwargs(parameters, kwargs)
408 | iface = group_remove_user(client, **function_kwargs)
409 | else:
410 | raise NoCommandError(f"Command 'group {subcmd}' not found")
411 |
412 | return iface
413 |
--------------------------------------------------------------------------------
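The dispatch functions above all lean on get_kwargs() to whittle the full
argparse namespace down to just the keyword arguments each endpoint function
accepts. A small illustration:

    from fractal_client.cmd import get_kwargs

    all_args = {"project_id": 1, "new_name": "renamed", "batch": False}
    get_kwargs(["project_id", "new_name"], all_args)
    # -> {"project_id": 1, "new_name": "renamed"}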
/fractal_client/cmd/_aux_task_caching.py:
--------------------------------------------------------------------------------
1 | import json
2 | from pathlib import Path
3 | from typing import Any
4 |
5 | import packaging.version
6 |
7 | from ..authclient import AuthClient
8 | from ..config import settings
9 | from ..response import check_response
10 |
11 | TASKS_CACHE_FILENAME = "tasks"
12 |
13 |
14 | def _loose_version_parse(v: str) -> packaging.version.Version:
15 | """
16 | Catch `InvalidVersion` error and return `Version("0")`.
17 |
18 | This function is used in the comparison of different version strings. If a
19 | version cannot be parsed correctly, then it should not be considered the
20 | "latest"; we obtain this behavior by returning the "0" version when
21 | version-string parsing fails.
22 |
23 | Args:
24 | v: Version string (e.g. `0.10.0a2`).
25 |
26 | Returns:
27 | A `Version` object, parsed with `packaging.version.parse`
28 | """
29 | try:
30 | return packaging.version.parse(v)
31 | except packaging.version.InvalidVersion:
32 | return packaging.version.parse("0")
33 |
34 |
35 | class FractalCacheError(RuntimeError):
36 | """
37 | Custom error raised by functions of this module
38 | """
39 |
40 | pass
41 |
42 |
43 | # Define a useful type
44 | _TaskList = list[dict[str, Any]]
45 |
46 |
47 | def _fetch_task_list(client: AuthClient) -> _TaskList:
48 | """
49 | Fetch task list through an API request.
50 | """
51 | res = client.get("api/v2/task/")
52 | task_list = check_response(res, expected_status_code=200)
53 | return task_list
54 |
55 |
56 | def _sort_task_list(task_list: _TaskList) -> _TaskList:
57 | """
58 | Sort tasks according to their (name, version) attributes.
59 | """
60 | new_task_list = sorted(
61 | task_list,
62 | key=lambda task: (
63 | task["name"],
64 | task["version"] or "",
65 | ),
66 | )
67 | return new_task_list
68 |
69 |
70 | def _write_task_list_to_cache(task_list: _TaskList) -> None:
71 | """
72 | Write task list to cache file
73 | """
74 | cache_dir = Path(f"{settings.FRACTAL_CACHE_PATH}")
75 | cache_dir.mkdir(parents=True, exist_ok=True)
76 | with (cache_dir / TASKS_CACHE_FILENAME).open("w") as f:
77 | json.dump(task_list, f, indent=4)
78 |
79 |
80 | def refresh_task_cache(client: AuthClient) -> _TaskList:
81 | """
82 | Return task list after fetching it, sorting it and writing to cache file.
83 | """
84 | task_list = _fetch_task_list(client)
85 | task_list = _sort_task_list(task_list)
86 | _write_task_list_to_cache(task_list)
87 | return task_list
88 |
89 |
90 | def _get_matching_tasks(
91 | task_list: _TaskList,
92 | *,
93 | name: str,
94 | version: str | None = None,
95 | ) -> _TaskList:
96 | """
97 | Given a task list, extract all the tasks matching some conditions.
98 | """
99 |
100 | def _condition(_task):
101 | if _task["name"] == name:
102 | if (version is None) or (_task["version"] == version):
103 | return True
104 | return False
105 | else:
106 | return False
107 |
108 | return [_task for _task in task_list if _condition(_task)]
109 |
110 |
111 | def _format_task_list(task_list: _TaskList) -> str:
112 | """
113 | Helper function to print a formatted task list with only a few task
114 | attributes, to be used in error messages.
115 | """
116 | header = " ID, Name, Version"
117 | formatted_list = "\n".join(
118 | [
119 | f' {task["id"]}, "{task["name"]}", {task["version"]}'
120 | for task in task_list
121 | ]
122 | )
123 | return f"{header}\n{formatted_list}"
124 |
125 |
126 | def _search_in_task_list(
127 | *,
128 | task_list: _TaskList,
129 | name: str,
130 | version: str | None = None,
131 | ) -> int:
132 | """
133 | Search for a single task in `task_list` based on the provided `name`
134 | and `version`, and return its `id`.
135 |
136 | If `version` is not provided, use the maximum available version (that is,
137 | the latest version).
138 |
139 | If the task is not found or is not unique, raise a `FractalCacheError`.
140 | """
141 | matching_task_list = _get_matching_tasks(
142 | task_list, name=name, version=version
143 | )
144 | formatted_matching_task_list = _format_task_list(matching_task_list)
145 |
146 | if len(matching_task_list) == 0:
147 | formatted_task_list = _format_task_list(task_list)
148 | if version is not None:
149 | raise FractalCacheError(
150 | f'There is no task with (name, version)=("{name}", {version}) '
151 | f"in the following task list:\n{formatted_task_list}\n"
152 | )
153 | else:
154 | raise FractalCacheError(
155 | f'There is no task with name "{name}" '
156 | f"in the following task list:\n{formatted_task_list}\n"
157 | )
158 | elif len(matching_task_list) == 1:
159 | return matching_task_list[0]["id"]
160 | else: # i.e. len(matching_task_list) > 1
161 | if version is not None:
162 | raise FractalCacheError(
163 | f"Multiple tasks with version {version} in the following "
164 |                 f"task list:\n{formatted_matching_task_list}\n"
165 | "Please make your request more specific.\n"
166 | )
167 | else: # i.e. version is None
168 | if any(task["version"] is None for task in matching_task_list):
169 | raise FractalCacheError(
170 | "Cannot determine the latest version in the following "
171 |                     f"task list:\n{formatted_matching_task_list}\n"
172 | "Please make your request more specific.\n"
173 | )
174 | available_versions = [
175 | _task["version"] for _task in matching_task_list
176 | ]
177 | max_version = max(available_versions, key=_loose_version_parse)
178 | max_version_tasks = [
179 | _task
180 | for _task in matching_task_list
181 | if _task["version"] == max_version
182 | ]
183 | formatted_matching_task_list = _format_task_list(max_version_tasks)
184 | if len(max_version_tasks) == 1:
185 | return max_version_tasks[0]["id"]
186 | else:
187 | raise FractalCacheError(
188 | "Multiple tasks with latest version "
189 | f"({max_version}) in the following task "
190 |                     f"list:\n{formatted_matching_task_list}\n"
191 | "Please make your request more specific.\n"
192 | )
193 |
194 |
195 | def get_task_id_from_cache(
196 | client: AuthClient, task_name: str, version: str | None = None
197 | ) -> int:
198 | """
199 | Retrieve the `id` of a task from the cache based on the provided
200 | `task_name` and `version`.
201 |
202 | If `version` is not provided, the latest (i.e. maximum) available version
203 | is used.
204 |
205 | Return the `id` of the single matching task, if found.
206 |
207 | If the task is not found or is not unique, re-try after refreshing the
208 | cache, and then raise a `FractalCacheError`.
209 | """
210 |
211 | # If cache is missing, create it
212 | cache_dir = Path(f"{settings.FRACTAL_CACHE_PATH}")
213 | cache_file = cache_dir / TASKS_CACHE_FILENAME
214 | if cache_file.exists():
215 | with cache_file.open("r") as f:
216 | task_list = json.load(f)
217 | already_refreshed_cache = False
218 | else:
219 | task_list = refresh_task_cache(client)
220 | already_refreshed_cache = True
221 |
222 | try:
223 | task_id = _search_in_task_list(
224 | task_list=task_list,
225 | name=task_name,
226 | version=version,
227 | )
228 | except FractalCacheError as e:
229 | if already_refreshed_cache:
230 | # Cache is already up to date, fail
231 | raise e
232 | else:
233 | # Cache may be out-of-date, refresh it and try again
234 | task_list = refresh_task_cache(client)
235 | task_id = _search_in_task_list(
236 | task_list=task_list,
237 | name=task_name,
238 | version=version,
239 | )
240 | return task_id
241 |
--------------------------------------------------------------------------------
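How the version-resolution logic above behaves on a toy task list (hypothetical
tasks; _search_in_task_list is a private helper, shown here only to illustrate
the latest-version rule):

    from fractal_client.cmd._aux_task_caching import _search_in_task_list

    tasks = [
        {"id": 1, "name": "thresholding", "version": "0.1.0"},
        {"id": 2, "name": "thresholding", "version": "0.2.0"},
    ]
    _search_in_task_list(task_list=tasks, name="thresholding", version="0.1.0")
    # -> 1
    _search_in_task_list(task_list=tasks, name="thresholding")
    # -> 2 (the latest version wins when no version is given)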
/fractal_client/cmd/_dataset.py:
--------------------------------------------------------------------------------
1 | from ..authclient import AuthClient
2 | from ..interface import Interface
3 | from ..response import check_response
4 |
5 |
6 | def post_dataset(
7 | client: AuthClient,
8 | *,
9 | project_id: int,
10 | dataset_name: str,
11 | zarr_dir: str | None = None,
12 | batch: bool = False,
13 | ) -> Interface:
14 | """
15 | Arguments:
16 | project_id: ID of project to add the new dataset to
17 | dataset_name: Name of new dataset
18 |         zarr_dir: Base folder for the new dataset's zarr data (optional)
19 | """
20 | dataset = dict(name=dataset_name)
21 | if zarr_dir is not None:
22 | dataset["zarr_dir"] = zarr_dir
23 |
24 | res = client.post(
25 | f"api/v2/project/{project_id}/dataset/",
26 | json=dataset,
27 | )
28 | new_dataset = check_response(res, expected_status_code=201)
29 | if batch:
30 | return Interface(retcode=0, data=new_dataset["id"])
31 | else:
32 | return Interface(retcode=0, data=new_dataset)
33 |
34 |
35 | def patch_dataset(
36 | client: AuthClient,
37 | *,
38 | project_id: int,
39 | dataset_id: int,
40 | new_name: str | None = None,
41 | ) -> Interface:
42 | # Prepare payload
43 | dataset_update = {}
44 | if new_name is not None:
45 | dataset_update["name"] = new_name
46 |
47 | res = client.patch(
48 |         f"api/v2/project/{project_id}/dataset/{dataset_id}/",
49 | json=dataset_update,
50 | )
51 | data = check_response(res, expected_status_code=200)
52 | return Interface(retcode=0, data=data)
53 |
54 |
55 | def get_dataset(
56 | client: AuthClient, *, project_id: int, dataset_id: int
57 | ) -> Interface:
58 | res = client.get(f"api/v2/project/{project_id}/dataset/{dataset_id}/")
59 |     return Interface(retcode=0, data=check_response(res, expected_status_code=200))
60 |
61 |
62 | def delete_dataset(
63 | client: AuthClient, *, project_id: int, dataset_id: int
64 | ) -> Interface:
65 |
66 | res = client.delete(f"api/v2/project/{project_id}/dataset/{dataset_id}/")
67 | check_response(res, expected_status_code=204)
68 | return Interface(retcode=0, data="")
69 |
--------------------------------------------------------------------------------
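Like the other PATCH helpers in this package, patch_dataset only sends the
fields that were actually provided, so omitted options leave the server-side
values untouched. A hypothetical call, with `client` an authenticated
AuthClient:

    from fractal_client.cmd._dataset import patch_dataset

    # Renames dataset 2 of project 1; no other attribute is modified.
    iface = patch_dataset(
        client, project_id=1, dataset_id=2, new_name="renamed-ds"
    )
    print(iface.retcode, iface.data)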
/fractal_client/cmd/_group.py:
--------------------------------------------------------------------------------
1 | from ..authclient import AuthClient
2 | from ..interface import Interface
3 | from ..response import check_response
4 |
5 |
6 | def group_list(
7 | client: AuthClient,
8 | *,
9 | user_ids: bool = False,
10 | batch: bool = False,
11 | ):
12 | query_params = "?user_ids=true" if user_ids else ""
13 | res = client.get(f"auth/group/{query_params}")
14 | data = check_response(res, expected_status_code=200)
15 | if batch:
16 | return Interface(
17 | retcode=0, data=" ".join([str(d["id"]) for d in data])
18 | )
19 | else:
20 | return Interface(retcode=0, data=data)
21 |
22 |
23 | def group_get(client: AuthClient, *, group_id: int):
24 | res = client.get(f"auth/group/{group_id}/")
25 | data = check_response(res, expected_status_code=200)
26 | return Interface(retcode=0, data=data)
27 |
28 |
29 | def group_new(
30 | client: AuthClient,
31 | *,
32 | name: str,
33 | viewer_paths: list[str] | None = None,
34 | batch: bool = False,
35 | ):
36 | request_body = dict(name=name)
37 | if viewer_paths is not None:
38 | request_body["viewer_paths"] = viewer_paths
39 |
40 | res = client.post(
41 | "auth/group/",
42 | json=request_body,
43 | )
44 | data = check_response(res, expected_status_code=201)
45 | if batch:
46 | return Interface(retcode=0, data=data["id"])
47 | else:
48 | return Interface(retcode=0, data=data)
49 |
50 |
51 | def group_update(
52 | client: AuthClient,
53 | *,
54 | group_id: int,
55 | new_viewer_paths: list[str],
56 | ):
57 | res = client.patch(
58 | f"auth/group/{group_id}/",
59 | json=dict(viewer_paths=new_viewer_paths),
60 | )
61 | data = check_response(res, expected_status_code=200)
62 | return Interface(retcode=0, data=data)
63 |
64 |
65 | def group_add_user(
66 | client: AuthClient,
67 | *,
68 | group_id: int,
69 | user_id: int,
70 | ):
71 | res = client.post(f"auth/group/{group_id}/add-user/{user_id}/")
72 | data = check_response(res, expected_status_code=200)
73 | return Interface(retcode=0, data=data)
74 |
75 |
76 | def group_remove_user(
77 | client: AuthClient,
78 | *,
79 | group_id: int,
80 | user_id: int,
81 | ):
82 | res = client.post(f"auth/group/{group_id}/remove-user/{user_id}/")
83 | data = check_response(res, expected_status_code=200)
84 | return Interface(retcode=0, data=data)
85 |
--------------------------------------------------------------------------------
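In batch mode, group_list collapses its output to a single line of
space-separated IDs, which is convenient for shell scripting. A sketch, with
`client` an authenticated AuthClient:

    from fractal_client.cmd._group import group_list

    iface = group_list(client, user_ids=False, batch=True)
    print(iface.data)  # e.g. "1 2 5" -- one entry per group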
/fractal_client/cmd/_job.py:
--------------------------------------------------------------------------------
1 | import json
2 | import logging
3 | import os
4 | from pathlib import Path
5 | from zipfile import ZipFile
6 |
7 | from ..authclient import AuthClient
8 | from ..interface import Interface
9 | from ..response import check_response
10 |
11 |
12 | def get_job(
13 | client: AuthClient,
14 | *,
15 | project_id: int,
16 | job_id: int,
17 | batch: bool = False,
18 | ) -> Interface:
19 | """
20 | Query the status of a workflow-execution job
21 | """
22 |
23 | res = client.get(f"api/v2/project/{project_id}/job/{job_id}/")
24 | job = check_response(res, expected_status_code=200)
25 | if batch:
26 | return Interface(retcode=0, data=job["status"])
27 | else:
28 | return Interface(retcode=0, data=job)
29 |
30 |
31 | def get_job_list(
32 | client: AuthClient, *, project_id: int, batch: bool = False
33 | ) -> Interface:
34 |
35 | res = client.get(f"api/v2/project/{project_id}/job/")
36 | jobs = check_response(res, expected_status_code=200)
37 |
38 | if batch:
39 | job_ids = " ".join(str(job["id"]) for job in jobs)
40 | return Interface(retcode=0, data=job_ids)
41 | else:
42 | return Interface(retcode=0, data=jobs)
43 |
44 |
45 | def get_job_logs(
46 | client: AuthClient,
47 | *,
48 | project_id: int,
49 | job_id: int,
50 | output_folder: str,
51 | ) -> Interface:
52 |
53 | # Check that output_folder does not already exist
54 | if Path(output_folder).exists():
55 | return Interface(
56 | retcode=1, data=f"ERROR: {output_folder=} already exists"
57 | )
58 |
59 | # Send request to server
60 | res = client.get(f"api/v2/project/{project_id}/job/{job_id}/download/")
61 |
62 |     # NOTE: We cannot use our default check_response here, because the
63 |     # response content is binary. Therefore we check the status code by hand
64 |     if res.status_code != 200:
65 |         logging.error(f"Server returned {res.status_code}")
66 |         logging.error(
67 |             f"Original request: {res.request.method} {res.request.url}"
68 |         )
69 |         logging.error(
70 |             f"Original payload: {res.request.content.decode('utf-8')}"
71 |         )
72 |         logging.error("Terminating.\n")
73 |         raise SystemExit(1)
74 |
75 | # Check the content-type entry in the response headers
76 | content_type = res.headers["content-type"]
77 | expected_content_type = "application/x-zip-compressed"
78 | if content_type != expected_content_type:
79 | logging.error(
80 | f"Unexpected {content_type=} in headers of server "
81 | f"response, instead of {expected_content_type=}"
82 | )
83 |         logging.error(
84 |             f"Original request: {res.request.method} {res.request.url}"
85 |         )
86 |         logging.error(
87 |             f"Original payload: {res.request.content.decode('utf-8')}"
88 |         )
89 |         logging.error("Terminating.\n")
90 |         raise SystemExit(1)
91 |
92 | # Write response into a temporary zipped file
93 | zipped_archive_path = output_folder + "_tmp.zip"
94 | with open(zipped_archive_path, "wb") as f:
95 | f.write(res.content)
96 |
97 | # Unzip the log archive
98 | unzipped_archived_path = output_folder
99 | os.mkdir(unzipped_archived_path)
100 | with ZipFile(zipped_archive_path, mode="r") as zipfile:
101 | zipfile.extractall(path=unzipped_archived_path)
102 |
103 | # Remove zipped temporary file
104 | os.unlink(zipped_archive_path)
105 |
106 | return Interface(retcode=0, data=f"Logs downloaded to {output_folder=}")
107 |
108 |
109 | def stop_job(client: AuthClient, *, project_id: int, job_id: int) -> Interface:
110 | """
111 | Stop a workflow-execution job
112 | """
113 |
114 | res = client.get(f"api/v2/project/{project_id}/job/{job_id}/stop/")
115 | check_response(res, expected_status_code=202)
116 | return Interface(
117 | retcode=0, data="Correctly called the job-stopping endpoint"
118 | )
119 |
120 |
121 | def job_submit(
122 | client: AuthClient,
123 | *,
124 | project_id: int,
125 | workflow_id: int,
126 | dataset_id: int,
127 | first_task_index: int | None = None,
128 | last_task_index: int | None = None,
129 | worker_init: str | None = None,
130 | attribute_filters_json: str | None = None,
131 | type_filters_json: str | None = None,
132 | batch: bool = False,
133 | ) -> Interface:
134 |
135 |     # Prepare JobV2 payload, without None attributes
136 |     job_payload = dict()
137 |     if worker_init is not None:
138 |         job_payload["worker_init"] = worker_init
139 |     if first_task_index is not None:
140 |         job_payload["first_task_index"] = first_task_index
141 |     if last_task_index is not None:
142 |         job_payload["last_task_index"] = last_task_index
143 | 
144 |     if attribute_filters_json is not None:
145 |         with Path(attribute_filters_json).open("r") as f:
146 |             job_payload["attribute_filters"] = json.load(f)
147 |     if type_filters_json is not None:
148 |         with Path(type_filters_json).open("r") as f:
149 |             job_payload["type_filters"] = json.load(f)
150 | 
151 |     # Prepare query parameters
152 |     query_parameters = f"workflow_id={workflow_id}&dataset_id={dataset_id}"
153 | 
154 |     res = client.post(
155 |         f"api/v2/project/{project_id}/job/submit/?{query_parameters}",
156 |         json=job_payload,
157 |     )
158 | job_read = check_response(res, expected_status_code=202)
159 |
160 | if batch:
161 | return Interface(retcode=0, data=job_read["id"])
162 | else:
163 | return Interface(retcode=0, data=job_read)
164 |
--------------------------------------------------------------------------------
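The two *_json options of job_submit point to files holding plain JSON objects
that are loaded verbatim into the request body. A sketch of preparing such a
file (the filter key is hypothetical):

    import json
    from pathlib import Path

    # job_submit(..., type_filters_json="type_filters.json") would load this
    # dict into the "type_filters" field of the payload.
    Path("type_filters.json").write_text(json.dumps({"is_3D": True}))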
/fractal_client/cmd/_project.py:
--------------------------------------------------------------------------------
1 | from ..authclient import AuthClient
2 | from ..interface import Interface
3 | from ..response import check_response
4 |
5 |
6 | def post_project(
7 | client: AuthClient,
8 | *,
9 | name: str,
10 | batch: bool = False,
11 | ) -> Interface:
12 | # Prepare a ProjectCreate request body
13 | payload = dict(name=name)
14 | # Send API request
15 | res = client.post("api/v2/project/", json=payload)
16 | project = check_response(res, expected_status_code=201)
17 | if batch:
18 | return Interface(retcode=0, data=project["id"])
19 | else:
20 | return Interface(retcode=0, data=project)
21 |
22 |
23 | def get_project_list(client: AuthClient) -> Interface:
24 |
25 | res = client.get("api/v2/project/")
26 | projects = check_response(res, expected_status_code=200)
27 | return Interface(retcode=0, data=projects)
28 |
29 |
30 | def get_project(client: AuthClient, *, project_id: int) -> Interface:
31 | res = client.get(f"api/v2/project/{project_id}/")
32 | project = check_response(res, expected_status_code=200)
33 | return Interface(retcode=0, data=project)
34 |
35 |
36 | def delete_project(client: AuthClient, *, project_id: int) -> Interface:
37 |
38 | res = client.delete(f"api/v2/project/{project_id}/")
39 | check_response(res, expected_status_code=204)
40 | return Interface(retcode=0, data="")
41 |
42 |
43 | def patch_project(
44 | client: AuthClient,
45 | *,
46 | project_id: int,
47 | new_name: str | None = None,
48 | ) -> Interface:
49 | project_update = {}
50 | if new_name:
51 | project_update["name"] = new_name
52 |
53 | res = client.patch(f"api/v2/project/{project_id}/", json=project_update)
54 | new_project = check_response(res, expected_status_code=200)
55 | return Interface(retcode=0, data=new_project)
56 |
--------------------------------------------------------------------------------
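Because batch mode returns bare IDs, these helpers compose naturally: the ID
returned by post_project can feed straight into post_dataset. A hypothetical
chain, with `client` an authenticated AuthClient and a placeholder zarr path:

    from fractal_client.cmd._dataset import post_dataset
    from fractal_client.cmd._project import post_project

    project_id = post_project(client, name="demo-project", batch=True).data
    post_dataset(
        client,
        project_id=project_id,
        dataset_name="demo-dataset",
        zarr_dir="/tmp/zarr",
        batch=True,
    )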
/fractal_client/cmd/_task.py:
--------------------------------------------------------------------------------
1 | import json
2 | import logging
3 | import sys
4 |
5 | from ..authclient import AuthClient
6 | from ..interface import Interface
7 | from ..response import check_response
8 | from ._aux_task_caching import FractalCacheError
9 | from ._aux_task_caching import get_task_id_from_cache
10 | from ._aux_task_caching import refresh_task_cache
11 |
12 |
13 | def get_task_list(client: AuthClient) -> Interface:
14 | task_list = refresh_task_cache(client=client)
15 | return Interface(retcode=0, data=task_list)
16 |
17 |
18 | def post_task(
19 | client: AuthClient,
20 | *,
21 | name: str,
22 | task_type: str | None = None,
23 | batch: bool = False,
24 | command_non_parallel: str | None = None,
25 | command_parallel: str | None = None,
26 | version: str | None = None,
27 | meta_non_parallel: str | None = None,
28 | meta_parallel: str | None = None,
29 | args_schema_non_parallel: str | None = None,
30 | args_schema_parallel: str | None = None,
31 | args_schema_version: str | None = None,
32 | private: bool = False,
33 | ) -> Interface:
34 | task = dict(name=name)
35 | if task_type:
36 | task["type"] = task_type
37 | if command_non_parallel:
38 | task["command_non_parallel"] = command_non_parallel
39 | if command_parallel:
40 | task["command_parallel"] = command_parallel
41 | if version:
42 | task["version"] = version
43 | if meta_non_parallel:
44 | with open(meta_non_parallel) as f:
45 | task["meta_non_parallel"] = json.load(f)
46 | if meta_parallel:
47 | with open(meta_parallel) as f:
48 | task["meta_parallel"] = json.load(f)
49 | if args_schema_parallel:
50 | with open(args_schema_parallel) as f:
51 | task["args_schema_parallel"] = json.load(f)
52 | if args_schema_non_parallel:
53 | with open(args_schema_non_parallel) as f:
54 | task["args_schema_non_parallel"] = json.load(f)
55 | if args_schema_version:
56 | task["args_schema_version"] = args_schema_version
57 | is_private = "?private=true" if private else ""
58 |
59 | res = client.post(f"api/v2/task/{is_private}", json=task)
60 | new_task = check_response(res, expected_status_code=201)
61 |
62 | if batch:
63 | return Interface(retcode=0, data=str(new_task["id"]))
64 | else:
65 | return Interface(retcode=0, data=new_task)
66 |
67 |
68 | def patch_task(
69 | client: AuthClient,
70 | *,
71 | id: int | None = None,
72 | name: str | None = None,
73 | version: str | None = None,
74 | command_non_parallel: str | None = None,
75 | command_parallel: str | None = None,
76 | input_types: str | None = None,
77 | output_types: str | None = None,
78 | ) -> Interface:
79 |
80 | if id:
81 | if version:
82 | logging.error(
83 | "Too many arguments: cannot provide both `id` and `version`."
84 | )
85 | sys.exit(1)
86 | else:
87 | try:
88 | id = get_task_id_from_cache(
89 | client=client, task_name=name, version=version
90 | )
91 | except FractalCacheError as e:
92 | print(e)
93 | sys.exit(1)
94 |
95 | task_update = {}
96 | if command_non_parallel:
97 | task_update["command_non_parallel"] = command_non_parallel
98 | if command_parallel:
99 | task_update["command_parallel"] = command_parallel
100 | if input_types:
101 | with open(input_types) as f:
102 | task_update["input_types"] = json.load(f)
103 | if output_types:
104 | with open(output_types) as f:
105 | task_update["output_types"] = json.load(f)
106 |
107 | res = client.patch(f"api/v2/task/{id}/", json=task_update)
108 | new_task = check_response(res, expected_status_code=200)
109 | return Interface(retcode=0, data=new_task)
110 |
--------------------------------------------------------------------------------
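patch_task resolves its target either from an explicit `id` or, when only a
name (and optionally a version) is given, through the local task cache. A
hypothetical name-based edit, with `client` an authenticated AuthClient:

    from fractal_client.cmd._task import patch_task

    # Resolved via get_task_id_from_cache; exits if the name is ambiguous.
    iface = patch_task(
        client,
        name="thresholding",
        command_parallel="/usr/bin/env python3 /tasks/thresholding.py",
    )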
/fractal_client/cmd/_task_collection.py:
--------------------------------------------------------------------------------
1 | import json
2 | import logging
3 | import sys
4 | from pathlib import Path
5 |
6 | from fractal_client.authclient import AuthClient
7 | from fractal_client.interface import Interface
8 | from fractal_client.response import check_response
9 |
10 |
11 | def task_collect_pip(
12 | client: AuthClient,
13 | *,
14 | package: str,
15 | package_version: str | None = None,
16 | python_version: str | None = None,
17 | package_extras: str | None = None,
18 | pinned_dependency: list[str] | None = None,
19 | private: bool = False,
20 | batch: bool = False,
21 | ) -> Interface:
22 | # Construct TaskCollectPip object
23 | task_collect = dict()
24 | if package_version:
25 | task_collect["package_version"] = package_version
26 | if python_version:
27 | task_collect["python_version"] = python_version
28 | if package_extras:
29 | task_collect["package_extras"] = package_extras
30 | if pinned_dependency:
31 | for pin in pinned_dependency:
32 | if len(pin.split("=")) != 2:
33 | logging.error(
34 | f"Invalid pin: {pin}.\nPins must be written as "
35 | "'--pinned-dependency PACKAGE_NAME=PACKAGE_VERSION'"
36 | )
37 | sys.exit(1)
38 | task_collect["pinned_package_versions"] = json.dumps(
39 | {
40 | _name: _version
41 | for _name, _version in (
42 | p.split("=") for p in pinned_dependency
43 | )
44 | }
45 | )
46 |
47 | is_private = "?private=true" if private else ""
48 | endpoint_url = f"api/v2/task/collect/pip/{is_private}"
49 | if package.endswith(".whl"):
50 | with open(package, "rb") as f:
51 | file = {
52 | "file": (
53 | Path(package).name,
54 | f.read(),
55 | "application/zip",
56 | )
57 | }
58 | res = client.post(
59 | endpoint_url,
60 | data=task_collect,
61 | files=file,
62 | )
63 | else:
64 | task_collect["package"] = package
65 | res = client.post(
66 | endpoint_url,
67 | data=task_collect,
68 | )
69 | task_group_activity = check_response(res, expected_status_code=202)
70 | if batch:
71 | return Interface(retcode=0, data=task_group_activity["id"])
72 | else:
73 | return Interface(retcode=0, data=task_group_activity)
74 |
75 |
76 | def task_collect_custom(
77 | client: AuthClient,
78 | *,
79 | label: str,
80 | python_interpreter: str,
81 | manifest: str,
82 | version: str | None = None,
83 | package_name: str | None = None,
84 | package_root: str | None = None,
85 | private: bool = False,
86 | batch: bool = False,
87 | ) -> Interface:
88 | try:
89 | with open(manifest) as f:
90 | manifest_dict = json.load(f)
91 | except FileNotFoundError as e:
92 | raise FileNotFoundError(
93 | f"Fractal Client cannot find the file {manifest}. "
94 | "Note that the file must be on the same machine where Fractal "
95 | f"Client is running.\nOriginal error: {e}."
96 | )
97 |
98 | task_collect = dict(
99 | label=label,
100 | python_interpreter=python_interpreter,
101 | manifest=manifest_dict,
102 | )
103 | if version:
104 | task_collect["version"] = version
105 | if package_name:
106 | task_collect["package_name"] = package_name
107 | if package_root:
108 | task_collect["package_root"] = package_root
109 | is_private = "?private=true" if private else ""
110 |
111 | res = client.post(
112 | f"api/v2/task/collect/custom/{is_private}",
113 | json=task_collect,
114 | )
115 |
116 | task_list = check_response(
117 | res, expected_status_code=201, redact_long_payload=True
118 | )
119 |
120 | if batch:
121 | task_ids = [str(task["id"]) for task in task_list]
122 | return Interface(retcode=0, data=" ".join(task_ids))
123 | else:
124 | return Interface(retcode=0, data=task_list)
125 |
126 |
127 | def show_task_group_activity(
128 | client: AuthClient,
129 | *,
130 | task_group_activity_id: int,
131 | include_logs: bool,
132 | ) -> Interface:
133 | res = client.get(f"api/v2/task-group/activity/{task_group_activity_id}/")
134 | task_group_activity = check_response(res, expected_status_code=200)
135 |
136 |     # Unless logs were explicitly requested, strip them from the output
137 | if include_logs is False:
138 | task_group_activity["log"] = None
139 |
140 | return Interface(retcode=0, data=task_group_activity)
141 |
--------------------------------------------------------------------------------
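Each pinned dependency must use the single-"=" NAME=VERSION form checked
above; task_collect_pip folds the pins into a JSON-encoded mapping before
posting. A hypothetical call, with `client` an authenticated AuthClient:

    from fractal_client.cmd._task_collection import task_collect_pip

    iface = task_collect_pip(
        client,
        package="fractal-tasks-core",
        package_version="1.0.2",
        pinned_dependency=["numpy=1.26.4"],
        batch=True,
    )
    # The pins travel as pinned_package_versions='{"numpy": "1.26.4"}'.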
/fractal_client/cmd/_user.py:
--------------------------------------------------------------------------------
1 | import json
2 | import sys
3 | from json.decoder import JSONDecodeError
4 | from pathlib import Path
5 |
6 | from ..authclient import AuthClient
7 | from ..interface import Interface
8 | from ..response import check_response
9 |
10 |
11 | def _read_ssh_settings_json(ssh_settings_json: str) -> dict:
12 | """
13 | Read, validate and return as a dict the user's ssh-settings json file
14 | """
15 | ssh_settings_json_path = Path(ssh_settings_json)
16 | if not ssh_settings_json_path.exists():
17 | sys.exit(f"Invalid {ssh_settings_json=}. File does not exist.")
18 | with ssh_settings_json_path.open("r") as f:
19 | try:
20 | ssh_settings = json.load(f)
21 | except JSONDecodeError:
22 |             sys.exit(f"{ssh_settings_json_path} is not valid JSON.")
23 | __ALLOWED_KEYS__ = (
24 | "ssh_host",
25 | "ssh_username",
26 | "ssh_private_key_path",
27 | "ssh_tasks_dir",
28 | "ssh_jobs_dir",
29 | )
30 | settings = dict()
31 | for key, value in ssh_settings.items():
32 | if key in __ALLOWED_KEYS__:
33 | settings[key] = value
34 | else:
35 | sys.exit(f"Invalid {key=} in {ssh_settings_json=}.")
36 |
37 | return settings
38 |
39 |
40 | def user_register(
41 | client: AuthClient,
42 | *,
43 | new_email: str,
44 | new_password: str,
45 | slurm_user: str | None = None,
46 | project_dir: str | None = None,
47 | username: str | None = None,
48 | ssh_settings_json: str | None = None,
49 | superuser: bool = False,
50 | verified: bool = True, # TODO: this is not currently exposed in the CLI
51 | batch: bool = False,
52 | ) -> Interface:
53 | new_user = dict(
54 | email=new_email,
55 | password=new_password,
56 | )
57 |
58 | if username:
59 | new_user["username"] = username
60 |
61 | new_settings = dict()
62 | if slurm_user:
63 | new_settings["slurm_user"] = slurm_user
64 | if project_dir:
65 | new_settings["project_dir"] = project_dir
66 | if ssh_settings_json is not None:
67 | ssh_settings = _read_ssh_settings_json(ssh_settings_json)
68 | new_settings.update(ssh_settings)
69 |
70 | res = client.post("auth/register/", json=new_user)
71 | user_data = check_response(res, expected_status_code=201)
72 |
73 | if superuser or verified:
74 | patch_payload = dict(is_superuser=superuser, is_verified=verified)
75 | user_id = user_data["id"]
76 | res = client.patch(
77 | f"auth/users/{user_id}/",
78 | json=patch_payload,
79 | )
80 | user_data = check_response(res, expected_status_code=200)
81 |
82 | user_id = user_data["id"]
83 | if new_settings == {}:
84 | res = client.get(f"auth/users/{user_id}/settings/")
85 | user_settings = check_response(res, expected_status_code=200)
86 | else:
87 | res = client.patch(
88 | f"auth/users/{user_id}/settings/",
89 | json=new_settings,
90 | )
91 | user_settings = check_response(res, expected_status_code=200)
92 |
93 | if batch:
94 | return Interface(retcode=0, data=user_data["id"])
95 | else:
96 | user_data_with_settings = dict(settings=user_settings, **user_data)
97 | return Interface(retcode=0, data=user_data_with_settings)
98 |
99 |
100 | def user_list(client: AuthClient) -> Interface:
101 | res = client.get("auth/users/")
102 | users = check_response(res, expected_status_code=200)
103 | return Interface(retcode=0, data=users)
104 |
105 |
106 | def user_show(client: AuthClient, *, user_id: str) -> Interface:
107 | res = client.get(f"auth/users/{user_id}/")
108 | user = check_response(res, expected_status_code=200)
109 | user_id = user["id"]
110 | res = client.get(f"auth/users/{user_id}/settings/")
111 | user_settings = check_response(res, expected_status_code=200)
112 | user_with_settings = dict(settings=user_settings, **user)
113 | return Interface(retcode=0, data=user_with_settings)
114 |
115 |
116 | def user_edit(
117 | client: AuthClient,
118 | *,
119 | user_id: str,
120 | new_email: str | None = None,
121 | new_password: str | None = None,
122 | new_username: str | None = None,
123 | new_slurm_user: str | None = None,
124 | new_project_dir: str | None = None,
125 | new_ssh_settings_json: str | None = None,
126 | make_superuser: bool = False,
127 | remove_superuser: bool = False,
128 | make_verified: bool = False,
129 | remove_verified: bool = False,
130 | ) -> Interface:
131 | user_update = dict()
132 | settings_update = dict()
133 | if new_email is not None:
134 | if (make_verified is False) and (remove_verified is False):
135 | # Since `fastapi-users` sets `is_verified` to `False` each time the
136 | # email is updated, we force the user to make explicit whether the
137 | # account is verified or not.
138 | return Interface(
139 | retcode=1,
140 | data=(
141 | "Cannot use `--new-email` without `--make-verified` or "
142 | "`--remove-verified`"
143 | ),
144 | )
145 | user_update["email"] = new_email
146 | if new_password is not None:
147 | user_update["password"] = new_password
148 | if make_superuser:
149 | user_update["is_superuser"] = True
150 | if remove_superuser:
151 | user_update["is_superuser"] = False
152 | if make_verified:
153 | user_update["is_verified"] = True
154 | if remove_verified:
155 | user_update["is_verified"] = False
156 | if new_username is not None:
157 | user_update["username"] = new_username
158 | if new_slurm_user is not None:
159 | settings_update["slurm_user"] = new_slurm_user
160 | if new_project_dir is not None:
161 | settings_update["project_dir"] = new_project_dir
162 | if new_ssh_settings_json is not None:
163 | ssh_settings = _read_ssh_settings_json(new_ssh_settings_json)
164 | settings_update.update(ssh_settings)
165 |
166 | res = client.patch(f"auth/users/{user_id}/", json=user_update)
167 | new_user = check_response(res, expected_status_code=200)
168 |
169 | if new_email is not None:
170 | # Since `fastapi-users` sets `is_verified` to `False` each time the
171 | # email is updated, we set `is_verified` as specified by the user.
172 | res = client.patch(
173 | f"auth/users/{user_id}/",
174 | json=dict(is_verified=user_update["is_verified"]),
175 | )
176 | new_user = check_response(res, expected_status_code=200)
177 |
178 | if settings_update == {}:
179 | res = client.get(f"auth/users/{user_id}/settings/")
180 | user_settings = check_response(res, expected_status_code=200)
181 | else:
182 | res = client.patch(
183 | f"auth/users/{user_id}/settings/",
184 | json=settings_update,
185 | )
186 | user_settings = check_response(res, expected_status_code=200)
187 |
188 | new_user_with_settings = dict(settings=user_settings, **new_user)
189 | return Interface(retcode=0, data=new_user_with_settings)
190 |
191 |
192 | def user_set_groups(
193 | client: AuthClient, *, user_id: int, group_ids: list[int]
194 | ) -> Interface:
195 | res = client.post(
196 | f"auth/users/{user_id}/set-groups/",
197 | json=dict(group_ids=group_ids),
198 | )
199 | user = check_response(res, expected_status_code=200)
200 | return Interface(retcode=0, data=user)
201 |
202 |
203 | def user_whoami(
204 | client: AuthClient, *, batch: bool, viewer_paths: bool = False
205 | ) -> Interface:
206 | res = client.get("auth/current-user/")
207 | user = check_response(res, expected_status_code=200)
208 |
209 | if batch:
210 | return Interface(retcode=0, data=user["id"])
211 |
212 | res = client.get("auth/current-user/settings/")
213 | user_settings = check_response(res, expected_status_code=200)
214 | user_with_settings = dict(**user, settings=user_settings)
215 |
216 | if viewer_paths:
217 | res = client.get("auth/current-user/allowed-viewer-paths/")
218 | returned_viewer_paths = check_response(res, expected_status_code=200)
219 | return Interface(
220 | retcode=0,
221 | data=dict(
222 | **user_with_settings,
223 | viewer_paths=returned_viewer_paths,
224 | ),
225 | )
226 | else:
227 | return Interface(retcode=0, data=user_with_settings)
228 |
--------------------------------------------------------------------------------
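The ssh-settings file consumed above is a flat JSON object restricted to the
keys in __ALLOWED_KEYS__; any other key aborts the command. A sketch of
writing a valid file (all values are placeholders):

    import json
    from pathlib import Path

    ssh_settings = {
        "ssh_host": "cluster.example.org",
        "ssh_username": "fractal",
        "ssh_private_key_path": "/home/fractal/.ssh/id_rsa",
        "ssh_tasks_dir": "/remote/tasks",
        "ssh_jobs_dir": "/remote/jobs",
    }
    Path("ssh_settings.json").write_text(json.dumps(ssh_settings))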
/fractal_client/cmd/_workflow.py:
--------------------------------------------------------------------------------
1 | import json
2 | import logging
3 | import sys
4 | from pathlib import Path
5 |
6 | from ..authclient import AuthClient
7 | from ..interface import Interface
8 | from ..response import check_response
9 | from ._aux_task_caching import FractalCacheError
10 | from ._aux_task_caching import get_task_id_from_cache
11 |
12 |
13 | def post_workflow(
14 | client: AuthClient, *, name: str, project_id: int, batch: bool = False
15 | ) -> Interface:
16 | workflow = dict(
17 | name=name,
18 | )
19 | res = client.post(
20 | f"api/v2/project/{project_id}/workflow/",
21 | json=workflow,
22 | )
23 | workflow = check_response(res, expected_status_code=201)
24 | if batch:
25 | return Interface(retcode=0, data=workflow["id"])
26 | else:
27 | return Interface(retcode=0, data=workflow)
28 |
29 |
30 | def get_workflow_list(
31 | client: AuthClient, *, project_id: int, batch: bool = False
32 | ) -> Interface:
33 |
34 | res = client.get(f"api/v2/project/{project_id}/workflow/")
35 | workflow_list = check_response(res, expected_status_code=200)
36 | return Interface(retcode=0, data=workflow_list)
37 |
38 |
39 | def delete_workflow(
40 | client: AuthClient, *, project_id: int, workflow_id: int
41 | ) -> Interface:
42 | res = client.delete(f"api/v2/project/{project_id}/workflow/{workflow_id}/")
43 | check_response(res, expected_status_code=204)
44 | return Interface(retcode=0, data="")
45 |
46 |
47 | def get_workflow(
48 | client: AuthClient, *, project_id: int, workflow_id: int
49 | ) -> Interface:
50 | res = client.get(f"api/v2/project/{project_id}/workflow/{workflow_id}/")
51 | workflow = check_response(res, expected_status_code=200)
52 | return Interface(retcode=0, data=workflow)
53 |
54 |
55 | def post_workflowtask(
56 | client: AuthClient,
57 | *,
58 | project_id: int,
59 | workflow_id: int,
60 | type_filters: str | None = None,
61 | args_non_parallel: str | None = None,
62 | args_parallel: str | None = None,
63 | meta_non_parallel: str | None = None,
64 | meta_parallel: str | None = None,
65 | task_id: int | None = None,
66 | task_name: str | None = None,
67 | task_version: str | None = None,
68 | batch: bool = False,
69 | order: int | None = None,
70 | ) -> Interface:
71 |
72 | if task_id:
73 | if task_version:
74 | logging.error(
75 | "Too many arguments: cannot provide both "
76 | "`task_id` and `task_version`."
77 | )
78 | sys.exit(1)
79 | else:
80 | try:
81 | task_id = get_task_id_from_cache(
82 | client=client, task_name=task_name, version=task_version
83 | )
84 | except FractalCacheError as e:
85 | print(e)
86 | sys.exit(1)
87 |
88 | if order is None:
89 | workflow_task = dict()
90 | else:
91 | workflow_task = dict(order=order)
92 |
93 | if type_filters is not None:
94 | with Path(type_filters).open("r") as f:
95 | type_filters = json.load(f)
96 | workflow_task["type_filters"] = type_filters
97 |
98 | if args_non_parallel is not None:
99 | with Path(args_non_parallel).open("r") as f:
100 | a_n_p = json.load(f)
101 | workflow_task["args_non_parallel"] = a_n_p
102 |
103 | if args_parallel is not None:
104 | with Path(args_parallel).open("r") as f:
105 | a_p = json.load(f)
106 | workflow_task["args_parallel"] = a_p
107 |
108 | if meta_non_parallel is not None:
109 | with Path(meta_non_parallel).open("r") as f:
110 | m_n_p = json.load(f)
111 | workflow_task["meta_non_parallel"] = m_n_p
112 |
113 | if meta_parallel is not None:
114 | with Path(meta_parallel).open("r") as f:
115 | m_p = json.load(f)
116 | workflow_task["meta_parallel"] = m_p
117 |
118 | res = client.post(
119 | (
120 | f"api/v2/project/{project_id}/"
121 | f"workflow/{workflow_id}/wftask/"
122 | f"?{task_id=}"
123 | ),
124 | json=workflow_task,
125 | )
126 | workflow_task = check_response(res, expected_status_code=201)
127 |
128 | if batch:
129 | return Interface(retcode=0, data=str(workflow_task["id"]))
130 | else:
131 | return Interface(retcode=0, data=workflow_task)
132 |
133 |
134 | def patch_workflowtask(
135 | client: AuthClient,
136 | *,
137 | project_id: int,
138 | workflow_id: int,
139 | workflow_task_id: int,
140 | type_filters: str | None = None,
141 | args_non_parallel: str | None = None,
142 | args_parallel: str | None = None,
143 | meta_non_parallel: str | None = None,
144 | meta_parallel: str | None = None,
145 | ) -> Interface:
146 |
147 | payload = {}
148 | if type_filters is not None:
149 | with Path(type_filters).open("r") as f:
150 | filters = json.load(f)
151 | payload["type_filters"] = filters
152 |
153 | if args_non_parallel is not None:
154 | with Path(args_non_parallel).open("r") as f:
155 | a_n_p = json.load(f)
156 | payload["args_non_parallel"] = a_n_p
157 |
158 | if args_parallel is not None:
159 | with Path(args_parallel).open("r") as f:
160 | a_p = json.load(f)
161 | payload["args_parallel"] = a_p
162 |
163 | if meta_non_parallel is not None:
164 | with Path(meta_non_parallel).open("r") as f:
165 | m_n_p = json.load(f)
166 | payload["meta_non_parallel"] = m_n_p
167 |
168 | if meta_parallel is not None:
169 | with Path(meta_parallel).open("r") as f:
170 | m_p = json.load(f)
171 | payload["meta_parallel"] = m_p
172 |
173 | res = client.patch(
174 | (
175 | f"api/v2/project/{project_id}/"
176 | f"workflow/{workflow_id}/wftask/{workflow_task_id}/"
177 | ),
178 | json=payload,
179 | )
180 | workflow_task = check_response(res, expected_status_code=200)
181 |
182 | return Interface(retcode=0, data=workflow_task)
183 |
184 |
185 | def delete_workflowtask(
186 | client: AuthClient,
187 | *,
188 | project_id: int,
189 | workflow_id: int,
190 | workflow_task_id: int,
191 | ) -> Interface:
192 |
193 | res = client.delete(
194 | f"api/v2/project/{project_id}/"
195 | f"workflow/{workflow_id}/wftask/{workflow_task_id}/"
196 | )
197 | check_response(res, expected_status_code=204)
198 | return Interface(retcode=0, data="")
199 |
200 |
201 | def patch_workflow(
202 | client: AuthClient,
203 | *,
204 | project_id: int,
205 | workflow_id: int,
206 | new_name: str,
207 | ) -> Interface:
208 |
209 | workflow_update = dict(name=new_name)
210 |
211 | res = client.patch(
212 | f"api/v2/project/{project_id}/workflow/{workflow_id}/",
213 | json=workflow_update,
214 | )
215 | new_workflow = check_response(res, expected_status_code=200)
216 | return Interface(retcode=0, data=new_workflow)
217 |
218 |
219 | def workflow_import(
220 | client: AuthClient,
221 | *,
222 | project_id: int,
223 | json_file: str,
224 | workflow_name: str | None = None,
225 | batch: bool = False,
226 | ) -> Interface:
227 | with Path(json_file).open("r") as f:
228 | workflow = json.load(f)
229 |
230 | if workflow_name is not None:
231 | workflow["name"] = workflow_name
232 |
233 | res = client.post(
234 | f"api/v2/project/{project_id}/workflow/import/",
235 | json=workflow,
236 | )
237 | wf_read = check_response(res, expected_status_code=201)
238 |
239 | if batch:
240 | datastr = f"{wf_read['id']}"
241 | for wftask in wf_read["task_list"]:
242 | datastr += f" {wftask['id']}"
243 | return Interface(retcode=0, data=datastr)
244 | else:
245 | return Interface(retcode=0, data=wf_read)
246 |
247 |
248 | def workflow_export(
249 | client: AuthClient,
250 | *,
251 | project_id: int,
252 | workflow_id: int,
253 | json_file: str,
254 | ) -> Interface:
255 | res = client.get(
256 | (f"api/v2/project/{project_id}/" f"workflow/{workflow_id}/export/"),
257 | )
258 | workflow = check_response(res, expected_status_code=200)
259 |
260 | with Path(json_file).open("w") as f:
261 | json.dump(workflow, f, indent=2)
262 | return Interface(
263 | retcode=0, data=f"Workflow {workflow_id} exported at {json_file}"
264 | )
265 |
--------------------------------------------------------------------------------
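In `post_workflowtask` and `patch_workflowtask` above, the `args_*`, `meta_*` and `type_filters` parameters are paths to JSON files, which are loaded with `json.load` and forwarded as fields of the request payload. Below is a minimal sketch of calling `post_workflowtask` directly from Python; it assumes a running fractal-server, and all ids, credentials and paths are placeholders:

import json
from pathlib import Path

from fractal_client.authclient import AuthClient
from fractal_client.cmd._workflow import post_workflowtask

# Write the parallel-task arguments to a JSON file, as the CLI expects.
args_file = Path("/tmp/args_parallel.json")
args_file.write_text(json.dumps({"message": "hello"}))

with AuthClient(
    fractal_server="http://localhost:8765",
    username="user@example.org",
    password="secret",
    token=None,
) as client:
    iface = post_workflowtask(
        client,
        project_id=1,      # placeholder ids: must exist server-side
        workflow_id=1,
        task_id=1,
        args_parallel=str(args_file),  # loaded with json.load internally
    )
    iface.show()  # prints the new workflow-task as indented JSON
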
/fractal_client/config.py:
--------------------------------------------------------------------------------
1 | """
2 | Copyright 2022 (C) Friedrich Miescher Institute for Biomedical Research and
3 | University of Zurich
4 |
5 | Original authors:
6 | Jacopo Nespolo
7 | Tommaso Comparin
8 |
9 | This file is part of Fractal and was originally developed by eXact lab S.r.l.
10 | under contract with Liberali Lab from the Friedrich Miescher
11 | Institute for Biomedical Research and Pelkmans Lab from the University of
12 | Zurich.
13 | """
14 | from os import getenv
15 | from pathlib import Path
16 |
17 | from dotenv import load_dotenv
18 |
19 | load_dotenv(".fractal.env")
20 |
21 |
22 | class Settings:
23 | def __init__(self):
24 |
25 | self.FRACTAL_USER: str | None = getenv("FRACTAL_USER")
26 | self.FRACTAL_PASSWORD: str | None = getenv("FRACTAL_PASSWORD")
27 | self.FRACTAL_TOKEN_PATH: str | None = getenv("FRACTAL_TOKEN")
28 |
29 |         self.FRACTAL_SERVER: str | None = getenv("FRACTAL_SERVER")
30 | self.FRACTAL_CACHE_PATH: str = getenv(
31 | "FRACTAL_CACHE_PATH", str(Path.home() / ".cache/fractal")
32 | )
33 |
34 |
35 | settings = Settings()
36 |
--------------------------------------------------------------------------------
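`Settings` reads all of its values from the environment, optionally pre-populated from a `.fractal.env` file in the working directory via `python-dotenv`. A sketch of such a file follows; the variable names come from the class above, while all values are placeholders:

FRACTAL_SERVER=http://localhost:8765
FRACTAL_USER=user@example.org
FRACTAL_PASSWORD=secret
# Alternative to user/password authentication: path to a file with a token
# FRACTAL_TOKEN=/path/to/token
# Optional; defaults to ~/.cache/fractal when unset
# FRACTAL_CACHE_PATH=/tmp/fractal-cache
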
/fractal_client/interface.py:
--------------------------------------------------------------------------------
1 | import json
2 | from typing import Any
3 |
4 |
5 | class Interface:
6 | def __init__(self, retcode: int, data: Any) -> None:
7 | self.retcode = retcode
8 | self.data = data
9 |
10 | def show(self, *args, **kwargs):
11 | if isinstance(self.data, (dict, list)):
12 | print(json.dumps(self.data, indent=2, sort_keys=True))
13 | else:
14 | print(self.data)
15 |
--------------------------------------------------------------------------------
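`Interface` is the common return value of all client commands: `retcode` feeds the process exit status in the CLI entry point, and `show()` pretty-prints dict/list payloads as indented, key-sorted JSON while printing anything else verbatim. A quick sketch:

from fractal_client.interface import Interface

Interface(retcode=0, data={"name": "wf", "id": 1}).show()  # indented JSON, sorted keys
Interface(retcode=0, data="plain message").show()          # printed as-is
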
/fractal_client/response.py:
--------------------------------------------------------------------------------
1 | import json
2 | import logging
3 | import sys
4 | from json.decoder import JSONDecodeError
5 | from typing import Any
6 |
7 | from httpx import Response
8 |
9 | LONG_PAYLOAD_VALUE_LIMIT = 300
10 |
11 |
12 | def check_response(
13 | res: Response,
14 | expected_status_code: int | list[int] = 200,
15 | redact_long_payload: bool = False,
16 | ) -> list[Any] | dict[str, Any] | str | int | float | bool:
17 | """
18 |     Check the validity of the HTTP response from fractal-server
19 |
20 | If the status code of the response is not one of the expected values, print
21 | the error to stderr and terminate with exit status 1.
22 |     Some errors (422 errors due to failed validation of the request body)
23 |     are handled in a specific way, to make their error messages more readable.
24 |
25 | Args:
26 | res: Response from `fractal-server`.
27 | expected_status_code: Expected status code(s).
28 | redact_long_payload: If `True`, redact payload values of more than
29 | `LONG_PAYLOAD_VALUE_LIMIT` characters.
30 |
31 | Returns:
32 | The output of `res.json()`.
33 | """
34 |
35 | try:
36 | data = res.json()
37 | except JSONDecodeError:
38 | data = {}
39 |
40 | # Also allow a list of expected status codes
41 | if isinstance(expected_status_code, list):
42 | expected_status_codes = expected_status_code
43 | else:
44 | expected_status_codes = [expected_status_code]
45 |
46 | logging.debug(f"\nResponse status code:\n {res.status_code}")
47 | if res.status_code not in expected_status_codes:
48 | logging.error(f"Server returned {res.status_code}")
49 |
50 | # The following block relies on private methods, and it may fail for
51 |         # unexpected reasons (e.g. it is not aware of the difference between
52 |         # 'application/json' and 'multipart/form-data' requests, and it will
53 |         # fail for non-JSON requests). For this reason it is within a
54 | # broad-scope try/except block.
55 | try:
56 | logging.error(
57 | f"Original request: {res._request.method} {res._request.url}"
58 | )
59 | payload = res._request._content.decode("utf-8")
60 | if redact_long_payload and len(payload) > 0:
61 | payload_dict = json.loads(payload)
62 | for key, value in payload_dict.items():
63 | if len(str(value)) > LONG_PAYLOAD_VALUE_LIMIT:
64 | payload_dict[key] = "[value too long - redacted]"
65 | payload = json.dumps(payload_dict)
66 | logging.error(f"Original payload: {payload}")
67 | except Exception:
68 | logging.info("Could not display original-request information.")
69 |
70 | error_msg = data
71 |
72 | # Detect whether the error is due to failed request-body validation,
73 | # and make the error message more readable
74 | if (
75 | res.status_code == 422
76 | and isinstance(data, dict)
77 | and list(data.keys()) == ["detail"]
78 | and isinstance(data["detail"], list)
79 | and len(data["detail"]) == 1
80 | and isinstance(data["detail"][0], dict)
81 | and set(data["detail"][0].keys()) == {"msg", "type", "loc"}
82 | ):
83 | msg = data["detail"][0]["msg"]
84 | _type = data["detail"][0]["type"]
85 | loc = data["detail"][0]["loc"]
86 | error_msg = f"\n\tmsg: {msg}\n\ttype: {_type}\n\tloc: {loc}"
87 |
88 | logging.error(f"Server error message: {error_msg}\n")
89 | logging.error("Terminating.\n")
90 | sys.exit(1)
91 |
92 | return data
93 |
--------------------------------------------------------------------------------
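On the happy path, `check_response` simply returns the parsed JSON body; for unexpected status codes it logs the details (including the reshaped 422 validation message) and terminates. A minimal sketch using a synthetic `httpx.Response`, no server involved:

import httpx

from fractal_client.response import check_response

res = httpx.Response(200, json={"id": 1, "name": "my-project"})
data = check_response(res, expected_status_code=200)
assert data == {"id": 1, "name": "my-project"}

# A non-matching status code (e.g. a 422 with the single-"detail" shape
# handled above) would be logged and terminate via sys.exit(1).
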
/mkdocs.yml:
--------------------------------------------------------------------------------
1 | site_name: Fractal Client
2 |
3 | repo_url: https://github.com/fractal-analytics-platform/fractal-client
4 | repo_name: fractal-client
5 |
6 | markdown_extensions:
7 | - admonition
8 | - pymdownx.details
9 | - pymdownx.emoji
10 | - pymdownx.magiclink
11 | - pymdownx.superfences
12 | - pymdownx.tabbed:
13 | alternate_style: true
14 | - pymdownx.tasklist
15 | - toc:
16 | permalink: true
17 | toc_depth: 3
18 |
19 | theme:
20 | name: "material"
21 | logo: https://raw.githubusercontent.com/fractal-analytics-platform/fractal-logos/refs/heads/main/common/fractal_logo.png
22 | favicon: https://raw.githubusercontent.com/fractal-analytics-platform/fractal-logos/refs/heads/main/common/fractal_favicon.png
23 | custom_dir: "docs/overrides"
24 | features:
25 | - content.code.annotate
26 | - content.code.copy
27 | palette:
28 | - media: "(prefers-color-scheme: light)"
29 | scheme: default
30 | primary: blue grey
31 | accent: light blue
32 | toggle:
33 | icon: material/weather-sunny
34 | name: Switch to dark mode
35 | - media: "(prefers-color-scheme: dark)"
36 | scheme: slate
37 | primary: blue grey
38 | accent: light blue
39 | toggle:
40 | icon: material/weather-night
41 | name: Switch to light mode
42 | icon:
43 | repo: fontawesome/brands/github
44 | navigation:
45 | expand: true
46 |
47 |
48 | nav:
49 | - Home page: index.md
50 | - Install: install.md
51 | - Version compatibility: versions.md
52 | - CLI reference: reference/fractal/
53 | - Development: development.md
54 | - Changelog: changelog.md
55 |
56 | watch:
57 | - fractal_client
58 |
59 | plugins:
60 | - search
61 | - autorefs
62 | - gen-files:
63 | scripts:
64 | - docs/gen_ref_pages.py
65 | - literate-nav:
66 | nav_file: SUMMARY.md
67 | - section-index
68 | - mkdocstrings:
69 | default_handler: python
70 | handlers:
71 | python:
72 | options:
73 | show_signature_annotations: false
74 | docstring_section_style: "spacy"
75 | docstring_style: "google"
76 | show_source: true
77 | filters: []
78 | show_root_full_path: false
79 | - include-markdown
80 |
81 | extra_css:
82 | - extra.css
83 |
84 | copyright: |
85 | © Copyright 2024
86 | University of Zurich
87 | (see
88 | LICENSE
89 | ).
90 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [project]
2 | name = "fractal-client"
3 | version = "2.9.1"
4 | description = "Command-line client of the Fractal analytics platform"
5 | authors = [
6 | { name="Tommaso Comparin", email="tommaso.comparin@exact-lab.it" },
7 | { name="Marco Franzon", email="marco.franzon@exact-lab.it" },
8 | { name="Yuri Chiucconi", email="yuri.chiucconi@exact-lab.it" },
9 | { name="Jacopo Nespolo", email="jacopo.nespolo@exact-lab.it" },
10 | ]
11 | readme = "README.md"
12 | license = { text = "BSD-3-Clause" }
13 | requires-python = ">=3.11,<3.13"
14 | dependencies = [
15 | "python-dotenv >=1.1.0,<1.2.0",
16 | "httpx >= 0.28.0, <0.29.0",
17 | "PyJWT >= 2.8.0,<3.0.0",
18 | "packaging >= 25.0.0, <26.0.0",
19 |
20 | ]
21 |
22 | [project.scripts]
23 | fractal = "fractal_client.client:main"
24 |
25 | [project.urls]
26 | homepage = "https://github.com/fractal-analytics-platform/fractal-client"
27 | repository = "https://github.com/fractal-analytics-platform/fractal-client"
28 | documentation = "https://fractal-analytics-platform.github.io/fractal-client"
29 | changelog = "https://github.com/fractal-analytics-platform/fractal-client/blob/main/CHANGELOG.md"
30 |
31 | [build-system]
32 | requires = ["poetry-core"]
33 | build-backend = "poetry.core.masonry.api"
34 |
35 | [tool.poetry]
36 | requires-poetry = ">=2.0"
37 | packages = [
38 | {include="fractal_client"}
39 | ]
40 |
41 | [tool.poetry.group.dev]
42 | optional = true
43 |
44 | [tool.poetry.group.dev.dependencies]
45 | devtools = "^0.12.0"
46 | pre-commit = ">=3.0.0,<4.0.0"
47 | pytest = "^7.1.2"
48 | pytest-randomly = "^3.15.0"
49 | bumpver = "^2022.1118"
50 | coverage = {extras = ["toml"], version = "^6.5.0"}
51 | fractal-server = { git = "https://github.com/fractal-analytics-platform/fractal-server.git", branch = "main" }
52 |
53 | [tool.poetry.group.docs]
54 | optional = true
55 |
56 | [tool.poetry.group.docs.dependencies]
57 | mkdocs = "1.5.2"
58 | mkdocs-material = "9.1.21"
59 | mkdocs-literate-nav = "0.5.0"
60 | mkdocs-gen-files = "0.4.0"
61 | mkdocs-section-index = "0.3.5"
62 | mkdocstrings = { extras = ["python"], version = "0.25.2" }
63 | mkdocs-include-markdown-plugin = { version = "4.0.4", python = "^3.8,<3.12" }
64 |
65 | [tool.bumpver]
66 | current_version = "2.9.1"
67 | version_pattern = "MAJOR.MINOR.PATCH[PYTAGNUM]"
68 | commit_message = "bump version {old_version} -> {new_version}"
69 | commit = true
70 | tag = true
71 | push = true
72 |
73 | [tool.bumpver.file_patterns]
74 | "pyproject.toml" = [
75 | 'version = "{version}"$',
76 | ]
77 | "fractal_client/__init__.py" = [
78 | '__VERSION__ = "{version}"$'
79 | ]
80 |
81 | [tool.coverage.run]
82 | branch = true
83 | parallel = true
84 | relative_files = true
85 | omit = ["tests/*", "*/.venv/*"]
86 |
87 | [tool.coverage.report]
88 | omit = ["tests/*", "*/.venv/*"]
89 |
90 | [[tool.mypy.overrides]]
91 | module = ["devtools", "uvicorn", "pytest", "httpx", "fractal_server"]
92 | ignore_missing_imports = true
93 |
--------------------------------------------------------------------------------
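The `[tool.bumpver]` section wires version bumps to the two files listed in `file_patterns`, and the `commit`/`tag`/`push` flags make each bump a tagged, pushed commit. A sketch of a patch-release bump, assuming `bumpver` (part of the dev dependencies above) is installed:

# Dry run first, then apply; this rewrites pyproject.toml and
# fractal_client/__init__.py, then commits, tags and pushes.
bumpver update --patch --dry
bumpver update --patch
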
/tests/.gitignore:
--------------------------------------------------------------------------------
1 | output*
2 | tmp_data
3 | dictionary_tasks.py
4 | tmp*
5 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fractal-analytics-platform/fractal-client/a2be2caf9ddd18d8bbf1991a661c29d7d09f8bb9/tests/__init__.py
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | import multiprocessing
2 | import shlex
3 | from os import environ
4 | from pathlib import Path
5 |
6 | import pytest
7 |
8 |
9 | # This variable must be defined before the first import of config.py
10 | environ["FRACTAL_SERVER"] = "http://127.0.0.1:8765"
11 | from fractal_client.client import handle # noqa: E402
12 |
13 | # set_start_method("fork") necessary to run tests on MacOS
14 | # https://github.com/pytest-dev/pytest-flask/issues/104#issuecomment-577908228
15 | # https://docs.python.org/3/library/multiprocessing.html#multiprocessing.get_start_method
16 | multiprocessing.set_start_method("fork")
17 |
18 |
19 | @pytest.fixture(autouse=True, scope="function")
20 | def clear_cache(tmp_path, monkeypatch):
21 | """
22 | Note that this fixture is automatically used in **all** tests.
23 | """
24 | import fractal_client.config
25 |
26 | monkeypatch.setattr(
27 | fractal_client.config.settings,
28 | "FRACTAL_CACHE_PATH",
29 | str(tmp_path),
30 | )
31 |
32 |
33 | @pytest.fixture(scope="session")
34 | def testdata_path() -> Path:
35 | return Path(__file__).parent / "data"
36 |
37 |
38 | def _clisplit(args: str):
39 | return shlex.split(f"fractal {args}")
40 |
41 |
42 | @pytest.fixture(scope="session")
43 | def tester():
44 | return dict(email="client_tester@example.org", password="pytest")
45 |
46 |
47 | @pytest.fixture
48 | def invoke(tester):
49 | def __invoke(args: str):
50 |
51 | new_args = (
52 | f"--user {tester['email']} --password {tester['password']} {args}"
53 | )
54 | return handle(_clisplit(new_args))
55 |
56 | return __invoke
57 |
58 |
59 | @pytest.fixture
60 | def invoke_as_superuser():
61 | def __invoke(args: str):
62 | new_args = f"--user admin@fractal.xy --password 1234 {args}"
63 | return handle(_clisplit(new_args))
64 |
65 | return __invoke
66 |
67 |
68 | @pytest.fixture
69 | def invoke_as_custom_user():
70 | def __invoke(args: str, email: str, password: str):
71 |
72 | new_args = f"--user {email} --password {password} {args}"
73 | return handle(_clisplit(new_args))
74 |
75 | return __invoke
76 |
77 |
78 | @pytest.fixture
79 | def superuser(invoke_as_superuser):
80 | return invoke_as_superuser("user whoami").data
81 |
82 |
83 | @pytest.fixture(scope="function")
84 | def override_settings(monkeypatch, tmp_path):
85 | import fractal_client.config
86 |
87 | def _override_settings(
88 | FRACTAL_CACHE_PATH=str(tmp_path),
89 | FRACTAL_USER=None,
90 | FRACTAL_PASSWORD=None,
91 | FRACTAL_SERVER=None,
92 | ):
93 | monkeypatch.setattr(
94 | fractal_client.config.settings,
95 | "FRACTAL_CACHE_PATH",
96 | FRACTAL_CACHE_PATH,
97 | )
98 | monkeypatch.setattr(
99 | fractal_client.config.settings,
100 | "FRACTAL_USER",
101 | FRACTAL_USER,
102 | )
103 | monkeypatch.setattr(
104 | fractal_client.config.settings,
105 | "FRACTAL_PASSWORD",
106 | FRACTAL_PASSWORD,
107 | )
108 | if FRACTAL_SERVER is not None:
109 | monkeypatch.setattr(
110 | fractal_client.config.settings,
111 | "FRACTAL_SERVER",
112 | FRACTAL_SERVER,
113 | )
114 |
115 | return _override_settings
116 |
117 |
118 | @pytest.fixture(scope="session")
119 | def new_name():
120 | class Counter:
121 | ind: int = 0
122 |
123 | def __next__(self):
124 | self.ind = self.ind + 1
125 | return f"name{self.ind - 1}"
126 |
127 | names = Counter()
128 |
129 | return lambda: next(names)
130 |
131 |
132 | from .fixtures_testserver import *  # noqa: F401,F403
133 |
--------------------------------------------------------------------------------
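The `invoke*` fixtures are thin wrappers that prepend credentials to the command string and pass the shell-split result to the same `handle()` entry point used by the installed `fractal` script. A sketch of how a test uses them (email and password come from the `tester` fixture above):

def test_example(invoke):
    # Roughly equivalent to:
    #   handle(shlex.split(
    #       "fractal --user client_tester@example.org "
    #       "--password pytest project list"
    #   ))
    res = invoke("project list")
    assert res.retcode == 0
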
/tests/data/.gitignore:
--------------------------------------------------------------------------------
1 | out.json
2 |
--------------------------------------------------------------------------------
/tests/data/import-export/wf3.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "Generic WF",
3 | "task_list": [
4 | {
5 | "task": {
6 | "name": "generic_task",
7 | "pkg_name": "fractal-tasks-mock"
8 | }
9 | },
10 | {
11 | "args_non_parallel": {
12 | "raise_error": true
13 | },
14 | "task": {
15 | "name": "generic_task",
16 | "pkg_name": "fractal-tasks-mock"
17 | }
18 | }
19 | ]
20 | }
21 |
--------------------------------------------------------------------------------
/tests/data/import-export/workflow.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "MyWorkflow-V2",
3 | "task_list": [
4 | {
5 | "args_parallel": {
6 | "message_x": "task_x",
7 | "arg_x": "value_x"
8 | },
9 | "meta_parallel": {"key1": "value1"},
10 | "args_non_parallel": {
11 | "message_y": "task_y",
12 | "arg_y": "value_y"
13 | },
14 | "meta_non_parallel": {"key2": "value2"},
15 | "task": {
16 | "name": "dummy",
17 | "pkg_name": "dummy"
18 | }
19 | }
20 | ]
21 | }
22 |
--------------------------------------------------------------------------------
/tests/data/import-export/workflow_2.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "MyWorkflow-V2-2",
3 | "task_list": [
4 | {
5 | "args_parallel": {
6 | "message_x": "task_x",
7 | "arg_x": "value_x"
8 | },
9 | "args_non_parallel": {
10 | "message_y": "task_y",
11 | "arg_y": "value_y"
12 | },
13 | "meta_parallel": {"key1": "value1"},
14 | "meta_non_parallel": {"key2": "value2"},
15 | "task": {
16 | "name": "dummy2",
17 | "pkg_name": "dummy2"
18 | }
19 | }
20 | ]
21 | }
22 |
--------------------------------------------------------------------------------
/tests/data/task_edit_json/default_args.json:
--------------------------------------------------------------------------------
1 | {
2 | "Lorem": "ipsum",
3 | "dolor": ["sit", "amet"],
4 | "consectetur": "adipiscing elit",
5 | "sed": 123
6 | }
7 |
--------------------------------------------------------------------------------
/tests/data/task_edit_json/meta.json:
--------------------------------------------------------------------------------
1 | {
2 | "ut": ["labore", 3.14],
3 | "et": "dolore magna",
4 | "aliqua": null
5 | }
6 |
--------------------------------------------------------------------------------
/tests/fixtures_testserver.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import os
3 | import shlex
4 | import subprocess
5 | import time
6 | from pathlib import Path
7 |
8 | import pytest
9 | from httpx import ConnectError
10 |
11 | from fractal_client.client import handle
12 |
13 | DB_NAME = "pytest-fractal-client"
14 |
15 | logger = logging.getLogger("fractal-client")
16 | logger.setLevel(logging.DEBUG)
17 |
18 | PORT = 8765
19 |
20 |
21 | @pytest.fixture
22 | def superuser(invoke_as_superuser):
23 | return invoke_as_superuser("user whoami").data
24 |
25 |
26 | @pytest.fixture(scope="session")
27 | def tester():
28 | return dict(email="client_tester@example.org", password="pytest")
29 |
30 |
31 | def _run_command(cmd: str) -> str:
32 | logging.warning(f"Now running {cmd=}")
33 | res = subprocess.run(
34 | shlex.split(cmd),
35 | capture_output=True,
36 | env=dict(PGPASSWORD="postgres", **os.environ),
37 | encoding="utf-8",
38 | )
39 | if res.returncode != 0:
40 | logging.error(f"{res.stdout=}")
41 | logging.error(f"{res.stderr=}")
42 | raise RuntimeError(res.stderr)
43 | else:
44 | return res.stdout
45 |
46 |
47 | @pytest.fixture(scope="session", autouse=True)
48 | def testserver(tester, tmpdir_factory, request):
49 | FRACTAL_TASK_DIR = str(tmpdir_factory.mktemp("TASKS"))
50 | FRACTAL_RUNNER_WORKING_BASE_DIR = str(tmpdir_factory.mktemp("JOBS"))
51 |
52 | env_file = Path(".fractal_server.env")
53 | with env_file.open("w") as f:
54 | f.write(
55 | "POSTGRES_HOST=localhost\n"
56 | f"POSTGRES_DB={DB_NAME}\n"
57 | "POSTGRES_USER=postgres\n"
58 | "POSTGRES_PASSWORD=postgres\n"
59 | "FRACTAL_RUNNER_BACKEND=local\n"
60 | "JWT_SECRET_KEY=secret_key\n"
61 | f"FRACTAL_TASKS_DIR={FRACTAL_TASK_DIR}\n"
62 | "FRACTAL_RUNNER_WORKING_BASE_DIR="
63 | f"{FRACTAL_RUNNER_WORKING_BASE_DIR}\n"
64 | "FRACTAL_LOGGING_LEVEL=0\n"
65 | "FRACTAL_VIEWER_AUTHORIZATION_SCHEME=viewer-paths\n"
66 | )
67 | _run_command(
68 | f"dropdb --username=postgres --host localhost --if-exists {DB_NAME}"
69 | )
70 | _run_command(f"createdb --username=postgres --host localhost {DB_NAME}")
71 | _run_command("poetry run fractalctl set-db")
72 |
73 | LOG_DIR = Path(
74 | os.environ.get(
75 | "GHA_FRACTAL_SERVER_LOG",
76 | tmpdir_factory.mktemp("LOGS"),
77 | ),
78 | )
79 | path_out = LOG_DIR / "server_out"
80 | path_err = LOG_DIR / "server_err"
81 | f_out = path_out.open("w")
82 | f_err = path_err.open("w")
83 |
84 | server_process = subprocess.Popen(
85 | shlex.split(f"poetry run fractalctl start --port {PORT}"),
86 | stdout=f_out,
87 | stderr=f_err,
88 | )
89 |
90 | # Wait until the server is up
91 | TIMEOUT = 8.0
92 | t_start = time.perf_counter()
93 | while True:
94 | try:
95 | res = handle(shlex.split("fractal version"))
96 | if "refused" not in res.data:
97 | break
98 | else:
99 | raise ConnectError("fractal-server not ready")
100 | except ConnectError:
101 | logger.debug("Fractal server not ready, wait one more second.")
102 | if time.perf_counter() - t_start > TIMEOUT:
103 | raise RuntimeError(
104 | f"Could not start up server within {TIMEOUT} seconds,"
105 | " in `testserver` fixture."
106 | )
107 | time.sleep(0.1)
108 |
109 | handle(
110 | shlex.split(
111 | "fractal --user admin@fractal.xy --password 1234 "
112 | f"user register {tester['email']} {tester['password']}"
113 | )
114 | )
115 |
116 | yield
117 |
118 | request.session.warn(
119 | Warning(
120 | f"\n\nTerminating Fractal Server (PID: {server_process.pid}).\n"
121 | f"stdout -> {path_out}\n"
122 | f"stderr -> {path_err}\n"
123 | )
124 | )
125 |
126 | server_process.terminate()
127 | server_process.kill()
128 | _run_command(f"dropdb --username=postgres --host localhost {DB_NAME}")
129 | env_file.unlink()
130 | f_out.close()
131 | f_err.close()
132 |
133 |
134 | @pytest.fixture
135 | def task_factory(invoke):
136 | def _task_factory(
137 | name: str,
138 | command_non_parallel: str | None = None,
139 | command_parallel: str | None = None,
140 | version: str | None = None,
141 | meta_non_parallel: str | None = None,
142 | meta_parallel: str | None = None,
143 | args_schema_non_parallel: str | None = None,
144 | args_schema_parallel: str | None = None,
145 | args_schema_version: str | None = None,
146 | ):
147 | cmd = "task new"
148 | if command_non_parallel is not None:
149 | cmd += f" --command-non-parallel {command_non_parallel}"
150 | if command_parallel is not None:
151 | cmd += f" --command-parallel {command_parallel}"
152 | if version is not None:
153 | cmd += f" --version {version}"
154 | if meta_non_parallel is not None:
155 | cmd += f" --meta-non-parallel {meta_non_parallel}"
156 | if meta_parallel is not None:
157 | cmd += f" --meta-parallel {meta_parallel}"
158 | if args_schema_non_parallel is not None:
159 | cmd += f" --args-schema-non-parallel {args_schema_non_parallel}"
160 | if args_schema_parallel is not None:
161 | cmd += f" --args-schema-parallel {args_schema_parallel}"
162 | if args_schema_version is not None:
163 | cmd += f" --args-schema-version {args_schema_version}"
164 | if command_non_parallel is not None and command_parallel is not None:
165 | cmd += " --task-type compound"
166 | elif command_non_parallel is not None:
167 | cmd += " --task-type non_parallel"
168 | else:
169 | cmd += " --task-type parallel"
170 |
171 | cmd += f" {name}"
172 |
173 | res = invoke(cmd)
174 | return res.data
175 |
176 | return _task_factory
177 |
178 |
179 | @pytest.fixture
180 | def project_factory(invoke):
181 | def _project_factory(name: str):
182 | res = invoke(f"project new {name}")
183 | return res.data
184 |
185 | return _project_factory
186 |
187 |
188 | @pytest.fixture
189 | def workflow_factory(invoke):
190 | def _workflow_factory(name: str, project_id: int):
191 | res = invoke(f"workflow new {name} {project_id}")
192 | return res.data
193 |
194 | return _workflow_factory
195 |
196 |
197 | @pytest.fixture
198 | def dataset_factory(invoke):
199 | def _dataset_factory(
200 | project_id: int,
201 | name: str,
202 | zarr_dir: str,
203 | ):
204 | cmd = "project add-dataset"
205 | cmd += f" {project_id} {name} --zarr-dir {zarr_dir}"
206 |
207 | res = invoke(cmd)
208 | return res.data
209 |
210 | return _dataset_factory
211 |
212 |
213 | @pytest.fixture
214 | def user_factory(invoke_as_superuser):
215 | def __user_factory(
216 | email: str,
217 | password: str,
218 | project_dir: str | None = None,
219 | slurm_user: str | None = None,
220 | username: str | None = None,
221 | superuser: bool = False,
222 | ):
223 | cmd = "user register"
224 | if project_dir is not None:
225 | cmd += f" --project-dir {project_dir}"
226 | if slurm_user is not None:
227 | cmd += f" --slurm-user {slurm_user}"
228 | if username is not None:
229 | cmd += f" --username {username}"
230 | if superuser is True:
231 | cmd += " --superuser"
232 | cmd += f" {email} {password}"
233 |
234 | res = invoke_as_superuser(cmd)
235 | return res.data
236 |
237 | return __user_factory
238 |
--------------------------------------------------------------------------------
/tests/test_client.py:
--------------------------------------------------------------------------------
1 | import shlex
2 | from pathlib import Path
3 |
4 | import httpx
5 | import pytest
6 | from devtools import debug
7 |
8 | from fractal_client import __VERSION__
9 | from fractal_client.authclient import AuthClient
10 | from fractal_client.client import _verify_authentication_branch
11 | from fractal_client.client import handle
12 | from fractal_client.cmd import version
13 |
14 |
15 | def test_debug(invoke):
16 | res = invoke("--debug version")
17 | assert res.retcode == 0
18 | debug(res.data)
19 |
20 |
21 | def test_version(invoke):
22 | iface = invoke("version")
23 | debug(iface.data)
24 | assert f"version: {__VERSION__}" in iface.data
25 | assert iface.retcode == 0
26 |
27 |
28 | def test_version_connect_error():
29 | iface = version("http://localhost:9999")
30 | debug(iface.data)
31 | assert f"version: {__VERSION__}" in iface.data
32 | assert "refused" in iface.data
33 | assert iface.retcode == 0
34 |
35 |
36 | def test_server_is_up():
37 | """
38 | GIVEN a testserver
39 | WHEN it gets called
40 | THEN it replies
41 | """
42 | res = httpx.get("http://localhost:8765/api/alive/")
43 | debug(res.json())
44 | assert res.status_code == 200
45 |
46 |
47 | def test_register_user(tester, invoke):
48 | res = invoke("user whoami")
49 | user = res.data
50 | debug(user)
51 | assert res.retcode == 0
52 | assert user["email"] == tester["email"]
53 |
54 |
55 | def test_user_override(user_factory, invoke):
56 | """
57 | GIVEN a user whose credentials differ from those of the environment
58 | WHEN the client is invoked with -u and -p
59 | THEN the credentials are overridden
60 | """
61 | EMAIL = "other_user@exact-lab.it"
62 | PASSWORD = "other_password"
63 | user_factory(email=EMAIL, password=PASSWORD)
64 |
65 | res = invoke(f"-u {EMAIL} -p {PASSWORD} project list")
66 | assert res.retcode == 0
67 |
68 |
69 | def test_bad_credentials(invoke):
70 | """
71 | GIVEN a registered user
72 | WHEN wrong credentials are passed
73 | THEN the client returns an error
74 | """
75 | res = invoke("-u nouser@exact-lab.it -p nopassword project list")
76 | res.show()
77 | assert res.retcode != 0
78 | assert "BAD_CREDENTIALS" in res.data
79 |
80 |
81 | def test_connecterror(override_settings):
82 | override_settings(
83 | FRACTAL_USER="admin@fractal.xy",
84 | FRACTAL_PASSWORD="1234",
85 | FRACTAL_SERVER="http://localhost:12345",
86 | )
87 | res = handle(shlex.split("fractal user whoami"))
88 | debug(res.data)
89 | assert "ConnectError" in res.data
90 | assert "Hint: is http://localhost:12345 alive?" in res.data
91 |
92 |
93 | def test_argparse_abbreviation(invoke_as_superuser):
94 | """
95 | Check that argparse abbreviations are disabled on at least one command.
96 |
97 | Refs:
98 | * https://github.com/fractal-analytics-platform/fractal/issues/440
99 | * https://docs.python.org/3/library/argparse.html#prefix-matching
100 | """
101 |
102 | # Successful invoke
103 | res = invoke_as_superuser("user register test@mail.com secret --superuser")
104 | res.show()
105 | assert res.retcode == 0
106 |
107 | # Failed (abbreviation-based) invoke
108 | with pytest.raises(SystemExit):
109 | invoke_as_superuser("user register test2@mail.com secret2 --super")
110 |
111 |
112 | def test_unit_verify_authentication_branch():
113 | # Valid cases
114 | _verify_authentication_branch(
115 | username="xxx",
116 | password="xxx",
117 | token_path=None,
118 | )
119 | _verify_authentication_branch(
120 | username=None,
121 | password=None,
122 | token_path="xxx",
123 | )
124 |
125 | # Invalid cases
126 | for username, password, token_path in [
127 | (None, None, None),
128 | ("xx", None, None),
129 | (None, "xx", None),
130 | ("xx", "xx", "xx"),
131 | ("xx", None, "xx"),
132 | (None, "xx", "xx"),
133 | ]:
134 | with pytest.raises(
135 | ValueError,
136 | match="Invalid authentication credentials",
137 | ):
138 | _verify_authentication_branch(
139 | username=username,
140 | password=password,
141 | token_path=token_path,
142 | )
143 |
144 |
145 | def test_invalid_credentials(monkeypatch):
146 | import fractal_client.client
147 |
148 | monkeypatch.setattr(
149 | fractal_client.client.settings, "FRACTAL_USER", "some-user"
150 | )
151 | monkeypatch.setattr(
152 | fractal_client.client.settings, "FRACTAL_PASSWORD", None
153 | )
154 | interface = handle(shlex.split("fractal user whoami"))
155 | assert "Invalid authentication credentials" in interface.data
156 | assert interface.retcode == 1
157 |
158 |
159 | def test_invalid_token_path():
160 | cmd = "fractal --token-path missingfile user whoami"
161 | interface = handle(shlex.split(cmd))
162 | interface.show()
163 | assert interface.retcode == 1
164 |
165 |
166 | def test_valid_token_path(
167 | tmp_path: Path,
168 | monkeypatch,
169 | tester,
170 | ):
171 | # Get valid token
172 | with AuthClient(
173 | fractal_server="http://localhost:8765",
174 | username=tester["email"],
175 | password=tester["password"],
176 | token=None,
177 | ) as client:
178 | token_data = client.token
179 | debug(token_data)
180 | token_path = (tmp_path / "token").as_posix()
181 |
182 | import fractal_client.client
183 |
184 | monkeypatch.setattr(
185 | fractal_client.client.settings,
186 | "FRACTAL_SERVER",
187 | "http://localhost:8765",
188 | )
189 |
190 | # Use valid token
191 | with open(token_path, "w") as f:
192 | f.write(token_data)
193 | cmd = f"fractal --token-path {token_path} user whoami"
194 | interface = handle(shlex.split(cmd))
195 | assert interface.data["email"] == tester["email"]
196 | assert interface.retcode == 0
197 |
198 | # Use valid token, with newlines
199 | with open(token_path, "w") as f:
200 | f.write(f"\n\n{token_data}\n\n\n")
201 | cmd = f"fractal --token-path {token_path} user whoami"
202 | interface = handle(shlex.split(cmd))
203 | assert interface.data["email"] == tester["email"]
204 | assert interface.retcode == 0
205 |
206 |
207 | def test_missing_fractal_server(monkeypatch):
208 | import fractal_client.client
209 |
210 | monkeypatch.setattr(
211 | fractal_client.client.settings,
212 | "FRACTAL_SERVER",
213 | None,
214 | )
215 | interface = handle(shlex.split("fractal user whoami"))
216 | assert "You should set the fractal-server URL" in interface.data
217 | assert interface.retcode == 1
218 |
--------------------------------------------------------------------------------
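`test_argparse_abbreviation` above depends on prefix matching being disabled in the client's argument parser. In `argparse` this is controlled by the `allow_abbrev` flag; here is a standalone sketch of the mechanism (not the client's actual parser setup):

import argparse

parser = argparse.ArgumentParser(allow_abbrev=False)
parser.add_argument("--superuser", action="store_true")

parser.parse_args(["--superuser"])  # accepted
# parser.parse_args(["--super"])    # would fail: unrecognized argument
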
/tests/test_dataset.py:
--------------------------------------------------------------------------------
1 | from devtools import debug
2 |
3 |
4 | def test_create_dataset(
5 | invoke,
6 | new_name,
7 | invoke_as_superuser,
8 | ):
9 | """
10 | Test some specific branches of the post_dataset function and parser.
11 | """
12 |
13 | res = invoke(f"project new {new_name()}")
14 | project_id = res.data["id"]
15 |
16 | res = invoke(
17 | f"project add-dataset {project_id} {new_name()} --zarr-dir /tmp"
18 | )
19 |
20 | debug(res.data)
21 | assert res.retcode == 0
22 |
23 | # Add a project_dir to user-settings
24 | res = invoke("--batch user whoami")
25 | assert res.retcode == 0
26 | user_id = res.data
27 | res = invoke_as_superuser(
28 | f"user edit {user_id} --new-project-dir /something"
29 | )
30 | assert res.retcode == 0
31 | res = invoke(f"--batch project add-dataset {project_id} {new_name()}")
32 | debug(res.data)
33 | assert res.retcode == 0
34 |
35 |
36 | def test_edit_dataset(invoke, tmp_path, new_name):
37 | res = invoke(f"project new {new_name()}")
38 | project_id = res.data["id"]
39 |
40 | res = invoke(
41 | f"project add-dataset {project_id} {new_name()} --zarr-dir /tmp"
42 | )
43 | dataset_id = res.data["id"]
44 |
45 | NAME = new_name()
46 |
47 | res = invoke(f"dataset edit {project_id} {dataset_id} --new-name {NAME}")
48 | res.show()
49 | assert res.data["name"] == NAME
50 | assert res.retcode == 0
51 |
52 |
53 | def test_delete_dataset(invoke, new_name):
54 | # Create a project with its default dataset
55 | res = invoke(f"project new {new_name()}")
56 | project_id = res.data["id"]
57 |
58 | res = invoke(
59 | f"project add-dataset {project_id} {new_name()} --zarr-dir /tmp"
60 | )
61 | dataset_id = res.data["id"]
62 |
63 | # Delete dataset
64 | res = invoke(f"dataset delete {project_id} {dataset_id}")
65 | debug(res.data)
66 | # Check that dataset show fails
67 | res = invoke(f"dataset show {project_id} {dataset_id}")
68 | assert res.data["detail"] == "Dataset not found"
69 |
70 |
71 | def test_show_dataset(invoke, new_name):
72 | # Create a project with its default dataset
73 | res = invoke(f"project new {new_name()}")
74 | project_id = res.data["id"]
75 | res = invoke(
76 | f"project add-dataset {project_id} {new_name()} --zarr-dir /tmp"
77 | )
78 | dataset_id = res.data["id"]
79 |
80 | res = invoke(f"dataset show {project_id} {dataset_id}")
81 | res.show()
82 | assert res.retcode == 0
83 |
--------------------------------------------------------------------------------
/tests/test_fixtures.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 |
3 | from devtools import debug
4 |
5 | from fractal_client.config import settings
6 |
7 |
8 | def test_clear_cache():
9 | cache_dir = settings.FRACTAL_CACHE_PATH
10 | debug(cache_dir)
11 | assert cache_dir != str(Path.home() / ".cache/fractal")
12 |
13 |
14 | def test_override_settings(override_settings):
15 | override_settings(FRACTAL_CACHE_PATH="/tmp/xy")
16 | cache_dir = settings.FRACTAL_CACHE_PATH
17 | assert cache_dir == "/tmp/xy"
18 |
--------------------------------------------------------------------------------
/tests/test_group.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from fractal_server.app.security import FRACTAL_DEFAULT_GROUP_NAME
3 |
4 |
5 | def test_group_commands_auth(invoke, caplog):
6 | """
7 | Assert 'group' commands are not accessible to standard users
8 | """
9 |
10 | def _assert_403(cmd):
11 | caplog.clear()
12 | with pytest.raises(SystemExit):
13 | invoke(cmd)
14 | assert "403" in caplog.text
15 |
16 | _assert_403(cmd="group list")
17 | _assert_403(cmd="group get 1")
18 | _assert_403(cmd="group new foo")
19 | _assert_403(cmd="group update 1 --new-viewer-paths foo")
20 |
21 |
22 | def test_group_commands(
23 | user_factory, invoke_as_superuser, new_name, superuser
24 | ):
25 |
26 | # get default group id and superuser id
27 | res = invoke_as_superuser("group list --user-ids")
28 | assert res.retcode == 0
29 | initial_number_of_groups = len(res.data)
30 | default_group = next(
31 | group
32 | for group in res.data
33 | if group["name"] == FRACTAL_DEFAULT_GROUP_NAME
34 | )
35 | initial_number_of_users = len(default_group["user_ids"])
36 |
37 | default_group_id = default_group["id"]
38 | superuser_id = superuser["id"]
39 |
40 | # create 3 standard users (by default in default group)
41 | user1 = user_factory(email=f"{new_name()}@example.org", password="psw1")
42 | user1_id = user1["id"]
43 | assert user1["group_ids_names"] == [[default_group_id, "All"]]
44 | user2 = user_factory(email=f"{new_name()}@example.org", password="psw2")
45 | user2_id = user2["id"]
46 | assert user2["group_ids_names"] == [[default_group_id, "All"]]
47 | user3 = user_factory(email=f"{new_name()}@example.org", password="psw3")
48 | user3_id = user3["id"]
49 | assert user3["group_ids_names"] == [[default_group_id, "All"]]
50 |
51 | res = invoke_as_superuser("group list --user-ids")
52 | assert len(res.data) == initial_number_of_groups
53 | assert len(res.data[0]["user_ids"]) == initial_number_of_users + 3
54 |
55 | # Create 2 new empty groups (`group new`)
56 |
57 | with pytest.raises(SystemExit):
58 | # missing 'name'
59 | invoke_as_superuser("group new")
60 |
61 | NEW_NAME = new_name()
62 | res = invoke_as_superuser(f"group new {NEW_NAME} --viewer-paths /a /b")
63 | assert res.retcode == 0
64 | assert res.data["name"] == NEW_NAME
65 | assert res.data["user_ids"] == []
66 | group1_viewer_paths = res.data["viewer_paths"]
67 | assert group1_viewer_paths == ["/a", "/b"]
68 | group1_id = res.data["id"]
69 |
70 | res = invoke_as_superuser(f"group new {new_name()}")
71 | group2_id = res.data["id"]
72 | group2_viewer_paths = res.data["viewer_paths"]
73 | assert group2_viewer_paths == []
74 |
75 | # Add users to groups (`group add-user/remove-user`)
76 |
77 | with pytest.raises(SystemExit):
78 | # missing both arguments
79 | invoke_as_superuser("group add-user")
80 | with pytest.raises(SystemExit):
81 | # missing one argument
82 | invoke_as_superuser("group add-user 1")
83 |
84 | with pytest.raises(SystemExit):
85 | # user already in group
86 | invoke_as_superuser(
87 | f"group add-user {default_group_id} {superuser_id}"
88 | )
89 | with pytest.raises(SystemExit):
90 |         # non-existent user
91 | invoke_as_superuser(f"group add-user {default_group_id} 9999")
92 |
93 | # add `user1` and `user2` to `group1`
94 | invoke_as_superuser(f"group add-user {group1_id} {user1_id}")
95 | res = invoke_as_superuser(f"group add-user {group1_id} {user2_id}")
96 | assert res.retcode == 0
97 | assert res.data["id"] == group1_id
98 | assert res.data["user_ids"] == [user1_id, user2_id]
99 | assert res.data["viewer_paths"] == group1_viewer_paths
100 |
101 | # add `user3` and `user2` to `group2`
102 | invoke_as_superuser(f"group add-user {group2_id} {user3_id}")
103 | res = invoke_as_superuser(f"group add-user {group2_id} {user2_id}")
104 | assert res.retcode == 0
105 | assert res.data["id"] == group2_id
106 | assert set(res.data["user_ids"]) == {user3_id, user2_id}
107 | # add also `superuser` to `group2`
108 | res = invoke_as_superuser(f"group add-user {group2_id} {superuser_id}")
109 | assert set(res.data["user_ids"]) == {user3_id, user2_id, superuser_id}
110 | assert res.data["viewer_paths"] == group2_viewer_paths
111 |
112 | # Check groups are updated
113 |
114 | res = invoke_as_superuser("group list --user-ids")
115 | assert len(res.data) == initial_number_of_groups + 2
116 | users_default_group = next(
117 | g["user_ids"] for g in res.data if g["id"] == default_group_id
118 | )
119 | assert len(users_default_group) == initial_number_of_users + 3
120 | users_group_1 = next(
121 | g["user_ids"] for g in res.data if g["id"] == group1_id
122 | )
123 | assert set(users_group_1) == {user1_id, user2_id}
124 | users_group_2 = next(
125 | g["user_ids"] for g in res.data if g["id"] == group2_id
126 | )
127 | assert set(users_group_2) == {user3_id, user2_id, superuser_id}
128 |
129 | # Remove users from group
130 | res = invoke_as_superuser(f"group remove-user {group2_id} {user3_id}")
131 | assert set(res.data["user_ids"]) == {user2_id, superuser_id}
132 | res = invoke_as_superuser(f"group remove-user {group2_id} {user2_id}")
133 | assert set(res.data["user_ids"]) == {superuser_id}
134 | res = invoke_as_superuser(f"group remove-user {group2_id} {superuser_id}")
135 | assert set(res.data["user_ids"]) == set()
136 |
137 | # Test `group get` command
138 |
139 | with pytest.raises(SystemExit):
140 | # missing 'group_id'
141 | invoke_as_superuser("group get")
142 |
143 | res = invoke_as_superuser(f"group get {default_group_id}")
144 | assert res.retcode == 0
145 | assert res.data["name"] == FRACTAL_DEFAULT_GROUP_NAME
146 | assert len(res.data["user_ids"]) == initial_number_of_users + 3
147 |
148 | # Test `list` without `--user-ids`
149 |
150 | res = invoke_as_superuser("group list")
151 | for group in res.data:
152 | assert group["user_ids"] is None
153 |
154 | # Test `--batch`
155 |
156 | res = invoke_as_superuser("--batch group list")
157 | assert len(res.data.split(" ")) == initial_number_of_groups + 2
158 |
159 | res = invoke_as_superuser(f"--batch group new {new_name()}")
160 | assert isinstance(res.data, int)
161 |
162 | # Test update of viewer-paths
163 |
164 | res_pre_patch = invoke_as_superuser(f"group get {group1_id}")
165 | assert res_pre_patch.retcode == 0
166 | res_pre_patch.data.pop("viewer_paths")
167 | res_post_patch = invoke_as_superuser(
168 | f"group update {group1_id} --new-viewer-paths /a/b /c/d"
169 | )
170 | assert res_post_patch.retcode == 0
171 |     viewer_paths_post_patch = res_post_patch.data.pop("viewer_paths")
172 |     assert viewer_paths_post_patch == ["/a/b", "/c/d"]
173 | assert res_post_patch.data == res_pre_patch.data
174 |
175 | # Test `whoami --viewer-paths`
176 | invoke_as_superuser(f"group add-user {group1_id} {superuser_id}")
177 | assert "viewer_paths" not in superuser
178 | res = invoke_as_superuser("user whoami --viewer-paths")
179 | assert set(res.data.get("viewer_paths")) == {"/a/b", "/c/d"}
180 |
--------------------------------------------------------------------------------
/tests/test_invalid_commands.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from fractal_client.cmd import dataset
4 | from fractal_client.cmd import group
5 | from fractal_client.cmd import job
6 | from fractal_client.cmd import NoCommandError
7 | from fractal_client.cmd import project
8 | from fractal_client.cmd import task
9 | from fractal_client.cmd import user
10 | from fractal_client.cmd import workflow
11 |
12 |
13 | def test_invalid_commands(invoke):
14 | for arg in ["", " INVALID"]:
15 | for command in [
16 | "",
17 | "dataset",
18 | "project",
19 | "job",
20 | "workflow",
21 | "task",
22 | "user",
23 | "group",
24 | ]:
25 | with pytest.raises(SystemExit):
26 | invoke(f"{command}{arg}")
27 |
28 |
29 | def test_unit_invalid_subcommand():
30 | for _function in [project, dataset, task, workflow, job, user, group]:
31 | with pytest.raises(NoCommandError):
32 | _function(client=None, subcmd="invalid")
33 |
--------------------------------------------------------------------------------
/tests/test_job.py:
--------------------------------------------------------------------------------
1 | import json
2 | import time
3 | from pathlib import Path
4 | from urllib.request import urlretrieve
5 |
6 | import pytest
7 | from devtools import debug
8 |
9 | TIMEOUT = 15.0
10 |
11 |
12 | def test_job_submit(
13 | invoke,
14 | project_factory,
15 | dataset_factory,
16 | tmp_path: Path,
17 | testdata_path: Path,
18 | new_name,
19 | ):
20 | # Collect tasks
21 | PACKAGE_URL = (
22 | "https://github.com/fractal-analytics-platform/fractal-server/"
23 | "raw/main/tests/v2/fractal_tasks_mock/dist/"
24 | "fractal_tasks_mock-0.0.1-py3-none-any.whl"
25 | )
26 | PACKAGE_PATH = "/tmp/fractal_tasks_mock-0.0.1-py3-none-any.whl"
27 | urlretrieve(PACKAGE_URL, PACKAGE_PATH)
28 |
29 | res = invoke(f"--batch task collect {PACKAGE_PATH} --private")
30 | assert res.retcode == 0
31 | activity_id = res.data
32 |
33 | # Create a project
34 | project = project_factory(name=new_name())
35 | project_id = project["id"]
36 | zarr_dir = (tmp_path / "zarr_dir").as_posix()
37 |
38 | type_filters = {"a": True, "b": False}
39 | type_filters_file = tmp_path / "type_filters.json"
40 | with type_filters_file.open("w") as f:
41 | json.dump(type_filters, f)
42 |
43 | dataset = dataset_factory(
44 | name=new_name(),
45 | project_id=project_id,
46 | zarr_dir=zarr_dir,
47 | )
48 | dataset_id = dataset["id"]
49 |
50 | # Wait for task collection to end
51 | starting_time = time.perf_counter()
52 | while True:
53 | res1 = invoke(f"task check-collection {activity_id}")
54 | if res1.data["status"] == "OK":
55 | debug(res1.data)
56 | break
57 | time.sleep(0.1)
58 | assert time.perf_counter() - starting_time < TIMEOUT
59 |
60 | wf_json = (testdata_path / "import-export/wf3.json").as_posix()
61 | res = invoke(
62 | f"workflow import --project-id {project_id} --json-file {wf_json}"
63 | )
64 | workflow = res.data
65 | workflow_id = workflow["id"]
66 | debug(workflow)
67 |
68 | FIRST_TASK_INDEX = 0
69 | LAST_TASK_INDEX = 0
70 | WORKER_INIT = "export MYVARIABLE=MYVALUE"
71 |
72 | res = invoke(
73 | f"job submit {project_id} {workflow_id} {dataset_id} "
74 | f"--start {FIRST_TASK_INDEX} --end {LAST_TASK_INDEX} "
75 | f'--worker-init "{WORKER_INIT}"'
76 | )
77 | assert res.retcode == 0
78 | job1 = res.data
79 | job1_id = job1["id"]
80 | assert job1["status"] == "submitted"
81 | assert job1["first_task_index"] == FIRST_TASK_INDEX
82 | assert job1["last_task_index"] == LAST_TASK_INDEX
83 | assert job1["worker_init"] == WORKER_INIT
84 |
85 | # Check that job completed successfully
86 | cmd = f"job show {project_id} {job1_id}"
87 | starting_time = time.perf_counter()
88 | debug(cmd)
89 | while True:
90 | res = invoke(cmd)
91 | job1 = res.data
92 | debug(job1)
93 | assert res.retcode == 0
94 | if job1["status"] == "done":
95 | break
96 | elif job1["status"] == "failed":
97 | raise RuntimeError(job1)
98 | time.sleep(0.1)
99 | assert time.perf_counter() - starting_time < TIMEOUT
100 | assert job1["log"] is not None
101 |
102 | # Prepare and run a workflow with a failing task
103 | FIRST_TASK_INDEX = 0
104 | LAST_TASK_INDEX = 1
105 | res = invoke(
106 | f"--batch "
107 | f"job submit {project_id} {workflow_id} {dataset_id} "
108 | f"--start {FIRST_TASK_INDEX} --end {LAST_TASK_INDEX} "
109 | f'--worker-init "{WORKER_INIT}"'
110 | )
111 | assert res.retcode == 0
112 | job2_id = res.data
113 |
114 | # Verify that status is failed, and that there is a log
115 | cmd = f"--batch job show {project_id} {job2_id}"
116 | starting_time = time.perf_counter()
117 | while True:
118 | res = invoke(cmd)
119 | status = res.data
120 | debug(status)
121 | assert res.retcode == 0
122 | if status == "failed":
123 | break
124 | time.sleep(0.1)
125 | assert time.perf_counter() - starting_time < TIMEOUT
126 |
127 | # Run job list with/without --batch
128 | res = invoke(f"--batch job list {project_id}")
129 | assert res.retcode == 0
130 | assert res.data == f"{job1_id} {job2_id}"
131 | res = invoke(f"job list {project_id}")
132 | assert res.retcode == 0
133 | assert {job["id"] for job in res.data} == {job1_id, job2_id}
134 |
135 | # Download logs / success
136 | log1_dir = tmp_path / "log1"
137 | cmd = (
138 | f"job download-logs {project_id} {job1_id} "
139 | f"--output {log1_dir.as_posix()}"
140 | )
141 | res = invoke(cmd)
142 | assert res.retcode == 0
143 | files = log1_dir.glob("*")
144 | assert "workflow.log" in [f.name for f in files]
145 |
146 | # Download logs / fail because folder already exists
147 | log1_dir = tmp_path / "log1"
148 | cmd = (
149 | f"job download-logs {project_id} {job1_id} "
150 | f"--output {log1_dir.as_posix()}"
151 | )
152 | res = invoke(cmd)
153 | assert res.retcode == 1
154 |
155 | # Download logs / fail because of invalid job_id
156 | cmd = f"job download-logs {project_id} 9999 --output /tmp/invalid/"
157 | with pytest.raises(SystemExit):
158 | invoke(cmd)
159 |
160 | # --attribute-filters-json and --type-filters-json
161 | attribute_filters = {"x": [1, 2], "y": ["foo", "bar"]}
162 | attribute_filters_file = tmp_path / "attribute_filters.json"
163 | with attribute_filters_file.open("w") as f:
164 | json.dump(attribute_filters, f)
165 |
166 | type_filters = {"x": True, "y": False}
167 | type_filters_file = tmp_path / "type_filters.json"
168 | with type_filters_file.open("w") as f:
169 | json.dump(type_filters, f)
170 |
171 | res = invoke(
172 | f"job submit {project_id} {workflow_id} {dataset_id} "
173 | f"--attribute-filters-json {attribute_filters_file} "
174 | f"--type-filters-json {type_filters_file}"
175 | )
176 | assert res.retcode == 0
177 | assert res.data["attribute_filters"] == attribute_filters
178 | assert res.data["type_filters"] == type_filters
179 |
180 |
181 | def test_job_stop(invoke, caplog):
182 | with pytest.raises(SystemExit):
183 | invoke("job stop 123456 1234546")
184 | EXPECTED_MSG = (
185 | "Stopping a job execution is not implemented "
186 | "for FRACTAL_RUNNER_BACKEND=local"
187 | )
188 | assert EXPECTED_MSG in caplog.text
189 |
--------------------------------------------------------------------------------
/tests/test_main.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from fractal_client.client import main
4 | from fractal_client.interface import Interface
5 |
6 |
7 | def test_unit_main(monkeypatch):
8 | """
9 | Run the `main` function.
10 |
11 | NOTE: Mocking `handle()` is necessary because there is no
12 | appropriate `sys.argv`.
13 | """
14 | import fractal_client.client
15 |
16 | monkeypatch.setattr(
17 | fractal_client.client,
18 | "handle",
19 | lambda: Interface(data="data", retcode=0),
20 | )
21 | with pytest.raises(SystemExit):
22 | main()
23 |
--------------------------------------------------------------------------------
/tests/test_project.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from devtools import debug
3 |
4 |
5 | def test_project_create(invoke, new_name):
6 | PROJECT_NAME = new_name()
7 | res = invoke(f"project new {PROJECT_NAME}")
8 | debug(res)
9 | assert res.data["name"] == PROJECT_NAME
10 |
11 |
12 | def test_project_delete(invoke, new_name):
13 |
14 | # Create project
15 | res = invoke(f"project new {new_name()}")
16 | res.show()
17 | project_id_1 = res.data["id"]
18 |
19 | # Show project
20 | res = invoke(f"project show {project_id_1}")
21 | res.show()
22 |
23 | # Delete project
24 | res = invoke(f"project delete {project_id_1}")
25 | assert res.retcode == 0
26 |
27 | # Try to show deleted project, and fail
28 | with pytest.raises(SystemExit):
29 | res = invoke(f"project show {project_id_1}")
30 |
31 |
32 | def test_project_create_batch(invoke, new_name):
33 | res = invoke("project list")
34 | initial_projects = res.data
35 |
36 | res = invoke(f"--batch project new {new_name()}")
37 | debug(res)
38 | debug(res.data)
39 | project_id = res.data
40 |
41 | res = invoke("project list")
42 | assert len(res.data) == len(initial_projects) + 1
43 | assert any(project["id"] == project_id for project in res.data)
44 |
45 |
46 | def test_project_list(invoke, new_name):
47 | res = invoke("project list")
48 | initial_projects = len(res.data)
49 |
50 | res.show()
51 |
52 | res = invoke(f"--batch project new {new_name()}")
53 | project0_id = res.data
54 | res = invoke(
55 | "--batch "
56 | f"project add-dataset {project0_id} {new_name()} --zarr-dir /tmp"
57 | )
58 | res = invoke(f"--batch project new {new_name()}")
59 |
60 | res = invoke("project list")
61 | debug(res)
62 | res.show()
63 | assert len(res.data) == initial_projects + 2
64 |
65 |
66 | @pytest.mark.parametrize("patch_name", [True, False])
67 | def test_edit_project(invoke, new_name, patch_name: bool):
68 | name = new_name()
69 | res = invoke(f"project new {name}")
70 | project = res.data
71 | project_id = project["id"]
72 |
73 | cmd = f"project edit {project_id}"
74 | if patch_name:
75 | NEW_NAME = new_name()
76 | cmd += f" --new-name {NEW_NAME}"
77 |
78 | res = invoke(cmd)
79 | debug(res)
80 |
81 | assert res.retcode == 0
82 | new_project = res.data
83 | if patch_name:
84 | assert new_project["name"] == NEW_NAME
85 | else:
86 | assert new_project["name"] == name
87 |
--------------------------------------------------------------------------------
/tests/test_task.py:
--------------------------------------------------------------------------------
1 | import json
2 | from pathlib import Path
3 |
4 | import pytest
5 | from devtools import debug
6 |
7 | from fractal_client.cmd._aux_task_caching import TASKS_CACHE_FILENAME
8 | from fractal_client.config import settings
9 |
10 |
11 | COLLECTION_TIMEOUT = 15.0
12 |
13 |
14 | def test_task_new(
15 | invoke,
16 | invoke_as_custom_user,
17 | tmp_path,
18 | new_name,
19 | user_factory,
20 | ):
21 |
22 | # create a new task with just positional required args
23 | args_path = str(tmp_path / "args.json")
24 | args = {"image_dir": "/asdasd"}
25 | with open(args_path, "w") as f:
26 | json.dump(args, f)
27 |
28 | meta_path = str(tmp_path / "meta.json")
29 | meta = {"a": "b"}
30 | with open(meta_path, "w") as f:
31 | json.dump(meta, f)
32 |
33 | TASK_NAME = new_name()
34 | res = invoke(
35 | f"task new {TASK_NAME} --command-parallel _command "
36 | f"--version _version --meta-parallel {meta_path} "
37 | f"--args-schema-parallel {args_path} "
38 | f"--args-schema-version 1.0.0 "
39 | "--private"
40 | )
41 | debug(res.data)
42 | assert res.retcode == 0
43 | assert res.data["name"] == TASK_NAME
44 | assert res.data["command_parallel"] == "_command"
45 | assert res.data["version"] == "_version"
46 | assert res.data["meta_parallel"] == meta
47 | assert res.data["args_schema_version"] == "1.0.0"
48 | first_task_id = int(res.data["id"])
49 |
50 | # Check that task is actually private
51 | new_user_credentials = dict(
52 | email=f"{new_name()}@example.org",
53 | password="1234",
54 | )
55 | user_factory(**new_user_credentials)
56 | with pytest.raises(SystemExit):
57 | res = invoke_as_custom_user(
58 | f"task show {first_task_id}",
59 | **new_user_credentials,
60 | )
61 |
62 | # create a new task with batch option
63 | TASK_NAME_2 = new_name()
64 | res = invoke(
65 | f"--batch task new {TASK_NAME_2} --command-parallel _command2"
66 | )
67 | res.show()
68 | assert res.retcode == 0
69 | assert res.data == str(first_task_id + 1)
70 |
71 |     # create a new task with the same name as before; note that
72 |     # check_response calls sys.exit(1) on an unexpected status code
73 | with pytest.raises(SystemExit) as e:
74 | invoke(f"task new {TASK_NAME_2} --command-parallel _command2")
75 | assert e.value.code == 1
76 |
77 |     # create a new task passing a non-existent file
78 | res = invoke(
79 | f"task new {new_name()} --command-parallel _command "
80 | "--meta-parallel ./foo.pdf"
81 | )
82 | assert res.retcode == 1
83 |
84 | metanp_path = str(tmp_path / "meta.json")
85 | metanp = {"a": "b"}
86 | with open(metanp_path, "w") as f:
87 | json.dump(metanp, f)
88 | res = invoke(
89 | f"task new {new_name()} --command-non-parallel _command_np "
90 | f"--meta-non-parallel {metanp_path} "
91 | f"--args-schema-non-parallel {args_path} "
92 | )
93 | assert res.data["args_schema_non_parallel"] == args
94 |
95 |
96 | def test_task_edit(
97 | caplog,
98 | invoke,
99 | tmp_path,
100 | new_name,
101 | ):
102 |
103 | args_path = str(tmp_path / "args.json")
104 | args = {"image_dir": "/asdasd"}
105 | with open(args_path, "w") as f:
106 | json.dump(args, f)
107 |
108 | meta_path = str(tmp_path / "meta.json")
109 | meta = {"a": "b"}
110 | with open(meta_path, "w") as f:
111 | json.dump(meta, f)
112 |
113 | NAME = new_name()
114 | task = invoke(
115 | f"task new {NAME} --command-parallel _command "
116 | f"--version _version --meta-parallel {meta_path} "
117 | f"--args-schema-parallel {args_path} "
118 | f"--args-schema-version 1.0.0"
119 | )
120 |
121 | task.show()
122 | assert task.retcode == 0
123 | task_id = task.data["id"]
124 |
125 | # Test successful edit of string attributes
126 | NEW_COMMAND_PARALLEL = "run_parallel"
127 | res = invoke(
128 | f"task edit --id {task_id} "
129 | f"--command-parallel {NEW_COMMAND_PARALLEL}"
130 | )
131 | assert res.data["command_parallel"] == NEW_COMMAND_PARALLEL
132 | assert res.retcode == 0
133 |
134 | # Add non-parallel task and test command-non-parallel
135 |
136 | meta_path = str(tmp_path / "meta.json")
137 | meta = {"a": "b"}
138 | with open(meta_path, "w") as f:
139 | json.dump(meta, f)
140 |
141 | task_np = invoke(
142 | f"task new {new_name()} --command-non-parallel _command_np "
143 | f"--version 1.0.1 --meta-non-parallel {meta_path}"
144 | )
145 |
146 | NEW_COMMAND_NON_PARALLEL = "run_non_parallel"
147 | res = invoke(
148 | f"task edit --id {task_np.data['id']} "
149 | f"--command-non-parallel {NEW_COMMAND_NON_PARALLEL}"
150 | )
151 | assert res.data["command_non_parallel"] == NEW_COMMAND_NON_PARALLEL
152 | assert res.retcode == 0
153 |
154 |     # Test fail with neither task_id nor task_name
155 | with pytest.raises(SystemExit):
156 | res = invoke("task edit")
157 | # Test fail with both task_id and task_name
158 | with pytest.raises(SystemExit):
159 | res = invoke(f"task edit --id {task_id} --name {task.data['name']}")
160 | # Test fail with both task_id and task_version
161 | with pytest.raises(SystemExit):
162 | res = invoke(f"task edit --id {task_id} --version 1.2.3.4.5.6")
163 | assert caplog.records[-1].msg == (
164 | "Too many arguments: cannot provide both `id` and `version`."
165 | )
166 | # Test fail "name and wrong version"
167 | with pytest.raises(SystemExit):
168 | invoke("task delete --name INVALID_NAME --version INVALID_VERSION")
169 |
170 | input_types = {"input": True, "output": False}
171 |
172 | i_types_path = str(tmp_path / "itypes.json")
173 | with open(i_types_path, "w") as f:
174 | json.dump(input_types, f)
175 |
176 | output_types = {"input": False, "output": True}
177 |
178 | o_types_path = str(tmp_path / "otypes.json")
179 | with open(o_types_path, "w") as f:
180 | json.dump(output_types, f)
181 |
182 | # Test regular updates (both by id and name)
183 | res = invoke(f"task edit --id {task_id} --input-types {i_types_path}")
184 | assert res.data["input_types"] == input_types
185 | assert res.retcode == 0
186 | res = invoke(f"task edit --name {NAME} --output-types {o_types_path}")
187 | assert res.data["output_types"] == output_types
188 | assert res.retcode == 0
189 |
190 | # Test regular update by name, after deleting cache
191 | cache_dir = Path(settings.FRACTAL_CACHE_PATH)
192 | cache_file = cache_dir / TASKS_CACHE_FILENAME
193 | cache_file.unlink(missing_ok=True)
194 |
195 | res = invoke(f"task edit --name {NAME} --output-types {o_types_path}")
196 | assert res.data["output_types"] == output_types
197 | assert res.retcode == 0
198 |
199 |     # Test fail with an invalid name
200 | fail_output_types = {"input": True, "output": False}
201 |
202 | f_o_types_path = str(tmp_path / "fotypes.json")
203 | with open(f_o_types_path, "w") as f:
204 | json.dump(fail_output_types, f)
205 |
206 | with pytest.raises(SystemExit):
207 | res = invoke(
208 | f"task edit --name INVALID_NAME --output-types {f_o_types_path}"
209 | )
210 |
211 | # Test regular update by name, after creating an invalid cache
212 | with cache_file.open("w") as f:
213 | json.dump([], f)
214 |
215 | new_output_types = {"input": False, "output": True}
216 | n_o_types_path = str(tmp_path / "notypes.json")
217 | with open(n_o_types_path, "w") as f:
218 | json.dump(new_output_types, f)
219 |
220 | res = invoke(f"task edit --name {NAME} --output-types {n_o_types_path}")
221 | assert res.data["output_types"] == new_output_types
222 | assert res.retcode == 0
223 |
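224 |
225 | # The tests above repeatedly dump a small dict to a JSON file and pass the
226 | # file's path to `task new` / `task edit`. A sketch of that pattern as a
227 | # helper (illustrative only; the suite inlines it at each call site):
228 | def _example_write_json(path, data) -> str:
229 |     """Write `data` to `path` as JSON and return the path as a string."""
230 |     with open(path, "w") as f:
231 |         json.dump(data, f)
232 |     return str(path)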
--------------------------------------------------------------------------------
/tests/test_task_collection.py:
--------------------------------------------------------------------------------
1 | import json
2 | import logging
3 | import sys
4 | import time
5 | from urllib.request import urlopen
6 | from urllib.request import urlretrieve
7 |
8 | import pytest
9 | from devtools import debug
10 |
11 | logging.getLogger("httpx").setLevel(logging.DEBUG)
12 |
13 |
14 | def test_task_collection_command(invoke, caplog):
15 | """
16 | Test that all `task collect` options are correctly parsed and included in
17 |     the payload for the API request.
18 | """
19 | INVALID_PYTHON_VERSION = "xxx"
20 | PACKAGE = "devtools"
21 | PACKAGE_VERSION = "0.11.0"
22 | PYTHON_VERSION = INVALID_PYTHON_VERSION
23 | PACKAGE_EXTRAS = "a,b,c"
24 | with pytest.raises(SystemExit):
25 | invoke(
26 | "task collect "
27 | f"{PACKAGE} "
28 | f"--package-version {PACKAGE_VERSION} "
29 | f"--python-version {PYTHON_VERSION} "
30 | f"--package-extras {PACKAGE_EXTRAS} "
31 | "--pinned-dependency pydantic=1.10.0"
32 | )
33 | debug(caplog.text)
34 | assert "Server returned 422" in caplog.text
35 | assert f"input_value='{INVALID_PYTHON_VERSION}'" in caplog.text
36 |
37 |
38 | def test_task_collection_invalid_pinned_dependency(invoke, caplog):
39 | """
40 | Test the case where `pinned_package_versions` has the wrong format.
41 | """
42 | PACKAGE = "devtools"
43 | with pytest.raises(SystemExit):
44 | invoke(f"task collect {PACKAGE} --pinned-dependency invalid-string")
45 |     # Check that the invalid pin was detected and logged
46 | error_line = next(
47 | record.message
48 | for record in caplog.records
49 | if "Invalid pin:" in record.message
50 | )
51 | debug(error_line)
52 | assert error_line is not None
53 |
54 |
55 | def test_task_collection(invoke_as_custom_user, user_factory, new_name):
56 | """
57 | GIVEN a pip installable package containing fractal-compatible tasks
58 | WHEN the collection subcommand is called
59 | THEN
60 | * the collection is initiated in the background
61 | * the server returns immediately
62 | """
63 | COLLECTION_TIMEOUT = 15.0
64 |
65 | PACKAGE_URL = (
66 | "https://github.com/fractal-analytics-platform/fractal-server/"
67 | "raw/main/tests/v2/fractal_tasks_mock/dist/"
68 | "fractal_tasks_mock-0.0.1-py3-none-any.whl"
69 | )
70 | PACKAGE_PATH = "/tmp/fractal_tasks_mock-0.0.1-py3-none-any.whl"
71 | urlretrieve(PACKAGE_URL, PACKAGE_PATH)
72 |
73 | new_user = dict(email=f"{new_name()}@example.org", password="1234")
74 | user_factory(**new_user)
75 |
76 | res = invoke_as_custom_user("task list", **new_user)
77 | initial_task_list = len(res.data)
78 |
79 | res0 = invoke_as_custom_user(
80 | f"task collect --private {PACKAGE_PATH}",
81 | **new_user,
82 | )
83 | debug(res0.data)
84 | activity_id = res0.data["id"]
85 |
86 | # Wait until collection is complete
87 | starting_time = time.perf_counter()
88 | while True:
89 | res1 = invoke_as_custom_user(
90 | f"task check-collection {activity_id}", **new_user
91 | )
92 | assert res1.retcode == 0
93 | time.sleep(0.1)
94 | if res1.data["status"] == "OK":
95 | debug(res1.data)
96 | break
97 | assert time.perf_counter() - starting_time < COLLECTION_TIMEOUT
98 |
99 | # Check successful status and no logs
100 | res2 = invoke_as_custom_user(
101 | f"task check-collection {activity_id}", **new_user
102 | )
103 | assert res2.retcode == 0
104 | assert res2.data["status"] == "OK"
105 | assert res2.data["log"] is None
106 |
107 | # Check logs
108 | res3 = invoke_as_custom_user(
109 | f"task check-collection {activity_id} --include-logs", **new_user
110 | )
111 | assert res3.retcode == 0
112 | assert res3.data["status"] == "OK"
113 | assert res3.data["log"] is not None
114 |
115 | # Check task list
116 | res = invoke_as_custom_user("task list", **new_user)
117 | assert len(res.data) == initial_task_list + 15
118 |
119 | # Second collection
120 | with pytest.raises(SystemExit):
121 | invoke_as_custom_user(f"task collect {PACKAGE_PATH}", **new_user)
122 |
123 |
124 | def test_task_collection_custom(
125 | user_factory, new_name, tmp_path, invoke_as_custom_user, caplog
126 | ):
127 | new_user = dict(email=f"{new_name()}@example.org", password="1234")
128 | user_factory(**new_user)
129 |
130 | python_interpreter = sys.executable
131 | package_name = "fractal-client"
132 | manifest = str(tmp_path / "manifest.json")
133 |
134 | # Download and write a valid Manifest
135 | manifest_url = (
136 | "https://github.com/fractal-analytics-platform/fractal-server/"
137 | "raw/main/tests/v2/fractal_tasks_mock/src/fractal_tasks_mock/"
138 | "__FRACTAL_MANIFEST__.json"
139 | )
140 | with urlopen(manifest_url) as f:
141 | manifest_dict = json.loads(f.read())
142 | with open(manifest, "w") as f:
143 | json.dump(manifest_dict, f)
144 |
145 | cmd = (
146 | f"task collect-custom --private --package-name {package_name} "
147 | f"label {python_interpreter} {manifest}"
148 | )
149 | res = invoke_as_custom_user(cmd, **new_user)
150 | debug(res.data)
151 | assert res.retcode == 0
152 | assert isinstance(res.data, list)
153 |
154 | # Second API call fails (tasks with the same identity already exist)
155 | caplog.clear()
156 | with pytest.raises(SystemExit):
157 | res = invoke_as_custom_user(cmd, **new_user)
158 |     # The manifest was redacted when the payload was logged
159 | assert '"manifest": "[value too long - redacted]"' in caplog.text
160 |
161 | # Missing manifest file
162 | cmd = (
163 | f"task collect-custom --package-name {package_name} "
164 | f"label {python_interpreter} /foo/bar"
165 | )
166 | res = invoke_as_custom_user(cmd, **new_user)
167 | assert res.retcode == 1
168 | assert "file must be on the same machine" in res.data
169 |
170 | cmd = (
171 | "--batch task collect-custom --private --package-root /tmp --version 2"
172 | f" label2 {python_interpreter} {manifest}"
173 | )
174 | res = invoke_as_custom_user(cmd, **new_user)
175 | assert res.retcode == 0
176 | assert isinstance(res.data, str)
177 |
178 | # test that '--package-root' and '--package-name' are mutually exclusive
179 | cmd = (
180 | "task collect-custom --private"
181 | f"--package-root /tmp --package-name {package_name} "
182 | f"label3 {python_interpreter} {manifest}"
183 | )
184 | with pytest.raises(SystemExit):
185 | res = invoke_as_custom_user(cmd, **new_user)
186 |
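187 |
188 | # `test_task_collection` above polls `task check-collection` until the
189 | # status is "OK", failing once COLLECTION_TIMEOUT is exceeded. The same
190 | # polling pattern as a standalone sketch (illustrative only; `invoke`
191 | # stands for any of the invoke fixtures):
192 | def _example_wait_for_collection(invoke, activity_id, timeout=15.0):
193 |     """Poll a collection activity until its status is "OK"."""
194 |     start = time.perf_counter()
195 |     while True:
196 |         res = invoke(f"task check-collection {activity_id}")
197 |         assert res.retcode == 0
198 |         if res.data["status"] == "OK":
199 |             return res.data
200 |         assert time.perf_counter() - start < timeout
201 |         time.sleep(0.1)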
--------------------------------------------------------------------------------
/tests/test_unit_response.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from httpx import Request
3 | from httpx import Response
4 |
5 | from fractal_client.response import check_response
6 |
7 |
8 | def test_check_response(caplog):
9 |
10 | JSON = {"something": "else"}
11 | response = Response(status_code=200, json=JSON)
12 | checked_response = check_response(response)
13 | assert checked_response == JSON
14 |
15 | REQUEST_BODY = {"cache_dir": "xxxxxx"}
16 | RESPONSE_BODY = {
17 | "detail": [
18 | {
19 | "loc": [
20 | "body",
21 | "cache_dir",
22 | ],
23 | "msg": (
24 | "String attribute 'cache_dir' must be an absolute path"
25 | " (given 'xxxxxx')."
26 | ),
27 | "type": "value_error",
28 | },
29 | ],
30 | }
31 | response = Response(
32 | status_code=422,
33 | json=RESPONSE_BODY,
34 | request=Request("GET", "http://example.org", json=REQUEST_BODY),
35 | )
36 | caplog.clear()
37 | with pytest.raises(SystemExit):
38 | check_response(response)
39 | assert "Original request" in caplog.records[-4].getMessage()
40 | assert "Original payload" in caplog.records[-3].getMessage()
41 | print(caplog.records[-2].getMessage())
42 | assert "msg: String attribute " in caplog.records[-2].getMessage()
43 | assert "type: value_error" in caplog.records[-2].getMessage()
44 | assert "loc: ['body', 'cache_dir']" in caplog.records[-2].getMessage()
45 | assert "Terminating" in caplog.records[-1].getMessage()
46 |
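47 |
48 | # Hand-built httpx objects, as above, make it possible to unit-test
49 | # `check_response` without a running server. A minimal sketch of such a
50 | # fixture-free 422 response (illustrative only):
51 | def _example_fake_422_response() -> Response:
52 |     """Return a 422 httpx Response with an attached request, as used above."""
53 |     return Response(
54 |         status_code=422,
55 |         json={"detail": []},
56 |         request=Request("POST", "http://example.org", json={}),
57 |     )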
--------------------------------------------------------------------------------
/tests/test_unit_task_cache.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from fractal_client.cmd._aux_task_caching import _get_matching_tasks
4 | from fractal_client.cmd._aux_task_caching import _search_in_task_list
5 | from fractal_client.cmd._aux_task_caching import FractalCacheError
6 |
7 |
8 | def test_get_matching_tasks():
9 | """Test all possible cases for function `_get_matching_tasks`"""
10 |
11 | a = dict(name="bob", id=1, version="0.1.1")
12 | b = dict(name="bob", id=2, version="1.2.0")
13 | c = dict(name="bob", id=3, version="1.3.1")
14 | d = dict(name="bar", id=4, version="0.0.0")
15 | e = dict(name="bar", id=5, version=None)
16 |
17 | TASK_LIST = [a, b, c, d, e]
18 |
19 | res = _get_matching_tasks(TASK_LIST, name="alice")
20 | assert res == []
21 |
22 | res = _get_matching_tasks(TASK_LIST, name="bob")
23 | assert res == [a, b, c]
24 |
25 | res = _get_matching_tasks(TASK_LIST, name="bob", version="1.2.0")
26 | assert res == [b]
27 |
28 | res = _get_matching_tasks(TASK_LIST, name="bar")
29 | assert res == [d, e]
30 |
31 | res = _get_matching_tasks(TASK_LIST, name="bar", version="3.1.4")
32 | assert res == []
33 |
34 |
35 | def test_search_in_task_list():
36 | """Test all possible cases for function `_search_in_task_list`"""
37 |
38 | TASK_LIST = [
39 | dict(name="dummy1", id=101, version="1.0.1"),
40 | dict(name="dummy2", id=201, version=None),
41 | dict(name="dummy2", id=202, version="2.0.0"),
42 | dict(name="dummy3", id=301, version="3.0.0"),
43 | dict(name="dummy3", id=302, version="3.1.4"),
44 | dict(name="dummy4", id=401, version="4.0.0"),
45 | dict(name="dummy4", id=402, version="4.1.1"),
46 | dict(name="dummy4", id=401, version="4.1.1"),
47 | ]
48 |
49 | # TEST zero matching
50 |
51 | # case 1
52 | with pytest.raises(FractalCacheError) as err:
53 | res = _search_in_task_list(task_list=TASK_LIST, name="dummy0")
54 | print(err.value.args[0])
55 | assert 'There is no task with name "dummy0"' in err.value.args[0] # noqa
56 |
57 | # case 2
58 | with pytest.raises(FractalCacheError) as err:
59 | res = _search_in_task_list(
60 | task_list=TASK_LIST, name="dummy1", version="3.1.4"
61 | )
62 | print(err.value.args[0])
63 | assert (
64 | 'There is no task with (name, version)=("dummy1", 3.1.4)'
65 | in err.value.args[0]
66 | ) # noqa
67 |
68 | # TEST one matching
69 | # case 1
70 | res = _search_in_task_list(task_list=TASK_LIST, name="dummy1")
71 | assert res == 101
72 | # case 2
73 | res = _search_in_task_list(
74 | task_list=TASK_LIST, name="dummy1", version="1.0.1"
75 | )
76 | assert res == 101
77 |
78 | # TEST multiple matching
79 | # case 1
80 | with pytest.raises(FractalCacheError) as err:
81 | res = _search_in_task_list(task_list=TASK_LIST, name="dummy2")
82 | print(err.value.args[0])
83 | assert "Cannot determine the latest version" in err.value.args[0]
84 | # case 2
85 | res = _search_in_task_list(task_list=TASK_LIST, name="dummy3")
86 | assert res == 302
87 | # case 3
88 | with pytest.raises(FractalCacheError) as err:
89 | res = _search_in_task_list(task_list=TASK_LIST, name="dummy4")
90 | print(err.value.args[0])
91 | assert "Multiple tasks with latest version (4.1.1)" in err.value.args[0]
92 | print(err.value.args[0])
93 | # case 4
94 | with pytest.raises(FractalCacheError) as err:
95 | res = _search_in_task_list(
96 | task_list=TASK_LIST, name="dummy4", version="4.1.1"
97 | )
98 | print(err.value.args[0])
99 | assert "Multiple tasks with version 4.1.1" in err.value.args[0]
100 |
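101 |
102 | # Summary of the disambiguation rules exercised above (as implied by the
103 | # assertions, not an authoritative spec): `_search_in_task_list` returns a
104 | # single task id, and raises `FractalCacheError` when no task matches, when
105 | # the latest version cannot be determined (a match has `version=None`), or
106 | # when several matches share the winning version. For example:
107 | #     _search_in_task_list(task_list=TASK_LIST, name="dummy3")  # -> 302
108 | #     _search_in_task_list(task_list=TASK_LIST, name="dummy2")  # raises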
--------------------------------------------------------------------------------
/tests/test_unit_versions.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from devtools import debug
3 | from packaging import version
4 |
5 | from fractal_client.cmd._aux_task_caching import _loose_version_parse
6 |
7 |
8 | # string, major, minor, micro, is_prerelease
9 | VERSIONS = [
10 | ("0.10.0c0", 0, 10, 0, True),
11 | ("0.10.0b4", 0, 10, 0, True),
12 | ("0.10.0", 0, 10, 0, False),
13 | ("0.10.0alpha3", 0, 10, 0, True),
14 | ("0.10.0a2", 0, 10, 0, True),
15 | ("1.0.0", 1, 0, 0, False),
16 | ("0.10.0a0", 0, 10, 0, True),
17 | ("1.0.0rc4.dev7", 1, 0, 0, True),
18 | ("0.10.0beta5", 0, 10, 0, True),
19 | ("0.10.0alpha0", 0, 10, 0, True),
20 | ("3.2", 3, 2, 0, False),
21 | ("2", 2, 0, 0, False),
22 | ]
23 |
24 | SORTED_VERSIONS = [
25 | "0.10.0a0",
26 | "0.10.0alpha0",
27 | "0.10.0a2",
28 | "0.10.0alpha3",
29 | "0.10.0b4",
30 | "0.10.0beta5",
31 | "0.10.0c0",
32 | "0.10.0",
33 | "1.0.0rc4.dev7",
34 | "1.0.0",
35 | "2",
36 | "3.2",
37 | ]
38 |
39 |
40 | def test_version_parsing():
41 | for (v_string, major, minor, micro, is_prerelease) in VERSIONS:
42 | v = version.parse(v_string)
43 | debug(v_string, v.major, v.minor, v.micro, v.is_prerelease, v.pre)
44 | if major is not None:
45 | assert v.major == major
46 | if minor is not None:
47 | assert v.minor == minor
48 | if micro is not None:
49 | assert v.micro == micro
50 | if is_prerelease is not None:
51 | assert v.is_prerelease == is_prerelease
52 |
53 | with pytest.raises(version.InvalidVersion):
54 | version.parse("invalid")
55 |
56 |
57 | def test_version_sorting():
58 | sorted_versions = sorted([v[0] for v in VERSIONS], key=version.parse)
59 | debug(sorted_versions)
60 | assert sorted_versions == SORTED_VERSIONS
61 |
62 |
63 | def test_max_with_loose_version_parse():
64 | versions = ["invalid_1"] + [v[0] for v in VERSIONS] + ["invalid_2"]
65 | sorted_versions = sorted(versions, key=_loose_version_parse)
66 | debug(sorted_versions)
67 | assert sorted_versions == ["invalid_1", "invalid_2"] + SORTED_VERSIONS
68 |
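69 |
70 | # `test_max_with_loose_version_parse` above shows that `_loose_version_parse`
71 | # sorts unparsable strings before every valid version. One way to obtain that
72 | # ordering, sketched here for illustration (the real implementation in
73 | # `_aux_task_caching` may differ):
74 | def _example_loose_parse(v: str):
75 |     """Sort key that places invalid version strings before valid ones."""
76 |     try:
77 |         return (1, version.parse(v))
78 |     except version.InvalidVersion:
79 |         return (0, v)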
--------------------------------------------------------------------------------
/tests/test_user.py:
--------------------------------------------------------------------------------
1 | import json
2 |
3 | import pytest
4 | from devtools import debug
5 | from fractal_server.app.security import FRACTAL_DEFAULT_GROUP_NAME
6 |
7 | PWD_USER = "1234"
8 |
9 |
10 | def test_register_as_user(invoke, caplog):
11 | with pytest.raises(SystemExit):
12 | invoke("user register aaa bbb")
13 | debug(caplog.text)
14 | assert "403" in caplog.text
15 |
16 |
17 | @pytest.mark.parametrize("is_superuser", [True, False])
18 | def test_register_as_superuser(
19 | invoke_as_superuser, is_superuser: bool, new_name
20 | ):
21 | EMAIL_USER = f"{new_name()}@example.org"
22 | if is_superuser:
23 | res = invoke_as_superuser(
24 | f"user register {EMAIL_USER} {PWD_USER} --superuser"
25 | )
26 | debug(res.data)
27 | assert res.retcode == 0
28 | assert res.data["is_superuser"]
29 | else:
30 | res = invoke_as_superuser(
31 | f"user register {EMAIL_USER} {PWD_USER} "
32 | "--slurm-user SOMETHING --username X"
33 | )
34 | debug(res.data)
35 | assert res.retcode == 0
36 | assert not res.data["is_superuser"]
37 | assert res.data["email"] == EMAIL_USER
38 |
39 | # Test that new user is verified (note: for the moment we don't expose the
40 | # possibility of registering a non-verified user)
41 | assert res.data["is_verified"]
42 |
43 |
44 | def test_register_with_ssh_settings(invoke_as_superuser, new_name, tmp_path):
45 |
46 | EMAIL_USER = f"{new_name()}@example.org"
47 |
48 | with pytest.raises(SystemExit, match="File does not exist"):
49 | invoke_as_superuser(
50 | f"user register {EMAIL_USER} {PWD_USER} --ssh-settings-json xy.z"
51 | )
52 |
53 | invalid_json = tmp_path / "not-a-json.foo"
54 | with invalid_json.open("w") as f:
55 | f.write("hello world")
56 | with pytest.raises(SystemExit, match="not a valid JSON"):
57 | invoke_as_superuser(
58 | f"user register {EMAIL_USER} {PWD_USER} "
59 | f"--ssh-settings-json {invalid_json}"
60 | )
61 |
62 | invalid_key_json = tmp_path / "invalid-key.json"
63 | with invalid_key_json.open("w") as f:
64 | json.dump(dict(invalid="invalid"), f)
65 | with pytest.raises(SystemExit, match="Invalid key"):
66 | invoke_as_superuser(
67 | f"user register {EMAIL_USER} {PWD_USER} "
68 | f"--ssh-settings-json {invalid_key_json}"
69 | )
70 |
71 | valid_json = tmp_path / "ssh-config.json"
72 | with valid_json.open("w") as f:
73 | json.dump(
74 | dict(
75 | ssh_host="SSH_HOST",
76 | ssh_private_key_path="/SSH_PRIVATE_KEY_PATH",
77 | ),
78 | f,
79 | )
80 | PROJECT_DIR = "/somewhere/"
81 | res = invoke_as_superuser(
82 | f"user register {EMAIL_USER} {PWD_USER} "
83 | f"--project-dir {PROJECT_DIR} "
84 | f"--ssh-settings-json {valid_json}"
85 | )
86 | assert res.retcode == 0
87 | assert res.data["settings"]["project_dir"] == PROJECT_DIR
88 | assert res.data["settings"]["ssh_host"] == "SSH_HOST"
89 | assert res.data["settings"]["ssh_private_key_path"] == (
90 | "/SSH_PRIVATE_KEY_PATH"
91 | )
92 |
93 |
94 | def test_register_as_superuser_with_batch(invoke_as_superuser, new_name):
95 | EMAIL_USER = f"{new_name()}@example.org"
96 | # Register a user with the --batch flag
97 | res = invoke_as_superuser(f"--batch user register {EMAIL_USER} {PWD_USER}")
98 | user_id = res.data
99 | debug(user_id)
100 | assert res.retcode == 0
101 | # Check that the user exists
102 | res = invoke_as_superuser(f"user show {user_id}")
103 | debug(res.data)
104 | assert res.data["email"] == EMAIL_USER
105 | assert res.retcode == 0
106 |
107 |
108 | def test_list_as_user(invoke, caplog):
109 | with pytest.raises(SystemExit):
110 | invoke("user list")
111 | debug(caplog.text)
112 | assert "403" in caplog.text
113 |
114 |
115 | def test_list_as_superuser(invoke_as_superuser, superuser, tester):
116 | res = invoke_as_superuser("user list")
117 | debug(res.data)
118 | assert res.retcode == 0
119 | list_emails = [user["email"] for user in res.data]
120 | debug(list_emails)
121 | assert superuser["email"] in list_emails
122 | assert tester["email"] in list_emails
123 |
124 |
125 | def test_show_as_user(invoke, invoke_as_superuser, caplog, new_name):
126 | EMAIL_USER = f"{new_name()}@example.org"
127 | # Register a new user
128 | res = invoke_as_superuser(f"user register {EMAIL_USER} {PWD_USER}")
129 | user_id = res.data["id"]
130 | # Call fractal user show
131 | with pytest.raises(SystemExit):
132 | invoke(f"user show {user_id}")
133 | debug(caplog.text)
134 | assert "403" in caplog.text
135 |
136 |
137 | def test_show_as_superuser(invoke_as_superuser, new_name):
138 | EMAIL_USER = f"{new_name()}@example.org"
139 | # Register a new user
140 | res = invoke_as_superuser(f"user register {EMAIL_USER} {PWD_USER}")
141 | user_id = res.data["id"]
142 | # Call fractal user show
143 | invoke_as_superuser(f"user show {user_id}")
144 | debug(res.data)
145 | assert res.retcode == 0
146 | assert res.data["email"] == EMAIL_USER
147 |
148 |
149 | def test_edit_as_user(invoke, invoke_as_superuser, caplog, new_name):
150 | EMAIL_USER = f"{new_name()}@example.org"
151 | # Register a new user
152 | res = invoke_as_superuser(f"user register {EMAIL_USER} {PWD_USER}")
153 | user_id = res.data["id"]
154 | # Call fractal user edit
155 | with pytest.raises(SystemExit):
156 | res = invoke(
157 | f"user edit {user_id} "
158 | "--new-email email@something.xy --make-verified"
159 | )
160 | debug(caplog.text)
161 | assert "403" in caplog.text
162 |
163 |
164 | @pytest.mark.parametrize("new_is_superuser", [True, False])
165 | @pytest.mark.parametrize("new_is_verified", [True, False])
166 | @pytest.mark.parametrize("new_is_non_verified", [True, False])
167 | def test_edit_as_superuser(
168 | invoke_as_superuser,
169 | new_is_superuser,
170 | new_is_verified,
171 | new_is_non_verified,
172 | new_name,
173 | ):
174 | EMAIL_USER = f"{new_name()}@example.org"
175 | # Register a new user
176 | res = invoke_as_superuser(f"user register {EMAIL_USER} {PWD_USER}")
177 | assert res.retcode == 0
178 | user_id = res.data["id"]
179 | # Call fractal user edit
180 | NEW_EMAIL = f"{new_name()}@example.org"
181 | NEW_SLURM_USER = "new_slurm"
182 | NEW_USERNAME = "new_username"
183 | cmd = (
184 | f"user edit {user_id} "
185 | f"--new-email {NEW_EMAIL} "
186 | f"--new-password SOMETHING "
187 | f"--new-slurm-user {NEW_SLURM_USER} "
188 | f"--new-username {NEW_USERNAME} "
189 | )
190 | if new_is_superuser:
191 | cmd = f"{cmd} --make-superuser"
192 | if new_is_verified:
193 | cmd = f"{cmd} --make-verified"
194 | if new_is_non_verified:
195 | cmd = f"{cmd} --remove-verified"
196 |
197 | if new_is_verified and new_is_non_verified:
198 | with pytest.raises(SystemExit):
199 | invoke_as_superuser(cmd)
200 | elif new_is_verified or new_is_non_verified:
201 | res = invoke_as_superuser(cmd)
202 | assert res.retcode == 0
203 | assert res.data["email"] == NEW_EMAIL
204 | assert res.data["username"] == NEW_USERNAME
205 | assert res.data["is_superuser"] == new_is_superuser
206 |         assert res.data["is_verified"] == new_is_verified
211 | assert res.data["settings"]["slurm_user"] == NEW_SLURM_USER
212 | else:
213 | res = invoke_as_superuser(cmd)
214 | assert res.retcode == 1
215 | assert res.data == (
216 | "Cannot use `--new-email` without `--make-verified` or "
217 | "`--remove-verified`"
218 | )
219 |
220 | # If the user was made a superuser, check that we can go back to normal
221 | # user
222 | if new_is_superuser:
223 | cmd = f"user edit {user_id} --remove-superuser"
224 | debug(cmd)
225 | res = invoke_as_superuser(cmd)
226 | debug(res.data)
227 | assert res.retcode == 0
228 | assert not res.data["is_superuser"]
229 |
230 |     # If the user was made non-verified, check that we can make them
231 |     # verified again
232 | if new_is_non_verified:
233 | cmd = f"user edit {user_id} --make-verified"
234 | debug(cmd)
235 | res = invoke_as_superuser(cmd)
236 | debug(res.data)
237 | assert res.retcode == 0
238 | assert res.data["is_verified"]
239 |
240 |
241 | def test_edit_user_settings(invoke_as_superuser, tmp_path, new_name):
242 | EMAIL_USER = f"{new_name()}@example.org"
243 |
244 | EMPTY_USER_SETTINGS = {
245 | "ssh_host": None,
246 | "ssh_username": None,
247 | "ssh_private_key_path": None,
248 | "ssh_tasks_dir": None,
249 | "ssh_jobs_dir": None,
250 | "slurm_user": None,
251 | "slurm_accounts": [],
252 | "project_dir": None,
253 | }
254 | SSH_HOST = "something.somewhere"
255 | SSH_PRIVATE_KEY_PATH = "/tmp/something.key"
256 | NEW_PROJECT_DIR = "/somewhere/else/"
257 | NEW_USER_SETTINGS = {
258 | "ssh_host": SSH_HOST,
259 | "ssh_username": None,
260 | "ssh_private_key_path": SSH_PRIVATE_KEY_PATH,
261 | "ssh_tasks_dir": None,
262 | "ssh_jobs_dir": None,
263 | "slurm_user": None,
264 | "slurm_accounts": [],
265 | "project_dir": NEW_PROJECT_DIR,
266 | }
267 |
268 | # Register a new user
269 | res = invoke_as_superuser(f"user register {EMAIL_USER} {PWD_USER}")
270 | assert res.retcode == 0
271 | user_id = res.data["id"]
272 |
273 | # Check empty user settings
274 | res = invoke_as_superuser(f"user show {user_id}")
275 | assert res.retcode == 0
276 | user_settings = {
277 | key: value
278 | for key, value in res.data["settings"].items()
279 | if key != "id"
280 | }
281 | debug(user_settings)
282 | assert user_settings == EMPTY_USER_SETTINGS
283 |
284 | # Call fractal user edit
285 | ssh_settings_file = tmp_path / "ssh.json"
286 | with ssh_settings_file.open("w") as f:
287 | json.dump(
288 | {
289 | "ssh_host": SSH_HOST,
290 | "ssh_private_key_path": SSH_PRIVATE_KEY_PATH,
291 | },
292 | f,
293 | )
294 | cmd = (
295 | f"user edit {user_id} "
296 | f"--new-project-dir {NEW_PROJECT_DIR} "
297 | f"--new-ssh-settings-json {ssh_settings_file.as_posix()}"
298 | )
299 | res = invoke_as_superuser(cmd)
300 | assert res.retcode == 0
301 | debug(res.data)
302 |
303 | # Check edited user settings
304 | res = invoke_as_superuser(f"user show {user_id}")
305 | assert res.retcode == 0
306 | user_settings = {
307 | key: value
308 | for key, value in res.data["settings"].items()
309 | if key != "id"
310 | }
311 | debug(user_settings)
312 | assert user_settings == NEW_USER_SETTINGS
313 |
314 | # Failure due to missing file
315 | ssh_settings_file = tmp_path / "invalid-ssh.json"
316 | cmd = (
317 | f"user edit {user_id} "
318 | f"--new-ssh-settings-json {ssh_settings_file.as_posix()}"
319 | )
320 | with pytest.raises(SystemExit, match="File does not exist."):
321 | res = invoke_as_superuser(cmd)
322 |
323 | # Failure due to file not being a valid JSON
324 | invalid_json = tmp_path / "invalid-json.foo"
325 | with invalid_json.open("w") as f:
326 | f.write("hello world")
327 | cmd = (
328 | f"user edit {user_id} "
329 | f"--new-ssh-settings-json {invalid_json.as_posix()}"
330 | )
331 | with pytest.raises(SystemExit, match="not a valid JSON"):
332 | res = invoke_as_superuser(cmd)
333 |
334 | # Failure due to invalid keys
335 | ssh_settings_file = tmp_path / "invalid-ssh.json"
336 | with ssh_settings_file.open("w") as f:
337 | json.dump(
338 | dict(invalid="invalid"),
339 | f,
340 | )
341 | cmd = (
342 | f"user edit {user_id} "
343 | f"--new-ssh-settings-json {ssh_settings_file.as_posix()}"
344 | )
345 | with pytest.raises(SystemExit, match="Invalid key"):
346 | res = invoke_as_superuser(cmd)
347 |
348 |
349 | def test_edit_arguments(invoke_as_superuser):
350 | # Test that superuser flags are mutually exclusive
351 | with pytest.raises(SystemExit):
352 | cmd = "user edit SOME_USER_ID --make-superuser --remove-superuser"
353 | invoke_as_superuser(cmd)
354 |
355 |
356 | def test_whoami_as_user(invoke, tester):
357 | res = invoke("user whoami")
358 | assert res.retcode == 0
359 | debug(res.data)
360 | assert res.data["email"] == tester["email"]
361 | assert not res.data["is_superuser"]
362 | user_id = res.data["id"]
363 |
364 | # Test user whoami with --batch flag
365 | res = invoke("--batch user whoami")
366 | debug(res.data)
367 | assert res.data == user_id
368 | assert res.retcode == 0
369 |
370 |
371 | def test_whoami_as_superuser(invoke_as_superuser, superuser):
372 | res = invoke_as_superuser("user whoami")
373 | assert res.retcode == 0
374 | debug(res.data)
375 | assert res.data["email"] == superuser["email"]
376 | assert res.data["is_superuser"]
377 |
378 |
379 | def test_user_set_groups(invoke_as_superuser, user_factory, new_name):
380 | # get default group
381 | res = invoke_as_superuser("group list --user-ids")
382 | default_group = next(
383 | group
384 | for group in res.data
385 | if group["name"] == FRACTAL_DEFAULT_GROUP_NAME
386 | )
387 | default_group_id = default_group["id"]
388 | # create 2 new users
389 | user1 = user_factory(email=f"{new_name()}@example.org", password="psw1")
390 | assert len(user1["group_ids_names"]) == 1
391 | user1_id = user1["id"]
392 | user2 = user_factory(email=f"{new_name()}@example.org", password="psw2")
393 | assert len(user2["group_ids_names"]) == 1
394 | user2_id = user2["id"]
395 | # create 2 new groups
396 | group1 = invoke_as_superuser(f"group new {new_name()}")
397 | assert len(group1.data["user_ids"]) == 0
398 | group1_id = group1.data["id"]
399 | group2 = invoke_as_superuser(f"group new {new_name()}")
400 | assert len(group2.data["user_ids"]) == 0
401 | group2_id = group2.data["id"]
402 |
403 | with pytest.raises(SystemExit):
404 | # no arguments
405 | invoke_as_superuser("user set-groups")
406 | with pytest.raises(SystemExit):
407 | # no group_ids list
408 | invoke_as_superuser(f"user set-groups {user1_id}")
409 | with pytest.raises(SystemExit):
410 | # group_ids must be a list of integers
411 | invoke_as_superuser(f"user set-groups {user1_id} {group1_id} foo")
412 | with pytest.raises(SystemExit):
413 | # there must always be the default group id in group_ids
414 | invoke_as_superuser(f"user set-groups {user1_id} {group1_id}")
415 | with pytest.raises(SystemExit):
416 | # repeated elements in group_ids are forbidden
417 | invoke_as_superuser(
418 | "user set-groups "
419 | f"{user1_id} {default_group_id} {group1_id} {group1_id}"
420 | )
421 |
422 | # Add user1 to group1
423 | res = invoke_as_superuser(
424 | f"user set-groups {user1_id} {group1_id} {default_group_id}"
425 | )
426 | assert len(res.data["group_ids_names"]) == 2
427 | group1 = invoke_as_superuser(f"group get {group1_id}")
428 | assert len(group1.data["user_ids"]) == 1
429 |
430 | # Add user2 to group1 and group2
431 | res = invoke_as_superuser(
432 | "user set-groups "
433 | f"{user2_id} {group2_id} {group1_id} {default_group_id}"
434 | )
435 | assert len(res.data["group_ids_names"]) == 3
436 | group1 = invoke_as_superuser(f"group get {group1_id}")
437 | assert len(group1.data["user_ids"]) == 2
438 | group2 = invoke_as_superuser(f"group get {group2_id}")
439 | assert len(group2.data["user_ids"]) == 1
440 |
441 | # Add user1 to group2 and remove them from group1
442 | res = invoke_as_superuser(
443 | f"user set-groups {user1_id} {group2_id} {default_group_id}"
444 | )
445 | assert len(res.data["group_ids_names"]) == 2
446 | group1 = invoke_as_superuser(f"group get {group1_id}")
447 | assert len(group1.data["user_ids"]) == 1
448 | group2 = invoke_as_superuser(f"group get {group2_id}")
449 | assert len(group2.data["user_ids"]) == 2
450 |
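451 |
452 | # As the assertions above imply, `user set-groups` replaces the user's whole
453 | # group list rather than appending to it: leaving group1_id out of the final
454 | # call removed user1 from group1. The id list must include the default group
455 | # and must not contain duplicates. Illustrative call shape:
456 | #     fractal user set-groups USER_ID DEFAULT_GROUP_ID OTHER_GROUP_ID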
--------------------------------------------------------------------------------