├── .git-blame-ignore-revs
├── .github
│   └── workflows
│       ├── deploy.yaml
│       ├── lint.yaml
│       └── test.yaml
├── .gitignore
├── .isort.cfg
├── .readthedocs.yaml
├── ABOUT.rst
├── CHANGELOG.md
├── CITATION
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── LICENSE
├── MANIFEST.in
├── Makefile
├── README.rst
├── bioblend
│   ├── __init__.py
│   ├── _tests
│   │   ├── GalaxyTestBase.py
│   │   ├── README.TXT
│   │   ├── TestGalaxyConfig.py
│   │   ├── TestGalaxyDatasetCollections.py
│   │   ├── TestGalaxyDatasets.py
│   │   ├── TestGalaxyFolders.py
│   │   ├── TestGalaxyGroups.py
│   │   ├── TestGalaxyHistories.py
│   │   ├── TestGalaxyInstance.py
│   │   ├── TestGalaxyInvocations.py
│   │   ├── TestGalaxyJobs.py
│   │   ├── TestGalaxyLibraries.py
│   │   ├── TestGalaxyObjects.py
│   │   ├── TestGalaxyQuotas.py
│   │   ├── TestGalaxyRoles.py
│   │   ├── TestGalaxyToolContainerResolution.py
│   │   ├── TestGalaxyToolData.py
│   │   ├── TestGalaxyToolDependencies.py
│   │   ├── TestGalaxyToolInputs.py
│   │   ├── TestGalaxyToolShed.py
│   │   ├── TestGalaxyTools.py
│   │   ├── TestGalaxyUsers.py
│   │   ├── TestGalaxyWorkflows.py
│   │   ├── TestToolshed.py
│   │   ├── __init__.py
│   │   ├── data
│   │   │   ├── 1.bed
│   │   │   ├── Galaxy-History-Test-history-for-export.tar.gz
│   │   │   ├── paste_columns.ga
│   │   │   ├── paste_columns_collections.ga
│   │   │   ├── paste_columns_subworkflow.ga
│   │   │   ├── select_first.ga
│   │   │   ├── test_workflow_pause.ga
│   │   │   └── workflow_with_parameter_input.ga
│   │   ├── pytest_galaxy_test_wrapper.py
│   │   ├── template_galaxy.ini
│   │   ├── template_galaxy.yml
│   │   └── test_util.py
│   ├── config.py
│   ├── galaxy
│   │   ├── __init__.py
│   │   ├── client.py
│   │   ├── config
│   │   │   └── __init__.py
│   │   ├── container_resolution
│   │   │   └── __init__.py
│   │   ├── dataset_collections
│   │   │   └── __init__.py
│   │   ├── datasets
│   │   │   └── __init__.py
│   │   ├── datatypes
│   │   │   └── __init__.py
│   │   ├── folders
│   │   │   └── __init__.py
│   │   ├── forms
│   │   │   └── __init__.py
│   │   ├── ftpfiles
│   │   │   └── __init__.py
│   │   ├── genomes
│   │   │   └── __init__.py
│   │   ├── groups
│   │   │   └── __init__.py
│   │   ├── histories
│   │   │   └── __init__.py
│   │   ├── invocations
│   │   │   └── __init__.py
│   │   ├── jobs
│   │   │   └── __init__.py
│   │   ├── libraries
│   │   │   └── __init__.py
│   │   ├── objects
│   │   │   ├── __init__.py
│   │   │   ├── client.py
│   │   │   ├── galaxy_instance.py
│   │   │   └── wrappers.py
│   │   ├── quotas
│   │   │   └── __init__.py
│   │   ├── roles
│   │   │   └── __init__.py
│   │   ├── tool_data
│   │   │   └── __init__.py
│   │   ├── tool_dependencies
│   │   │   └── __init__.py
│   │   ├── tools
│   │   │   ├── __init__.py
│   │   │   └── inputs.py
│   │   ├── toolshed
│   │   │   └── __init__.py
│   │   ├── users
│   │   │   └── __init__.py
│   │   ├── visual
│   │   │   └── __init__.py
│   │   └── workflows
│   │       └── __init__.py
│   ├── galaxyclient.py
│   ├── py.typed
│   ├── toolshed
│   │   ├── __init__.py
│   │   ├── categories
│   │   │   └── __init__.py
│   │   ├── repositories
│   │   │   └── __init__.py
│   │   └── tools
│   │       └── __init__.py
│   └── util
│       └── __init__.py
├── docs
│   ├── Makefile
│   ├── _static
│   │   └── .empty
│   ├── api_docs
│   │   ├── galaxy
│   │   │   ├── all.rst
│   │   │   └── docs.rst
│   │   ├── lib_config.rst
│   │   └── toolshed
│   │       └── all.rst
│   ├── conf.py
│   ├── examples
│   │   ├── create_user_get_api_key.py
│   │   ├── list_data_libraries.py
│   │   ├── list_histories.py
│   │   ├── list_workflows.py
│   │   ├── objects
│   │   │   ├── README.txt
│   │   │   ├── __init__.py
│   │   │   ├── common.py
│   │   │   ├── list_data_libraries.py
│   │   │   ├── list_histories.py
│   │   │   ├── list_workflows.py
│   │   │   ├── small.ga
│   │   │   ├── small.py
│   │   │   ├── w2_bacterial_reseq.py
│   │   │   ├── w3_bacterial_denovo.py
│   │   │   ├── w5_galaxy_api.py
│   │   │   └── w5_metagenomics.py
│   │   ├── run_imported_workflow.py
│   │   └── tophat_cufflinks_pairedend_workflow.ga
│   ├── index.rst
│   └── requirements.txt
├── pyproject.toml
├── pytest.ini
├── run_bioblend_tests.sh
├── run_galaxy.sh
├── setup.cfg
├── setup.py
├── tests
└── tox.ini
/.git-blame-ignore-revs:
--------------------------------------------------------------------------------
1 | # Format Python code with black and isort
2 | 7bcd07db8392ac790d1b0b92f4a377945197e43d
3 |
--------------------------------------------------------------------------------
/.github/workflows/deploy.yaml:
--------------------------------------------------------------------------------
1 | name: Deploy
2 | on: [push, pull_request]
3 | jobs:
4 |   build_packages:
5 |     runs-on: ubuntu-latest
6 |     steps:
7 |       - uses: actions/checkout@v4
8 |       - uses: actions/setup-python@v5
9 |         with:
10 |           python-version: '3.13'
11 |       - name: Install tox
12 |         run: |
13 |           python3 -m pip install 'tox>=1.8.0'
14 |       - name: Create and check sdist and wheel packages
15 |         run: tox -e build
16 |       - uses: actions/upload-artifact@v4
17 |         with:
18 |           name: packages
19 |           path: dist/
20 |   pypi-publish:
21 |     needs: [build_packages]
22 |     name: Upload release to PyPI
23 |     runs-on: ubuntu-latest
24 |     permissions:
25 |       id-token: write
26 |     if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') && github.repository_owner == 'galaxyproject'
27 |     steps:
28 |       - uses: actions/download-artifact@v4
29 |         with:
30 |           name: packages
31 |           path: dist
32 |       - name: Publish to PyPI
33 |         uses: pypa/gh-action-pypi-publish@release/v1
34 |
--------------------------------------------------------------------------------
/.github/workflows/lint.yaml:
--------------------------------------------------------------------------------
1 | name: Lint
2 | on: [push, pull_request]
3 | concurrency:
4 |   group: lint-${{ github.ref }}
5 |   cancel-in-progress: true
6 | jobs:
7 |   lint:
8 |     runs-on: ubuntu-latest
9 |     strategy:
10 |       matrix:
11 |         python-version: ['3.9', '3.13']
12 |     steps:
13 |       - uses: actions/checkout@v4
14 |       - uses: actions/setup-python@v5
15 |         with:
16 |           python-version: ${{ matrix.python-version }}
17 |       - name: Install tox
18 |         run: python -m pip install 'tox>=1.8.0'
19 |       - name: Lint
20 |         run: tox -e lint
21 |
--------------------------------------------------------------------------------
/.github/workflows/test.yaml:
--------------------------------------------------------------------------------
1 | name: Tests
2 | on:
3 |   push:
4 |   pull_request:
5 |   schedule:
6 |     # Run at midnight UTC every Tuesday
7 |     - cron: '0 0 * * 2'
8 | concurrency:
9 |   group: test-${{ github.ref }}
10 |   cancel-in-progress: true
11 | jobs:
12 |   test:
13 |     if: github.event_name != 'schedule' || github.repository_owner == 'galaxyproject'
14 |     runs-on: ${{ matrix.os }}
15 |     services:
16 |       postgres:
17 |         image: postgres
18 |         # Provide the password for postgres
19 |         env:
20 |           POSTGRES_PASSWORD: postgres
21 |         # Set health checks to wait until postgres has started
22 |         options: >-
23 |           --health-cmd pg_isready
24 |           --health-interval 10s
25 |           --health-timeout 5s
26 |           --health-retries 5
27 |         ports:
28 |           - 5432:5432
29 |     strategy:
30 |       fail-fast: false
31 |       matrix:
32 |         include:
33 |           - os: ubuntu-latest
34 |             tox_env: py39
35 |             galaxy_version: dev
36 |             galaxy_python_version: '3.9'
37 |           - os: ubuntu-latest
38 |             tox_env: py39
39 |             galaxy_version: release_25.0
40 |             galaxy_python_version: '3.9'
41 |           - os: ubuntu-latest
42 |             tox_env: py39
43 |             galaxy_version: release_24.2
44 |             galaxy_python_version: '3.8'
45 |           - os: ubuntu-latest
46 |             tox_env: py39
47 |             galaxy_version: release_24.1
48 |             galaxy_python_version: '3.8'
49 |           - os: ubuntu-latest
50 |             tox_env: py39
51 |             galaxy_version: release_24.0
52 |             galaxy_python_version: '3.8'
53 |           # Python 3.7 is not available via setup-python on ubuntu >=24.04
54 |           - os: ubuntu-22.04
55 |             tox_env: py39
56 |             galaxy_version: release_23.2
57 |             galaxy_python_version: '3.7'
58 |           - os: ubuntu-22.04
59 |             tox_env: py39
60 |             galaxy_version: release_23.1
61 |             galaxy_python_version: '3.7'
62 |           - os: ubuntu-22.04
63 |             tox_env: py39
64 |             galaxy_version: release_23.0
65 |             galaxy_python_version: '3.7'
66 |           - os: ubuntu-22.04
67 |             tox_env: py39
68 |             galaxy_version: release_22.05
69 |             galaxy_python_version: '3.7'
70 |           - os: ubuntu-22.04
71 |             tox_env: py39
72 |             galaxy_version: release_22.01
73 |             galaxy_python_version: '3.7'
74 |           # The minimum Python supported version by the following releases is
75 |           # 3.6, but it is EOL
76 |           - os: ubuntu-22.04
77 |             tox_env: py39
78 |             galaxy_version: release_21.09
79 |             galaxy_python_version: '3.7'
80 |           - os: ubuntu-22.04
81 |             tox_env: py39
82 |             galaxy_version: release_21.05
83 |             galaxy_python_version: '3.7'
84 |           - os: ubuntu-22.04
85 |             tox_env: py39
86 |             galaxy_version: release_21.01
87 |             galaxy_python_version: '3.7'
88 |           # The minimum Python supported version by the following releases is
89 |           # 3.5, but it is EOL
90 |           - os: ubuntu-22.04
91 |             tox_env: py39
92 |             galaxy_version: release_20.09
93 |             galaxy_python_version: '3.7'
94 |           - os: ubuntu-22.04
95 |             tox_env: py39
96 |             galaxy_version: release_20.05
97 |             galaxy_python_version: '3.7'
98 |           # The minimum Python supported version by the following releases is
99 |           # 2.7, but it is EOL
100 |           - os: ubuntu-22.04
101 |             tox_env: py39
102 |             galaxy_version: release_20.01
103 |             galaxy_python_version: '3.7'
104 |           - os: ubuntu-22.04
105 |             tox_env: py39
106 |             galaxy_version: release_19.09
107 |             galaxy_python_version: '3.7'
108 |           - os: ubuntu-22.04
109 |             tox_env: py39
110 |             galaxy_version: release_19.05
111 |             galaxy_python_version: '3.7'
112 |           - os: ubuntu-latest
113 |             tox_env: py313
114 |             galaxy_version: dev
115 |             galaxy_python_version: '3.9'
116 |           # Cannot test on macOS because service containers are not supported
117 |           # yet: https://github.community/t/github-actions-services-available-on-others-vms/16916
118 |           # - os: macos-latest
119 |           #   tox_env: py39
120 |           #   galaxy_version: dev
121 |           #   galaxy_python_version: '3.8'
122 |     steps:
123 |       - uses: actions/checkout@v4
124 |       - name: Cache pip dir
125 |         uses: actions/cache@v4
126 |         with:
127 |           path: ~/.cache/pip
128 |           key: pip-cache-${{ matrix.tox_env }}-${{ matrix.galaxy_version }}
129 |       - name: Calculate Python version for BioBlend from tox_env
130 |         id: get_bioblend_python_version
131 |         run: echo "bioblend_python_version=$(echo "${{ matrix.tox_env }}" | sed -e 's/^py\([3-9]\)\([0-9]\+\)/\1.\2/')" >> $GITHUB_OUTPUT
132 |       - name: Set up Python for BioBlend
133 |         uses: actions/setup-python@v5
134 |         with:
135 |           python-version: ${{ steps.get_bioblend_python_version.outputs.bioblend_python_version }}
136 |       - name: Install tox
137 |         run: |
138 |           python3 -m pip install --upgrade pip setuptools
139 |           python3 -m pip install 'tox>=1.8.0'
140 |       - name: Set up Python for Galaxy
141 |         uses: actions/setup-python@v5
142 |         with:
143 |           python-version: ${{ matrix.galaxy_python_version }}
144 |       - name: Run tests
145 |         env:
146 |           PGPASSWORD: postgres
147 |           PGPORT: 5432
148 |           PGHOST: localhost
149 |         run: |
150 |           # Create a PostgreSQL database for Galaxy. The default SQLite3 database makes test fail randomly because of "database locked" error.
151 |           createdb -U postgres galaxy
152 |           # Run ToolShed tests only once per Python version
153 |           if [ "${{ matrix.galaxy_version }}" = 'dev' ]; then
154 |             export BIOBLEND_TOOLSHED_URL=https://testtoolshed.g2.bx.psu.edu/
155 |           fi
156 |           # Install Galaxy
157 |           GALAXY_DIR=galaxy-${{ matrix.galaxy_version }}
158 |           git clone --depth=1 -b ${{ matrix.galaxy_version }} https://github.com/galaxyproject/galaxy $GALAXY_DIR
159 |           export DATABASE_CONNECTION=postgresql://postgres:@localhost/galaxy
160 |           ./run_bioblend_tests.sh -g $GALAXY_DIR -v python${{ matrix.galaxy_python_version }} -e ${{ matrix.tox_env }}
161 |       - name: The job has failed
162 |         if: ${{ failure() }}
163 |         run: |
164 |           cat galaxy-${{ matrix.galaxy_version }}/*.log
165 |
--------------------------------------------------------------------------------
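
The "Calculate Python version for BioBlend from tox_env" step in test.yaml above turns a tox environment name such as `py39` into a Python version string such as `3.9`. A minimal Python sketch of the same mapping (the function name is hypothetical, introduced here only for illustration):

    import re

    def bioblend_python_version(tox_env: str) -> str:
        # Mirrors the workflow's sed expression: "py39" -> "3.9", "py313" -> "3.13"
        match = re.fullmatch(r"py([3-9])([0-9]+)", tox_env)
        assert match is not None, f"unexpected tox env: {tox_env}"
        return f"{match.group(1)}.{match.group(2)}"

    assert bioblend_python_version("py39") == "3.9"
    assert bioblend_python_version("py313") == "3.13"
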
/.gitignore:
--------------------------------------------------------------------------------
1 | *.py[co]
2 | *~
3 | # Packages
4 | *.egg
5 | *.egg-info
6 | dist
7 | build
8 | eggs
9 | parts
10 | bin
11 | var
12 | sdist
13 | develop-eggs
14 | .installed.cfg
15 | .eggs
16 |
17 | # Installer logs
18 | pip-log.txt
19 |
20 | # Unit test / coverage reports
21 | .coverage
22 | .tox
23 |
24 | #Translations
25 | *.mo
26 |
27 | #Mr Developer
28 | .mr.developer.cfg
29 |
30 | #Vim
31 | *.swp
32 |
33 | #Code coverage
34 | cover
35 |
36 | #eclipse/pydev
37 | .project
38 | .pydevproject
39 | .idea
40 |
41 | # compiled docs
42 | docs/_build
43 |
44 | # Python virtual environment
45 | .venv
46 |
--------------------------------------------------------------------------------
/.isort.cfg:
--------------------------------------------------------------------------------
1 | [settings]
2 | combine_as_imports=true
3 | force_alphabetical_sort_within_sections=true
4 | # Override force_grid_wrap value from profile=black, but black is still happy
5 | force_grid_wrap=2
6 | # Same line length as for black
7 | line_length=120
8 | no_lines_before=LOCALFOLDER
9 | profile=black
10 | reverse_relative=true
11 | skip_gitignore=true
12 |
--------------------------------------------------------------------------------
/.readthedocs.yaml:
--------------------------------------------------------------------------------
1 | # .readthedocs.yaml
2 | # Read the Docs configuration file
3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
4 |
5 | # Required
6 | version: 2
7 |
8 | # Set the OS, Python version and other tools you might need
9 | build:
10 |   os: ubuntu-22.04
11 |   tools:
12 |     python: "3.11"
13 |
14 | # Build documentation in the docs/ directory with Sphinx
15 | sphinx:
16 |   configuration: docs/conf.py
17 |
18 | # Optionally build your docs in additional formats such as PDF and ePub
19 | formats:
20 |   - pdf
21 |
22 | # Optional but recommended, declare the Python requirements required
23 | # to build your documentation
24 | # See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
25 | python:
26 |   install:
27 |     - path: .
28 |     - requirements: docs/requirements.txt
29 |
--------------------------------------------------------------------------------
/ABOUT.rst:
--------------------------------------------------------------------------------
1 | `BioBlend <https://bioblend.readthedocs.io/>`_ is a Python library for
2 | interacting with the `Galaxy`_ API.
3 |
4 | BioBlend is supported and tested on:
5 |
6 | - Python 3.9 - 3.13
7 | - Galaxy release 19.05 and later.
8 |
9 | BioBlend's goal is to make it easier to script and automate the running of
10 | Galaxy analyses and administering of a Galaxy server.
11 | In practice, it makes it possible to do things like this:
12 |
13 | - Interact with Galaxy via a straightforward API::
14 |
15 |     from bioblend.galaxy import GalaxyInstance
16 |     gi = GalaxyInstance('<Galaxy IP>', key='your API key')
17 |     libs = gi.libraries.get_libraries()
18 |     gi.workflows.show_workflow('workflow ID')
19 |     wf_invocation = gi.workflows.invoke_workflow('workflow ID', inputs)
20 |
21 | - Interact with Galaxy via an object-oriented API::
22 |
23 |     from bioblend.galaxy.objects import GalaxyInstance
24 |     gi = GalaxyInstance("URL", "API_KEY")
25 |     wf = gi.workflows.list()[0]
26 |     hist = gi.histories.list()[0]
27 |     inputs = hist.get_datasets()[:2]
28 |     input_map = dict(zip(wf.input_labels, inputs))
29 |     params = {"Paste1": {"delimiter": "U"}}
30 |     wf_invocation = wf.invoke(input_map, params=params)
31 |
32 | About the library name
33 | ~~~~~~~~~~~~~~~~~~~~~~
34 |
35 | The library was originally called just ``Blend`` but we
36 | `renamed it `_
37 | to reflect more of its domain and make it a bit more unique so it is easier to find.
38 | The name was intended to be short and easily pronounceable. In its original
39 | implementation, the goal was to provide a lot more support for `CloudMan`_
40 | and other integration capabilities, allowing them to be *blended* together
41 | via code. ``BioBlend`` fitted the bill.
42 |
43 | .. References/hyperlinks used above
44 | .. _CloudMan: https://galaxyproject.org/cloudman/
45 | .. _Galaxy: https://galaxyproject.org/
46 |
--------------------------------------------------------------------------------
/CITATION:
--------------------------------------------------------------------------------
1 | If you use BioBlend in your published work, please cite the following article:
2 |
3 | - Clare Sloggett, Nuwan Goonasekera, Enis Afgan "BioBlend: automating pipeline
4 |   analyses within Galaxy and CloudMan"
5 |   Bioinformatics (2013) 29(13):1685-1686 doi:10.1093/bioinformatics/btt199
6 |
7 | BibTeX format:
8 |
9 | @article{10.1093/bioinformatics/btt199,
10 |     author = {Sloggett, Clare and Goonasekera, Nuwan and Afgan, Enis},
11 |     doi = {10.1093/bioinformatics/btt199},
12 |     journal = {Bioinformatics},
13 |     number = {13},
14 |     pages = {1685-1686},
15 |     title = {{BioBlend: automating pipeline analyses within Galaxy and CloudMan}},
16 |     url = {https://doi.org/10.1093/bioinformatics/btt199},
17 |     volume = {29},
18 |     year = {2013},
19 | }
20 |
21 | If you use BioBlend.objects in your published work, please cite the following
22 | article:
23 |
24 | - Simone Leo, Luca Pireddu, Gianmauro Cuccuru, Luca Lianas, Nicola Soranzo, Enis
25 |   Afgan, Gianluigi Zanetti "BioBlend.objects: metacomputing with Galaxy"
26 |   Bioinformatics (2014) 30(19):2816-2817 doi:10.1093/bioinformatics/btu386
27 |
28 | BibTeX format:
29 |
30 | @article{10.1093/bioinformatics/btu386,
31 |     author = {Leo, Simone and Pireddu, Luca and Cuccuru, Gianmauro and Lianas, Luca and Soranzo, Nicola and Afgan, Enis and Zanetti, Gianluigi},
32 |     doi = {10.1093/bioinformatics/btu386},
33 |     journal = {Bioinformatics},
34 |     number = {19},
35 |     pages = {2816-2817},
36 |     title = {{BioBlend.objects: metacomputing with Galaxy}},
37 |     url = {https://doi.org/10.1093/bioinformatics/btu386},
38 |     volume = {30},
39 |     year = {2014},
40 | }
41 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | Code of Conduct
2 | ===============
3 |
4 | As part of the Galaxy Community, this project is committed to providing a
5 | welcoming and harassment-free experience for everyone. We therefore expect
6 | participants to abide by our Code of Conduct, which can be found at:
7 |
8 | https://galaxyproject.org/community/coc/
9 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | Making a new release
2 | --------------------
3 |
4 | 1. For a new major release, remove stuff (e.g. parameters, methods) deprecated in the previous cycle.
5 | 2. Update the `__version__` string in `bioblend/__init__.py` .
6 | 3. Update `CHANGELOG.md` .
7 | 4. Commit the changes above, push to GitHub, and wait for Continuous Integration (CI) tests to pass.
8 | 5. Make a new release through the GitHub interface. A CI job will automatically upload the packages to PyPI.
9 | 6. Check and merge the automatic pull request to update the [Bioconda package](https://github.com/bioconda/bioconda-recipes/blob/master/recipes/bioblend/meta.yaml).
10 |
11 | How to run BioBlend tests
12 | -------------------------
13 |
14 | 1. Clone Galaxy to a directory outside of BioBlend source directory via `git clone https://github.com/galaxyproject/galaxy.git`
15 |
16 | 2. Change directory to your BioBlend source and run the tests via `./run_bioblend_tests.sh -g GALAXY_PATH [-r GALAXY_REV] [-e TOX_ENV]` where `GALAXY_PATH` is the directory where the galaxy repository was cloned, `GALAXY_REV` is the branch or commit of Galaxy that you would like to test against (if different from the current state of your galaxy clone), and `TOX_ENV` is used to specify the Python version to use for BioBlend, e.g. `py39` for Python 3.9.
17 |
18 | You can also add `2>&1 | tee log.txt` to the command above to simultaneously view the test output and save it to the `log.txt` file.
19 |
20 | 3. If needed, you can temporarily increase the Galaxy job timeout used by BioBlend tests with e.g. `export BIOBLEND_TEST_JOB_TIMEOUT=100`, and re-run the tests.
21 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2012-2023 Galaxy Project
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | # Add non-Python files
2 | graft bioblend/_tests
3 | # Add documentation
4 | graft docs
5 |
6 | global-exclude *.swp *.pyc .gitignore
7 |
8 | include *.rst CITATION LICENSE
9 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | IN_VENV=. .venv/bin/activate
2 |
3 | .PHONY: clean release venv
4 |
5 | all:
6 | 	@echo "This makefile is used for the release process. A sensible all target is not implemented."
7 |
8 | clean:
9 | 	rm -rf bioblend.egg-info/ build/ dist/
10 | 	find . -type d -name '.mypy_cache' -exec rm -rf {} +
11 | 	make -C docs/ clean
12 |
13 | venv:
14 | 	# Create and activate a virtual environment
15 | 	[ -f .venv/bin/activate ] || python3 -m venv .venv || virtualenv -p python3 .venv
16 | 	# Install latest versions of pip and setuptools
17 | 	( $(IN_VENV) \
18 | 	&& python3 -m pip install --upgrade pip setuptools \
19 | 	)
20 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | .. image:: https://img.shields.io/pypi/v/bioblend.svg
2 |    :target: https://pypi.org/project/bioblend/
3 |    :alt: latest version available on PyPI
4 |
5 | .. image:: https://readthedocs.org/projects/bioblend/badge/
6 |    :alt: Documentation Status
7 |    :target: https://bioblend.readthedocs.io/
8 |
9 | .. image:: https://badges.gitter.im/galaxyproject/bioblend.svg
10 |    :alt: Join the chat at https://gitter.im/galaxyproject/bioblend
11 |    :target: https://gitter.im/galaxyproject/bioblend?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge
12 |
13 |
14 | BioBlend is a Python library for interacting with the `Galaxy`_ API.
15 |
16 | BioBlend is supported and tested on:
17 |
18 | - Python 3.9 - 3.13
19 | - Galaxy release 19.05 and later.
20 |
21 | Full docs are available at https://bioblend.readthedocs.io/ with a quick library
22 | overview also available in `ABOUT.rst <./ABOUT.rst>`_.
23 |
24 | .. References/hyperlinks used above
25 | .. _Galaxy: https://galaxyproject.org/
26 |
--------------------------------------------------------------------------------
/bioblend/__init__.py:
--------------------------------------------------------------------------------
1 | import contextlib
2 | import logging
3 | import logging.config
4 | import os
5 | import time
6 | from typing import (
7 |     Callable,
8 |     Optional,
9 |     TypeVar,
10 |     Union,
11 | )
12 |
13 | from bioblend.config import (
14 |     BioBlendConfigLocations,
15 |     Config,
16 | )
17 |
18 | # Current version of the library
19 | __version__ = "1.5.0"
20 |
21 | # default chunk size (in bytes) for reading remote data
22 | try:
23 |     import resource
24 |
25 |     CHUNK_SIZE = resource.getpagesize()
26 | except Exception:
27 |     CHUNK_SIZE = 4096
28 |
29 |
30 | config = Config()
31 |
32 |
33 | def get_version() -> str:
34 |     """
35 |     Returns a string with the current version of the library (e.g., "0.2.0")
36 |     """
37 |     return __version__
38 |
39 |
40 | def init_logging() -> None:
41 |     """
42 |     Initialize BioBlend's logging from a configuration file.
43 |     """
44 |     for config_file in BioBlendConfigLocations:
45 |         with contextlib.suppress(Exception):
46 |             logging.config.fileConfig(os.path.expanduser(config_file))
47 |
48 |
49 | class NullHandler(logging.Handler):
50 |     def emit(self, record: logging.LogRecord) -> None:
51 |         pass
52 |
53 |
54 | # By default, do not force any logging by the library. If you want to see the
55 | # log messages in your scripts, add the following to the top of your script:
56 | # import logging
57 | # logging.basicConfig(filename="bioblend.log", level=logging.DEBUG)
58 | default_format_string = "%(asctime)s %(name)s [%(levelname)s]: %(message)s"
59 | log = logging.getLogger("bioblend")
60 | log.addHandler(NullHandler())
61 | init_logging()
62 |
63 | # Convenience functions to set logging to a particular file or stream
64 | # To enable either of these, simply add the following at the top of a
65 | # bioblend module:
66 | # import bioblend
67 | # bioblend.set_stream_logger(__name__)
68 |
69 |
70 | def set_file_logger(
71 |     name: str, filepath: str, level: Union[int, str] = logging.INFO, format_string: Optional[str] = None
72 | ) -> None:
73 |     global log
74 |     if not format_string:
75 |         format_string = default_format_string
76 |     logger = logging.getLogger(name)
77 |     logger.setLevel(level)
78 |     fh = logging.FileHandler(filepath)
79 |     fh.setLevel(level)
80 |     formatter = logging.Formatter(format_string)
81 |     fh.setFormatter(formatter)
82 |     logger.addHandler(fh)
83 |     log = logger
84 |
85 |
86 | def set_stream_logger(name: str, level: Union[int, str] = logging.DEBUG, format_string: Optional[str] = None) -> None:
87 |     global log
88 |     if not format_string:
89 |         format_string = default_format_string
90 |     logger = logging.getLogger(name)
91 |     logger.setLevel(level)
92 |     fh = logging.StreamHandler()
93 |     fh.setLevel(level)
94 |     formatter = logging.Formatter(format_string)
95 |     fh.setFormatter(formatter)
96 |     logger.addHandler(fh)
97 |     log = logger
98 |
99 |
100 | class ConnectionError(Exception):
101 |     """
102 |     An exception class that is raised when unexpected HTTP responses come back.
103 |
104 |     Should make it easier to debug when strange HTTP things happen such as a
105 |     proxy server getting in the way of the request etc.
106 |     @see: body attribute to see the content of the http response
107 |     """
108 |
109 |     def __init__(
110 |         self, message: str, body: Optional[Union[bytes, str]] = None, status_code: Optional[int] = None
111 |     ) -> None:
112 |         super().__init__(message)
113 |         self.body = body
114 |         self.status_code = status_code
115 |
116 |     def __str__(self) -> str:
117 |         return f"{self.args[0]}: {self.body!s}"
118 |
119 |
120 | class TimeoutException(Exception):
121 |     pass
122 |
123 |
124 | class NotReady(Exception):
125 |     pass
126 |
127 |
128 | T = TypeVar("T")
129 |
130 |
131 | def wait_on(func: Callable[[], T], maxwait: float = 60, interval: float = 3) -> T:
132 |     """
133 |     Wait until a function returns without raising a NotReady exception
134 |
135 |     :param func: function to wait on. It should accept no parameters.
136 |
137 |     :param maxwait: Total time (in seconds) to wait for the function to return
138 |       without raising a NotReady exception. After this time, a
139 |       ``TimeoutException`` will be raised.
140 |
141 |     :param interval: Time (in seconds) to wait between 2 consecutive checks.
142 |     """
143 |     assert maxwait >= 0
144 |     assert interval > 0
145 |
146 |     time_left = maxwait
147 |     while True:
148 |         try:
149 |             return func()
150 |         except NotReady as e:
151 |             if time_left > 0:
152 |                 log.info("%s. Will wait %s more s", e, time_left)
153 |                 time.sleep(min(time_left, interval))
154 |                 time_left -= interval
155 |             else:
156 |                 raise TimeoutException(f"{e} after {maxwait} s")
157 |
--------------------------------------------------------------------------------
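
The module above exposes both the logging helpers and the `wait_on`/`NotReady` polling machinery. A minimal usage sketch (the `check_server` callable and its retry condition are hypothetical, for illustration only):

    import bioblend
    from bioblend import NotReady, wait_on

    # Send BioBlend's log messages to the console at DEBUG level
    bioblend.set_stream_logger("bioblend", level="DEBUG")

    calls = {"count": 0}

    def check_server() -> str:
        # wait_on() retries every `interval` seconds, up to `maxwait` seconds,
        # for as long as the callable keeps raising NotReady.
        calls["count"] += 1
        if calls["count"] < 3:
            raise NotReady("server still starting")
        return "ready"

    print(wait_on(check_server, maxwait=30, interval=1))  # prints "ready" on the third attempt
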
/bioblend/_tests/GalaxyTestBase.py:
--------------------------------------------------------------------------------
1 | import os
2 | import unittest
3 | from typing import (
4 |     Any,
5 |     Literal,
6 | )
7 |
8 | import bioblend
9 | from bioblend.galaxy import GalaxyInstance
10 | from . import test_util
11 |
12 | bioblend.set_stream_logger("test", level="INFO")
13 |
14 | BIOBLEND_TEST_JOB_TIMEOUT = int(os.environ.get("BIOBLEND_TEST_JOB_TIMEOUT", "60"))
15 |
16 |
17 | @test_util.skip_unless_galaxy()
18 | class GalaxyTestBase(unittest.TestCase):
19 |     gi: GalaxyInstance
20 |
21 |     @classmethod
22 |     def setUpClass(cls) -> None:
23 |         galaxy_key = os.environ["BIOBLEND_GALAXY_API_KEY"]
24 |         galaxy_url = os.environ["BIOBLEND_GALAXY_URL"]
25 |         cls.gi = GalaxyInstance(url=galaxy_url, key=galaxy_key)
26 |
27 |     def _test_dataset(self, history_id: str, contents: str = "1\t2\t3", **kwargs: Any) -> str:
28 |         tool_output = self.gi.tools.paste_content(contents, history_id, **kwargs)
29 |         return tool_output["outputs"][0]["id"]
30 |
31 |     def _wait_and_verify_dataset(
32 |         self, dataset_id: str, expected_contents: bytes, timeout_seconds: float = BIOBLEND_TEST_JOB_TIMEOUT
33 |     ) -> None:
34 |         dataset_contents = self.gi.datasets.download_dataset(dataset_id, maxwait=timeout_seconds)
35 |         assert dataset_contents == expected_contents
36 |
37 |     def _run_random_lines1(
38 |         self, history_id: str, dataset_id: str, input_format: Literal["21.01", "legacy"] = "legacy"
39 |     ) -> dict[str, Any]:
40 |         tool_inputs = {
41 |             "num_lines": "1",
42 |             "input": {"src": "hda", "id": dataset_id},
43 |         }
44 |         if input_format == "21.01":
45 |             tool_inputs.update({"seed_source": {"seed_source_selector": "set_seed", "seed": "asdf"}})
46 |         else:
47 |             # legacy format
48 |             tool_inputs.update({"seed_source|seed_source_selector": "set_seed", "seed_source|seed": "asdf"})
49 |         return self.gi.tools.run_tool(
50 |             history_id=history_id, tool_id="random_lines1", tool_inputs=tool_inputs, input_format=input_format
51 |         )
52 |
--------------------------------------------------------------------------------
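
The `_run_random_lines1` helper above builds the same tool inputs in the two formats accepted by `run_tool`'s `input_format` parameter; spelled out side by side (the dataset ID is a placeholder):

    # "legacy" format: conditional parameters are flattened with "|" separators
    legacy_inputs = {
        "num_lines": "1",
        "input": {"src": "hda", "id": "<dataset ID>"},
        "seed_source|seed_source_selector": "set_seed",
        "seed_source|seed": "asdf",
    }

    # "21.01" format: conditional parameters are nested dictionaries
    nested_inputs = {
        "num_lines": "1",
        "input": {"src": "hda", "id": "<dataset ID>"},
        "seed_source": {"seed_source_selector": "set_seed", "seed": "asdf"},
    }
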
/bioblend/_tests/README.TXT:
--------------------------------------------------------------------------------
1 | To run Galaxy tests, the following environment variables must be set:
2 |
3 | BIOBLEND_GALAXY_API_KEY = <API key of a Galaxy admin user>
4 | BIOBLEND_GALAXY_URL = <URL of the Galaxy server>
5 |
6 | To run ToolShed tests, the following environment variable must be set:
7 |
8 | BIOBLEND_TOOLSHED_URL = <URL of the ToolShed server>
9 |
10 | If you wish to run the entire suite, set all of the above. The integration
11 | tests can subsequently be run by invoking `pytest` from the command line.
12 | pytest should be invoked from the project root folder, and not the tests
13 | child folder, since the test data is resolved relative to the bioblend folder.
14 |
--------------------------------------------------------------------------------
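
For reference, the test suite reads these variables exactly as `GalaxyTestBase.setUpClass` above does; a minimal standalone sketch (the values are placeholders you must provide):

    import os

    from bioblend.galaxy import GalaxyInstance

    galaxy_url = os.environ["BIOBLEND_GALAXY_URL"]      # e.g. "http://localhost:8080"
    galaxy_key = os.environ["BIOBLEND_GALAXY_API_KEY"]  # an API key valid on that server
    gi = GalaxyInstance(url=galaxy_url, key=galaxy_key)
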
/bioblend/_tests/TestGalaxyConfig.py:
--------------------------------------------------------------------------------
1 | from . import (
2 |     GalaxyTestBase,
3 |     test_util,
4 | )
5 |
6 |
7 | class TestGalaxyConfig(GalaxyTestBase.GalaxyTestBase):
8 |     def test_get_config(self):
9 |         response = self.gi.config.get_config()
10 |         assert isinstance(response, dict)
11 |         assert "brand" in response.keys()
12 |
13 |     def test_get_version(self):
14 |         response = self.gi.config.get_version()
15 |         assert isinstance(response, dict)
16 |         assert "version_major" in response.keys()
17 |
18 |     def test_whoami(self):
19 |         response = self.gi.config.whoami()
20 |         assert isinstance(response, dict)
21 |         assert "username" in response.keys()
22 |
23 |     def test_reload_toolbox(self):
24 |         response = self.gi.config.reload_toolbox()
25 |         assert response is None
26 |
27 |     @test_util.skip_unless_galaxy("release_24.0")
28 |     def test_encode_decode_id(self):
29 |         int_id = 42
30 |         encoded_id = self.gi.config.encode_id(int_id)
31 |         assert isinstance(encoded_id, str)
32 |         decoded_id = self.gi.config.decode_id(encoded_id)
33 |         assert isinstance(decoded_id, int)
34 |         assert decoded_id == int_id
35 |
--------------------------------------------------------------------------------
/bioblend/_tests/TestGalaxyFolders.py:
--------------------------------------------------------------------------------
1 | from . import (
2 |     GalaxyTestBase,
3 |     test_util,
4 | )
5 |
6 | FOO_DATA = "foo\nbar\n"
7 |
8 |
9 | class TestGalaxyFolders(GalaxyTestBase.GalaxyTestBase):
10 |     def setUp(self):
11 |         super().setUp()
12 |         self.name = "automated test folder"
13 |         self.library = self.gi.libraries.create_library(
14 |             self.name, description="automated test", synopsis="automated test synopsis"
15 |         )
16 |         self.folder = self.gi.folders.create_folder(
17 |             self.library["root_folder_id"], self.name, description="automatically created folder"
18 |         )
19 |
20 |     def tearDown(self):
21 |         self.gi.libraries.delete_library(self.library["id"])
22 |
23 |     def test_create_folder(self):
24 |         assert self.folder["name"] == self.name
25 |         assert self.folder["description"] == "automatically created folder"
26 |
27 |     def test_show_folder(self):
28 |         f2 = self.gi.folders.show_folder(self.folder["id"])
29 |         assert f2["id"] == self.folder["id"]
30 |
31 |     def test_show_folder_contents(self):
32 |         f2 = self.gi.folders.show_folder(self.folder["id"], contents=True)
33 |         assert "folder_contents" in f2
34 |         assert "metadata" in f2
35 |         assert self.name == f2["metadata"]["folder_name"]
36 |
37 |     @test_util.skip_unless_galaxy("release_21.05")
38 |     def test_show_folder_contents_limit(self):
39 |         for i in range(12):
40 |             self.gi.folders.create_folder(self.folder["id"], f"{self.name} {i}")
41 |
42 |         # check defaults for limit and offset
43 |         f2 = self.gi.folders.show_folder(self.folder["id"], contents=True)
44 |         assert len(f2["folder_contents"]) == 10
45 |         assert f2["folder_contents"][0]["name"] == f"{self.name} 0"
46 |
47 |         # check non defaults
48 |         f2 = self.gi.folders.show_folder(self.folder["id"], contents=True, limit=1, offset=1)
49 |         assert len(f2["folder_contents"]) == 1
50 |         assert f2["folder_contents"][0]["name"] == f"{self.name} 1"
51 |
52 |     @test_util.skip_unless_galaxy("release_21.05")
53 |     def test_folder_contents_iter(self):
54 |         for i in range(12):
55 |             self.gi.folders.create_folder(self.folder["id"], f"{self.name} {i}")
56 |
57 |         # check defaults for limit and offset
58 |         f2 = list(self.gi.folders.contents_iter(self.folder["id"]))
59 |         assert len(f2) == 12
60 |         assert f2[0]["name"] == f"{self.name} 0"
61 |
62 |         # check non defaults
63 |         f2 = list(self.gi.folders.contents_iter(self.folder["id"], batch_size=1))
64 |         assert len(f2) == 12
65 |         assert f2[0]["name"] == f"{self.name} 0"
66 |
67 |     @test_util.skip_unless_galaxy("release_21.01")
68 |     def test_show_folder_contents_include_deleted(self):
69 |         history = self.gi.histories.create_history(name="Test History")
70 |         hda_id = self._test_dataset(history["id"])
71 |
72 |         # Create 2 library datasets into the library folder
73 |         ldda1 = self.gi.libraries.copy_from_dataset(
74 |             library_id=self.library["id"], dataset_id=hda_id, folder_id=self.folder["id"], message="Added HDA"
75 |         )
76 |         ldda2 = self.gi.libraries.copy_from_dataset(
77 |             library_id=self.library["id"], dataset_id=hda_id, folder_id=self.folder["id"], message="Added HDA"
78 |         )
79 |         folder_info = self.gi.folders.show_folder(self.folder["id"], contents=True)
80 |         assert len(folder_info["folder_contents"]) == 2
81 |         assert folder_info["folder_contents"][0]["type"] == "file"
82 |
83 |         # Delete the library datasets and check if include_deleted works
84 |         self.gi.libraries.delete_library_dataset(self.library["id"], ldda1["id"])
85 |         self.gi.libraries.delete_library_dataset(self.library["id"], ldda2["id"], purged=True)
86 |         folder_info = self.gi.folders.show_folder(self.folder["id"], contents=True, include_deleted=True)
87 |         # check if there are 2 contents and the number is correct
88 |         assert len(folder_info["folder_contents"]) == 2
89 |         assert folder_info["metadata"]["total_rows"] == 2
90 |
91 |         folder_info = self.gi.folders.show_folder(self.folder["id"], contents=True)
92 |         assert len(folder_info["folder_contents"]) == 0
93 |         assert folder_info["metadata"]["total_rows"] == 0
94 |         # show folders with contents=False does not respect include_deleted
95 |         folder_info = self.gi.folders.show_folder(self.folder["id"])
96 |         assert folder_info["item_count"] == 2
97 |
98 |         self.gi.histories.delete_history(history["id"])
99 |
100 |     def test_delete_folder(self):
101 |         self.sub_folder = self.gi.folders.create_folder(self.folder["id"], self.name)
102 |         self.gi.folders.delete_folder(self.sub_folder["id"])
103 |
104 |     def test_update_folder(self):
105 |         self.folder = self.gi.folders.update_folder(self.folder["id"], "new-name", "new-description")
106 |         assert self.folder["name"] == "new-name"
107 |         assert self.folder["description"] == "new-description"
108 |
109 |     def test_get_set_permissions(self):
110 |         empty_permission: dict[str, list] = {
111 |             "add_library_item_role_list": [],
112 |             "modify_folder_role_list": [],
113 |             "manage_folder_role_list": [],
114 |         }
115 |         # They should be empty to start with
116 |         assert self.gi.folders.get_permissions(self.folder["id"], scope="current") == empty_permission
117 |         assert self.gi.folders.get_permissions(self.folder["id"], scope="available") == empty_permission
118 |         # Then we'll add a role
119 |         role = self.gi.roles.get_roles()[0]
120 |         self.gi.folders.set_permissions(self.folder["id"], add_ids=[role["id"]])
121 |         assert (
122 |             role["id"]
123 |             in self.gi.folders.get_permissions(self.folder["id"], scope="available")["add_library_item_role_list"][0]
124 |         )
125 |
--------------------------------------------------------------------------------
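
The last two folder tests above contrast single-page and iterator access to folder contents. A short sketch of that difference (URL, key and folder ID are placeholders):

    from bioblend.galaxy import GalaxyInstance

    gi = GalaxyInstance("<Galaxy URL>", key="<API key>")
    # show_folder(contents=True) returns a single page, 10 items by default
    page = gi.folders.show_folder("<folder ID>", contents=True, limit=10, offset=0)
    print(len(page["folder_contents"]))
    # contents_iter() transparently fetches successive batches until exhausted
    for item in gi.folders.contents_iter("<folder ID>", batch_size=10):
        print(item["name"])
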
/bioblend/_tests/TestGalaxyGroups.py:
--------------------------------------------------------------------------------
1 | """
2 | WARNING: only admins can operate on groups!
3 | """
4 |
5 | import uuid
6 |
7 | from . import GalaxyTestBase
8 |
9 |
10 | class TestGalaxyGroups(GalaxyTestBase.GalaxyTestBase):
11 | def setUp(self):
12 | super().setUp()
13 | self.name = f"test_{uuid.uuid4().hex}"
14 | self.group = self.gi.groups.create_group(self.name)[0]
15 |
16 | def tearDown(self):
17 | # As of 2015/04/13, deleting a group is not possible through the API
18 | pass
19 |
20 | def test_create_group(self):
21 | assert self.group["name"] == self.name
22 | assert self.group["id"] is not None
23 |
24 | def test_get_groups(self):
25 | groups = self.gi.groups.get_groups()
26 | for group in groups:
27 | assert group["id"] is not None
28 | assert group["name"] is not None
29 |
30 | def test_show_group(self):
31 | group_data = self.gi.groups.show_group(self.group["id"])
32 | assert self.group["id"] == group_data["id"]
33 | assert self.group["name"] == group_data["name"]
34 |
35 | def test_get_group_users(self):
36 | group_users = self.gi.groups.get_group_users(self.group["id"])
37 | assert group_users == []
38 |
39 | def test_get_group_roles(self):
40 | group_roles = self.gi.groups.get_group_roles(self.group["id"])
41 | assert group_roles == []
42 |
43 | def test_update_group(self):
44 | new_name = f"test_{uuid.uuid4().hex}"
45 | new_users = [self.gi.users.get_current_user()["id"]]
46 | self.gi.groups.update_group(self.group["id"], new_name, user_ids=new_users)
47 | updated_group = self.gi.groups.show_group(self.group["id"])
48 | assert self.group["id"] == updated_group["id"]
49 | assert updated_group["name"] == new_name
50 | updated_group_users = [_["id"] for _ in self.gi.groups.get_group_users(self.group["id"])]
51 | assert set(updated_group_users) == set(new_users)
52 | updated_group_roles = [_["id"] for _ in self.gi.groups.get_group_roles(self.group["id"])]
53 | assert set(updated_group_roles) == set()
54 |
55 | def test_add_delete_group_user(self):
56 | new_user = self.gi.users.get_current_user()["id"]
57 | ret = self.gi.groups.add_group_user(self.group["id"], new_user)
58 | assert ret["id"] == new_user
59 | updated_group_users = [_["id"] for _ in self.gi.groups.get_group_users(self.group["id"])]
60 | assert new_user in updated_group_users
61 | self.gi.groups.delete_group_user(self.group["id"], new_user)
62 | updated_group_users = [_["id"] for _ in self.gi.groups.get_group_users(self.group["id"])]
63 | assert new_user not in updated_group_users
64 |
--------------------------------------------------------------------------------
/bioblend/_tests/TestGalaxyInstance.py:
--------------------------------------------------------------------------------
1 | """
2 | Tests on the GalaxyInstance object itself.
3 | """
4 |
5 | import os
6 | import time
7 | import unittest
8 |
9 | import pytest
10 |
11 | from bioblend import ConnectionError
12 | from bioblend.galaxy import GalaxyInstance
13 | from . import test_util
14 |
15 |
16 | class TestGalaxyInstance(unittest.TestCase):
17 | def setUp(self):
18 | # "connect" to a fake Galaxy instance
19 | self.gi = GalaxyInstance("http://localhost:56789", key="whatever")
20 |
21 | def test_url_attribute(self):
22 | assert self.gi.base_url == "http://localhost:56789"
23 | assert self.gi.url == "http://localhost:56789/api"
24 | # Test instance served at a subdirectory
25 | gi = GalaxyInstance("http://localhost:56789/galaxy/", key="whatever")
26 | assert gi.base_url == "http://localhost:56789/galaxy"
27 | assert gi.url == "http://localhost:56789/galaxy/api"
28 |
29 | def test_set_max_get_attempts(self):
30 | self.gi.max_get_attempts = 3
31 | assert 3 == self.gi.max_get_attempts
32 |
33 | def test_set_retry_delay(self):
34 | self.gi.get_retry_delay = 5.0
35 | assert 5.0 == self.gi.get_retry_delay
36 |
37 | def test_get_retry(self):
38 | # We set the client to try twice, with a delay of 5 seconds between
39 | # attempts. So, we expect the call to take at least 5 seconds before
40 | # failing.
41 | self.gi.max_get_attempts = 3
42 | self.gi.get_retry_delay = 2
43 | start = time.time()
44 | with pytest.raises(ConnectionError):
45 | self.gi.libraries.get_libraries()
46 | end = time.time()
47 | duration = end - start
48 | assert duration > self.gi.get_retry_delay * (self.gi.max_get_attempts - 1), "Didn't seem to retry long enough"
49 |
50 | def test_missing_scheme_fake_url(self):
51 | with pytest.raises(ValueError):
52 | GalaxyInstance("localhost:56789", key="whatever")
53 |
54 | @test_util.skip_unless_galaxy()
55 | def test_missing_scheme_real_url(self):
56 | galaxy_url = os.environ["BIOBLEND_GALAXY_URL"]
57 | # Strip the scheme from galaxy_url
58 | scheme_sep = "://"
59 | if scheme_sep in galaxy_url:
60 | galaxy_url = galaxy_url.partition(scheme_sep)[2]
61 | GalaxyInstance(url=galaxy_url)
62 |
--------------------------------------------------------------------------------
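
The retry behaviour tested above is controlled by two writable attributes on `GalaxyInstance`; a minimal sketch (URL and key are placeholders):

    from bioblend.galaxy import GalaxyInstance

    gi = GalaxyInstance("<Galaxy URL>", key="<API key>")
    # Attempt failed GET requests up to 3 times ...
    gi.max_get_attempts = 3
    # ... waiting 2 seconds between consecutive attempts, so a permanently
    # failing GET takes at least 4 seconds to raise ConnectionError
    gi.get_retry_delay = 2.0
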
/bioblend/_tests/TestGalaxyQuotas.py:
--------------------------------------------------------------------------------
1 | import uuid
2 |
3 | from . import (
4 | GalaxyTestBase,
5 | test_util,
6 | )
7 |
8 |
9 | class TestGalaxyQuotas(GalaxyTestBase.GalaxyTestBase):
10 | def setUp(self):
11 | super().setUp()
12 | # Quota names must be unique, and they're impossible to delete
13 | # without accessing the database.
14 | self.quota_name = f"BioBlend-Test-Quota-{uuid.uuid4().hex}"
15 | self.quota = self.gi.quotas.create_quota(self.quota_name, "testing", "100 GB", "=", default="registered")
16 |
17 | def tearDown(self):
18 | self.gi.quotas.update_quota(self.quota["id"], default="registered")
19 | self.gi.quotas.update_quota(self.quota["id"], default="no")
20 | self.gi.quotas.delete_quota(self.quota["id"])
21 |
22 | def test_create_quota(self):
23 | quota = self.gi.quotas.show_quota(self.quota["id"])
24 | assert quota["id"] == self.quota["id"]
25 | assert quota["name"] == self.quota_name
26 | assert quota["bytes"] == 107374182400
27 | assert quota["operation"] == "="
28 | assert quota["description"] == "testing"
29 |
30 | def test_get_quotas(self):
31 | quotas = self.gi.quotas.get_quotas()
32 | assert self.quota["id"] in [quota["id"] for quota in quotas]
33 |
34 | def test_update_quota(self):
35 | response = self.gi.quotas.update_quota(
36 | self.quota["id"],
37 | name=self.quota_name + "-new",
38 | description="asdf",
39 | default="registered",
40 | operation="-",
41 | amount=".01 TB",
42 | )
43 | assert f"""Quota '{self.quota_name}' has been renamed to '{self.quota_name}-new'""" in response
44 |
45 | quota = self.gi.quotas.show_quota(self.quota["id"])
46 | assert quota["id"] == self.quota["id"]
47 | assert quota["name"] == self.quota_name + "-new"
48 | assert quota["bytes"] == 10995116277
49 | assert quota["operation"] == "-"
50 | assert quota["description"] == "asdf"
51 |
52 | def test_delete_undelete_quota(self):
53 | self.gi.quotas.update_quota(self.quota["id"], default="no")
54 | response = self.gi.quotas.delete_quota(self.quota["id"])
55 | assert response == "Deleted 1 quotas: " + self.quota_name
56 | response = self.gi.quotas.undelete_quota(self.quota["id"])
57 | assert response == "Undeleted 1 quotas: " + self.quota_name
58 |
59 | @test_util.skip_unless_galaxy("release_19.09") # for user purging
60 | def test_update_non_default_quota(self):
61 | """
62 | Test updating a non default quota.
63 | Needs to use `default=None` (which is the default), `default="no"` will fail.
64 | """
65 | if self.gi.config.get_config()["use_remote_user"]:
66 | self.skipTest("This Galaxy instance is not configured to use local users")
67 | new_username = test_util.random_string()
68 | new_user_email = f"{new_username}@example.org"
69 | password = test_util.random_string(20)
70 | new_user = self.gi.users.create_local_user(new_username, new_user_email, password)
71 |
72 | quota = self.gi.quotas.create_quota(
73 | name="non_default_quota",
74 | description="testing",
75 | amount="100 GB",
76 | operation="+",
77 | in_users=[new_user["id"]],
78 | )
79 | self.gi.quotas.update_quota(quota["id"], amount="200 GB")
80 |
81 | if self.gi.config.get_config()["allow_user_deletion"]:
82 | self.gi.users.delete_user(new_user["id"])
83 | self.gi.users.delete_user(new_user["id"], purge=True)
84 |
--------------------------------------------------------------------------------
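
The byte counts asserted in `test_create_quota` and `test_update_quota` above follow from the quota amounts being parsed as binary units; as a quick arithmetic check:

    # "100 GB" is interpreted as 100 * 2**30 bytes
    assert 100 * 2**30 == 107374182400
    # ".01 TB" is 0.01 * 2**40 bytes, truncated to an integer
    assert int(0.01 * 2**40) == 10995116277
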
/bioblend/_tests/TestGalaxyRoles.py:
--------------------------------------------------------------------------------
1 | import uuid
2 |
3 | from . import GalaxyTestBase
4 |
5 |
6 | class TestGalaxyRoles(GalaxyTestBase.GalaxyTestBase):
7 | def setUp(self):
8 | super().setUp()
9 | self.name = f"test_{uuid.uuid4().hex}"
10 | self.description = "automated test role"
11 | self.role = self.gi.roles.create_role(self.name, self.description)
12 |
13 | def tearDown(self):
14 | # As of 2017/07/26, deleting a role is not possible through the API
15 | pass
16 |
17 | def test_get_roles(self):
18 | roles = self.gi.roles.get_roles()
19 | for role in roles:
20 | assert role["id"] is not None
21 | assert role["name"] is not None
22 |
23 | def test_create_role(self):
24 | assert self.role["name"] == self.name
25 | assert self.role["description"] == self.description
26 | assert self.role["id"] is not None
27 |
--------------------------------------------------------------------------------
/bioblend/_tests/TestGalaxyToolContainerResolution.py:
--------------------------------------------------------------------------------
1 | """
2 | Test functions in bioblend.galaxy.container_resolution
3 | """
4 |
5 | from . import (
6 | GalaxyTestBase,
7 | test_util,
8 | )
9 |
10 |
11 | class TestGalaxyContainerResolution(GalaxyTestBase.GalaxyTestBase):
12 | @test_util.skip_unless_galaxy("release_22.05")
13 | def test_get_container_resolvers(self):
14 | container_resolvers = self.gi.container_resolution.get_container_resolvers()
15 | assert isinstance(container_resolvers, list)
16 | assert len(container_resolvers) > 0
17 | assert isinstance(container_resolvers[0], dict)
18 | assert container_resolvers[0]["model_class"] == "ExplicitContainerResolver"
19 | assert container_resolvers[0]["resolver_type"] == "explicit"
20 | assert container_resolvers[0]["can_uninstall_dependencies"] is False
21 | assert container_resolvers[0]["builds_on_resolution"] is False
22 |
23 | @test_util.skip_unless_galaxy("release_22.05")
24 | def test_show_container_resolver(self):
25 | container_resolver = self.gi.container_resolution.show_container_resolver(0)
26 | print(container_resolver)
27 | assert isinstance(container_resolver, dict)
28 | assert container_resolver["model_class"] == "ExplicitContainerResolver"
29 | assert container_resolver["resolver_type"] == "explicit"
30 | assert container_resolver["can_uninstall_dependencies"] is False
31 | assert container_resolver["builds_on_resolution"] is False
32 |
33 | @test_util.skip_unless_galaxy("release_22.05")
34 | def test_resolve(self):
35 | tool = self.gi.container_resolution.resolve(tool_id="CONVERTER_parquet_to_csv")
36 | print(tool)
37 | assert isinstance(tool, dict)
38 |
39 | tool_requirements_only = self.gi.container_resolution.resolve(
40 | tool_id="CONVERTER_parquet_to_csv", requirements_only=True
41 | )
42 | assert isinstance(tool_requirements_only, dict)
43 |
44 | @test_util.skip_unless_galaxy("release_22.05")
45 | def test_resolve_toolbox(self):
46 | toolbox = self.gi.container_resolution.resolve_toolbox()
47 | assert isinstance(toolbox, list)
48 | assert len(toolbox) > 0
49 | assert isinstance(toolbox[0], dict)
50 |
51 | toolbox_by_tool_ids = self.gi.container_resolution.resolve_toolbox(tool_ids=[toolbox[0]["tool_id"]])
52 | assert isinstance(toolbox_by_tool_ids, list)
53 | assert len(toolbox_by_tool_ids) == 1
54 | assert isinstance(toolbox_by_tool_ids[0], dict)
55 |
56 | toolbox_by_resolver_type = self.gi.container_resolution.resolve_toolbox(resolver_type="mulled")
57 | assert isinstance(toolbox_by_resolver_type, list)
58 | assert len(toolbox_by_resolver_type) > 0
59 | assert isinstance(toolbox_by_resolver_type[0], dict)
60 | assert len(toolbox) == len(toolbox_by_resolver_type)
61 | for tool in toolbox_by_resolver_type:
62 | print(tool)
63 | assert (
64 | tool["status"]["dependency_type"] is None
65 | or tool["status"]["container_resolver"]["resolver_type"] == "mulled"
66 | )
67 |
68 | toolbox_by_container_type = self.gi.container_resolution.resolve_toolbox(container_type="docker")
69 | assert isinstance(toolbox_by_container_type, list)
70 | assert len(toolbox_by_container_type) > 0
71 | assert isinstance(toolbox_by_container_type[0], dict)
72 | assert len(toolbox) == len(toolbox_by_container_type)
73 | for tool in toolbox_by_container_type:
74 | assert tool["status"]["dependency_type"] is None or tool["status"]["dependency_type"] == "docker"
75 | assert (
76 | tool["status"]["dependency_type"] is None or tool["status"]["container_description"]["type"] == "docker"
77 | )
78 |
79 | toolbox_requirements_only = self.gi.container_resolution.resolve_toolbox(requirements_only=True)
80 | assert isinstance(toolbox_requirements_only, list)
81 | assert len(toolbox_requirements_only) > 0
82 | assert isinstance(toolbox_requirements_only[0], dict)
83 | assert len(toolbox) == len(toolbox_requirements_only)
84 |
85 | # TODO unless containers are available this may fallback to conda by default?
86 | # depending on Galaxy's config
87 | # toolbox_by_index = self.gi.container_resolution.resolve_toolbox(tool_ids=[toolbox[0]['tool_id']], index=0, install=True)
88 | # assert isinstance(toolbox_by_index, list)
89 | # assert len(toolbox_by_index) > 0
90 | # assert isinstance(toolbox_by_index[0], dict)
91 |
92 | # TODO unless containers are available this may fallback to conda by default?
93 | # depending on Galaxy's config
94 | # def test_resolve_toolbox_with_install(self):
95 | # toolbox = self.gi.container_resolution.resolve_toolbox_with_install(tool_ids=[])
96 | # assert isinstance(toolbox, list)
97 | # assert len(toolbox) == 0
98 |
--------------------------------------------------------------------------------
/bioblend/_tests/TestGalaxyToolData.py:
--------------------------------------------------------------------------------
1 | from . import GalaxyTestBase
2 |
3 |
4 | class TestGalaxyToolData(GalaxyTestBase.GalaxyTestBase):
5 |     def test_get_data_tables(self):
6 |         tables = self.gi.tool_data.get_data_tables()
7 |         for table in tables:
8 |             assert table["name"] is not None
9 |
10 |     def test_show_data_table(self):
11 |         tables = self.gi.tool_data.get_data_tables()
12 |         table = self.gi.tool_data.show_data_table(tables[0]["name"])
13 |         assert table["columns"] is not None
14 |         assert table["fields"] is not None
15 |         assert table["name"] is not None
16 |
--------------------------------------------------------------------------------
/bioblend/_tests/TestGalaxyToolDependencies.py:
--------------------------------------------------------------------------------
1 | """
2 | Test functions in bioblend.galaxy.tool_dependencies
3 | """
4 |
5 | from . import (
6 | GalaxyTestBase,
7 | test_util,
8 | )
9 |
10 |
11 | class TestGalaxyToolDependencies(GalaxyTestBase.GalaxyTestBase):
12 | @test_util.skip_unless_galaxy("release_20.01")
13 | def test_summarize_toolbox(self):
14 | toolbox_summary = self.gi.tool_dependencies.summarize_toolbox()
15 | assert isinstance(toolbox_summary, list)
16 | assert len(toolbox_summary) > 0
17 |
18 | toolbox_summary_by_tool = self.gi.tool_dependencies.summarize_toolbox(index_by="tools")
19 | assert isinstance(toolbox_summary_by_tool, list)
20 | assert len(toolbox_summary_by_tool) > 0
21 | assert isinstance(toolbox_summary_by_tool[0], dict)
22 | assert "tool_ids" in toolbox_summary_by_tool[0]
23 | assert isinstance(toolbox_summary_by_tool[0]["tool_ids"], list)
24 | tool_id = toolbox_summary_by_tool[0]["tool_ids"][0]
25 |
26 | toolbox_summary_select_tool_ids = self.gi.tool_dependencies.summarize_toolbox(
27 | index_by="tools", tool_ids=[tool_id]
28 | )
29 | assert isinstance(toolbox_summary_select_tool_ids, list)
30 | assert len(toolbox_summary_select_tool_ids) == 1
31 | assert toolbox_summary_select_tool_ids[0]["tool_ids"][0] == tool_id
32 |
33 | @test_util.skip_unless_galaxy("release_20.01")
34 | def test_unused_dependency_paths(self):
35 | unused_paths = self.gi.tool_dependencies.unused_dependency_paths()
36 | assert isinstance(unused_paths, list)
37 |
38 | @test_util.skip_unless_galaxy("release_20.01")
39 | def test_delete_unused_dependency_paths(self):
40 | self.gi.tool_dependencies.delete_unused_dependency_paths(paths=[])
41 |
--------------------------------------------------------------------------------
/bioblend/_tests/TestGalaxyToolInputs.py:
--------------------------------------------------------------------------------
1 | from bioblend.galaxy.tools.inputs import (
2 |     conditional,
3 |     dataset,
4 |     inputs,
5 |     repeat,
6 | )
7 |
8 |
9 | def test_conditional():
10 |     # Build up example inputs for random_lines1
11 |     as_dict = (
12 |         inputs()
13 |         .set("num_lines", 5)
14 |         .set("input", dataset("encoded1"))
15 |         .set("seed_source", conditional().set("seed_source_selector", "set_seed").set("seed", "asdf"))
16 |         .to_dict()
17 |     )
18 |     assert as_dict["num_lines"] == 5
19 |     assert as_dict["input"]["src"] == "hda"
20 |     assert as_dict["input"]["id"] == "encoded1"
21 |     assert as_dict["seed_source|seed_source_selector"] == "set_seed"
22 |     assert as_dict["seed_source|seed"] == "asdf"
23 |
24 |
25 | def test_repeat():
26 |     # Build up inputs for cat1
27 |     as_dict = (
28 |         inputs()
29 |         .set("input1", dataset("encoded1"))
30 |         .set(
31 |             "queries",
32 |             repeat()
33 |             .instance(inputs().set_dataset_param("input2", "encoded2"))
34 |             .instance(inputs().set_dataset_param("input2", "encoded3")),
35 |         )
36 |         .to_dict()
37 |     )
38 |     assert as_dict["input1"]["src"] == "hda"
39 |     assert as_dict["input1"]["id"] == "encoded1"
40 |     assert as_dict["queries_0|input2"]["src"] == "hda"
41 |     assert as_dict["queries_0|input2"]["id"] == "encoded2"
42 |     assert as_dict["queries_1|input2"]["src"] == "hda"
43 |     assert as_dict["queries_1|input2"]["id"] == "encoded3"
44 |
--------------------------------------------------------------------------------
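
The builder shown in these tests is typically handed straight to the tools client. A minimal sketch of that hand-off (URL, key, history and dataset IDs are placeholders):

    from bioblend.galaxy import GalaxyInstance
    from bioblend.galaxy.tools.inputs import conditional, dataset, inputs

    gi = GalaxyInstance("<Galaxy URL>", key="<API key>")
    tool_inputs = (
        inputs()
        .set("num_lines", 5)
        .set("input", dataset("<dataset ID>"))
        .set("seed_source", conditional().set("seed_source_selector", "set_seed").set("seed", "asdf"))
    )
    # run_tool() accepts the flattened dict form produced by to_dict()
    gi.tools.run_tool(history_id="<history ID>", tool_id="random_lines1", tool_inputs=tool_inputs.to_dict())
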
/bioblend/_tests/TestGalaxyToolShed.py:
--------------------------------------------------------------------------------
1 | """ """
2 |
3 | from . import (
4 | GalaxyTestBase,
5 | test_util,
6 | )
7 |
8 |
9 | @test_util.skip_unless_galaxy("release_20.05")
10 | class TestGalaxyToolShed(GalaxyTestBase.GalaxyTestBase):
11 |
12 | def test_install_old_first(self):
13 | # This test uses two revisions of the same tool, where only one has
14 | # repository metadata associated:
15 | # 6:289d6299bd2e contains a tool version bump
16 | # 7:c14c7fd4d1be is a minor fix (no repository metadata changes)
17 |
18 | # asking to install old version immediately installs
19 | # new version
20 | response = self.gi.toolshed.install_repository_revision(
21 | tool_shed_url="https://toolshed.g2.bx.psu.edu/",
22 | name="ampvis2_alpha_diversity",
23 | owner="iuc",
24 | changeset_revision="289d6299bd2e",
25 | )
26 | assert isinstance(response, list), response
27 | assert len(response) == 1
28 | assert response[0]["status"] == "Installed"
29 |
30 | installed_repos = self.gi.toolshed.get_repositories()
31 | assert len(installed_repos) == 1
32 | assert installed_repos[0]["installed_changeset_revision"] == "c14c7fd4d1be"
33 | assert installed_repos[0]["changeset_revision"] == "c14c7fd4d1be"
34 |
35 | self.gi.toolshed.uninstall_repository_revision(
36 | name="ampvis2_alpha_diversity",
37 | owner="iuc",
38 | changeset_revision="c14c7fd4d1be",
39 | tool_shed_url="https://toolshed.g2.bx.psu.edu/",
40 | remove_from_disk=True,
41 | )
42 |
43 | def test_install_new_first(self):
44 |         # 6:289d6299bd2e contains a tool version bump
45 |         # 7:c14c7fd4d1be is a minor fix (the tool version was not bumped)
46 | response = self.gi.toolshed.install_repository_revision(
47 | tool_shed_url="https://toolshed.g2.bx.psu.edu/",
48 | name="ampvis2_alpha_diversity",
49 | owner="iuc",
50 | changeset_revision="c14c7fd4d1be",
51 | )
52 | assert isinstance(response, list), response
53 | assert len(response) == 1
54 | assert response[0]["status"] == "Installed"
55 |
56 | installed_repos = self.gi.toolshed.get_repositories()
57 | assert len(installed_repos) == 1
58 | assert installed_repos[0]["installed_changeset_revision"] == "c14c7fd4d1be"
59 | assert installed_repos[0]["changeset_revision"] == "c14c7fd4d1be"
60 |
61 | # install older revision
62 |         # -> Galaxy realizes that a tool with the same version is already installed
63 | # -> responds with a dict indicating this
64 | response = self.gi.toolshed.install_repository_revision(
65 | tool_shed_url="https://toolshed.g2.bx.psu.edu/",
66 | name="ampvis2_alpha_diversity",
67 | owner="iuc",
68 | changeset_revision="289d6299bd2e",
69 | )
70 | assert isinstance(response, dict), response
71 | assert response["status"] == "ok"
72 |
73 | installed_repos = self.gi.toolshed.get_repositories()
74 | assert len(installed_repos) == 1
75 | assert installed_repos[0]["installed_changeset_revision"] == "c14c7fd4d1be"
76 | assert installed_repos[0]["changeset_revision"] == "c14c7fd4d1be"
77 |
78 | self.gi.toolshed.uninstall_repository_revision(
79 | name="ampvis2_alpha_diversity",
80 | owner="iuc",
81 | changeset_revision="c14c7fd4d1be",
82 | tool_shed_url="https://toolshed.g2.bx.psu.edu/",
83 | remove_from_disk=True,
84 | )
85 |
--------------------------------------------------------------------------------
/bioblend/_tests/TestToolshed.py:
--------------------------------------------------------------------------------
1 | import os
2 | import unittest
3 |
4 | import bioblend
5 | import bioblend.toolshed
6 | from . import test_util
7 |
8 |
9 | @test_util.skip_unless_toolshed()
10 | class TestToolshed(unittest.TestCase):
11 | def setUp(self):
12 | toolshed_url = os.environ["BIOBLEND_TOOLSHED_URL"]
13 | self.ts = bioblend.toolshed.ToolShedInstance(url=toolshed_url)
14 |
15 | def test_categories_client(self):
16 | # get_categories
17 | categories = self.ts.categories.get_categories()
18 | assert "Assembly" in [c["name"] for c in categories]
19 | # we cannot test get_categories with deleted=True as it requires administrator status
20 |
21 | # show_category
22 | visualization_category_id = [c for c in categories if c["name"] == "Visualization"][0]["id"]
23 | visualization_category = self.ts.categories.show_category(visualization_category_id)
24 | assert visualization_category["description"] == "Tools for visualizing data"
25 |
26 | # get_repositories
27 | repositories = self.ts.categories.get_repositories(visualization_category_id)
28 | repositories_reversed = self.ts.categories.get_repositories(visualization_category_id, sort_order="desc")
29 | assert len(repositories["repositories"]) > 200
30 | assert {
31 | "deprecated",
32 | "description",
33 | "homepage_url",
34 | "id",
35 | "name",
36 | "owner",
37 | "remote_repository_url",
38 | "type",
39 | "update_time",
40 | } <= set(repositories["repositories"][0].keys())
41 | assert repositories["repositories"][0] == repositories_reversed["repositories"][-1]
42 |
43 | def test_repositories_client(self):
44 | # get_repositories
45 | repositories = self.ts.repositories.get_repositories()
46 | assert len(repositories) > 5000
47 | repository0 = repositories[0]
48 | for key in ("id", "name", "owner", "type", "description", "deprecated"):
49 | assert key in repository0
50 |
51 | repositories = self.ts.repositories.get_repositories(name="bam_to_sam", owner="devteam")
52 | assert len(repositories) == 1
53 | bam_to_sam_repo = repositories[0]
54 | assert bam_to_sam_repo["name"] == "bam_to_sam"
55 | assert bam_to_sam_repo["owner"] == "devteam"
56 | assert bam_to_sam_repo["type"] == "unrestricted"
57 | assert not bam_to_sam_repo["deprecated"]
58 |
59 | # search_repositories
60 | samtools_search = self.ts.repositories.search_repositories("samtools", page_size=5)
61 | assert int(samtools_search["total_results"]) > 20
62 | assert len(samtools_search["hits"]) == 5
63 |
64 | # show_repository
65 | show_bam_to_sam_repo = self.ts.repositories.show_repository(bam_to_sam_repo["id"])
66 | assert "SAM" in show_bam_to_sam_repo["long_description"]
67 |
68 | # test_create_repository
69 | # need to provide an API key to test this
70 |
71 | # test_update_repository
72 | # need to provide an API key to test this
73 |
74 | def test_repositories_revisions(self):
75 | # get_ordered_installable_revisions
76 | bam_to_sam_revisions = self.ts.repositories.get_ordered_installable_revisions("bam_to_sam", "devteam")
77 | assert len(bam_to_sam_revisions) >= 4
78 |
79 | # get_repository_revision_install_info
80 | bam_to_sam_revision_install_info = self.ts.repositories.get_repository_revision_install_info(
81 | "bam_to_sam", "devteam", bam_to_sam_revisions[0]
82 | )
83 | assert len(bam_to_sam_revision_install_info) == 3
84 | assert bam_to_sam_revision_install_info[0].get("model_class") == "Repository"
85 | assert bam_to_sam_revision_install_info[1].get("model_class") == "RepositoryMetadata"
86 | assert bam_to_sam_revision_install_info[2].get("model_class") is None
87 |
88 | def test_tools_client(self):
89 | # search_tools
90 | samtools_search = self.ts.tools.search_tools("samtools", page_size=5)
91 | assert int(samtools_search["page"]) == 1
92 | assert len(samtools_search["hits"]) == 5
93 | hit0_tool = samtools_search["hits"][0]["tool"]
94 | for key in ("id", "repo_owner_username", "repo_name", "name", "description"):
95 | assert key in hit0_tool
96 |
--------------------------------------------------------------------------------
/bioblend/_tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/galaxyproject/bioblend/01909298c19bae6165a1c1f51cbf8c92be5ac55e/bioblend/_tests/__init__.py
--------------------------------------------------------------------------------
/bioblend/_tests/data/1.bed:
--------------------------------------------------------------------------------
1 | chr1 147962192 147962580 CCDS989.1_cds_0_0_chr1_147962193_r 0 -
2 | chr1 147984545 147984630 CCDS990.1_cds_0_0_chr1_147984546_f 0 +
3 | chr1 148078400 148078582 CCDS993.1_cds_0_0_chr1_148078401_r 0 -
4 | chr1 148185136 148185276 CCDS996.1_cds_0_0_chr1_148185137_f 0 +
5 | chr10 55251623 55253124 CCDS7248.1_cds_0_0_chr10_55251624_r 0 -
6 | chr11 116124407 116124501 CCDS8374.1_cds_0_0_chr11_116124408_r 0 -
7 | chr11 116206508 116206563 CCDS8377.1_cds_0_0_chr11_116206509_f 0 +
8 | chr11 116211733 116212337 CCDS8378.1_cds_0_0_chr11_116211734_r 0 -
9 | chr11 1812377 1812407 CCDS7726.1_cds_0_0_chr11_1812378_f 0 +
10 | chr12 38440094 38440321 CCDS8736.1_cds_0_0_chr12_38440095_r 0 -
11 | chr13 112381694 112381953 CCDS9526.1_cds_0_0_chr13_112381695_f 0 +
12 | chr14 98710240 98712285 CCDS9949.1_cds_0_0_chr14_98710241_r 0 -
13 | chr15 41486872 41487060 CCDS10096.1_cds_0_0_chr15_41486873_r 0 -
14 | chr15 41673708 41673857 CCDS10097.1_cds_0_0_chr15_41673709_f 0 +
15 | chr15 41679161 41679250 CCDS10098.1_cds_0_0_chr15_41679162_r 0 -
16 | chr15 41826029 41826196 CCDS10101.1_cds_0_0_chr15_41826030_f 0 +
17 | chr16 142908 143003 CCDS10397.1_cds_0_0_chr16_142909_f 0 +
18 | chr16 179963 180135 CCDS10401.1_cds_0_0_chr16_179964_r 0 -
19 | chr16 244413 244681 CCDS10402.1_cds_0_0_chr16_244414_f 0 +
20 | chr16 259268 259383 CCDS10403.1_cds_0_0_chr16_259269_r 0 -
21 | chr18 23786114 23786321 CCDS11891.1_cds_0_0_chr18_23786115_r 0 -
22 | chr18 59406881 59407046 CCDS11985.1_cds_0_0_chr18_59406882_f 0 +
23 | chr18 59455932 59456337 CCDS11986.1_cds_0_0_chr18_59455933_r 0 -
24 | chr18 59600586 59600754 CCDS11988.1_cds_0_0_chr18_59600587_f 0 +
25 | chr19 59068595 59069564 CCDS12866.1_cds_0_0_chr19_59068596_f 0 +
26 | chr19 59236026 59236146 CCDS12872.1_cds_0_0_chr19_59236027_r 0 -
27 | chr19 59297998 59298008 CCDS12877.1_cds_0_0_chr19_59297999_f 0 +
28 | chr19 59302168 59302288 CCDS12878.1_cds_0_0_chr19_59302169_r 0 -
29 | chr2 118288583 118288668 CCDS2120.1_cds_0_0_chr2_118288584_f 0 +
30 | chr2 118394148 118394202 CCDS2121.1_cds_0_0_chr2_118394149_r 0 -
31 | chr2 220190202 220190242 CCDS2441.1_cds_0_0_chr2_220190203_f 0 +
32 | chr2 220229609 220230869 CCDS2443.1_cds_0_0_chr2_220229610_r 0 -
33 | chr20 33330413 33330423 CCDS13249.1_cds_0_0_chr20_33330414_r 0 -
34 | chr20 33513606 33513792 CCDS13255.1_cds_0_0_chr20_33513607_f 0 +
35 | chr20 33579500 33579527 CCDS13256.1_cds_0_0_chr20_33579501_r 0 -
36 | chr20 33593260 33593348 CCDS13257.1_cds_0_0_chr20_33593261_f 0 +
37 | chr21 32707032 32707192 CCDS13614.1_cds_0_0_chr21_32707033_f 0 +
38 | chr21 32869641 32870022 CCDS13615.1_cds_0_0_chr21_32869642_r 0 -
39 | chr21 33321040 33322012 CCDS13620.1_cds_0_0_chr21_33321041_f 0 +
40 | chr21 33744994 33745040 CCDS13625.1_cds_0_0_chr21_33744995_r 0 -
41 | chr22 30120223 30120265 CCDS13897.1_cds_0_0_chr22_30120224_f 0 +
42 | chr22 30160419 30160661 CCDS13898.1_cds_0_0_chr22_30160420_r 0 -
43 | chr22 30665273 30665360 CCDS13901.1_cds_0_0_chr22_30665274_f 0 +
44 | chr22 30939054 30939266 CCDS13903.1_cds_0_0_chr22_30939055_r 0 -
45 | chr5 131424298 131424460 CCDS4149.1_cds_0_0_chr5_131424299_f 0 +
46 | chr5 131556601 131556672 CCDS4151.1_cds_0_0_chr5_131556602_r 0 -
47 | chr5 131621326 131621419 CCDS4152.1_cds_0_0_chr5_131621327_f 0 +
48 | chr5 131847541 131847666 CCDS4155.1_cds_0_0_chr5_131847542_r 0 -
49 | chr6 108299600 108299744 CCDS5061.1_cds_0_0_chr6_108299601_r 0 -
50 | chr6 108594662 108594687 CCDS5063.1_cds_0_0_chr6_108594663_f 0 +
51 | chr6 108640045 108640151 CCDS5064.1_cds_0_0_chr6_108640046_r 0 -
52 | chr6 108722976 108723115 CCDS5067.1_cds_0_0_chr6_108722977_f 0 +
53 | chr7 113660517 113660685 CCDS5760.1_cds_0_0_chr7_113660518_f 0 +
54 | chr7 116512159 116512389 CCDS5771.1_cds_0_0_chr7_116512160_r 0 -
55 | chr7 116714099 116714152 CCDS5773.1_cds_0_0_chr7_116714100_f 0 +
56 | chr7 116945541 116945787 CCDS5774.1_cds_0_0_chr7_116945542_r 0 -
57 | chr8 118881131 118881317 CCDS6324.1_cds_0_0_chr8_118881132_r 0 -
58 | chr9 128764156 128764189 CCDS6914.1_cds_0_0_chr9_128764157_f 0 +
59 | chr9 128787519 128789136 CCDS6915.1_cds_0_0_chr9_128787520_r 0 -
60 | chr9 128882427 128882523 CCDS6917.1_cds_0_0_chr9_128882428_f 0 +
61 | chr9 128937229 128937445 CCDS6919.1_cds_0_0_chr9_128937230_r 0 -
62 | chrX 122745047 122745924 CCDS14606.1_cds_0_0_chrX_122745048_f 0 +
63 | chrX 152648964 152649196 CCDS14733.1_cds_0_0_chrX_152648965_r 0 -
64 | chrX 152691446 152691471 CCDS14735.1_cds_0_0_chrX_152691447_f 0 +
65 | chrX 152694029 152694263 CCDS14736.1_cds_0_0_chrX_152694030_r 0 -
66 |
--------------------------------------------------------------------------------
/bioblend/_tests/data/Galaxy-History-Test-history-for-export.tar.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/galaxyproject/bioblend/01909298c19bae6165a1c1f51cbf8c92be5ac55e/bioblend/_tests/data/Galaxy-History-Test-history-for-export.tar.gz
--------------------------------------------------------------------------------
/bioblend/_tests/data/paste_columns.ga:
--------------------------------------------------------------------------------
1 | {
2 | "a_galaxy_workflow": "true",
3 | "annotation": "",
4 | "format-version": "0.1",
5 | "name": "paste_columns",
6 | "steps": {
7 | "0": {
8 | "annotation": "",
9 | "id": 0,
10 | "input_connections": {},
11 | "inputs": [
12 | {
13 | "description": "",
14 | "name": "Input 1"
15 | }
16 | ],
17 | "name": "Input dataset",
18 | "outputs": [],
19 | "position": {
20 | "left": 10,
21 | "top": 10
22 | },
23 | "tool_errors": null,
24 | "tool_id": null,
25 | "tool_state": "{\"name\": \"Input 1\"}",
26 | "tool_version": null,
27 | "type": "data_input",
28 | "user_outputs": []
29 | },
30 | "1": {
31 | "annotation": "",
32 | "id": 1,
33 | "input_connections": {},
34 | "inputs": [
35 | {
36 | "description": "",
37 | "name": "Input 2"
38 | }
39 | ],
40 | "name": "Input dataset",
41 | "outputs": [],
42 | "position": {
43 | "left": 10,
44 | "top": 130
45 | },
46 | "tool_errors": null,
47 | "tool_id": null,
48 | "tool_state": "{\"name\": \"Input 2\"}",
49 | "tool_version": null,
50 | "type": "data_input",
51 | "user_outputs": []
52 | },
53 | "2": {
54 | "annotation": "",
55 | "id": 2,
56 | "input_connections": {
57 | "input1": {
58 | "id": 0,
59 | "output_name": "output"
60 | },
61 | "input2": {
62 | "id": 1,
63 | "output_name": "output"
64 | }
65 | },
66 | "inputs": [],
67 | "name": "Paste",
68 | "outputs": [
69 | {
70 | "name": "out_file1",
71 | "type": "input"
72 | }
73 | ],
74 | "position": {
75 | "left": 230,
76 | "top": 10
77 | },
78 | "post_job_actions": {},
79 | "tool_errors": null,
80 | "tool_id": "Paste1",
81 | "tool_state": "{\"input2\": \"null\", \"__page__\": 0, \"input1\": \"null\", \"__rerun_remap_job_id__\": null, \"delimiter\": \"\\\"T\\\"\", \"chromInfo\": \"\\\"/home/simleo/hg/galaxy-dist/tool-data/shared/ucsc/chrom/?.len\\\"\"}",
82 | "tool_version": "1.0.0",
83 | "type": "tool",
84 | "user_outputs": []
85 | }
86 | }
87 | }
88 |
--------------------------------------------------------------------------------
/bioblend/_tests/data/paste_columns_collections.ga:
--------------------------------------------------------------------------------
1 | {
2 | "a_galaxy_workflow": "true",
3 | "annotation": "",
4 | "format-version": "0.1",
5 | "name": "paste_columns_collections",
6 | "steps": {
7 | "0": {
8 | "annotation": "",
9 | "id": 0,
10 | "input_connections": {},
11 | "inputs": [
12 | {
13 | "description": "",
14 | "name": "Input Dataset Collection"
15 | }
16 | ],
17 | "label": null,
18 | "name": "Input dataset collection",
19 | "outputs": [],
20 | "position": {
21 | "left": 119.5,
22 | "top": 200
23 | },
24 | "tool_errors": null,
25 | "tool_id": null,
26 | "tool_state": "{\"collection_type\": \"list\", \"name\": \"Input Dataset Collection\"}",
27 | "tool_version": null,
28 | "type": "data_collection_input",
29 | "user_outputs": [],
30 | "uuid": "88591325-c867-407a-a8df-df01430f2196"
31 | },
32 | "1": {
33 | "annotation": "",
34 | "id": 1,
35 | "input_connections": {},
36 | "inputs": [
37 | {
38 | "description": "",
39 | "name": "Input 2"
40 | }
41 | ],
42 | "label": null,
43 | "name": "Input dataset",
44 | "outputs": [],
45 | "position": {
46 | "left": 200,
47 | "top": 434
48 | },
49 | "tool_errors": null,
50 | "tool_id": null,
51 | "tool_state": "{\"name\": \"Input 2\"}",
52 | "tool_version": null,
53 | "type": "data_input",
54 | "user_outputs": [],
55 | "uuid": "64008e61-3304-4452-96ce-9564ec55cf9f"
56 | },
57 | "2": {
58 | "annotation": "",
59 | "id": 2,
60 | "input_connections": {
61 | "input1": {
62 | "id": 0,
63 | "output_name": "output"
64 | },
65 | "input2": {
66 | "id": 1,
67 | "output_name": "output"
68 | }
69 | },
70 | "inputs": [],
71 | "label": null,
72 | "name": "Paste",
73 | "outputs": [
74 | {
75 | "name": "out_file1",
76 | "type": "input"
77 | }
78 | ],
79 | "position": {
80 | "left": 420,
81 | "top": 314
82 | },
83 | "post_job_actions": {},
84 | "tool_errors": null,
85 | "tool_id": "Paste1",
86 | "tool_state": "{\"input2\": \"null\", \"__page__\": 0, \"input1\": \"null\", \"__rerun_remap_job_id__\": null, \"delimiter\": \"\\\"T\\\"\", \"chromInfo\": \"\\\"/home/simleo/hg/galaxy-dist/tool-data/shared/ucsc/chrom/?.len\\\"\"}",
87 | "tool_version": "1.0.0",
88 | "type": "tool",
89 | "user_outputs": [],
90 | "uuid": "b89ede53-9967-4138-8b1a-59799f8f5cb5"
91 | }
92 | },
93 | "uuid": "4b38804c-064d-4e84-aa02-ca1e0fe7cf8d"
94 | }
95 |
--------------------------------------------------------------------------------
/bioblend/_tests/data/select_first.ga:
--------------------------------------------------------------------------------
1 | {
2 | "a_galaxy_workflow": "true",
3 | "annotation": "",
4 | "format-version": "0.1",
5 | "name": "Select first",
6 | "steps": {
7 | "0": {
8 | "annotation": "",
9 | "content_id": null,
10 | "errors": null,
11 | "id": 0,
12 | "input_connections": {},
13 | "inputs": [],
14 | "label": null,
15 | "name": "Input dataset",
16 | "outputs": [],
17 | "tool_id": null,
18 | "tool_state": "{\"optional\": false}",
19 | "tool_version": null,
20 | "type": "data_input",
21 | "uuid": "8d9e7e74-721a-4fe6-8e7f-f7f85707fbcb",
22 | "workflow_outputs": []
23 | },
24 | "1": {
25 | "annotation": "",
26 | "content_id": null,
27 | "errors": null,
28 | "id": 1,
29 | "input_connections": {},
30 | "inputs": [],
31 | "label": null,
32 | "name": "Input parameter",
33 | "outputs": [],
34 | "tool_id": null,
35 | "tool_state": "{\"parameter_type\": \"integer\", \"optional\": false}",
36 | "tool_version": null,
37 | "type": "parameter_input",
38 | "uuid": "fbb896b8-5406-4ee2-b4e6-bcfc26a9f57b",
39 | "workflow_outputs": []
40 | },
41 | "2": {
42 | "annotation": "",
43 | "content_id": "Show beginning1",
44 | "errors": null,
45 | "id": 2,
46 | "input_connections": {
47 | "input": {
48 | "id": 0,
49 | "output_name": "output"
50 | },
51 | "lineNum": {
52 | "id": 1,
53 | "output_name": "output"
54 | }
55 | },
56 | "inputs": [
57 | {
58 | "description": "runtime parameter for tool Select first",
59 | "name": "input"
60 | }
61 | ],
62 | "label": null,
63 | "name": "Select first",
64 | "outputs": [
65 | {
66 | "name": "out_file1",
67 | "type": "input"
68 | }
69 | ],
70 | "post_job_actions": {},
71 | "tool_id": "Show beginning1",
72 | "tool_state": "{\"header\": \"false\", \"input\": {\"__class__\": \"RuntimeValue\"}, \"lineNum\": {\"__class__\": \"ConnectedValue\"}, \"__page__\": null, \"__rerun_remap_job_id__\": null}",
73 | "tool_version": "1.0.0",
74 | "type": "tool",
75 | "uuid": "809c3a0a-1a95-414d-ae64-bb3e19df7b99",
76 | "workflow_outputs": [
77 | {
78 | "label": null,
79 | "output_name": "out_file1",
80 | "uuid": "50c8e9c9-5ede-4a17-801f-21376f053dd4"
81 | }
82 | ]
83 | },
84 | "3": {
85 | "annotation": "",
86 | "content_id": "Paste1",
87 | "errors": null,
88 | "id": 3,
89 | "input_connections": {
90 | "input1": {
91 | "id": 2,
92 | "output_name": "out_file1"
93 | },
94 | "input2": {
95 | "id": 2,
96 | "output_name": "out_file1"
97 | }
98 | },
99 | "inputs": [],
100 | "label": null,
101 | "name": "Paste",
102 | "outputs": [
103 | {
104 | "name": "out_file1",
105 | "type": "input"
106 | }
107 | ],
108 | "post_job_actions": {
109 | "RenameDatasetActionout_file1": {
110 | "action_arguments": {
111 | "newname": "paste_output"
112 | },
113 | "action_type": "RenameDatasetAction",
114 | "output_name": "out_file1"
115 | }
116 | },
117 | "tool_id": "Paste1",
118 | "tool_state": "{\"delimiter\": \"T\", \"input1\": {\"__class__\": \"ConnectedValue\"}, \"input2\": {\"__class__\": \"ConnectedValue\"}, \"__page__\": null, \"__rerun_remap_job_id__\": null}",
119 | "tool_version": "1.0.0",
120 | "type": "tool",
121 | "uuid": "41d799a6-bde2-46cb-a206-caa7621151a6",
122 | "workflow_outputs": [
123 | {
124 | "label": null,
125 | "output_name": "out_file1",
126 | "uuid": "74584c3d-bf88-4bb3-846a-bfcb9ff375f8"
127 | }
128 | ]
129 | }
130 | },
131 | "tags": [],
132 | "uuid": "d38ebab1-6534-4702-a7c3-7ca44dd9d1ae",
133 | "version": 1
134 | }
--------------------------------------------------------------------------------
/bioblend/_tests/data/test_workflow_pause.ga:
--------------------------------------------------------------------------------
1 | {
2 | "a_galaxy_workflow": "true",
3 | "annotation": "",
4 | "format-version": "0.1",
5 | "name": "test_workflow_pause",
6 | "steps": {
7 | "0": {
8 | "annotation": "",
9 | "id": 0,
10 | "input_connections": {},
11 | "inputs": [
12 | {
13 | "description": "",
14 | "name": "Input Dataset"
15 | }
16 | ],
17 | "name": "Input dataset",
18 | "outputs": [],
19 | "position": {
20 | "left": 199.9201512336731,
21 | "top": 251.4826512336731
22 | },
23 | "tool_errors": null,
24 | "tool_id": null,
25 | "tool_state": "{\"name\": \"Input Dataset\"}",
26 | "tool_version": null,
27 | "type": "data_input",
28 | "user_outputs": []
29 | },
30 | "1": {
31 | "annotation": "",
32 | "id": 1,
33 | "input_connections": {
34 | "input1": {
35 | "id": 0,
36 | "output_name": "output"
37 | }
38 | },
39 | "inputs": [],
40 | "name": "Concatenate datasets (for test workflows)",
41 | "outputs": [
42 | {
43 | "name": "out_file1",
44 | "type": "input"
45 | }
46 | ],
47 | "position": {
48 | "left": 516.7257237434387,
49 | "top": 187.28126573562622
50 | },
51 | "post_job_actions": {},
52 | "tool_errors": null,
53 | "tool_id": "cat",
54 | "tool_state": "{\"__page__\": 0, \"__rerun_remap_job_id__\": null, \"input1\": \"null\", \"queries\": \"[]\"}",
55 | "tool_version": "1.0.0",
56 | "type": "tool",
57 | "user_outputs": []
58 | },
59 | "2": {
60 | "annotation": "",
61 | "id": 2,
62 | "input_connections": {
63 | "input": {
64 | "id": 1,
65 | "output_name": "out_file1"
66 | }
67 | },
68 | "inputs": [
69 | {
70 | "description": "",
71 | "name": "Pause for Dataset Review"
72 | }
73 | ],
74 | "name": "Pause for dataset review",
75 | "outputs": [],
76 | "position": {
77 | "left": 862.715301990509,
78 | "top": 197.28126573562622
79 | },
80 | "tool_errors": null,
81 | "tool_id": null,
82 | "tool_state": "{\"name\": \"Pause for Dataset Review\"}",
83 | "tool_version": null,
84 | "type": "pause",
85 | "user_outputs": []
86 | },
87 | "3": {
88 | "annotation": "",
89 | "id": 3,
90 | "input_connections": {
91 | "input1": {
92 | "id": 2,
93 | "output_name": "output"
94 | }
95 | },
96 | "inputs": [],
97 | "name": "Concatenate datasets (for test workflows)",
98 | "outputs": [
99 | {
100 | "name": "out_file1",
101 | "type": "input"
102 | }
103 | ],
104 | "position": {
105 | "left": 1181.9722595214844,
106 | "top": 181.52084350585938
107 | },
108 | "post_job_actions": {},
109 | "tool_errors": null,
110 | "tool_id": "cat1",
111 | "tool_state": "{\"__page__\": 0, \"__rerun_remap_job_id__\": null, \"input1\": \"null\", \"queries\": \"[]\"}",
112 | "tool_version": "1.0.0",
113 | "type": "tool",
114 | "user_outputs": []
115 | }
116 | },
117 | "uuid": "9058956e-76b6-4909-bab3-c12b2cc394c7"
118 | }
--------------------------------------------------------------------------------
/bioblend/_tests/data/workflow_with_parameter_input.ga:
--------------------------------------------------------------------------------
1 | {
2 | "a_galaxy_workflow": "true",
3 | "annotation": "",
4 | "format-version": "0.1",
5 | "name": "Workflow with parameter input",
6 | "steps": {
7 | "0": {
8 | "annotation": "",
9 | "content_id": null,
10 | "errors": null,
11 | "id": 0,
12 | "input_connections": {},
13 | "inputs": [],
14 | "label": null,
15 | "name": "Input parameter",
16 | "outputs": [],
17 | "position": {
18 | "left": 184,
19 | "top": 251.5
20 | },
21 | "tool_id": null,
22 | "tool_state": "{\"optional\": false, \"parameter_type\": \"text\"}",
23 | "tool_version": null,
24 | "type": "parameter_input",
25 | "uuid": "23e0b1bb-908c-4077-a75a-6898029ce21d",
26 | "workflow_outputs": [
27 | {
28 | "label": null,
29 | "output_name": "output",
30 | "uuid": "da74dde6-e1f4-4602-b778-748670912508"
31 | }
32 | ]
33 | },
34 | "1": {
35 | "annotation": "",
36 | "content_id": null,
37 | "errors": null,
38 | "id": 1,
39 | "input_connections": {},
40 | "inputs": [],
41 | "label": null,
42 | "name": "Input dataset",
43 | "outputs": [],
44 | "position": {
45 | "left": 186,
46 | "top": 342.5
47 | },
48 | "tool_id": null,
49 | "tool_state": "{\"optional\": false}",
50 | "tool_version": null,
51 | "type": "data_input",
52 | "uuid": "8c014439-d785-45d8-9c65-2453f31d28c7",
53 | "workflow_outputs": [
54 | {
55 | "label": null,
56 | "output_name": "output",
57 | "uuid": "50d9c228-0e0e-4cef-b56b-71a015882f5f"
58 | }
59 | ]
60 | },
61 | "2": {
62 | "annotation": "",
63 | "content_id": "addValue",
64 | "errors": null,
65 | "id": 2,
66 | "input_connections": {
67 | "exp": {
68 | "id": 0,
69 | "output_name": "output"
70 | },
71 | "input": {
72 | "id": 1,
73 | "output_name": "output"
74 | }
75 | },
76 | "inputs": [
77 | {
78 | "description": "runtime parameter for tool Add column",
79 | "name": "input"
80 | }
81 | ],
82 | "label": null,
83 | "name": "Add column",
84 | "outputs": [
85 | {
86 | "name": "out_file1",
87 | "type": "input"
88 | }
89 | ],
90 | "position": {
91 | "left": 546,
92 | "top": 254.5
93 | },
94 | "post_job_actions": {},
95 | "tool_id": "addValue",
96 | "tool_state": "{\"__page__\": null, \"input\": {\"__class__\": \"RuntimeValue\"}, \"__rerun_remap_job_id__\": null, \"exp\": {\"__class__\": \"ConnectedValue\"}, \"iterate\": \"no\"}",
97 | "tool_version": "1.0.0",
98 | "type": "tool",
99 | "uuid": "224d22f3-bb6d-444d-ae1d-744d6e035fbc",
100 | "workflow_outputs": [
101 | {
102 | "label": null,
103 | "output_name": "out_file1",
104 | "uuid": "e19e9691-0fe7-43c9-8d90-28a7b901691f"
105 | }
106 | ]
107 | }
108 | },
109 | "tags": [],
110 | "uuid": "90661668-5367-4f75-89b6-44a2ecb062df",
111 | "version": 1
112 | }
--------------------------------------------------------------------------------
/bioblend/_tests/pytest_galaxy_test_wrapper.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """Wrapper around pytest to execute the bioblend Galaxy test suite against fixed instance.
3 |
4 | By default all Galaxy tests will run but a smaller subset can be executed by setting
5 | the environment variable ``BIOBLEND_TEST_SUITE`` to ``quick``.
6 | """
7 | import os
8 | import sys
9 | from typing import (
10 | NoReturn,
11 | Optional,
12 | )
13 |
14 | try:
15 | import pytest
16 | except ImportError:
17 | pytest = None
18 |
19 | DIRECTORY = os.path.abspath(os.path.dirname(__file__))
20 | BIOBLEND_TEST_SUITE = os.environ.get("BIOBLEND_TEST_SUITE", "full")
21 |
22 | quick_tests = [
23 |     "TestGalaxyRoles.py",
25 | "TestGalaxyUsers.py",
26 | "TestGalaxyToolData.py",
27 |     "TestGalaxyTools.py::TestGalaxyTools::test_get_tools",  # Test a single tool command.
28 | ]
29 |
30 |
31 | def main(args: Optional[list[str]] = None) -> NoReturn:
32 | """Entry point that delegates to pytest.main."""
33 | if pytest is None:
34 | raise Exception("pytest is required to use this script.")
35 | if args is None:
36 | args = sys.argv[1:]
37 | if len(args) < 2:
38 | if BIOBLEND_TEST_SUITE == "full":
39 |             args.append(DIRECTORY)
40 | else:
41 | for quick_test in quick_tests:
42 | args.append(os.path.join(DIRECTORY, quick_test))
43 | sys.exit(pytest.main(args))
44 |
45 |
46 | if __name__ == "__main__":
47 | main()
48 |
--------------------------------------------------------------------------------
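A minimal sketch of driving the wrapper above programmatically. ``BIOBLEND_TEST_SUITE`` is read at import time, so it must be set before the module is imported; the snippet assumes a Galaxy instance is configured via ``BIOBLEND_GALAXY_URL`` and ``BIOBLEND_GALAXY_API_KEY``.

```python
import os

# Must be set before the import below: the module reads it at import time.
os.environ["BIOBLEND_TEST_SUITE"] = "quick"

from bioblend._tests import pytest_galaxy_test_wrapper

# With fewer than two arguments, main() appends the quick-test paths and
# then exits with pytest's return code (it never returns).
pytest_galaxy_test_wrapper.main(["-x"])
```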
/bioblend/_tests/template_galaxy.ini:
--------------------------------------------------------------------------------
1 | [server:main]
2 |
3 | use = egg:Paste#http
4 | port = ${GALAXY_PORT:-8080}
5 |
6 | [app:main]
7 |
8 | paste.app_factory = galaxy.web.buildapp:app_factory
9 | database_connection = $DATABASE_CONNECTION
10 | file_path = ${TEMP_DIR:-${GALAXY_DIR}/database}/files
11 | new_file_path = ${TEMP_DIR:-${GALAXY_DIR}/database}/tmp
12 | tool_config_file = ${GALAXY_DIR}/config/tool_conf.xml.sample,${TEMP_DIR:-${GALAXY_DIR}}/config/shed_tool_conf.xml,${GALAXY_DIR}/test/functional/tools/samples_tool_conf.xml
13 | shed_tool_config_file = ${TEMP_DIR:-${GALAXY_DIR}}/config/shed_tool_conf.xml
14 | conda_auto_init = True
15 | job_working_directory = ${TEMP_DIR:-${GALAXY_DIR}/database}/jobs_directory
16 | allow_library_path_paste = True
17 | admin_users = $BIOBLEND_GALAXY_USER_EMAIL
18 | allow_user_deletion = True
19 | allow_user_dataset_purge = True
20 | enable_beta_workflow_modules = True
21 | master_api_key = $BIOBLEND_GALAXY_MASTER_API_KEY
22 | enable_quotas = True
23 | cleanup_job = onsuccess
24 |
--------------------------------------------------------------------------------
/bioblend/_tests/template_galaxy.yml:
--------------------------------------------------------------------------------
1 | gravity:
2 | galaxy_root: ${GALAXY_DIR}
3 | gunicorn:
4 | bind: localhost:${GALAXY_PORT:-8080}
5 |
6 | galaxy:
7 | managed_config_dir: ${TEMP_DIR:-${GALAXY_DIR}}/config
8 | data_dir: ${TEMP_DIR:-${GALAXY_DIR}}/database
9 | database_connection: $DATABASE_CONNECTION
10 | tool_config_file: ${GALAXY_DIR}/config/tool_conf.xml.sample,${TEMP_DIR:-${GALAXY_DIR}}/config/shed_tool_conf.xml,${GALAXY_DIR}/${TEST_TOOLS_CONF_FILE}
11 | # Don't use $TEMP_DIR for tool_dependency_dir to save time on local testing
12 | tool_dependency_dir: ${GALAXY_DIR}/database/dependencies
13 | allow_path_paste: true
14 | admin_users: $BIOBLEND_GALAXY_USER_EMAIL
15 | allow_user_deletion: true
16 | enable_beta_workflow_modules: true
17 | master_api_key: $BIOBLEND_GALAXY_MASTER_API_KEY
18 | enable_quotas: true
19 | cleanup_job: onsuccess
20 | enable_celery_tasks: true
21 |
--------------------------------------------------------------------------------
/bioblend/_tests/test_util.py:
--------------------------------------------------------------------------------
1 | """General support infrastructure not tied to any particular test."""
2 |
3 | import os
4 | import random
5 | import string
6 | import unittest
7 | from typing import (
8 | Callable,
9 | Optional,
10 | )
11 |
12 | import requests
13 |
14 | import bioblend.galaxy
15 |
16 | NO_GALAXY_MESSAGE = "Externally configured Galaxy required, but not found. Set BIOBLEND_GALAXY_URL and BIOBLEND_GALAXY_API_KEY to run this test."
17 |
18 |
19 | def random_string(length: int = 8) -> str:
20 | return "".join(random.choice(string.ascii_lowercase) for _ in range(length))
21 |
22 |
23 | def is_site_up(url: str) -> bool:
24 | try:
25 | response = requests.get(url, timeout=10)
26 | return response.status_code == 200
27 | except Exception:
28 | return False
29 |
30 |
31 | def skip_unless_toolshed() -> Callable:
32 | """Decorate tests with this to skip the test if a URL for a ToolShed
33 | to run the tests is not provided.
34 | """
35 | if "BIOBLEND_TOOLSHED_URL" not in os.environ:
36 | return unittest.skip(
37 | "Externally configured ToolShed required, but not found. Set BIOBLEND_TOOLSHED_URL (e.g. to https://testtoolshed.g2.bx.psu.edu/ ) to run this test."
38 | )
39 | toolshed_url = os.environ["BIOBLEND_TOOLSHED_URL"]
40 | if not is_site_up(toolshed_url):
41 | return unittest.skip(f"Configured ToolShed [{toolshed_url}] appears to be down")
42 | return lambda f: f
43 |
44 |
45 | def skip_unless_galaxy(min_release: Optional[str] = None) -> Callable:
46 | """Decorate tests with this to skip the test if Galaxy is not
47 | configured.
48 | """
49 | if min_release is not None:
50 | galaxy_release = os.environ.get("GALAXY_VERSION", None)
51 | if galaxy_release is not None and galaxy_release != "dev":
52 | if not galaxy_release.startswith("release_"):
53 | raise ValueError("The value of GALAXY_VERSION environment variable should start with 'release_'")
54 | if not min_release.startswith("release_"):
55 | raise Exception("min_release should start with 'release_'")
56 | if galaxy_release[8:] < min_release[8:]:
57 | return unittest.skip(f"Testing on Galaxy {galaxy_release}, but need {min_release} to run this test.")
58 |
59 | if "BIOBLEND_GALAXY_URL" not in os.environ:
60 | return unittest.skip(NO_GALAXY_MESSAGE)
61 |
62 | if "BIOBLEND_GALAXY_API_KEY" not in os.environ and "BIOBLEND_GALAXY_MASTER_API_KEY" in os.environ:
63 | galaxy_url = os.environ["BIOBLEND_GALAXY_URL"]
64 | galaxy_master_api_key = os.environ["BIOBLEND_GALAXY_MASTER_API_KEY"]
65 | gi = bioblend.galaxy.GalaxyInstance(galaxy_url, key=galaxy_master_api_key)
66 |
67 | if "BIOBLEND_GALAXY_USER_EMAIL" in os.environ:
68 | galaxy_user_email = os.environ["BIOBLEND_GALAXY_USER_EMAIL"]
69 | else:
70 | galaxy_user_email = f"{random_string()}@localhost.localdomain"
71 |
72 | galaxy_user_id = None
73 | for user in gi.users.get_users():
74 | if user["email"] == galaxy_user_email:
75 | galaxy_user_id = user["id"]
76 | break
77 |
78 | config = gi.config.get_config()
79 | if galaxy_user_id is None:
80 | if config.get("use_remote_user", False):
81 | new_user = gi.users.create_remote_user(galaxy_user_email)
82 | else:
83 | galaxy_user = galaxy_user_email.split("@", 1)[0]
84 | galaxy_password = random_string(20)
85 |
86 | # Create a new user
87 | new_user = gi.users.create_local_user(galaxy_user, galaxy_user_email, galaxy_password)
88 | galaxy_user_id = new_user["id"]
89 |
90 | if config["version_major"] >= "21.01":
91 | api_key = gi.users.get_or_create_user_apikey(galaxy_user_id)
92 | else:
93 | api_key = gi.users.get_user_apikey(galaxy_user_id)
94 | if not api_key or api_key == "Not available.":
95 | api_key = gi.users.create_user_apikey(galaxy_user_id)
96 | os.environ["BIOBLEND_GALAXY_API_KEY"] = api_key
97 |
98 | if "BIOBLEND_GALAXY_API_KEY" not in os.environ:
99 | return unittest.skip(NO_GALAXY_MESSAGE)
100 |
101 | return lambda f: f
102 |
103 |
104 | def skip_unless_tool(tool_id: str) -> Callable:
105 |     """Decorate a Galaxy test method as requiring a specific tool;
106 |     skip the test case if the tool is unavailable.
107 | """
108 |
109 | def method_wrapper(method):
110 | def wrapped_method(has_gi, *args, **kwargs):
111 | tools = has_gi.gi.tools.get_tools()
112 | # In panels by default, so flatten out sections...
113 | tool_ids = [_["id"] for _ in tools]
114 | if tool_id not in tool_ids:
115 | raise unittest.SkipTest(f"Externally configured Galaxy instance requires tool {tool_id} to run test.")
116 |
117 | return method(has_gi, *args, **kwargs)
118 |
119 | # Must preserve method name so pytest can detect and report tests by
120 | # name.
121 | wrapped_method.__name__ = method.__name__
122 | return wrapped_method
123 |
124 | return method_wrapper
125 |
126 |
127 | def get_abspath(path: str) -> str:
128 | return os.path.abspath(os.path.join(os.path.dirname(__file__), path))
129 |
--------------------------------------------------------------------------------
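A minimal sketch of how the decorators above combine on a test class; the class and assertions are hypothetical, but the pattern mirrors the Test* modules in this directory.

```python
from bioblend._tests import (
    GalaxyTestBase,
    test_util,
)


# Skipped entirely unless a Galaxy >= release_22.01 is configured via the
# BIOBLEND_GALAXY_* environment variables.
@test_util.skip_unless_galaxy("release_22.01")
class TestMyFeature(GalaxyTestBase.GalaxyTestBase):
    # Skipped at call time if the 'cat1' tool is not installed.
    @test_util.skip_unless_tool("cat1")
    def test_show_cat1(self):
        tool = self.gi.tools.show_tool("cat1")
        assert tool["id"] == "cat1"
```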
/bioblend/config.py:
--------------------------------------------------------------------------------
1 | import configparser
2 | import os
3 | from typing import (
4 | IO,
5 | Optional,
6 | )
7 |
8 | BioBlendConfigPath = "/etc/bioblend.cfg"
9 | BioBlendConfigLocations = [BioBlendConfigPath]
10 | UserConfigPath = os.path.join(os.path.expanduser("~"), ".bioblend")
11 | BioBlendConfigLocations.append(UserConfigPath)
12 |
13 |
14 | class Config(configparser.ConfigParser):
15 | """
16 | BioBlend allows library-wide configuration to be set in external files.
17 | These configuration files can be used to specify access keys, for example.
18 | By default we use two locations for the BioBlend configurations:
19 |
20 | * System wide: ``/etc/bioblend.cfg``
21 | * Individual user: ``~/.bioblend`` (which works on both Windows and Unix)
22 | """
23 |
24 | def __init__(self, path: Optional[str] = None, fp: Optional[IO[str]] = None, do_load: bool = True) -> None:
25 | super().__init__({"working_dir": "/mnt/pyami", "debug": "0"})
26 | if do_load:
27 | if path:
28 | self.read([path])
29 | elif fp:
30 | self.read_file(fp)
31 | else:
32 | self.read(BioBlendConfigLocations)
33 |
--------------------------------------------------------------------------------
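A minimal sketch of loading settings from an explicit file instead of the default ``/etc/bioblend.cfg`` / ``~/.bioblend`` locations; the path, section and option names are hypothetical.

```python
from bioblend.config import Config

cfg = Config(path="/tmp/bioblend_test.cfg")  # hypothetical path
if cfg.has_section("Galaxy"):                # hypothetical section name
    print(cfg.get("Galaxy", "key"))

# The constructor defaults are always available:
print(cfg.get("DEFAULT", "working_dir"))  # -> /mnt/pyami
```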
/bioblend/galaxy/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | A base representation of an instance of Galaxy
3 | """
4 |
5 | from typing import Optional
6 |
7 | from bioblend.galaxy import (
8 | config,
9 | container_resolution,
10 | dataset_collections,
11 | datasets,
12 | datatypes,
13 | folders,
14 | forms,
15 | ftpfiles,
16 | genomes,
17 | groups,
18 | histories,
19 | invocations,
20 | jobs,
21 | libraries,
22 | quotas,
23 | roles,
24 | tool_data,
25 | tool_dependencies,
26 | tools,
27 | toolshed,
28 | users,
29 | visual,
30 | workflows,
31 | )
32 | from bioblend.galaxyclient import GalaxyClient
33 |
34 |
35 | class GalaxyInstance(GalaxyClient):
36 | def __init__(
37 | self,
38 | url: str,
39 | key: Optional[str] = None,
40 | email: Optional[str] = None,
41 | password: Optional[str] = None,
42 | *,
43 | verify: bool = True,
44 | user_agent: Optional[str] = None,
45 | ) -> None:
46 | """
47 | A base representation of a connection to a Galaxy instance, identified
48 | by the server URL and user credentials.
49 |
50 | After you have created a ``GalaxyInstance`` object, access various
51 | modules via the class fields. For example, to work with histories and
52 | get a list of all the user's histories, the following should be done::
53 |
54 | from bioblend import galaxy
55 |
56 | gi = galaxy.GalaxyInstance(url='http://127.0.0.1:8000', key='your_api_key')
57 |
58 | hl = gi.histories.get_histories()
59 |
60 | :type url: str
61 | :param url: A FQDN or IP for a given instance of Galaxy. For example:
62 | http://127.0.0.1:8080 . If a Galaxy instance is served under
63 | a prefix (e.g., http://127.0.0.1:8080/galaxy/), supply the
64 | entire URL including the prefix (note that the prefix must
65 | end with a slash). If a Galaxy instance has HTTP Basic
66 | authentication with username and password, then the
67 | credentials should be included in the URL, e.g.
68 | http://user:pass@host:port/galaxy/
69 |
70 | :type key: str
71 | :param key: User's API key for the given instance of Galaxy, obtained
72 | from the user preferences. If a key is not supplied, an
73 |                     email address and password must be supplied, and the key will
74 | automatically be created for the user.
75 |
76 | :type email: str
77 | :param email: Galaxy e-mail address corresponding to the user.
78 | Ignored if key is supplied directly.
79 |
80 | :type password: str
81 | :param password: Password of Galaxy account corresponding to the above
82 | e-mail address. Ignored if key is supplied directly.
83 |
84 | :param verify: Whether to verify the server's TLS certificate
85 | :type verify: bool
86 | """
87 | super().__init__(url, key=key, email=email, password=password, verify=verify, user_agent=user_agent)
88 | self.libraries = libraries.LibraryClient(self)
89 | self.histories = histories.HistoryClient(self)
90 | self.workflows = workflows.WorkflowClient(self)
91 | self.invocations = invocations.InvocationClient(self)
92 | self.datasets = datasets.DatasetClient(self)
93 | self.dataset_collections = dataset_collections.DatasetCollectionClient(self)
94 | self.users = users.UserClient(self)
95 | self.genomes = genomes.GenomeClient(self)
96 | self.tools = tools.ToolClient(self)
97 | self.toolshed = toolshed.ToolShedClient(self)
98 | self.toolShed = self.toolshed # historical alias
99 | self.config = config.ConfigClient(self)
100 | self.container_resolution = container_resolution.ContainerResolutionClient(self)
101 | self.visual = visual.VisualClient(self)
102 | self.quotas = quotas.QuotaClient(self)
103 | self.groups = groups.GroupsClient(self)
104 | self.roles = roles.RolesClient(self)
105 | self.datatypes = datatypes.DatatypesClient(self)
106 | self.jobs = jobs.JobsClient(self)
107 | self.forms = forms.FormsClient(self)
108 | self.ftpfiles = ftpfiles.FTPFilesClient(self)
109 | self.tool_data = tool_data.ToolDataClient(self)
110 | self.folders = folders.FoldersClient(self)
111 | self.tool_dependencies = tool_dependencies.ToolDependenciesClient(self)
112 |
113 | def __repr__(self) -> str:
114 | """
115 | A nicer representation of this GalaxyInstance object
116 | """
117 | return f"GalaxyInstance object for Galaxy at {self.base_url}"
118 |
--------------------------------------------------------------------------------
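A minimal sketch of the email/password variant described in the ``key`` parameter docs: when no API key is supplied, one is obtained (or created) for the given account. The URL and credentials are placeholders.

```python
from bioblend import galaxy

gi = galaxy.GalaxyInstance(
    url="https://galaxy.example.org/",
    email="user@example.org",
    password="placeholder-password",
)

# All module clients hang off the instance:
for history in gi.histories.get_histories():
    print(history["id"], history["name"])
```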
/bioblend/galaxy/config/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Contains possible interactions with Galaxy configuration.
3 |
4 | """
5 |
6 | from typing import TYPE_CHECKING
7 |
8 | from bioblend.galaxy.client import Client
9 |
10 | if TYPE_CHECKING:
11 | from bioblend.galaxy import GalaxyInstance
12 |
13 |
14 | class ConfigClient(Client):
15 | module = "configuration"
16 |
17 | def __init__(self, galaxy_instance: "GalaxyInstance") -> None:
18 | super().__init__(galaxy_instance)
19 |
20 | def get_config(self) -> dict:
21 | """
22 |         Get a dict of attributes about the Galaxy instance. More attributes will
23 | be present if the user is an admin.
24 |
25 |         :rtype: dict
26 |         :return: A dict of attributes.
27 | For example::
28 |
29 | {'allow_library_path_paste': False,
30 | 'allow_user_creation': True,
31 | 'allow_user_dataset_purge': True,
32 | 'allow_user_deletion': False,
33 | 'enable_unique_workflow_defaults': False,
34 | 'ftp_upload_dir': '/SOMEWHERE/galaxy/ftp_dir',
35 | 'ftp_upload_site': 'galaxy.com',
36 | 'library_import_dir': 'None',
37 | 'logo_url': None,
38 | 'support_url': 'https://galaxyproject.org/support',
39 | 'terms_url': None,
40 | 'user_library_import_dir': None,
41 | 'wiki_url': 'https://galaxyproject.org/'}
42 | """
43 | return self._get()
44 |
45 | def get_version(self) -> dict:
46 | """
47 | Get the current version of the Galaxy instance.
48 |
49 | :rtype: dict
50 | :return: Version of the Galaxy instance
51 | For example::
52 |
53 | {'extra': {}, 'version_major': '17.01'}
54 | """
55 | url = self.gi.url + "/version"
56 | return self._get(url=url)
57 |
58 | def whoami(self) -> dict:
59 | """
60 | Return information about the current authenticated user.
61 |
62 | :rtype: dict
63 | :return: Information about current authenticated user
64 | For example::
65 |
66 | {'active': True,
67 | 'deleted': False,
68 | 'email': 'user@example.org',
69 | 'id': '4aaaaa85aacc9caa',
70 | 'last_password_change': '2021-07-29T05:34:54.632345',
71 | 'model_class': 'User',
72 | 'username': 'julia'}
73 | """
74 | url = self.gi.url + "/whoami"
75 | return self._get(url=url)
76 |
77 | def reload_toolbox(self) -> None:
78 | """
79 | Reload the Galaxy toolbox (but not individual tools)
80 |
81 | :rtype: None
82 | :return: None
83 | """
84 | url = f"{self._make_url()}/toolbox"
85 | return self._put(url=url)
86 |
87 | def decode_id(self, encoded_id: str) -> int:
88 | """
89 | Decode a Galaxy-encoded hexadecimal id to obtain the unencoded integer.
90 |
91 | :rtype: int
92 | :return: Integer value equivalent to the provided Galaxy-encoded id
93 | """
94 | url = f"{self._make_url()}/decode/{encoded_id}"
95 | return self._get(url=url)["decoded_id"]
96 |
97 | def encode_id(self, decoded_id: int) -> str:
98 | """
99 | Encode an integer to a Galaxy-encoded hexadecimal id.
100 |
101 | :rtype: str
102 | :return: Galaxy-encoded hexadecimal id equivalent to the provided integer
103 | """
104 | url = f"{self._make_url()}/encode/{decoded_id}"
105 | return self._get(url=url)["encoded_id"]
106 |
--------------------------------------------------------------------------------
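A minimal sketch tying the ConfigClient calls together; assumes ``gi`` is a connected ``bioblend.galaxy.GalaxyInstance`` and the encoded id is a placeholder.

```python
version = gi.config.get_version()  # e.g. {'extra': {}, 'version_major': '17.01'}
me = gi.config.whoami()
print(f"Galaxy {version['version_major']}, authenticated as {me['username']}")

# Round-trip an encoded id (decoding typically requires admin privileges):
decoded = gi.config.decode_id("f2db41e1fa331b3e")
assert gi.config.encode_id(decoded) == "f2db41e1fa331b3e"
```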
/bioblend/galaxy/datatypes/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Contains possible interactions with the Galaxy Datatype
3 | """
4 |
5 | from typing import TYPE_CHECKING
6 |
7 | from bioblend.galaxy.client import Client
8 |
9 | if TYPE_CHECKING:
10 | from bioblend.galaxy import GalaxyInstance
11 |
12 |
13 | class DatatypesClient(Client):
14 | module = "datatypes"
15 |
16 | def __init__(self, galaxy_instance: "GalaxyInstance") -> None:
17 | super().__init__(galaxy_instance)
18 |
19 | def get_datatypes(self, extension_only: bool = False, upload_only: bool = False) -> list[str]:
20 | """
21 | Get the list of all installed datatypes.
22 |
23 | :type extension_only: bool
24 | :param extension_only: Return only the extension rather than the datatype name
25 |
26 | :type upload_only: bool
27 | :param upload_only: Whether to return only datatypes which can be uploaded
28 |
29 | :rtype: list
30 | :return: A list of datatype names.
31 | For example::
32 |
33 | ['snpmatrix',
34 | 'snptest',
35 | 'tabular',
36 | 'taxonomy',
37 | 'twobit',
38 | 'txt',
39 | 'vcf',
40 | 'wig',
41 | 'xgmml',
42 | 'xml']
43 | """
44 |
45 | params: dict[str, bool] = {}
46 | if extension_only:
47 | params["extension_only"] = True
48 |
49 | if upload_only:
50 | params["upload_only"] = True
51 |
52 | return self._get(params=params)
53 |
54 | def get_sniffers(self) -> list[str]:
55 | """
56 | Get the list of all installed sniffers.
57 |
58 | :rtype: list
59 | :return: A list of sniffer names.
60 | For example::
61 |
62 | ['galaxy.datatypes.tabular:Vcf',
63 | 'galaxy.datatypes.binary:TwoBit',
64 | 'galaxy.datatypes.binary:Bam',
65 | 'galaxy.datatypes.binary:Sff',
66 | 'galaxy.datatypes.xml:Phyloxml',
67 | 'galaxy.datatypes.xml:GenericXml',
68 | 'galaxy.datatypes.sequence:Maf',
69 | 'galaxy.datatypes.sequence:Lav',
70 | 'galaxy.datatypes.sequence:csFasta']
71 | """
72 | url = self._make_url() + "/sniffers"
73 | return self._get(url=url)
74 |
--------------------------------------------------------------------------------
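A minimal sketch of the two DatatypesClient calls above; assumes ``gi`` is a connected ``bioblend.galaxy.GalaxyInstance``.

```python
extensions = gi.datatypes.get_datatypes(extension_only=True)
print("vcf supported:", "vcf" in extensions)

uploadable = gi.datatypes.get_datatypes(upload_only=True)
print(f"{len(uploadable)} datatypes accept uploads")

for sniffer in gi.datatypes.get_sniffers()[:5]:
    print(sniffer)  # e.g. 'galaxy.datatypes.tabular:Vcf'
```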
/bioblend/galaxy/forms/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Contains possible interactions with the Galaxy Forms
3 | """
4 |
5 | from typing import (
6 | Any,
7 | TYPE_CHECKING,
8 | )
9 |
10 | from bioblend.galaxy.client import Client
11 |
12 | if TYPE_CHECKING:
13 | from bioblend.galaxy import GalaxyInstance
14 |
15 |
16 | class FormsClient(Client):
17 | module = "forms"
18 |
19 | def __init__(self, galaxy_instance: "GalaxyInstance") -> None:
20 | super().__init__(galaxy_instance)
21 |
22 | def get_forms(self) -> list[dict[str, Any]]:
23 | """
24 | Get the list of all forms.
25 |
26 | :rtype: list
27 | :return: Displays a collection (list) of forms.
28 | For example::
29 |
30 | [{'id': 'f2db41e1fa331b3e',
31 | 'model_class': 'FormDefinition',
32 | 'name': 'First form',
33 | 'url': '/api/forms/f2db41e1fa331b3e'},
34 | {'id': 'ebfb8f50c6abde6d',
35 | 'model_class': 'FormDefinition',
36 | 'name': 'second form',
37 | 'url': '/api/forms/ebfb8f50c6abde6d'}]
38 | """
39 | return self._get()
40 |
41 | def show_form(self, form_id: str) -> dict[str, Any]:
42 | """
43 | Get details of a given form.
44 |
45 | :type form_id: str
46 | :param form_id: Encoded form ID
47 |
48 | :rtype: dict
49 | :return: A description of the given form.
50 | For example::
51 |
52 | {'desc': 'here it is ',
53 | 'fields': [],
54 | 'form_definition_current_id': 'f2db41e1fa331b3e',
55 | 'id': 'f2db41e1fa331b3e',
56 | 'layout': [],
57 | 'model_class': 'FormDefinition',
58 | 'name': 'First form',
59 | 'url': '/api/forms/f2db41e1fa331b3e'}
60 | """
61 | return self._get(id=form_id)
62 |
63 | def create_form(self, form_xml_text: str) -> list[dict[str, Any]]:
64 | """
65 | Create a new form.
66 |
67 | :type form_xml_text: str
68 |         :param form_xml_text: Form XML to create a form on the Galaxy instance
69 |
70 | :rtype: list of dicts
71 | :return: List with a single dictionary describing the created form
72 | """
73 | payload = {
74 | "xml_text": form_xml_text,
75 | }
76 | return self._post(payload=payload)
77 |
--------------------------------------------------------------------------------
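A minimal sketch of listing and inspecting forms; assumes ``gi`` is a ``GalaxyInstance`` connected with an admin API key (the forms API is admin-only on typical servers).

```python
for form in gi.forms.get_forms():
    details = gi.forms.show_form(form["id"])
    print(f"{details['name']}: {details['desc']} ({len(details['fields'])} fields)")
```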
/bioblend/galaxy/ftpfiles/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Contains possible interactions with the Galaxy FTP Files
3 | """
4 |
5 | from typing import TYPE_CHECKING
6 |
7 | from bioblend.galaxy.client import Client
8 |
9 | if TYPE_CHECKING:
10 | from bioblend.galaxy import GalaxyInstance
11 |
12 |
13 | class FTPFilesClient(Client):
14 | module = "ftp_files"
15 |
16 | def __init__(self, galaxy_instance: "GalaxyInstance") -> None:
17 | super().__init__(galaxy_instance)
18 |
19 | def get_ftp_files(self, deleted: bool = False) -> list[dict]:
20 | """
21 | Get a list of local files.
22 |
23 | :type deleted: bool
24 | :param deleted: Whether to include deleted files
25 |
26 | :rtype: list
27 | :return: A list of dicts with details on individual files on FTP
28 | """
29 | return self._get()
30 |
--------------------------------------------------------------------------------
/bioblend/galaxy/genomes/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Contains possible interactions with Galaxy Genomes
3 | """
4 |
5 | from typing import (
6 | Any,
7 | Literal,
8 | Optional,
9 | TYPE_CHECKING,
10 | )
11 |
12 | from bioblend.galaxy.client import Client
13 |
14 | if TYPE_CHECKING:
15 | from bioblend.galaxy import GalaxyInstance
16 |
17 |
18 | class GenomeClient(Client):
19 | module = "genomes"
20 |
21 | def __init__(self, galaxy_instance: "GalaxyInstance") -> None:
22 | super().__init__(galaxy_instance)
23 |
24 | def get_genomes(self) -> list:
25 | """
26 | Returns a list of installed genomes
27 |
28 | :rtype: list
29 | :return: List of installed genomes
30 | """
31 | genomes = self._get()
32 | return genomes
33 |
34 | def show_genome(
35 | self,
36 | id: str,
37 | num: Optional[str] = None,
38 | chrom: Optional[str] = None,
39 | low: Optional[str] = None,
40 | high: Optional[str] = None,
41 | ) -> dict[str, Any]:
42 | """
43 |         Returns information about a genome build
44 |
45 | :type id: str
46 | :param id: Genome build ID to use
47 |
48 | :type num: str
49 |         :param num: Number of entries to return
50 |
51 |         :type chrom: str
52 |         :param chrom: Chromosome name to query
53 |
54 |         :type low: str
55 |         :param low: Lower bound of the queried genomic region
56 |
57 |         :type high: str
58 |         :param high: Upper bound of the queried genomic region
59 |
60 | :rtype: dict
61 | :return: Information about the genome build
62 | """
63 | params: dict[str, str] = {}
64 | if num:
65 | params["num"] = num
66 | if chrom:
67 | params["chrom"] = chrom
68 | if low:
69 | params["low"] = low
70 | if high:
71 | params["high"] = high
72 | return self._get(id=id, params=params)
73 |
74 | def install_genome(
75 | self,
76 | func: Literal["download", "index"] = "download",
77 | source: Optional[str] = None,
78 | dbkey: Optional[str] = None,
79 | ncbi_name: Optional[str] = None,
80 | ensembl_dbkey: Optional[str] = None,
81 | url_dbkey: Optional[str] = None,
82 | indexers: Optional[list] = None,
83 | ) -> dict[str, Any]:
84 | """
85 | Download and/or index a genome.
86 |
87 | :type func: str
88 | :param func: Allowed values: 'download', Download and index; 'index', Index only
89 |
90 | :type source: str
91 | :param source: Data source for this build. Can be: UCSC, Ensembl, NCBI, URL
92 |
93 | :type dbkey: str
94 | :param dbkey: DB key of the build to download, ignored unless 'UCSC' is specified as the source
95 |
96 | :type ncbi_name: str
97 | :param ncbi_name: NCBI's genome identifier, ignored unless NCBI is specified as the source
98 |
99 | :type ensembl_dbkey: str
100 | :param ensembl_dbkey: Ensembl's genome identifier, ignored unless Ensembl is specified as the source
101 |
102 | :type url_dbkey: str
103 | :param url_dbkey: DB key to use for this build, ignored unless URL is specified as the source
104 |
105 | :type indexers: list
106 | :param indexers: POST array of indexers to run after downloading (indexers[] = first, indexers[] = second, ...)
107 |
108 | :rtype: dict
109 | :return: dict( status: 'ok', job: )
110 | If error:
111 | dict( status: 'error', error: )
112 | """
113 | payload: dict[str, Any] = {}
114 | if source:
115 | payload["source"] = source
116 | if func:
117 | payload["func"] = func
118 | if dbkey:
119 | payload["dbkey"] = dbkey
120 | if ncbi_name:
121 | payload["ncbi_name"] = ncbi_name
122 | if ensembl_dbkey:
123 | payload["ensembl_dbkey"] = ensembl_dbkey
124 | if url_dbkey:
125 | payload["url_dbkey"] = url_dbkey
126 | if indexers:
127 | payload["indexers"] = indexers
128 | return self._post(payload)
129 |
--------------------------------------------------------------------------------
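A minimal sketch of ``install_genome()`` for a UCSC build; assumes ``gi`` is connected with an admin API key, and ``hg19`` is only an example dbkey.

```python
result = gi.genomes.install_genome(func="download", source="UCSC", dbkey="hg19")
if result.get("status") == "ok":
    print("download/index job queued:", result.get("job"))
else:
    print("failed:", result.get("error"))

# Installed builds are then listed by:
print(gi.genomes.get_genomes())
```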
/bioblend/galaxy/groups/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Contains possible interactions with the Galaxy Groups
3 | """
4 |
5 | from typing import (
6 | Any,
7 | Optional,
8 | TYPE_CHECKING,
9 | )
10 |
11 | from bioblend.galaxy.client import Client
12 |
13 | if TYPE_CHECKING:
14 | from bioblend.galaxy import GalaxyInstance
15 |
16 |
17 | class GroupsClient(Client):
18 | module = "groups"
19 |
20 | def __init__(self, galaxy_instance: "GalaxyInstance") -> None:
21 | super().__init__(galaxy_instance)
22 |
23 | def get_groups(self) -> list[dict[str, Any]]:
24 | """
25 | Get all (not deleted) groups.
26 |
27 | :rtype: list
28 | :return: A list of dicts with details on individual groups.
29 | For example::
30 |
31 | [{'id': '33abac023ff186c2',
32 | 'model_class': 'Group',
33 | 'name': 'Listeria',
34 | 'url': '/api/groups/33abac023ff186c2'},
35 | {'id': '73187219cd372cf8',
36 | 'model_class': 'Group',
37 | 'name': 'LPN',
38 | 'url': '/api/groups/73187219cd372cf8'}]
39 | """
40 | return self._get()
41 |
42 | def show_group(self, group_id: str) -> dict[str, Any]:
43 | """
44 | Get details of a given group.
45 |
46 | :type group_id: str
47 | :param group_id: Encoded group ID
48 |
49 | :rtype: dict
50 | :return: A description of group
51 | For example::
52 |
53 | {'id': '33abac023ff186c2',
54 | 'model_class': 'Group',
55 | 'name': 'Listeria',
56 | 'roles_url': '/api/groups/33abac023ff186c2/roles',
57 | 'url': '/api/groups/33abac023ff186c2',
58 | 'users_url': '/api/groups/33abac023ff186c2/users'}
59 | """
60 | return self._get(id=group_id)
61 |
62 | def create_group(
63 | self, group_name: str, user_ids: Optional[list[str]] = None, role_ids: Optional[list[str]] = None
64 | ) -> list[dict[str, Any]]:
65 | """
66 | Create a new group.
67 |
68 | :type group_name: str
69 | :param group_name: A name for the new group
70 |
71 | :type user_ids: list
72 | :param user_ids: A list of encoded user IDs to add to the new group
73 |
74 | :type role_ids: list
75 | :param role_ids: A list of encoded role IDs to add to the new group
76 |
77 | :rtype: list
78 | :return: A (size 1) list with newly created group
79 | details, like::
80 |
81 | [{'id': '7c9636938c3e83bf',
82 | 'model_class': 'Group',
83 | 'name': 'My Group Name',
84 | 'url': '/api/groups/7c9636938c3e83bf'}]
85 | """
86 | if user_ids is None:
87 | user_ids = []
88 | if role_ids is None:
89 | role_ids = []
90 | payload = {"name": group_name, "user_ids": user_ids, "role_ids": role_ids}
91 | return self._post(payload)
92 |
93 | def update_group(
94 | self,
95 | group_id: str,
96 | group_name: Optional[str] = None,
97 | user_ids: Optional[list[str]] = None,
98 | role_ids: Optional[list[str]] = None,
99 | ) -> None:
100 | """
101 | Update a group.
102 |
103 | :type group_id: str
104 | :param group_id: Encoded group ID
105 |
106 | :type group_name: str
107 | :param group_name: A new name for the group. If None, the group name is
108 | not changed.
109 |
110 | :type user_ids: list
111 | :param user_ids: New list of encoded user IDs for the group. It will
112 | substitute the previous list of users (with [] if not specified)
113 |
114 | :type role_ids: list
115 | :param role_ids: New list of encoded role IDs for the group. It will
116 | substitute the previous list of roles (with [] if not specified)
117 |
118 | :rtype: None
119 | :return: None
120 | """
121 | if user_ids is None:
122 | user_ids = []
123 | if role_ids is None:
124 | role_ids = []
125 | payload = {"name": group_name, "user_ids": user_ids, "role_ids": role_ids}
126 | return self._put(payload=payload, id=group_id)
127 |
128 | def get_group_users(self, group_id: str) -> list[dict[str, Any]]:
129 | """
130 | Get the list of users associated to the given group.
131 |
132 | :type group_id: str
133 | :param group_id: Encoded group ID
134 |
135 | :rtype: list of dicts
136 | :return: List of group users' info
137 | """
138 | url = self._make_url(group_id) + "/users"
139 | return self._get(url=url)
140 |
141 | def get_group_roles(self, group_id: str) -> list[dict[str, Any]]:
142 | """
143 | Get the list of roles associated to the given group.
144 |
145 | :type group_id: str
146 | :param group_id: Encoded group ID
147 |
148 | :rtype: list of dicts
149 | :return: List of group roles' info
150 | """
151 | url = self._make_url(group_id) + "/roles"
152 | return self._get(url=url)
153 |
154 | def add_group_user(self, group_id: str, user_id: str) -> dict[str, Any]:
155 | """
156 | Add a user to the given group.
157 |
158 | :type group_id: str
159 | :param group_id: Encoded group ID
160 |
161 | :type user_id: str
162 | :param user_id: Encoded user ID to add to the group
163 |
164 | :rtype: dict
165 | :return: Added group user's info
166 | """
167 | url = "/".join((self._make_url(group_id), "users", user_id))
168 | return self._put(url=url)
169 |
170 | def add_group_role(self, group_id: str, role_id: str) -> dict[str, Any]:
171 | """
172 | Add a role to the given group.
173 |
174 | :type group_id: str
175 | :param group_id: Encoded group ID
176 |
177 | :type role_id: str
178 | :param role_id: Encoded role ID to add to the group
179 |
180 | :rtype: dict
181 | :return: Added group role's info
182 | """
183 | url = "/".join((self._make_url(group_id), "roles", role_id))
184 | return self._put(url=url)
185 |
186 | def delete_group_user(self, group_id: str, user_id: str) -> dict[str, Any]:
187 | """
188 | Remove a user from the given group.
189 |
190 | :type group_id: str
191 | :param group_id: Encoded group ID
192 |
193 | :type user_id: str
194 | :param user_id: Encoded user ID to remove from the group
195 |
196 | :rtype: dict
197 | :return: The user which was removed
198 | """
199 | url = "/".join((self._make_url(group_id), "users", user_id))
200 | return self._delete(url=url)
201 |
202 | def delete_group_role(self, group_id: str, role_id: str) -> dict[str, Any]:
203 | """
204 | Remove a role from the given group.
205 |
206 | :type group_id: str
207 | :param group_id: Encoded group ID
208 |
209 | :type role_id: str
210 | :param role_id: Encoded role ID to remove from the group
211 |
212 | :rtype: dict
213 | :return: The role which was removed
214 | """
215 | url = "/".join((self._make_url(group_id), "roles", role_id))
216 | return self._delete(url=url)
217 |
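A minimal end-to-end sketch of the client above; the URL, API key and the user/role IDs are placeholders, not values taken from this repository::

    from bioblend.galaxy import GalaxyInstance

    gi = GalaxyInstance("http://127.0.0.1:8080", key="YOUR_API_KEY")
    # create_group returns a size-1 list (see its docstring)
    group = gi.groups.create_group("my-lab")[0]
    gi.groups.add_group_user(group["id"], "2f94e8ae9edff68a")  # placeholder user ID
    gi.groups.add_group_role(group["id"], "f2db41e1fa331b3e")  # placeholder role ID
    # update_group() replaces the membership lists wholesale (resetting
    # them to [] if not specified), so pass the complete desired lists
    gi.groups.update_group(group["id"], group_name="my-lab-renamed",
                           user_ids=["2f94e8ae9edff68a"])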
--------------------------------------------------------------------------------
/bioblend/galaxy/objects/__init__.py:
--------------------------------------------------------------------------------
1 | from .galaxy_instance import GalaxyInstance # noqa: F401
2 |
3 | __all__ = ("GalaxyInstance",)
4 |
--------------------------------------------------------------------------------
/bioblend/galaxy/objects/galaxy_instance.py:
--------------------------------------------------------------------------------
1 | """
2 | A representation of a Galaxy instance based on object-oriented (OO) wrappers.
3 | """
4 |
5 | import time
6 | from collections.abc import Iterable
7 | from typing import Optional
8 |
9 | import bioblend
10 | import bioblend.galaxy
11 | from bioblend.galaxy.datasets import TERMINAL_STATES
12 | from . import (
13 | client,
14 | wrappers,
15 | )
16 |
17 |
18 | def _get_error_info(dataset: wrappers.Dataset) -> str:
19 | msg = dataset.id
20 | try:
21 | msg += f" ({dataset.name}): {dataset.misc_info}"
22 | except Exception: # avoid 'error while generating an error report'
23 | msg += ": error"
24 | return msg
25 |
26 |
27 | class GalaxyInstance:
28 | """
29 | A representation of an instance of Galaxy, identified by a URL and
30 | a user's API key.
31 |
32 | :type url: str
33 | :param url: a FQDN or IP for a given instance of Galaxy. For example:
34 | ``http://127.0.0.1:8080``
35 |
36 | :type api_key: str
37 | :param api_key: user's API key for the given instance of Galaxy, obtained
38 | from the Galaxy web UI.
39 |
40 | This is actually a factory class which instantiates the entity-specific
41 | clients.
42 |
43 | Example: get a list of all histories for a user with API key 'foo'::
44 |
45 | from bioblend.galaxy.objects import GalaxyInstance
46 | gi = GalaxyInstance('http://127.0.0.1:8080', api_key='foo')
47 | histories = gi.histories.list()
48 | """
49 |
50 | def __init__(
51 | self,
52 | url: str,
53 | api_key: Optional[str] = None,
54 | email: Optional[str] = None,
55 | password: Optional[str] = None,
56 | *,
57 | verify: bool = True,
58 | user_agent: Optional[str] = None,
59 | ) -> None:
60 | self.gi = bioblend.galaxy.GalaxyInstance(
61 | url, key=api_key, email=email, password=password, verify=verify, user_agent=user_agent
62 | )
63 | self.log = bioblend.log
64 | self.datasets = client.ObjDatasetClient(self)
65 | self.dataset_collections = client.ObjDatasetCollectionClient(self)
66 | self.histories = client.ObjHistoryClient(self)
67 | self.libraries = client.ObjLibraryClient(self)
68 | self.workflows = client.ObjWorkflowClient(self)
69 | self.invocations = client.ObjInvocationClient(self)
70 | self.tools = client.ObjToolClient(self)
71 | self.jobs = client.ObjJobClient(self)
72 |
73 | def _wait_datasets(
74 | self, datasets: Iterable[wrappers.Dataset], polling_interval: float, break_on_error: bool = True
75 | ) -> None:
76 | """
77 | Wait for datasets to come out of the pending states.
78 |
79 | :type datasets: :class:`~collections.abc.Iterable` of
80 | :class:`~.wrappers.Dataset`
81 | :param datasets: datasets
82 |
83 | :type polling_interval: float
84 | :param polling_interval: polling interval in seconds
85 |
86 | :type break_on_error: bool
87 | :param break_on_error: if ``True``, raise a RuntimeError exception as
88 | soon as at least one of the datasets is in the 'error' state.
89 |
90 | .. warning::
91 |
92 | This is a blocking operation that can take a very long time.
93 | Also, note that this method does not return anything;
94 | however, each input dataset is refreshed (possibly multiple
95 | times) during the execution.
96 | """
97 |
98 | def poll(ds_list: Iterable[wrappers.Dataset]) -> list[wrappers.Dataset]:
99 | pending = []
100 | for ds in ds_list:
101 | ds.refresh()
102 | if break_on_error and ds.state == "error":
103 | raise RuntimeError(_get_error_info(ds))
104 | if not ds.state:
105 | self.log.warning("Dataset %s has an empty state", ds.id)
106 | elif ds.state not in TERMINAL_STATES:
107 | self.log.info("Dataset %s is in non-terminal state %s", ds.id, ds.state)
108 | pending.append(ds)
109 | return pending
110 |
111 | self.log.info("Waiting for datasets")
112 | while datasets:
113 | datasets = poll(datasets)
114 | time.sleep(polling_interval)
115 |
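The polling contract that ``_wait_datasets`` implements can be sketched from user code with the same public wrapper API used above (``refresh()``, ``state`` and ``TERMINAL_STATES``); the URL and key are placeholders::

    import time

    from bioblend.galaxy.datasets import TERMINAL_STATES
    from bioblend.galaxy.objects import GalaxyInstance

    gi = GalaxyInstance("http://127.0.0.1:8080", api_key="YOUR_API_KEY")
    hist = gi.histories.list()[0]  # assumes at least one history exists
    datasets = hist.get_datasets()
    while any(ds.state not in TERMINAL_STATES for ds in datasets):
        time.sleep(3)  # polling interval in seconds
        for ds in datasets:
            ds.refresh()  # re-fetch the dataset's state from the server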
--------------------------------------------------------------------------------
/bioblend/galaxy/roles/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Contains possible interactions with the Galaxy Roles
3 | """
4 |
5 | from typing import (
6 | Any,
7 | Optional,
8 | TYPE_CHECKING,
9 | )
10 |
11 | from bioblend.galaxy.client import Client
12 |
13 | if TYPE_CHECKING:
14 | from bioblend.galaxy import GalaxyInstance
15 |
16 |
17 | class RolesClient(Client):
18 | module = "roles"
19 |
20 | def __init__(self, galaxy_instance: "GalaxyInstance") -> None:
21 | super().__init__(galaxy_instance)
22 |
23 | def get_roles(self) -> list[dict[str, Any]]:
24 | """
25 | Displays a collection (list) of roles.
26 |
27 | :rtype: list
28 | :return: A list of dicts with details on individual roles.
29 | For example::
30 |
31 | [{"id": "f2db41e1fa331b3e",
32 | "model_class": "Role",
33 | "name": "Foo",
34 | "url": "/api/roles/f2db41e1fa331b3e"},
35 | {"id": "f597429621d6eb2b",
36 | "model_class": "Role",
37 | "name": "Bar",
38 | "url": "/api/roles/f597429621d6eb2b"}]
39 | """
40 | return self._get()
41 |
42 | def show_role(self, role_id: str) -> dict[str, Any]:
43 | """
44 | Display information on a single role
45 |
46 | :type role_id: str
47 | :param role_id: Encoded role ID
48 |
49 | :rtype: dict
50 | :return: Details of the given role.
51 | For example::
52 |
53 | {"description": "Private Role for Foo",
54 | "id": "f2db41e1fa331b3e",
55 | "model_class": "Role",
56 | "name": "Foo",
57 | "type": "private",
58 | "url": "/api/roles/f2db41e1fa331b3e"}
59 | """
60 | return self._get(id=role_id)
61 |
62 | def create_role(
63 | self,
64 | role_name: str,
65 | description: str,
66 | user_ids: Optional[list[str]] = None,
67 | group_ids: Optional[list[str]] = None,
68 | ) -> dict[str, Any]:
69 | """
70 | Create a new role.
71 |
72 | :type role_name: str
73 | :param role_name: A name for the new role
74 |
75 | :type description: str
76 | :param description: Description for the new role
77 |
78 | :type user_ids: list
79 | :param user_ids: A list of encoded user IDs to add to the new role
80 |
81 | :type group_ids: list
82 | :param group_ids: A list of encoded group IDs to add to the new role
83 |
84 | :rtype: dict
85 | :return: Details of the newly created role.
86 | For example::
87 |
88 | {'description': 'desc',
89 | 'url': '/api/roles/ebfb8f50c6abde6d',
90 | 'model_class': 'Role',
91 | 'type': 'admin',
92 | 'id': 'ebfb8f50c6abde6d',
93 | 'name': 'Foo'}
94 |
95 | .. versionchanged:: 0.15.0
96 | Changed the return value from a 1-element list to a dict.
97 | """
98 | if user_ids is None:
99 | user_ids = []
100 | if group_ids is None:
101 | group_ids = []
102 | payload = {"name": role_name, "description": description, "user_ids": user_ids, "group_ids": group_ids}
103 | ret = self._post(payload)
104 | if isinstance(ret, list):
105 | # Galaxy release_20.09 and earlier returned a 1-element list
106 | ret = ret[0]
107 | return ret
108 |
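A short usage sketch for the client above (placeholder URL and key; the returned dict has the shape shown in the ``create_role`` docstring)::

    from bioblend.galaxy import GalaxyInstance

    gi = GalaxyInstance("http://127.0.0.1:8080", key="YOUR_API_KEY")
    role = gi.roles.create_role("Foo", "Private Role for Foo")
    print(role["id"], role["type"])
    print(gi.roles.show_role(role["id"])["description"])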
--------------------------------------------------------------------------------
/bioblend/galaxy/tool_data/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Contains possible interactions with the Galaxy Tool data tables
3 | """
4 |
5 | from typing import (
6 | Any,
7 | TYPE_CHECKING,
8 | )
9 |
10 | from bioblend.galaxy.client import Client
11 |
12 | if TYPE_CHECKING:
13 | from bioblend.galaxy import GalaxyInstance
14 |
15 |
16 | class ToolDataClient(Client):
17 | module = "tool_data"
18 |
19 | def __init__(self, galaxy_instance: "GalaxyInstance") -> None:
20 | super().__init__(galaxy_instance)
21 |
22 | def get_data_tables(self) -> list[dict[str, Any]]:
23 | """
24 | Get the list of all data tables.
25 |
26 | :rtype: list
27 | :return: A list of dicts with details on individual data tables.
28 | For example::
29 |
30 | [{"model_class": "TabularToolDataTable", "name": "fasta_indexes"},
31 | {"model_class": "TabularToolDataTable", "name": "bwa_indexes"}]
32 | """
33 | return self._get()
34 |
35 | def show_data_table(self, data_table_id: str) -> dict[str, Any]:
36 | """
37 | Get details of a given data table.
38 |
39 | :type data_table_id: str
40 | :param data_table_id: ID of the data table
41 |
42 | :rtype: dict
43 | :return: A description of the given data table and its content.
44 | For example::
45 |
46 | {'columns': ['value', 'dbkey', 'name', 'path'],
47 | 'fields': [['test id',
48 | 'test',
49 | 'test name',
50 | '/opt/galaxy-dist/tool-data/test/seq/test id.fa']],
51 | 'model_class': 'TabularToolDataTable',
52 | 'name': 'all_fasta'}
53 |
54 | """
55 | return self._get(id=data_table_id)
56 |
57 | def reload_data_table(self, data_table_id: str) -> dict[str, Any]:
58 | """
59 | Reload a data table.
60 |
61 | :type data_table_id: str
62 | :param data_table_id: ID of the data table
63 |
64 | :rtype: dict
65 | :return: A description of the given data table and its content.
66 | For example::
67 |
68 | {'columns': ['value', 'dbkey', 'name', 'path'],
69 | 'fields': [['test id',
70 | 'test',
71 | 'test name',
72 | '/opt/galaxy-dist/tool-data/test/seq/test id.fa']],
73 | 'model_class': 'TabularToolDataTable',
74 | 'name': 'all_fasta'}
75 | """
76 | url = self._make_url(data_table_id) + "/reload"
77 | return self._get(url=url)
78 |
79 | def delete_data_table(self, data_table_id: str, values: str) -> dict[str, Any]:
80 | """
81 | Delete an item from a data table.
82 |
83 | :type data_table_id: str
84 | :param data_table_id: ID of the data table
85 |
86 | :type values: str
87 | :param values: a "|"-separated list of column contents; there must be a
88 | value for each column of the data table
89 |
90 | :rtype: dict
91 | :return: Remaining contents of the given data table
92 | """
93 | payload = {"values": values}
94 | return self._delete(payload=payload, id=data_table_id)
95 |
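For example, to inspect and then reload the ``all_fasta`` table used in the docstrings above (placeholder URL and key)::

    from bioblend.galaxy import GalaxyInstance

    gi = GalaxyInstance("http://127.0.0.1:8080", key="YOUR_API_KEY")
    table = gi.tool_data.show_data_table("all_fasta")
    print(table["columns"])  # e.g. ['value', 'dbkey', 'name', 'path']
    gi.tool_data.reload_data_table("all_fasta")  # pick up on-disk changes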
--------------------------------------------------------------------------------
/bioblend/galaxy/tool_dependencies/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Contains interactions dealing with Galaxy dependency resolvers.
3 | """
4 |
5 | from typing import (
6 | Any,
7 | Literal,
8 | Optional,
9 | TYPE_CHECKING,
10 | )
11 |
12 | from bioblend.galaxy.client import Client
13 |
14 | if TYPE_CHECKING:
15 | from bioblend.galaxy import GalaxyInstance
16 |
17 |
18 | class ToolDependenciesClient(Client):
19 | module = "dependency_resolvers"
20 |
21 | def __init__(self, galaxy_instance: "GalaxyInstance") -> None:
22 | super().__init__(galaxy_instance)
23 |
24 | def summarize_toolbox(
25 | self,
26 | index: Optional[int] = None,
27 | tool_ids: Optional[list[str]] = None,
28 | resolver_type: Optional[str] = None,
29 | include_containers: bool = False,
30 | container_type: Optional[str] = None,
31 | index_by: Literal["requirements", "tools"] = "requirements",
32 | ) -> list:
33 | """
34 | Summarize requirements across toolbox (for Tool Management grid).
35 |
36 | :type index: int
37 | :param index: index of the dependency resolver with respect to
38 | the dependency resolvers config file
39 |
40 | :type tool_ids: list
41 | :param tool_ids: tool_ids to return when index_by=tools
42 |
43 | :type resolver_type: str
44 | :param resolver_type: restrict to specified resolver type
45 |
46 | :type include_containers: bool
47 | :param include_containers: include container resolvers in resolution
48 |
49 | :type container_type: str
50 | :param container_type: restrict to specified container type
51 |
52 | :type index_by: str
53 | :param index_by: By default results are grouped by requirements. Set to 'tools'
54 | to return one entry per tool.
55 |
56 | :rtype: list of dicts
57 | :return: dictified descriptions of the dependencies, with attribute
58 | `dependency_type: None` if no match was found.
59 | For example::
60 |
61 | [{'requirements': [{'name': 'galaxy_sequence_utils',
62 | 'specs': [],
63 | 'type': 'package',
64 | 'version': '1.1.4'},
65 | {'name': 'bx-python',
66 | 'specs': [],
67 | 'type': 'package',
68 | 'version': '0.8.6'}],
69 | 'status': [{'cacheable': False,
70 | 'dependency_type': None,
71 | 'exact': True,
72 | 'model_class': 'NullDependency',
73 | 'name': 'galaxy_sequence_utils',
74 | 'version': '1.1.4'},
75 | {'cacheable': False,
76 | 'dependency_type': None,
77 | 'exact': True,
78 | 'model_class': 'NullDependency',
79 | 'name': 'bx-python',
80 | 'version': '0.8.6'}],
81 | 'tool_ids': ['vcf_to_maf_customtrack1']}]
82 |
83 | .. note::
84 | This method works only on Galaxy 20.01 or later and if the user is a
85 | Galaxy admin. It relies on an experimental API particularly tied to
86 | the GUI and therefore is subject to breaking changes.
87 | """
88 | assert index_by in ["tools", "requirements"], "index_by must be one of 'tools' or 'requirements'."
89 | params = {
90 | "include_containers": str(include_containers),
91 | "index_by": index_by,
92 | }
93 | if index is not None:  # a resolver index of 0 is valid
94 | params["index"] = str(index)
95 | if tool_ids:
96 | params["tool_ids"] = ",".join(tool_ids)
97 | if resolver_type:
98 | params["resolver_type"] = resolver_type
99 | if container_type:
100 | params["container_type"] = container_type
101 |
102 | url = "/".join((self._make_url(), "toolbox"))
103 | return self._get(url=url, params=params)
104 |
105 | def unused_dependency_paths(self) -> list[str]:
106 | """
107 | List unused dependency paths.
108 | """
109 | url = "/".join((self._make_url(), "unused_paths"))
110 | return self._get(url=url)
111 |
112 | def delete_unused_dependency_paths(self, paths: list[str]) -> None:
113 | """
114 | Delete unused dependency paths.
115 |
116 | :type paths: list
117 | :param paths: paths to delete
118 |
119 | """
120 | payload: dict[str, Any] = {"paths": paths}
121 | url = "/".join((self._make_url(), "unused_paths"))
122 | self._put(url=url, payload=payload)
123 |
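A sketch of calling ``summarize_toolbox`` with an admin key (the URL, key and tool ID are placeholders; the tool ID follows the docstring example)::

    from bioblend.galaxy import GalaxyInstance

    gi = GalaxyInstance("http://127.0.0.1:8080", key="ADMIN_API_KEY")
    # one entry per tool instead of the default per-requirement grouping
    summary = gi.tool_dependencies.summarize_toolbox(
        tool_ids=["vcf_to_maf_customtrack1"],
        include_containers=True,
        index_by="tools",
    )
    print(len(summary))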
--------------------------------------------------------------------------------
/bioblend/galaxy/tools/inputs.py:
--------------------------------------------------------------------------------
1 | from collections.abc import Iterator
2 | from typing import (
3 | Any,
4 | Optional,
5 | Union,
6 | )
7 |
8 |
9 | class InputsBuilder:
10 | """ """
11 |
12 | def __init__(self) -> None:
13 | self._input_dict: dict[str, Any] = {}
14 |
15 | def set(self, name: str, input: Any) -> "InputsBuilder":
16 | self._input_dict[name] = input
17 | return self
18 |
19 | def set_param(self, name: str, value: Any) -> "InputsBuilder":
20 | return self.set(name, param(value=value))
21 |
22 | def set_dataset_param(self, name: str, value: str, src: str = "hda") -> "InputsBuilder":
23 | return self.set(name, dataset(value, src=src))
24 |
25 | def to_dict(self) -> dict[str, Any]:
26 | values = {}
27 | for key, value in self.flat_iter():
28 | if hasattr(value, "value"):
29 | value = value.value
30 | values[key] = value
31 | return values
32 |
33 | def flat_iter(self, prefix: Optional[str] = None) -> Iterator[tuple[str, Any]]:
34 | for key, value in self._input_dict.items():
35 | effective_key = key if prefix is None else f"{prefix}|{key}"
36 | if hasattr(value, "flat_iter"):
37 | yield from value.flat_iter(effective_key)
38 | else:
39 | yield effective_key, value
40 |
41 |
42 | class RepeatBuilder:
43 | def __init__(self) -> None:
44 | self._instances: list[InputsBuilder] = []
45 |
46 | def instance(self, inputs: InputsBuilder) -> "RepeatBuilder":
47 | self._instances.append(inputs)
48 | return self
49 |
50 | def flat_iter(self, prefix: str) -> Iterator[tuple[str, Any]]:
51 | for index, instance in enumerate(self._instances):
52 | index_prefix = f"{prefix}_{index}"
53 | yield from instance.flat_iter(index_prefix)
54 |
55 |
56 | class Param:
57 | def __init__(self, value: Any) -> None:
58 | self.value = value
59 |
60 |
61 | class DatasetParam(Param):
62 | def __init__(self, value: Union[dict[str, str], str], src: str = "hda") -> None:
63 | if not isinstance(value, dict):
64 | value = {"src": src, "id": value}
65 | super().__init__(value)
66 |
67 |
68 | inputs = InputsBuilder
69 | repeat = RepeatBuilder
70 | conditional = InputsBuilder
71 | param = Param
72 | dataset = DatasetParam
73 |
74 | __all__ = ("inputs", "repeat", "conditional", "param", "dataset")
75 |
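To illustrate the key flattening performed by ``flat_iter``/``to_dict`` (the encoded dataset ID is a placeholder)::

    from bioblend.galaxy.tools.inputs import inputs, repeat

    tool_inputs = (
        inputs()
        .set_dataset_param("input1", "f2db41e1fa331b3e")
        .set("queries", repeat().instance(inputs().set_param("input2", "c1")))
    )
    print(tool_inputs.to_dict())
    # {'input1': {'src': 'hda', 'id': 'f2db41e1fa331b3e'},
    #  'queries_0|input2': 'c1'}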
--------------------------------------------------------------------------------
/bioblend/galaxy/visual/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Contains possible interactions with Galaxy visualizations
3 | """
4 |
5 | from typing import (
6 | Any,
7 | TYPE_CHECKING,
8 | )
9 |
10 | from bioblend.galaxy.client import Client
11 |
12 | if TYPE_CHECKING:
13 | from bioblend.galaxy import GalaxyInstance
14 |
15 |
16 | class VisualClient(Client):
17 | module = "visualizations"
18 |
19 | def __init__(self, galaxy_instance: "GalaxyInstance") -> None:
20 | super().__init__(galaxy_instance)
21 |
22 | def get_visualizations(self) -> list[dict[str, Any]]:
23 | """
24 | Get the list of all visualizations.
25 |
26 | :rtype: list
27 | :return: A list of dicts with details on individual visualizations.
28 | For example::
29 |
30 | [{'dbkey': 'eschColi_K12',
31 | 'id': 'df1c7c96fc427c2d',
32 | 'title': 'AVTest1',
33 | 'type': 'trackster',
34 | 'url': '/api/visualizations/df1c7c96fc427c2d'},
35 | {'dbkey': 'mm9',
36 | 'id': 'a669f50f8bf55b02',
37 | 'title': 'Bam to Bigwig',
38 | 'type': 'trackster',
39 | 'url': '/api/visualizations/a669f50f8bf55b02'}]
40 | """
41 | return self._get()
42 |
43 | def show_visualization(self, visual_id: str) -> dict[str, Any]:
44 | """
45 | Get details of a given visualization.
46 |
47 | :type visual_id: str
48 | :param visual_id: Encoded visualization ID
49 |
50 | :rtype: dict
51 | :return: A description of the given visualization.
52 | For example::
53 |
54 | {'annotation': None,
55 | 'dbkey': 'mm9',
56 | 'id': '18df9134ea75e49c',
57 | 'latest_revision': { ... },
58 | 'model_class': 'Visualization',
59 | 'revisions': ['aa90649bb3ec7dcb', '20622bc6249c0c71'],
60 | 'slug': 'visualization-for-grant-1',
61 | 'title': 'Visualization For Grant',
62 | 'type': 'trackster',
63 | 'url': '/u/azaron/v/visualization-for-grant-1',
64 | 'user_id': '21e4aed91386ca8b'}
65 | """
66 | return self._get(id=visual_id)
67 |
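Usage sketch, assuming the client is reachable as ``gi.visual`` on a ``bioblend.galaxy.GalaxyInstance`` (placeholder URL and key)::

    from bioblend.galaxy import GalaxyInstance

    gi = GalaxyInstance("http://127.0.0.1:8080", key="YOUR_API_KEY")
    for viz in gi.visual.get_visualizations():
        print(viz["type"], viz["title"], viz["id"])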
--------------------------------------------------------------------------------
/bioblend/py.typed:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/galaxyproject/bioblend/01909298c19bae6165a1c1f51cbf8c92be5ac55e/bioblend/py.typed
--------------------------------------------------------------------------------
/bioblend/toolshed/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | A base representation of an instance of Tool Shed
3 | """
4 |
5 | from typing import Optional
6 |
7 | from bioblend.galaxyclient import GalaxyClient
8 | from bioblend.toolshed import (
9 | categories,
10 | repositories,
11 | tools,
12 | )
13 |
14 |
15 | class ToolShedInstance(GalaxyClient):
16 | def __init__(
17 | self,
18 | url: str,
19 | key: Optional[str] = None,
20 | email: Optional[str] = None,
21 | password: Optional[str] = None,
22 | *,
23 | verify: bool = True,
24 | user_agent: Optional[str] = None,
25 | ) -> None:
26 | """
27 | A base representation of a connection to a ToolShed instance, identified
28 | by the ToolShed URL and user credentials.
29 |
30 | After you have created a ``ToolShedInstance`` object, access various
31 | modules via the class fields. For example, to work with repositories and
32 | get a list of all public repositories, the following should be done::
33 |
34 | from bioblend import toolshed
35 |
36 | ts = toolshed.ToolShedInstance(url='https://testtoolshed.g2.bx.psu.edu')
37 |
38 | rl = ts.repositories.get_repositories()
39 |
40 | tools = ts.tools.search_tools('fastq')
41 |
42 | :type url: str
43 | :param url: A FQDN or IP for a given instance of ToolShed. For example:
44 | https://testtoolshed.g2.bx.psu.edu . If a ToolShed instance
45 | is served under a prefix (e.g.
46 | http://127.0.0.1:8080/toolshed/), supply the entire URL
47 | including the prefix (note that the prefix must end with a
48 | slash).
49 |
50 | :type key: str
51 | :param key: If required, user's API key for the given instance of ToolShed,
52 | obtained from the user preferences.
53 |
54 | :type email: str
55 | :param email: ToolShed e-mail address corresponding to the user.
56 | Ignored if key is supplied directly.
57 |
58 | :type password: str
59 | :param password: Password of ToolShed account corresponding to the above
60 | e-mail address. Ignored if key is supplied directly.
61 |
62 | :param verify: Whether to verify the server's TLS certificate
63 | :type verify: bool
64 | """
65 | super().__init__(url, key=key, email=email, password=password, verify=verify, user_agent=user_agent)
66 | self.categories = categories.ToolShedCategoryClient(self)
67 | self.repositories = repositories.ToolShedRepositoryClient(self)
68 | self.tools = tools.ToolShedToolClient(self)
69 |
--------------------------------------------------------------------------------
/bioblend/toolshed/categories/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Interaction with the categories of a Tool Shed instance
3 | """
4 |
5 | from typing import (
6 | Any,
7 | Literal,
8 | TYPE_CHECKING,
9 | )
10 |
11 | from bioblend.galaxy.client import Client
12 |
13 | if TYPE_CHECKING:
14 | from bioblend.toolshed import ToolShedInstance
15 |
16 |
17 | class ToolShedCategoryClient(Client):
18 | module = "categories"
19 |
20 | def __init__(self, toolshed_instance: "ToolShedInstance") -> None:
21 | super().__init__(toolshed_instance)
22 |
23 | def get_categories(self, deleted: bool = False) -> list[dict[str, Any]]:
24 | """
25 | Returns a list of dictionaries that contain descriptions of the
26 | repository categories found on the given Tool Shed instance.
27 |
28 | :type deleted: bool
29 | :param deleted: whether to show deleted categories. Requires
30 | administrator access to the Tool Shed instance.
31 |
32 | :rtype: list
33 | :return: A list of dictionaries containing information about
34 | repository categories present in the Tool Shed.
35 | For example::
36 |
37 | [{'deleted': False,
38 | 'description': 'Tools for manipulating data',
39 | 'id': '175812cd7caaf439',
40 | 'model_class': 'Category',
41 | 'name': 'Text Manipulation',
42 | 'url': '/api/categories/175812cd7caaf439'}]
43 |
44 | .. versionadded:: 0.5.2
45 | """
46 | return self._get(deleted=deleted)
47 |
48 | def show_category(self, category_id: str) -> dict[str, Any]:
49 | """
50 | Get details of a given category.
51 |
52 | :type category_id: str
53 | :param category_id: Encoded category ID
54 |
55 | :rtype: dict
56 | :return: details of the given category
57 | """
58 | return self._get(id=category_id)
59 |
60 | def get_repositories(
61 | self, category_id: str, sort_key: Literal["name", "owner"] = "name", sort_order: Literal["asc", "desc"] = "asc"
62 | ) -> dict[str, Any]:
63 | """
64 | Returns a dictionary of information for a repository category including
65 | a list of repositories belonging to the category.
66 |
67 | :type category_id: str
68 | :param category_id: Encoded category ID
69 |
70 | :type sort_key: str
71 | :param sort_key: key for sorting. Options are 'name' or 'owner' (default 'name').
72 |
73 | :type sort_order: str
74 | :param sort_order: ordering of sorted output. Options are 'asc' or 'desc' (default 'asc').
75 |
76 | :rtype: dict
77 | :return: A dict containing information about the category
78 | including a list of repository dicts.
79 | For example::
80 |
81 | {'deleted': False,
82 | 'description': 'Tools for constructing and analyzing 3-dimensional shapes and '
83 | 'their properties',
84 | 'id': '589548af7e391bcf',
85 | 'model_class': 'Category',
86 | 'name': 'Constructive Solid Geometry',
87 | 'repositories': [{'create_time': '2016-08-23T18:53:23.845013',
88 | 'deleted': False,
89 | 'deprecated': False,
90 | 'description': 'Adds a surface field to a selected shape '
91 | 'based on a given mathematical expression',
92 | 'homepage_url': 'https://github.com/gregvonkuster/galaxy-csg',
93 | 'id': 'af2ccc53697b064c',
94 | 'metadata': {'0:e12b55e960de': {'changeset_revision': 'e12b55e960de',
95 | 'downloadable': True,
96 | 'has_repository_dependencies': False,
97 | 'id': 'dfe022067783215f',
98 | 'includes_datatypes': False,
99 | 'includes_tool_dependencies': False,
100 | 'includes_tools': True,
101 | 'includes_tools_for_display_in_tool_panel': True,
102 | 'includes_workflows': False,
103 | 'malicious': False,
104 | 'missing_test_components': False,
105 | 'model_class': 'RepositoryMetadata',
106 | 'numeric_revision': 0,
107 | 'repository_id': 'af2ccc53697b064c'}},
108 | 'model_class': 'Repository',
109 | 'name': 'icqsol_add_surface_field_from_expression',
110 | 'owner': 'iuc',
111 | 'private': False,
112 | 'remote_repository_url': 'https://github.com/gregvonkuster/galaxy-csg',
113 | 'times_downloaded': 152,
114 | 'type': 'unrestricted',
115 | 'user_id': 'b563abc230aa8fd0'},
116 | # ...
117 | ],
118 | 'repository_count': 11,
119 | 'url': '/api/categories/589548af7e391bcf'}
120 | """
121 |
122 | params: dict[str, Any] = {}
123 | if sort_key:
124 | params.update({"sort_key": sort_key})
125 | if sort_order:
126 | params.update({"sort_order": sort_order})
127 |
128 | url = self._make_url(category_id) + "/repositories"
129 | return self._get(url=url, params=params)
130 |
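For example, to list a category's repositories sorted by owner (the category ID is taken from the docstring example above)::

    from bioblend.toolshed import ToolShedInstance

    ts = ToolShedInstance("https://testtoolshed.g2.bx.psu.edu")
    cat = ts.categories.get_repositories("589548af7e391bcf", sort_key="owner")
    for repo in cat["repositories"]:
        print(repo["owner"], repo["name"])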
--------------------------------------------------------------------------------
/bioblend/toolshed/tools/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Interaction with the tools of a Tool Shed instance
3 | """
4 |
5 | from typing import (
6 | Any,
7 | TYPE_CHECKING,
8 | )
9 |
10 | from bioblend.galaxy.client import Client
11 |
12 | if TYPE_CHECKING:
13 | from bioblend.toolshed import ToolShedInstance
14 |
15 |
16 | class ToolShedToolClient(Client):
17 | gi: "ToolShedInstance"
18 | module = "tools"
19 |
20 | def __init__(self, toolshed_instance: "ToolShedInstance") -> None:
21 | super().__init__(toolshed_instance)
22 |
23 | def search_tools(self, q: str, page: int = 1, page_size: int = 10) -> dict[str, Any]:
24 | """
25 | Search for tools in a Galaxy Tool Shed.
26 |
27 | :type q: str
28 | :param q: query string for searching purposes
29 |
30 | :type page: int
31 | :param page: page requested
32 |
33 | :type page_size: int
34 | :param page_size: page size requested
35 |
36 | :rtype: dict
37 | :return: dictionary containing search hits as well as metadata for the
38 | search. For example::
39 |
40 | {'hits': [{'matched_terms': [],
41 | 'score': 3.0,
42 | 'tool': {'description': 'convert between various FASTQ quality formats',
43 | 'id': '69819b84d55f521efda001e0926e7233',
44 | 'name': 'FASTQ Groomer',
45 | 'repo_name': None,
46 | 'repo_owner_username': 'devteam'}},
47 | {'matched_terms': [],
48 | 'score': 3.0,
49 | 'tool': {'description': 'converts a bam file to fastq files.',
50 | 'id': '521e282770fd94537daff87adad2551b',
51 | 'name': 'Defuse BamFastq',
52 | 'repo_name': None,
53 | 'repo_owner_username': 'jjohnson'}}],
54 | 'hostname': 'https://testtoolshed.g2.bx.psu.edu/',
55 | 'page': '1',
56 | 'page_size': '2',
57 | 'total_results': '118'}
58 | """
59 | params = {"q": q, "page": page, "page_size": page_size}
60 | return self._get(params=params)
61 |
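A paging sketch built on the parameters above; note that, per the docstring example, the server returns ``total_results`` as a string::

    from bioblend.toolshed import ToolShedInstance

    ts = ToolShedInstance("https://testtoolshed.g2.bx.psu.edu")
    page, page_size = 1, 50
    while True:
        results = ts.tools.search_tools("fastq", page=page, page_size=page_size)
        for hit in results["hits"]:
            print(hit["tool"]["name"])
        if page * page_size >= int(results["total_results"]):
            break
        page += 1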
--------------------------------------------------------------------------------
/bioblend/util/__init__.py:
--------------------------------------------------------------------------------
1 | import os
2 | from typing import (
3 | Any,
4 | IO,
5 | NamedTuple,
6 | Optional,
7 | TypeVar,
8 | )
9 |
10 |
11 | class FileStream(NamedTuple):
12 | name: str
13 | fd: IO
14 |
15 | def close(self) -> None:
16 | self.fd.close()
17 |
18 |
19 | def attach_file(path: str, name: Optional[str] = None) -> FileStream:
20 | """
21 | Attach a path to a request payload object.
22 |
23 | :type path: str
24 | :param path: Path to file to attach to payload.
25 |
26 | :type name: str
27 | :param name: Name to give file, if different than actual pathname.
28 |
29 | :rtype: object
30 | :return: Returns an object compatible with requests post operation and
31 | capable of being closed with a ``close()`` method.
32 | """
33 | if name is None:
34 | name = os.path.basename(path)
35 | return FileStream(name, open(path, "rb"))
36 |
37 |
38 | T = TypeVar("T")
39 |
40 |
41 | def abstractclass(decorated_cls: type[T]) -> type[T]:
42 | """
43 | Decorator that marks a class as abstract even without any abstract method
44 |
45 | Adapted from https://stackoverflow.com/a/49013561/4503125
46 | """
47 |
48 | def clsnew(cls: type[T], *args: Any, **kwargs: Any) -> T:
49 | # assert issubclass(cls, decorated_cls)
50 | if cls is decorated_cls:
51 | cls_name = getattr(decorated_cls, "__name__", str(decorated_cls))
52 | raise TypeError(f"Can't instantiate abstract class {cls_name}")
53 | return super(decorated_cls, cls).__new__(cls) # type: ignore[misc]
54 |
55 | decorated_cls.__new__ = clsnew # type: ignore[assignment]
56 | return decorated_cls
57 |
58 |
59 | __all__ = (
60 | "abstractclass",
61 | "attach_file",
62 | )
63 |
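Two quick sketches of the helpers above (the file path is hypothetical)::

    from bioblend.util import abstractclass, attach_file

    @abstractclass
    class Base:
        pass

    class Child(Base):
        pass

    Child()   # fine: only the decorated class itself cannot be instantiated
    # Base()  # would raise TypeError

    # attach_file wraps a path so it can be used as a file field in a
    # requests POST payload; remember to close it afterwards
    fs = attach_file("/tmp/example.txt")
    fs.close()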
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | PAPER =
8 | BUILDDIR = _build
9 |
10 | # Internal variables.
11 | PAPEROPT_a4 = -D latex_paper_size=a4
12 | PAPEROPT_letter = -D latex_paper_size=letter
13 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
14 | # the i18n builder cannot share the environment and doctrees with the others
15 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
16 |
17 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
18 |
19 | help:
20 | @echo "Please use \`make ' where is one of"
21 | @echo " html to make standalone HTML files"
22 | @echo " dirhtml to make HTML files named index.html in directories"
23 | @echo " singlehtml to make a single large HTML file"
24 | @echo " pickle to make pickle files"
25 | @echo " json to make JSON files"
26 | @echo " htmlhelp to make HTML files and a HTML help project"
27 | @echo " qthelp to make HTML files and a qthelp project"
28 | @echo " devhelp to make HTML files and a Devhelp project"
29 | @echo " epub to make an epub"
30 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
31 | @echo " latexpdf to make LaTeX files and run them through pdflatex"
32 | @echo " text to make text files"
33 | @echo " man to make manual pages"
34 | @echo " texinfo to make Texinfo files"
35 | @echo " info to make Texinfo files and run them through makeinfo"
36 | @echo " gettext to make PO message catalogs"
37 | @echo " changes to make an overview of all changed/added/deprecated items"
38 | @echo " linkcheck to check all external links for integrity"
39 | @echo " doctest to run all doctests embedded in the documentation (if enabled)"
40 |
41 | clean:
42 | -rm -rf $(BUILDDIR)/*
43 |
44 | html:
45 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
46 | @echo
47 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
48 |
49 | dirhtml:
50 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
51 | @echo
52 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
53 |
54 | singlehtml:
55 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
56 | @echo
57 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
58 |
59 | pickle:
60 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
61 | @echo
62 | @echo "Build finished; now you can process the pickle files."
63 |
64 | json:
65 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
66 | @echo
67 | @echo "Build finished; now you can process the JSON files."
68 |
69 | htmlhelp:
70 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
71 | @echo
72 | @echo "Build finished; now you can run HTML Help Workshop with the" \
73 | ".hhp project file in $(BUILDDIR)/htmlhelp."
74 |
75 | qthelp:
76 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
77 | @echo
78 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \
79 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
80 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Blend.qhcp"
81 | @echo "To view the help file:"
82 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Blend.qhc"
83 |
84 | devhelp:
85 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
86 | @echo
87 | @echo "Build finished."
88 | @echo "To view the help file:"
89 | @echo "# mkdir -p $$HOME/.local/share/devhelp/Blend"
90 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Blend"
91 | @echo "# devhelp"
92 |
93 | epub:
94 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
95 | @echo
96 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
97 |
98 | latex:
99 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
100 | @echo
101 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
102 | @echo "Run \`make' in that directory to run these through (pdf)latex" \
103 | "(use \`make latexpdf' here to do that automatically)."
104 |
105 | latexpdf:
106 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
107 | @echo "Running LaTeX files through pdflatex..."
108 | $(MAKE) -C $(BUILDDIR)/latex all-pdf
109 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
110 |
111 | text:
112 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
113 | @echo
114 | @echo "Build finished. The text files are in $(BUILDDIR)/text."
115 |
116 | man:
117 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
118 | @echo
119 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
120 |
121 | texinfo:
122 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
123 | @echo
124 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
125 | @echo "Run \`make' in that directory to run these through makeinfo" \
126 | "(use \`make info' here to do that automatically)."
127 |
128 | info:
129 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
130 | @echo "Running Texinfo files through makeinfo..."
131 | make -C $(BUILDDIR)/texinfo info
132 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
133 |
134 | gettext:
135 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
136 | @echo
137 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
138 |
139 | changes:
140 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
141 | @echo
142 | @echo "The overview file is in $(BUILDDIR)/changes."
143 |
144 | linkcheck:
145 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
146 | @echo
147 | @echo "Link check complete; look for any errors in the above output " \
148 | "or in $(BUILDDIR)/linkcheck/output.txt."
149 |
150 | doctest:
151 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
152 | @echo "Testing of doctests in the sources finished, look at the " \
153 | "results in $(BUILDDIR)/doctest/output.txt."
154 |
--------------------------------------------------------------------------------
/docs/_static/.empty:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/galaxyproject/bioblend/01909298c19bae6165a1c1f51cbf8c92be5ac55e/docs/_static/.empty
--------------------------------------------------------------------------------
/docs/api_docs/galaxy/all.rst:
--------------------------------------------------------------------------------
1 | =============================================
2 | API documentation for interacting with Galaxy
3 | =============================================
4 |
5 | GalaxyInstance
6 | --------------
7 |
8 | .. autoclass:: bioblend.galaxy.GalaxyInstance
9 |
10 | .. automethod:: bioblend.galaxy.GalaxyInstance.__init__
11 |
12 | -----
13 |
14 | .. _libraries-api:
15 |
16 | Config
17 | ------
18 |
19 | .. automodule:: bioblend.galaxy.config
20 |
21 | -----
22 |
23 | Datasets
24 | --------
25 |
26 | .. automodule:: bioblend.galaxy.datasets
27 |
28 | -----
29 |
30 | Dataset collections
31 | -------------------
32 |
33 | .. automodule:: bioblend.galaxy.dataset_collections
34 |
35 | -----
36 |
37 | Datatypes
38 | ---------
39 |
40 | .. automodule:: bioblend.galaxy.datatypes
41 |
42 | -----
43 |
44 | Folders
45 | -------
46 |
47 | .. automodule:: bioblend.galaxy.folders
48 |
49 | -----
50 |
51 | Forms
52 | -----
53 |
54 | .. automodule:: bioblend.galaxy.forms
55 |
56 | -----
57 |
58 | FTP files
59 | ---------
60 |
61 | .. automodule:: bioblend.galaxy.ftpfiles
62 |
63 | -----
64 |
65 | Genomes
66 | -------
67 |
68 | .. automodule:: bioblend.galaxy.genomes
69 |
70 | Groups
71 | ------
72 |
73 | .. automodule:: bioblend.galaxy.groups
74 |
75 | -----
76 |
77 | Histories
78 | ---------
79 |
80 | .. automodule:: bioblend.galaxy.histories
81 |
82 | -----
83 |
84 | Invocations
85 | -----------
86 |
87 | .. automodule:: bioblend.galaxy.invocations
88 |
89 | -----
90 |
91 | Jobs
92 | ----
93 |
94 | .. automodule:: bioblend.galaxy.jobs
95 |
96 | -----
97 |
98 | Libraries
99 | ---------
100 |
101 | .. automodule:: bioblend.galaxy.libraries
102 |
103 | -----
104 |
105 | Quotas
106 | ------
107 |
108 | .. automodule:: bioblend.galaxy.quotas
109 |
110 | -----
111 |
112 | Roles
113 | -----
114 |
115 | .. automodule:: bioblend.galaxy.roles
116 |
117 | -----
118 |
119 | Tools
120 | -----
121 |
122 | .. automodule:: bioblend.galaxy.tools
123 |
124 | -----
125 |
126 | Tool data tables
127 | ----------------
128 |
129 | .. automodule:: bioblend.galaxy.tool_data
130 |
131 | -----
132 |
133 | Tool dependencies
134 | -----------------
135 |
136 | .. automodule:: bioblend.galaxy.tool_dependencies
137 |
138 | -----
139 |
140 | ToolShed
141 | --------
142 |
143 | .. automodule:: bioblend.galaxy.toolshed
144 |
145 | -----
146 |
147 | Users
148 | -----
149 |
150 | .. automodule:: bioblend.galaxy.users
151 |
152 | -----
153 |
154 | Visual
155 | ------
156 |
157 | .. automodule:: bioblend.galaxy.visual
158 |
159 | -----
160 |
161 | .. _workflows-api:
162 |
163 | Workflows
164 | ---------
165 |
166 | .. automodule:: bioblend.galaxy.workflows
167 |
168 |
169 | .. _objects-api:
170 |
171 | ==========================
172 | Object-oriented Galaxy API
173 | ==========================
174 |
175 | .. autoclass:: bioblend.galaxy.objects.galaxy_instance.GalaxyInstance
176 |
177 | Client
178 | ------
179 |
180 | .. automodule:: bioblend.galaxy.objects.client
181 |
182 | Wrappers
183 | --------
184 |
185 | .. automodule:: bioblend.galaxy.objects.wrappers
186 |
--------------------------------------------------------------------------------
/docs/api_docs/lib_config.rst:
--------------------------------------------------------------------------------
1 | ====================================
2 | Configuration documents for BioBlend
3 | ====================================
4 |
5 | BioBlend
6 | --------
7 |
8 | .. automodule:: bioblend
9 | :members:
10 |
11 | Config
12 | ------
13 |
14 | .. automodule:: bioblend.config
15 | :members:
16 | :undoc-members:
17 |
--------------------------------------------------------------------------------
/docs/api_docs/toolshed/all.rst:
--------------------------------------------------------------------------------
1 | ==========================================================
2 | API documentation for interacting with the Galaxy Toolshed
3 | ==========================================================
4 |
5 | ToolShedInstance
6 | ----------------
7 |
8 | .. autoclass:: bioblend.toolshed.ToolShedInstance
9 |
10 | .. automethod:: bioblend.toolshed.ToolShedInstance.__init__
11 |
12 |
13 | Categories
14 | ----------
15 |
16 | .. automodule:: bioblend.toolshed.categories
17 |
18 | Repositories
19 | ------------
20 |
21 | .. automodule:: bioblend.toolshed.repositories
22 |
23 | Tools
24 | -----
25 |
26 | .. automodule:: bioblend.toolshed.tools
27 |
--------------------------------------------------------------------------------
/docs/examples/create_user_get_api_key.py:
--------------------------------------------------------------------------------
1 | """
2 | This example creates a new user and prints her API key. It is also used to
3 | initialize a Galaxy server in Continuous Integration testing of BioBlend.
4 |
5 | Usage: python3 create_user_get_api_key.py <galaxy_url> <galaxy_api_key> <new_username> <new_user_email> <new_user_password>
6 | """
7 |
8 | import sys
9 |
10 | import bioblend.galaxy
11 |
12 | if len(sys.argv) != 6:
13 | print(
14 | "Usage: python3 create_user_get_api_key.py "
15 | )
16 | sys.exit(1)
17 | galaxy_url = sys.argv[1]
18 | galaxy_api_key = sys.argv[2]
19 |
20 | # Initiating Galaxy connection
21 | gi = bioblend.galaxy.GalaxyInstance(galaxy_url, key=galaxy_api_key)
22 |
23 | # Create a new user and get a new API key for her
24 | new_user = gi.users.create_local_user(sys.argv[3], sys.argv[4], sys.argv[5])
25 | new_api_key = gi.users.create_user_apikey(new_user["id"])
26 | print(new_api_key)
27 |
--------------------------------------------------------------------------------
/docs/examples/list_data_libraries.py:
--------------------------------------------------------------------------------
1 | """
2 | This example retrieves details of all the Data Libraries available to us and lists information on them.
3 |
4 | Usage: python3 list_data_libraries.py <galaxy_url> <galaxy_api_key>
5 | """
6 |
7 | import sys
8 |
9 | from bioblend.galaxy import GalaxyInstance
10 |
11 | if len(sys.argv) != 3:
12 | print("Usage: python3 list_data_libraries.py ")
13 | sys.exit(1)
14 | galaxy_url = sys.argv[1]
15 | galaxy_key = sys.argv[2]
16 |
17 | print("Initiating Galaxy connection")
18 |
19 | gi = GalaxyInstance(url=galaxy_url, key=galaxy_key)
20 |
21 | print("Retrieving Data Library list")
22 |
23 | libraries = gi.libraries.get_libraries()
24 |
25 | if len(libraries) == 0:
26 | print("There are no Data Libraries available.")
27 | else:
28 | print("\nData Libraries:")
29 | for lib_dict in libraries:
30 | print(f"{lib_dict['name']} : {lib_dict['id']}")
31 |
--------------------------------------------------------------------------------
/docs/examples/list_histories.py:
--------------------------------------------------------------------------------
1 | """
2 | This example retrieves details of all the Histories in our Galaxy account and lists information on them.
3 |
4 | Usage: python list_histories.py <galaxy_url> <galaxy_api_key>
5 | """
6 |
7 | import sys
8 |
9 | from bioblend.galaxy import GalaxyInstance
10 |
11 | if len(sys.argv) != 3:
12 | print("Usage: python list_histories.py ")
13 | sys.exit(1)
14 | galaxy_url = sys.argv[1]
15 | galaxy_key = sys.argv[2]
16 |
17 | print("Initiating Galaxy connection")
18 |
19 | gi = GalaxyInstance(url=galaxy_url, key=galaxy_key)
20 |
21 | print("Retrieving History list")
22 |
23 | histories = gi.histories.get_histories()
24 |
25 | if len(histories) == 0:
26 | print("There are no Histories in your account.")
27 | else:
28 | print("\nHistories:")
29 | for hist_dict in histories:
30 | # As an example, we retrieve a piece of metadata (the size) using show_history
31 | hist_details = gi.histories.show_history(hist_dict["id"])
32 | print(f"{hist_dict['name']} ({hist_details['size']}) : {hist_dict['id']}")
33 |
--------------------------------------------------------------------------------
/docs/examples/list_workflows.py:
--------------------------------------------------------------------------------
1 | """
2 | This example retrieves details of all the Workflows in our Galaxy account and lists information on them.
3 |
4 | Usage: python list_workflows.py <galaxy_url> <galaxy_api_key>
5 | """
6 |
7 | import sys
8 |
9 | from bioblend.galaxy import GalaxyInstance
10 |
11 | if len(sys.argv) != 3:
12 | print("Usage: python list_workflows.py ")
13 | sys.exit(1)
14 | galaxy_url = sys.argv[1]
15 | galaxy_key = sys.argv[2]
16 |
17 | print("Initiating Galaxy connection")
18 |
19 | gi = GalaxyInstance(url=galaxy_url, key=galaxy_key)
20 |
21 | print("Retrieving Workflows list")
22 |
23 | workflows = gi.workflows.get_workflows()
24 |
25 | if len(workflows) == 0:
26 | print("There are no Workflows in your account.")
27 | else:
28 | print("\nWorkflows:")
29 | for wf_dict in workflows:
30 | print(f"{wf_dict['name']} : {wf_dict['id']}")
31 |
--------------------------------------------------------------------------------
/docs/examples/objects/README.txt:
--------------------------------------------------------------------------------
1 | BioBlend.objects Examples
2 | =========================
3 |
4 | Microbiology
5 | ------------
6 |
7 | This directory contains three examples of interaction with real-world
8 | microbiology workflows hosted by CRS4's Orione Galaxy server:
9 |
10 | * bacterial re-sequencing (w2_bacterial_reseq.py);
11 | * bacterial de novo assembly (w3_bacterial_denovo.py);
12 | * metagenomics (w5_metagenomics.py).
13 |
14 | All examples use workflows and datasets publicly available on Orione.
15 | Before you can run them, you have to register and obtain an API key:
16 |
17 | * go to https://orione.crs4.it and register -- or log in, if you are
18 | already registered -- through the "User" menu at the top of the page;
19 | * open "User" -> "API Keys";
20 | * generate an API key if you don't have one.
21 |
22 | In the example file, replace YOUR_API_KEY with your API key (or assign
23 | its value to the GALAXY_API_KEY environment variable), then run it:
24 |
25 | export GALAXY_API_KEY=000this_should_be_your_api_key00
26 | python w2_bacterial_reseq.py
27 |
28 | The job can take a long time to complete: before exiting, the script
29 | runs the workflow asynchronously, then displays the name and id of the
30 | output history on standard output. In the Galaxy web UI, click the
31 | gear icon at the top right corner of the History panel, select "Saved
32 | Histories" and look for the name of the output history in the center
33 | frame; finally, choose "switch" from the history's drop-down menu to
34 | make it the current one and follow the job as it evolves on Galaxy.
35 |
36 | Toy Example
37 | -----------
38 |
39 | The small.py file contains a "toy" example that should run much faster
40 | than the above ones (once the cluster's resource manager allows it to
41 | run). In this case, the script waits for the job to
42 | complete and downloads its results to a local file.
43 |
44 | See Also
45 | --------
46 |
47 | Cuccuru et al., "Orione, a web-based framework for NGS
48 | analysis in microbiology". Bioinformatics (2014).
49 | http://dx.doi.org/10.1093/bioinformatics/btu135
50 |
--------------------------------------------------------------------------------
/docs/examples/objects/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/galaxyproject/bioblend/01909298c19bae6165a1c1f51cbf8c92be5ac55e/docs/examples/objects/__init__.py
--------------------------------------------------------------------------------
/docs/examples/objects/common.py:
--------------------------------------------------------------------------------
1 | from collections.abc import Iterable
2 | from typing import TypeVar
3 |
4 | T = TypeVar("T")
5 |
6 |
7 | def get_one(iterable: Iterable[T]) -> T:
8 | seq = list(iterable)
9 | assert len(seq) == 1
10 | return seq[0]
11 |
--------------------------------------------------------------------------------
/docs/examples/objects/list_data_libraries.py:
--------------------------------------------------------------------------------
1 | """
2 | This example retrieves details of all the Data Libraries available to us and lists information on them.
3 |
4 | Usage: python list_data_libraries.py <galaxy_url> <galaxy_api_key>
5 | """
6 |
7 | import sys
8 |
9 | from bioblend.galaxy.objects import GalaxyInstance
10 |
11 | if len(sys.argv) != 3:
12 | print("Usage: python list_data_libraries.py ")
13 | sys.exit(1)
14 | galaxy_url = sys.argv[1]
15 | galaxy_key = sys.argv[2]
16 |
17 | print("Initiating Galaxy connection")
18 |
19 | gi = GalaxyInstance(galaxy_url, api_key=galaxy_key)
20 |
21 | print("Retrieving Data Library list")
22 |
23 | libraries = gi.libraries.get_previews()
24 |
25 | if len(libraries) == 0:
26 | print("There are no Data Libraries available.")
27 | else:
28 | print("\nData Libraries:")
29 | for lib in libraries:
30 | print(f"{lib.name} : {lib.id}")
31 |
--------------------------------------------------------------------------------
/docs/examples/objects/list_histories.py:
--------------------------------------------------------------------------------
1 | """
2 | This example retrieves details of all the Histories in our Galaxy account and lists information on them.
3 |
4 | Usage: python list_histories.py <galaxy_url> <galaxy_api_key>
5 | """
6 |
7 | import sys
8 |
9 | from bioblend.galaxy.objects import GalaxyInstance
10 |
11 | if len(sys.argv) != 3:
12 | print("Usage: python list_histories.py ")
13 | sys.exit(1)
14 | galaxy_url = sys.argv[1]
15 | galaxy_key = sys.argv[2]
16 |
17 | print("Initiating Galaxy connection")
18 |
19 | gi = GalaxyInstance(galaxy_url, api_key=galaxy_key)
20 |
21 | print("Retrieving History list")
22 |
23 | # histories.get_previews() returns a list of HistoryPreview objects, which contain only basic information
24 | # histories.list() method returns a list of History objects, which contain more extended information
25 | # As an example, we will use a piece of metadata (the size) from the 'wrapped' data attribute of History
26 | histories = gi.histories.list()
27 |
28 | if len(histories) == 0:
29 | print("There are no Histories in your account.")
30 | else:
31 | print("\nHistories:")
32 | for hist in histories:
33 | print(f"{hist.name} ({hist.wrapped['nice_size']}) : {hist.id}")
34 |
--------------------------------------------------------------------------------
/docs/examples/objects/list_workflows.py:
--------------------------------------------------------------------------------
1 | """
2 | This example retrieves details of all the Workflows in our Galaxy account and lists information on them.
3 |
4 | Usage: python list_workflows.py <galaxy_url> <galaxy_api_key>
5 | """
6 |
7 | import sys
8 |
9 | from bioblend.galaxy.objects import GalaxyInstance
10 |
11 | if len(sys.argv) != 3:
12 | print("Usage: python list_workflows.py ")
13 | sys.exit(1)
14 | galaxy_url = sys.argv[1]
15 | galaxy_key = sys.argv[2]
16 |
17 | print("Initiating Galaxy connection")
18 |
19 | gi = GalaxyInstance(galaxy_url, api_key=galaxy_key)
20 |
21 | print("Retrieving Workflows list")
22 |
23 | workflows = gi.workflows.get_previews()
24 |
25 | if len(workflows) == 0:
26 | print("There are no Workflows in your account.")
27 | else:
28 | print("\nWorkflows:")
29 | for wf in workflows:
30 | print(f"{wf.name} : {wf.id}")
31 |
--------------------------------------------------------------------------------
/docs/examples/objects/small.ga:
--------------------------------------------------------------------------------
1 | {
2 | "a_galaxy_workflow": "true",
3 | "annotation": "",
4 | "format-version": "0.1",
5 | "name": "get_col",
6 | "steps": {
7 | "0": {
8 | "annotation": "",
9 | "id": 0,
10 | "input_connections": {},
11 | "inputs": [
12 | {
13 | "description": "",
14 | "name": "input_tsv"
15 | }
16 | ],
17 | "name": "Input dataset",
18 | "outputs": [],
19 | "position": {
20 | "left": 200,
21 | "top": 200
22 | },
23 | "tool_errors": null,
24 | "tool_id": null,
25 | "tool_state": "{\"name\": \"input_tsv\"}",
26 | "tool_version": null,
27 | "type": "data_input",
28 | "user_outputs": []
29 | },
30 | "1": {
31 | "annotation": "",
32 | "id": 1,
33 | "input_connections": {
34 | "input": {
35 | "id": 0,
36 | "output_name": "output"
37 | }
38 | },
39 | "inputs": [],
40 | "name": "Remove beginning",
41 | "outputs": [
42 | {
43 | "name": "out_file1",
44 | "type": "input"
45 | }
46 | ],
47 | "position": {
48 | "left": 420,
49 | "top": 200
50 | },
51 | "post_job_actions": {
52 | "HideDatasetActionout_file1": {
53 | "action_arguments": {},
54 | "action_type": "HideDatasetAction",
55 | "output_name": "out_file1"
56 | }
57 | },
58 | "tool_errors": null,
59 | "tool_id": "Remove beginning1",
60 | "tool_state": "{\"input\": \"null\", \"__rerun_remap_job_id__\": null, \"chromInfo\": \"\\\"/SHARE/USERFS/els7/users/biobank/galaxy/tool-data/shared/ucsc/chrom/?.len\\\"\", \"num_lines\": \"\\\"2\\\"\", \"__page__\": 0}",
61 | "tool_version": "1.0.0",
62 | "type": "tool",
63 | "user_outputs": []
64 | },
65 | "2": {
66 | "annotation": "",
67 | "id": 2,
68 | "input_connections": {
69 | "input": {
70 | "id": 1,
71 | "output_name": "out_file1"
72 | }
73 | },
74 | "inputs": [],
75 | "name": "Cut",
76 | "outputs": [
77 | {
78 | "name": "out_file1",
79 | "type": "tabular"
80 | }
81 | ],
82 | "position": {
83 | "left": 640,
84 | "top": 200
85 | },
86 | "post_job_actions": {},
87 | "tool_errors": null,
88 | "tool_id": "Cut1",
89 | "tool_state": "{\"__page__\": 0, \"__rerun_remap_job_id__\": null, \"delimiter\": \"\\\"T\\\"\", \"columnList\": \"\\\"c1\\\"\", \"input\": \"null\", \"chromInfo\": \"\\\"/SHARE/USERFS/els7/users/biobank/galaxy/tool-data/shared/ucsc/chrom/?.len\\\"\"}",
90 | "tool_version": "1.0.2",
91 | "type": "tool",
92 | "user_outputs": []
93 | }
94 | }
95 | }
96 |
--------------------------------------------------------------------------------
/docs/examples/objects/small.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | import tempfile
4 |
5 | from common import get_one # noqa:I100,I201
6 |
7 | from bioblend.galaxy.objects import GalaxyInstance
8 |
9 | # This is a "toy" example that should run much faster
10 | # (once the cluster's resource manager allows it to run) than the
11 | # real-world ones. The workflow, which is imported from local disk,
12 | # removes two header lines from a tabular file, then extracts one of
13 | # the columns. The input dataset is publicly available on CRS4's
14 | # Orione Galaxy server.
15 |
16 | URL = "https://orione.crs4.it"
17 | API_KEY = os.getenv("GALAXY_API_KEY", "YOUR_API_KEY")
18 | if API_KEY == "YOUR_API_KEY":
19 | sys.exit("API_KEY not set, see the README.txt file")
20 | gi = GalaxyInstance(URL, api_key=API_KEY)
21 |
22 | # import the workflow from the JSON dump
23 |
24 | with open("small.ga") as f:
25 | wf = gi.workflows.import_new(f.read())
26 |
27 | # Select the "Orione SupMat" library
28 |
29 | library_name = "Orione SupMat"
30 | library = get_one(gi.libraries.list(name=library_name))
31 |
32 | # Select the input dataset
33 |
34 | ds_name = "/RNA-Seq - Listeria monocytogenes/Listeria_monocytogenes_EGD_e_uid61583/NC_003210.rnt"
35 | ld = get_one(library.get_datasets(name=ds_name))
36 | input_map = {"input_tsv": ld}
37 |
38 | # Run the workflow on a new history with the selected dataset as
39 | # input, overriding the index of the column to remove; wait until the
40 | # computation is complete.
41 |
42 | history_name = "get_col output"
43 | params = {"Cut1": {"columnList": "c2"}}
44 | print(f"Running workflow: {wf.name} [{wf.id}]")
45 | inv = wf.invoke(input_map, params=params, history=history_name, inputs_by="name")
46 | out_hist = gi.histories.get(inv.history_id)
47 | inv.wait()
48 | print("Job has finished")
49 | assert out_hist.name == history_name
50 | print(f"Output history: {out_hist.name} [{out_hist.id}]")
51 |
52 | # Save results to local disk
53 | out_ds = get_one(out_hist.get_datasets(name="Cut on data 1"))
54 | with tempfile.NamedTemporaryFile(prefix="bioblend_", delete=False) as tmp_f:
55 | out_ds.download(tmp_f)
56 | print(f'Output downloaded to "{tmp_f.name}"')
57 |
--------------------------------------------------------------------------------
/docs/examples/objects/w2_bacterial_reseq.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 |
4 | from common import get_one # noqa:I100,I201
5 |
6 | from bioblend.galaxy.objects import GalaxyInstance
7 |
8 | URL = "https://orione.crs4.it"
9 | API_KEY = os.getenv("GALAXY_API_KEY", "YOUR_API_KEY")
10 | if API_KEY == "YOUR_API_KEY":
11 | sys.exit("API_KEY not set, see the README.txt file")
12 | gi = GalaxyInstance(URL, api_key=API_KEY)
13 |
14 | # Select "W2 - Bacterial re-sequencing | Paired-end" from published workflows
15 |
16 | workflow_name = "W2 - Bacterial re-sequencing | Paired-end"
17 | previews = gi.workflows.get_previews(name=workflow_name, published=True)
18 | p = get_one(_ for _ in previews if _.published)
19 |
20 | # Import the workflow to user space
21 |
22 | iw = gi.workflows.import_shared(p.id)
23 |
24 | # Create a new history
25 |
26 | history_name = f"{workflow_name} output"
27 | h = gi.histories.create(history_name)
28 |
29 | # Select the "Orione SupMat" library
30 |
31 | library_name = "Orione SupMat"
32 | library = get_one(gi.libraries.list(name=library_name))
33 |
34 | # Select the datasets
35 |
36 | ds_names = [
37 | "/Whole genome - Escherichia coli/E coli DH10B MiSeq R1.fastq",
38 | "/Whole genome - Escherichia coli/E coli DH10B MiSeq R2.fastq",
39 | "/Whole genome - Escherichia coli/E coli DH10B - Reference",
40 | ]
41 | input_labels = [
42 | "Forward Reads",
43 | "Reverse Reads",
44 | "Reference Genome",
45 | ]
46 | input_map = {
47 | label: h.import_dataset(get_one(library.get_datasets(name=name))) for name, label in zip(ds_names, input_labels)
48 | }
49 |
50 | # Set custom parameters for the "check_contigs" and "sspace" tools
51 |
52 | params = {
53 | "check_contigs": {"genomesize": 5.0}, # affects both occurrences
54 | "sspace": {"insert": 300, "error": 0.5, "minoverlap": 35},
55 | }
56 |
57 | # Run the workflow on a new history with the selected datasets as inputs
58 |
59 | inv = iw.invoke(input_map, params=params, history=h, inputs_by="name")
60 | out_hist = gi.histories.get(inv.history_id)
61 | assert out_hist.name == history_name
62 |
63 | print(f"Running workflow: {iw.name} [{iw.id}]")
64 | print(f"Output history: {out_hist.name} [{out_hist.id}]")
65 |
--------------------------------------------------------------------------------
/docs/examples/objects/w3_bacterial_denovo.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | import sys
4 | from typing import Any
5 |
6 | from common import get_one # noqa:I100,I201
7 |
8 | from bioblend.galaxy.objects import GalaxyInstance
9 |
10 | URL = "https://orione.crs4.it"
11 | API_KEY = os.getenv("GALAXY_API_KEY", "YOUR_API_KEY")
12 | if API_KEY == "YOUR_API_KEY":
13 | sys.exit("API_KEY not set, see the README.txt file")
14 | gi = GalaxyInstance(URL, api_key=API_KEY)
15 |
16 | # Select "W3 - Bacterial de novo assembly | Paired-end" from published workflows
17 |
18 | workflow_name = "W3 - Bacterial de novo assembly | Paired-end"
19 | previews = gi.workflows.get_previews(name=workflow_name, published=True)
20 | p = get_one(_ for _ in previews if _.published)
21 |
22 | # Import the workflow to user space
23 |
24 | iw = gi.workflows.import_shared(p.id)
25 |
26 | # Create a new history
27 |
28 | history_name = f"{workflow_name} output"
29 | h = gi.histories.create(history_name)
30 |
31 | # Select the "Orione SupMat" library
32 |
33 | library_name = "Orione SupMat"
34 | library = get_one(gi.libraries.list(name=library_name))
35 |
36 | # Select the datasets
37 |
38 | ds_names = [
39 | "/Whole genome - Escherichia coli/E coli DH10B MiSeq R1.fastq",
40 | "/Whole genome - Escherichia coli/E coli DH10B MiSeq R2.fastq",
41 | ]
42 | input_labels = [
43 | "Left/Forward FASTQ Reads",
44 | "Right/Reverse FASTQ Reads",
45 | ]
46 | input_map = {
47 | label: h.import_dataset(get_one(library.get_datasets(name=name))) for name, label in zip(ds_names, input_labels)
48 | }
49 |
50 | # Set the "hash_length" parameter to different values for the 3 "velveth" steps
51 |
52 | lengths = {"19", "23", "29"}
53 | ws_ids = iw.tool_labels_to_ids["velveth"]
54 | assert len(ws_ids) == len(lengths)
55 | params: dict[str, Any] = {id_: {"hash_length": v} for id_, v in zip(ws_ids, lengths)}
56 |
57 | # Set the "ins_length" runtime parameter to the same value for the 3
58 | # "velvetg" steps
59 |
60 | tool_id = "velvetg"
61 | ws_ids = iw.tool_labels_to_ids[tool_id]
62 | step = iw.steps[next(iter(ws_ids))] # arbitrarily pick one
63 | params[tool_id] = {"reads": json.loads(step.tool_inputs["reads"]).copy()}
64 | params[tool_id]["reads"]["ins_length"] = -1
65 |
66 | # Set more custom parameters
67 |
68 | params["cisarunner"] = {"genomesize": 5000000}
69 | params["check_contigs"] = {"genomesize": 5.0}
70 | params["toolshed.g2.bx.psu.edu/repos/edward-kirton/abyss_toolsuite/abyss/1.0.0"] = {"k": 41}
71 |
72 | # Run the workflow on a new history with the selected datasets as inputs
73 |
74 | inv = iw.invoke(input_map, params=params, history=h, inputs_by="name")
75 | out_hist = gi.histories.get(inv.history_id)
76 | assert out_hist.name == history_name
77 |
78 | print(f"Running workflow: {iw.name} [{iw.id}]")
79 | print(f"Output history: {out_hist.name} [{out_hist.id}]")
80 |
--------------------------------------------------------------------------------
/docs/examples/objects/w5_galaxy_api.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | import sys
4 |
5 | import requests
6 |
7 | # This example, provided for comparison with w5_metagenomics.py,
8 | # contains the code required to run the metagenomics workflow
9 | # *without* BioBlend.
10 |
11 | URL = os.getenv("GALAXY_URL", "https://orione.crs4.it").rstrip("/")
12 | API_URL = f"{URL}/api"
13 | API_KEY = os.getenv("GALAXY_API_KEY")
14 | if not API_KEY:
15 | sys.exit("API_KEY not set, see the README.txt file")
16 | headers = {"Content-Type": "application/json", "x-api-key": API_KEY}
17 |
18 | # Select "W5 - Metagenomics" from published workflows
19 |
20 | workflow_name = "W5 - Metagenomics"
21 | r = requests.get(f"{API_URL}/workflows", params={"show_published": True}, headers=headers)
22 | workflows = r.json()
23 | filtered_workflows = [_ for _ in workflows if _["published"] and _["name"] == workflow_name]
24 | assert len(filtered_workflows) == 1
25 | w = filtered_workflows[0]
26 |
27 | # Import the workflow to user space
28 |
29 | data = {"workflow_id": w["id"]}
30 | r = requests.post(f"{API_URL}/workflows/import", data=json.dumps(data), headers=headers)
31 | iw = r.json()
32 | r = requests.get(f"{API_URL}/workflows/{iw['id']}", headers=headers)
33 | iw_details = r.json()
34 |
35 | # Select the "Orione SupMat" library
36 |
37 | library_name = "Orione SupMat"
38 | r = requests.get(f"{API_URL}/libraries", headers=headers)
39 | libraries = r.json()
40 | filtered_libraries = [_ for _ in libraries if _["name"] == library_name]
41 | assert len(filtered_libraries) == 1
42 | library = filtered_libraries[0]
43 |
44 | # Select the "/Metagenomics/MetagenomicsDataset.fq" dataset
45 |
46 | ds_name = "/Metagenomics/MetagenomicsDataset.fq"
47 | r = requests.get(f"{API_URL}/libraries/{library['id']}/contents", headers=headers)
48 | contents = r.json()
49 | filtered_contents = [_ for _ in contents if _["type"] == "file" and _["name"] == ds_name]
50 | assert len(filtered_contents) == 1
51 | ld = filtered_contents[0]
52 |
53 | # Select the blastn step
54 |
55 | filtered_wf_steps = [_ for _ in iw_details["steps"].values() if _["tool_id"] and "blastn" in _["tool_id"]]
56 | assert len(filtered_wf_steps) == 1
57 | ws = filtered_wf_steps[0]
58 | tool_id = ws["tool_id"]
59 |
60 | # Get (a copy of) the parameters dict for the selected step
61 |
62 | ws_parameters = ws["tool_inputs"].copy()
63 | for k, v in ws_parameters.items():
64 | ws_parameters[k] = json.loads(v)
65 |
66 | # Run the workflow on a new history with the selected dataset
67 | # as input, setting the BLAST db to "16SMicrobial-20131106"
68 |
69 | history_name = f"{workflow_name} output"
70 | ws_parameters["db_opts"]["database"] = "16SMicrobial-20131106"
71 | data = {
72 | "workflow_id": iw["id"],
73 | "parameters": {tool_id: {"db_opts": ws_parameters["db_opts"]}},
74 | }
75 | assert len(iw_details["inputs"]) == 1
76 | input_step_id = next(iter(iw_details["inputs"]))  # dict views are not subscriptable in Python 3
77 | data["ds_map"] = {input_step_id: {"src": "ld", "id": ld["id"]}}
78 | data["history"] = history_name
79 | r = requests.post(f"{API_URL}/workflows", data=json.dumps(data), headers=headers)
80 | r_dict = r.json()
81 |
82 | print(f"Running workflow: {iw['name']} [{iw['id']}]")
83 | print(f"Output history: {history_name} [{r_dict['history']}]")
84 |
--------------------------------------------------------------------------------
/docs/examples/objects/w5_metagenomics.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | import sys
4 |
5 | from common import get_one # noqa:I100,I201
6 |
7 | from bioblend.galaxy.objects import GalaxyInstance
8 |
9 | URL = "https://orione.crs4.it"
10 | API_KEY = os.getenv("GALAXY_API_KEY", "YOUR_API_KEY")
11 | if API_KEY == "YOUR_API_KEY":
12 | sys.exit("API_KEY not set, see the README.txt file")
13 | gi = GalaxyInstance(URL, api_key=API_KEY)
14 |
15 | # Select "W5 - Metagenomics" from published workflows
16 |
17 | workflow_name = "W5 - Metagenomics"
18 | previews = gi.workflows.get_previews(name=workflow_name, published=True)
19 | p = get_one(_ for _ in previews if _.published)
20 |
21 | # Import the workflow to user space
22 |
23 | iw = gi.workflows.import_shared(p.id)
24 |
25 | # Create a new history
26 |
27 | history_name = f"{workflow_name} output"
28 | h = gi.histories.create(history_name)
29 |
30 | # Select the "Orione SupMat" library
31 |
32 | library_name = "Orione SupMat"
33 | library = get_one(gi.libraries.list(name=library_name))
34 |
35 | # Select the "/Metagenomics/MetagenomicsDataset.fq" dataset
36 |
37 | ds_name = "/Metagenomics/MetagenomicsDataset.fq"
38 | input_map = {"Input Dataset": h.import_dataset(get_one(library.get_datasets(name=ds_name)))}
39 |
40 | # Select the blastn step
41 |
42 | tool_id = "toolshed.g2.bx.psu.edu/repos/devteam/ncbi_blast_plus/ncbi_blastn_wrapper/0.1.00"
43 | step_id = get_one(iw.tool_labels_to_ids[tool_id])
44 | ws = iw.steps[step_id]
45 |
46 | # Get (a copy of) the parameters dict for the selected step
47 |
48 | ws_parameters = ws.tool_inputs.copy()
49 |
50 | # Run the workflow on a new history with the selected dataset
51 | # as input, setting the BLAST db to "16SMicrobial-20131106"
52 |
53 | params = {tool_id: {"db_opts": json.loads(ws_parameters["db_opts"])}}
54 | params[tool_id]["db_opts"]["database"] = "16SMicrobial-20131106"
55 | inv = iw.invoke(input_map, params=params, history=h, inputs_by="name")
56 | out_hist = gi.histories.get(inv.history_id)
57 | assert out_hist.name == history_name
58 |
59 | print(f"Running workflow: {iw.name} [{iw.id}]")
60 | print(f"Output history: {out_hist.name} [{out_hist.id}]")
61 |
--------------------------------------------------------------------------------
/docs/examples/run_imported_workflow.py:
--------------------------------------------------------------------------------
1 | """
2 | This example demonstrates running a tophat+cufflinks workflow over paired-end data.
3 | This is a task we could not do using Galaxy's GUI batch mode, because the inputs need to be paired.
4 | The workflow is imported from a json file (previously exported from Galaxy), and the input data files from URLs.
5 |
6 | This example creates a new Data Library, so you must be a Galaxy Admin on the instance you run the script against.
7 |
8 | Also note that a Galaxy Workflow will only run without modification if it finds the expected versions of tool wrappers
9 | installed on the Galaxy instance. This is to ensure reproducibility.
10 | In this case we expect Tophat wrapper 1.5.0 and Cufflinks wrapper 0.0.5.
11 |
12 | Usage: python3 run_imported_workflow.py <galaxy_url> <galaxy_api_key>
13 | """
14 |
15 | import sys
16 |
17 | from bioblend import galaxy
18 |
19 | # Specify workflow and data to import into Galaxy
20 |
21 | workflow_file = "tophat_cufflinks_pairedend_workflow.ga"
22 |
23 | import_file_pairs = [
24 | ("https://bioblend.s3.amazonaws.com/C1_R1_1.chr4.fq", "https://bioblend.s3.amazonaws.com/C1_R1_2.chr4.fq"),
25 | ("https://bioblend.s3.amazonaws.com/C1_R2_1.chr4.fq", "https://bioblend.s3.amazonaws.com/C1_R2_2.chr4.fq"),
26 | ("https://bioblend.s3.amazonaws.com/C1_R3_1.chr4.fq", "https://bioblend.s3.amazonaws.com/C1_R3_2.chr4.fq"),
27 | ]
28 |
29 | # Specify names of Library and History that will be created in Galaxy
30 | # In this simple example, these will be created even if items with the same name already exist.
31 |
32 | library_name = "Imported data for API demo"
33 | output_history_name = "Output from API demo"
34 |
35 | if len(sys.argv) != 3:
36 | print("Usage: python3 run_imported_workflow.py ")
37 | sys.exit(1)
38 | galaxy_url = sys.argv[1]
39 | galaxy_key = sys.argv[2]
40 |
41 | print("Initiating Galaxy connection")
42 |
43 | gi = galaxy.GalaxyInstance(url=galaxy_url, key=galaxy_key)
44 |
45 | print("Importing workflow")
46 |
47 | wf_import_dict = gi.workflows.import_workflow_from_local_path(workflow_file)
48 | workflow = wf_import_dict["id"]
49 |
50 | print(f"Creating data library '{library_name}'")
51 |
52 | library_dict = gi.libraries.create_library(library_name)
53 | library = library_dict["id"]
54 |
55 | print("Importing data")
56 |
57 | # Import each pair of files, and track the resulting identifiers.
58 |
59 | dataset_ids = []
60 | filenames = {}
61 | for file1, file2 in import_file_pairs:
62 | dataset1 = gi.libraries.upload_file_from_url(library, file1, file_type="fastqsanger")
63 | dataset2 = gi.libraries.upload_file_from_url(library, file2, file_type="fastqsanger")
64 | id1, id2 = dataset1[0]["id"], dataset2[0]["id"]
65 | filenames[id1] = file1
66 | filenames[id2] = file2
67 | dataset_ids.append((id1, id2))
68 |
69 | print(f"Creating output history '{output_history_name}'")
70 |
71 | outputhist_dict = gi.histories.create_history(output_history_name)
72 | outputhist = outputhist_dict["id"]
73 |
74 | print(f"Will run workflow on {len(dataset_ids)} pairs of files")
75 |
76 | # Get the input step IDs from the workflow.
77 | # We use the BioBlend convenience function get_workflow_inputs to retrieve inputs by label.
78 |
79 | input1 = gi.workflows.get_workflow_inputs(workflow, label="Input fastq readpair-1")[0]
80 | input2 = gi.workflows.get_workflow_inputs(workflow, label="Input fastq readpair-2")[0]
81 |
82 | # For each pair of datasets we imported, run the imported workflow
83 | # For each input we need to build a datamap dict with 'src' set to 'ld', as we stored our data in a Galaxy Library
84 |
85 | for data1, data2 in dataset_ids:
86 | print(f"Initiating workflow run on files {filenames[data1]}, {filenames[data2]}")
87 | datamap = {
88 | input1: {"src": "ld", "id": data1},
89 | input2: {"src": "ld", "id": data2},
90 | }
91 | invocation = gi.workflows.invoke_workflow(
92 | workflow, inputs=datamap, history_id=outputhist, import_inputs_to_history=True
93 | )
94 |
--------------------------------------------------------------------------------
/docs/examples/tophat_cufflinks_pairedend_workflow.ga:
--------------------------------------------------------------------------------
1 | {
2 | "a_galaxy_workflow": "true",
3 | "annotation": "",
4 | "format-version": "0.1",
5 | "name": "TopHat + cufflinks paired-end",
6 | "steps": {
7 | "0": {
8 | "annotation": "",
9 | "id": 0,
10 | "input_connections": {},
11 | "inputs": [
12 | {
13 | "description": "",
14 | "name": "Input fastq readpair-1"
15 | }
16 | ],
17 | "name": "Input dataset",
18 | "outputs": [],
19 | "position": {
20 | "left": 200,
21 | "top": 308
22 | },
23 | "tool_errors": null,
24 | "tool_id": null,
25 | "tool_state": "{\"name\": \"Input fastq readpair-1\"}",
26 | "tool_version": null,
27 | "type": "data_input",
28 | "user_outputs": []
29 | },
30 | "1": {
31 | "annotation": "",
32 | "id": 1,
33 | "input_connections": {},
34 | "inputs": [
35 | {
36 | "description": "",
37 | "name": "Input fastq readpair-2"
38 | }
39 | ],
40 | "name": "Input dataset",
41 | "outputs": [],
42 | "position": {
43 | "left": 177.7833251953125,
44 | "top": 395.26666259765625
45 | },
46 | "tool_errors": null,
47 | "tool_id": null,
48 | "tool_state": "{\"name\": \"Input fastq readpair-2\"}",
49 | "tool_version": null,
50 | "type": "data_input",
51 | "user_outputs": []
52 | },
53 | "2": {
54 | "annotation": "",
55 | "id": 2,
56 | "input_connections": {
57 | "input1": {
58 | "id": 0,
59 | "output_name": "output"
60 | },
61 | "singlePaired|input2": {
62 | "id": 1,
63 | "output_name": "output"
64 | }
65 | },
66 | "inputs": [],
67 | "name": "Tophat for Illumina",
68 | "outputs": [
69 | {
70 | "name": "insertions",
71 | "type": "bed"
72 | },
73 | {
74 | "name": "deletions",
75 | "type": "bed"
76 | },
77 | {
78 | "name": "junctions",
79 | "type": "bed"
80 | },
81 | {
82 | "name": "accepted_hits",
83 | "type": "bam"
84 | }
85 | ],
86 | "position": {
87 | "left": 436,
88 | "top": 280
89 | },
90 | "post_job_actions": {
91 | "HideDatasetActiondeletions": {
92 | "action_arguments": {},
93 | "action_type": "HideDatasetAction",
94 | "output_name": "deletions"
95 | },
96 | "HideDatasetActioninsertions": {
97 | "action_arguments": {},
98 | "action_type": "HideDatasetAction",
99 | "output_name": "insertions"
100 | }
101 | },
102 | "tool_errors": null,
103 | "tool_id": "tophat",
104 | "tool_state": "{\"__page__\": 0, \"input1\": \"null\", \"refGenomeSource\": \"{\\\"genomeSource\\\": \\\"indexed\\\", \\\"index\\\": \\\"dm3\\\", \\\"__current_case__\\\": 0}\", \"singlePaired\": \"{\\\"input2\\\": null, \\\"sPaired\\\": \\\"paired\\\", \\\"pParams\\\": {\\\"pSettingsType\\\": \\\"preSet\\\", \\\"__current_case__\\\": 0}, \\\"__current_case__\\\": 1, \\\"mate_inner_distance\\\": \\\"20\\\"}\"}",
105 | "tool_version": "1.5.0",
106 | "type": "tool",
107 | "user_outputs": []
108 | },
109 | "3": {
110 | "annotation": "",
111 | "id": 3,
112 | "input_connections": {
113 | "input": {
114 | "id": 2,
115 | "output_name": "accepted_hits"
116 | }
117 | },
118 | "inputs": [],
119 | "name": "Cufflinks",
120 | "outputs": [
121 | {
122 | "name": "genes_expression",
123 | "type": "tabular"
124 | },
125 | {
126 | "name": "transcripts_expression",
127 | "type": "tabular"
128 | },
129 | {
130 | "name": "assembled_isoforms",
131 | "type": "gtf"
132 | },
133 | {
134 | "name": "total_map_mass",
135 | "type": "txt"
136 | }
137 | ],
138 | "position": {
139 | "left": 679,
140 | "top": 342
141 | },
142 | "post_job_actions": {},
143 | "tool_errors": null,
144 | "tool_id": "cufflinks",
145 | "tool_state": "{\"min_isoform_fraction\": \"\\\"0.1\\\"\", \"multiread_correct\": \"\\\"Yes\\\"\", \"singlePaired\": \"{\\\"sPaired\\\": \\\"No\\\", \\\"__current_case__\\\": 0}\", \"__page__\": 0, \"pre_mrna_fraction\": \"\\\"0.15\\\"\", \"bias_correction\": \"{\\\"do_bias_correction\\\": \\\"No\\\", \\\"__current_case__\\\": 1}\", \"max_intron_len\": \"\\\"300000\\\"\", \"reference_annotation\": \"{\\\"use_ref\\\": \\\"No\\\", \\\"__current_case__\\\": 0}\", \"global_model\": \"null\", \"do_normalization\": \"\\\"No\\\"\", \"input\": \"null\"}",
146 | "tool_version": "0.0.5",
147 | "type": "tool",
148 | "user_outputs": []
149 | }
150 | }
151 | }
152 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | ========
2 | BioBlend
3 | ========
4 |
5 | About
6 | =====
7 |
8 | .. include:: ../ABOUT.rst
9 |
10 | Installation
11 | ============
12 |
13 | Stable releases of BioBlend are best installed via ``pip`` from PyPI::
14 |
15 | $ python3 -m pip install bioblend
16 |
17 | Alternatively, the most current source code from our `Git repository`_ can be
18 | installed with::
19 |
20 | $ python3 -m pip install git+https://github.com/galaxyproject/bioblend
21 |
22 | After installing the library, you will be able to simply import it into your
23 | Python environment with ``import bioblend``. For details on the available functionality,
24 | see the `API documentation`_.
25 |
26 | BioBlend requires a number of Python libraries. These libraries are installed
27 | automatically when BioBlend itself is installed, regardless of whether it is installed
28 | from PyPI_ or by running the ``python3 setup.py install`` command. The current list of
29 | required libraries is always available from `setup.py`_ in the source code
30 | repository.
31 |
32 | If you also want to run tests locally, some extra libraries are required. To
33 | install them, run::
34 |
35 | $ python3 setup.py test
36 |
37 | Usage
38 | =====
39 |
40 | To get started using BioBlend, install the library as described above. Once the
41 | library is available on your system, you can develop scripts against it; these
42 | scripts do not need to reside in any particular location on the system.
43 | 
44 | It is probably best to take a look at the example scripts in the ``docs/examples``
45 | source directory and browse the `API documentation`_. Beyond that, it's up to your creativity :).
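   | 
   | For instance, a minimal session might look like the following sketch (the URL
   | and API key below are placeholders for your own Galaxy server and credentials)::
   | 
   | from bioblend.galaxy import GalaxyInstance
   | 
   | gi = GalaxyInstance(url="http://127.0.0.1:8080", key="your-api-key")
   | # Print the name and id of each history belonging to the current user
   | for hist in gi.histories.get_histories():
   |     print(hist["name"], hist["id"])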
46 |
47 | Development
48 | ===========
49 |
50 | Anyone interested in contributing to or tweaking the library is more than welcome
51 | to do so. To start, simply fork the `Git repository`_ on GitHub and start playing with
52 | it. Then, issue pull requests.
53 |
54 | API Documentation
55 | =================
56 |
57 | BioBlend's API mirrors the structure and naming of the services it wraps. Thus,
58 | there are two top-level sets of APIs, each corresponding to a separate service and
59 | to a corresponding step in the automation process. *Note* that each of the service
60 | APIs can be used completely independently of the other.
61 | 
62 | Effort has been made to keep the structure and naming of those APIs consistent
63 | across the library, but because they bridge different services, some discrepancies
64 | may exist. Feel free to point those out and/or provide fixes.
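   | 
   | For example, the two clients can be instantiated side by side in one script
   | (a sketch; the URLs and key below are placeholders)::
   | 
   | from bioblend.galaxy import GalaxyInstance
   | from bioblend.toolshed import ToolShedInstance
   | 
   | # Each client is self-contained and talks only to its own service
   | galaxy = GalaxyInstance(url="https://usegalaxy.org", key="your-api-key")
   | toolshed = ToolShedInstance(url="https://toolshed.g2.bx.psu.edu")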
65 |
66 | For Galaxy, an alternative :ref:`object-oriented API <objects-api>` is
67 | also available. This API provides an explicit modeling of server-side
68 | Galaxy instances and their relationships, with higher-level
69 | methods to perform operations such as retrieving all datasets for a
70 | given history. Note that, at the moment, the object-oriented API is still
71 | incomplete, giving access to a more restricted set of Galaxy
72 | modules than the standard one.
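   | 
   | As a sketch of the difference, listing each history together with the names of
   | its datasets through the object-oriented API looks like this (URL and key are
   | again placeholders)::
   | 
   | from bioblend.galaxy.objects import GalaxyInstance
   | 
   | gi = GalaxyInstance("http://127.0.0.1:8080", api_key="your-api-key")
   | # Histories are returned as wrapper objects with methods of their own
   | for hist in gi.histories.list():
   |     print(hist.name, [ds.name for ds in hist.get_datasets()])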
73 |
74 | Galaxy API
75 | ~~~~~~~~~~
76 |
77 | API used to manipulate genomic analyses within Galaxy, including data management
78 | and workflow execution.
79 |
80 | .. toctree::
81 | :maxdepth: 3
82 | :glob:
83 |
84 | api_docs/galaxy/*
85 |
86 | Toolshed API
87 | ~~~~~~~~~~~~
88 |
89 | API used to interact with the Galaxy Toolshed, including repository management.
90 |
91 | .. toctree::
92 | :maxdepth: 3
93 | :glob:
94 |
95 | api_docs/toolshed/*
96 |
97 | Configuration
98 | =============
99 | BioBlend allows library-wide configuration to be set in external files.
100 | These configuration files can be used to specify access keys, for example.
101 |
102 | .. toctree::
103 | :maxdepth: 1
104 | :glob:
105 |
106 | api_docs/lib_config
107 |
108 | Testing
109 | =======
110 | If you would like to do more than just a mock test, you need to point
111 | BioBlend to an instance of Galaxy. Do so by exporting the following
112 | two variables::
113 |
114 | $ export BIOBLEND_GALAXY_URL=http://127.0.0.1:8080
115 | $ export BIOBLEND_GALAXY_API_KEY=<your_API_key>
116 |
117 | The unit tests, stored in the ``tests`` folder, can be run using
118 | `pytest <https://docs.pytest.org/>`_. From the project root::
119 |
120 | $ pytest
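   | 
   | A subset of the tests can be selected with the usual ``pytest`` path syntax,
   | e.g.::
   | 
   | $ pytest bioblend/_tests/TestGalaxyHistories.py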
121 |
122 | Getting help
123 | ============
124 |
125 | If you have run into issues, found a bug, or can't seem to find an answer to
126 | your question regarding the use and functionality of BioBlend, please use the
127 | `GitHub Issues <https://github.com/galaxyproject/bioblend/issues>`_ page to ask your
128 | question.
129 |
130 | Related documentation
131 | =====================
132 |
133 | Links to other documentation and libraries relevant to this library:
134 |
135 | * `Galaxy API documentation <https://docs.galaxyproject.org/en/latest/api_doc.html>`_
136 | * `Blend4j <https://github.com/jmchilton/blend4j>`_: Galaxy API wrapper for Java
137 | * `clj-blend <https://github.com/chapmanb/clj-blend>`_: Galaxy API wrapper for Clojure
138 |
139 | Indices and tables
140 | ==================
141 |
142 | * :ref:`genindex`
143 | * :ref:`modindex`
144 | * :ref:`search`
145 |
146 | .. References/hyperlinks used above
147 | .. _Git repository: https://github.com/galaxyproject/bioblend
148 | .. _PyPI: https://pypi.org/project/bioblend/
149 | .. _setup.py: https://github.com/galaxyproject/bioblend/blob/main/setup.py
150 |
--------------------------------------------------------------------------------
/docs/requirements.txt:
--------------------------------------------------------------------------------
1 | sphinx>=2
2 | sphinx-rtd-theme>=0.5.2
3 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = ["setuptools"]
3 | build-backend = "setuptools.build_meta"
4 |
5 | [tool.black]
6 | include = '\.pyi?$'
7 | line-length = 120
8 | target-version = ['py39']
9 |
10 | [tool.darker]
11 | isort = true
12 |
13 | [tool.ruff]
14 | target-version = "py39"
15 |
16 | [tool.ruff.lint]
17 | select = ["E", "F", "B", "C4", "G", "ISC", "UP"]
18 | # Exceptions:
19 | # B9 flake8-bugbear opinionated warnings
20 | # E501 is line length (delegated to black)
21 | ignore = ["B9", "E501"]
22 |
--------------------------------------------------------------------------------
/pytest.ini:
--------------------------------------------------------------------------------
1 | [pytest]
2 | log_cli = true
3 | log_cli_level = INFO
4 | python_files = Test*.py
5 | testpaths = bioblend/_tests
6 |
--------------------------------------------------------------------------------
/run_bioblend_tests.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | set -e
3 |
4 | show_help () {
5 | echo "Usage: $0 -g GALAXY_DIR [-p PORT] [-e TOX_ENV] [-t BIOBLEND_TESTS] [-r GALAXY_REV] [-c]
6 |
7 | Run tests for BioBlend. Useful for Continuous Integration testing.
8 | *Please note* that this script overwrites the main.pid file and appends to the
9 | main.log file inside the specified Galaxy directory (-g).
10 |
11 | Options:
12 | -g GALAXY_DIR
13 | Path of the local Galaxy git repository.
14 | -p PORT
15 | Port to use for the Galaxy server. Defaults to 8080.
16 | -e TOX_ENV
17 | Work against specified tox environments. Defaults to py39.
18 | -t BIOBLEND_TESTS
19 | Subset of tests to run, e.g.
20 | 'tests/TestGalaxyObjects.py::TestHistory::test_create_delete' . Defaults
21 | to all tests.
22 | -r GALAXY_REV
23 | Branch or commit of the local Galaxy git repository to checkout.
24 | -v GALAXY_PYTHON
25 | Python to use for the Galaxy virtual environment.
26 | -c
27 | Force removal of the temporary directory created for Galaxy, even if some
28 | test failed."
29 | }
30 |
31 | get_abs_dirname () {
32 | # $1 : relative dirname
33 | cd "$1" && pwd
34 | }
35 |
36 | e_val=py39
37 | GALAXY_PORT=8080
38 | while getopts 'hcg:e:p:t:r:v:' option; do
39 | case $option in
40 | h) show_help
41 | exit;;
42 | c) c_val=1;;
43 | g) GALAXY_DIR=$(get_abs_dirname "$OPTARG");;
44 | e) e_val=$OPTARG;;
45 | p) GALAXY_PORT=$OPTARG;;
46 | t) t_val=$OPTARG;;
47 | r) r_val=$OPTARG;;
48 | v) GALAXY_PYTHON=$OPTARG;;
49 | *) show_help
50 | exit 1;;
51 | esac
52 | done
53 |
54 | if [ -z "$GALAXY_DIR" ]; then
55 | echo "Error: missing -g value."
56 | show_help
57 | exit 1
58 | fi
59 |
60 | # Install BioBlend
61 | BIOBLEND_DIR=$(get_abs_dirname "$(dirname "$0")")
62 | if ! command -v tox >/dev/null; then
63 | cd "${BIOBLEND_DIR}"
64 | if [ ! -d .venv ]; then
65 | python3 -m venv .venv
66 | fi
67 | . .venv/bin/activate
68 | python3 -m pip install --upgrade "tox>=1.8.0"
69 | fi
70 |
71 | # Setup Galaxy version
72 | cd "${GALAXY_DIR}"
73 | if [ -n "${r_val}" ]; then
74 | # Update repository (may change the sample files or the list of eggs)
75 | git fetch
76 | git checkout "${r_val}"
77 | if git show-ref -q --verify "refs/heads/${r_val}" 2>/dev/null; then
78 | # ${r_val} is a branch
79 | export GALAXY_VERSION=${r_val}
80 | git pull --ff-only
81 | fi
82 | else
83 | BRANCH=$(git rev-parse --abbrev-ref HEAD)
84 | case $BRANCH in
85 | dev | release_*)
86 | export GALAXY_VERSION=$BRANCH
87 | ;;
88 | esac
89 | fi
90 |
91 | # Setup Galaxy virtual environment
92 | if [ -n "${GALAXY_PYTHON}" ]; then
93 | if [ ! -d .venv ]; then
94 | if ! "${GALAXY_PYTHON}" -m venv .venv; then
95 | echo "Creating the Python virtual environment for Galaxy using the venv standard library module failed."
96 | echo "Trying with virtualenv now."
97 | virtualenv -p "$GALAXY_PYTHON" .venv
98 | fi
99 | fi
100 | export GALAXY_PYTHON
101 | fi
102 |
103 | # Setup Galaxy master API key and admin user
104 | TEMP_DIR=$(mktemp -d 2>/dev/null || mktemp -d -t 'mytmpdir')
105 | echo "Created temporary directory $TEMP_DIR"
106 | mkdir "${TEMP_DIR}/config" "${TEMP_DIR}/database"
107 | printf "\n\n\n" "$TEMP_DIR/shed_tools" > "$TEMP_DIR/config/shed_tool_conf.xml"
108 | # Export BIOBLEND_ environment variables to be used in BioBlend tests
109 | BIOBLEND_GALAXY_MASTER_API_KEY=$(LC_ALL=C tr -dc A-Za-z0-9 < /dev/urandom | head -c 32)
110 | export BIOBLEND_GALAXY_MASTER_API_KEY
111 | export BIOBLEND_GALAXY_USER_EMAIL="${USER}@localhost.localdomain"
112 | DATABASE_CONNECTION=${DATABASE_CONNECTION:-"sqlite:///${TEMP_DIR}/database/universe.sqlite?isolation_level=IMMEDIATE"}
113 | # Update psycopg2 requirement to a version that doesn't use 2to3 for Galaxy release 19.05, see https://github.com/psycopg/psycopg2/issues/1419
114 | sed -i.bak -e 's/psycopg2-binary==2.7.4/psycopg2-binary==2.8.4/' lib/galaxy/dependencies/conditional-requirements.txt
115 | # Start Galaxy and wait for successful server start
116 | export GALAXY_SKIP_CLIENT_BUILD=1
117 | if grep -q wait_arg_set run.sh ; then
118 | # Galaxy 22.01 or earlier.
119 | # Export GALAXY_CONFIG_FILE environment variable to be used by run_galaxy.sh
120 | export GALAXY_CONFIG_FILE="${TEMP_DIR}/config/galaxy.ini"
121 | eval "echo \"$(cat "${BIOBLEND_DIR}/tests/template_galaxy.ini")\"" > "${GALAXY_CONFIG_FILE}"
122 | GALAXY_RUN_ALL=1 "${BIOBLEND_DIR}/run_galaxy.sh" --daemon --wait
123 | else
124 | # Galaxy is controlled via gravity, paste/uwsgi are replaced by gunicorn
125 | # and the `--wait` option does not work any more.
126 | # Export GALAXY_CONFIG_FILE environment variable to be used by run.sh
127 | export GALAXY_CONFIG_FILE="${TEMP_DIR}/config/galaxy.yml"
128 | if [ -f test/functional/tools/samples_tool_conf.xml ]; then
129 | # Galaxy 22.05 or earlier
130 | TEST_TOOLS_CONF_FILE=test/functional/tools/samples_tool_conf.xml
131 | else
132 | TEST_TOOLS_CONF_FILE=test/functional/tools/sample_tool_conf.xml
133 | fi
134 | eval "echo \"$(cat "${BIOBLEND_DIR}/tests/template_galaxy.yml")\"" > "${GALAXY_CONFIG_FILE}"
135 | export GRAVITY_STATE_DIR="${TEMP_DIR}/database/gravity"
136 | ./run.sh --daemon
137 | if ! .venv/bin/galaxyctl -h > /dev/null; then
138 | echo 'galaxyctl status not working'
139 | exit 1
140 | fi
141 | while true; do
142 | sleep 1
143 | if .venv/bin/galaxyctl status | grep -q 'gunicorn.*RUNNING'; then
144 | break
145 | else
146 | echo 'gunicorn not running yet'
147 | fi
148 | done
149 | while true; do
150 | sleep 1
151 | if grep -q "[Ss]erving on http://127.0.0.1:${GALAXY_PORT}" "${GRAVITY_STATE_DIR}/log/gunicorn.log"; then
152 | break
153 | else
154 | echo 'Galaxy not serving yet'
155 | fi
156 | done
157 | fi
158 | export BIOBLEND_GALAXY_URL=http://localhost:${GALAXY_PORT}
159 |
160 | # Run the tests
161 | cd "${BIOBLEND_DIR}"
162 | set +e # don't stop the script if tox fails
163 | if [ -n "${t_val}" ]; then
164 | tox -e "${e_val}" -- "${t_val}"
165 | else
166 | tox -e "${e_val}"
167 | fi
168 | exit_code=$?
169 |
170 | # Stop Galaxy
171 | echo 'Stopping Galaxy'
172 | cd "${GALAXY_DIR}"
173 | if grep -q wait_arg_set run.sh ; then
174 | GALAXY_RUN_ALL=1 "${BIOBLEND_DIR}/run_galaxy.sh" --daemon stop
175 | else
176 | ./run.sh --daemon stop
177 | fi
178 | # Remove temporary directory if -c is specified or if all tests passed
179 | if [ -n "${c_val}" ] || [ $exit_code -eq 0 ]; then
180 | rm -rf "$TEMP_DIR"
181 | fi
182 | exit $exit_code
183 |
--------------------------------------------------------------------------------
/run_galaxy.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | #This script should be run from inside the Galaxy base directory
4 |
5 | # If there is a file that defines a shell environment specific to this
6 | # instance of Galaxy, source the file.
7 | if [ -z "$GALAXY_LOCAL_ENV_FILE" ];
8 | then
9 | GALAXY_LOCAL_ENV_FILE='./config/local_env.sh'
10 | fi
11 |
12 | if [ -f $GALAXY_LOCAL_ENV_FILE ];
13 | then
14 | . $GALAXY_LOCAL_ENV_FILE
15 | fi
16 |
17 | ./scripts/common_startup.sh || exit 1
18 |
19 | # If there is a .venv/ directory, assume it contains a virtualenv that we
20 | # should run this instance in.
21 | if [ -d .venv ];
22 | then
23 | echo "Activating virtualenv at %s/.venv\n" "$(pwd)"
24 | . .venv/bin/activate
25 | fi
26 |
27 | python ./scripts/check_python.py || exit 1
28 |
29 | if [ -z "$GALAXY_CONFIG_FILE" ]; then
30 | if [ -f universe_wsgi.ini ]; then
31 | GALAXY_CONFIG_FILE=universe_wsgi.ini
32 | elif [ -f config/galaxy.ini ]; then
33 | GALAXY_CONFIG_FILE=config/galaxy.ini
34 | else
35 | GALAXY_CONFIG_FILE=config/galaxy.ini.sample
36 | fi
37 | export GALAXY_CONFIG_FILE
38 | fi
39 |
40 | if [ -n "$GALAXY_RUN_ALL" ]; then
41 | servers=$(sed -n 's/^\[server:\(.*\)\]/\1/ p' "$GALAXY_CONFIG_FILE" | xargs echo)
42 | if ! echo "$@" | grep -q 'daemon\|restart'; then
43 | echo "ERROR: \$GALAXY_RUN_ALL cannot be used without the '--daemon', '--stop-daemon', 'restart', 'start' or 'stop' arguments to run.sh"
44 | exit 1
45 | fi
46 | (echo "$@" | grep -q -e '--daemon\|restart') && (echo "$@" | grep -q -e '--wait')
47 | WAIT=$?
48 | ARGS=$(echo "$@" | sed 's/--wait//')
49 | for server in $servers; do
50 | if [ $WAIT -eq 0 ]; then
51 | python ./scripts/paster.py serve "$GALAXY_CONFIG_FILE" --server-name="$server" --pid-file="$server.pid" --log-file="$server.log" $ARGS
52 | while true; do
53 | sleep 1
54 | # Grab the current pid from the pid file and remove any trailing space
55 | if ! current_pid_in_file=$(sed -e 's/[[:space:]]*$//' "$server.pid"); then
56 | echo "A Galaxy process died, interrupting" >&2
57 | exit 1
58 | fi
59 | if [ -n "$current_pid_in_file" ]; then
60 | echo "Found PID $current_pid_in_file in '$server.pid', monitoring '$server.log'"
61 | else
62 | echo "No PID found in '$server.pid' yet"
63 | continue
64 | fi
65 | # Search for all pids in the logs and tail for the last one
66 | latest_pid=$(grep '^Starting server in PID [0-9]\+\.$' "$server.log" | sed 's/^Starting server in PID \([0-9]\{1,\}\).$/\1/' | tail -n 1)
67 | # If they're equivalent, then the current pid file agrees with our logs
68 | # and we've successfully started
69 | [ -n "$latest_pid" ] && [ "$latest_pid" -eq "$current_pid_in_file" ] && break
70 | done
71 | echo
72 | else
73 | echo "Handling $server with log file $server.log..."
74 | python ./scripts/paster.py serve "$GALAXY_CONFIG_FILE" --server-name="$server" --pid-file="$server.pid" --log-file="$server.log" $@
75 | fi
76 | done
77 | else
78 | # Handle only 1 server, whose name can be specified with --server-name parameter (defaults to "main")
79 | python ./scripts/paster.py serve "$GALAXY_CONFIG_FILE" $@
80 | fi
81 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [flake8]
2 | exclude =
3 | .eggs
4 | .git
5 | .tox
6 | .venv
7 | build
8 | # E203 is whitespace before ':'; we follow black's formatting here. See https://black.readthedocs.io/en/stable/faq.html#why-are-flake8-s-e203-and-w503-violated
9 | # E501 is line length, managed by black
10 | # E701,E704 are multiple statements on one line; we follow black's formatting here. See https://black.readthedocs.io/en/stable/guides/using_black_with_other_tools.html#configuration
11 | # SFS3 is string literal formatting using f-strings
12 | # W503 is line breaks before binary operators, which has been reversed in PEP 8.
13 | ignore = E203,E501,E701,E704,SFS3,W503
14 |
15 | [metadata]
16 | author = Enis Afgan
17 | author_email = afgane@gmail.com
18 | classifiers =
19 | Development Status :: 5 - Production/Stable
20 | Intended Audience :: Developers
21 | License :: OSI Approved :: MIT License
22 | Operating System :: OS Independent
23 | Programming Language :: Python :: 3
24 | Programming Language :: Python :: 3.9
25 | Programming Language :: Python :: 3.10
26 | Programming Language :: Python :: 3.11
27 | Programming Language :: Python :: 3.12
28 | Programming Language :: Python :: 3.13
29 | Topic :: Scientific/Engineering :: Bio-Informatics
30 | Typing :: Typed
31 | description = Library for interacting with the Galaxy API
32 | license = MIT
33 | license_files =
34 | CITATION
35 | LICENSE
36 | long_description = file: README.rst
37 | long_description_content_type = text/x-rst
38 | maintainer = Nicola Soranzo
39 | maintainer_email = nicola.soranzo@earlham.ac.uk
40 | name = bioblend
41 | project_urls =
42 | Bug Tracker = https://github.com/galaxyproject/bioblend/issues
43 | Documentation = https://bioblend.readthedocs.io/
44 | Source Code = https://github.com/galaxyproject/bioblend
45 | url = https://bioblend.readthedocs.io/
46 | version = attr: bioblend.__version__
47 |
48 | [mypy]
49 | check_untyped_defs = True
50 | disallow_subclassing_any = True
51 | disallow_untyped_calls = True
52 | disallow_untyped_decorators = True
53 | disallow_untyped_defs = True
54 | ignore_missing_imports = True
55 | implicit_optional = False
56 | implicit_reexport = False
57 | pretty = True
58 | show_error_codes = True
59 | strict_equality = True
60 | warn_redundant_casts = True
61 | warn_unused_ignores = True
62 | warn_unreachable = True
63 |
64 | [mypy-bioblend._tests.*]
65 | disallow_untyped_defs = False
66 | # Allow testing that a function return value is None
67 | disable_error_code = func-returns-value
68 |
69 | [options]
70 | install_requires =
71 | PyYAML
72 | requests>=2.20.0
73 | requests-toolbelt>=0.5.1,!=0.9.0
74 | tuspy
75 | packages = find:
76 | python_requires = >=3.9
77 |
78 | [options.entry_points]
79 | console_scripts =
80 | bioblend-galaxy-tests = bioblend._tests.pytest_galaxy_test_wrapper:main [testing]
81 |
82 | [options.extras_require]
83 | testing =
84 | pytest
85 |
86 | [options.package_data]
87 | bioblend =
88 | _tests/data/*
89 | py.typed
90 |
91 | [options.packages.find]
92 | exclude = tests
93 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup
2 |
3 | setup()
4 |
--------------------------------------------------------------------------------
/tests:
--------------------------------------------------------------------------------
1 | bioblend/_tests
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | envlist = lint, py39
3 |
4 | [testenv]
5 | commands =
6 | pytest {posargs}
7 | deps =
8 | pytest
9 | passenv =
10 | BIOBLEND_GALAXY_API_KEY
11 | BIOBLEND_GALAXY_MASTER_API_KEY
12 | BIOBLEND_GALAXY_URL
13 | BIOBLEND_GALAXY_USER_EMAIL
14 | BIOBLEND_TEST_JOB_TIMEOUT
15 | GALAXY_VERSION
16 | BIOBLEND_TOOLSHED_URL
17 |
18 | [testenv:build]
19 | commands =
20 | make clean
21 | python3 -m build
22 | twine check dist/*
23 | deps =
24 | build
25 | twine
26 | allowlist_externals =
27 | make
28 | skip_install = true
29 |
30 | [testenv:lint]
31 | commands =
32 | ruff check .
33 | flake8 .
34 | black --check --diff .
35 | isort --check --diff .
36 | mypy bioblend/ docs/examples/
37 | deps =
38 | black
39 | flake8
40 | flake8-bugbear
41 | flake8-sfs
42 | isort
43 | mypy
44 | ruff
45 | types-requests
46 | types-PyYAML
47 | skip_install = true
48 |
--------------------------------------------------------------------------------