├── waves_galaxy
│   ├── __init__.py
│   ├── settings.ini.sample
│   ├── settings.ini
│   ├── wsgi.py
│   ├── urls.py
│   └── settings.py
├── docs
│   ├── authors.rst
│   ├── changelog.rst
│   ├── license.rst
│   ├── readme.rst
│   ├── contributing.rst
│   ├── source.rst
│   ├── index.rst
│   ├── installation.rst
│   ├── django_sphinx.py
│   └── conf.py
├── requirements.txt
├── waves
│   ├── __init__.py
│   └── adaptors
│       ├── __init__.py
│       └── galaxy
│           ├── apps.py
│           ├── __init__.py
│           ├── exception.py
│           ├── fixtures
│           │   ├── tests
│           │   │   └── mafft.fasta
│           │   └── services
│           │       ├── fasta_to_phylip.json
│           │       ├── fastme.json
│           │       ├── noisy.json
│           │       ├── mafft.json
│           │       └── phyml.json
│           ├── workflow.py
│           ├── utils.py
│           ├── tests.py
│           ├── tool.py
│           └── importers.py
├── MANIFEST.in
├── setup.cfg
├── AUTHORS.md
├── CONTRIBUTING.md
├── CHANGES.md
├── manage.py
├── README.md
├── LICENSE.md
├── .gitignore
└── setup.py
/waves_galaxy/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/docs/authors.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../AUTHORS.md
2 |
--------------------------------------------------------------------------------
/docs/changelog.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../CHANGES.md
--------------------------------------------------------------------------------
/docs/license.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../LICENSE.md
2 |
--------------------------------------------------------------------------------
/docs/readme.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../README.md
2 |
--------------------------------------------------------------------------------
/docs/contributing.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../CONTRIBUTING.md
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | bioblend==0.8.0
2 | waves-core==1.1.3
--------------------------------------------------------------------------------
/waves/__init__.py:
--------------------------------------------------------------------------------
1 | __import__('pkg_resources').declare_namespace(__name__)
2 |
--------------------------------------------------------------------------------
/waves/adaptors/__init__.py:
--------------------------------------------------------------------------------
1 | __import__('pkg_resources').declare_namespace(__name__)
2 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include LICENSE
2 | include README.md
3 | include VERSION
4 | recursive-include docs *
5 | recursive-exclude waves_galaxy *
--------------------------------------------------------------------------------
/waves_galaxy/settings.ini.sample:
--------------------------------------------------------------------------------
1 | [galaxy]
2 | WAVES_TEST_GALAXY_API_KEY: 'GALAXY API KEY'
3 | WAVES_TEST_GALAXY_HOST: usegalaxy.org
4 | WAVES_TEST_GALAXY_PROTOCOL: https
5 | WAVES_TEST_GALAXY_PORT:
6 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [metadata]
2 | description-file = README.md
3 |
4 | [build_sphinx]
5 | source-dir = docs/
6 | build-dir = build/docs/
7 | all_files = 1
8 |
9 | [upload_sphinx]
10 | upload-dir = docs/build/html
--------------------------------------------------------------------------------
/AUTHORS.md:
--------------------------------------------------------------------------------
1 | Authors
2 | =======
3 |
4 | * Marc Chakiachvili (LIRMM - UMR 5506 CNRS / UM - France)
5 | * Vincent Lefort (LIRMM - UMR 5506 CNRS / UM - France)
6 | * Anne-Muriel Arigon Chiffoleau (LIRMM - UMR 5506 CNRS / UM - France)
7 |
--------------------------------------------------------------------------------
/docs/source.rst:
--------------------------------------------------------------------------------
1 | Source Documentation
2 | ====================
3 |
4 | Classes
5 | -------
6 |
7 | .. automodule:: waves.adaptors.galaxy.tool
8 |
9 | .. automodule:: waves.adaptors.galaxy.workflow
10 |
11 | .. automodule:: waves.adaptors.galaxy.importers
12 |
13 |
14 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | Contributing
2 | ============
3 |
4 | You can contribute to the WAVES project(s) through the following channels:
5 |
6 | - Git source code: https://github.com/lirmm/waves-galaxy
7 | - Issue tracker: https://github.com/lirmm/waves-galaxy
8 | - Mailing list: waves-webapp@googlegroups.com
9 |
--------------------------------------------------------------------------------
/waves/adaptors/galaxy/apps.py:
--------------------------------------------------------------------------------
1 | """
2 | WAVES app Django application descriptor
3 |
4 | """
5 | from __future__ import unicode_literals
6 |
7 | from django.apps import AppConfig
8 |
9 |
10 | class GalaxyConfig(AppConfig):
11 | """
12 | AppConfig for the WAVES Galaxy adaptor application
13 | """
14 | name = "waves.adaptors.galaxy"
15 | verbose_name = 'WAVES Galaxy adaptors'
16 |
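17 | # Note: this AppConfig is referenced by ``default_app_config`` in
18 | # waves/adaptors/galaxy/__init__.py; the application is enabled by adding
19 | # 'waves.adaptors.galaxy' to INSTALLED_APPS (see waves_galaxy/settings.py).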
--------------------------------------------------------------------------------
/waves/adaptors/galaxy/__init__.py:
--------------------------------------------------------------------------------
1 | from __future__ import unicode_literals
2 |
3 | __version_detail__ = '1.1.4.1'
4 | __version__ = '1.1.4'
5 | __author__ = 'Marc Chakiachvili, MAB Team'
6 | __licence__ = 'GPLv3'
7 | __copyright__ = "Copyright(C) 2015-2017, LIRMM - UM - CNRS"
8 | __db_version__ = "1.1"
9 | __name__ = "WAVES - Galaxy adaptor"
10 |
11 | default_app_config = 'waves.adaptors.galaxy.apps.GalaxyConfig'
12 |
--------------------------------------------------------------------------------
/waves_galaxy/settings.ini:
--------------------------------------------------------------------------------
1 | #[galaxy]
2 | #WAVES_TEST_GALAXY_API_KEY: d27ba69b8d21af2f9d93032077373267
3 | #WAVES_TEST_GALAXY_HOST: wilkins.galaxy.atgc-montpellier.fr
4 | #WAVES_TEST_GALAXY_PROTOCOL: http
5 | #WAVES_TEST_GALAXY_PORT: 80
6 | [galaxy]
7 | WAVES_TEST_GALAXY_API_KEY: e8a3bda8f4f039d83575ffb593fa3159
8 | WAVES_TEST_GALAXY_HOST: usegalaxy.org
9 | WAVES_TEST_GALAXY_PROTOCOL: https
10 | #WAVES_TEST_GALAXY_PORT: 80
11 |
--------------------------------------------------------------------------------
/waves_galaxy/wsgi.py:
--------------------------------------------------------------------------------
1 | """
2 | WSGI config for waves_galaxy project.
3 |
4 | It exposes the WSGI callable as a module-level variable named ``application``.
5 |
6 | For more information on this file, see
7 | https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
8 | """
9 |
10 | import os
11 |
12 | from django.core.wsgi import get_wsgi_application
13 |
14 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "waves_galaxy.settings")
15 |
16 | application = get_wsgi_application()
17 |
--------------------------------------------------------------------------------
/CHANGES.md:
--------------------------------------------------------------------------------
1 | Version 1.1.3 - 2018-02-15
2 | --------------------------
3 |
4 | - Integrated tools import from the latest waves-core version (1.1.9)
5 |
6 | Version 1.1.2 - 2018-02-07
7 | --------------------------
8 |
9 | - [Updated] - updated dependency to waves-core 1.1.6
10 |
11 | Version 1.1.1 - 2017-10-18
12 | --------------------------
13 |
14 | - Corrected importer / runner for Galaxy Tools
15 |
16 |
17 | Version 1.1.0 - 2017-09-30
18 | --------------------------
19 |
20 | - Integrated modification issued from waves-core 1.1.2
21 | - Added changelog
22 |
23 |
24 | Version 0.0.3 - 2017-07-07
25 | --------------------------
26 |
27 | - First Ready to play version - detached from waves-core
28 |
29 |
--------------------------------------------------------------------------------
/waves_galaxy/urls.py:
--------------------------------------------------------------------------------
1 | """waves_galaxy URL Configuration
2 |
3 | The `urlpatterns` list routes URLs to views. For more information please see:
4 | https://docs.djangoproject.com/en/1.11/topics/http/urls/
5 | Examples:
6 | Function views
7 | 1. Add an import: from my_app import views
8 | 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
9 | Class-based views
10 | 1. Add an import: from other_app.views import Home
11 | 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
12 | Including another URLconf
13 | 1. Import the include() function: from django.conf.urls import url, include
14 | 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
15 | """
16 | urlpatterns = []
17 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | Welcome to the WAVES Galaxy adaptor documentation!
2 | ===================================================
3 |
4 | .. toctree::
5 | :maxdepth: 1
6 | :caption: Use Galaxy adapter
7 |
8 | readme
9 | authors
10 | license
11 | installation
12 | contributing
13 |
14 | .. toctree::
15 | :caption: Changelog
16 | :maxdepth: 1
17 |
18 | changelog
19 |
20 |
21 | .. toctree::
22 | :maxdepth: 1
23 | :caption: Waves Galaxy sources
24 |
25 | source
26 |
27 | .. seealso::
28 |
29 | WAVES CORE documentation : ``_
30 | WAVES DEMO documentation : ``_
31 |
32 | Indices and tables
33 | ==================
34 |
35 | * :ref:`genindex`
36 | * :ref:`modindex`
37 | * :ref:`search`
38 |
39 |
--------------------------------------------------------------------------------
/manage.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | import os
3 | import sys
4 |
5 | if __name__ == "__main__":
6 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "waves_galaxy.settings")
7 | try:
8 | from django.core.management import execute_from_command_line
9 | except ImportError:
10 | # The above import may fail for some other reason. Ensure that the
11 | # issue is really that Django is missing to avoid masking other
12 | # exceptions on Python 2.
13 | try:
14 | import django
15 | except ImportError:
16 | raise ImportError(
17 | "Couldn't import Django. Are you sure it's installed and "
18 | "available on your PYTHONPATH environment variable? Did you "
19 | "forget to activate a virtual environment?"
20 | )
21 | raise
22 | execute_from_command_line(sys.argv)
23 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ReadME
2 | ======
3 |
4 | WAVES adaptor classes are meant to be used inside a WAVES-webapp project, but could be useful elsewhere depending on your
5 | requirements.
6 |
7 |
8 | Features
9 | --------
10 |
11 | The WAVES Galaxy adaptor is a set of classes intended to wrap the interface of remote computation resources into the WAVES unified ServiceRunner API through a Galaxy server (https://usegalaxy.org).
12 |
13 | - WAVES core package: https://github.com/lirmm/waves-core
14 | - GALAXY project: https://galaxyproject.org/
15 | - BioBlend galaxy API: https://github.com/galaxyproject/bioblend
16 |
17 | Contribute
18 | ----------
19 |
20 | - Issue Tracker: https://github.com/lirmm/waves-galaxy/issues
21 | - Source Code: https://github.com/lirmm/waves-galaxy
22 |
23 | Support
24 | -------
25 |
26 | If you are having issues, please let us know.
27 | We have a mailing list located at: waves-webapp@googlegroups.com
28 |
29 |
30 | License
31 | -------
32 |
33 | The project is licensed under the GNU GPLv3 license.
34 |
35 |
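36 | Usage sketch
37 | ------------
38 | 
39 | A minimal, illustrative sketch of instantiating the adaptor and listing remote Galaxy
40 | tools. The constructor parameters (`host`, `protocol`, `port`, `app_key`) and the
41 | `importer.list_services()` call mirror the package test suite; the host and API key
42 | values below are placeholders, not defaults:
43 | 
44 | ```python
45 | from waves.adaptors.galaxy.tool import GalaxyJobAdaptor
46 | 
47 | adaptor = GalaxyJobAdaptor(host='usegalaxy.org', protocol='https', port='', app_key='<your Galaxy API key>')
48 | tools = adaptor.importer.list_services()  # tools exposed by the remote Galaxy instance
49 | print(tools)
50 | ```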
--------------------------------------------------------------------------------
/docs/installation.rst:
--------------------------------------------------------------------------------
1 | Installation
2 | ============
3 |
4 | Add WAVES adaptors to communicate with a Galaxy server.
5 | 
6 | .. WARNING::
7 | To run WAVES, it is strongly recommended to read the dedicated doc:
8 | `waves-core `_.
9 |
10 |
11 | .. note::
12 | You need to install the waves-core package in your project before running this setup.
13 | Once your Django application is created with waves-core, simply add the waves-galaxy package.
14 | 
15 | Add the package to your virtual env:
16 |
17 | ``pip install waves-galaxy``
18 |
19 |
20 | 1. Configure WAVES
21 | ------------------
22 |
23 | Simply enable the waves-galaxy adaptors in your settings.py file:
24 |
25 | .. code-block:: python
26 |
27 | WAVES_CORE = {
28 | ...
29 | 'ADAPTORS_CLASSES': (
30 | ...
31 | 'waves.adaptors.galaxy.tool.GalaxyJobAdaptor',
32 | 'waves.adaptors.galaxy.workflow.GalaxyWorkFlowAdaptor',
33 | ),
34 | }
35 |
36 |
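37 | 2. Check the connection (optional)
38 | -----------------------------------
39 | 
40 | As a quick sanity check, the adaptor can also be instantiated directly from a Django
41 | shell. This is only an illustrative sketch: the constructor parameters mirror those used
42 | in the package test suite, and the host / API key values are placeholders.
43 | 
44 | .. code-block:: python
45 | 
46 | from waves.adaptors.galaxy.tool import GalaxyJobAdaptor
47 | 
48 | adaptor = GalaxyJobAdaptor(host='usegalaxy.org', protocol='https', port='', app_key='<your Galaxy API key>')
49 | adaptor.connect()  # raises AdaptorConnectException if the connection fails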
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | License (GPLv3)
2 | ===============
3 |
4 | WAVES packages are free software: you can redistribute them and/or modify them under the terms of the GNU General Public License version 3 as published by the `Free Software Foundation `__.
5 |
6 | This program is distributed in the hope that it will be useful, but
7 | WITHOUT ANY WARRANTY; without even the implied warranty of
8 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
9 | Public License for more details.
10 |
11 | For more specific details see
12 | `http://www.gnu.org/licenses `__ and the
13 | `Quick Guide to GPLv3 `__ included in the codebase.
14 | 
15 | The GNU operating system, which is under the same license, has an
16 | informative `FAQ here `__.
17 |
18 | Note to developers
19 | ==================
20 |
21 | We very much appreciate you using our code to do fun and interesting
22 | things. We hope that while doing so you may find and fix bugs or
23 | make enhancements that could be useful for the greater community, and
24 | that you will make the developers aware of them by emailing waves-webapp@googlegroups.com
25 | so they can be considered for addition to the original code base.
--------------------------------------------------------------------------------
/waves/adaptors/galaxy/exception.py:
--------------------------------------------------------------------------------
1 | """ Parse Bioblend connection errors """
2 | from __future__ import unicode_literals
3 |
4 | import json
5 |
6 | import bioblend
7 |
8 | from waves.wcore.adaptors.exceptions import AdaptorConnectException
9 |
10 | __all__ = ['GalaxyAdaptorConnectionError']
11 |
12 |
13 | class GalaxyAdaptorConnectionError(AdaptorConnectException):
14 | """
15 | Specific subclass for managing Galaxy service connection errors
16 | """
17 | def __init__(self, e):
18 | """
19 | Load and parse superclass ConnectionError message body
20 | :param e: The exception
21 | """
22 |
23 | class BaseError(Exception):
24 | def __init__(self, *args, **kwargs):
25 | super(BaseError, self).__init__(*args, **kwargs)
26 |
27 | if getattr(e, 'body', None):
28 | error_data = json.loads(e.body)
29 | elif isinstance(e, bioblend.ConnectionError):
30 | error_data = dict(err_msg=e.message)
31 | elif isinstance(e, str):
32 | try:
33 | error_data = json.loads(e)
34 | except ValueError:
35 | error_data = dict(err_msg="%s" % e)
36 | message = '{}'.format(error_data['err_msg'])
37 | super(GalaxyAdaptorConnectionError, self).__init__(message)
38 |
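39 | # Illustrative usage sketch (not part of this module's API): adaptor code talking to
40 | # Galaxy through bioblend typically wraps connection failures like this, where
41 | # ``galaxy_instance`` stands for a bioblend ``GalaxyInstance``:
42 | #
43 | #     try:
44 | #         galaxy_instance.gi.users.get_current_user()
45 | #     except bioblend.ConnectionError as exc:
46 | #         raise GalaxyAdaptorConnectionError(exc)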
--------------------------------------------------------------------------------
/waves/adaptors/galaxy/fixtures/tests/mafft.fasta:
--------------------------------------------------------------------------------
1 | >hg18.chr20(+):56827368-56827443|sequence_index=0|block_index=0|species=hg18|hg18_0_0
2 | GACAGGGTGCATCTGGGAGGG---CCTGCCGGGCCTTTA-TTCAACACTAGATACGCCCCATCTCCAATTCTAATGGAC-
3 | >panTro2.chr20(+):56528685-56528760|sequence_index=0|block_index=0|species=panTro2|panTro2_0_0
4 | GACAGGGTGCATCTGAGAGGG---CCTGCCAGGCCTTTA-TTCAACACTAGATACGCCCCATCTCCAATTCTAATGGAC-
5 | >rheMac2.chr10(-):5711577-5711646|sequence_index=0|block_index=0|species=rheMac2|rheMac2_0_0
6 | GACAGGGTGCATCTGAGAGGG---CCTGCTGGGCCTTTG-TTCAAAACTAGATATGCCCCAACTCCAATTCTA-------
7 | >mm8.chr2(+):173910832-173910893|sequence_index=0|block_index=0|species=mm8|mm8_0_0
8 | AGAAGGATCCACCT------------TGCTGGGCCTCTGCTCCAGCAAGACCCACCTCCCAACTCAAATGCCC-------
9 | >canFam2.chr24(+):46551822-46551889|sequence_index=0|block_index=0|species=canFam2|canFam2_0_0
10 | CG------GCGTCTGTAAGGGGCCACCGCCCGGCCTGTG-CTCAAAGCTACAAATGACTCAACTCCCAACCGA------C
11 |
12 | >hg18.chr20(+):56827443-56827480|sequence_index=0|block_index=1|species=hg18|hg18_1_0
13 | ATGTGCAGAAAATGTGATACAGAAACCTGCAGAGCAG
14 | >panTro2.chr20(+):56528760-56528797|sequence_index=0|block_index=1|species=panTro2|panTro2_1_0
15 | ATGTGCAGAAAATGTGATACAGAAACCTGCAGAGCAG
16 | >rheMac2.chr10(-):5711540-5711577|sequence_index=0|block_index=1|species=rheMac2|rheMac2_1_0
17 | ATGTGCGGAAAATGTGATACAGAAACCTGCAGAGCAG
18 |
19 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .idea
2 | .directory
3 | build/
4 | dist/
5 | ### Python template
6 | # Byte-compiled / optimized / DLL files
7 | __pycache__/
8 | *.py[cod]
9 | *$py.class
10 |
11 | # C extensions
12 | *.so
13 |
14 | # Distribution / packaging
15 | .Python
16 | env/
17 | develop-eggs/
18 | downloads/
19 | eggs/
20 | .eggs/
21 | lib/
22 | lib64/
23 | parts/
24 | sdist/
25 | var/
26 | wheels/
27 | *.egg-info/
28 | .installed.cfg
29 | *.egg
30 |
31 | # PyInstaller
32 | # Usually these files are written by a python script from a template
33 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
34 | *.manifest
35 | *.spec
36 |
37 | # Installer logs
38 | pip-log.txt
39 | pip-delete-this-directory.txt
40 |
41 | # Unit test / coverage reports
42 | htmlcov/
43 | .tox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *,cover
50 | .hypothesis/
51 |
52 | # Translations
53 | *.mo
54 | *.pot
55 |
56 | # Django stuff:
57 | *.log
58 | local_settings.py
59 |
60 | # Flask stuff:
61 | instance/
62 | .webassets-cache
63 |
64 | # Scrapy stuff:
65 | .scrapy
66 |
67 | # Sphinx documentation
68 | docs/_build/
69 |
70 | # PyBuilder
71 | target/
72 |
73 | # Jupyter Notebook
74 | .ipynb_checkpoints
75 |
76 | # pyenv
77 | .python-version
78 |
79 | # celery beat schedule file
80 | celerybeat-schedule
81 |
82 | # SageMath parsed files
83 | *.sage.py
84 |
85 | # dotenv
86 | .env
87 |
88 | # virtualenv
89 | .venv
90 | venv/
91 | ENV/
92 |
93 | # Spyder project settings
94 | .spyderproject
95 |
96 | # Rope project settings
97 | .ropeproject
98 | dist
99 | data
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup, find_packages
2 | import os
3 |
4 | with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
5 | README = readme.read()
6 |
7 | os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
8 |
9 |
10 | def import_version():
11 | from waves.adaptors.galaxy import __version_detail__
12 | return __version_detail__
13 |
14 |
15 | setup(
16 | name='waves-galaxy-adaptors',
17 | version=import_version(),
18 | packages=find_packages(),
19 | url='https://github.com/lirmm/waves-galaxy',
20 | license='GPLv3',
21 | author='Marc Chakiachvili',
22 | author_email='marc.chakiachvili@lirmm.fr',
23 | description='WAVES adaptor to interact with Galaxy remote platform',
24 | long_description=README,
25 | maintainer='LIRMM - MAB Laboratory - France',
26 | maintainer_email='vincent.lefort@lirmm.fr',
27 | include_package_data=True,
28 | install_requires=[
29 | 'bioblend==0.9.0',
30 | 'waves-core>=1.1.8'
31 | ],
32 | classifiers=[
33 | 'Environment :: Web Environment',
34 | 'Framework :: Django',
35 | 'Development Status :: 3 - Alpha',
36 | 'Intended Audience :: Developers',
37 | 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
38 | 'Programming Language :: Python :: 2.7',
39 | 'Topic :: Utilities',
40 | 'Topic :: System :: Distributed Computing',
41 | 'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',
42 | 'Topic :: Scientific/Engineering :: Bio-Informatics',
43 | 'Operating System :: Unix'
44 | ],
45 | )
46 |
--------------------------------------------------------------------------------
/waves/adaptors/galaxy/workflow.py:
--------------------------------------------------------------------------------
1 | from __future__ import unicode_literals
2 |
3 | from waves.adaptors.galaxy.tool import GalaxyJobAdaptor
4 |
5 | grp_name = "Galaxy"
6 | __all__ = ['GalaxyWorkFlowAdaptor']
7 |
8 |
9 | class GalaxyWorkFlowAdaptor(GalaxyJobAdaptor):
10 | """Dedicated Adaptor to run / import / follow up Galaxy Workflow execution
11 |
12 | .. WARNING::
13 | This class is not fully implemented at the moment !
14 |
15 | As it inherits from :class:`waves.adaptors.galaxy.tool.GalaxyJobAdaptor`, its init params are the same.
16 |
17 | """
18 | name = 'Galaxy remote workflow adaptor (api_key)'
19 |
20 | #: Dedicated import clazz for Galaxy workflows see :class:`waves_addons.importer.galaxy.GalaxyWorkFlowImporter`
21 | # TODO create and manage Workflow imports correctly
22 | # importer_clazz = 'waves_addons.importers.galaxy.workflow.GalaxyWorkFlowImporter'
23 |
24 | def _run_job(self, job):
25 | """
26 | :param job: Job to run
27 | :raise: NotImplementedError
28 | """
29 | raise NotImplementedError()
30 |
31 | def _cancel_job(self, job):
32 | """
33 | :param job: Job to cancel
34 | :raise: NotImplementedError
35 | """
36 | raise NotImplementedError()
37 |
38 | def _job_status(self, job):
39 | """
40 | :param job: Job to show status
41 | :raise: NotImplementedError
42 | """
43 | raise NotImplementedError()
44 |
45 | def _job_results(self, job):
46 | """
47 | :param job: Job to retrieve result for
48 | :raise: NotImplementedError
49 | """
50 | raise NotImplementedError()
51 |
52 |
53 |
--------------------------------------------------------------------------------
/waves/adaptors/galaxy/fixtures/services/fasta_to_phylip.json:
--------------------------------------------------------------------------------
1 | {"inputs": [{"multiple": false, "help": "fasta or phylip format", "edam": {"edam_data": ["data_2044"], "edam_formats": ["format_1929"]}, "argument": null, "optional": false, "label": "Source file", "is_dynamic": false, "extensions": ["fasta"], "model_class": "DataToolParameter", "hidden": false, "refresh_on_change": true, "type": "data", "options": {"hdca": [], "hda": []}, "name": "input"}, {"multiple": false, "help": "Sequencial: sequences are one one line", "display": null, "optional": false, "argument": null, "value": "", "label": "format", "is_dynamic": false, "model_class": "SelectToolParameter", "hidden": false, "refresh_on_change": false, "type": "select", "options": [["Interleave", "", false], ["Sequencial", "--sequencial", false]], "name": "format"}], "panel_section_name": "Phylogenetics", "config_file": "/home/galaxy/shed_tools/testtoolshed.g2.bx.psu.edu/repos/dcorreia/phylogeny_tools/886d136b8e1a/phylogeny_tools/fasta2phylip/fasta_to_phylip.xml", "description": "file conversion", "outputs": [{"name": "output", "format": "phylip", "label": "${input.name}", "edam_format": "format_2330", "edam_data": "data_0006", "model_class": "ToolOutput", "hidden": false}], "labels": [], "edam_operations": [], "form_style": "regular", "edam_topics": [], "panel_section_id": "phylogenetics", "version": "1.0.0", "link": "/tool_runner?tool_id=testtoolshed.g2.bx.psu.edu%2Frepos%2Fdcorreia%2Fphylogeny_tools%2Ffasta_to_phylip%2F1.0.0", "target": "galaxy_main", "min_width": -1, "model_class": "Tool", "id": "testtoolshed.g2.bx.psu.edu/repos/dcorreia/phylogeny_tools/fasta_to_phylip/1.0.0", "tool_shed_repository": {"owner": "dcorreia", "changeset_revision": "886d136b8e1a", "name": "phylogeny_tools", "tool_shed": "testtoolshed.g2.bx.psu.edu"}, "name": "Fasta to Phylip"}
--------------------------------------------------------------------------------
/waves/adaptors/galaxy/utils.py:
--------------------------------------------------------------------------------
1 | """
2 | A copy of the bioblend library unit test decorators, with a few added features
3 | Based on https://github.com/galaxyproject/bioblend/
4 | Author : Marc Chakiachvili
5 | """
6 | from __future__ import unicode_literals
7 |
8 | import unittest
9 |
10 | from bioblend.galaxy.client import ConnectionError
11 | from bioblend.galaxy.objects import *
12 | from django.conf import settings
13 |
14 | NO_GALAXY_MESSAGE = "Externally configured Galaxy, but connection failed. %s"
15 | WRONG_GALAXY_KEY = "A Galaxy server is running, but provided api key is wrong."
16 | MISSING_SETTINGS = "Some settings are required to run Galaxy test : WAVES_TEST_GALAXY_HOST, " \
17 | "WAVES_TEST_GALAXY_PROTOCOL, " \
18 | "WAVES_TEST_GALAXY_PORT, WAVES_TEST_GALAXY_API_KEY."
19 | MISSING_TOOL_MESSAGE = "Externally configured Galaxy instance requires tool %s to run test."
20 |
21 |
22 | def skip_unless_galaxy():
23 | try:
24 | galaxy_key = settings.WAVES_TEST_GALAXY_API_KEY
25 | galaxy_url = '%s://%s' % (settings.WAVES_TEST_GALAXY_PROTOCOL, settings.WAVES_TEST_GALAXY_HOST)
26 | if settings.WAVES_TEST_GALAXY_PORT:
27 | galaxy_url += ':%s' % settings.WAVES_TEST_GALAXY_PORT
28 | gi_obj = GalaxyInstance(url=str(galaxy_url), api_key=galaxy_key)
29 | gi_obj.gi.users.get_current_user()
30 | except ConnectionError as e:
31 | return unittest.skip(NO_GALAXY_MESSAGE % e + ' [' + galaxy_url + '][' + galaxy_key + ']')
32 | except AttributeError as e:
33 | return unittest.skip(MISSING_SETTINGS)
34 | return lambda f: f
35 |
36 |
37 | def skip_unless_tool(command):
38 | """ Decorate a Galaxy test method as requiring a specific tool,
39 | skip the test case if the tool is unavailable.
40 | """
41 | galaxy_key = settings.WAVES_TEST_GALAXY_API_KEY
42 | galaxy_url = '%s://%s' % (settings.WAVES_TEST_GALAXY_PROTOCOL, settings.WAVES_TEST_GALAXY_HOST)
43 | if settings.WAVES_TEST_GALAXY_PORT:
44 | galaxy_url += ':%s' % settings.WAVES_TEST_GALAXY_PORT
45 | gi = GalaxyInstance(url=str(galaxy_url), api_key=galaxy_key)
46 |
47 | def method_wrapper(method):
48 | def wrapped_method(has_gi, *args, **kwargs):
49 | tools = gi.tools.list()
50 | # In panels by default, so flatten out sections...
51 | tool_ids = [_.id for _ in tools]
52 | tool_names = [_.name for _ in tools]
53 | if command not in tool_ids and command not in tool_names:
54 | raise unittest.SkipTest(MISSING_TOOL_MESSAGE % command)
55 | return method(has_gi, *args, **kwargs)
56 |
57 | # Must preserve method name so nose can detect and report tests by
58 | # name.
59 | wrapped_method.__name__ = method.__name__
60 | return wrapped_method
61 |
62 | return method_wrapper
63 |
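64 | # Usage sketch (mirrors the package test suite, illustrative only):
65 | #
66 | #     @skip_unless_galaxy()
67 | #     class SomeGalaxyTestCase(unittest.TestCase):
68 | #
69 | #         @skip_unless_tool("MAF_To_Fasta1")
70 | #         def test_import_tool(self):
71 | #             ...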
--------------------------------------------------------------------------------
/docs/django_sphinx.py:
--------------------------------------------------------------------------------
1 | import inspect
2 | from django.utils.html import strip_tags
3 | from django.utils.encoding import force_unicode
4 |
5 |
6 | def process_docstring(app, what, name, obj, options, lines):
7 | # This causes import errors if left outside the function
8 | from django.apps import apps
9 | from django.db import models
10 |
11 | # Make sure we have loaded models, otherwise related fields may end up
12 | # as strings
13 | apps.get_models()
14 |
15 | # Only look at objects that inherit from Django's base model class
16 | if inspect.isclass(obj) and issubclass(obj, models.Model):
17 | # Grab the field list from the meta class
18 | fields = obj._meta.get_fields()
19 | latelines = []
20 | for field in fields:
21 | if not hasattr(field, 'attname') or isinstance(field, models.ForeignKey):
22 | field.attname = field.name
23 | # Decode and strip any html out of the field's help text
24 | try:
25 | help_text = strip_tags(force_unicode(field.help_text))
26 | except:
27 | help_text = ''
28 |
29 | # Decode and capitalize the verbose name, for use if there isn't
30 | # any help text
31 | try:
32 | verbose_name = force_unicode(field.verbose_name).capitalize()
33 | except:
34 | verbose_name = ''
35 |
36 | if help_text:
37 | # Add the model field to the end of the docstring as a param
38 | # using the help text as the description
39 | lines.append(u':param %s: %s' % (field.attname, help_text))
40 | elif verbose_name:
41 | # Add the model field to the end of the docstring as a param
42 | # using the verbose name as the description
43 | lines.append(u':param %s: %s' % (field.attname, verbose_name))
44 |
45 | # Add the field's type to the docstring
46 | if isinstance(field, models.ForeignKey):
47 | to = field.rel.to
48 | lines.append(u':type %s: %s to :class:`%s.%s`' % (
49 | field.attname, type(field).__name__, to.__module__, to.__name__))
50 | elif isinstance(field, models.ManyToManyField):
51 | to = field.rel.to
52 | lines.append(u':type %s: %s to :class:`%s.%s`' % (
53 | field.attname, type(field).__name__, to.__module__, to.__name__))
54 | elif isinstance(field, models.ManyToOneRel):
55 | to = field.related_model
56 | latelines.append(u'.. attribute:: %s' % (field.related_name or field.name + '_set'))
57 | latelines.append('')
58 | latelines.append(u' %s to :class:`%s.%s`' % (type(field).__name__, to.__module__, to.__name__))
59 | latelines.append('')
60 | else:
61 | lines.append(u':type %s: %s' % (field.attname, type(field).__name__))
62 | lines.append('')
63 | lines += latelines
64 | # Return the extended docstring
65 | return lines
--------------------------------------------------------------------------------
/waves_galaxy/settings.py:
--------------------------------------------------------------------------------
1 | """
2 | Django settings for waves_galaxy project.
3 |
4 | Generated by 'django-admin startproject' using Django 1.11.1.
5 |
6 | For more information on this file, see
7 | https://docs.djangoproject.com/en/1.11/topics/settings/
8 |
9 | For the full list of settings and their values, see
10 | https://docs.djangoproject.com/en/1.11/ref/settings/
11 | """
12 | from __future__ import unicode_literals
13 |
14 | import os
15 | import ConfigParser
16 | import logging.config
17 |
18 | # Build paths inside the project like this: os.path.join(BASE_DIR, ...)
19 | BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
20 |
21 |
22 | # Quick-start development settings - unsuitable for production
23 | # See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
24 |
25 | # SECURITY WARNING: keep the secret key used in production secret!
26 | SECRET_KEY = '-*x&hed2z3@=b4w44j%&k=cm2k_j-4@z@k9dej0$ziv(flyfl*'
27 |
28 | # SECURITY WARNING: don't run with debug turned on in production!
29 | DEBUG = True
30 |
31 | ALLOWED_HOSTS = []
32 |
33 |
34 | # Application definition
35 |
36 | INSTALLED_APPS = [
37 | 'django.contrib.auth',
38 | 'django.contrib.contenttypes',
39 | 'waves.wcore',
40 | 'waves.adaptors.galaxy'
41 | ]
42 |
43 | MIDDLEWARE = [
44 | 'django.middleware.common.CommonMiddleware',
45 | ]
46 |
47 | ROOT_URLCONF = 'waves_galaxy.urls'
48 |
49 | TEMPLATES = [
50 | {
51 | 'BACKEND': 'django.template.backends.django.DjangoTemplates',
52 | 'DIRS': [],
53 | 'APP_DIRS': True,
54 | 'OPTIONS': {
55 | 'context_processors': [],
56 | },
57 | },
58 | ]
59 |
60 | WSGI_APPLICATION = 'waves_galaxy.wsgi.application'
61 |
62 |
63 | # Database
64 | # https://docs.djangoproject.com/en/1.11/ref/settings/#databases
65 |
66 | DATABASES = {
67 | 'default': {
68 | 'ENGINE': 'django.db.backends.sqlite3',
69 | 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
70 | }
71 | }
72 |
73 |
74 | # Password validation
75 | # https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
76 | # Internationalization
77 | # https://docs.djangoproject.com/en/1.11/topics/i18n/
78 |
79 | LANGUAGE_CODE = 'en-gb'
80 | TIME_ZONE = 'UTC'
81 | USE_I18N = True
82 | USE_L10N = True
83 | USE_TZ = True
84 |
85 | # Static files (CSS, JavaScript, Images)
86 | # https://docs.djangoproject.com/en/1.11/howto/static-files/
87 |
88 | STATIC_URL = '/static/'
89 | LOGGING = {
90 | 'version': 1,
91 | 'disable_existing_loggers': False,
92 | 'formatters': {
93 | 'verbose': {
94 | 'format': '[%(levelname)s][%(asctime)s][line %(lineno)s][%(name)s.%(funcName)s] - %(message)s',
95 | 'datefmt': "%H:%M:%S"
96 | },
97 | 'simple': {
98 | 'format': '[%(levelname)s] - %(message)s'
99 | },
100 | 'trace': {
101 | 'format': '%(message)s'
102 | },
103 | },
104 | 'handlers': {
105 | 'console': {
106 | 'class': 'logging.StreamHandler',
107 | 'formatter': 'verbose'
108 | },
109 | },
110 | 'root': {
111 | 'handlers': ['console'],
112 | 'propagate': True,
113 | 'level': 'DEBUG',
114 | },
115 | 'loggers': {
116 | 'django': {
117 | 'handlers': ['console'],
118 | 'propagate': True,
119 | 'level': 'WARNING',
120 | },
121 | 'waves': {
122 | 'handlers': ['console'],
123 | 'level': 'DEBUG',
124 | 'propagate': True,
125 | },
126 | 'waves.galaxy.adaptors.importer': {
127 | 'handlers': ['console'],
128 | 'level': 'DEBUG',
129 | 'propagate': True,
130 | }
131 | }
132 | }
133 |
134 | configFile = os.path.join(os.path.dirname(__file__), 'settings.ini')
135 | Config = ConfigParser.SafeConfigParser(
136 | dict(WAVES_TEST_GALAXY_PORT='')
137 | )
138 | Config.read(configFile)
139 | WAVES_TEST_GALAXY_API_KEY = Config.get('galaxy', 'WAVES_TEST_GALAXY_API_KEY')
140 | WAVES_TEST_GALAXY_HOST = Config.get('galaxy', 'WAVES_TEST_GALAXY_HOST')
141 | WAVES_TEST_GALAXY_PROTOCOL = Config.get('galaxy', 'WAVES_TEST_GALAXY_PROTOCOL')
142 | WAVES_TEST_GALAXY_PORT = Config.get('galaxy', 'WAVES_TEST_GALAXY_PORT')
143 | WAVES_DEBUG_GALAXY = True
144 | WAVES_CORE = {
145 | 'ADAPTORS_CLASSES': (
146 | 'waves.adaptors.galaxy.tool.GalaxyJobAdaptor',
147 | 'waves.adaptors.galaxy.workflow.GalaxyWorkFlowAdaptor',
148 | ),
149 | }
--------------------------------------------------------------------------------
/waves/adaptors/galaxy/tests.py:
--------------------------------------------------------------------------------
1 | """Galaxy Adaptor test cases """
2 | from __future__ import unicode_literals
3 |
4 | import logging
5 | import unittest
6 | from os.path import dirname, join
7 |
8 | from django.conf import settings
9 | from django.test import override_settings
10 |
11 | from waves.adaptors.galaxy.tool import GalaxyJobAdaptor
12 | from waves.adaptors.galaxy.utils import skip_unless_galaxy, skip_unless_tool
13 | from waves.adaptors.galaxy.workflow import GalaxyWorkFlowAdaptor
14 | from waves.wcore.adaptors.exceptions import AdaptorConnectException
15 | from waves.wcore.models import get_service_model, Job, JobInput, JobOutput
16 | from waves.wcore.models.const import ParamType, OptType
17 | from waves.wcore.tests.base import BaseTestCase, TestJobWorkflowMixin
18 |
19 | Service = get_service_model()
20 |
21 | logger = logging.getLogger(__name__)
22 |
23 |
24 | @skip_unless_galaxy()
25 | @override_settings(
26 | WAVES_CORE={
27 | 'DATA_ROOT': join(settings.BASE_DIR, 'tests', 'data'),
28 | 'JOB_BASE_DIR': join(settings.BASE_DIR, 'tests', 'data', 'jobs'),
29 | 'BINARIES_DIR': join(settings.BASE_DIR, 'tests', 'data', 'bin'),
30 | 'SAMPLE_DIR': join(settings.BASE_DIR, 'tests', 'data', 'sample'),
31 | 'JOB_LOG_LEVEL': logging.DEBUG,
32 | 'SRV_IMPORT_LOG_LEVEL': logging.DEBUG,
33 | 'ADAPTORS_CLASSES': (
34 | 'waves.adaptors.galaxy.tool.GalaxyJobAdaptor',
35 | ),
36 | },
37 | MEDIA_ROOT=join(dirname(settings.BASE_DIR), 'tests', 'media'),
38 | )
39 | class GalaxyRunnerTestCase(BaseTestCase, TestJobWorkflowMixin):
40 | def setUp(self):
41 | self.adaptor = GalaxyJobAdaptor(host=settings.WAVES_TEST_GALAXY_HOST,
42 | protocol=settings.WAVES_TEST_GALAXY_PROTOCOL,
43 | port=settings.WAVES_TEST_GALAXY_PORT,
44 | app_key=settings.WAVES_TEST_GALAXY_API_KEY)
45 | super(GalaxyRunnerTestCase, self).setUp()
46 | # ShortCut for adaptor GI
47 | try:
48 | self.gi = self.adaptor.connect()
49 | except AdaptorConnectException:
50 | self.skipTest('Unable to connect to remote')
51 | else:
52 | logger.info('Adaptor config: %s' % self.adaptor.dump_config())
53 |
54 | @classmethod
55 | def setUpClass(cls):
56 | super(GalaxyRunnerTestCase, cls).setUpClass()
57 |
58 | def test_list_galaxy_tools(self):
59 | """
60 | Test listing of available galaxy tools
61 | """
62 | tools = self.adaptor.importer.list_services()
63 | self.assertGreater(len(tools), 0)
64 | for tool in tools:
65 | logger.info('Found tool : %s', tool)
66 |
67 | @skip_unless_tool("MAF_To_Fasta1")
68 | def test_import_tool(self):
69 | service, submission = self.adaptor.importer.import_service("MAF_To_Fasta1")
70 | self.assertIsNotNone(service)
71 | self.assertGreater(submission.inputs.count(), 0)
72 |
73 | @skip_unless_tool("toolshed.g2.bx.psu.edu/repos/rnateam/mafft/rbc_mafft/7.221.1")
74 | def test_import_mafft(self):
75 |
76 | service, submission = self.adaptor.importer.import_service(
77 | "toolshed.g2.bx.psu.edu/repos/rnateam/mafft/rbc_mafft/7.221.1")
78 | self.assertIsNotNone(service)
79 | self.adaptor.command = "toolshed.g2.bx.psu.edu/repos/rnateam/mafft/rbc_mafft/7.221.1"
80 | submission.adaptor = self.adaptor
81 | # print "service init_params", service.runner.adaptor.init_params
82 | # job.adaptor = service.adaptor
83 | job = Job.objects.create(submission=submission)
84 | self.assertEqual(job.outputs.count(), 2)
85 | job.job_inputs.add(JobInput.objects.create(param_type=ParamType.TYPE_FILE,
86 | value=join(dirname(__file__), 'fixtures', 'tests', 'mafft.fasta'),
87 | name="inputs",
88 | cmd_format=OptType.OPT_TYPE_SIMPLE,
89 | job=job))
90 |
91 | for output in submission.outputs.all():
92 | logger.debug("Adding expected output %s ", output.name)
93 | job.outputs.add(JobOutput.objects.create(job=job,
94 | _name=output.name,
95 | value=output.name,
96 | extension=output.extension))
97 | job.save()
98 | self.run_job_workflow(job)
99 |
100 | def tearDown(self):
101 | """
102 | Delete created histories on remote Galaxy server after classic tearDown
103 | Returns:
104 | None
105 | """
106 | super(GalaxyRunnerTestCase, self).tearDown()
107 | if not settings.WAVES_DEBUG_GALAXY:
108 | for history in self.gi.histories.list():
109 | logger.debug('Deleting history %s:%s ', history.name, history.id)
110 | self.gi.histories.delete(history.id, purge=self.gi.gi.config.get_config()['allow_user_dataset_purge'])
111 |
112 |
113 | @skip_unless_galaxy()
114 | class GalaxyWorkFlowRunnerTestCase(unittest.TestCase):
115 | def setUp(self):
116 | self.adaptor = GalaxyWorkFlowAdaptor(host=settings.WAVES_TEST_GALAXY_HOST,
117 | protocol=settings.WAVES_TEST_GALAXY_PROTOCOL,
118 | port=settings.WAVES_TEST_GALAXY_PORT,
119 | app_key=settings.WAVES_TEST_GALAXY_API_KEY)
120 | super(GalaxyWorkFlowRunnerTestCase, self).setUp()
121 |
122 | @property
123 | def importer(self):
124 | return self.adaptor.importer
125 |
126 | def test_list_galaxy_workflow(self):
127 | services = self.importer.list_services()
128 | if len(services) > 0:
129 | self.assertGreater(len(services), 0)
130 | for serv in services:
131 | logger.debug('Service %s is retrieved', serv)
132 | else:
133 | self.skipTest("No remote workflows ")
134 |
135 | @unittest.skip('WorkFlow not available')
136 | def test_import_new_workflow(self):
137 | workflows = self.importer.list_services()
138 | if len(workflows) > 0:
139 | for remote_service in workflows:
140 | self.importer.import_service(tool_id=remote_service[0])
141 | else:
142 | self.skipTest("No remote workflows ")
143 |
144 | @unittest.skip('WorkFlow not available')
145 | def test_update_existing_workflow(self):
146 | service = Service(runner='waves.adaptors.galaxy.workflow.GalaxyWorkFlowAdaptor')
147 | self.assertGreaterEqual(len(service), 0)
148 | for updated in service[0:1]:
149 | # just try for the first one
150 | remote_tool_param = updated.srv_run_params.get(name='remote_tool_id')
151 | logger.debug('Remote tool id for service %s : %s', updated, remote_tool_param.value)
152 | self.importer.import_remote_service(tool_id=remote_tool_param.value)
153 |
--------------------------------------------------------------------------------
/waves/adaptors/galaxy/fixtures/services/fastme.json:
--------------------------------------------------------------------------------
1 | {"inputs": [{"multiple": false, "help": "Phylip Alignment or Matrix file", "edam": {"edam_data": ["data_0006", null], "edam_formats": ["format_2330", null]}, "argument": null, "optional": false, "label": "Fastme input", "is_dynamic": false, "extensions": ["phylip", " phy"], "model_class": "DataToolParameter", "hidden": false, "refresh_on_change": true, "type": "data", "options": {"hdca": [], "hda": []}, "name": "input"}, {"size": null, "help": "Output name for files", "area": false, "optional": false, "argument": null, "value": "Newick tree", "label": "Output name", "is_dynamic": false, "model_class": "TextToolParameter", "hidden": false, "refresh_on_change": false, "type": "text", "name": "fileout_label"}, {"test_param": {"multiple": false, "help": "", "display": "radio", "optional": false, "argument": null, "value": "d", "label": "Data type", "is_dynamic": false, "model_class": "SelectToolParameter", "hidden": false, "refresh_on_change": true, "type": "select", "options": [["DNA", "d", false], ["Protein", "p", false], ["Matrix", "m", false], ["Config file", "cfg", false]], "name": "datatype"}, "model_class": "Conditional", "cases": [{"model_class": "ConditionalWhen", "value": "d", "inputs": [{"multiple": false, "help": "", "display": null, "optional": false, "argument": null, "value": "T", "label": "Evolutionary model", "is_dynamic": false, "model_class": "SelectToolParameter", "hidden": false, "refresh_on_change": false, "type": "select", "options": [["F84", "4", false], ["RY", "R", false], ["F81", "1", false], ["JC69", "J", false], ["K2P", "K", false], ["TN93", "T", true], ["p-distance", "p", false]], "name": "modeldna"}]}, {"model_class": "ConditionalWhen", "value": "p", "inputs": [{"multiple": false, "help": "", "display": null, "optional": false, "argument": null, "value": "L", "label": "Evolutionary model", "is_dynamic": false, "model_class": "SelectToolParameter", "hidden": false, "refresh_on_change": false, "type": "select", "options": [["LG", "L", false], ["WAG", "W", false], ["JTT", "J", false], ["Day off", "h", false], ["CpRev", "C", false], ["DCMut", "D", false], ["HIVb", "b", false], ["HIVw", "I", false], ["MtREV", "M", false], ["RtREV", "R", false], ["p-distance", "p", false]], "name": "modelprot"}]}, {"model_class": "ConditionalWhen", "value": "m", "inputs": []}, {"model_class": "ConditionalWhen", "value": "cfg", "inputs": [{"multiple": false, "help": "Precompute file containning sequence description (dna or protein)", "edam": {"edam_data": ["data_0006"], "edam_formats": ["format_2330"]}, "argument": null, "optional": false, "label": "Config file", "is_dynamic": false, "extensions": ["txt"], "model_class": "DataToolParameter", "hidden": false, "refresh_on_change": true, "type": "data", "options": {"hdca": [], "hda": []}, "name": "input_info"}]}], "type": "conditional", "name": "typeChoice"}, {"help": "By default, frequencies are globally counted from the nucleotides alignment or defined by the proteic substitution model. 
By checking the box, frequencies are pairwise estimated by counting the nucleotides or estimated by counting the amino-acids in the alignment.", "optional": false, "truevalue": "--equilibrium", "argument": null, "value": false, "label": "Equilibrium frequencies", "is_dynamic": false, "model_class": "BooleanToolParameter", "hidden": false, "refresh_on_change": false, "type": "boolean", "falsevalue": "", "name": "equilibrium"}, {"test_param": {"multiple": false, "help": "", "display": null, "optional": false, "argument": null, "value": "true", "label": "Gamma distributed rates across sites", "is_dynamic": false, "model_class": "SelectToolParameter", "hidden": false, "refresh_on_change": true, "type": "select", "options": [["Yes", "true", false], ["No", "false", false]], "name": "gamma"}, "model_class": "Conditional", "cases": [{"model_class": "ConditionalWhen", "value": "true", "inputs": [{"size": null, "help": "", "min": 0.0, "max": null, "area": false, "argument": null, "value": "1", "label": "Gamma distribution parameter", "is_dynamic": false, "optional": false, "model_class": "FloatToolParameter", "hidden": false, "refresh_on_change": false, "type": "float", "name": "rate"}]}, {"model_class": "ConditionalWhen", "value": "false", "inputs": []}], "type": "conditional", "name": "gammaChoice"}, {"multiple": false, "help": "", "display": "radio", "optional": false, "argument": null, "value": "", "label": "Remove gap strategy", "is_dynamic": false, "model_class": "SelectToolParameter", "hidden": false, "refresh_on_change": false, "type": "select", "options": [["Pairwise deletion of gaps", "", false], ["Remove all sites with gap", "--remove_gap", false]], "name": "removeGap"}, {"multiple": false, "help": "", "display": null, "optional": false, "argument": null, "value": "--method=I", "label": "Distance algorithm", "is_dynamic": false, "model_class": "SelectToolParameter", "hidden": false, "refresh_on_change": false, "type": "select", "options": [["BIONJ", "--method=I", false], ["TaxAdd BalME", "--method=B --branch_length=B", false], ["TaxAdd OLSME", "--method=O --branch_length=O", false], ["NJ", "--method=N", false], ["UNJ", "--method=U", false]], "name": "distance"}, {"multiple": false, "help": "(NNI) Nearest Neighbor Interchanges,(SPR) Subtree Pruning and Regrafting ", "display": "radio", "optional": false, "argument": null, "value": "--spr", "label": "Tree Refinement", "is_dynamic": false, "model_class": "SelectToolParameter", "hidden": false, "refresh_on_change": false, "type": "select", "options": [["None", "", false], ["OLS NNI", "--nni=O", false], ["BalME NNI", "--nni=B", false], ["BalME SPR", "--spr", true], ["BalME NNI + SPR", "--nni=B --spr", false]], "name": "treeRefinement"}, {"test_param": {"multiple": false, "help": "", "display": null, "optional": false, "argument": null, "value": "false", "label": "Bootstrap", "is_dynamic": false, "model_class": "SelectToolParameter", "hidden": false, "refresh_on_change": true, "type": "select", "options": [["No", "false", false], ["Yes", "true", false]], "name": "boot"}, "model_class": "Conditional", "cases": [{"model_class": "ConditionalWhen", "value": "false", "inputs": []}, {"model_class": "ConditionalWhen", "value": "true", "inputs": [{"size": null, "help": "", "min": null, "max": null, "area": false, "argument": null, "value": "1000", "label": "Number of replicates", "is_dynamic": false, "optional": false, "model_class": "IntegerToolParameter", "hidden": false, "refresh_on_change": false, "type": "integer", "name": "replicates"}]}], 
"type": "conditional", "name": "bootChoice"}], "panel_section_name": "Phylogenetics", "config_file": "/home/galaxy/shed_tools/testtoolshed.g2.bx.psu.edu/repos/gandres/fastme/5f6da08745cd/fastme/fastme.xml", "description": "Distance-based inference of phylogenetic trees", "outputs": [{"name": "outputTree", "format": "nwk", "label": "${fileout_label}", "edam_format": null, "edam_data": null, "model_class": "ToolOutput", "hidden": false}, {"name": "outputLog", "format": "txt", "label": "FastME Information", "edam_format": "format_2330", "edam_data": "data_0006", "model_class": "ToolOutput", "hidden": false}, {"name": "outputBoostrap", "format": "nwk", "label": "FastME Bootstrap trees", "edam_format": null, "edam_data": null, "model_class": "ToolOutput", "hidden": false}, {"name": "outputMatrix", "format": "txt", "label": "FastME Distance matrix", "edam_format": "format_2330", "edam_data": "data_0006", "model_class": "ToolOutput", "hidden": false}], "labels": [], "edam_operations": [], "form_style": "regular", "edam_topics": [], "panel_section_id": "phylogenetics", "version": "2.1.4.2", "link": "/tool_runner?tool_id=testtoolshed.g2.bx.psu.edu%2Frepos%2Fgandres%2Ffastme%2Ffastme%2F2.1.4.2", "target": "galaxy_main", "min_width": -1, "model_class": "Tool", "id": "testtoolshed.g2.bx.psu.edu/repos/gandres/fastme/fastme/2.1.4.2", "tool_shed_repository": {"owner": "gandres", "changeset_revision": "5f6da08745cd", "name": "fastme", "tool_shed": "testtoolshed.g2.bx.psu.edu"}, "name": "FastME"}
--------------------------------------------------------------------------------
/waves/adaptors/galaxy/fixtures/services/noisy.json:
--------------------------------------------------------------------------------
1 | {"inputs": [{"multiple": false, "help": "Fasta format", "edam": {"edam_data": ["data_2044"], "edam_formats": ["format_1929"]}, "argument": null, "optional": false, "label": "Source file", "is_dynamic": false, "extensions": ["fasta"], "model_class": "DataToolParameter", "hidden": false, "refresh_on_change": true, "type": "data", "options": {"hdca": [], "hda": []}, "name": "input"}, {"test_param": {"multiple": false, "help": "", "display": null, "optional": false, "argument": null, "value": "D", "label": "Sequence Coding", "is_dynamic": false, "model_class": "SelectToolParameter", "hidden": false, "refresh_on_change": true, "type": "select", "options": [["DNA", "D", false], ["Protein", "P", false], ["RNA", "R", false], ["Config file", "cfg", false]], "name": "seqtype"}, "model_class": "Conditional", "cases": [{"model_class": "ConditionalWhen", "value": "D", "inputs": []}, {"model_class": "ConditionalWhen", "value": "P", "inputs": []}, {"model_class": "ConditionalWhen", "value": "R", "inputs": []}, {"model_class": "ConditionalWhen", "value": "cfg", "inputs": [{"multiple": false, "help": "Precompute file containning sequence description (dna or protein)", "edam": {"edam_data": ["data_0006"], "edam_formats": ["format_2330"]}, "argument": null, "optional": false, "label": "info", "is_dynamic": false, "extensions": ["txt"], "model_class": "DataToolParameter", "hidden": false, "refresh_on_change": true, "type": "data", "options": {"hdca": [], "hda": []}, "name": "input_info"}]}], "type": "conditional", "name": "sequence"}, {"size": null, "help": "Columns with a score below FLOAT are removed from the output alignment.", "min": 0.0, "max": 1.0, "area": false, "argument": null, "value": "0.8", "label": "cut-off [ 0-1 ]", "is_dynamic": false, "optional": false, "model_class": "FloatToolParameter", "hidden": false, "refresh_on_change": false, "type": "float", "name": "cutoff"}, {"multiple": false, "help": "", "display": null, "optional": false, "argument": null, "value": "HAMMING", "label": "Distance methode used by NeighbotNet", "is_dynamic": false, "model_class": "SelectToolParameter", "hidden": false, "refresh_on_change": false, "type": "select", "options": [["HAMMING", "HAMMING", false], ["GTR", "GTR", false]], "name": "distance"}, {"test_param": {"help": "default is calculate distances with nnet", "optional": false, "truevalue": "true", "argument": null, "value": false, "label": "Use matrix file", "is_dynamic": false, "model_class": "BooleanToolParameter", "hidden": false, "refresh_on_change": true, "type": "boolean", "falsevalue": "", "name": "matrix"}, "model_class": "Conditional", "cases": [{"model_class": "ConditionalWhen", "value": "", "inputs": []}, {"model_class": "ConditionalWhen", "value": "true", "inputs": [{"multiple": false, "help": "", "edam": {"edam_data": [null], "edam_formats": [null]}, "argument": null, "optional": false, "label": "matrix file", "is_dynamic": false, "extensions": ["text"], "model_class": "DataToolParameter", "hidden": false, "refresh_on_change": true, "type": "data", "options": {"hdca": [], "hda": []}, "name": "matrixfile"}]}], "type": "conditional", "name": "usematrix"}, {"test_param": {"help": "", "optional": false, "truevalue": "true", "argument": null, "value": false, "label": "Set list of missing chars", "is_dynamic": false, "model_class": "BooleanToolParameter", "hidden": false, "refresh_on_change": true, "type": "boolean", "falsevalue": "", "name": "setmissingchar"}, "model_class": "Conditional", "cases": [{"model_class": "ConditionalWhen", "value": "", 
"inputs": []}, {"model_class": "ConditionalWhen", "value": "true", "inputs": [{"size": null, "help": "", "area": false, "optional": false, "argument": null, "value": null, "label": "Missing chars list", "is_dynamic": false, "model_class": "TextToolParameter", "hidden": false, "refresh_on_change": false, "type": "text", "name": "chars"}]}], "type": "conditional", "name": "missingchar"}, {"test_param": {"multiple": false, "help": "", "display": "radio", "optional": false, "argument": null, "value": "nnet", "label": "Choose ordering method", "is_dynamic": false, "model_class": "SelectToolParameter", "hidden": false, "refresh_on_change": true, "type": "select", "options": [["NeighborNet", "nnet", false], ["QNet", "qnet", false], ["Sample INT random permutation", "rand", false], ["List of index MSA ordering", "list", false], ["All permutations", "all", false]], "name": "ordering"}, "model_class": "Conditional", "cases": [{"model_class": "ConditionalWhen", "value": "nnet", "inputs": []}, {"model_class": "ConditionalWhen", "value": "qnet", "inputs": []}, {"model_class": "ConditionalWhen", "value": "rand", "inputs": [{"size": null, "help": "", "min": null, "max": null, "area": false, "argument": null, "value": "1000", "label": "random permutation", "is_dynamic": false, "optional": false, "model_class": "IntegerToolParameter", "hidden": false, "refresh_on_change": false, "type": "integer", "name": "randpermut"}]}, {"model_class": "ConditionalWhen", "value": "list", "inputs": [{"size": null, "help": "", "area": false, "optional": false, "argument": null, "value": null, "label": "Comma-seperated string of INT", "is_dynamic": false, "model_class": "TextToolParameter", "hidden": false, "refresh_on_change": false, "type": "text", "name": "randpermutlist"}]}, {"model_class": "ConditionalWhen", "value": "all", "inputs": []}], "type": "conditional", "name": "orderingmethode"}, {"help": "", "optional": false, "truevalue": "", "argument": null, "value": true, "label": "Constant columns in output alignment", "is_dynamic": false, "model_class": "BooleanToolParameter", "hidden": false, "refresh_on_change": false, "type": "boolean", "falsevalue": "--noconstant", "name": "constant"}, {"help": "", "optional": false, "truevalue": "", "argument": null, "value": true, "label": "Count gap symbol as character state", "is_dynamic": false, "model_class": "BooleanToolParameter", "hidden": false, "refresh_on_change": false, "type": "boolean", "falsevalue": "--nogap", "name": "gap"}, {"size": null, "help": "Calculate a running average over the reliability score of INT columns and use this smoothed values to remove unreliable columns from the MAS.s", "min": 0, "max": 1000, "area": false, "argument": null, "value": "1", "label": "Running average over INT columns", "is_dynamic": false, "optional": false, "model_class": "IntegerToolParameter", "hidden": false, "refresh_on_change": false, "type": "integer", "name": "smooth"}, {"size": null, "help": "", "min": 0, "max": 1000, "area": false, "argument": null, "value": "0", "label": "Perform INT random shuffles per column of the MSA", "is_dynamic": false, "optional": false, "model_class": "IntegerToolParameter", "hidden": false, "refresh_on_change": false, "type": "integer", "name": "shuffles"}], "panel_section_name": "Phylogenetics", "config_file": "/home/galaxy/shed_tools/testtoolshed.g2.bx.psu.edu/repos/dcorreia/noisy/dc60058d559e/noisy/noisy.xml", "description": "Cleaning aligned sequences", "outputs": [{"name": "output1", "format": "fasta", "label": "Noisy Cleaned sequencies", 
"edam_format": "format_1929", "edam_data": "data_2044", "model_class": "ToolOutput", "hidden": false}, {"name": "output2", "format": "eps", "label": "Noisy Cleaned sequencies image", "edam_format": "format_3466", "edam_data": "data_2968", "model_class": "ToolOutput", "hidden": false}, {"name": "output3", "format": "txt", "label": "Noisy Cleaned sequencies information", "edam_format": "format_2330", "edam_data": "data_0006", "model_class": "ToolOutput", "hidden": false}], "labels": [], "edam_operations": [], "form_style": "regular", "edam_topics": [], "panel_section_id": "phylogenetics", "version": "1.5.12.1", "link": "/tool_runner?tool_id=testtoolshed.g2.bx.psu.edu%2Frepos%2Fdcorreia%2Fnoisy%2Fnoisy%2F1.5.12.1", "target": "galaxy_main", "min_width": -1, "model_class": "Tool", "id": "testtoolshed.g2.bx.psu.edu/repos/dcorreia/noisy/noisy/1.5.12.1", "tool_shed_repository": {"owner": "dcorreia", "changeset_revision": "dc60058d559e", "name": "noisy", "tool_shed": "testtoolshed.g2.bx.psu.edu"}, "name": "Noisy"}
--------------------------------------------------------------------------------
/waves/adaptors/galaxy/fixtures/services/mafft.json:
--------------------------------------------------------------------------------
1 | {"inputs": [{"multiple": false, "help": "Amino acid or nucleotide sequences in FASTA format.", "edam": {"edam_data": ["data_2044"], "edam_formats": ["format_1929"]}, "argument": null, "optional": false, "label": "Sequences to align", "is_dynamic": false, "extensions": ["fasta"], "model_class": "DataToolParameter", "hidden": false, "refresh_on_change": true, "type": "data", "options": {"hdca": [], "hda": []}, "name": "inputSequences"}, {"multiple": false, "help": "", "display": null, "optional": false, "argument": null, "value": "", "label": "Data type", "is_dynamic": false, "model_class": "SelectToolParameter", "hidden": false, "refresh_on_change": false, "type": "select", "options": [["Auto detection", "", false], ["Nucleic acids", "--nuc", false], ["Amino acids", "--amino", false]], "name": "datatype"}, {"test_param": {"multiple": false, "help": "Run mafft with pre-defined input parameters. Specification of these parameters can be found in the help section.", "display": null, "optional": false, "argument": null, "value": "mafft-fftns", "label": "MAFFT flavour", "is_dynamic": false, "model_class": "SelectToolParameter", "hidden": false, "refresh_on_change": true, "type": "select", "options": [["auto", "mafft --auto", false], ["fftns", "mafft-fftns", true], ["fftnsi", "mafft-fftnsi", false], ["nwns", "mafft-nwns", false], ["nwnsi", "mafft-nwnsi", false], ["einsi", "mafft-einsi", false], ["ginsi", "mafft-ginsi", false], ["linsi", "mafft-linsi", false], ["qinsi", "mafft-qinsi", false], ["xinsi", "mafft-xinsi", false], ["Custom Parameters", "custom", false]], "name": "flavourType"}, "model_class": "Conditional", "cases": [{"model_class": "ConditionalWhen", "value": "mafft-fftns", "inputs": []}, {"model_class": "ConditionalWhen", "value": "mafft --auto", "inputs": []}, {"model_class": "ConditionalWhen", "value": "mafft-fftnsi", "inputs": []}, {"model_class": "ConditionalWhen", "value": "mafft-nwns", "inputs": []}, {"model_class": "ConditionalWhen", "value": "mafft-nwnsi", "inputs": []}, {"model_class": "ConditionalWhen", "value": "mafft-einsi", "inputs": []}, {"model_class": "ConditionalWhen", "value": "mafft-ginsi", "inputs": []}, {"model_class": "ConditionalWhen", "value": "mafft-linsi", "inputs": []}, {"model_class": "ConditionalWhen", "value": "mafft-qinsi", "inputs": []}, {"model_class": "ConditionalWhen", "value": "mafft-xinsi", "inputs": []}, {"model_class": "ConditionalWhen", "value": "custom", "inputs": [{"multiple": false, "help": "Distance method must be chosen regarding your data", "display": "radio", "optional": false, "argument": null, "value": "--6merpair", "label": "Distance method", "is_dynamic": false, "model_class": "SelectToolParameter", "hidden": false, "refresh_on_change": false, "type": "select", "options": [["Shared 6mers distance (fastest)", "--6merpair", true], ["Global alignment (Needleman-Wunsch)", "--globalpair", false], ["Local alignment (Smith-Waterman)", "--localpair", false], ["Local, affine gap cost", "--genafpair", false]], "name": "distance_method"}, {"size": null, "help": "Valid with 6mer distance", "min": 1, "max": 100, "area": false, "argument": null, "value": "2", "label": "Guide tree is built this number of times in the progressive stage.", "is_dynamic": false, "optional": false, "model_class": "IntegerToolParameter", "hidden": false, "refresh_on_change": false, "type": "integer", "name": "retree"}, {"size": null, "help": "1000 for maximum quality", "min": 0, "max": 1000, "area": false, "argument": null, "value": "0", "label": "Maximum number of 
iterations", "is_dynamic": false, "optional": false, "model_class": "IntegerToolParameter", "hidden": false, "refresh_on_change": false, "type": "integer", "name": "iterations"}]}], "type": "conditional", "name": "cond_flavour"}, {"size": null, "help": "Offset value, which works like gap extension penalty, for group-to-group alignment. For E-INS-i, 0 is recommended to allow large gaps", "min": null, "max": null, "area": false, "argument": null, "value": "0.123", "label": "Gap extend penalty", "is_dynamic": false, "optional": false, "model_class": "FloatToolParameter", "hidden": false, "refresh_on_change": false, "type": "float", "name": "ep"}, {"size": null, "help": "1.53 default value", "min": null, "max": null, "area": false, "argument": null, "value": "1.53", "label": "Gap opening penalty", "is_dynamic": false, "optional": false, "model_class": "FloatToolParameter", "hidden": false, "refresh_on_change": false, "type": "float", "name": "op"}, {"multiple": false, "help": "Generate reverse complement sequences, as necessary, and align them together with the remaining sequences", "display": "radio", "optional": false, "argument": null, "value": " ", "label": "Direction of nucleotide sequences", "is_dynamic": false, "model_class": "SelectToolParameter", "hidden": false, "refresh_on_change": false, "type": "select", "options": [["adjust direction", "--adjustdirection", false], ["do not adjust direction", " ", true]], "name": "adjustdirection"}, {"test_param": {"multiple": false, "help": "Usefull only for amino acids", "display": "radio", "optional": false, "argument": null, "value": "", "label": "Matrix selection", "is_dynamic": false, "model_class": "SelectToolParameter", "hidden": false, "refresh_on_change": true, "type": "select", "options": [["No matrix", "", false], ["BLOSUM", "BLOSUM", false], ["PAM", "PAM", false]], "name": "matrix"}, "model_class": "Conditional", "cases": [{"model_class": "ConditionalWhen", "value": "", "inputs": []}, {"model_class": "ConditionalWhen", "value": "BLOSUM", "inputs": [{"multiple": false, "help": "", "display": "radio", "optional": false, "argument": null, "value": "62", "label": "Coefficient of the BLOSUM matrix", "is_dynamic": false, "model_class": "SelectToolParameter", "hidden": false, "refresh_on_change": false, "type": "select", "options": [["30", "30", false], ["45", "45", false], ["62", "62", true], ["80", "80", false]], "name": "BLOSUM"}]}, {"model_class": "ConditionalWhen", "value": "PAM", "inputs": [{"size": null, "help": "", "min": 1, "max": 350, "area": false, "argument": null, "value": "80", "label": "Coefficient of the PAM matrix", "is_dynamic": false, "optional": false, "model_class": "IntegerToolParameter", "hidden": false, "refresh_on_change": false, "type": "integer", "name": "PAM"}]}], "type": "conditional", "name": "matrix_condition"}, {"help": "", "optional": false, "truevalue": "--reorder", "argument": null, "value": false, "label": "Reorder output?", "is_dynamic": false, "model_class": "BooleanToolParameter", "hidden": false, "refresh_on_change": false, "type": "boolean", "falsevalue": "", "name": "reorder"}, {"help": "", "optional": false, "truevalue": "--treeout", "argument": null, "value": false, "label": "Display alignment tree ?", "is_dynamic": false, "model_class": "BooleanToolParameter", "hidden": false, "refresh_on_change": false, "type": "boolean", "falsevalue": "", "name": "getTree"}, {"multiple": false, "help": "Either FASTA or ClustalW", "display": null, "optional": false, "argument": null, "value": "", "label": "Output 
format", "is_dynamic": false, "model_class": "SelectToolParameter", "hidden": false, "refresh_on_change": false, "type": "select", "options": [["FASTA", "", true], ["ClustalW", "--clustalout", false], ["Phylip", "--phylipout", false]], "name": "outputFormat"}], "panel_section_name": "Phylogenetics", "config_file": "/home/galaxy/shed_tools/testtoolshed.g2.bx.psu.edu/repos/rnateam/mafft/dd4a533a0e3c/mafft/mafft.xml", "description": "Multiple alignment program for amino acid or nucleotide sequences", "outputs": [{"name": "outputAlignment", "format": "fasta", "label": "${tool.name} on ${on_string}", "edam_format": "format_1929", "edam_data": "data_2044", "model_class": "ToolOutput", "hidden": false}, {"name": "outputTree", "format": "txt", "label": "${tool.name} Guide Tree", "edam_format": "format_2330", "edam_data": "data_0006", "model_class": "ToolOutput", "hidden": false}], "labels": [], "edam_operations": [], "form_style": "regular", "edam_topics": [], "panel_section_id": "phylogenetics", "version": "7.221.3", "link": "/tool_runner?tool_id=testtoolshed.g2.bx.psu.edu%2Frepos%2Frnateam%2Fmafft%2Frbc_mafft%2F7.221.3", "target": "galaxy_main", "min_width": -1, "model_class": "Tool", "id": "testtoolshed.g2.bx.psu.edu/repos/rnateam/mafft/rbc_mafft/7.221.3", "tool_shed_repository": {"owner": "rnateam", "changeset_revision": "dd4a533a0e3c", "name": "mafft", "tool_shed": "testtoolshed.g2.bx.psu.edu"}, "name": "MAFFT"}
--------------------------------------------------------------------------------
/waves/adaptors/galaxy/fixtures/services/phyml.json:
--------------------------------------------------------------------------------
1 | {"inputs": [{"multiple": false, "help": "phylip format", "edam": {"edam_data": ["data_0006", null], "edam_formats": ["format_2330", null]}, "argument": null, "optional": false, "label": "Alignment file", "is_dynamic": false, "extensions": ["phylip", " phy"], "model_class": "DataToolParameter", "hidden": false, "refresh_on_change": true, "type": "data", "options": {"hdca": [], "hda": []}, "name": "input"}, {"multiple": false, "help": "", "display": "radio", "optional": false, "argument": null, "value": "", "label": "Changes interleaved format", "is_dynamic": false, "model_class": "SelectToolParameter", "hidden": false, "refresh_on_change": false, "type": "select", "options": [["Interleaved", "", false], ["Sequential", "--sequential", false]], "name": "phylip_format"}, {"size": null, "help": "", "min": 1, "max": null, "area": false, "argument": null, "value": "1", "label": "Number of data sets", "is_dynamic": false, "optional": false, "model_class": "IntegerToolParameter", "hidden": false, "refresh_on_change": false, "type": "integer", "name": "nb_data_set"}, {"test_param": {"multiple": false, "help": "", "display": "radio", "optional": false, "argument": null, "value": "nt", "label": "Data type", "is_dynamic": false, "model_class": "SelectToolParameter", "hidden": false, "refresh_on_change": true, "type": "select", "options": [["Nucleic acids", "nt", false], ["Amino acids", "aa", false]], "name": "type_of_seq"}, "model_class": "Conditional", "cases": [{"model_class": "ConditionalWhen", "value": "nt", "inputs": [{"size": null, "help": "Must be a positive integer, 'e' if you want PhyML to estimate it", "area": false, "optional": false, "argument": null, "value": "e", "label": "Transition/transversion ratio", "is_dynamic": false, "model_class": "TextToolParameter", "hidden": false, "refresh_on_change": false, "type": "text", "name": "tstv"}, {"multiple": false, "help": "", "display": null, "optional": false, "argument": null, "value": "HKY85", "label": "Substitution model", "is_dynamic": false, "model_class": "SelectToolParameter", "hidden": false, "refresh_on_change": false, "type": "select", "options": [["HKY85", "HKY85", false], ["JC69", "JC69", false], ["K80", "K80", false], ["F81", "F81", false], ["F84", "F84", false], ["TN93", "TN93", false], ["GTR", "GTR", false]], "name": "model"}]}, {"model_class": "ConditionalWhen", "value": "aa", "inputs": [{"multiple": false, "help": "", "display": null, "optional": false, "argument": null, "value": "LG", "label": "Evolution model", "is_dynamic": false, "model_class": "SelectToolParameter", "hidden": false, "refresh_on_change": false, "type": "select", "options": [["LG", "LG", false], ["WAG", "WAG", false], ["JTT", "JTT", false], ["MtREV", "MtREV", false], ["Dayhoff", "Dayhoff", false], ["DCMut", "DCMut", false], ["RtREV", "RtREV", false], ["CpREV", "CpREV", false], ["VT", "VT", false], ["Blosum62", "Blosum62", false], ["MtMam", "MtMam", false], ["MtArt", "MtArt", false], ["HIVw", "HIVw", false], ["HIVb", "HIVb", false]], "name": "model"}]}], "type": "conditional", "name": "seq"}, {"size": null, "help": "Can be a fixed value in the [0,1] range or 'e' to get the maximum likelihood estimate, 0 to ignore this parameter", "area": false, "optional": false, "argument": null, "value": "e", "label": "Proportion of invariant sites", "is_dynamic": false, "model_class": "TextToolParameter", "hidden": false, "refresh_on_change": false, "type": "text", "name": "prop_invar"}, {"multiple": false, "help": "Empirical: frequencies are estimated by counting the 
occurences in the alignment. ML/Model: frequencies are estimated using ML for nucleotides or defined by the proteic substitution model.", "display": null, "optional": false, "argument": null, "value": "m", "label": "Equilibrium frequencies", "is_dynamic": false, "model_class": "SelectToolParameter", "hidden": false, "refresh_on_change": false, "type": "select", "options": [["ML/Model", "m", false], ["Empirical", "e", false]], "name": "equi_freq"}, {"size": null, "help": "1 means no gamma model", "min": 1, "max": null, "area": false, "argument": null, "value": "4", "label": "Number of categories for the discrete gamma model", "is_dynamic": false, "optional": false, "model_class": "IntegerToolParameter", "hidden": false, "refresh_on_change": false, "type": "integer", "name": "nbSubstCat"}, {"size": null, "help": "'e' if you want PhyML to estimate it", "area": false, "optional": false, "argument": null, "value": "e", "label": "Parameter of the gamma model", "is_dynamic": false, "model_class": "TextToolParameter", "hidden": false, "refresh_on_change": false, "type": "text", "name": "gamma"}, {"multiple": false, "help": "", "display": "radio", "optional": false, "argument": null, "value": "NNI", "label": "Tree topology search", "is_dynamic": false, "model_class": "SelectToolParameter", "hidden": false, "refresh_on_change": false, "type": "select", "options": [["NNI (Nearest Neighbor Interchange)", "NNI", false], ["SPR (Subtree Pruning and Regraphing)", "SPR", false], ["Best of NNI and SPR", "BEST", false]], "name": "move"}, {"multiple": false, "help": "", "display": null, "optional": false, "argument": null, "value": "tlr", "label": "Optimise topology", "is_dynamic": false, "model_class": "SelectToolParameter", "hidden": false, "refresh_on_change": false, "type": "select", "options": [["Tree topology, Branch length, Rate parameter", "tlr", false], ["Tree topology, Branch length", "tl", false], ["Branch length", "l", false], ["Rate parameter", "r", false], ["No parameter is optimized", "n", false]], "name": "optimisationTopology"}, {"test_param": {"multiple": false, "help": "Use aLRT or aBayes to save computing time", "display": null, "optional": false, "argument": null, "value": "-4", "label": "Statistical tests for branch support", "is_dynamic": false, "model_class": "SelectToolParameter", "hidden": false, "refresh_on_change": true, "type": "select", "options": [["No bootstrap", "0", false], ["Bootstrap", "1", false], ["likelihood aLRT statistics", "-1", false], ["likelihood Chi2-based", "-2", false], ["SH-like", "-4", true], ["Approximate Bayes branch supports", "-5", false]], "name": "branchSupport"}, "model_class": "Conditional", "cases": [{"model_class": "ConditionalWhen", "value": "0", "inputs": []}, {"model_class": "ConditionalWhen", "value": "-1", "inputs": []}, {"model_class": "ConditionalWhen", "value": "-2", "inputs": []}, {"model_class": "ConditionalWhen", "value": "-4", "inputs": []}, {"model_class": "ConditionalWhen", "value": "-5", "inputs": []}, {"model_class": "ConditionalWhen", "value": "1", "inputs": [{"size": null, "help": "Must be a positive integer", "min": 1, "max": null, "area": false, "argument": null, "value": "100", "label": "Number of bootstrap replicates", "is_dynamic": false, "optional": false, "model_class": "IntegerToolParameter", "hidden": false, "refresh_on_change": false, "type": "integer", "name": "replicate"}]}, {"model_class": "ConditionalWhen", "value": "no", "inputs": []}], "type": "conditional", "name": "support_condition"}, {"size": null, "help": "0 to 
random seed", "min": null, "max": null, "area": false, "argument": null, "value": "0", "label": "Number of seed used to initiate the random number generator", "is_dynamic": false, "optional": false, "model_class": "IntegerToolParameter", "hidden": false, "refresh_on_change": false, "type": "integer", "name": "numStartSeed"}, {"test_param": {"help": "", "optional": false, "truevalue": "true", "argument": null, "value": false, "label": "Use input tree guide", "is_dynamic": false, "model_class": "BooleanToolParameter", "hidden": false, "refresh_on_change": true, "type": "boolean", "falsevalue": "false", "name": "inputTree"}, "model_class": "Conditional", "cases": [{"model_class": "ConditionalWhen", "value": "true", "inputs": [{"multiple": false, "help": "newick format", "edam": {"edam_data": ["data_0006"], "edam_formats": ["format_1915"]}, "argument": null, "optional": false, "label": "Tree file", "is_dynamic": false, "extensions": ["data"], "model_class": "DataToolParameter", "hidden": false, "refresh_on_change": true, "type": "data", "options": {"hdca": [], "hda": []}, "name": "userInputTree"}]}, {"model_class": "ConditionalWhen", "value": "false", "inputs": []}], "type": "conditional", "name": "usetree"}], "panel_section_name": "Phylogenetics", "config_file": "/home/galaxy/shed_tools/testtoolshed.g2.bx.psu.edu/repos/dereeper/phyml/67555e761895/phyml/phyml.xml", "description": "Phylogeny software based on the maximum-likelihood", "outputs": [{"name": "output_tree", "format": "txt", "label": "PhyML Newick tree", "edam_format": "format_2330", "edam_data": "data_0006", "model_class": "ToolOutput", "hidden": false}, {"name": "output_stats", "format": "txt", "label": "PhyML Statistics", "edam_format": "format_2330", "edam_data": "data_0006", "model_class": "ToolOutput", "hidden": false}, {"name": "output_stdout", "format": "txt", "label": "PhyML Stdout", "edam_format": "format_2330", "edam_data": "data_0006", "model_class": "ToolOutput", "hidden": false}], "labels": [], "edam_operations": [], "form_style": "regular", "edam_topics": [], "panel_section_id": "phylogenetics", "version": "3.1", "link": "/tool_runner?tool_id=testtoolshed.g2.bx.psu.edu%2Frepos%2Fdereeper%2Fphyml%2Fphyml%2F3.1", "target": "galaxy_main", "min_width": -1, "model_class": "Tool", "id": "testtoolshed.g2.bx.psu.edu/repos/dereeper/phyml/phyml/3.1", "tool_shed_repository": {"owner": "dereeper", "changeset_revision": "67555e761895", "name": "phyml", "tool_shed": "testtoolshed.g2.bx.psu.edu"}, "name": "PhyML"}
--------------------------------------------------------------------------------
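The fixtures above are raw Galaxy tool descriptions as returned by the bioblend API. A minimal sketch of inspecting one locally (illustrative only; the path follows the repository tree, adjust it to wherever the snippet is run from):

import json

# Load the PhyML fixture shipped with the adaptor (path per the repository layout).
with open('waves/adaptors/galaxy/fixtures/services/phyml.json') as handle:
    tool = json.load(handle)

print(tool['name'] + ' ' + tool['version'])
for tool_input in tool['inputs']:
    # Each entry mirrors a Galaxy tool parameter: type, name, label, options...
    print(tool_input['type'] + ' ' + tool_input['name'])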
/docs/conf.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Read the Docs Template documentation build configuration file, created by
4 | # sphinx-quickstart on Tue Aug 26 14:19:49 2014.
5 | #
6 | # This file is execfile()d with the current directory set to its
7 | # containing dir.
8 | #
9 | # Note that not all possible configuration values are present in this
10 | # autogenerated file.
11 | #
12 | # All configuration values have a default; values that are commented out
13 | # serve to show the default.
14 |
15 | import os
16 | import sys
17 | from distutils.sysconfig import get_python_lib
18 |
19 | sys.path.append(os.path.abspath('.'))
20 | sys.path.append(os.path.abspath('..'))
21 | sys.path.insert(0, os.path.abspath('waves'))
22 | sys.path.append(get_python_lib())
23 | os.environ['DJANGO_SETTINGS_MODULE'] = 'waves_galaxy.settings'
24 |
25 |
26 | import django
27 | import waves.adaptors.galaxy as waves_galaxy
28 |
29 |
30 | # If extensions (or modules to document with autodoc) are in another directory,
31 | # add these directories to sys.path here. If the directory is relative to the
32 | # documentation root, use os.path.abspath to make it absolute, like shown here.
33 | # settings.configure()
34 | django.setup()
35 |
36 | # -- General configuration ------------------------------------------------
37 |
38 | # If your documentation needs a minimal Sphinx version, state it here.
39 | #needs_sphinx = '1.0'
40 |
41 | # Add any Sphinx extension module names here, as strings. They can be
42 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
43 | # ones.
44 | extensions = [
45 | 'sphinx.ext.autodoc',
46 | 'sphinx.ext.coverage',
47 | 'sphinx.ext.todo',
48 | 'sphinx.ext.viewcode',
49 | 'sphinx.ext.intersphinx',
50 | 'sphinx.ext.autosummary'
51 | ]
52 |
53 | # Add any paths that contain templates here, relative to this directory.
54 | templates_path = ['templates']
55 |
56 | # The suffix of source filenames.
57 | source_suffix = '.rst'
58 |
59 | # The encoding of source files.
60 | #source_encoding = 'utf-8-sig'
61 |
62 | # The master toctree document.
63 | master_doc = 'index'
64 |
65 | # General information about the project.
66 | project = waves_galaxy.__name__
67 | copyright = waves_galaxy.__copyright__
68 |
69 | # The version info for the project you're documenting, acts as replacement for
70 | # |version| and |release|, also used in various other places throughout the
71 | # built documents.
72 | #
73 | # The short X.Y version.
74 | version = waves_galaxy.__version__
75 | # The full version, including alpha/beta/rc tags.
76 | release = waves_galaxy.__version_detail__
77 |
78 | # The language for content autogenerated by Sphinx. Refer to documentation
79 | # for a list of supported languages.
80 | # language =
81 |
82 | # There are two options for replacing |today|: either, you set today to some
83 | # non-false value, then it is used:
84 | #today = ''
85 | # Else, today_fmt is used as the format for a strftime call.
86 | #today_fmt = '%B %d, %Y'
87 |
88 | # List of patterns, relative to source directory, that match files and
89 | # directories to ignore when looking for source files.
90 | exclude_patterns = ['_build']
91 |
92 | # The reST default role (used for this markup: `text`) to use for all
93 | # documents.
94 | #default_role = None
95 |
96 | # If true, '()' will be appended to :func: etc. cross-reference text.
97 | #add_function_parentheses = True
98 |
99 | # If true, the current module name will be prepended to all description
100 | # unit titles (such as .. function::).
101 | #add_module_names = True
102 |
103 | # If true, sectionauthor and moduleauthor directives will be shown in the
104 | # output. They are ignored by default.
105 | #show_authors = False
106 |
107 | # The name of the Pygments (syntax highlighting) style to use.
108 | pygments_style = 'sphinx'
109 |
110 | # A list of ignored prefixes for module index sorting.
111 | #modindex_common_prefix = []
112 |
113 | # If true, keep warnings as "system message" paragraphs in the built documents.
114 | #keep_warnings = False
115 |
116 |
117 | # -- Options for HTML output ----------------------------------------------
118 | on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
119 |
120 | if not on_rtd: # only import and set the theme if we're building docs locally
121 | import sphinx_rtd_theme
122 | html_theme = 'sphinx_rtd_theme'
123 | html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
124 | else:
125 | # The theme to use for HTML and HTML Help pages. See the documentation for
126 | # a list of builtin themes.
127 | html_theme = 'default'
128 |
129 | # Theme options are theme-specific and customize the look and feel of a theme
130 | # further. For a list of options available for each theme, see the
131 | # documentation.
132 | #html_theme_options = {}
133 |
134 | # Add any paths that contain custom themes here, relative to this directory.
135 | #html_theme_path = []
136 |
137 | # The name for this set of Sphinx documents. If None, it defaults to
138 | # " v documentation".
139 | #html_title = None
140 |
141 | # A shorter title for the navigation bar. Default is the same as html_title.
142 | #html_short_title = None
143 |
144 | # The name of an image file (relative to this directory) to place at the top
145 | # of the sidebar.
146 | #html_logo = None
147 |
148 | # The name of an image file (within the static path) to use as favicon of the
149 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
150 | # pixels large.
151 | #html_favicon = None
152 |
153 | # Add any paths that contain custom static files (such as style sheets) here,
154 | # relative to this directory. They are copied after the builtin static files,
155 | # so a file named "default.css" will overwrite the builtin "default.css".
156 | html_static_path = ['_static']
157 |
158 | # Add any extra paths that contain custom files (such as robots.txt or
159 | # .htaccess) here, relative to this directory. These files are copied
160 | # directly to the root of the documentation.
161 | #html_extra_path = []
162 |
163 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
164 | # using the given strftime format.
165 | #html_last_updated_fmt = '%b %d, %Y'
166 |
167 | # If true, SmartyPants will be used to convert quotes and dashes to
168 | # typographically correct entities.
169 | #html_use_smartypants = True
170 |
171 | # Custom sidebar templates, maps document names to template names.
172 | #html_sidebars = {}
173 |
174 | # Additional templates that should be rendered to pages, maps page names to
175 | # template names.
176 | #html_additional_pages = {}
177 |
178 | # If false, no module index is generated.
179 | #html_domain_indices = True
180 |
181 | # If false, no index is generated.
182 | #html_use_index = True
183 |
184 | # If true, the index is split into individual pages for each letter.
185 | #html_split_index = False
186 |
187 | # If true, links to the reST sources are added to the pages.
188 | #html_show_sourcelink = True
189 |
190 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
191 | #html_show_sphinx = True
192 |
193 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
194 | #html_show_copyright = True
195 |
196 | # If true, an OpenSearch description file will be output, and all pages will
197 | # contain a tag referring to it. The value of this option must be the
198 | # base URL from which the finished HTML is served.
199 | #html_use_opensearch = ''
200 |
201 | # This is the file name suffix for HTML files (e.g. ".xhtml").
202 | #html_file_suffix = None
203 |
204 | # Output file base name for HTML help builder.
205 | htmlhelp_basename = 'WavesDoc'
206 |
207 |
208 | # -- Options for LaTeX output ---------------------------------------------
209 |
210 | latex_elements = {
211 | # The paper size ('letterpaper' or 'a4paper').
212 | #'papersize': 'letterpaper',
213 |
214 | # The font size ('10pt', '11pt' or '12pt').
215 | #'pointsize': '10pt',
216 |
217 | # Additional stuff for the LaTeX preamble.
218 | #'preamble': '',
219 | }
220 |
221 | # Grouping the document tree into LaTeX files. List of tuples
222 | # (source start file, target name, title,
223 | # author, documentclass [howto, manual, or own class]).
224 | latex_documents = [
225 | ('index', 'ReadtheDocsTemplate.tex', u'WAVES Documentation',
226 | u'Read the Docs', 'manual'),
227 | ]
228 |
229 | # The name of an image file (relative to this directory) to place at the top of
230 | # the title page.
231 | #latex_logo = None
232 |
233 | # For "manual" documents, if this is true, then toplevel headings are parts,
234 | # not chapters.
235 | #latex_use_parts = False
236 |
237 | # If true, show page references after internal links.
238 | #latex_show_pagerefs = False
239 |
240 | # If true, show URL addresses after external links.
241 | #latex_show_urls = False
242 |
243 | # Documents to append as an appendix to all manuals.
244 | #latex_appendices = []
245 |
246 | # If false, no module index is generated.
247 | #latex_domain_indices = True
248 |
249 |
250 | # -- Options for manual page output ---------------------------------------
251 |
252 | # One entry per manual page. List of tuples
253 | # (source start file, name, description, authors, manual section).
254 | man_pages = [
255 | ('index', 'readthedocstemplate', u'WAVES Man page Documentation',
256 | [u'WAVES'], 1)
257 | ]
258 |
259 | # If true, show URL addresses after external links.
260 | #man_show_urls = False
261 |
262 |
263 | # -- Options for Texinfo output -------------------------------------------
264 |
265 | # Grouping the document tree into Texinfo files. List of tuples
266 | # (source start file, target name, title, author,
267 | # dir menu entry, description, category)
268 | texinfo_documents = [
269 | ('index', 'ReadtheDocsTemplate', u'WAVES Documentation',
270 | u'WAVES', 'ReadtheDocsTemplate', 'One line description of project.',
271 | 'Miscellaneous'),
272 | ]
273 |
274 | # Documents to append as an appendix to all manuals.
275 | #texinfo_appendices = []
276 |
277 | # If false, no module index is generated.
278 | #texinfo_domain_indices = True
279 |
280 | # How to display URL addresses: 'footnote', 'no', or 'inline'.
281 | #texinfo_show_urls = 'footnote'
282 |
283 | # If true, do not generate a @detailmenu in the "Top" node's menu.
284 | #texinfo_no_detailmenu = False
285 |
286 | autodoc_member_order = 'bysource'
287 | autodoc_default_flags = ['members']
288 |
289 | intersphinx_mapping = {
290 | 'python': ('http://python.readthedocs.org/en/v2.7.2/', None),
291 | 'django': ('http://docs.djangoproject.com/en/1.9/', 'https://docs.djangoproject.com/en/1.9/_objects/'),
292 | 'sphinx': ('http://sphinx.readthedocs.org/en/latest/', None),
293 | }
294 |
295 | locale_dirs = [get_python_lib() + '/django/conf/locale/']
296 |
297 |
298 |
299 | def setup(app):
300 | from django_sphinx import process_docstring
301 | # Register the docstring processor with sphinx
302 | app.connect('autodoc-process-docstring', process_docstring)
303 |
--------------------------------------------------------------------------------
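Because conf.py imports waves.adaptors.galaxy and calls django.setup(), the documentation build needs the Django settings module to be importable. A minimal local build sketch, assuming Sphinx >= 1.7 (which provides sphinx.cmd.build.build_main) and sphinx_rtd_theme installed:

import os

# Not strictly required (conf.py sets it too), but harmless and explicit.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'waves_galaxy.settings')

from sphinx.cmd.build import build_main

# Equivalent to: sphinx-build -b html docs docs/_build/html (run from the repo root).
build_main(['-b', 'html', 'docs', 'docs/_build/html'])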
/waves/adaptors/galaxy/tool.py:
--------------------------------------------------------------------------------
1 | """ Remote Galaxy API adaptor """
2 | from __future__ import unicode_literals
3 |
4 | import logging
5 | import time
6 | from os.path import join
7 |
8 | import bioblend
9 | import requests
10 | from bioblend.galaxy.client import ConnectionError
11 | from bioblend.galaxy.objects import GalaxyInstance
12 |
13 | from waves.wcore.adaptors.const import JobStatus, JobRunDetails
14 | from waves.adaptors.galaxy.exception import GalaxyAdaptorConnectionError
15 | from waves.wcore.adaptors.api import ApiKeyAdaptor
16 | from waves.wcore.adaptors.exceptions import AdaptorJobException, AdaptorExecException, AdaptorConnectException
17 | from waves.wcore.models import JobOutput
18 |
19 | logger = logging.getLogger(__name__)
20 |
21 | __group__ = 'Galaxy'
22 | __all__ = ['GalaxyJobAdaptor']
23 |
24 |
25 | class GalaxyJobAdaptor(ApiKeyAdaptor):
26 | """
27 | WAVES adaptor for the Galaxy bioblend API; it maps calls to the Galaxy API onto the behaviour expected by the base class.
28 |
29 | Expected parameters to init call (dictionary):
30 |
31 | **Init parameters:**
32 | :param host: the ip address where Galaxy is set up (default: http://localhost)
33 | :param username: remote user name in Galaxy server
34 | :param app_key: remote user's app key in Galaxy
35 | :param library_dir: remote library dir, where to place files in order to create galaxy histories
36 |
37 | """
38 | name = 'Galaxy remote tool adaptor (api_key)'
39 | _states_map = dict(
40 | new=JobStatus.JOB_QUEUED,
41 | queued=JobStatus.JOB_QUEUED,
42 | running=JobStatus.JOB_RUNNING,
43 | waiting=JobStatus.JOB_RUNNING,
44 | error=JobStatus.JOB_ERROR,
45 | ok=JobStatus.JOB_COMPLETED
46 | )
47 | library_dir = ""
48 |
49 | def __init__(self, command=None, protocol='http', host="localhost", port='', api_base_path='', api_endpoint='',
50 | app_key=None, library_dir="", **kwargs):
51 | super(GalaxyJobAdaptor, self).__init__(command, protocol, host, port, api_base_path, api_endpoint,
52 | app_key, **kwargs)
53 |
54 | self.library_dir = library_dir
55 |
56 | @property
57 | def init_params(self):
58 | """
59 | Expected Galaxy remote platform initialization parameters; defaults can be set in waves.wcore.adaptors.addons.env
60 |
61 | **returns**
62 | - host: Galaxy full host url
63 | - port: Galaxy host port
64 | - app_key: Galaxy remote user api_key
65 | - library_dir: Galaxy remote user library, no default
66 | - tool_id: Galaxy remote tool id, should be set for each Service, no default
67 |
68 | :return: A dictionary containing expected init params
69 | :rtype: dict
70 | """
71 | base_params = super(GalaxyJobAdaptor, self).init_params
72 | base_params.update(dict(library_dir=self.library_dir))
73 | return base_params
74 |
75 | def _connect(self):
76 | """ Create a bioblend galaxy object
77 | :raise: `waves.wcore.adaptors.addons.adaptors.galaxy.exception.GalaxyAdaptorConnectionError`
78 | """
79 | try:
80 | self.connector = GalaxyInstance(url=self.complete_url, api_key=self.app_key)
81 | except ConnectionError as exc:
82 | self._connected = False
83 | raise GalaxyAdaptorConnectionError(exc)
84 |
85 | def _disconnect(self):
86 | """ Setup Galaxy instance to 'disconnected' """
87 | self.connector = None
88 | self._connected = False
89 |
90 | def _prepare_job(self, job):
91 | """ - Create a new history from job data (hashkey as identifier)
92 | - upload job input files to galaxy in this newly created history
93 | - associate uploaded files galaxy id with input
94 | """
95 | import os
96 | try:
97 | history = self.connector.histories.create(name=job.title)
98 | job.remote_history_id = history.id
99 | logger.debug(u'New galaxy history to ' + history.id)
100 | if len(job.input_files) == 0:
101 | logger.info("No input files for galaxy service %s", job)
102 | for job_input_file in job.input_files:
103 | file_full_path = os.path.join(job.working_dir, job_input_file.value)
104 | upload = history.upload_file(file_full_path, file_name=job_input_file.name)
105 | job_input_file.remote_input_id = upload.id
106 | logger.debug('Remote data id %s for %s (%s)', job_input_file.remote_input_id, job_input_file.name,
107 | job_input_file.value)
108 | # PATCH: wait for the uploads to complete (history state 'ok')
109 | state_history = self.connector.histories.get(id_=str(job.remote_history_id))
110 | # Poll the history state, but never for longer than max_time seconds
111 | t0 = time.clock()
112 | max_time = 360
113 | while state_history.state != 'ok' and time.clock() - t0 < max_time:
114 | time.sleep(2.5)
115 | state_history = self.connector.histories.get(id_=str(job.remote_history_id))
116 | if state_history.state != 'ok':
117 | raise AdaptorExecException('Maximum time reached to prepare job')
118 | job.message = 'Job prepared with %i args ' % job.job_inputs.count()
119 | logger.debug(u'History initialized [galaxy_history_id: %s]', job.slug)
120 | return job
121 | except bioblend.galaxy.client.ConnectionError as e:
122 | exc = GalaxyAdaptorConnectionError(e)
123 | job.message = exc.message
124 | raise exc
125 | except IOError as e:
126 | raise AdaptorJobException('File upload error %s' % e.message)
127 |
128 | def _run_job(self, job):
129 | """
130 | Launch the job with current parameters from associated history
131 | Args:
132 | job:
133 | """
134 | try:
135 | history = self.connector.histories.get(id_=str(job.remote_history_id))
136 | logger.debug("History state before run: %s", history.state)
137 | if history.state == 'ok':
138 | galaxy_tool = self.connector.tools.get(id_=self.command)
139 | if galaxy_tool and type(galaxy_tool) is not list:
140 | logger.debug('Galaxy tool %s', galaxy_tool)
141 | inputs = {}
142 | for input_file in job.input_files:
143 | inputs[input_file.remote_input_id] = input_file.name
144 |
145 | for input_param in job.input_params:
146 | if input_param.value != 'None' and input_param.value is not None:
147 | inputs[input_param.name] = input_param.value
148 | logger.debug(u'Inputs added ' + str(inputs))
149 | output_data_sets = galaxy_tool.run(inputs, history=history, wait=False)
150 | for data_set in output_data_sets:
151 | job.remote_job_id = data_set.wrapped['creating_job']
152 | logger.debug(u'Job ID ' + job.remote_job_id)
153 | break
154 | remote_job = self.connector.jobs.get(job.remote_job_id, full_details=True)
155 | logger.debug('Job info %s', remote_job)
156 | remote_outputs = remote_job.wrapped['outputs']
157 | for remote_output in remote_outputs:
158 | output_data = remote_outputs[remote_output]
159 | logger.debug('Current output %s', remote_output)
160 | logger.debug('Remote output details %s', output_data)
161 | logger.debug('Remote output id %s', output_data['id'])
162 |
163 | job_output = next((x for x in job.outputs.all() if x.api_name == remote_output), None)
164 | if job_output is not None:
165 | job_output.remote_output_id = str(output_data['id'])
166 | job_output.save()
167 | else:
168 | logger.warn('Unable to retrieve job output in job description ! [%s]', remote_output)
169 | logger.info('Searched in %s', [x.name + "/" + x.api_name for x in job.outputs.all()])
170 | job.outputs.add(JobOutput.objects.create(_name=remote_output,
171 | job=job,
172 | remote_output_id=output_data['id']))
173 | for data_set in output_data_sets:
174 | logger.debug('Dataset Info %s', data_set)
175 | job_output = next((x for x in job.outputs.all() if x.remote_output_id == data_set.id), None)
176 | if job_output is not None:
177 | logger.debug("Dataset updates job output %s with %s, %s",
178 | job_output,
179 | data_set.name,
180 | data_set.file_ext
181 | )
182 | job_output.value = data_set.name
183 | job_output.extension = data_set.file_ext
184 | job_output.save()
185 | logger.debug(u'Output value updated [%s - %s]' % (
186 | data_set.id, '.'.join([data_set.name, data_set.file_ext])))
187 | job.message = "Job queued"
188 | return job
189 | else:
190 | raise AdaptorExecException(None, 'Unable to retrieve associated tool %s' % self.command)
191 | else:
192 | raise AdaptorExecException(None, 'History not ready %s' % self.command)
193 | except requests.exceptions.RequestException as e:
194 | # TODO Manage specific Exception to be more precise
195 | job.message = 'Error in request for run %s ' % e.message
196 | raise AdaptorConnectException(e, 'RequestError')
197 | except bioblend.galaxy.client.ConnectionError as e:
198 | job.message = 'Connection error for run %s:%s' % (e.message, e.body)
199 | raise GalaxyAdaptorConnectionError(e)
200 |
201 | def _cancel_job(self, job):
202 | """ Jobs cannot be cancelled for Galaxy runners
203 | """
204 | pass
205 |
206 | def _job_status(self, job):
207 | try:
208 | remote_job = self.connector.jobs.get(job.remote_job_id)
209 | logger.debug('Current job remote state %s', remote_job.state)
210 | return remote_job.state
211 | except bioblend.galaxy.client.ConnectionError as e:
212 | job.message = 'Connection error for run %s:%s' % (e.message, e.body)
213 | logger.error('Galaxy connection error %s', e)
214 | raise GalaxyAdaptorConnectionError(e)
215 |
216 | def _job_results(self, job):
217 | try:
218 | remote_job = self.connector.jobs.get(job.remote_job_id, full_details=True)
219 | logger.debug('Retrieve job results from Galaxy %s', job.remote_job_id)
220 | if remote_job:
221 | job.exit_code = remote_job.wrapped['exit_code']
222 | if remote_job.state == 'ok':
223 | logger.debug('Job info %s', remote_job)
224 | for job_output in job.outputs.all():
225 | if job_output.remote_output_id:
226 | logger.debug("Retrieved data from output %s:%s", job_output, job_output.remote_output_id)
227 | self.connector.gi.histories.download_dataset(job.remote_job_id,
228 | job_output.remote_output_id,
229 | join(job.working_dir, job_output.file_path),
230 | use_default_filename=False)
231 | logger.debug("Saving output to %s" % join(job.working_dir, job_output.file_path))
232 | # GET stdout / stderr from Galaxy
233 | with open(join(job.working_dir, job.stdout), 'a') as out, \
234 | open(join(job.working_dir, job.stderr), 'a') as err:
235 | try:
236 | if remote_job.wrapped['stdout']:
237 | out.write(remote_job.wrapped['stdout'])
238 | except KeyError:
239 | logger.warning('No stdout from remote job')
240 | pass
241 | try:
242 | if remote_job.wrapped['stderr']:
243 | err.write(remote_job.wrapped['stderr'])
244 | except KeyError:
245 | logger.warning('No stderr from remote job')
246 | pass
247 | job.results_available = True
248 | else:
249 | logger.warning("Job not found %s ", job.remote_job_id)
250 | return job
251 | except bioblend.galaxy.client.ConnectionError as e:
252 | job.results_available = False
253 | job.message = 'Connection error for run %s:%s' % (e.message, e.body)
254 | raise GalaxyAdaptorConnectionError(e)
255 |
256 | def _job_run_details(self, job):
257 | remote_job = self.connector.jobs.get(job.remote_job_id, full_details=True)
258 | finished = None
259 | started = None
260 | extra = None
261 | if 'job_metrics' in remote_job.wrapped:
262 | for job_metric in remote_job.wrapped['job_metrics']:
263 | if job_metric['name'] == "end_epoch":
264 | finished = job_metric['raw_value']
265 | if job_metric['name'] == "start_epoch":
266 | started = job_metric['raw_value']
267 | if job_metric['name'] == "galaxy_slots":
268 | extra = "%s %s" % (job_metric['value'], job_metric['title'])
269 | created = remote_job.wrapped['create_time']
270 | name = job.title
271 | exit_code = remote_job.wrapped['exit_code']
272 | details = JobRunDetails(job.id, str(job.slug), remote_job.id, name, exit_code,
273 | created, started, finished, extra)
274 | logger.debug('Job Exit Code %s %s', exit_code, finished)
275 | # TODO see if remove history is needed
276 | # galaxy_allow_purge = self.connector.gi.config.get_config()['allow_user_dataset_purge']
277 | # self.connector.histories.delete(name=str(job.slug), purge=bool(galaxy_allow_purge))
278 | return details
279 |
280 | def test_connection(self):
281 | try:
282 | self.connector = self.connect()
283 | remote_user = self.connector.gi.users.get_current_user()
284 | return remote_user['username'] is not None and remote_user['deleted'] is False
285 | except ConnectionError as exc:
286 | self._connected = False
287 | raise GalaxyAdaptorConnectionError(exc)
288 | return False
289 |
290 | def connexion_string(self):
291 | return self.complete_url + '?api_key=' + str(self.app_key)
292 |
293 | @property
294 | def importer(self):
295 | from waves.adaptors.galaxy.importers import GalaxyToolImporter
296 | return GalaxyToolImporter(self)
297 |
--------------------------------------------------------------------------------
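A minimal usage sketch for the adaptor above (illustrative, not part of the package): the keyword arguments mirror the __init__ signature of GalaxyJobAdaptor, while the host, API key and tool id below are placeholders to replace with a real Galaxy instance.

from waves.adaptors.galaxy.tool import GalaxyJobAdaptor

# Placeholder values: point these at a reachable Galaxy server and a real tool id.
adaptor = GalaxyJobAdaptor(command='testtoolshed.g2.bx.psu.edu/repos/rnateam/mafft/rbc_mafft/7.221.3',
                           protocol='https',
                           host='usegalaxy.org',
                           app_key='YOUR-GALAXY-API-KEY')

# test_connection() opens a bioblend GalaxyInstance and checks the user behind the API key.
if adaptor.test_connection():
    print('Galaxy server reachable and API key accepted')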
/waves/adaptors/galaxy/importers.py:
--------------------------------------------------------------------------------
1 | """ Galaxy remote platform Services / Workflow Import classes"""
2 | from __future__ import unicode_literals
3 |
4 | import logging
5 | import os
6 | import tempfile
7 | import json
8 | import bioblend
9 | import six
10 | import re
11 | from bioblend import ConnectionError
12 | from bioblend.galaxy.objects import client
13 |
14 | from waves.adaptors.galaxy.exception import GalaxyAdaptorConnectionError
15 | from waves.wcore.adaptors.exceptions import *
16 | from waves.wcore.adaptors.importer import AdaptorImporter
17 | from waves.wcore.models.inputs import *
18 | from waves.wcore.models.const import ParamType, OptType
19 | from waves.wcore.models import get_submission_model, SubmissionOutput, get_service_model, Runner
20 |
21 | Submission = get_submission_model()
22 | Service = get_service_model()
23 |
24 | logger = logging.getLogger(__file__)
25 |
26 |
27 | def _get_input_value(tool_input, field, default=''):
28 | return tool_input[field] if field in tool_input and tool_input[field] != '' else default
29 |
30 |
31 | class GalaxyToolImporter(AdaptorImporter):
32 | """ Allow Service to automatically import submission parameters from Galaxy bioblend API """
33 | #: List of tool categories that do not correspond to a 'WAVES' service tool
34 | _unwanted_categories = [None, 'Get Data', 'Filter and sort', 'Collection Operations', 'Graph/Display Data',
35 | 'Send Data', 'Text Manipulation', 'Fetch Alignments', ]
36 |
37 | # TODO share constants with waves_addons-webapp (moved in main adaptors module ?)
38 | _type_map = dict(
39 | text=ParamType.TYPE_TEXT,
40 | boolean=ParamType.TYPE_BOOLEAN,
41 | integer=ParamType.TYPE_INT,
42 | float=ParamType.TYPE_DECIMAL,
43 | data=ParamType.TYPE_FILE,
44 | select=ParamType.TYPE_LIST,
45 | conditional='conditional',
46 | data_collection=ParamType.TYPE_FILE,
47 | genomebuild=ParamType.TYPE_LIST,
48 | )
49 |
50 | _clazz_map = dict(
51 | text=TextParam,
52 | boolean=BooleanParam,
53 | integer=IntegerParam,
54 | float=DecimalParam,
55 | data=FileInput,
56 | select=ListParam,
57 | conditional=ListParam,
58 | data_collection=FileInput,
59 | genomebuild=ListParam,
60 | )
61 |
62 | def get_clazz(self, type_param):
63 | self.logger.debug('Mapping %s' % type_param)
64 | param_clazz = self._clazz_map.get(type_param, None)
65 | if param_clazz is None:
66 | self.logger.warning("Unable to map %s", type_param)
67 | raise UnmanagedInputTypeException()
68 | else:
69 | return param_clazz
70 |
71 | def connect(self):
72 | """
73 | Connect to remote Galaxy Host
74 | :return:
75 | """
76 | self.adaptor.connect()
77 | self._tool_client = self.adaptor.connector.tools
78 |
79 | def load_tool_params(self, tool_id, for_submission):
80 | details = self._tool_client.get(id_=tool_id, io_details=True, link_details=True)
81 | self.logger.debug('Tools detailed: \n%s ' % json.dumps(details.wrapped))
82 | self.logger.debug('----------- IMPORT INPUTS --------------')
83 | for_submission.inputs = self.import_service_params(details.wrapped.get('inputs'))
84 | self.logger.debug('----------- // INPUTS --------------')
85 | self.logger.debug('----------- IMPORT OUTPUTS --------------')
86 | for_submission.outputs = self.import_service_outputs(details.wrapped.get('outputs'))
87 | self.logger.debug('----------- // OUTPUTS --------------')
88 | self.logger.debug('----------- IMPORT EXITCODES --------------')
89 | for_submission.exit_code = self.import_exit_codes([])
90 | self.logger.debug('----------- // EXITCODES --------------')
91 |
92 | def load_tool_details(self, tool_id):
93 | """
94 | Load remote tool details and return an initialized Service object (not saved)
95 | :param tool_id:
96 | :return: Service
97 | """
98 | try:
99 | details = self._tool_client.get(id_=tool_id, io_details=False, link_details=False)
100 | description = details.wrapped.get('description')
101 | # TODO add get retrieve existing services for updates
102 | service = Service(name=details.name,
103 | description=description,
104 | short_description=description,
105 | edam_topics=','.join(details.wrapped.get('edam_topics', [])),
106 | edam_operations=','.join(
107 | details.wrapped.get('edam_operations', [])),
108 | remote_service_id=tool_id,
109 | version=details.version)
110 | return service
111 | except ConnectionError as e:
112 | self.error(GalaxyAdaptorConnectionError(e))
113 | return None
114 |
115 | def _list_services(self):
116 | """
117 | List available tools on remote Galaxy server, filtering with ``_unwanted_categories``
118 | Group items by categories
119 |
120 | :return: A list of tuples corresponding to format used in Django for Choices
121 | """
122 | try:
123 | tool_list = self._tool_client.list()
124 | group_list = sorted(set(map(lambda x: x.wrapped['panel_section_name'], tool_list)), key=lambda z: z)
125 | group_list = [x for x in group_list if x not in self._unwanted_categories]
126 | service_list = [(x,
127 | sorted(
128 | (Service(remote_service_id=y.id, name=y.name, version=y.version,
129 | description=y.wrapped['description']) for y in tool_list if
130 | y.wrapped['panel_section_name'] == x and y.wrapped['model_class'] == 'Tool'),
131 | key=lambda d: d.name)
132 | ) for x in group_list]
133 | return [(x[0], [
134 | (y.remote_service_id, y.name + ' ' + y.version + (' (%s)' % y.description if y.description else '')) for
135 | y in x[1]])
136 | for x in service_list]
137 | except ConnectionError as e:
138 | raise GalaxyAdaptorConnectionError(e)
139 |
140 | def import_exit_codes(self, exit_codes):
141 | # TODO see if galaxy tool give this info
142 | return []
143 |
144 | def import_service_params(self, data):
145 | inputs = []
146 | self.logger.debug("%i inputs to import ", len(data))
147 | self.logger.debug("-----------------------")
148 | i = 1
149 | for cur_input in data:
150 | tool_input_type = self.map_type(cur_input.get('type'))
151 | clazz = self.get_clazz(cur_input.get('type'))
152 | self.logger.info("Input #%i %s %s %s", i, cur_input.get('label'), cur_input.get('name'),
153 | cur_input.get('type'))
154 | self.logger.debug('Input details: \n%s ' % json.dumps(cur_input))
155 | self.logger.info("%s mapped to %s (%s)", cur_input.get('type'), tool_input_type, clazz)
156 | service_input = None
157 | if tool_input_type == 'section':
158 | service_input = self.import_service_params(cur_input.get('inputs'))
159 | elif tool_input_type == 'repeat':
160 | repeat_group = self._import_repeat(cur_input)
161 | cur_input.repeat_group = repeat_group
162 | service_input = self.import_service_params(cur_input.get('inputs'))
163 | for srv_input in service_input:
164 | # print "srv_input", srv_input
165 | srv_input.repeat_group = repeat_group
166 | elif tool_input_type == 'expand':
167 | self.warn(UnmanagedInputTypeException("Expand"))
168 | elif tool_input_type == 'conditional':
169 | service_input = self._import_conditional_set(cur_input)
170 | else:
171 | service_input = self._import_param(cur_input)
172 | if service_input is not None:
173 | if type(service_input) is list:
174 | inputs.extend(service_input)
175 | else:
176 | inputs.append(service_input)
177 | i += 1
178 | return inputs
179 |
180 | def _import_param(self, tool_input):
181 | """
182 | Import a single parameter and return a AParam object (or one of its subclass)
183 | :param tool_input: Received input
184 | :return: AParam
185 | """
186 | try:
187 | logger.info(
188 | 'Import param ' + tool_input.get('name', 'NoName') + "/" + tool_input.get('label', 'NoLabel'))
189 | self.logger.info(
190 | 'Import param ' + tool_input.get('name', 'NoName') + "/" + tool_input.get('label', 'NoLabel'))
191 | if tool_input.get('is_dynamic', False):
192 | raise UnmanagedInputTypeException(
193 | 'Dynamic field \'%s\':%s ' % (tool_input.get('name'), tool_input.get('label')))
194 | if tool_input.get('hidden'):
195 | required = None
196 | else:
197 | required = not tool_input.get('optional')
198 | ParamClazz = self.get_clazz(tool_input.get('type', 'text'))
199 | self.logger.info('Creating a %s ' % ParamClazz.__name__)
200 | srv_input = ParamClazz.objects.create(
201 | label=tool_input.get('label', tool_input.get('name', 'NoLabel')),
202 | name=tool_input.get('name', 'NoName'),
203 | default=tool_input.get('default', None),
204 | help_text=tool_input.get('help', ''),
205 | required=required,
206 | submission=self.submission,
207 | multiple=_get_input_value(tool_input, 'multiple') is True
208 | )
209 | # Add special type import data
210 | _import_func = getattr(self, '_import_' + tool_input.get('type', 'text'))
211 | self.logger.info('Import function %s ', _import_func.__name__)
212 | _import_func(tool_input, srv_input)
213 | if 'edam' in tool_input and 'edam_formats' in tool_input['edam']:
214 | srv_input.edam_formats = \
215 | ','.join([edam_format for edam_format in tool_input['edam']['edam_formats'] if edam_format])
216 | srv_input.edam_datas = \
217 | ','.join([edam_data for edam_data in tool_input['edam']['edam_data'] if edam_data])
218 | srv_input.save()
219 | return srv_input
220 | except UnmanagedInputTypeException as e:
221 | self.logger.error(e)
222 |
223 | self.warn(e)
224 | return None
225 | except KeyError as e:
226 | self.logger.error(e)
227 | self.warn(
228 | UnManagedAttributeTypeException(
229 | "Type:%s|Name:%s" % (tool_input.get('type', 'NA'), tool_input.get('name', 'NA'))))
230 | return None
231 | except AttributeError as e:
232 | self.warn(
233 | UnManagedAttributeException(
234 | "Type:%s|Name:%s|Label:%s" % (tool_input.get('type', 'NA'), tool_input.get('name', 'NA'),
235 | tool_input.get('label', 'NA'))))
236 | self.logger.warning("Attribute error %s", e.message)
237 | return None
238 | except Exception as e:
239 | self.logger.exception(e)
240 | self.error(Exception('UnexpectedError for input "%s" (%s)' % (tool_input['name'], e)))
241 | return None
242 |
243 | def _import_conditional_set(self, tool_input):
244 | self.logger.info('Import conditional set %s ' % tool_input.get('test_param'))
245 | test_data = tool_input.get('test_param')
246 | test_param = self._import_param(test_data)
247 | self.logger.debug('Imported conditional %s', test_param)
248 | for related in tool_input.get('cases', []):
249 | self.logger.info('Import case ' + related.get('value'))
250 | for when_input in related['inputs']:
251 | when = self._import_param(when_input)
252 | if when is not None:
253 | when.default = when_input.get('value', '')
254 | when.when_value = related.get('value')
255 | when.parent = test_param
256 | when.save()
257 | test_param.dependents_inputs.add(when)
258 | else:
259 | self.logger.warning("Unable to import this param %s ", when_input)
260 | return test_param
261 |
262 | def _import_text(self, tool_input, service_input):
263 | # TODO check if format needed
264 | service_input.default = tool_input.get('value', '')
265 |
266 | def _import_boolean(self, tool_input, service_input):
267 | service_input.true_value = tool_input.get('truevalue', 'True')
268 | service_input.false_value = tool_input.get('falsevalue', 'False')
269 | service_input.required = False
270 | self.logger.debug('ToolInputBoolean %s|%s', service_input.true_value, service_input.false_value)
271 |
272 | def _import_integer(self, tool_input, service_input):
273 | return self._import_number(tool_input, service_input)
274 |
275 | def _import_float(self, tool_input, service_input):
276 | return self._import_number(tool_input, service_input)
277 |
278 | def _import_number(self, tool_input, service_input):
279 | service_input.default = tool_input.get('value', '')
280 | service_input.min_val = tool_input.get('min', '')
281 | service_input.max_val = tool_input.get('max', '')
282 |
283 | def _import_data(self, tool_input, service_input):
284 | allowed_extensions = ", ".join([".%s" % val for val in _get_input_value(tool_input, 'extensions', [])])
285 | self.logger.debug("Allowed extensions: %s " % allowed_extensions)
286 | service_input.allowed_extensions = allowed_extensions
287 | self.logger.debug("Multiple: %s " % service_input.multiple)
288 |
289 | def _import_select(self, tool_input, service_input):
290 | service_input.default = _get_input_value(tool_input, 'value')
291 | options = []
292 | for option in _get_input_value(tool_input, 'options'):
293 | if option[1].strip() == '':
294 | option[1] = 'None'
295 | options.append('|'.join([option[0], option[1].strip()]))
296 | self.logger.debug('List options %s', options)
297 | display = _get_input_value(tool_input, 'display', None)
298 | if display == 'radio':
299 | service_input.list_mode = ListParam.DISPLAY_RADIO if not service_input.multiple else ListParam.DISPLAY_CHECKBOX
300 | service_input.list_elements = "\n".join(options)
301 |
302 | def _import_repeat(self, tool_input, service_input=None):
303 | return RepeatedGroup.objects.create(name=_get_input_value(tool_input, 'name'),
304 | title=_get_input_value(tool_input, 'title'),
305 | max_repeat=_get_input_value(tool_input, 'max'),
306 | min_repeat=_get_input_value(tool_input, 'min'),
307 | default=_get_input_value(tool_input, 'default'),
308 | submission=self.submission)
309 |
310 | def _import_genomebuild(self, tool_input, service_input):
311 | return self._import_select(tool_input, service_input)
312 |
313 | def import_service_outputs(self, outputs):
314 | self.logger.debug(u'Managing service outputs')
315 | service_outputs = []
316 | index = 0
317 | for tool_output in outputs:
318 | # self.logger.debug(tool_output.keys())
319 | self.logger.debug(tool_output.items())
320 | if tool_output.get('label').startswith('$'):
321 | label = tool_output.get('name')
322 | else:
323 | label = tool_output.get('label') if tool_output.get('label', '') != '' else tool_output.get('name')
324 | input_api_name = tool_output.get('name')
325 | service_output = SubmissionOutput(label=label,
326 | name=tool_output.get('name'),
327 | api_name=input_api_name,
328 | extension=".%s" % tool_output.get('format'),
329 | edam_format=tool_output.get('edam_format'),
330 | edam_data=tool_output.get('edam_data'),
331 | submission=self.submission,
332 | file_pattern=tool_output.get('name'))
333 |
334 | m = re.match(r"\$\{([a-z]+)\.([a-z]+)\}", tool_output.get('label'))
335 | if m is not None:
336 | input_related_name = m.group(2)
337 | self.logger.info("Value is depending on other input %s", m.group(1, 2))
338 | related_input = AParam.objects.filter(name=input_related_name, submission=self.submission).first()
339 | if related_input:
340 | self.logger.info('Found related \'%s\'', related_input)
341 | service_output.from_input = related_input
342 | service_output.file_pattern = "%s"
343 | service_output.description = "Issued from input '%s'" % input_related_name
344 | else:
345 | self.logger.warning('Related input not found %s', m.group(1,2))
346 | service_output.save()
347 | service_outputs.append(service_output)
348 | index += 1
349 | return service_outputs
350 |
351 | def _import_section(self, section):
352 | return self.import_service_params(section['inputs'])
353 |
354 |
355 | class GalaxyWorkFlowImporter(GalaxyToolImporter):
356 | """
357 | Galaxy Workflow service importer
358 | """
359 | workflow = None
360 | workflow_full_description = None
361 |
362 | def connect(self):
363 | """
364 | Connect to remote Galaxy Host
365 | :return:
366 | """
367 | self._tool_client = client.ObjWorkflowClient(self.adaptor.connect())
368 |
369 | def _list_services(self):
370 | try:
371 |
372 | tool_list = self._tool_client.list()
373 | return [
374 | (y.id, y.name) for y in tool_list if y.published is True
375 | ]
376 | except ConnectionError as e:
377 | raise GalaxyAdaptorConnectionError(e)
378 |
379 | def _list_remote_inputs(self, tool_id):
380 | self.logger.warn('Not Implemented yet')
381 | wl = self._tool_client.get(id_=tool_id)
382 | wc = bioblend.galaxy.workflows.WorkflowClient(self._tool_client.gi)
383 | with tempfile.TemporaryFile() as tmp_file:
384 | wc.export_workflow_to_local_path(workflow_id=tool_id,
385 | file_local_path=os.path.join(tempfile.gettempdir(), tmp_file.name),
386 | use_default_filename=False)
387 | if self.logger.isEnabledFor(logging.DEBUG):
388 | self.logger.debug('inputs %s', wl.inputs)
389 | self.logger.debug('inputs_i %s', wl.data_input_ids)
390 | self.logger.debug('inputs %s', wl.inputs['0'])
391 | self.logger.debug('labels %s', wl.input_labels)
392 | self.logger.debug('runnable %s', wl.is_runnable)
393 | for id_step in wl.sorted_step_ids():
394 | step = wl.steps[id_step]
395 | if self.logger.isEnabledFor(logging.DEBUG):
396 | self.logger.debug('step %s %s %s:', step.type, ' name ', step.name)
397 | self.logger.debug('input_steps %s', step.input_steps)
398 | self.logger.debug('tool_inputs %s', step.tool_inputs)
399 | self.logger.debug('tool_id %s', step.tool_id)
400 | return wl.inputs
401 |
402 | def _list_remote_outputs(self, tool_id):
403 | self.logger.warn('Not Implemented yet')
404 | return []
405 |
406 | def import_exit_codes(self, tool_id):
407 | self.logger.warn('Not Implemented yet')
408 | return []
409 |
410 | def load_tool_details(self, tool_id):
411 | self.workflow = self._tool_client.get(id_=tool_id)
412 | self.workflow_full_description = self.workflow.export()
413 | # TODO refactor this to import values from workflow
414 | return Service.objects.create(name='new workflow',
415 | version='1.0',
416 | short_description="")
417 |
418 | def import_service_params(self, data):
419 | service_inputs = []
420 | for dat in six.iteritems(data):
421 | dic = dat[-1]
422 | service_input = TextParam(name=dic['label'],
423 | label=dic['label'],
424 | submission=self.service,
425 | default=dic['value'],
426 | mandatory=True)
427 | self.logger.debug('Service input %s ', service_input)
428 | service_inputs.append(service_input)
429 | return service_inputs
430 |
--------------------------------------------------------------------------------
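An illustrative sketch of driving the GalaxyToolImporter above (assumptions: the adaptor below is a placeholder configuration; load_tool_details is used as defined in this file and the returned Service is not saved):

from waves.adaptors.galaxy.tool import GalaxyJobAdaptor

# Hypothetical adaptor pointing at a Galaxy server (placeholder credentials).
adaptor = GalaxyJobAdaptor(protocol='https', host='usegalaxy.org', app_key='YOUR-GALAXY-API-KEY')

importer = adaptor.importer   # GalaxyToolImporter bound to this adaptor
importer.connect()            # opens the bioblend tool client on the adaptor connection

tool_id = 'testtoolshed.g2.bx.psu.edu/repos/dereeper/phyml/phyml/3.1'
service = importer.load_tool_details(tool_id)   # unsaved Service built from remote metadata
if service is not None:
    print(service.name + ' ' + service.version)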