├── .github └── workflows │ ├── docs.yml │ ├── publish-docs.yml │ ├── pylint.yml │ └── testing-coverage.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .readthedocs.yaml ├── HISTORY.md ├── LICENSE ├── README.rst ├── RELEASE.rst ├── beeflow ├── __init__.py ├── client │ ├── README.md │ ├── __init__.py │ ├── bee_client.py │ ├── core.py │ └── remote_client.py ├── cloud_launcher.py ├── common │ ├── README.md │ ├── __init__.py │ ├── api.py │ ├── build │ │ ├── README.md │ │ ├── __init__.py │ │ ├── build_driver.py │ │ ├── container_drivers.py │ │ └── utils.py │ ├── build_interfaces.py │ ├── cli.py │ ├── cli_connection.py │ ├── cloud │ │ ├── README.md │ │ ├── __init__.py │ │ ├── chameleoncloud.py │ │ ├── cloud.py │ │ ├── constants.py │ │ ├── google.py │ │ ├── openstack.py │ │ └── provider.py │ ├── config_driver.py │ ├── config_utils.py │ ├── config_validator.py │ ├── connection.py │ ├── container_path.py │ ├── crt │ │ ├── __init__.py │ │ ├── charliecloud_driver.py │ │ ├── crt_driver.py │ │ └── singularity_driver.py │ ├── crt_interface.py │ ├── cwl │ │ ├── __init__.py │ │ ├── cwl.py │ │ ├── examples │ │ │ ├── Dockerfile.comd-x86_64 │ │ │ ├── README.txt │ │ │ ├── cat_grep_tar.py │ │ │ ├── comd.py │ │ │ ├── comd_driver.py │ │ │ ├── comd_pre.sh │ │ │ └── lorem.txt │ │ └── workflow.py │ ├── db │ │ ├── __init__.py │ │ ├── bdb.py │ │ ├── client_db.py │ │ ├── sched_db.py │ │ ├── tm_db.py │ │ └── wfm_db.py │ ├── deps │ │ ├── __init__.py │ │ ├── celery_manager.py │ │ ├── container_manager.py │ │ ├── neo4j_manager.py │ │ └── redis_manager.py │ ├── expr.py │ ├── gdb │ │ ├── DESIGN.md │ │ ├── __init__.py │ │ ├── gdb_driver.py │ │ ├── generate_graph.py │ │ ├── graphml_key_updater.py │ │ ├── neo4j_cypher.py │ │ └── neo4j_driver.py │ ├── integration │ │ ├── __init__.py │ │ ├── generated_workflows.py │ │ └── utils.py │ ├── integration_test.py │ ├── log.py │ ├── parser │ │ ├── .gitignore │ │ ├── README.md │ │ ├── __init__.py │ │ └── parser.py │ ├── paths.py │ ├── states.py │ ├── tab_completion.py │ ├── validation.py │ ├── wf_data.py │ ├── wf_interface.py │ ├── wf_profiler.py │ ├── worker │ │ ├── README.md │ │ ├── __init__.py │ │ ├── flux_worker.py │ │ ├── lsf_worker.py │ │ ├── simple_worker.py │ │ ├── slurm_worker.py │ │ ├── utils.py │ │ └── worker.py │ └── worker_interface.py ├── data │ ├── cloud_templates │ │ ├── dora.jinja │ │ └── dora.yaml │ ├── cwl │ │ ├── README.md │ │ ├── bee_workflows │ │ │ ├── README.md │ │ │ ├── blast │ │ │ │ ├── README.md │ │ │ │ ├── blast.yml │ │ │ │ ├── blast_output.cwl │ │ │ │ ├── blast_output_err.cwl │ │ │ │ ├── blast_split.cwl │ │ │ │ ├── blast_wf.cwl │ │ │ │ ├── blast_worker001.cwl │ │ │ │ ├── blast_worker002.cwl │ │ │ │ └── input │ │ │ │ │ └── small.fasta │ │ │ ├── cat-grep-fail │ │ │ │ ├── README.md │ │ │ │ ├── cat.cwl │ │ │ │ ├── grep0.cwl │ │ │ │ ├── grep1.cwl │ │ │ │ ├── input.yml │ │ │ │ ├── lorem.txt │ │ │ │ ├── tar.cwl │ │ │ │ └── workflow.cwl │ │ │ ├── cgt-2branches │ │ │ │ ├── README.txt │ │ │ │ ├── cat.cwl │ │ │ │ ├── cat2.cwl │ │ │ │ ├── grep0.cwl │ │ │ │ ├── grep1.cwl │ │ │ │ ├── grep2.cwl │ │ │ │ ├── grep3.cwl │ │ │ │ ├── input.yml │ │ │ │ ├── lorem.txt │ │ │ │ ├── tar.cwl │ │ │ │ ├── tar2.cwl │ │ │ │ └── workflow.cwl │ │ │ ├── clamr-ci │ │ │ │ ├── README.md │ │ │ │ ├── clamr.cwl │ │ │ │ ├── clamr_job.json │ │ │ │ ├── clamr_job.yml │ │ │ │ ├── clamr_wf.cwl │ │ │ │ └── ffmpeg.cwl │ │ │ ├── clamr-ffmpeg-build_script │ │ │ │ ├── Dockerfile.clamr-ffmpeg │ │ │ │ ├── README.md │ │ │ │ ├── clamr.cwl │ │ │ │ ├── clamr_job.json │ │ │ │ ├── clamr_job.yml │ │ │ │ ├── 
clamr_wf.cwl │ │ │ │ ├── ffmpeg.cwl │ │ │ │ ├── post_run.sh │ │ │ │ └── pre_run.sh │ │ │ ├── clamr-ffmpeg-validate_script │ │ │ │ ├── Dockerfile.clamr-ffmpeg │ │ │ │ ├── README.md │ │ │ │ ├── clamr.cwl │ │ │ │ ├── clamr_job.json │ │ │ │ ├── clamr_job.yml │ │ │ │ ├── clamr_wf.cwl │ │ │ │ ├── ffmpeg.cwl │ │ │ │ ├── post_run.sh │ │ │ │ └── pre_run.sh │ │ │ ├── clamr-wf-chicoma │ │ │ │ ├── README.md │ │ │ │ ├── clamr.cwl │ │ │ │ ├── clamr_job.json │ │ │ │ ├── clamr_job.yml │ │ │ │ ├── clamr_wf.cwl │ │ │ │ └── ffmpeg.cwl │ │ │ ├── clamr-wf-noyaml │ │ │ │ ├── README.md │ │ │ │ ├── lsf-charliecloud │ │ │ │ │ ├── README.md │ │ │ │ │ └── cf-summit.cwl │ │ │ │ ├── slurm-charliecloud │ │ │ │ │ ├── Dockerfile.clamr-lanl-x86_64 │ │ │ │ │ ├── README.md │ │ │ │ │ ├── cf-darwin.cwl │ │ │ │ │ ├── cf-no-owrite.cwl │ │ │ │ │ ├── cf.cwl │ │ │ │ │ ├── copyContainer_containerName.cwl │ │ │ │ │ ├── dockerFile_containerName.cwl │ │ │ │ │ └── dockerPull.cwl │ │ │ │ └── slurm-singularity │ │ │ │ │ ├── README.md │ │ │ │ │ └── cf-singularity.cwl │ │ │ ├── clamr-wf-singularity │ │ │ │ ├── README.md │ │ │ │ ├── clamr.cwl │ │ │ │ ├── clamr_job.json │ │ │ │ ├── clamr_job.yml │ │ │ │ ├── clamr_wf.cwl │ │ │ │ └── ffmpeg.cwl │ │ │ ├── clamr-wf-summit │ │ │ │ ├── README.md │ │ │ │ ├── clamr.cwl │ │ │ │ ├── clamr_job.json │ │ │ │ ├── clamr_job.yml │ │ │ │ ├── clamr_wf.cwl │ │ │ │ └── ffmpeg.cwl │ │ │ ├── clamr-wf-use-container │ │ │ │ ├── README.md │ │ │ │ ├── clamr.cwl │ │ │ │ ├── clamr_job.json │ │ │ │ ├── clamr_job.yml │ │ │ │ ├── clamr_wf.cwl │ │ │ │ └── ffmpeg.cwl │ │ │ ├── clamr-wf │ │ │ │ ├── README.md │ │ │ │ ├── clamr.cwl │ │ │ │ ├── clamr_job.json │ │ │ │ ├── clamr_job.yml │ │ │ │ ├── clamr_wf.cwl │ │ │ │ └── ffmpeg.cwl │ │ │ ├── lulesh-mpi-multi-file │ │ │ │ ├── lulesh.cwl │ │ │ │ ├── lulesh_job.yml │ │ │ │ └── lulesh_wf.cwl │ │ │ ├── lulesh-mpi │ │ │ │ ├── lulesh.cwl │ │ │ │ └── lulesh.yml │ │ │ ├── nwchem-mpi │ │ │ │ ├── README.md │ │ │ │ ├── nwchem.cwl │ │ │ │ ├── nwchem.yml │ │ │ │ └── nwchem_bin.cwl │ │ │ ├── pennant-build │ │ │ │ ├── Dockerfile.pennant-flux-x86_64 │ │ │ │ ├── Dockerfile.pennant-graph-x86_64 │ │ │ │ ├── graph.cwl │ │ │ │ ├── graph_pennant.py │ │ │ │ ├── graph_pennant.sh │ │ │ │ ├── pennant.yml │ │ │ │ ├── pennant_1_node.cwl │ │ │ │ ├── pennant_2_node.cwl │ │ │ │ ├── pennant_4_node.cwl │ │ │ │ ├── pennant_8_node.cwl │ │ │ │ └── pennant_wf.cwl │ │ │ ├── pennant │ │ │ │ ├── graph.cwl │ │ │ │ ├── pennant.yml │ │ │ │ ├── pennant_1_node.cwl │ │ │ │ ├── pennant_2_node.cwl │ │ │ │ ├── pennant_4_node.cwl │ │ │ │ └── pennant_wf.cwl │ │ │ ├── pytest.ini │ │ │ └── simple-workflows │ │ │ │ ├── README.md │ │ │ │ ├── cancel.cwl │ │ │ │ └── grep-wordcount │ │ │ │ ├── README.md │ │ │ │ ├── gc-nc.cwl │ │ │ │ ├── gc.cwl │ │ │ │ └── lorem.txt │ │ └── cwl_validation │ │ │ ├── README.md │ │ │ ├── builder │ │ │ ├── dockerFile.cwl │ │ │ └── dockerPull.cwl │ │ │ ├── grep-wordcount │ │ │ ├── gc.cwl │ │ │ └── lorem.txt │ │ │ ├── helloworld.cwl │ │ │ ├── helloworld_input.yaml │ │ │ └── ml-workflow │ │ │ ├── example-1 │ │ │ ├── DAG_example-1.png │ │ │ ├── README.md │ │ │ ├── add.cwl │ │ │ ├── add_multiply_example_workflow.cwl │ │ │ ├── add_step1.py │ │ │ ├── isolated test.cwl │ │ │ ├── job.yaml │ │ │ ├── multiply.cwl │ │ │ ├── multiply_step2.py │ │ │ └── workflow_generater_python.py │ │ │ └── machine_learning │ │ │ ├── README.md │ │ │ ├── decision_tree.py │ │ │ ├── decision_tree_output.txt │ │ │ ├── decision_tree_tool.cwl │ │ │ ├── expectedValue.txt │ │ │ ├── hiring1.txt │ │ │ ├── linear_regress_output.txt │ │ │ ├── 
linear_regression.py │ │ │ ├── machinelearning_pipeline.cwl │ │ │ ├── out.PNG │ │ │ ├── predict_code.py │ │ │ ├── predict_tool.cwl │ │ │ ├── read_dataset.py │ │ │ ├── read_dataset_tool.cwl │ │ │ └── regress_tool.cwl │ └── dockerfiles │ │ ├── Dockerfile.builder_demo │ │ ├── Dockerfile.clamr-ffmpeg │ │ ├── Dockerfile.clamr-lanl-x86_64 │ │ ├── Dockerfile.clamr-ppc64le │ │ ├── Dockerfile.comd-flux-x86_64 │ │ ├── Dockerfile.deb9ompi-x86_64 │ │ ├── Dockerfile.lulesh-x86_64 │ │ ├── Dockerfile.neo4j │ │ ├── Dockerfile.nwchem-x86_64 │ │ ├── Dockerfile.pennant-flux-x86_64 │ │ ├── Dockerfile.vaspompi-x86_64 │ │ ├── README.md │ │ ├── comd-pmix-support │ │ ├── Dockerfile.comd-x86_64-wpmix │ │ ├── Dockerfile.debian │ │ ├── Dockerfile.openmpi-3.1.5 │ │ ├── README.md │ │ └── dont-init-ucx-on-intel-cray.patch │ │ └── pennant-graph │ │ ├── Dockerfile.pennant-graph-x86_64 │ │ ├── graph_pennant.py │ │ └── graph_pennant.sh ├── remote │ ├── __init__.py │ └── remote.py ├── scheduler │ ├── README.md │ ├── __init__.py │ ├── algorithms.py │ ├── resource_allocation.py │ ├── scheduler.py │ ├── scheduler.yaml │ ├── serializable.py │ └── task.py ├── task_manager │ ├── __init__.py │ ├── background.py │ ├── task_actions.py │ ├── task_manager.py │ ├── task_submit.py │ └── utils.py ├── tests │ ├── 42.tgz │ ├── README.md │ ├── cf.cwl │ ├── clamr-wf.tgz │ ├── clamr-wf │ │ ├── clamr.cwl │ │ ├── clamr_job.json │ │ ├── clamr_job.yml │ │ ├── clamr_wf.cwl │ │ ├── ffmpeg.cwl │ │ ├── mv_script.cwl │ │ └── mv_script.sh │ ├── cwl_files │ │ ├── cat-grep-tar.cwl │ │ ├── cat-grep-tar.yml │ │ ├── clamr.cwl │ │ ├── clamr.yml │ │ ├── comd.cwl │ │ ├── comd.yml │ │ ├── task-req.cwl │ │ └── task-req.yml │ ├── gdb.py │ ├── mocks.py │ ├── test_bee_client.py │ ├── test_charliecloud_driver.py │ ├── test_cloud.py │ ├── test_config_driver.py │ ├── test_config_utils.py │ ├── test_config_validator.py │ ├── test_container_path.py │ ├── test_cwl.py │ ├── test_cwl_workflow.py │ ├── test_db_client.py │ ├── test_db_sched.py │ ├── test_db_tm.py │ ├── test_neo4j_cypher.py │ ├── test_parser.py │ ├── test_remote.py │ ├── test_remote_client.py │ ├── test_scheduler.py │ ├── test_scheduler_resource_allocation.py │ ├── test_scheduler_rest.py │ ├── test_slurm_worker.py │ ├── test_tm.py │ ├── test_wf_interface.py │ ├── test_wf_manager.py │ └── test_wf_update.py └── wf_manager │ ├── __init__.py │ ├── resources │ ├── __init__.py │ ├── wf_actions.py │ ├── wf_list.py │ ├── wf_update.py │ └── wf_utils.py │ └── wf_manager.py ├── ci ├── README.md ├── batch_scheduler.sh ├── bee_config.sh ├── bee_install.sh ├── deps_install.sh ├── docs.sh ├── env.sh ├── flux_install.sh ├── inner_integration_test.sh ├── integration_test.py ├── integration_test.sh ├── slurm_start.sh ├── test_workflows │ ├── build-failure │ │ ├── Dockerfile.build-failure │ │ ├── input.yml │ │ └── workflow.cwl │ ├── checkpoint-too-long │ │ ├── Dockerfile │ │ ├── input.yml │ │ ├── step0.cwl │ │ └── workflow.cwl │ ├── clamr-wf-checkpoint │ │ ├── Dockerfile.clamr-ffmpeg │ │ ├── README.md │ │ ├── clamr.cwl │ │ ├── clamr_job.json │ │ ├── clamr_job.yml │ │ ├── clamr_job_long.yml │ │ ├── clamr_wf.cwl │ │ └── ffmpeg.cwl │ ├── comd-mpi │ │ ├── Dockerfile.comd-x86_64 │ │ ├── comd.cwl │ │ ├── comd_job.yml │ │ └── comd_wf.cwl │ ├── failure-dependent-tasks │ │ ├── input.yml │ │ └── workflow.cwl │ ├── missing-input │ │ ├── input.yml │ │ └── workflow.cwl │ ├── partial-fail │ │ ├── cat.cwl │ │ ├── grep0.cwl │ │ ├── grep1.cwl │ │ ├── input.yml │ │ ├── printf.cwl │ │ └── workflow.cwl │ ├── pre-post-script │ │ ├── input.yml │ │ 
├── post.sh │ │ ├── pre.sh │ │ └── workflow.cwl │ └── shell_validate │ │ ├── input.yml │ │ ├── post.sh │ │ ├── pre.sh │ │ └── workflow.cwl └── unit_tests.sh ├── coverage.svg ├── docs ├── README.md ├── configuration.md ├── html │ └── .buildinfo ├── neo4j │ ├── .gitignore │ ├── README.md │ ├── ch-grow-files │ │ ├── Dockerfile │ │ ├── ch-run-neo4j.sh │ │ └── environment │ └── img │ │ ├── 5node.png │ │ ├── cypher_pane.png │ │ ├── graph_pane.png │ │ └── neo4j_browser.png ├── poetry_old.md ├── poetry_tutorial │ └── README.md ├── sphinx │ ├── .gitignore │ ├── Makefile │ ├── advanced_usage.rst │ ├── bee_cwl.rst │ ├── commands.rst │ ├── conf.py │ ├── contribute.rst │ ├── development.rst │ ├── error_logs.rst │ ├── examples.rst │ ├── images │ │ ├── bee-viz.png │ │ ├── cat-dag.png │ │ ├── clamr-step.png │ │ ├── logos │ │ │ ├── BEEGrey.jpg │ │ │ ├── BEEGrey.png │ │ │ ├── BEEYellow.jpg │ │ │ ├── BEEYellow.png │ │ │ ├── BEE_Symbol.jpg │ │ │ └── BEE_Symbol.png │ │ └── src │ │ │ └── clamr-step.pptx │ ├── index.rst │ ├── installation.rst │ ├── make.bat │ ├── rest_api.rst │ └── visualization.rst ├── summit.md └── unittest_tutorial │ ├── README.md │ ├── __init__.py │ ├── test_fixtures.py │ ├── test_numbers.py │ ├── test_skip.py │ └── test_strings.py ├── examples ├── .gitignore ├── cat-grep-tar │ ├── cat.cwl │ ├── grep0.cwl │ ├── grep1.cwl │ ├── input.yml │ ├── lorem.txt │ ├── tar.cwl │ └── workflow.cwl ├── clamr-checkpoint-cwl │ ├── Dockerfile.clamr-ffmpeg │ └── clamr_checkpoint.py └── clamr-ffmpeg-build │ ├── Dockerfile.clamr-ffmpeg │ ├── README.md │ ├── clamr.cwl │ ├── clamr_job.json │ ├── clamr_job.yml │ ├── clamr_wf.cwl │ └── ffmpeg.cwl ├── poetry.lock ├── pyproject.toml └── setup.cfg /.github/workflows/docs.yml: -------------------------------------------------------------------------------- 1 | name: Build Docs 2 | 3 | on: 4 | workflow_dispatch: {} 5 | push: 6 | branches: [main, develop] 7 | pull_request: 8 | types: [opened, synchronize, edited, labeled, unlabeled] 9 | branches: [main, develop] 10 | 11 | jobs: 12 | docs: 13 | if: ${{ !(contains(github.event.pull_request.labels.*.name, 'WIP (no-ci)')) && !(contains(github.event.pull_request.labels.*.name, 'WIP (lint-only)')) }} 14 | name: Build Docs 15 | runs-on: ubuntu-latest 16 | steps: 17 | - uses: actions/checkout@v4 18 | - name: Install BEE and Build Docs 19 | run: ./ci/docs.sh 20 | -------------------------------------------------------------------------------- /.github/workflows/publish-docs.yml: -------------------------------------------------------------------------------- 1 | # Based on https://github.com/actions/starter-workflows/blob/main/pages/static.yml 2 | name: Publish docs 3 | 4 | on: 5 | push: 6 | branches: [main] 7 | 8 | # Needed for publishing to Github Pages 9 | permissions: 10 | contents: read 11 | pages: write 12 | id-token: write 13 | 14 | concurrency: 15 | group: "pages" 16 | cancel-in-progress: true 17 | 18 | jobs: 19 | publish: 20 | environment: 21 | name: github-pages 22 | url: ${{ steps.deployment.outputs.page_url }} 23 | runs-on: ubuntu-latest 24 | steps: 25 | - uses: actions/checkout@v4 26 | - name: BEE Install and Build Docs 27 | run: | 28 | ./ci/docs.sh 29 | - name: Upload 30 | uses: actions/upload-pages-artifact@v3 31 | with: 32 | path: docs/sphinx/_build/html 33 | - name: Publish 34 | id: deployment 35 | uses: actions/deploy-pages@v4 36 | -------------------------------------------------------------------------------- /.github/workflows/pylint.yml: 
-------------------------------------------------------------------------------- 1 | name: Pylint Lint 2 | 3 | on: 4 | # For manual exec 5 | # (https://github.blog/changelog/2020-07-06-github-actions-manual-triggers-with-workflow_dispatch/) 6 | workflow_dispatch: {} 7 | push: 8 | branches: [main, develop] 9 | pull_request: 10 | types: [opened, synchronize, edited, labeled, unlabeled] 11 | branches: [main, develop] 12 | 13 | jobs: 14 | pylint: 15 | if: ${{ !(contains(github.event.pull_request.labels.*.name, 'WIP (no-ci)')) }} 16 | name: Pylint Lint 17 | runs-on: ubuntu-latest 18 | steps: 19 | - uses: actions/checkout@v4 20 | - name: Lint 21 | run: | 22 | pip install pylint>=3.2.7 2>&1 >/dev/null 23 | pylint --rcfile=setup.cfg beeflow/ 24 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *~ 2 | __pycache__ 3 | poetry.lock 4 | *.iso 5 | *.pyc 6 | *.egg-info 7 | *.out 8 | *.tgz 9 | *.tar.gz 10 | *.log 11 | .python-version 12 | .DS_Store 13 | .idea 14 | .idea/vcs.xml 15 | .vscode 16 | .env 17 | .venv 18 | docs/sphinx/_build 19 | src/beeflow/enhanced_client/node_modules 20 | **/dist 21 | _build 22 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: local 3 | hooks: 4 | - id: pylint 5 | name: pylint 6 | entry: pylint 7 | language: system 8 | types: [python] 9 | args: 10 | [ 11 | "-rn", # Only display messages 12 | "-sn", # Don't display the score 13 | "--rcfile=setup.cfg", # Link to the config file 14 | ] 15 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | build: 4 | os: ubuntu-20.04 5 | tools: 6 | python: "3.9" 7 | 8 | python: 9 | install: 10 | - method: pip 11 | path: . -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2019,Triad National Security,LLC 2 | All rights reserved. 3 | Copyright 2019.Triad National Security,LLC.This software was produced under U.S.Government contract No.89233218CNA000001 for Los Alamos National Laboratory (LANL),which is operated by Triad National Security,LLC for the U.S.Department of Energy.The U.S.Government has rights to use,reproduce,and distribute this software.NEITHER THE GOVERNMENT NOR LOS ALAMOS NATIONAL SECURITY,LLC MAKES ANY WARRANTY,EXPRESS OR IMPLIED,OR ASSUMES ANY LIABILITY FOR THE USE OF THIS SOFTWARE.If software is modified to produce derivative works,such modified software should be clearly marked,so as not to confuse it with the version available from LANL. 4 | 5 | Additionally,redistribution and use in source and binary forms,with or without modification,are permitted provided that the following conditions are met: 6 | 1. Redistributions of source code must retain the above copyright notice,this list of conditions and the following disclaimer. 7 | 2. Redistributions in binary form must reproduce the above copyright notice,this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 8 | 3. 
Neither the name of Triad National Security,LLC, Los Alamos National Laboratory,LANL,the U.S.Government,nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 9 | -------------------------------------------------------------------------------- /beeflow/__init__.py: -------------------------------------------------------------------------------- 1 | """beeflow module.""" 2 | -------------------------------------------------------------------------------- /beeflow/client/README.md: -------------------------------------------------------------------------------- 1 | # BEE Client 2 | The client directory contains all of the client-only software necessary 3 | to launch and monitor BEE workflows. 4 | -------------------------------------------------------------------------------- /beeflow/client/__init__.py: -------------------------------------------------------------------------------- 1 | """beeflow client module.""" 2 | -------------------------------------------------------------------------------- /beeflow/common/README.md: -------------------------------------------------------------------------------- 1 | # BEE Common 2 | The common directory contains all of the software shared by the BEE client 3 | and server. Most (all?) of this software is intended to be classes imported 4 | into other tools and is not expected to run in a standalone fashion. 5 | 6 | -------------------------------------------------------------------------------- /beeflow/common/__init__.py: -------------------------------------------------------------------------------- 1 | """beeflow common module.""" 2 | -------------------------------------------------------------------------------- /beeflow/common/api.py: -------------------------------------------------------------------------------- 1 | """BeeApi wrapper around Flask-Restful for handling exceptions.""" 2 | import traceback 3 | from flask import make_response, jsonify 4 | from flask_restful import Api 5 | 6 | 7 | class BeeApi(Api): 8 | """Wrapper around Flask-Restful's API to catch exceptions.""" 9 | 10 | def handle_error(self, e): # pylint: disable=W0613 # conflict on naming in base class vs. following convention 11 | """Handle an error or exception.""" 12 | return make_response(jsonify(error=traceback.format_exc()), 500) 13 | -------------------------------------------------------------------------------- /beeflow/common/build/__init__.py: -------------------------------------------------------------------------------- 1 | """beeflow common build module.""" 2 | -------------------------------------------------------------------------------- /beeflow/common/build/utils.py: -------------------------------------------------------------------------------- 1 | """Container build utility code.""" 2 | import jsonpickle 3 | 4 | 5 | def arg2task(task_arg): 6 | """Convert JSON encoded task to Task object. 7 | 8 | The build driver will expect a Task object, and the build 9 | interface starts with a JSON representation of the Task object. 10 | """ 11 | return jsonpickle.decode(task_arg) 12 | 13 | 14 | def task2arg(task): 15 | """Convert Task object to JSON encoded string. 16 | 17 | The build interface needs to pass Task data on the command line, 18 | because each compute node needs to understand the Task description. 19 | JSON format is a convenient way to describe the Task object at the 20 | command line. 
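Illustrative round trip (an assumption about intended use, not stated in the original docstring): arg2task(task2arg(task)) should reconstruct an equivalent Task object, since jsonpickle.decode() inverts jsonpickle.encode().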
21 | """ 22 | return jsonpickle.encode(task) 23 | 24 | 25 | class ContainerBuildError(Exception): 26 | """Cotnainer build error class.""" 27 | -------------------------------------------------------------------------------- /beeflow/common/cli.py: -------------------------------------------------------------------------------- 1 | """Common CLI code used by different BEE scripts.""" 2 | import click 3 | 4 | 5 | class NaturalOrderGroup(click.Group): 6 | """Natural ordering class for using with CLI code.""" 7 | 8 | def list_commands(self, ctx): 9 | """List the commands in order.""" 10 | return self.commands.keys() 11 | -------------------------------------------------------------------------------- /beeflow/common/cloud/__init__.py: -------------------------------------------------------------------------------- 1 | """Cloud init module.""" 2 | 3 | # Disable W0611: These are meant to be used by external code 4 | # pylint:disable=W0611 5 | 6 | from beeflow.common.cloud import chameleoncloud 7 | from beeflow.common.cloud import openstack 8 | from beeflow.common.cloud import provider 9 | from beeflow.common.cloud import google 10 | from beeflow.common.cloud.constants import BEE_USER 11 | from beeflow.common.cloud.cloud import CloudError 12 | 13 | 14 | providers = { 15 | 'google': google.GoogleProvider, 16 | 'chameleoncloud': chameleoncloud.ChameleoncloudProvider, 17 | 'openstack': openstack.OpenstackProvider, 18 | 'mock': provider.MockProvider, # Provider to be used for testing 19 | } 20 | 21 | 22 | def get_provider(name, **kwargs): 23 | """Return a Provider object for the given provider.""" 24 | if name in providers: 25 | return providers[name](**kwargs) 26 | 27 | raise RuntimeError(f'Invalid provider "{name}"') 28 | -------------------------------------------------------------------------------- /beeflow/common/cloud/chameleoncloud.py: -------------------------------------------------------------------------------- 1 | """Chameleon provider code.""" 2 | import openstack 3 | 4 | from beeflow.common.cloud import provider 5 | 6 | 7 | class ChameleoncloudProvider(provider.Provider): 8 | """Chameleoncloud provider class.""" 9 | 10 | def __init__(self, stack_name=None, **_kwargs): 11 | """Chameleoncloud provider constructor.""" 12 | self._stack_name = stack_name 13 | self._api = openstack.connect() 14 | 15 | def create_from_template(self, template_file): 16 | """Create from a template file.""" 17 | raise RuntimeError( 18 | 'create_from_template() is not implemented for Chameleoncloud. 
' 19 | 'Use the Horizon interface instead' 20 | ) 21 | 22 | def get_ext_ip_addr(self, node_name): # pylint: disable=W0613 23 | """Get the external IP address of the node, if it has one.""" 24 | if self._stack_name is not None: 25 | stack = self._api.get_stack(self._stack_name) 26 | if stack is None: 27 | raise RuntimeError(f'Invalid stack {self._stack_name}') 28 | outputs = {output['output_key']: output['output_value'] for output in stack['outputs']} 29 | if 'head_node_login_ip' in outputs: 30 | return outputs['head_node_login_ip'] 31 | return None 32 | -------------------------------------------------------------------------------- /beeflow/common/cloud/cloud.py: -------------------------------------------------------------------------------- 1 | """BEE Cloud class.""" 2 | 3 | 4 | class CloudError(Exception): 5 | """Cloud error class.""" 6 | 7 | def __init__(self, msg): 8 | """Cloud error constructor.""" 9 | self.msg = msg 10 | -------------------------------------------------------------------------------- /beeflow/common/cloud/constants.py: -------------------------------------------------------------------------------- 1 | """Cloud constants.""" 2 | 3 | 4 | BEE_USER = 'bee' 5 | -------------------------------------------------------------------------------- /beeflow/common/cloud/google.py: -------------------------------------------------------------------------------- 1 | """Google provider code.""" 2 | import time 3 | import googleapiclient.discovery 4 | import yaml 5 | 6 | from beeflow.common.cloud import provider 7 | 8 | 9 | class GoogleProvider(provider.Provider): 10 | """Google provider class.""" 11 | 12 | def __init__(self, project, zone, **kwargs): 13 | """Google provider constructor.""" 14 | self.params = kwargs 15 | self.zone = zone 16 | self.project = project 17 | # Set defaults here for now 18 | self._api = googleapiclient.discovery.build('compute', 'v1') 19 | 20 | def get_ext_ip_addr(self, node_name): 21 | """Get the external IP of this node (or None if no IP).""" 22 | res = self._api.instances().get(instance=node_name, 23 | project=self.project, 24 | zone=self.zone).execute() 25 | try: 26 | return res['networkInterfaces'][0]['accessConfigs'][0]['natIP'] 27 | except (IndexError, KeyError): 28 | return None 29 | 30 | def setup_cloud(self, config): 31 | """Set up the cloud based on the config information.""" 32 | # Load the YAML data 33 | config = yaml.load(config, Loader=yaml.Loader) 34 | # This just creates instances one-by-one. There may be a better API call 35 | # to just create everything at once. 
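# Illustrative shape of the expected YAML config (an assumption inferred from the loop below,
# not documented in this module); each entry is a request body accepted by compute.instances().insert():
#   instances:
#     - name: bee-head-node
#       machineType: zones/ZONE/machineTypes/n1-standard-1
#     - name: bee-worker-0
#       machineType: zones/ZONE/machineTypes/n1-standard-1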
36 | for instance in config['instances']: 37 | call = self._api.instances().insert(project=self.project, 38 | zone=self.zone, body=instance) 39 | res = call.execute() 40 | print(res) 41 | time.sleep(2) 42 | -------------------------------------------------------------------------------- /beeflow/common/cloud/openstack.py: -------------------------------------------------------------------------------- 1 | """OpenStack provider module.""" 2 | import openstack 3 | from beeflow.common.cloud import provider 4 | from beeflow.common.cloud.cloud import CloudError 5 | 6 | 7 | class OpenstackProvider(provider.Provider): 8 | """OpenStack provider class.""" 9 | 10 | def __init__(self, stack_name, **_kwargs): 11 | """Construct a new OpenStack provider class.""" 12 | self._cloud = openstack.connect() 13 | self._stack_name = stack_name 14 | 15 | def get_ext_ip_addr(self, node_name): 16 | """Get external IP address of Task Manager node.""" 17 | node = self._cloud.get_server(node_name) 18 | if node is None: 19 | raise CloudError('Cannot retrieve node/IP information. Is `node_name` set correctly?') 20 | return node.accessIPv4 21 | 22 | def setup_cloud(self, config): 23 | """Set up the cloud based on config data.""" 24 | # Just write out the template to the pwd for right now 25 | template_file = './openstack.yaml' 26 | with open(template_file, 'w', encoding='utf-8') as fp: 27 | fp.write(config) 28 | self._cloud.create_stack(self._stack_name, template_file=template_file, wait=True) 29 | -------------------------------------------------------------------------------- /beeflow/common/cloud/provider.py: -------------------------------------------------------------------------------- 1 | """Provider class, representing a specific Cloud provider.""" 2 | import abc 3 | 4 | 5 | class Provider(abc.ABC): 6 | """Provider Abstract Base Class.""" 7 | 8 | @abc.abstractmethod 9 | def get_ext_ip_addr(self, node_name): 10 | """Get the external IP address of the node, if it has one.""" 11 | 12 | @abc.abstractmethod 13 | def setup_cloud(self, config): 14 | """Set up the cloud based on the config data.""" 15 | 16 | 17 | class MockProvider(Provider): 18 | """Mock provider class for testing.""" 19 | 20 | def __init__(self, **kwargs): 21 | """Construct a mock provider.""" 22 | 23 | def get_ext_ip_addr(self, node_name): 24 | """Get the external IP address of the node, if it has one.""" 25 | return '100.100.100.100' 26 | 27 | def setup_cloud(self, config): 28 | """Set up the cloud based on the config data.""" 29 | -------------------------------------------------------------------------------- /beeflow/common/config_utils.py: -------------------------------------------------------------------------------- 1 | """Functions used by the config classes.""" 2 | 3 | import os 4 | import shutil 5 | 6 | 7 | def filter_and_validate(config, validator): 8 | """Filter and validate the configuration file.""" 9 | default_keys = list(config['DEFAULT']) 10 | config = {sec_name: {key: config[sec_name][key] for key in config[sec_name] 11 | if sec_name == 'DEFAULT' or key not in default_keys} 12 | for sec_name in config} 13 | # Validate the config 14 | return validator.validate(config) 15 | 16 | 17 | def write_config(file_name, sections): 18 | """Write the configuration file.""" 19 | try: 20 | with open(file_name, 'w', encoding='utf-8') as fp: 21 | print('# BEE Configuration File', file=fp) 22 | for sec_name, section in sections.items(): 23 | if not section: 24 | continue 25 | print(file=fp) 26 | print(f'[{sec_name}]', file=fp) 27 | for opt_name, 
value in section.items(): 28 | print(f'{opt_name} = {value}', file=fp) 29 | except FileNotFoundError: 30 | print('Configuration file does not exist!') 31 | 32 | 33 | def backup(fname): 34 | """Backup the configuration file.""" 35 | i = 1 36 | backup_path = f'{fname}.{i}' 37 | while os.path.exists(backup_path): 38 | i += 1 39 | backup_path = f'{fname}.{i}' 40 | shutil.copy(fname, backup_path) 41 | print(f'Saved old config to "{backup_path}".') 42 | print() 43 | -------------------------------------------------------------------------------- /beeflow/common/container_path.py: -------------------------------------------------------------------------------- 1 | """Path conversion code.""" 2 | import os 3 | 4 | 5 | class PathError(Exception): 6 | """Path error class.""" 7 | 8 | def __init__(self, *args): 9 | """Construct a path error object.""" 10 | self.args = args 11 | 12 | 13 | def _components(path): 14 | """Convert a path into a list of components.""" 15 | if not os.path.isabs(path): 16 | raise PathError('Bind mounts and workdir paths must be absolute') 17 | path = os.path.normpath(path) 18 | return [comp for comp in path.split('/') if comp] 19 | 20 | 21 | def convert_path(path, bind_mounts): 22 | """Convert a path outside the container to a path inside the container.""" 23 | comps = _components(path) 24 | for outside, inside in bind_mounts.items(): 25 | outside = _components(outside) 26 | inside = _components(inside) 27 | if comps[:len(outside)] == outside: 28 | base = comps[len(outside):] 29 | inside.extend(base) 30 | new_path = '/'.join(inside) 31 | return f'/{new_path}' 32 | return path 33 | -------------------------------------------------------------------------------- /beeflow/common/crt/__init__.py: -------------------------------------------------------------------------------- 1 | """beeflow common crt module.""" 2 | -------------------------------------------------------------------------------- /beeflow/common/crt/crt_driver.py: -------------------------------------------------------------------------------- 1 | """Abstract base class for crt_driver, the Container Runtime and drivers. 2 | 3 | Builds text for job to run task in a Container 4 | """ 5 | from abc import ABC, abstractmethod 6 | 7 | 8 | class ContainerRuntimeResult: 9 | """Result to be used for returning to the worker code.""" 10 | 11 | def __init__(self, env_code, pre_commands, main_command, post_commands): 12 | """Construct the result.""" 13 | self.env_code = env_code 14 | self.pre_commands = pre_commands 15 | self.main_command = main_command 16 | self.post_commands = post_commands 17 | 18 | 19 | class CommandType: 20 | """Command types.""" 21 | 22 | DEFAULT = 'default' 23 | ONE_PER_NODE = 'one-per-node' 24 | ENV = 'env' 25 | 26 | 27 | class Command: 28 | """Command in a batch script.""" 29 | 30 | def __init__(self, args, type_=CommandType.DEFAULT): 31 | """Construct the command.""" 32 | self.args = args 33 | self.type = type_ 34 | 35 | 36 | class ContainerRuntimeDriver(ABC): 37 | """ContainerRuntimeDriver interface for generic container runtime.""" 38 | 39 | @abstractmethod 40 | def run_text(self, task): 41 | """Create commands for job using the container runtime. 42 | 43 | Returns a tuple (pre-commands, main-command, post-commands). 44 | :param task: instance of Task 45 | :rtype: tuple of (list of list of str, list of str, list of list of str) 46 | """ 47 | 48 | @abstractmethod 49 | def build_text(self, userconfig, task): 50 | """Create text for builder pre-run using the container runtime. 
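Concrete drivers in beeflow.common.crt (CharliecloudDriver, SingularityDriver) implement this interface; a new container runtime is expected to subclass ContainerRuntimeDriver and provide both run_text() and build_text().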
51 | 52 | :param task: instance of Task 53 | :rtype: string 54 | """ 55 | -------------------------------------------------------------------------------- /beeflow/common/crt_interface.py: -------------------------------------------------------------------------------- 1 | """Mid-level interface for container runtime system. 2 | 3 | Delegates the writing of the text for job script to an instance of a subclass 4 | of the abstract base class 'ContainerRuntimeDriver'. 5 | Default: 'CharliecloudDriver' class. 6 | """ 7 | 8 | # Disable module imported but unused error. No way to know which crt will be needed 9 | # pylint:disable=W0611 10 | 11 | from beeflow.common.config_driver import BeeConfig as bc 12 | from beeflow.common.crt.charliecloud_driver import CharliecloudDriver 13 | from beeflow.common.crt.singularity_driver import SingularityDriver 14 | 15 | 16 | class ContainerRuntimeInterface: 17 | """Interface for the container runtime. 18 | 19 | Requires an implemented subclass of ContainerRuntimeDriver to function. 20 | """ 21 | 22 | def __init__(self, crt_driver=CharliecloudDriver): 23 | """Initialize the CRT interface with a runtime, CharliecloudDriver by default. 24 | 25 | :param crt_driver: container runtime driver (default: CharliecloudDriver) 26 | :type crt_driver: subclass of ContainerRuntimeDriver 27 | """ 28 | self._crt_driver = crt_driver() 29 | 30 | def run_text(self, task): 31 | """Create text required to run the task using the container_runtime. 32 | 33 | :param task: instance of Task 34 | :rtype: string 35 | """ 36 | return self._crt_driver.run_text(task) 37 | 38 | def build_text(self, userconfig, task): 39 | """Create text required to build a task environment. 40 | 41 | :param task: instance of Task 42 | :param userconfig: path to userconfig file 43 | :rtype: string 44 | """ 45 | return self._crt_driver.build_text(userconfig, task) 46 | -------------------------------------------------------------------------------- /beeflow/common/cwl/__init__.py: -------------------------------------------------------------------------------- 1 | """beeflow common cwl module.""" 2 | -------------------------------------------------------------------------------- /beeflow/common/cwl/examples/comd_pre.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo "Starting COMD Workflow!" 
3 | -------------------------------------------------------------------------------- /beeflow/common/db/__init__.py: -------------------------------------------------------------------------------- 1 | """beeflow common db module.""" 2 | -------------------------------------------------------------------------------- /beeflow/common/deps/__init__.py: -------------------------------------------------------------------------------- 1 | """beeflow common deps module.""" 2 | -------------------------------------------------------------------------------- /beeflow/common/deps/celery_manager.py: -------------------------------------------------------------------------------- 1 | """Module for celery configuration.""" 2 | from beeflow.wf_manager.wf_manager import create_app 3 | 4 | 5 | flask_app = create_app() 6 | celery_app = flask_app.extensions['celery'] 7 | -------------------------------------------------------------------------------- /beeflow/common/deps/redis_manager.py: -------------------------------------------------------------------------------- 1 | """Module contains the code for launching redis subprocess.""" 2 | import os 3 | import subprocess 4 | 5 | from beeflow.common import paths 6 | 7 | 8 | def start(log): 9 | """Start redis.""" 10 | data_dir = 'data' 11 | os.makedirs(os.path.join(paths.redis_root(), data_dir), exist_ok=True) 12 | conf_name = 'redis.conf' 13 | container_path = paths.redis_container() 14 | # Dump the config 15 | conf_path = os.path.join(paths.redis_root(), conf_name) 16 | if not os.path.exists(conf_path): 17 | with open(conf_path, 'w', encoding='utf-8') as fp: 18 | # Don't listen on TCP 19 | print('port 0', file=fp) 20 | print('dir', os.path.join('/mnt', data_dir), file=fp) 21 | print('maxmemory 2mb', file=fp) 22 | print('unixsocket', os.path.join('/mnt', paths.redis_sock_fname()), file=fp) 23 | print('unixsocketperm 700', file=fp) 24 | cmd = [ 25 | 'ch-run', 26 | f'--bind={paths.redis_root()}:/mnt', 27 | container_path, 28 | 'redis-server', 29 | '/mnt/redis.conf', 30 | ] 31 | # Ran into a strange "Failed to configure LOCALE for invalid locale name." 32 | # from Redis, so setting LANG=C. This could have consequences for UTF-8 33 | # strings. 34 | env = dict(os.environ) 35 | env['LANG'] = 'C' 36 | env['LC_ALL'] = 'C' 37 | return subprocess.Popen(cmd, env=env, stdout=log, stderr=log) 38 | -------------------------------------------------------------------------------- /beeflow/common/gdb/__init__.py: -------------------------------------------------------------------------------- 1 | """beeflow common gdb module.""" 2 | -------------------------------------------------------------------------------- /beeflow/common/integration/__init__.py: -------------------------------------------------------------------------------- 1 | """beeflow common integration module.""" 2 | -------------------------------------------------------------------------------- /beeflow/common/log.py: -------------------------------------------------------------------------------- 1 | """Logging interface for BEE.""" 2 | import logging 3 | import os 4 | 5 | 6 | # Set the default log level (BEE_LOG_LEVEL will be passed in by beeflow/cli.py) 7 | LEVEL = os.getenv('BEE_LOG_LEVEL') 8 | LEVEL = 'DEBUG' if LEVEL is None else LEVEL 9 | 10 | 11 | def setup(name): 12 | """Set up and return logger. 
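Illustrative usage (an assumption; callers are not shown in this module):
    log = setup(__name__)
    log.info('component started')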
13 | 14 | :param name: Name to be used for logger (best would be __name__) 15 | :type level: String 16 | """ 17 | log = logging.getLogger(name) 18 | log.setLevel(LEVEL) 19 | handler = logging.StreamHandler() 20 | handler.setLevel(logging.DEBUG) 21 | handler.setFormatter(logging.Formatter('[%(asctime)s] %(name)s:%(funcName)s(): %(msg)s')) 22 | log.addHandler(handler) 23 | return log 24 | -------------------------------------------------------------------------------- /beeflow/common/parser/.gitignore: -------------------------------------------------------------------------------- 1 | bee_history.dat 2 | -------------------------------------------------------------------------------- /beeflow/common/parser/__init__.py: -------------------------------------------------------------------------------- 1 | """Init code for parser.""" 2 | 3 | from beeflow.common.parser.parser import CwlParser, CwlParseError 4 | -------------------------------------------------------------------------------- /beeflow/common/states.py: -------------------------------------------------------------------------------- 1 | """Workflow and Task state values.""" 2 | 3 | 4 | class WorkflowStates: 5 | """Workflow status values.""" 6 | 7 | INITIALIZING = 'INITIALIZING' 8 | PENDING = 'PENDING' 9 | RUNNING = 'RUNNING' 10 | PAUSED = 'PAUSED' 11 | RESUME = 'RESUME' 12 | CANCELLED = 'CANCELLED' 13 | 14 | 15 | class TaskStates: 16 | """Task status values.""" 17 | 18 | PENDING = 'PENDING' 19 | RUNNING = 'RUNNING' 20 | COMPLETED = 'COMPLETED' 21 | FAILED = 'FAILED' 22 | PAUSED = 'PAUSED' 23 | CANCELLED = 'CANCELLED' 24 | -------------------------------------------------------------------------------- /beeflow/common/tab_completion.py: -------------------------------------------------------------------------------- 1 | """Tab completion code for client terminal input.""" 2 | import readline 3 | import contextlib 4 | 5 | 6 | @contextlib.contextmanager 7 | def filepath_completion(): 8 | """Tab complete files and pathnames within a context.""" 9 | old_delims = readline.get_completer_delims() 10 | readline.set_completer_delims(' \n\t') 11 | readline.parse_and_bind('tab: complete') 12 | try: 13 | yield 14 | finally: 15 | # Reset the completer 16 | readline.set_completer_delims(old_delims) 17 | readline.parse_and_bind('') 18 | -------------------------------------------------------------------------------- /beeflow/common/wf_profiler.py: -------------------------------------------------------------------------------- 1 | """Workflow profiling code.""" 2 | import json 3 | import time 4 | 5 | 6 | class WorkflowProfiler: 7 | """Class for profiling a single workflow's execution.""" 8 | 9 | def __init__(self, workflow_name, output_path): 10 | """Construct the workflow profiler class for a workflow.""" 11 | self.workflow_name = workflow_name 12 | self.output_path = output_path 13 | self._state_changes = [] 14 | self._scheduling_results = [] 15 | 16 | def add_state_change(self, task, next_state): 17 | """Save a change of state for a task (at each task state change).""" 18 | self._state_changes.append({ 19 | 'task_id': task.id, 20 | 'task_name': task.name, 21 | # State is not stored in the task object 22 | # 'previous_state': task.state, 23 | 'next_state': next_state, 24 | 'timestamp': int(time.time()), 25 | }) 26 | 27 | def add_scheduling_results(self, tasks, resources, allocations): 28 | """Add scheduling results (given the set of available resources).""" 29 | self._scheduling_results.append({ 30 | 'tasks': tasks, 31 | 'resources': resources, 
32 | 'allocations': dict(allocations), 33 | 'timestamp': int(time.time()), 34 | }) 35 | 36 | def save(self): 37 | """Save the workflow results (run on workflow completion).""" 38 | with open(self.output_path, 'w', encoding='utf-8') as fp: 39 | json.dump({ 40 | 'state_changes': self._state_changes, 41 | 'scheduling_results': self._scheduling_results, 42 | }, fp=fp, indent=4) 43 | -------------------------------------------------------------------------------- /beeflow/common/worker/README.md: -------------------------------------------------------------------------------- 1 | At this point for any tasks that are submitted as jobs, the scripts are left at 2 | ~/.beeflow/worker/work-... see ToDo.md 3 | 4 | A job template ~/.beeflow/worker/job.template for specific batch type 5 | requirements for the user (we may get rid of this once we fix requirements). 6 | 7 | When a task is submitted task.name and task.id are substituted for 8 | $name and $id in the template. 9 | 10 | An example of job.template: 11 | 12 | ``` 13 | #! /bin/bash 14 | 15 | #SBATCH -p galton 16 | #SBATCH -J $name-$id 17 | #SBATCH -o $name-$id.log 18 | ``` 19 | 20 | 21 | 22 | -------------------------------------------------------------------------------- /beeflow/common/worker/__init__.py: -------------------------------------------------------------------------------- 1 | """Init file for the worker package.""" 2 | 3 | from beeflow.common.worker.worker import WorkerError 4 | from beeflow.common.worker.slurm_worker import SlurmWorker 5 | from beeflow.common.worker.lsf_worker import LSFWorker 6 | from beeflow.common.worker.flux_worker import FluxWorker 7 | from beeflow.common.worker.simple_worker import SimpleWorker 8 | 9 | 10 | supported_workload_schedulers = { 11 | 'Slurm': SlurmWorker, 12 | 'LSF': LSFWorker, 13 | 'Flux': FluxWorker, 14 | 'Simple': SimpleWorker, 15 | } 16 | 17 | 18 | def find_worker(name): 19 | """Find the worker class or return None.""" 20 | if name in supported_workload_schedulers: 21 | return supported_workload_schedulers[name] 22 | return None 23 | -------------------------------------------------------------------------------- /beeflow/data/cloud_templates/dora.yaml: -------------------------------------------------------------------------------- 1 | # Location of the jinja template file 2 | template_file: /home/jtronge/BEE/templates/dora.jinja 3 | # Private key file for logging onto the remote instance 4 | private_key_file: ... 5 | bee_user: debian 6 | # These ports may need to change to avoid conflicts 7 | wfm_listen_port: 5005 8 | tm_listen_port: 7777 9 | tm_launch_cmd: /bee/tm 10 | head_node: bee-server 11 | provider: openstack 12 | # Place all files that should be copied over with the `--copy` argument here 13 | copy_files: 14 | - src: /home/jtronge/heat-transfer.tar.gz 15 | dst: /home/debian 16 | - src: /home/jtronge/clamr.tar.gz 17 | dst: /home/debian 18 | - src: /home/jtronge/ffmpeg.tar.gz 19 | dst: /home/debian 20 | # Stack parameters to be passed to the heat stack on submission 21 | key_name: dora-bee-key 22 | public_net: external 23 | stack_name: test-bee-stack-2 24 | github_pat: ... 25 | git_branch: milestone-cloud-2 26 | https_proxy: ... 27 | http_proxy: ... 28 | no_proxy: ... 29 | nameservers: ... 30 | -------------------------------------------------------------------------------- /beeflow/data/cwl/README.md: -------------------------------------------------------------------------------- 1 | # CWL files 2 | 3 | This directory is home to CWL files. These may be in JSON or YAML format. 
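For orientation, a minimal CommandLineTool in YAML form looks like the sketch below (an illustrative file, not one of the workflows shipped in this directory):

```yaml
# Minimal CWL tool that echoes a message (illustrative only)
cwlVersion: v1.0
class: CommandLineTool
baseCommand: echo
inputs:
  message:
    type: string
    inputBinding:
      position: 1
outputs: []
```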
4 | 5 | * `bee_workflows` directory: Workflows which are designed to run with BEE. 6 | * `cwl_validation` directory: Workflows which are used to validate CWL behavior, but are not explicitly expected to run with BEE. 7 | * `synthetic` directory: Examples of CWL code, features, and files that can be used for testing or experimentation. 8 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/README.md: -------------------------------------------------------------------------------- 1 | # BEE Workflows 2 | 3 | This is a home for workflows which demonstrate BEE functionality. 4 | 5 | 6 | Directories: 7 | * `clamr-wf`: CLAMR workflow with yaml files. The workflow executes CLAMR in a container, then processes png files generated by CLAMR into an MPEG video using ffmpeg on bare metal. This is a good example to test running both in a container and on bare metal. 8 | 9 | * `clamr-wf-noyaml/`: Original workflows with all parameters on the command line. The workflows execute CLAMR in a container, and then process png files generated by CLAMR into an MPEG video, using ffmpeg. There are examples for container runtimes, Charliecloud and Singularity, and for workload schedulers, LSF and Slurm. The selection of container runtime and workload scheduler is defined in the bee.conf configuration file. 10 | 11 | * `clamr-wf-singularity`: CLAMR workflow with yaml files. The workflow executes CLAMR in a Singularity container, then processes png files generated by CLAMR into an MPEG video using ffmpeg on bare metal. 12 | 13 | * `clamr-wf-summit`: CLAMR workflow with yaml files that runs on Summit, using LSF. This workflow executes both the clamr and ffmpeg steps in a container. 14 | 15 | * `simple-workflows/`: Contains some example workflows used for tests and simple demonstrations of running workflows with BEE. 16 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/blast/README.md: -------------------------------------------------------------------------------- 1 | Copy input/small.fasta to wherever you are going to submit this workflow before packaging and submitting this blast workflow. 2 | 3 | Also, there seems to be an error when using the glob in blast_worker001.cwl and blast_worker002.cwl. 4 | I have left the previous output in a comment. 
5 | 6 | stderr has not been tested since it is still not implemented 7 | 8 | 9 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/blast/blast.yml: -------------------------------------------------------------------------------- 1 | # Parameters for blast_split.cwl 2 | input_file: small.fasta 3 | query_granularity: 100 4 | 5 | # Parameters for blast_worker001.cwl 6 | program_name: blastn 7 | database: nt/nt 8 | worker001_output: fasta.0.out 9 | 10 | # Parameters for blast_worker002.cwl 11 | # Same program_name and database 12 | worker002_output: fasta.1.out 13 | 14 | # Parameters for blast_output.cwl 15 | cat_output: output.fasta 16 | 17 | # Parameters for blast_output_err.cwl 18 | cat_output_err: output.fasta.err 19 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/blast/blast_output.cwl: -------------------------------------------------------------------------------- 1 | # -*- mode: YAML; -*- 2 | 3 | class: CommandLineTool 4 | cwlVersion: v1.2 5 | 6 | baseCommand: /makeflow-examples/blast/cat_blast 7 | 8 | inputs: 9 | output_filename: 10 | type: string 11 | inputBinding: 12 | position: 1 13 | input_file1: 14 | type: File 15 | inputBinding: 16 | position: 2 17 | input_file2: 18 | type: File 19 | inputBinding: 20 | position: 3 21 | 22 | outputs: 23 | blast_output: 24 | type: File 25 | outputBinding: 26 | glob: $(inputs.output_filename) 27 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/blast/blast_output_err.cwl: -------------------------------------------------------------------------------- 1 | # -*- mode: YAML; -*- 2 | 3 | class: CommandLineTool 4 | cwlVersion: v1.2 5 | 6 | baseCommand: /bin/cat 7 | 8 | stdout: output.fasta.err 9 | 10 | inputs: 11 | input_file1: 12 | type: File 13 | inputBinding: 14 | position: 1 15 | input_file2: 16 | type: File 17 | inputBinding: 18 | position: 2 19 | 20 | outputs: 21 | blast_output_err: 22 | type: stdout 23 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/blast/blast_split.cwl: -------------------------------------------------------------------------------- 1 | # -*- mode: YAML; -*- 2 | 3 | class: CommandLineTool 4 | cwlVersion: v1.2 5 | 6 | baseCommand: /makeflow-examples/blast/split_fasta 7 | 8 | inputs: 9 | query_granularity: 10 | type: int 11 | inputBinding: 12 | position: 1 13 | input_file: 14 | type: File 15 | inputBinding: 16 | position: 2 17 | 18 | outputs: 19 | split1: 20 | type: File 21 | outputBinding: 22 | glob: small.fasta.0 23 | split2: 24 | type: File 25 | outputBinding: 26 | glob: small.fasta.1 27 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/blast/blast_worker001.cwl: -------------------------------------------------------------------------------- 1 | # -*- mode: YAML; -*- 2 | 3 | class: CommandLineTool 4 | cwlVersion: v1.2 5 | 6 | baseCommand: /makeflow-examples/blast/blastall 7 | 8 | stderr: fasta.0.err 9 | 10 | inputs: 11 | program_name: 12 | type: string 13 | inputBinding: 14 | prefix: -p 15 | database: 16 | type: string 17 | inputBinding: 18 | prefix: -d 19 | input_file: 20 | type: File 21 | inputBinding: 22 | prefix: -i 23 | output_file: 24 | type: string 25 | inputBinding: 26 | prefix: -o 27 | 28 | 29 | outputs: 30 | output: 31 | type: File 32 | outputBinding: 33 | glob: fasta.0.out 34 | # Was glob: 
$(inputs.output_file) but fails 35 | 36 | output_err: 37 | type: stderr 38 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/blast/blast_worker002.cwl: -------------------------------------------------------------------------------- 1 | # -*- mode: YAML; -*- 2 | 3 | class: CommandLineTool 4 | cwlVersion: v1.2 5 | 6 | baseCommand: /makeflow-examples/blast/blastall 7 | 8 | stderr: fasta.1.err 9 | 10 | inputs: 11 | program_name: 12 | type: string 13 | inputBinding: 14 | prefix: -p 15 | database: 16 | type: string 17 | inputBinding: 18 | prefix: -d 19 | input_file: 20 | type: File 21 | inputBinding: 22 | prefix: -i 23 | output_file: 24 | type: string 25 | inputBinding: 26 | prefix: -o 27 | 28 | outputs: 29 | output: 30 | type: File 31 | outputBinding: 32 | glob: fasta.1.out 33 | # Was glob: $(inputs.output_file) but fails 34 | # 35 | output_err: 36 | type: stderr 37 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/cat-grep-fail/README.md: -------------------------------------------------------------------------------- 1 | This workflow is designed to fail on purpose, so that we can test that case 2 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/cat-grep-fail/cat.cwl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env cwl-runner 2 | 3 | cwlVersion: v1.0 4 | class: CommandLineTool 5 | baseCommand: cat 6 | stdout: cat.txt 7 | stderr: cat.err 8 | inputs: 9 | input_file: 10 | type: File 11 | inputBinding: 12 | position: 1 13 | outputs: 14 | contents: 15 | type: stdout 16 | cat_stderr: 17 | type: stderr 18 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/cat-grep-fail/grep0.cwl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env cwl-runner 2 | 3 | cwlVersion: v1.0 4 | class: CommandLineTool 5 | baseCommand: grep 6 | stdout: occur0.txt 7 | inputs: 8 | word: 9 | type: string 10 | inputBinding: 11 | position: 1 12 | text_file: 13 | type: File 14 | inputBinding: 15 | position: 2 16 | outputs: 17 | occur: 18 | type: stdout 19 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/cat-grep-fail/grep1.cwl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env cwl-runner 2 | 3 | cwlVersion: v1.0 4 | class: CommandLineTool 5 | baseCommand: grep 6 | stdout: occur1.txt 7 | inputs: 8 | word: 9 | type: string 10 | inputBinding: 11 | position: 1 12 | text_file: 13 | type: File 14 | inputBinding: 15 | position: 2 16 | outputs: 17 | occur: 18 | type: stdout 19 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/cat-grep-fail/input.yml: -------------------------------------------------------------------------------- 1 | input_file: lorem.txt 2 | word0: Vivamus 3 | word1: pulvinar 4 | tarball_fname: out.tgz 5 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/cat-grep-fail/tar.cwl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env cwl-runner 2 | 3 | cwlVersion: v1.0 4 | class: CommandLineTool 5 | baseCommand: tar-not-a-real-command-for-failure 6 | inputs: 7 | tarball_fname: 8 | type: 
string 9 | inputBinding: 10 | position: 1 11 | prefix: -cf 12 | file0: 13 | type: File 14 | inputBinding: 15 | position: 2 16 | file1: 17 | type: File 18 | inputBinding: 19 | position: 3 20 | outputs: 21 | tarball: 22 | type: File 23 | outputBinding: 24 | glob: $(inputs.tarball_fname) 25 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/cat-grep-fail/workflow.cwl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env cwl-runner 2 | 3 | cwlVersion: v1.0 4 | class: Workflow 5 | inputs: 6 | input_file: File 7 | word0: string 8 | word1: string 9 | tarball_fname: string 10 | 11 | outputs: 12 | tarball: 13 | type: File 14 | outputSource: tar/tarball 15 | cat_stderr: 16 | type: File 17 | outputSource: cat/cat_stderr 18 | 19 | steps: 20 | cat: 21 | run: cat.cwl 22 | in: 23 | input_file: input_file 24 | out: [contents, cat_stderr] 25 | grep0: 26 | run: grep0.cwl 27 | in: 28 | word: word0 29 | text_file: cat/contents 30 | out: [occur] 31 | grep1: 32 | run: grep1.cwl 33 | in: 34 | word: word1 35 | text_file: cat/contents 36 | out: [occur] 37 | tar: 38 | run: tar.cwl 39 | in: 40 | file0: grep0/occur 41 | file1: grep1/occur 42 | tarball_fname: tarball_fname 43 | out: [tarball] 44 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/cgt-2branches/README.txt: -------------------------------------------------------------------------------- 1 | This example has two branches of cat-grep-tar. The second branch searches for a 2 | file that should not exist. That branch fails but the workflow can continue. 3 | 4 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/cgt-2branches/cat.cwl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env cwl-runner 2 | 3 | cwlVersion: v1.0 4 | class: CommandLineTool 5 | baseCommand: cat 6 | stdout: cat.txt 7 | stderr: cat.err 8 | inputs: 9 | input_file: 10 | type: File 11 | inputBinding: 12 | position: 1 13 | outputs: 14 | contents: 15 | type: stdout 16 | cat_stderr: 17 | type: stderr 18 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/cgt-2branches/cat2.cwl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env cwl-runner 2 | 3 | cwlVersion: v1.0 4 | class: CommandLineTool 5 | baseCommand: cat 6 | stdout: cat2.txt 7 | stderr: cat2.err 8 | inputs: 9 | input_file2: 10 | type: File 11 | inputBinding: 12 | position: 1 13 | outputs: 14 | contents: 15 | type: stdout 16 | cat_stderr2: 17 | type: stderr 18 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/cgt-2branches/grep0.cwl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env cwl-runner 2 | 3 | cwlVersion: v1.0 4 | class: CommandLineTool 5 | baseCommand: grep 6 | stdout: occur0.txt 7 | inputs: 8 | word: 9 | type: string 10 | inputBinding: 11 | position: 1 12 | text_file: 13 | type: File 14 | inputBinding: 15 | position: 2 16 | outputs: 17 | occur: 18 | type: stdout 19 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/cgt-2branches/grep1.cwl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env cwl-runner 2 | 3 | 
cwlVersion: v1.0 4 | class: CommandLineTool 5 | baseCommand: grep 6 | stdout: occur1.txt 7 | inputs: 8 | word: 9 | type: string 10 | inputBinding: 11 | position: 1 12 | text_file: 13 | type: File 14 | inputBinding: 15 | position: 2 16 | outputs: 17 | occur: 18 | type: stdout 19 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/cgt-2branches/grep2.cwl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env cwl-runner 2 | 3 | cwlVersion: v1.0 4 | class: CommandLineTool 5 | baseCommand: grep 6 | stdout: occur2.txt 7 | inputs: 8 | word: 9 | type: string 10 | inputBinding: 11 | position: 1 12 | text_file: 13 | type: File 14 | inputBinding: 15 | position: 2 16 | outputs: 17 | occur: 18 | type: stdout 19 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/cgt-2branches/grep3.cwl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env cwl-runner 2 | 3 | cwlVersion: v1.0 4 | class: CommandLineTool 5 | baseCommand: grep 6 | stdout: occur3.txt 7 | inputs: 8 | word: 9 | type: string 10 | inputBinding: 11 | position: 1 12 | text_file: 13 | type: File 14 | inputBinding: 15 | position: 2 16 | outputs: 17 | occur: 18 | type: stdout 19 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/cgt-2branches/input.yml: -------------------------------------------------------------------------------- 1 | input_file: lorem.txt 2 | input_file2: none.txt 3 | word0: Vivamus 4 | word1: pulvinar 5 | word2: Morbi 6 | word3: vitae 7 | tarball_fname: out.tgz 8 | tarball_fname2: out2.tgz 9 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/cgt-2branches/tar.cwl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env cwl-runner 2 | 3 | cwlVersion: v1.0 4 | class: CommandLineTool 5 | baseCommand: tar 6 | inputs: 7 | tarball_fname: 8 | type: string 9 | inputBinding: 10 | position: 1 11 | prefix: -cf 12 | file0: 13 | type: File 14 | inputBinding: 15 | position: 2 16 | file1: 17 | type: File 18 | inputBinding: 19 | position: 3 20 | outputs: 21 | tarball: 22 | type: File 23 | outputBinding: 24 | glob: $(inputs.tarball_fname) 25 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/cgt-2branches/tar2.cwl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env cwl-runner 2 | 3 | cwlVersion: v1.0 4 | class: CommandLineTool 5 | baseCommand: tar 6 | inputs: 7 | tarball_fname2: 8 | type: string 9 | inputBinding: 10 | position: 1 11 | prefix: -cf 12 | file2: 13 | type: File 14 | inputBinding: 15 | position: 2 16 | file3: 17 | type: File 18 | inputBinding: 19 | position: 3 20 | outputs: 21 | tarball2: 22 | type: File 23 | outputBinding: 24 | glob: $(inputs.tarball_fname2) 25 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/cgt-2branches/workflow.cwl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env cwl-runner 2 | 3 | cwlVersion: v1.0 4 | class: Workflow 5 | inputs: 6 | input_file: File 7 | word0: string 8 | word1: string 9 | tarball_fname: string 10 | input_file2: File 11 | word2: string 12 | word3: string 13 | tarball_fname2: string 14 | 
15 | outputs: 16 | tarball: 17 | type: File 18 | outputSource: tar/tarball 19 | cat_stderr: 20 | type: File 21 | outputSource: cat/cat_stderr 22 | cat_stderr2: 23 | type: File 24 | outputSource: cat2/cat_stderr2 25 | tarball2: 26 | type: File 27 | outputSource: tar2/tarball2 28 | 29 | steps: 30 | cat: 31 | run: cat.cwl 32 | in: 33 | input_file: input_file 34 | out: [contents, cat_stderr] 35 | grep0: 36 | run: grep0.cwl 37 | in: 38 | word: word0 39 | text_file: cat/contents 40 | out: [occur] 41 | grep1: 42 | run: grep1.cwl 43 | in: 44 | word: word1 45 | text_file: cat/contents 46 | out: [occur] 47 | tar: 48 | run: tar.cwl 49 | in: 50 | file0: grep0/occur 51 | file1: grep1/occur 52 | tarball_fname: tarball_fname 53 | out: [tarball] 54 | cat2: 55 | run: cat2.cwl 56 | in: 57 | input_file2: input_file2 58 | out: [contents,cat_stderr2] 59 | grep2: 60 | run: grep2.cwl 61 | in: 62 | word: word2 63 | text_file: cat2/contents 64 | out: [occur] 65 | grep3: 66 | run: grep3.cwl 67 | in: 68 | word: word3 69 | text_file: cat2/contents 70 | out: [occur] 71 | tar2: 72 | run: tar2.cwl 73 | in: 74 | file2: grep2/occur 75 | file3: grep3/occur 76 | tarball_fname2: tarball_fname2 77 | out: [tarball2] 78 | 79 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-ci/README.md: -------------------------------------------------------------------------------- 1 | # CLAMR - FFMPEG workflow using CWL 2 | 3 | clamr_wf.cwl - the main cwl. 4 | calmr_job.yml - yaml file for values used by the cwl files. 5 | clamr.cwl - cwl file for the clamr step. 6 | ffmpeg.cwl - cwl file for the ffmpeg step. 7 | 8 | The values in these files run on fog a LANL cluster, using the container runtime Charliecloud. Fog uses slurm as the workload scheduler. 9 | 10 | 11 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-ci/clamr_job.json: -------------------------------------------------------------------------------- 1 | { 2 | "grid_resolution": 32, 3 | "max_levels": 3, 4 | "time_steps": 5000, 5 | "steps_between_outputs": 10, 6 | "steps_between_graphics": 25, 7 | "graphics_type": "png", 8 | "input_format": "image2", 9 | "frame_rate": 12, 10 | "frame_size": "800x800", 11 | "pixel_format": "yuv420p", 12 | "output_filename": "CLAMR_movie.mp4" 13 | } 14 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-ci/clamr_job.yml: -------------------------------------------------------------------------------- 1 | # Inputs for CLAMR 2 | # /clamr/CLAMR-master/clamr_cpuonly -n 32 -l 3 -t 5000 -i 10 -g 25 -G png 3 | 4 | grid_resolution: 32 5 | max_levels: 3 6 | time_steps: 500 7 | steps_between_outputs: 10 8 | steps_between_graphics: 25 9 | graphics_type: png 10 | 11 | # Inputs for FFMPEG 12 | #ffmpeg -f image2 -r 12 -s 800x800 -pix_fmt yuv420p $HOME/CLAMR_movie.mp4 13 | 14 | input_format: image2 15 | frame_rate: 12 16 | frame_size: 800x800 17 | pixel_format: yuv420p 18 | output_filename: $HOME/CLAMR_movie.mp4 19 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-ci/ffmpeg.cwl: -------------------------------------------------------------------------------- 1 | # -*- mode: YAML; -*- 2 | 3 | class: CommandLineTool 4 | cwlVersion: v1.0 5 | 6 | baseCommand: ffmpeg -y 7 | inputs: 8 | input_format: 9 | type: string? 
10 | inputBinding: 11 | prefix: -f 12 | position: 1 13 | ffmpeg_input: 14 | type: Directory 15 | inputBinding: 16 | prefix: -i 17 | position: 2 18 | valueFrom: $(self.path + "/graph%05d.png") 19 | frame_rate: 20 | type: int? 21 | inputBinding: 22 | prefix: -r 23 | position: 3 24 | frame_size: 25 | type: string? 26 | inputBinding: 27 | prefix: -s 28 | position: 4 29 | pixel_format: 30 | type: string? 31 | inputBinding: 32 | prefix: -pix_fmt 33 | position: 5 34 | output_file: 35 | type: string 36 | inputBinding: 37 | position: 6 38 | 39 | outputs: 40 | movie: 41 | type: File 42 | outputBinding: 43 | glob: $(inputs.output_file) 44 | # glob: CLAMR_movie.mp4 45 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-ffmpeg-build_script/Dockerfile.clamr-ffmpeg: -------------------------------------------------------------------------------- 1 | # Dockerfile.clamr-ffmpeg 2 | # Developed on Chicoma @lanl 3 | # Patricia Grubel 4 | 5 | FROM debian:11 6 | 7 | 8 | RUN apt-get update && \ 9 | apt-get install -y wget gnupg git cmake ffmpeg g++ make openmpi-bin libopenmpi-dev libpng-dev libpng16-16 libpng-tools imagemagick libmagickwand-6.q16-6 libmagickwand-6.q16-dev 10 | 11 | RUN git clone https://github.com/lanl/CLAMR.git 12 | RUN cd CLAMR && cmake . && make clamr_cpuonly 13 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-ffmpeg-build_script/README.md: -------------------------------------------------------------------------------- 1 | # CLAMR - FFMPEG workflow using CWL 2 | 3 | This workflow uses the DockerRequirements dockerFile and beeflow:containerNameto build the clamr and ffmpeg in a container. 4 | 5 | ``` 6 | clamr_wf.cwl - the main cwl 7 | calmr_job.yml - yaml file for values used by the cwl files 8 | clamr.cwl - cwl file for the clamr step 9 | ffmpeg.cwl - cwl file for the ffmpeg step 10 | ``` 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-ffmpeg-build_script/clamr_job.json: -------------------------------------------------------------------------------- 1 | { 2 | "grid_resolution": 32, 3 | "max_levels": 3, 4 | "time_steps": 5000, 5 | "steps_between_outputs": 10, 6 | "steps_between_graphics": 25, 7 | "graphics_type": "png", 8 | "input_format": "image2", 9 | "frame_rate": 12, 10 | "frame_size": "800x800", 11 | "pixel_format": "yuv420p", 12 | "output_filename": "CLAMR_movie.mp4" 13 | } 14 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-ffmpeg-build_script/clamr_job.yml: -------------------------------------------------------------------------------- 1 | # Inputs for CLAMR 2 | # /CLAMR/clamr_cpuonly -n 32 -l 3 -t 5000 -i 10 -g 25 -G png 3 | 4 | grid_resolution: 32 5 | max_levels: 3 6 | time_steps: 5000 7 | steps_between_outputs: 10 8 | steps_between_graphics: 25 9 | graphics_type: png 10 | 11 | # Inputs for FFMPEG 12 | #ffmpeg -f image2 -r 12 -s 800x800 -pix_fmt yuv420p CLAMR_movie.mp4 13 | 14 | input_format: image2 15 | frame_rate: 12 16 | frame_size: 800x800 17 | pixel_format: yuv420p 18 | output_filename: CLAMR_movie.mp4 19 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-ffmpeg-build_script/ffmpeg.cwl: -------------------------------------------------------------------------------- 1 | # -*- mode: YAML; -*- 2 | 3 | class: 
CommandLineTool 4 | cwlVersion: v1.0 5 | 6 | baseCommand: ffmpeg -y 7 | 8 | stderr: ffmpeg_stderr.txt 9 | 10 | inputs: 11 | input_format: 12 | type: string? 13 | inputBinding: 14 | prefix: -f 15 | position: 1 16 | ffmpeg_input: 17 | type: Directory 18 | inputBinding: 19 | prefix: -i 20 | position: 2 21 | valueFrom: $("/graph%05d.png") 22 | frame_rate: 23 | type: int? 24 | inputBinding: 25 | prefix: -r 26 | position: 3 27 | frame_size: 28 | type: string? 29 | inputBinding: 30 | prefix: -s 31 | position: 4 32 | pixel_format: 33 | type: string? 34 | inputBinding: 35 | prefix: -pix_fmt 36 | position: 5 37 | output_file: 38 | type: string 39 | inputBinding: 40 | position: 6 41 | 42 | outputs: 43 | movie: 44 | type: File 45 | outputBinding: 46 | glob: $(inputs.output_file) 47 | # glob: CLAMR_movie.mp4 48 | ffmpeg_stderr: 49 | type: stderr 50 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-ffmpeg-build_script/post_run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "After run" 4 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-ffmpeg-build_script/pre_run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "Before run" 4 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-ffmpeg-validate_script/Dockerfile.clamr-ffmpeg: -------------------------------------------------------------------------------- 1 | # Dockerfile.clamr-ffmpeg 2 | # Developed on Chicoma @lanl 3 | # Patricia Grubel 4 | 5 | FROM debian:11 6 | 7 | 8 | RUN apt-get update && \ 9 | apt-get install -y wget gnupg git cmake ffmpeg g++ make openmpi-bin libopenmpi-dev libpng-dev libpng16-16 libpng-tools imagemagick libmagickwand-6.q16-6 libmagickwand-6.q16-dev 10 | 11 | RUN git clone https://github.com/lanl/CLAMR.git 12 | RUN cd CLAMR && cmake . && make clamr_cpuonly 13 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-ffmpeg-validate_script/README.md: -------------------------------------------------------------------------------- 1 | # CLAMR - FFMPEG workflow using CWL 2 | 3 | This workflow uses the DockerRequirements dockerFile and beeflow:containerNameto build the clamr and ffmpeg in a container. 
4 | 5 | ``` 6 | clamr_wf.cwl - the main cwl 7 | calmr_job.yml - yaml file for values used by the cwl files 8 | clamr.cwl - cwl file for the clamr step 9 | ffmpeg.cwl - cwl file for the ffmpeg step 10 | ``` 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-ffmpeg-validate_script/clamr_job.json: -------------------------------------------------------------------------------- 1 | { 2 | "grid_resolution": 32, 3 | "max_levels": 3, 4 | "time_steps": 5000, 5 | "steps_between_outputs": 10, 6 | "steps_between_graphics": 25, 7 | "graphics_type": "png", 8 | "input_format": "image2", 9 | "frame_rate": 12, 10 | "frame_size": "800x800", 11 | "pixel_format": "yuv420p", 12 | "output_filename": "CLAMR_movie.mp4" 13 | } 14 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-ffmpeg-validate_script/clamr_job.yml: -------------------------------------------------------------------------------- 1 | # Inputs for CLAMR 2 | # /CLAMR/clamr_cpuonly -n 32 -l 3 -t 5000 -i 10 -g 25 -G png 3 | 4 | grid_resolution: 32 5 | max_levels: 3 6 | time_steps: 5000 7 | steps_between_outputs: 10 8 | steps_between_graphics: 25 9 | graphics_type: png 10 | 11 | # Inputs for FFMPEG 12 | #ffmpeg -f image2 -r 12 -s 800x800 -pix_fmt yuv420p CLAMR_movie.mp4 13 | 14 | input_format: image2 15 | frame_rate: 12 16 | frame_size: 800x800 17 | pixel_format: yuv420p 18 | output_filename: CLAMR_movie.mp4 19 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-ffmpeg-validate_script/ffmpeg.cwl: -------------------------------------------------------------------------------- 1 | # -*- mode: YAML; -*- 2 | 3 | class: CommandLineTool 4 | cwlVersion: v1.0 5 | 6 | baseCommand: ffmpeg -y 7 | 8 | stderr: ffmpeg_stderr.txt 9 | 10 | inputs: 11 | input_format: 12 | type: string? 13 | inputBinding: 14 | prefix: -f 15 | position: 1 16 | ffmpeg_input: 17 | type: Directory 18 | inputBinding: 19 | prefix: -i 20 | position: 2 21 | valueFrom: $("/graph%05d.png") 22 | frame_rate: 23 | type: int? 24 | inputBinding: 25 | prefix: -r 26 | position: 3 27 | frame_size: 28 | type: string? 29 | inputBinding: 30 | prefix: -s 31 | position: 4 32 | pixel_format: 33 | type: string? 34 | inputBinding: 35 | prefix: -pix_fmt 36 | position: 5 37 | output_file: 38 | type: string 39 | inputBinding: 40 | position: 6 41 | 42 | outputs: 43 | movie: 44 | type: File 45 | outputBinding: 46 | glob: $(inputs.output_file) 47 | # glob: CLAMR_movie.mp4 48 | ffmpeg_stderr: 49 | type: stderr 50 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-ffmpeg-validate_script/post_run.sh: -------------------------------------------------------------------------------- 1 | echo "After run" 2 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-ffmpeg-validate_script/pre_run.sh: -------------------------------------------------------------------------------- 1 | echo "Before run" 2 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-wf-chicoma/README.md: -------------------------------------------------------------------------------- 1 | # CLAMR - FFMPEG workflow using CWL 2 | 3 | clamr_wf.cwl - the main cwl. 4 | calmr_job.yml - yaml file for values used by the cwl files. 
5 | clamr.cwl - cwl file for the clamr step. 6 | ffmpeg.cwl - cwl file for the ffmpeg step. 7 | 8 | The values in these files run on Chicoma, a LANL cluster on the turquoise network, using the container runtime Charliecloud. Chicoma uses slurm as the workload scheduler. 9 | 10 | 11 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-wf-chicoma/clamr_job.json: -------------------------------------------------------------------------------- 1 | { 2 | "grid_resolution": 32, 3 | "max_levels": 3, 4 | "time_steps": 5000, 5 | "steps_between_outputs": 10, 6 | "steps_between_graphics": 25, 7 | "graphics_type": "png", 8 | "input_format": "image2", 9 | "frame_rate": 12, 10 | "frame_size": "800x800", 11 | "pixel_format": "yuv420p", 12 | "output_filename": "CLAMR_movie.mp4" 13 | } 14 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-wf-chicoma/clamr_job.yml: -------------------------------------------------------------------------------- 1 | # Inputs for CLAMR 2 | # /CLAMR/clamr_cpuonly -n 32 -l 3 -t 5000 -i 10 -g 25 -G png 3 | 4 | grid_resolution: 32 5 | max_levels: 3 6 | time_steps: 5000 7 | steps_between_outputs: 10 8 | steps_between_graphics: 25 9 | graphics_type: png 10 | 11 | # Inputs for FFMPEG 12 | #ffmpeg -f image2 -r 12 -s 800x800 -pix_fmt yuv420p $HOME/CLAMR_movie.mp4 13 | 14 | input_format: image2 15 | frame_rate: 12 16 | frame_size: 800x800 17 | pixel_format: yuv420p 18 | output_filename: CLAMR_movie.mp4 19 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-wf-chicoma/ffmpeg.cwl: -------------------------------------------------------------------------------- 1 | # -*- mode: YAML; -*- 2 | 3 | class: CommandLineTool 4 | cwlVersion: v1.0 5 | 6 | baseCommand: ffmpeg -y 7 | inputs: 8 | input_format: 9 | type: string? 10 | inputBinding: 11 | prefix: -f 12 | position: 1 13 | ffmpeg_input: 14 | type: Directory 15 | inputBinding: 16 | prefix: -i 17 | position: 2 18 | valueFrom: $("/graph%05d.png") 19 | frame_rate: 20 | type: int? 21 | inputBinding: 22 | prefix: -r 23 | position: 3 24 | frame_size: 25 | type: string? 26 | inputBinding: 27 | prefix: -s 28 | position: 4 29 | pixel_format: 30 | type: string? 31 | inputBinding: 32 | prefix: -pix_fmt 33 | position: 5 34 | output_file: 35 | type: string 36 | inputBinding: 37 | position: 6 38 | 39 | outputs: 40 | movie: 41 | type: File 42 | outputBinding: 43 | glob: $(inputs.output_file) 44 | # glob: CLAMR_movie.mp4 45 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-wf-noyaml/README.md: -------------------------------------------------------------------------------- 1 | # CLAMR-FFMPEG WORKFLOW 2 | 3 | This workflow executes the `CLAMR` AMR simulation and then runs `ffmpeg` to produce a video from the `CLAMR` output. These workflows place the command all on one line and were used early in the development of BEE, and are valid for demonstration and testing purposes. We recommend you use the workflows with yaml files as examples for scientific workflows. 4 | 5 | The directories are organized as `<workload scheduler>-<container runtime>`.
6 | 7 | 8 | CLAMR workflows for various systems: 9 | 10 | * Fog (LANL system) slurm-charliecloud/cf.cwl 11 | * Summit (ORNL system) lsf-charliecloud/cf-summit.cwl 12 | * Darwin (LANL system) slurm-charliecloud/cf-darwin.cwl 13 | * Case (LANL desktop with singularity) slurm-singularity/cf-singularity.cwl 14 | 15 | There are also examples using various functions of the build interface in the slurm-charliecloud directory. 16 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-wf-noyaml/lsf-charliecloud/README.md: -------------------------------------------------------------------------------- 1 | # CLAMR-FFMPEG WORKFLOW 2 | 3 | This workflow executes the `CLAMR` AMR simulation and then runs `ffmpeg`, represented in a single CWL file. 4 | 5 | 6 | * cf-summit.cwl - clamr CWL file, runs on Summit. Both steps run in a container. 7 | 8 | Note: The ls command was added to use the output from the first step for the 2nd step. 9 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-wf-noyaml/lsf-charliecloud/cf-summit.cwl: -------------------------------------------------------------------------------- 1 | # -*- mode: YAML; -*- 2 | 3 | class: Workflow 4 | cwlVersion: v1.0 5 | 6 | inputs: 7 | infile: File 8 | 9 | outputs: 10 | clamr_dir: 11 | type: File 12 | outputSource: clamr/outfile 13 | ffmpeg_movie: 14 | type: File 15 | outputSource: ffmpeg/outfile 16 | 17 | steps: 18 | clamr: 19 | run: 20 | class: CommandLineTool 21 | inputs: 22 | infile: 23 | type: File 24 | default: "" 25 | inputBinding: {position: 1} 26 | outputs: 27 | outfile: stdout 28 | stdout: $HOME/graphics_output 29 | baseCommand: "/CLAMR/clamr_cpuonly -n 32 -l 3 -t 5000 -i 10 -g 25 -G png" 30 | hints: 31 | DockerRequirement: 32 | beeflow:copyContainer: "/ccs/proj/csc420/BEE/clamr-ppc64le.tar.gz" 33 | in: 34 | infile: infile 35 | out: [outfile] 36 | 37 | ffmpeg: 38 | run: 39 | class: CommandLineTool 40 | inputs: 41 | infile: 42 | type: File 43 | inputBinding: {position: 1} 44 | outputs: 45 | outfile: stdout 46 | stdout: CLAMR_movie.mp4 47 | baseCommand: "ffmpeg -f image2 -i /home/$USER/graphics_output/graph%05d.png -r 12 -s 800x800 -pix_fmt yuv420p /home/$USER/CLAMR_movie.mp4 && ls -ld" 48 | hints: 49 | DockerRequirement: 50 | beeflow:copyContainer: "/ccs/proj/csc420/BEE/clamr-ppc64le.tar.gz" 51 | in: 52 | infile: clamr/outfile 53 | out: [outfile] 54 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-wf-noyaml/slurm-charliecloud/Dockerfile.clamr-lanl-x86_64: -------------------------------------------------------------------------------- 1 | FROM git.lanl.gov:5050/trandles/baseimages/centos-buildclamr:7 2 | 3 | RUN git clone https://github.com/lanl/CLAMR.git 4 | RUN cd CLAMR && cmake3 . 
&& make 5 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-wf-noyaml/slurm-charliecloud/cf-darwin.cwl: -------------------------------------------------------------------------------- 1 | # -*- mode: YAML; -*- 2 | 3 | class: Workflow 4 | cwlVersion: v1.0 5 | 6 | inputs: 7 | infile: File 8 | 9 | outputs: 10 | clamr_dir: 11 | type: File 12 | outputSource: clamr/outfile 13 | ffmpeg_movie: 14 | type: File 15 | outputSource: ffmpeg/outfile 16 | 17 | steps: 18 | clamr: 19 | run: 20 | class: CommandLineTool 21 | inputs: 22 | infile: 23 | type: File 24 | default: "" 25 | inputBinding: {position: 1} 26 | outputs: 27 | outfile: stdout 28 | stdout: $HOME/graphics_output 29 | baseCommand: "/clamr/CLAMR-master/clamr_cpuonly -n 32 -l 3 -t 5000 -i 10 -g 25 -G png" 30 | hints: 31 | DockerRequirement: 32 | beeflow:copyContainer: "/projects/beedev/clamr-toss.tar.gz" 33 | in: 34 | infile: infile 35 | out: [outfile] 36 | 37 | ffmpeg: 38 | run: 39 | class: CommandLineTool 40 | inputs: 41 | infile: 42 | type: File 43 | inputBinding: {position: 1} 44 | outputs: 45 | outfile: stdout 46 | stdout: CLAMR_movie.mp4 47 | baseCommand: "ffmpeg -y -f image2 -i $HOME/graphics_output/graph%05d.png -r 12 -s 800x800 -pix_fmt yuv420p $HOME/CLAMR_movie.mp4 && ls -ld" 48 | hints: 49 | in: 50 | infile: clamr/outfile 51 | out: [outfile] 52 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-wf-noyaml/slurm-charliecloud/cf-no-owrite.cwl: -------------------------------------------------------------------------------- 1 | # -*- mode: YAML; -*- 2 | 3 | class: Workflow 4 | cwlVersion: v1.0 5 | 6 | inputs: 7 | infile: File 8 | 9 | outputs: 10 | clamr_dir: 11 | type: File 12 | outputSource: clamr/outfile 13 | ffmpeg_movie: 14 | type: File 15 | outputSource: ffmpeg/outfile 16 | 17 | steps: 18 | clamr: 19 | run: 20 | class: CommandLineTool 21 | inputs: 22 | infile: 23 | type: File 24 | default: "" 25 | inputBinding: {position: 1} 26 | outputs: 27 | outfile: stdout 28 | stdout: $HOME/graphics_output 29 | baseCommand: "/clamr/CLAMR-master/clamr_cpuonly -n 32 -l 3 -t 5000 -i 10 -g 25 -G png" 30 | hints: 31 | DockerRequirement: 32 | beeflow:copyContainer: "/usr/projects/beedev/clamr/clamr-toss.tar.gz" 33 | in: 34 | infile: infile 35 | out: [outfile] 36 | 37 | ffmpeg: 38 | run: 39 | class: CommandLineTool 40 | inputs: 41 | infile: 42 | type: File 43 | inputBinding: {position: 1} 44 | outputs: 45 | outfile: stdout 46 | stdout: CLAMR_movie.mp4 47 | baseCommand: "ffmpeg -f image2 -i $HOME/graphics_output/graph%05d.png -r 12 -s 800x800 -pix_fmt yuv420p $HOME/CLAMR_movie.mp4 && ls -ld" 48 | hints: 49 | in: 50 | infile: clamr/outfile 51 | out: [outfile] 52 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-wf-noyaml/slurm-charliecloud/cf.cwl: -------------------------------------------------------------------------------- 1 | # -*- mode: YAML; -*- 2 | 3 | class: Workflow 4 | cwlVersion: v1.0 5 | 6 | inputs: 7 | infile: File 8 | 9 | outputs: 10 | clamr_dir: 11 | type: File 12 | outputSource: clamr/outfile 13 | ffmpeg_movie: 14 | type: File 15 | outputSource: ffmpeg/outfile 16 | 17 | steps: 18 | clamr: 19 | run: 20 | class: CommandLineTool 21 | inputs: 22 | infile: 23 | type: File 24 | default: "" 25 | inputBinding: {position: 1} 26 | outputs: 27 | outfile: stdout 28 | stdout: $HOME/graphics_output 29 | baseCommand: 
"/clamr/CLAMR-master/clamr_cpuonly -n 32 -l 3 -t 5000 -i 10 -g 25 -G png" 30 | hints: 31 | DockerRequirement: 32 | beeflow:copyContainer: "/usr/projects/beedev/clamr/clamr-toss.tar.gz" 33 | in: 34 | infile: infile 35 | out: [outfile] 36 | 37 | ffmpeg: 38 | run: 39 | class: CommandLineTool 40 | inputs: 41 | infile: 42 | type: File 43 | inputBinding: {position: 1} 44 | outputs: 45 | outfile: stdout 46 | stdout: CLAMR_movie.mp4 47 | baseCommand: "ffmpeg -y -f image2 -i $HOME/graphics_output/graph%05d.png -r 12 -s 800x800 -pix_fmt yuv420p $HOME/CLAMR_movie.mp4 && ls -ld" 48 | hints: 49 | in: 50 | infile: clamr/outfile 51 | out: [outfile] 52 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-wf-noyaml/slurm-charliecloud/copyContainer_containerName.cwl: -------------------------------------------------------------------------------- 1 | # -*- mode: YAML; -*- 2 | 3 | class: Workflow 4 | cwlVersion: v1.0 5 | 6 | inputs: 7 | infile: File 8 | 9 | outputs: 10 | clamr_dir: 11 | type: File 12 | outputSource: clamr/outfile 13 | ffmpeg_movie: 14 | type: File 15 | outputSource: ffmpeg/outfile 16 | 17 | steps: 18 | clamr: 19 | run: 20 | class: CommandLineTool 21 | inputs: 22 | infile: 23 | type: File 24 | default: "" 25 | inputBinding: {position: 1} 26 | outputs: 27 | outfile: stdout 28 | stdout: $HOME/graphics_output 29 | baseCommand: "/clamr/CLAMR-master/clamr_cpuonly -n 32 -l 3 -t 5000 -i 10 -g 25 -G png" 30 | hints: 31 | DockerRequirement: 32 | beeflow:copyContainer: "/usr/projects/beedev/clamr/clamr-toss.tar.gz" 33 | beeflow:containerName: "foo" 34 | in: 35 | infile: infile 36 | out: [outfile] 37 | 38 | ffmpeg: 39 | run: 40 | class: CommandLineTool 41 | inputs: 42 | infile: 43 | type: File 44 | inputBinding: {position: 1} 45 | outputs: 46 | outfile: stdout 47 | stdout: CLAMR_movie.mp4 48 | baseCommand: "ffmpeg -y -f image2 -i $HOME/graphics_output/graph%05d.png -r 12 -s 800x800 -pix_fmt yuv420p $HOME/CLAMR_movie.mp4 && ls -ld" 49 | hints: 50 | in: 51 | infile: clamr/outfile 52 | out: [outfile] 53 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-wf-noyaml/slurm-charliecloud/dockerFile_containerName.cwl: -------------------------------------------------------------------------------- 1 | # -*- mode: YAML; -*- 2 | 3 | class: Workflow 4 | cwlVersion: v1.0 5 | 6 | inputs: 7 | infile: File 8 | 9 | outputs: 10 | clamr_dir: 11 | type: File 12 | outputSource: clamr/outfile 13 | ffmpeg_movie: 14 | type: File 15 | outputSource: ffmpeg/outfile 16 | 17 | steps: 18 | clamr: 19 | run: 20 | class: CommandLineTool 21 | inputs: 22 | infile: 23 | type: File 24 | default: "" 25 | inputBinding: {position: 1} 26 | outputs: 27 | outfile: stdout 28 | stdout: $HOME/graphics_output 29 | baseCommand: "/CLAMR/clamr_cpuonly -n 32 -l 3 -t 5000 -i 10 -g 25 -G png" 30 | hints: 31 | DockerRequirement: 32 | dockerFile: "Dockerfile.clamr-lanl-x86_64" 33 | beeflow:containerName: "foo2" 34 | in: 35 | infile: infile 36 | out: [outfile] 37 | 38 | ffmpeg: 39 | run: 40 | class: CommandLineTool 41 | inputs: 42 | infile: 43 | type: File 44 | inputBinding: {position: 1} 45 | outputs: 46 | outfile: stdout 47 | stdout: CLAMR_movie.mp4 48 | baseCommand: "ffmpeg -y -f image2 -i $HOME/graphics_output/graph%05d.png -r 12 -s 800x800 -pix_fmt yuv420p $HOME/CLAMR_movie.mp4 && ls -ld" 49 | hints: 50 | in: 51 | infile: clamr/outfile 52 | out: [outfile] 53 | 
-------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-wf-noyaml/slurm-charliecloud/dockerPull.cwl: -------------------------------------------------------------------------------- 1 | # -*- mode: YAML; -*- 2 | 3 | class: Workflow 4 | cwlVersion: v1.0 5 | 6 | inputs: 7 | infile: File 8 | 9 | outputs: 10 | clamr_dir: 11 | type: File 12 | outputSource: clamr/outfile 13 | ffmpeg_movie: 14 | type: File 15 | outputSource: ffmpeg/outfile 16 | 17 | steps: 18 | clamr: 19 | run: 20 | class: CommandLineTool 21 | inputs: 22 | infile: 23 | type: File 24 | default: "" 25 | inputBinding: {position: 1} 26 | outputs: 27 | outfile: stdout 28 | stdout: $HOME/graphics_output 29 | baseCommand: "/CLAMR/clamr_cpuonly -n 32 -l 3 -t 5000 -i 10 -g 25 -G png" 30 | hints: 31 | DockerRequirement: 32 | dockerPull: "git.lanl.gov:5050/qwofford/containerhub/clamr-lanl-x86_64:latest" 33 | in: 34 | infile: infile 35 | out: [outfile] 36 | 37 | ffmpeg: 38 | run: 39 | class: CommandLineTool 40 | inputs: 41 | infile: 42 | type: File 43 | inputBinding: {position: 1} 44 | outputs: 45 | outfile: stdout 46 | stdout: CLAMR_movie.mp4 47 | baseCommand: "ffmpeg -y -f image2 -i $HOME/graphics_output/graph%05d.png -r 12 -s 800x800 -pix_fmt yuv420p $HOME/CLAMR_movie.mp4 && ls -ld" 48 | hints: 49 | in: 50 | infile: clamr/outfile 51 | out: [outfile] 52 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-wf-noyaml/slurm-singularity/README.md: -------------------------------------------------------------------------------- 1 | # CLAMR-FFMPEG WORKFLOW 2 | 3 | Using a Singularity container runtime, this workflow executes the `CLAMR` AMR simulation and then runs `ffmpeg` to produce a video from the `CLAMR` output. 4 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-wf-noyaml/slurm-singularity/cf-singularity.cwl: -------------------------------------------------------------------------------- 1 | # -*- mode: YAML; -*- 2 | 3 | class: Workflow 4 | cwlVersion: v1.0 5 | 6 | inputs: 7 | infile: File 8 | 9 | outputs: 10 | clamr_dir: 11 | type: File 12 | outputSource: clamr/outfile 13 | ffmpeg_movie: 14 | type: File 15 | outputSource: ffmpeg/outfile 16 | 17 | steps: 18 | clamr: 19 | run: 20 | class: CommandLineTool 21 | inputs: 22 | infile: 23 | type: File 24 | default: "" 25 | inputBinding: {position: 1} 26 | outputs: 27 | outfile: stdout 28 | stdout: $HOME/graphics_output 29 | baseCommand: "/clamr/CLAMR-master/clamr_cpuonly -n 32 -l 3 -t 5000 -i 10 -g 25 -G png" 30 | hints: 31 | DockerRequirement: 32 | beeflow:copyContainer: "/usr/projects/beedev/clamr/clamr-toss.simg" 33 | in: 34 | infile: infile 35 | out: [outfile] 36 | 37 | ffmpeg: 38 | run: 39 | class: CommandLineTool 40 | inputs: 41 | infile: 42 | type: File 43 | inputBinding: {position: 1} 44 | outputs: 45 | outfile: stdout 46 | stdout: CLAMR_movie.mp4 47 | baseCommand: "ffmpeg -y -f image2 -i $HOME/graphics_output/graph%05d.png -r 12 -s 800x800 -pix_fmt yuv420p $HOME/CLAMR_movie.mp4 && ls -ld" 48 | in: 49 | infile: clamr/outfile 50 | out: [outfile] 51 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-wf-singularity/README.md: -------------------------------------------------------------------------------- 1 | # CLAMR - FFMPEG workflow using CWL 2 | 3 | clamr_wf.cwl - the main cwl. 
4 | calmr_job.yml - yaml file for values used by the cwl files. 5 | clamr.cwl - cwl file for the clamr step. 6 | ffmpeg.cwl - cwl file for the ffmpeg step. 7 | 8 | The values in these files use the container runtime Singularity for a system with Slurm. 9 | 10 | 11 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-wf-singularity/clamr_job.json: -------------------------------------------------------------------------------- 1 | { 2 | "grid_resolution": 32, 3 | "max_levels": 3, 4 | "time_steps": 5000, 5 | "steps_between_outputs": 10, 6 | "steps_between_graphics": 25, 7 | "graphics_type": "png", 8 | "input_format": "image2", 9 | "frame_rate": 12, 10 | "frame_size": "800x800", 11 | "pixel_format": "yuv420p", 12 | "output_filename": "CLAMR_movie.mp4" 13 | } 14 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-wf-singularity/clamr_job.yml: -------------------------------------------------------------------------------- 1 | # Inputs for CLAMR 2 | # /clamr/CLAMR-master/clamr_cpuonly -n 32 -l 3 -t 5000 -i 10 -g 25 -G png 3 | 4 | grid_resolution: 32 5 | max_levels: 3 6 | time_steps: 5000 7 | steps_between_outputs: 10 8 | steps_between_graphics: 25 9 | graphics_type: png 10 | 11 | # Inputs for FFMPEG 12 | #ffmpeg -f image2 -r 12 -s 800x800 -pix_fmt yuv420p CLAMR_movie.mp4 13 | 14 | input_format: image2 15 | frame_rate: 12 16 | frame_size: 800x800 17 | pixel_format: yuv420p 18 | output_filename: CLAMR_movie.mp4 19 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-wf-singularity/ffmpeg.cwl: -------------------------------------------------------------------------------- 1 | # -*- mode: YAML; -*- 2 | 3 | class: CommandLineTool 4 | cwlVersion: v1.0 5 | 6 | baseCommand: ffmpeg -y 7 | inputs: 8 | input_format: 9 | type: string? 10 | inputBinding: 11 | prefix: -f 12 | position: 1 13 | ffmpeg_input: 14 | type: Directory 15 | inputBinding: 16 | prefix: -i 17 | position: 2 18 | valueFrom: $("graphics_output/graph%05d.png") 19 | frame_rate: 20 | type: int? 21 | inputBinding: 22 | prefix: -r 23 | position: 3 24 | frame_size: 25 | type: string? 26 | inputBinding: 27 | prefix: -s 28 | position: 4 29 | pixel_format: 30 | type: string? 31 | inputBinding: 32 | prefix: -pix_fmt 33 | position: 5 34 | output_file: 35 | type: string 36 | inputBinding: 37 | position: 6 38 | 39 | outputs: 40 | movie: 41 | type: File 42 | outputBinding: 43 | glob: $(inputs.output_file) 44 | # glob: CLAMR_movie.mp4 45 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-wf-summit/README.md: -------------------------------------------------------------------------------- 1 | # CLAMR - FFMPEG workflow using CWL 2 | 3 | clamr_wf.cwl - the main cwl. 4 | calmr_job.yml - yaml file for values used by the cwl files. 5 | clamr.cwl - cwl file for the clamr step. 6 | ffmpeg.cwl - cwl file for the ffmpeg step. 7 | 8 | The values in these files run on Summit, using the container runtime Charliecloud. Summit uses LSF as the workload scheduler. 
9 | 10 | 11 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-wf-summit/clamr_job.json: -------------------------------------------------------------------------------- 1 | { 2 | "grid_resolution": 32, 3 | "max_levels": 3, 4 | "time_steps": 5000, 5 | "steps_between_outputs": 10, 6 | "steps_between_graphics": 25, 7 | "graphics_type": "png", 8 | "input_format": "image2", 9 | "frame_rate": 12, 10 | "frame_size": "800x800", 11 | "pixel_format": "yuv420p", 12 | "output_filename": "CLAMR_movie.mp4" 13 | } 14 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-wf-summit/clamr_job.yml: -------------------------------------------------------------------------------- 1 | # Inputs for CLAMR 2 | # /clamr/CLAMR-master/clamr_cpuonly -n 32 -l 3 -t 5000 -i 10 -g 25 -G png 3 | 4 | grid_resolution: 32 5 | max_levels: 3 6 | time_steps: 5000 7 | steps_between_outputs: 10 8 | steps_between_graphics: 25 9 | graphics_type: png 10 | 11 | # Inputs for FFMPEG 12 | #ffmpeg -f image2 -r 12 -s 800x800 -pix_fmt yuv420p CLAMR_movie.mp4 13 | 14 | input_format: image2 15 | frame_rate: 12 16 | frame_size: 800x800 17 | pixel_format: yuv420p 18 | output_filename: CLAMR_movie.mp4 19 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-wf-summit/ffmpeg.cwl: -------------------------------------------------------------------------------- 1 | # -*- mode: YAML; -*- 2 | 3 | class: CommandLineTool 4 | cwlVersion: v1.0 5 | 6 | baseCommand: ffmpeg -y 7 | inputs: 8 | input_format: 9 | type: string? 10 | inputBinding: 11 | prefix: -f 12 | position: 1 13 | ffmpeg_input: 14 | type: Directory 15 | inputBinding: 16 | prefix: -i 17 | position: 2 18 | valueFrom: $(self.path + "/graph%05d.png") 19 | frame_rate: 20 | type: int? 21 | inputBinding: 22 | prefix: -r 23 | position: 3 24 | frame_size: 25 | type: string? 26 | inputBinding: 27 | prefix: -s 28 | position: 4 29 | pixel_format: 30 | type: string? 31 | inputBinding: 32 | prefix: -pix_fmt 33 | position: 5 34 | output_file: 35 | type: string 36 | inputBinding: 37 | position: 6 38 | 39 | outputs: 40 | movie: 41 | type: File 42 | outputBinding: 43 | glob: $(inputs.output_file) 44 | # glob: CLAMR_movie.mp4 45 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-wf-use-container/README.md: -------------------------------------------------------------------------------- 1 | # CLAMR - FFMPEG workflow using CWL 2 | 3 | clamr_wf.cwl - the main cwl. 4 | calmr_job.yml - yaml file for values used by the cwl files. 5 | clamr.cwl - cwl file for the clamr step. 6 | ffmpeg.cwl - cwl file for the ffmpeg step. 7 | 8 | The values in these files run on fog a LANL cluster, using the container runtime Charliecloud. Fog uses slurm as the workload scheduler. 
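The clamr_wf.cwl for this use-container variant is not reproduced in this listing; judging from the directory name and from the lulesh and pennant workflows later in this collection, its step hints presumably point at a pre-built image tarball via `beeflow:useContainer`, roughly like the sketch below (the path is illustrative only, not taken from this workflow):

```
hints:
  DockerRequirement:
    # Hypothetical path to a pre-built Charliecloud image tarball
    beeflow:useContainer: "/usr/projects/beedev/clamr/clamr-toss.tar.gz"
```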
9 | 10 | 11 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-wf-use-container/clamr_job.json: -------------------------------------------------------------------------------- 1 | { 2 | "grid_resolution": 32, 3 | "max_levels": 3, 4 | "time_steps": 5000, 5 | "steps_between_outputs": 10, 6 | "steps_between_graphics": 25, 7 | "graphics_type": "png", 8 | "input_format": "image2", 9 | "frame_rate": 12, 10 | "frame_size": "800x800", 11 | "pixel_format": "yuv420p", 12 | "output_filename": "CLAMR_movie.mp4" 13 | } 14 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-wf-use-container/clamr_job.yml: -------------------------------------------------------------------------------- 1 | # Inputs for CLAMR 2 | # /clamr/CLAMR-master/clamr_cpuonly -n 32 -l 3 -t 5000 -i 10 -g 25 -G png 3 | 4 | grid_resolution: 32 5 | max_levels: 3 6 | time_steps: 5000 7 | steps_between_outputs: 10 8 | steps_between_graphics: 25 9 | graphics_type: png 10 | 11 | # Inputs for FFMPEG 12 | #ffmpeg -f image2 -r 12 -s 800x800 -pix_fmt yuv420p $HOME/CLAMR_movie.mp4 13 | 14 | input_format: image2 15 | frame_rate: 12 16 | frame_size: 800x800 17 | pixel_format: yuv420p 18 | output_filename: $HOME/CLAMR_movie.mp4 19 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-wf-use-container/ffmpeg.cwl: -------------------------------------------------------------------------------- 1 | # -*- mode: YAML; -*- 2 | 3 | class: CommandLineTool 4 | cwlVersion: v1.0 5 | 6 | baseCommand: ffmpeg -y 7 | inputs: 8 | input_format: 9 | type: string? 10 | inputBinding: 11 | prefix: -f 12 | position: 1 13 | ffmpeg_input: 14 | type: Directory 15 | inputBinding: 16 | prefix: -i 17 | position: 2 18 | valueFrom: $(self.path + "/graph%05d.png") 19 | frame_rate: 20 | type: int? 21 | inputBinding: 22 | prefix: -r 23 | position: 3 24 | frame_size: 25 | type: string? 26 | inputBinding: 27 | prefix: -s 28 | position: 4 29 | pixel_format: 30 | type: string? 31 | inputBinding: 32 | prefix: -pix_fmt 33 | position: 5 34 | output_file: 35 | type: string 36 | inputBinding: 37 | position: 6 38 | 39 | outputs: 40 | movie: 41 | type: File 42 | outputBinding: 43 | glob: $(inputs.output_file) 44 | # glob: CLAMR_movie.mp4 45 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-wf/README.md: -------------------------------------------------------------------------------- 1 | # CLAMR - FFMPEG workflow using CWL 2 | 3 | clamr_wf.cwl - the main cwl. 4 | calmr_job.yml - yaml file for values used by the cwl files. 5 | clamr.cwl - cwl file for the clamr step. 6 | ffmpeg.cwl - cwl file for the ffmpeg step. 7 | 8 | The values in these files run on fog a LANL cluster, using the container runtime Charliecloud. Fog uses slurm as the workload scheduler. 
9 | 10 | 11 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-wf/clamr_job.json: -------------------------------------------------------------------------------- 1 | { 2 | "grid_resolution": 32, 3 | "max_levels": 3, 4 | "time_steps": 5000, 5 | "steps_between_outputs": 10, 6 | "steps_between_graphics": 25, 7 | "graphics_type": "png", 8 | "input_format": "image2", 9 | "frame_rate": 12, 10 | "frame_size": "800x800", 11 | "pixel_format": "yuv420p", 12 | "output_filename": "CLAMR_movie.mp4" 13 | } 14 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-wf/clamr_job.yml: -------------------------------------------------------------------------------- 1 | # Inputs for CLAMR 2 | # /clamr/CLAMR-master/clamr_cpuonly -n 32 -l 3 -t 5000 -i 10 -g 25 -G png 3 | 4 | grid_resolution: 32 5 | max_levels: 3 6 | time_steps: 5000 7 | steps_between_outputs: 10 8 | steps_between_graphics: 25 9 | graphics_type: png 10 | 11 | # Inputs for FFMPEG 12 | #ffmpeg -f image2 -r 12 -s 800x800 -pix_fmt yuv420p CLAMR_movie.mp4 13 | 14 | input_format: image2 15 | frame_rate: 12 16 | frame_size: 800x800 17 | pixel_format: yuv420p 18 | # output_filename: CLAMR_movie.mp4 19 | output_filename: ./CLAMR_movie.mp4 20 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/clamr-wf/ffmpeg.cwl: -------------------------------------------------------------------------------- 1 | # -*- mode: YAML; -*- 2 | 3 | class: CommandLineTool 4 | cwlVersion: v1.0 5 | 6 | baseCommand: ffmpeg -y 7 | 8 | stderr: ffmpeg_stderr.txt 9 | 10 | inputs: 11 | input_format: 12 | type: string? 13 | inputBinding: 14 | prefix: -f 15 | position: 1 16 | ffmpeg_input: 17 | type: Directory 18 | inputBinding: 19 | prefix: -i 20 | position: 2 21 | valueFrom: $("/graph%05d.png") 22 | frame_rate: 23 | type: int? 24 | inputBinding: 25 | prefix: -r 26 | position: 3 27 | frame_size: 28 | type: string? 29 | inputBinding: 30 | prefix: -s 31 | position: 4 32 | pixel_format: 33 | type: string? 
34 | inputBinding: 35 | prefix: -pix_fmt 36 | position: 5 37 | output_file: 38 | type: string 39 | inputBinding: 40 | position: 6 41 | 42 | outputs: 43 | movie: 44 | type: File 45 | outputBinding: 46 | glob: $(inputs.output_file) 47 | # glob: CLAMR_movie.mp4 48 | ffmpeg_stderr: 49 | type: stderr 50 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/lulesh-mpi-multi-file/lulesh.cwl: -------------------------------------------------------------------------------- 1 | class: CommandLineTool 2 | cwlVersion: v1.0 3 | 4 | baseCommand: [/lulesh2.0] 5 | stdout: lulesh_stdout.txt 6 | inputs: 7 | size: 8 | type: int 9 | inputBinding: 10 | prefix: -s 11 | iterations: 12 | type: int 13 | inputBinding: 14 | prefix: -i 15 | outputs: 16 | lulesh_stdout: 17 | type: stdout 18 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/lulesh-mpi-multi-file/lulesh_job.yml: -------------------------------------------------------------------------------- 1 | size: 20 2 | iterations: 10 3 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/lulesh-mpi-multi-file/lulesh_wf.cwl: -------------------------------------------------------------------------------- 1 | class: Workflow 2 | cwlVersion: v1.0 3 | 4 | inputs: 5 | size: int 6 | iterations: int 7 | 8 | outputs: 9 | lulesh_stdout: 10 | type: File 11 | outputSource: lulesh/lulesh_stdout 12 | 13 | steps: 14 | lulesh: 15 | run: lulesh.cwl 16 | in: 17 | size: size 18 | iterations: iterations 19 | out: [lulesh_stdout] 20 | hints: 21 | DockerRequirement: 22 | beeflow:useContainer: '/usr/projects/beedev/mpi/lulesh-x86_64.tgz' 23 | # See Dockerfile.lulesh-x86_64 24 | beeflow:MPIRequirement: 25 | ntasks: 27 26 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/lulesh-mpi/lulesh.cwl: -------------------------------------------------------------------------------- 1 | class: Workflow 2 | cwlVersion: v1.0 3 | 4 | inputs: 5 | size: int 6 | iterations: int 7 | 8 | outputs: 9 | lulesh_stdout: 10 | type: File 11 | outputSource: lulesh/lulesh_stdout 12 | 13 | steps: 14 | lulesh: 15 | run: 16 | class: CommandLineTool 17 | baseCommand: [/lulesh2.0] 18 | stdout: lulesh_stdout.txt 19 | inputs: 20 | size: 21 | type: int 22 | inputBinding: 23 | prefix: -s 24 | iterations: 25 | type: int 26 | inputBinding: 27 | prefix: -i 28 | outputs: 29 | lulesh_stdout: 30 | type: stdout 31 | in: 32 | size: size 33 | iterations: iterations 34 | out: [lulesh_stdout] 35 | hints: 36 | DockerRequirement: 37 | beeflow:useContainer: /usr/projects/beedev/mpi/lulesh-x86_64.tgz 38 | # dockerPull: "jtronge/lulesh" 39 | # See Dockerfile.lulesh-x86_64 40 | beeflow:MPIRequirement: 41 | ntasks: 27 42 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/lulesh-mpi/lulesh.yml: -------------------------------------------------------------------------------- 1 | size: 20 2 | iterations: 10 3 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/nwchem-mpi/README.md: -------------------------------------------------------------------------------- 1 | ## NWChem MPI Workflow 2 | 3 | This is a simple example MPI workflow that should run on x86 systems. 
The 4 | client script needs to first package up this directory: 5 | `beeflow/data/cwl/bee_workflows/nwchem-mpi`. Then you should be able to 6 | submit the workflow to the WFM after packaging with the client as well. An 7 | example submission is below with the `nwchem.cwl` and `nwchem.yml` files 8 | specified. 9 | 10 | ``` 11 | Welcome to BEE Client! 🐝 12 | 0) Package Workflow 13 | 1) Submit Workflow 14 | 2) List Workflows 15 | 3) Start Workflow 16 | 4) Query Workflow 17 | 5) Pause Workflow 18 | 6) Resume Workflow 19 | 7) Cancel Workflow 20 | 8) Copy Workflow 21 | 9) ReExecute Workflow 22 | 10) Exit 23 | $ 1 24 | Workflow name: 25 | $ nwchem-mpi-2 26 | Workflow tarball path: 27 | $ ./nwchem-mpi.tgz 28 | Main cwl file: 29 | $ nwchem.cwl 30 | Does the job have a yaml file (y/n): 31 | $ y 32 | Yaml file: 33 | $ nwchem.yml 34 | Submitting 35 | Job submitted! Your workflow id is d9a1482e-782e-4323-bb5e-aedfd77e0228. 36 | ``` 37 | 38 | At this point the job can then be started just as with any other workflow. 39 | 40 | This workflow pulls the x86 container from DockerHub, so if the container is not 41 | working for some reason, I've included an example Dockerfile that should build 42 | with Charliecloud. See `beeflow/data/dockerfiles/Dockerfile.nwchem-x86_64`. 43 | NWChem is a massive program, so the build may take upwards of an hour depending 44 | on the system. 45 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/nwchem-mpi/nwchem.cwl: -------------------------------------------------------------------------------- 1 | # This workflow has a fixed number of tasks and a fixed container type 2 | class: Workflow 3 | cwlVersion: v1.0 4 | 5 | inputs: 6 | nw_file: string 7 | 8 | outputs: 9 | nw_stdout: 10 | type: File 11 | outputSource: nwchem/nw_stdout 12 | 13 | steps: 14 | nwchem: 15 | run: nwchem_bin.cwl 16 | in: 17 | nw_file: nw_file 18 | out: [nw_stdout] 19 | hints: 20 | DockerRequirement: 21 | # This is an x86 container (it's about ~1200MB so it will take a while to pull) 22 | dockerPull: "jtronge/nwchem:05aafc87223af82f58865d8b0f924dabd1adacbc" 23 | beeflow:MPIRequirement: 24 | nodes: 1 25 | ntasks: 2 26 | version: pmix_v3 27 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/nwchem-mpi/nwchem.yml: -------------------------------------------------------------------------------- 1 | # nw_file: "/nwchem/examples/tcepolar/ccsd_polar_small.nw" 2 | # this input seems like a good quick-running test case 3 | nw_file: "/nwchem/QA/tests/h2o_bnl/h2o_bnl.nw" 4 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/nwchem-mpi/nwchem_bin.cwl: -------------------------------------------------------------------------------- 1 | class: CommandLineTool 2 | baseCommand: [/opt/nwchem/bin/nwchem] 3 | stdout: nwchem_stdout.txt 4 | inputs: 5 | nw_file: 6 | type: string? 
7 | inputBinding: 8 | position: 1 9 | outputs: 10 | nw_stdout: 11 | type: stdout 12 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/pennant-build/Dockerfile.pennant-graph-x86_64: -------------------------------------------------------------------------------- 1 | # Build a container with matplotlib for graphing 2 | # 3 | # `ch-image build --force -f Dockerfile.pennant-graph-x86_64 -t pennant-graph .` 4 | FROM almalinux:8 5 | 6 | RUN dnf update \ 7 | && dnf install -y \ 8 | gcc \ 9 | gcc-c++ \ 10 | binutils \ 11 | libtool \ 12 | autoconf \ 13 | automake \ 14 | cmake \ 15 | pkgconf \ 16 | bzip2-devel \ 17 | zlib-devel \ 18 | libjpeg-devel \ 19 | libpng-devel \ 20 | python3 \ 21 | python3-devel 22 | 23 | RUN python3 -m venv /venv \ 24 | && echo ". /venv/bin/activate" >> /etc/profile.d/venv.sh \ 25 | && . /venv/bin/activate \ 26 | && pip install matplotlib 27 | 28 | COPY graph_pennant.py graph_pennant.sh / 29 | 30 | RUN chmod 755 /graph_pennant.sh 31 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/pennant-build/graph.cwl: -------------------------------------------------------------------------------- 1 | cwlVersion: v1.0 2 | class: CommandLineTool 3 | 4 | baseCommand: /graph_pennant.sh 5 | 6 | inputs: 7 | out1node: 8 | type: File 9 | inputBinding: 10 | position: 1 11 | out2node: 12 | type: File 13 | inputBinding: 14 | position: 2 15 | out4node: 16 | type: File 17 | inputBinding: 18 | position: 3 19 | out8node: 20 | type: File 21 | inputBinding: 22 | position: 4 23 | outputs: 24 | image: 25 | type: File 26 | outputBinding: 27 | glob: graph.png 28 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/pennant-build/graph_pennant.py: -------------------------------------------------------------------------------- 1 | """Graph the output of a PENNANT workflow.""" 2 | 3 | # Disable C0103: This is just a simple script, not all globals should be UPPER_CASE here 4 | # pylint:disable=C0103 5 | 6 | import re 7 | import sys 8 | import matplotlib.pyplot as plt 9 | 10 | 11 | results = [] 12 | for fname in sys.argv[1:]: 13 | pe_count = 0 14 | times = [] 15 | with open(fname, encoding='utf-8') as fp: 16 | for line in fp: 17 | # Check for the PE count 18 | m_pe_count = re.match(r'Running on (\d+) MPI PE\(s\)', line) 19 | if m_pe_count: 20 | pe_count = int(m_pe_count.group(1)) 21 | continue 22 | # Check for an End cyle line 23 | if not line.startswith('End cycle'): 24 | continue 25 | _, _, _, wall = line.split(',') 26 | _, time = wall.split('=') 27 | time = float(time.strip()) 28 | times.append(time) 29 | results.append({ 30 | 'pe_count': pe_count, 31 | 'average_wall_time': sum(times) / len(times), 32 | }) 33 | 34 | # The node counts 35 | x = [str(result['pe_count']) for result in results] 36 | # Average wall for cycle 37 | y = [result['average_wall_time'] for result in results] 38 | fig, ax = plt.subplots() 39 | ax.plot(x, y) 40 | ax.set_title('PENNANT Workflow Run') 41 | ax.set_xlabel('Node count') 42 | ax.set_ylabel('Average wall time for cycle') 43 | # Save to a png file 44 | fig.savefig('graph.png') 45 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/pennant-build/graph_pennant.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # Wrapper to make sure the environment is set up 3 | 4 | . 
/venv/bin/activate 5 | python3 /graph_pennant.py $@ 6 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/pennant-build/pennant.yml: -------------------------------------------------------------------------------- 1 | pnt: '/PENNANT/test/sedovbig/sedovbig.pnt' 2 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/pennant-build/pennant_1_node.cwl: -------------------------------------------------------------------------------- 1 | cwlVersion: v1.0 2 | class: CommandLineTool 3 | 4 | baseCommand: /PENNANT/build/pennant 5 | 6 | inputs: 7 | pnt: 8 | type: File 9 | inputBinding: {} 10 | stdout: pennant_1_node.out 11 | outputs: 12 | output: 13 | type: stdout 14 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/pennant-build/pennant_2_node.cwl: -------------------------------------------------------------------------------- 1 | cwlVersion: v1.0 2 | class: CommandLineTool 3 | 4 | baseCommand: /PENNANT/build/pennant 5 | 6 | inputs: 7 | pnt: 8 | type: File 9 | inputBinding: {} 10 | stdout: pennant_2_node.out 11 | outputs: 12 | output: 13 | type: stdout 14 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/pennant-build/pennant_4_node.cwl: -------------------------------------------------------------------------------- 1 | cwlVersion: v1.0 2 | class: CommandLineTool 3 | 4 | baseCommand: /PENNANT/build/pennant 5 | 6 | inputs: 7 | pnt: 8 | type: File 9 | inputBinding: {} 10 | stdout: pennant_4_node.out 11 | outputs: 12 | output: 13 | type: stdout 14 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/pennant-build/pennant_8_node.cwl: -------------------------------------------------------------------------------- 1 | cwlVersion: v1.0 2 | class: CommandLineTool 3 | 4 | baseCommand: /PENNANT/build/pennant 5 | 6 | inputs: 7 | pnt: 8 | type: File 9 | inputBinding: {} 10 | stdout: pennant_8_node.out 11 | outputs: 12 | output: 13 | type: stdout 14 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/pennant/graph.cwl: -------------------------------------------------------------------------------- 1 | cwlVersion: v1.0 2 | class: CommandLineTool 3 | 4 | baseCommand: /graph_pennant.sh 5 | 6 | inputs: 7 | out1node: 8 | type: File 9 | inputBinding: 10 | position: 1 11 | out2node: 12 | type: File 13 | inputBinding: 14 | position: 2 15 | out4node: 16 | type: File 17 | inputBinding: 18 | position: 3 19 | outputs: 20 | image: 21 | type: File 22 | outputBinding: 23 | glob: graph.png 24 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/pennant/pennant.yml: -------------------------------------------------------------------------------- 1 | pnt: '/PENNANT/test/sedovbig/sedovbig.pnt' 2 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/pennant/pennant_1_node.cwl: -------------------------------------------------------------------------------- 1 | cwlVersion: v1.0 2 | class: CommandLineTool 3 | 4 | baseCommand: /PENNANT/build/pennant 5 | 6 | inputs: 7 | pnt: 8 | type: File 9 | inputBinding: {} 10 | stdout: pennant_1_node.out 11 | outputs: 12 | output: 13 | type: stdout 14 | 
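pennant_1_node.cwl above and the 2- and 4-node variants that follow are identical apart from the stdout file name each step writes; the per-run node count is not set in these tool files at all but in the step hints of pennant_wf.cwl (included below), along the lines of:

```
hints:
  beeflow:MPIRequirement:
    nodes: 2   # 1, 2 or 4, matching the step name
  DockerRequirement:
    beeflow:useContainer: "$HOME/img/pennant.tar.gz"
```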
-------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/pennant/pennant_2_node.cwl: -------------------------------------------------------------------------------- 1 | cwlVersion: v1.0 2 | class: CommandLineTool 3 | 4 | baseCommand: /PENNANT/build/pennant 5 | 6 | inputs: 7 | pnt: 8 | type: File 9 | inputBinding: {} 10 | stdout: pennant_2_node.out 11 | outputs: 12 | output: 13 | type: stdout 14 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/pennant/pennant_4_node.cwl: -------------------------------------------------------------------------------- 1 | cwlVersion: v1.0 2 | class: CommandLineTool 3 | 4 | baseCommand: /PENNANT/build/pennant 5 | 6 | inputs: 7 | pnt: 8 | type: File 9 | inputBinding: {} 10 | stdout: pennant_4_node.out 11 | outputs: 12 | output: 13 | type: stdout 14 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/pennant/pennant_wf.cwl: -------------------------------------------------------------------------------- 1 | cwlVersion: v1.2 2 | class: Workflow 3 | 4 | inputs: 5 | pnt: File 6 | 7 | outputs: 8 | output_1_node: 9 | type: File 10 | outputSource: 1_node/output 11 | output_2_node: 12 | type: File 13 | outputSource: 2_node/output 14 | output_4_node: 15 | type: File 16 | outputSource: 4_node/output 17 | image: 18 | type: File 19 | outputSource: graph/image 20 | 21 | steps: 22 | 1_node: 23 | run: pennant_1_node.cwl 24 | in: 25 | pnt: pnt 26 | out: [output] 27 | hints: 28 | beeflow:MPIRequirement: 29 | nodes: 1 30 | DockerRequirement: 31 | beeflow:useContainer: "$HOME/img/pennant.tar.gz" 32 | 2_node: 33 | run: pennant_2_node.cwl 34 | in: 35 | pnt: pnt 36 | out: [output] 37 | hints: 38 | beeflow:MPIRequirement: 39 | nodes: 2 40 | DockerRequirement: 41 | beeflow:useContainer: "$HOME/img/pennant.tar.gz" 42 | 4_node: 43 | run: pennant_4_node.cwl 44 | in: 45 | pnt: pnt 46 | out: [output] 47 | hints: 48 | beeflow:MPIRequirement: 49 | nodes: 4 50 | DockerRequirement: 51 | beeflow:useContainer: "$HOME/img/pennant.tar.gz" 52 | graph: 53 | run: graph.cwl 54 | in: 55 | out1node: 1_node/output 56 | out2node: 2_node/output 57 | out4node: 4_node/output 58 | out: [image] 59 | hints: 60 | DockerRequirement: 61 | beeflow:useContainer: "$HOME/img/pennant-graph.tar.gz" 62 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | filterwarnings = 3 | ignore::DeprecationWarning:flask_restful 4 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/simple-workflows/README.md: -------------------------------------------------------------------------------- 1 | Simple Workflows: 2 | 3 | Files: 4 | * `cancel.cwl`: A workflow which sleeps for 20 seconds, long enough to demonstrate the cancel functionality BEE provides. 5 | 6 | 7 | Directories: 8 | * `grep-wordcount/`: A workflow which parses an input file (`lorem.txt`) and counts the number of lines which contain the literal string "`integer`". 
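Editor's note: as a quick sanity check before submitting these simple workflows to BEE, the definitions can be run through the CWL reference validator, the same `cwltool --validate` usage described under `beeflow/data/cwl/cwl_validation/`. Note that BEE-specific hints such as `beeflow:copyContainer` are extensions, so the reference tool may warn about them; a minimal sketch:

```sh
# Validate the simple workflows with the CWL reference implementation.
cwltool --validate cancel.cwl
cwltool --validate grep-wordcount/gc.cwl
```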
9 | 10 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/simple-workflows/cancel.cwl: -------------------------------------------------------------------------------- 1 | # -*- mode: YAML; -*- 2 | 3 | class: Workflow 4 | cwlVersion: v1.0 5 | 6 | inputs: 7 | pattern: string 8 | infile: File 9 | 10 | outputs: 11 | grep_file: 12 | type: File 13 | outputSource: grep/outfile 14 | count_file: 15 | type: File 16 | outputSource: wc/outfile 17 | 18 | steps: 19 | grep: 20 | run: 21 | class: CommandLineTool 22 | inputs: 23 | pattern: 24 | type: string 25 | default: "integer" 26 | inputBinding: {position: 0} 27 | infile: 28 | type: File 29 | default: lorem.txt 30 | inputBinding: {position: 1} 31 | outputs: 32 | outfile: stdout 33 | stdout: grepout.txt 34 | baseCommand: "sleep 20; grep" 35 | in: 36 | pattern: pattern 37 | infile: infile 38 | out: [outfile] 39 | 40 | wc: 41 | run: 42 | class: CommandLineTool 43 | inputs: 44 | infile: 45 | type: File 46 | default: grepout.txt 47 | inputBinding: {position: 1} 48 | outputs: 49 | outfile: stdout 50 | stdout: counts.txt 51 | baseCommand: "wc -l" 52 | in: 53 | infile: grep/outfile 54 | out: [outfile] 55 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/simple-workflows/grep-wordcount/README.md: -------------------------------------------------------------------------------- 1 | Simple example of a two step workflow. 2 | 3 | gc.cwl - A simple workflow that greps a file (lorem.txt) for a value, using a container. 4 | gc-nc.cwl - A simple workflow that greps a file (lorem.txt) for a value, no container. 5 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/simple-workflows/grep-wordcount/gc-nc.cwl: -------------------------------------------------------------------------------- 1 | # -*- mode: YAML; -*- 2 | 3 | class: Workflow 4 | cwlVersion: v1.0 5 | 6 | inputs: 7 | pattern: string 8 | infile: File 9 | 10 | outputs: 11 | grep_file: 12 | type: File 13 | outputSource: grep/outfile 14 | count_file: 15 | type: File 16 | outputSource: wc/outfile 17 | 18 | steps: 19 | grep: 20 | run: 21 | class: CommandLineTool 22 | inputs: 23 | pattern: 24 | type: string 25 | default: "integer" 26 | inputBinding: {position: 0} 27 | infile: 28 | type: File 29 | default: lorem.txt 30 | inputBinding: {position: 1} 31 | outputs: 32 | outfile: stdout 33 | stdout: grepout.txt 34 | baseCommand: "grep integer $HOME/lorem.txt > $HOME/grepout.txt" 35 | hints: 36 | in: 37 | pattern: pattern 38 | infile: infile 39 | out: [outfile] 40 | 41 | wc: 42 | run: 43 | class: CommandLineTool 44 | inputs: 45 | infile: 46 | type: File 47 | default: grepout.txt 48 | inputBinding: {position: 1} 49 | outputs: 50 | outfile: stdout 51 | stdout: counts.txt 52 | baseCommand: "wc -l $HOME/grepout.txt > $HOME/counts.txt" 53 | hints: 54 | in: 55 | infile: grep/outfile 56 | out: [outfile] 57 | -------------------------------------------------------------------------------- /beeflow/data/cwl/bee_workflows/simple-workflows/grep-wordcount/gc.cwl: -------------------------------------------------------------------------------- 1 | # -*- mode: YAML; -*- 2 | 3 | class: Workflow 4 | cwlVersion: v1.0 5 | 6 | inputs: 7 | pattern: string 8 | infile: File 9 | 10 | outputs: 11 | grep_file: 12 | type: File 13 | outputSource: grep/outfile 14 | count_file: 15 | type: File 16 | outputSource: wc/outfile 17 | 18 | steps: 19 | grep: 20 | run: 21 
| class: CommandLineTool 22 | inputs: 23 | pattern: 24 | type: string 25 | default: "integer" 26 | inputBinding: {position: 0} 27 | infile: 28 | type: File 29 | default: lorem.txt 30 | inputBinding: {position: 1} 31 | outputs: 32 | outfile: stdout 33 | stdout: grepout.txt 34 | baseCommand: "sh -c 'grep integer lorem.txt > grepout.txt;sleep 15;ls -l|grep grepout.txt'" 35 | hints: 36 | DockerRequirement: 37 | beeflow:copyContainer: "/usr/projects/beedev/toss-tiny-3-5.tar" 38 | in: 39 | pattern: pattern 40 | infile: infile 41 | out: [outfile] 42 | 43 | wc: 44 | run: 45 | class: CommandLineTool 46 | inputs: 47 | infile: 48 | type: File 49 | default: grepout.txt 50 | inputBinding: {position: 1} 51 | outputs: 52 | outfile: stdout 53 | stdout: counts.txt 54 | baseCommand: "sh -c 'ls -l;sleep 10; wc -l grepout.txt > counts.txt'" 55 | hints: 56 | DockerRequirement: 57 | beeflow:copyContainer: "/usr/projects/beedev/toss-tiny-3-5.tar" 58 | in: 59 | infile: grep/outfile 60 | out: [outfile] 61 | -------------------------------------------------------------------------------- /beeflow/data/cwl/cwl_validation/README.md: -------------------------------------------------------------------------------- 1 | # Validating CWL behavior 2 | 3 | Working from CWL v1.1. It's often useful to test the CWL expected behavior by validating with CWL tool, ie- `cwltool --validate `. This directory is a home for all CWL tool validations which are not explicitly designed to run on BEE. These CWL examples are not designed to demonstrated the funcitonality of BEE, but to demonstrate the expected behavior of the CWL reference implementation. 4 | -------------------------------------------------------------------------------- /beeflow/data/cwl/cwl_validation/builder/dockerFile.cwl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env cwl-runner 2 | 3 | class: CommandLineTool 4 | cwlVersion: v1.0 5 | 6 | baseCommand: ['/bin/cat', '/etc/centos-release'] 7 | stdout: output.txt 8 | inputs: 9 | get-release: 10 | type: boolean 11 | inputBinding: 12 | position: 1 13 | outputs: 14 | release_output: 15 | type: stdout 16 | requirements: 17 | DockerRequirement: 18 | dockerFile: "Dockerfile" 19 | dockerImageId: cart:horse 20 | dockerPull: my_silly_container:my_sillytag 21 | -------------------------------------------------------------------------------- /beeflow/data/cwl/cwl_validation/builder/dockerPull.cwl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env cwl-runner 2 | 3 | class: CommandLineTool 4 | cwlVersion: v1.0 5 | 6 | baseCommand: ['/bin/cat', '/etc/centos-release'] 7 | stdout: output.txt 8 | inputs: 9 | get-release: 10 | type: boolean 11 | inputBinding: 12 | position: 1 13 | outputs: 14 | release_output: 15 | type: stdout 16 | requirements: 17 | DockerRequirement: 18 | dockerPull: centos:8 19 | -------------------------------------------------------------------------------- /beeflow/data/cwl/cwl_validation/grep-wordcount/gc.cwl: -------------------------------------------------------------------------------- 1 | # -*- mode: YAML; -*- 2 | 3 | class: Workflow 4 | cwlVersion: v1.0 5 | 6 | inputs: 7 | pattern: string 8 | infile: File 9 | 10 | outputs: 11 | grep_file: 12 | type: File 13 | outputSource: grep/outfile 14 | count_file: 15 | type: File 16 | outputSource: wc/outfile 17 | 18 | steps: 19 | grep: 20 | run: 21 | class: CommandLineTool 22 | inputs: 23 | pattern: 24 | type: string 25 | default: "integer" 26 | inputBinding: {position: 
0} 27 | infile: 28 | type: File 29 | default: lorem.txt 30 | inputBinding: {position: 1} 31 | outputs: 32 | outfile: stdout 33 | stdout: grepout.txt 34 | baseCommand: grep 35 | hints: 36 | DockerRequirement: 37 | beeflow:copyContainer: "/usr/projects/beedev/toss-tiny-3-5.tar" 38 | in: 39 | pattern: pattern 40 | infile: infile 41 | out: [outfile] 42 | 43 | wc: 44 | run: 45 | class: CommandLineTool 46 | inputs: 47 | infile: 48 | type: File 49 | default: grepout.txt 50 | inputBinding: {position: 1} 51 | outputs: 52 | outfile: stdout 53 | stdout: counts.txt 54 | baseCommand: "wc -l" 55 | hints: 56 | DockerRequirement: 57 | beeflow:copyContainer: "/usr/projects/beedev/toss-tiny-3-5.tar" 58 | in: 59 | infile: grep/outfile 60 | out: [outfile] 61 | -------------------------------------------------------------------------------- /beeflow/data/cwl/cwl_validation/helloworld.cwl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env cwl-runner 2 | 3 | cwlVersion: v1.0 4 | class: CommandLineTool 5 | baseCommand: echo 6 | inputs: 7 | message: 8 | type: string 9 | inputBinding: 10 | position: 1 11 | outputs: [] 12 | -------------------------------------------------------------------------------- /beeflow/data/cwl/cwl_validation/helloworld_input.yaml: -------------------------------------------------------------------------------- 1 | message: Hiihihihi 2 | -------------------------------------------------------------------------------- /beeflow/data/cwl/cwl_validation/ml-workflow/example-1/DAG_example-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lanl/BEE/d1d94c132dc23cf6f523556c5eb45d51ca0c6aac/beeflow/data/cwl/cwl_validation/ml-workflow/example-1/DAG_example-1.png -------------------------------------------------------------------------------- /beeflow/data/cwl/cwl_validation/ml-workflow/example-1/README.md: -------------------------------------------------------------------------------- 1 | ## Sample CWL Pipeline for testing purpose with CWL-Runner tool 2 | 3 | This directory contains a sample CWL pipeline that makes use of adding two numbers followed by multiplication. Interpretor used: CWL-Runner 4 | 5 | ### Prerequisites 6 | 7 | 1. 
CWL tool implementation 8 | 9 | Installing the official package using pip (this will also install 'cwltool' package as well): 10 | ``` 11 | pip install cwl-runner 12 | ``` 13 | OR from Source package as follows: 14 | ``` 15 | git clone https://github.com/common-workflow-language/cwltool.git 16 | cd cwltool && python setup.py install 17 | cd cwl-runner && python setup.py install 18 | ``` 19 | 20 | ### Run CWL Pipeline on the Command Line 21 | 22 | First validate cwl file for any CWL syntax errors: 23 | ``` 24 | cwl-runner --validate add_multiply_example_workflow.cwl --num1 20 --num2 50 25 | ``` 26 | Simple Command: 27 | ``` 28 | cwl-runner add_multiply_example_workflow.cwl --num1 20 --num2 50 29 | ``` 30 | -------------------------------------------------------------------------------- /beeflow/data/cwl/cwl_validation/ml-workflow/example-1/add.cwl: -------------------------------------------------------------------------------- 1 | cwlVersion: v1.0 2 | class: CommandLineTool 3 | baseCommand: ["python", "-m", "add"] 4 | 5 | inputs: 6 | x: 7 | type: int 8 | inputBinding: 9 | position: 1 10 | y: 11 | type: int 12 | inputBinding: 13 | position: 2 14 | 15 | stdout: cwl.output.json 16 | 17 | outputs: 18 | answer: 19 | type: int -------------------------------------------------------------------------------- /beeflow/data/cwl/cwl_validation/ml-workflow/example-1/add_multiply_example_workflow.cwl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env cwl-runner 2 | cwlVersion: v1.0 3 | class: Workflow 4 | inputs: 5 | num1: int 6 | num2: int 7 | outputs: 8 | final_answer: 9 | outputSource: multiply/answer 10 | type: int 11 | steps: 12 | add: 13 | run: add.cwl 14 | in: 15 | x: num1 16 | y: num2 17 | out: 18 | - answer 19 | multiply: 20 | run: multiply.cwl 21 | in: 22 | x: add/answer 23 | y: num2 24 | out: 25 | - answer 26 | -------------------------------------------------------------------------------- /beeflow/data/cwl/cwl_validation/ml-workflow/example-1/add_step1.py: -------------------------------------------------------------------------------- 1 | """Add Step.""" 2 | import json 3 | import click 4 | 5 | 6 | @click.command() 7 | @click.argument('x', type=int) 8 | @click.argument('y', type=int) 9 | def add(x, y): 10 | """Add.""" 11 | click.echo(json.dumps({'answer': x + y})) 12 | 13 | 14 | if __name__ == '__main__': 15 | add(x=1, y=2) 16 | -------------------------------------------------------------------------------- /beeflow/data/cwl/cwl_validation/ml-workflow/example-1/isolated test.cwl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env cwl-runner 2 | cwlVersion: v1.0 3 | class: CommandLineTool 4 | label: Run an embedded Python script 5 | 6 | baseCommand: python 7 | 8 | inputs: 9 | script: 10 | type: File 11 | inputBinding: 12 | position: 1 13 | default: 14 | class: File 15 | basename: "script.py" 16 | contents: |- 17 | cash = 256.75 18 | print("This costs ${}".format(cash)) 19 | 20 | outputs: 21 | results: 22 | type: stdout -------------------------------------------------------------------------------- /beeflow/data/cwl/cwl_validation/ml-workflow/example-1/job.yaml: -------------------------------------------------------------------------------- 1 | message: Hello world! 
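Editor's note: this `job.yaml` supplies a `message` input, which matches the `helloworld.cwl` tool elsewhere in `cwl_validation/` rather than the add/multiply workflow in this example. If a job file for `add_multiply_example_workflow.cwl` is wanted instead of the `--num1`/`--num2` command-line form shown in the README above, it would look roughly like the following sketch (the `add_multiply_job.yml` filename is hypothetical, not a file in the repository):

```sh
# Hypothetical job file for the add/multiply workflow; equivalent to
# passing --num1 20 --num2 50 on the command line.
cat > add_multiply_job.yml <<'EOF'
num1: 20
num2: 50
EOF
cwl-runner add_multiply_example_workflow.cwl add_multiply_job.yml
```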
-------------------------------------------------------------------------------- /beeflow/data/cwl/cwl_validation/ml-workflow/example-1/multiply.cwl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env cwl-runner 2 | cwlVersion: v1.0 3 | class: CommandLineTool 4 | baseCommand: ["python", "-m", "scriptcwl.examples.multiply"] 5 | 6 | inputs: 7 | x: 8 | type: int 9 | inputBinding: 10 | position: 1 11 | y: 12 | type: int 13 | inputBinding: 14 | position: 2 15 | 16 | stdout: cwl.output.json 17 | 18 | outputs: 19 | answer: 20 | type: int -------------------------------------------------------------------------------- /beeflow/data/cwl/cwl_validation/ml-workflow/example-1/multiply_step2.py: -------------------------------------------------------------------------------- 1 | """Multiply Step.""" 2 | 3 | import json 4 | import click 5 | 6 | 7 | @click.command() 8 | @click.argument('x', type=int) 9 | @click.argument('y', type=int) 10 | def multiply(x, y): 11 | """Multiply.""" 12 | click.echo(json.dumps({'answer': x * y})) 13 | 14 | 15 | if __name__ == '__main__': 16 | multiply(x=2, y=10) 17 | -------------------------------------------------------------------------------- /beeflow/data/cwl/cwl_validation/ml-workflow/example-1/workflow_generater_python.py: -------------------------------------------------------------------------------- 1 | """Workflow Generator.""" 2 | from scriptcwl import WorkflowGenerator 3 | 4 | with WorkflowGenerator() as wf: 5 | wf.load(steps_dir='/Users/raginigupta/PythonCodes/cwl') 6 | 7 | num1 = wf.add_input(num1='int') 8 | num2 = wf.add_input(num2='int') 9 | 10 | answer1 = wf.add(x=num1, y=num2) 11 | answer2 = wf.multiply(x=answer1, y=num2) 12 | 13 | wf.add_outputs(final_answer=answer2) 14 | 15 | wf.save('add_multiply_example_workflow.cwl') 16 | -------------------------------------------------------------------------------- /beeflow/data/cwl/cwl_validation/ml-workflow/machine_learning/decision_tree.py: -------------------------------------------------------------------------------- 1 | """Decision Tree.""" 2 | 3 | # Disable preserving code for now 4 | # pylint:disable=C0103,R1732,W0612,W0621 5 | 6 | # import json 7 | import pickle 8 | import click 9 | # import numpy as np 10 | # from sklearn.linear_model import LinearRegression 11 | from sklearn.tree import DecisionTreeClassifier 12 | # from sklearn.model_selection import train_test_split 13 | # from sklearn import metrics 14 | 15 | 16 | @click.command() 17 | @click.argument('x1', type=int) 18 | def reg(x1): 19 | """Reqression.""" 20 | # Import X and Y datasets 21 | X = pickle.load(open('/home/bee/cwl2/MyX.p', 'rb')) 22 | Y = pickle.load(open('/home/bee/cwl2/MyY.p', 'rb')) 23 | 24 | X = X.values 25 | 26 | print("My pickle X is", X) 27 | Y = Y.values 28 | print("My pickle Y is", Y) 29 | for i in range(x1): 30 | clf = DecisionTreeClassifier() 31 | clf1 = clf.fit(X, Y) 32 | print("Decision tree parameters") 33 | print(clf1) 34 | pickle.dump(clf1, open('clf1.p', 'wb')) 35 | 36 | 37 | if __name__ == '__main__': 38 | reg(x1=1) 39 | -------------------------------------------------------------------------------- /beeflow/data/cwl/cwl_validation/ml-workflow/machine_learning/decision_tree_output.txt: -------------------------------------------------------------------------------- 1 | My pickle X is [[ 0. 8. 9. ] 2 | [ 0. 8. 6. ] 3 | [ 5. 6. 7. ] 4 | [ 2. 10. 10. ] 5 | [ 7. 9. 6. ] 6 | [ 3. 7. 10. ] 7 | [10. 7.85714286 7. ] 8 | [11. 7. 8. 
]] 9 | My pickle Y is [50000 45000 60000 65000 70000 62000 72000 80000] 10 | Decision tree parameters 11 | DecisionTreeClassifier() 12 | -------------------------------------------------------------------------------- /beeflow/data/cwl/cwl_validation/ml-workflow/machine_learning/decision_tree_tool.cwl: -------------------------------------------------------------------------------- 1 | cwlVersion: v1.0 2 | class: CommandLineTool 3 | baseCommand: ["python", "/home/bee/cwl2/decision_tree.py"] 4 | 5 | inputs: 6 | x: 7 | type: int 8 | inputBinding: 9 | position: 1 10 | 11 | stdout: decision_tree_output.txt 12 | 13 | outputs: 14 | answer: 15 | type: stdout 16 | -------------------------------------------------------------------------------- /beeflow/data/cwl/cwl_validation/ml-workflow/machine_learning/expectedValue.txt: -------------------------------------------------------------------------------- 1 | Expected Salary from Regression is $ [45618.17859389] 2 | Expected Salary from DT is $ [70000] 3 | {"Predicted Salary (S) by Regression and DT Model": ["[45618.1785938945]", "[70000]"]} 4 | -------------------------------------------------------------------------------- /beeflow/data/cwl/cwl_validation/ml-workflow/machine_learning/hiring1.txt: -------------------------------------------------------------------------------- 1 | experience,test_score,interview_score,salary($) 2 | 0,8,9,50000 3 | 0,8,6,45000 4 | 5,6,7,60000 5 | 2,10,10,65000 6 | 7,9,6,70000 7 | 3,7,10,62000 8 | 10,,7,72000 9 | 11,7,8,80000 10 | -------------------------------------------------------------------------------- /beeflow/data/cwl/cwl_validation/ml-workflow/machine_learning/linear_regress_output.txt: -------------------------------------------------------------------------------- 1 | My pickle X is [[ 0. 8. 9. ] 2 | [ 0. 8. 6. ] 3 | [ 5. 6. 7. ] 4 | [ 2. 10. 10. ] 5 | [ 7. 9. 6. ] 6 | [ 3. 7. 10. ] 7 | [10. 7.85714286 7. ] 8 | [11. 7. 8. ]] 9 | My pickle Y is [50000 45000 60000 65000 70000 62000 72000 80000] 10 | Learning regression line parameters. 
11 | 2312.515795927383 17237.330313727158 12 | -------------------------------------------------------------------------------- /beeflow/data/cwl/cwl_validation/ml-workflow/machine_learning/linear_regression.py: -------------------------------------------------------------------------------- 1 | """Linear Regression.""" 2 | 3 | # Disable preserving code for now 4 | # pylint:disable=C0103,R1732,W0612,W0621 5 | 6 | import json 7 | import pickle 8 | import click 9 | import numpy as np 10 | from sklearn.linear_model import LinearRegression 11 | 12 | 13 | @click.command() 14 | @click.argument('x1', type=int) # Enter number of iteration 15 | def reg(x1): 16 | """Linear Regression.""" 17 | X = pickle.load(open('/home/bee/cwl2/MyX.p', 'rb')) 18 | Y = pickle.load(open('/home/bee/cwl2/MyY.p', 'rb')) 19 | 20 | X = X.values 21 | 22 | print("My pickle X is", X) 23 | Y = Y.values 24 | print("My pickle Y is", Y) 25 | m = [] 26 | y_in = [] 27 | for i in range(x1): 28 | reg = LinearRegression().fit(X, Y) 29 | 30 | pickle.dump(reg, open('mymodel.p', 'wb')) 31 | m.append(reg.coef_) 32 | y_in.append(reg.intercept_) 33 | # m, y_in = reg.coef_, reg.intercept_ 34 | 35 | average_slope = np.mean(m) 36 | average_y_intercept = np.mean(y_in) 37 | m_list = average_slope.tolist() 38 | m_json = json.dumps(m_list) 39 | y_in_list = average_y_intercept.tolist() 40 | y_in_json = json.dumps(y_in_list) 41 | 42 | print('Learning regression line parameters.') 43 | print(average_slope, average_y_intercept) # model parameters i.e. slope and y-intercept 44 | 45 | 46 | if __name__ == '__main__': 47 | reg(x1=1) 48 | -------------------------------------------------------------------------------- /beeflow/data/cwl/cwl_validation/ml-workflow/machine_learning/machinelearning_pipeline.cwl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env cwl-runner 2 | cwlVersion: v1.0 3 | class: Workflow 4 | inputs: 5 | experience: int 6 | interview: int 7 | test: int 8 | iterations: int 9 | datasetpath: string 10 | outputs: 11 | expected_values: 12 | outputSource: predict/answer 13 | type: File 14 | steps: 15 | read: 16 | run: /home/bee/cwl2/read_dataset_tool.cwl 17 | in: 18 | x: datasetpath 19 | out: 20 | - answer 21 | regress: 22 | run: /home/bee/cwl2/regress_tool.cwl 23 | in: 24 | x: iterations 25 | out: 26 | - answer 27 | decisiontree: 28 | run: /home/bee/cwl2/decision_tree_tool.cwl 29 | in: 30 | x: iterations 31 | out: 32 | - answer 33 | predict: 34 | run: /home/bee/cwl2/predict_tool.cwl 35 | in: 36 | x: experience 37 | y: interview 38 | z: test 39 | out: 40 | - answer 41 | -------------------------------------------------------------------------------- /beeflow/data/cwl/cwl_validation/ml-workflow/machine_learning/out.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lanl/BEE/d1d94c132dc23cf6f523556c5eb45d51ca0c6aac/beeflow/data/cwl/cwl_validation/ml-workflow/machine_learning/out.PNG -------------------------------------------------------------------------------- /beeflow/data/cwl/cwl_validation/ml-workflow/machine_learning/predict_code.py: -------------------------------------------------------------------------------- 1 | """Predict Code.""" 2 | 3 | # Disable preserving code for now 4 | # pylint:disable=C0103,R1732 5 | 6 | import json 7 | import pickle 8 | import click 9 | # import numpy as np 10 | # from sklearn.linear_model import LinearRegression 11 | # from sklearn.tree import DecisionTreeClassifier 12 | # from 
sklearn.model_selection import train_test_split 13 | # from sklearn import metrics 14 | 15 | 16 | @click.command() 17 | @click.argument('e', type=float) 18 | @click.argument('i', type=float) 19 | @click.argument('t', type=float) 20 | def pred(e, i, t): 21 | """Predict.""" 22 | # Import Linear Regression model 23 | regression_model = pickle.load(open('/home/bee/cwl2/mymodel.p', 'rb')) 24 | # Import Decision Tree classifier model 25 | dt_model = pickle.load(open('/home/bee/cwl2/clf1.p', 'rb')) 26 | predict_linear_regression = regression_model.predict([[e, i, t]]) 27 | predict_linear_regression_list = predict_linear_regression.tolist() 28 | predict_linear_regression_json_str = json.dumps(predict_linear_regression_list) 29 | 30 | predict_decision_tree = dt_model.predict([[e, i, t]]) 31 | predict_decision_tree_list = predict_decision_tree.tolist() 32 | predict_decision_tree_json_str = json.dumps(predict_decision_tree_list) 33 | 34 | print("Expected Salary from Regression is $", predict_linear_regression) 35 | print("Expected Salary from DT is $", predict_decision_tree) 36 | 37 | key = 'Predicted Salary (S) by Regression and DT Model' 38 | values = (predict_linear_regression_json_str, predict_decision_tree_json_str) 39 | click.echo(json.dumps({key: values})) 40 | 41 | 42 | if __name__ == '__main__': 43 | pred(e=4, i=5, t=6) 44 | -------------------------------------------------------------------------------- /beeflow/data/cwl/cwl_validation/ml-workflow/machine_learning/predict_tool.cwl: -------------------------------------------------------------------------------- 1 | cwlVersion: v1.0 2 | class: CommandLineTool 3 | baseCommand: ["python", "/home/bee/cwl2/predict_code.py"] 4 | 5 | inputs: 6 | x: 7 | type: int 8 | inputBinding: 9 | position: 1 10 | y: 11 | type: int 12 | inputBinding: 13 | position: 2 14 | z: 15 | type: int 16 | inputBinding: 17 | position: 3 18 | 19 | stdout: expectedValue.txt 20 | 21 | outputs: 22 | answer: 23 | type: stdout 24 | -------------------------------------------------------------------------------- /beeflow/data/cwl/cwl_validation/ml-workflow/machine_learning/read_dataset.py: -------------------------------------------------------------------------------- 1 | """Read Data Set.""" 2 | 3 | # Disables preserving code for now 4 | # pylint:disable=C0103,R1732,W0612 5 | 6 | import pickle 7 | import click 8 | # import json 9 | import pandas as pd 10 | # import numpy as np 11 | # from sklearn.linear_model import LinearRegression 12 | 13 | 14 | @click.command() 15 | @click.argument('y3', type=str) # y3 is the input argument for path to dataset 16 | def reader(y3): 17 | """Reader.""" 18 | dataset = pd.read_csv(y3) 19 | 20 | print("this dataset", dataset) 21 | dataset['experience'].fillna(0, inplace=True) 22 | dataset['test_score'].fillna(dataset['test_score'].mean(), inplace=True) 23 | # Extracting rows (X-> independent variables and Y-> dependent/target variable) 24 | 25 | X = dataset.iloc[:, :3] # Extracting first three columns from the dataset 26 | print('My X', X) 27 | 28 | Y = dataset.iloc[:, -1] # Extracting last column from the dataset for target variable 29 | 30 | # Exporting X and Y as pickle files on to the disk 31 | pickle.dump(X, open("MyX.p", "wb")) 32 | pickle.dump(Y, open("MyY.p", "wb")) 33 | df1 = X.to_json() 34 | df2 = Y.to_json() 35 | 36 | 37 | if __name__ == '__main__': 38 | reader(y3="") 39 | -------------------------------------------------------------------------------- /beeflow/data/cwl/cwl_validation/ml-workflow/machine_learning/read_dataset_tool.cwl: 
-------------------------------------------------------------------------------- 1 | cwlVersion: v1.0 2 | class: CommandLineTool 3 | baseCommand: ["python", "/home/bee/cwl2/read_dataset.py"] 4 | 5 | inputs: 6 | x: 7 | type: string 8 | inputBinding: 9 | position: 1 10 | 11 | stdout: output_read_dataset.txt 12 | 13 | outputs: 14 | answer: 15 | type: stdout 16 | -------------------------------------------------------------------------------- /beeflow/data/cwl/cwl_validation/ml-workflow/machine_learning/regress_tool.cwl: -------------------------------------------------------------------------------- 1 | cwlVersion: v1.0 2 | class: CommandLineTool 3 | baseCommand: ["python", "/home/bee/cwl2/linear_regression.py"] 4 | 5 | inputs: 6 | x: 7 | type: int 8 | inputBinding: 9 | position: 1 10 | 11 | stdout: linear_regress_output.txt 12 | 13 | outputs: 14 | answer: 15 | type: stdout 16 | -------------------------------------------------------------------------------- /beeflow/data/dockerfiles/Dockerfile.builder_demo: -------------------------------------------------------------------------------- 1 | FROM git.lanl.gov:5050/trandles/baseimages/centos:7 2 | CMD cat /etc/centos-release 3 | -------------------------------------------------------------------------------- /beeflow/data/dockerfiles/Dockerfile.clamr-ffmpeg: -------------------------------------------------------------------------------- 1 | # Dockerfile.clamr-ffmpeg 2 | # Developed on Chicoma @lanl 3 | # Patricia Grubel 4 | 5 | FROM debian:11 6 | 7 | 8 | RUN apt-get update && \ 9 | apt-get install -y wget gnupg git cmake ffmpeg g++ make openmpi-bin libopenmpi-dev libpng-dev libpng16-16 libpng-tools imagemagick libmagickwand-6.q16-6 libmagickwand-6.q16-dev 10 | 11 | RUN git clone https://github.com/lanl/CLAMR.git 12 | RUN cd CLAMR && cmake . && make clamr_cpuonly 13 | -------------------------------------------------------------------------------- /beeflow/data/dockerfiles/Dockerfile.clamr-lanl-x86_64: -------------------------------------------------------------------------------- 1 | FROM git.lanl.gov:5050/trandles/baseimages/centos-buildclamr:7 2 | 3 | RUN git clone https://github.com/lanl/CLAMR.git 4 | RUN cd CLAMR && cmake3 . && make 5 | -------------------------------------------------------------------------------- /beeflow/data/dockerfiles/Dockerfile.clamr-ppc64le: -------------------------------------------------------------------------------- 1 | # Dockerfile.clamr-ppc64le 2 | # Developed on Summit at ORNL. Works with Charliecloud. 3 | # Tim Randles 4 | 5 | FROM ppc64le/debian:stable-slim 6 | 7 | # Makes apt happy 8 | RUN echo 'APT::Sandbox::User "root";' > /etc/apt/apt.conf.d/no-sandbox 9 | RUN apt-config dump | fgrep 'APT::Sandbox' 10 | 11 | # Install prerequisites for neo4j 12 | RUN apt-get update && \ 13 | mkdir -p /usr/share/man/man1 && \ 14 | groupadd staff && \ 15 | apt-get install -y pseudo && \ 16 | fakeroot apt-get install -y wget gnupg git cmake ffmpeg g++ make openmpi-bin libopenmpi-dev libpng-dev libpng16-16 libpng-tools imagemagick libmagickwand-6.q16-6 libmagickwand-6.q16-dev 17 | 18 | RUN git clone https://github.com/lanl/CLAMR.git 19 | RUN cd CLAMR && cmake . 
&& make clamr_cpuonly 20 | -------------------------------------------------------------------------------- /beeflow/data/dockerfiles/Dockerfile.deb9ompi-x86_64: -------------------------------------------------------------------------------- 1 | FROM debian:stretch 2 | 3 | ENV DEBIAN_FRONTEND noninteractive 4 | 5 | RUN apt-get update \ 6 | && apt-get install -y apt-utils 7 | 8 | RUN apt-get install -y \ 9 | file \ 10 | g++ \ 11 | gcc \ 12 | gfortran \ 13 | less \ 14 | libdb5.3-dev \ 15 | make \ 16 | wget \ 17 | git \ 18 | openmpi-common \ 19 | libopenmpi-dev 20 | 21 | -------------------------------------------------------------------------------- /beeflow/data/dockerfiles/Dockerfile.neo4j: -------------------------------------------------------------------------------- 1 | FROM neo4j:5.17-community 2 | RUN cp /var/lib/neo4j/labs/apoc-5.17.0-core.jar /var/lib/neo4j/plugins/apoc-5.17.8-core.jar 3 | -------------------------------------------------------------------------------- /beeflow/data/dockerfiles/README.md: -------------------------------------------------------------------------------- 1 | # Dockerfiles 2 | 3 | This is a home for Dockerfiles which have been used during the execution of workflows by the BEE development team. 4 | 5 | ## VASP Build Requirements 6 | 7 | The VASP Dockerfile requires the following files to build successfully: 8 | - VASP 5.4.4 source code (`vasp.5.4.4.tar.gz`) 9 | - VASP configurable Makefile (`makefile.include`) 10 | - ScaLAPACK 1.0.3 installer (`scalapack_installer.tgz`) 11 | - `symbol.inc-5.4.4.patch` 12 | - BEE Application SSH Keys 13 | - `keys/config` 14 | - `keys/id_rsa` 15 | - `keys/id_rsa.pub` 16 | 17 | Dockerfile.clamr-ffmpeg works on Chicoma a LANL resource on the turquoise network (ffmpeg is built in the container) 18 | -------------------------------------------------------------------------------- /beeflow/data/dockerfiles/comd-pmix-support/Dockerfile.comd-x86_64-wpmix: -------------------------------------------------------------------------------- 1 | FROM openmpi-3.1.5 2 | 3 | ENV DEBIAN_FRONTEND=noninteractive 4 | RUN apt-get -y update && \ 5 | apt-get -y install git cmake gcc g++ gfortran 6 | 7 | WORKDIR / 8 | 9 | # Build the MPI version 10 | ARG REPO=https://github.com/ECP-copa/CoMD.git 11 | RUN git clone $REPO && \ 12 | cd CoMD && \ 13 | cp src-mpi/Makefile.vanilla src-mpi/Makefile && \ 14 | make -C src-mpi 15 | 16 | -------------------------------------------------------------------------------- /beeflow/data/dockerfiles/comd-pmix-support/Dockerfile.debian: -------------------------------------------------------------------------------- 1 | FROM debian:bullseye 2 | 3 | RUN apt-get update -y \ 4 | && apt-get install -y \ 5 | autoconf \ 6 | automake \ 7 | coreutils \ 8 | gcc \ 9 | g++ \ 10 | gfortran \ 11 | git \ 12 | make \ 13 | rsync \ 14 | wget 15 | 16 | # add /usr/local to linker search paths. 17 | RUN echo "/usr/local/lib" > /etc/ld.so.conf.d/usrlocal.conf \ 18 | && echo "/usr/local/lib64" >> /etc/ld.so.conf.d/usrlocal.conf \ 19 | && ldconfig 20 | -------------------------------------------------------------------------------- /beeflow/data/dockerfiles/comd-pmix-support/README.md: -------------------------------------------------------------------------------- 1 | These dockerfiles enable one to build comd with pmix support to use on a system that 2 | does not have slurm with pmix support such as the Darwin test bed at LANL. 
3 | 4 | You can build a container using charliecloud (vers 0.27 +) 5 | 6 | ``` 7 | ch-image build -f Dockerfile.debian . --force 8 | ch-image build -f Dockerfile.openmpi-3.1.5 . 9 | ch-image build -f Dockerfile.comd-x86_64-wpmix . 10 | 11 | # List images 12 | ch-image list 13 | 14 | # Tar resultant image 15 | ch-convert -o tar comd-x86_64-wpmix /usr/projects/beedev/comd/comd-x86_64-wpmix.tar.gz 16 | ``` 17 | -------------------------------------------------------------------------------- /beeflow/data/dockerfiles/comd-pmix-support/dont-init-ucx-on-intel-cray.patch: -------------------------------------------------------------------------------- 1 | diff --git a/ompi/mca/pml/ucx/pml_ucx_component.c b/ompi/mca/pml/ucx/pml_ucx_component.c 2 | index ff0040f18c..e8cf903860 100644 3 | --- a/ompi/mca/pml/ucx/pml_ucx_component.c 4 | +++ b/ompi/mca/pml/ucx/pml_ucx_component.c 5 | @@ -14,6 +14,9 @@ 6 | 7 | #include 8 | 9 | +#ifdef HAVE_UNISTD_H 10 | +#include 11 | +#endif 12 | 13 | static int mca_pml_ucx_component_register(void); 14 | static int mca_pml_ucx_component_open(void); 15 | @@ -131,6 +134,11 @@ mca_pml_ucx_component_init(int* priority, bool enable_progress_threads, 16 | { 17 | int ret; 18 | 19 | + if ((0 == access("/sys/class/ugni/", F_OK) || (0 == access("/sys/class/hfi1/", F_OK)))){ 20 | + PML_UCX_VERBOSE(1, "Cray or Intel HSN detected, removing UCX from consideration"); 21 | + return NULL; 22 | + } 23 | + 24 | if ( (ret = mca_pml_ucx_init()) != 0) { 25 | return NULL; 26 | } 27 | -------------------------------------------------------------------------------- /beeflow/data/dockerfiles/pennant-graph/Dockerfile.pennant-graph-x86_64: -------------------------------------------------------------------------------- 1 | # Build a container with matplotlib for graphing 2 | # 3 | # `ch-image build --force -f Dockerfile.pennant-graph-x86_64 -t pennant-graph .` 4 | FROM almalinux:8 5 | 6 | RUN dnf update \ 7 | && dnf install -y \ 8 | gcc \ 9 | gcc-c++ \ 10 | binutils \ 11 | libtool \ 12 | autoconf \ 13 | automake \ 14 | cmake \ 15 | pkgconf \ 16 | bzip2-devel \ 17 | zlib-devel \ 18 | libjpeg-devel \ 19 | libpng-devel \ 20 | python3 \ 21 | python3-devel 22 | 23 | RUN python3 -m venv /venv \ 24 | && echo ". /venv/bin/activate" >> /etc/profile.d/venv.sh \ 25 | && . 
/venv/bin/activate \ 26 | && pip install matplotlib 27 | 28 | COPY graph_pennant.py graph_pennant.sh / 29 | 30 | RUN chmod 755 /graph_pennant.sh 31 | -------------------------------------------------------------------------------- /beeflow/data/dockerfiles/pennant-graph/graph_pennant.py: -------------------------------------------------------------------------------- 1 | """Graph the output of a PENNANT workflow.""" 2 | 3 | # Disable C0103: This is just a simple script, not all globals should be UPPER_CASE here 4 | # pylint:disable=C0103 5 | 6 | import re 7 | import sys 8 | import matplotlib.pyplot as plt 9 | 10 | 11 | results = [] 12 | for fname in sys.argv[1:]: 13 | pe_count = 0 14 | times = [] 15 | with open(fname, encoding='utf-8') as fp: 16 | for line in fp: 17 | # Check for the PE count 18 | m_pe_count = re.match(r'Running on (\d+) MPI PE\(s\)', line) 19 | if m_pe_count: 20 | pe_count = int(m_pe_count.group(1)) 21 | continue 22 | # Check for an End cyle line 23 | if not line.startswith('End cycle'): 24 | continue 25 | _, _, _, wall = line.split(',') 26 | _, time = wall.split('=') 27 | time = float(time.strip()) 28 | times.append(time) 29 | results.append({ 30 | 'pe_count': pe_count, 31 | 'average_wall_time': sum(times) / len(times), 32 | }) 33 | 34 | # The node counts 35 | x = [str(result['pe_count']) for result in results] 36 | # Average wall for cycle 37 | y = [result['average_wall_time'] for result in results] 38 | fig, ax = plt.subplots() 39 | ax.plot(x, y) 40 | ax.set_title('PENNANT Workflow Run') 41 | ax.set_xlabel('Node count') 42 | ax.set_ylabel('Average wall time for cycle') 43 | # Save to a png file 44 | fig.savefig('graph.png') 45 | -------------------------------------------------------------------------------- /beeflow/data/dockerfiles/pennant-graph/graph_pennant.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # Wrapper to make sure the environment is set up 3 | 4 | . /venv/bin/activate 5 | python3 /graph_pennant.py $@ 6 | -------------------------------------------------------------------------------- /beeflow/remote/__init__.py: -------------------------------------------------------------------------------- 1 | """beeflow remote module.""" 2 | -------------------------------------------------------------------------------- /beeflow/scheduler/__init__.py: -------------------------------------------------------------------------------- 1 | """beeflow scheduler module.""" 2 | -------------------------------------------------------------------------------- /beeflow/scheduler/serializable.py: -------------------------------------------------------------------------------- 1 | """Serialization module. 2 | 3 | This holds a class for simple serialization of Python objects. 4 | """ 5 | import abc 6 | 7 | 8 | class Serializable(abc.ABC): 9 | """Serializable base class. 10 | 11 | This class allows subclasses to easily serialize into simple Python 12 | data types which can be serialized into JSON. 13 | """ 14 | 15 | def encode(self): 16 | """Encode and return a simple Python data type. 17 | 18 | Produce a simple Python data type for serialization. 19 | """ 20 | return self.__dict__ 21 | 22 | @staticmethod 23 | @abc.abstractmethod 24 | def decode(data): 25 | """Decode a serialized object and return an instance. 26 | 27 | Decode a simple Python data type and return and instance of 28 | the object. 
29 | """ 30 | -------------------------------------------------------------------------------- /beeflow/task_manager/__init__.py: -------------------------------------------------------------------------------- 1 | """beeflow task_manager module.""" 2 | -------------------------------------------------------------------------------- /beeflow/task_manager/task_actions.py: -------------------------------------------------------------------------------- 1 | """Manage actions for tasks coming from the WFM.""" 2 | import traceback 3 | from flask import jsonify, make_response 4 | from flask_restful import Resource 5 | from beeflow.common import log as bee_logging 6 | from beeflow.task_manager import utils 7 | 8 | log = bee_logging.setup(__name__) 9 | 10 | 11 | class TaskActions(Resource): 12 | """Actions to take for tasks.""" 13 | 14 | @staticmethod 15 | def delete(): 16 | """Cancel received from WFM to cancel job, update queue to monitor state.""" 17 | db = utils.connect_db() 18 | worker = utils.worker_interface() 19 | cancel_msg = "" 20 | for job in db.job_queue: 21 | task_id = job.task.id 22 | job_id = job.job_id 23 | name = job.task.name 24 | log.info(f"Cancelling {name} with job_id: {job_id}") 25 | try: 26 | job_state = worker.cancel_task(job_id) 27 | except Exception as err: # pylint: disable=W0718 # we have to catch everything here 28 | log.error(err) 29 | log.error(traceback.format_exc()) 30 | job_state = 'ZOMBIE' 31 | cancel_msg += f"{name} {task_id} {job_id} {job_state}" 32 | db.job_queue.clear() 33 | db.submit_queue.clear() 34 | resp = make_response(jsonify(msg=cancel_msg, status='ok'), 200) 35 | return resp 36 | -------------------------------------------------------------------------------- /beeflow/task_manager/task_manager.py: -------------------------------------------------------------------------------- 1 | """Task Manager app and api set up code. 2 | 3 | Submits, cancels and monitors states of tasks. 4 | Communicates status to the Work Flow Manager, through RESTful API. 
5 | """ 6 | import atexit 7 | import sys 8 | from apscheduler.schedulers.background import BackgroundScheduler 9 | from flask import Flask, jsonify, make_response 10 | from beeflow.common.api import BeeApi 11 | from beeflow.task_manager.task_submit import TaskSubmit 12 | from beeflow.task_manager.task_actions import TaskActions 13 | from beeflow.task_manager.background import process_queues 14 | from beeflow.common.config_driver import BeeConfig as bc 15 | 16 | 17 | def create_app(): 18 | """Create the flask app and add the REST endpoints for the TM.""" 19 | app = Flask(__name__) 20 | api = BeeApi(app) 21 | 22 | # Endpoints 23 | api.add_resource(TaskSubmit, '/bee_tm/v1/task/submit/') 24 | api.add_resource(TaskActions, '/bee_tm/v1/task/') 25 | 26 | @app.route('/status') 27 | def get_status(): 28 | """Report the current status of the Task Manager.""" 29 | return make_response(jsonify(stauts='up'), 200) 30 | 31 | # Start the background scheduler and make sure it gets cleaned up 32 | if "pytest" not in sys.modules: 33 | scheduler = BackgroundScheduler({'apscheduler.timezone': 'UTC'}) 34 | scheduler.add_job(func=process_queues, trigger="interval", 35 | seconds=bc.get('task_manager', 'background_interval')) 36 | scheduler.start() 37 | 38 | # This kills the scheduler when the process terminates 39 | # so we don't accidentally leave a zombie process 40 | atexit.register(scheduler.shutdown) 41 | 42 | return app 43 | -------------------------------------------------------------------------------- /beeflow/task_manager/task_submit.py: -------------------------------------------------------------------------------- 1 | """Handle task submission.""" 2 | from flask import jsonify, make_response 3 | from flask_restful import Resource, reqparse 4 | import jsonpickle 5 | from beeflow.common import log as bee_logging 6 | from beeflow.task_manager import utils 7 | 8 | log = bee_logging.setup(__name__) 9 | 10 | 11 | class TaskSubmit(Resource): 12 | """WFM sends tasks to the task manager.""" 13 | 14 | @staticmethod 15 | def post(): 16 | """Receives task from WFM.""" 17 | db = utils.connect_db() 18 | parser = reqparse.RequestParser() 19 | parser.add_argument('tasks', type=str, location='json') 20 | data = parser.parse_args() 21 | tasks = jsonpickle.decode(data['tasks']) 22 | for task in tasks: 23 | db.submit_queue.push(task) 24 | log.info(f"Added {task.name} task to the submit queue") 25 | resp = make_response(jsonify(msg='Tasks Added!', status='ok'), 200) 26 | return resp 27 | -------------------------------------------------------------------------------- /beeflow/tests/42.tgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lanl/BEE/d1d94c132dc23cf6f523556c5eb45d51ca0c6aac/beeflow/tests/42.tgz -------------------------------------------------------------------------------- /beeflow/tests/README.md: -------------------------------------------------------------------------------- 1 | # BEE Unit Testing 2 | This directory shall be used for unit testing BEE modules for stability 3 | and compatability across changes 4 | 5 | The `workflows` directory contains sample workflow data in the form of CWL 6 | files or pre-built Neo4j databases. 
7 | -------------------------------------------------------------------------------- /beeflow/tests/cf.cwl: -------------------------------------------------------------------------------- 1 | # -*- mode: YAML; -*- 2 | 3 | class: Workflow 4 | cwlVersion: v1.0 5 | 6 | inputs: 7 | infile: 8 | type: File 9 | default: 'infile' 10 | 11 | outputs: 12 | clamr_dir: 13 | type: File 14 | outputSource: clamr/outfile 15 | ffmpeg_movie: 16 | type: File 17 | outputSource: ffmpeg/outfile 18 | 19 | steps: 20 | clamr: 21 | run: 22 | class: CommandLineTool 23 | inputs: 24 | infile: 25 | type: File 26 | default: lorem.txt 27 | inputBinding: {position: 1} 28 | outputs: 29 | outfile: stdout 30 | stdout: graphics_output 31 | baseCommand: "/clamr/CLAMR-master/clamr_cpuonly -n 32 -l 3 -t 5000 -i 10 -g 25 -G png" 32 | hints: 33 | DockerRequirement: 34 | dockerImageId: "/usr/projects/beedev/clamr/clamr-toss.tar.gz" 35 | in: 36 | infile: infile 37 | out: [outfile] 38 | 39 | ffmpeg: 40 | run: 41 | class: CommandLineTool 42 | inputs: 43 | infile: 44 | type: File 45 | default: graphics_output 46 | inputBinding: {position: 1} 47 | outputs: 48 | outfile: stdout 49 | stdout: CLAMR_movie.mp4 50 | baseCommand: "ffmpeg -f image2 -i $HOME/graphics_output/graph%05d.png -r 12 -s 800x800 -pix_fmt yuv420p $HOME/CLAMR_movie.mp4" 51 | in: 52 | infile: clamr/outfile 53 | out: [outfile] 54 | -------------------------------------------------------------------------------- /beeflow/tests/clamr-wf.tgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lanl/BEE/d1d94c132dc23cf6f523556c5eb45d51ca0c6aac/beeflow/tests/clamr-wf.tgz -------------------------------------------------------------------------------- /beeflow/tests/clamr-wf/clamr.cwl: -------------------------------------------------------------------------------- 1 | # -*- mode: YAML; -*- 2 | 3 | class: CommandLineTool 4 | cwlVersion: v1.0 5 | 6 | baseCommand: /clamr/CLAMR-master/clamr_cpuonly 7 | stdout: clamr_stdout.txt 8 | inputs: 9 | amr_type: 10 | type: string? 11 | inputBinding: 12 | prefix: -A 13 | grid_res: 14 | type: int? 15 | inputBinding: 16 | prefix: -n 17 | max_levels: 18 | type: int? 19 | inputBinding: 20 | prefix: -l 21 | time_steps: 22 | type: int? 23 | inputBinding: 24 | prefix: -t 25 | output_steps: 26 | type: int? 27 | inputBinding: 28 | prefix: -i 29 | graphic_steps: 30 | type: int? 31 | inputBinding: 32 | prefix: -g 33 | graphics_type: 34 | type: string? 35 | inputBinding: 36 | prefix: -G 37 | rollback_images: 38 | type: int? 39 | inputBinding: 40 | prefix: -b 41 | checkpoint_disk_interval: 42 | type: int? 43 | inputBinding: 44 | prefix: -c 45 | checkpoint_mem_interval: 46 | type: int? 47 | inputBinding: 48 | prefix: -C 49 | hash_method: 50 | type: string? 
51 | inputBinding: 52 | prefix: -e 53 | 54 | outputs: 55 | stdout: 56 | type: stdout 57 | outdir: 58 | type: Directory 59 | outputBinding: 60 | glob: graphics_output 61 | time_log: 62 | type: File 63 | outputBinding: 64 | glob: total_execution_time.log 65 | -------------------------------------------------------------------------------- /beeflow/tests/clamr-wf/clamr_job.json: -------------------------------------------------------------------------------- 1 | { 2 | "grid_resolution": 32, 3 | "max_levels": 3, 4 | "time_steps": 5000, 5 | "steps_between_outputs": 10, 6 | "steps_between_graphics": 25, 7 | "graphics_type": "png", 8 | "input_format": "image2", 9 | "frame_rate": 12, 10 | "frame_size": "800x800", 11 | "pixel_format": "yuv420p", 12 | "output_filename": "CLAMR_movie.mp4" 13 | } 14 | -------------------------------------------------------------------------------- /beeflow/tests/clamr-wf/clamr_job.yml: -------------------------------------------------------------------------------- 1 | # Inputs for CLAMR 2 | # /clamr/CLAMR-master/clamr_cpuonly -n 32 -l 3 -t 5000 -i 10 -g 25 -G png 3 | 4 | grid_resolution: 32 5 | max_levels: 3 6 | time_steps: 5000 7 | steps_between_outputs: 10 8 | steps_between_graphics: 25 9 | graphics_type: png 10 | 11 | # Inputs for FFMPEG 12 | #ffmpeg -f image2 -r 12 -s 800x800 -pix_fmt yuv420p $HOME/CLAMR_movie.mp4 13 | 14 | input_format: image2 15 | frame_rate: 12 16 | frame_size: 800x800 17 | pixel_format: yuv420p 18 | output_filename: CLAMR_movie.mp4 19 | -------------------------------------------------------------------------------- /beeflow/tests/clamr-wf/clamr_wf.cwl: -------------------------------------------------------------------------------- 1 | # -*- mode: YAML; -*- 2 | 3 | class: Workflow 4 | cwlVersion: v1.0 5 | 6 | inputs: 7 | ##### CLAMR inputs ##### 8 | grid_resolution: int 9 | max_levels: int 10 | time_steps: int 11 | steps_between_outputs: int 12 | steps_between_graphics: int 13 | graphics_type: string 14 | ##### FFMPEG inputs ##### 15 | input_format: string 16 | frame_rate: int 17 | frame_size: string 18 | pixel_format: string 19 | output_filename: string 20 | 21 | outputs: 22 | clamr_stdout: 23 | type: File 24 | outputSource: clamr/stdout 25 | clamr_time_log: 26 | type: File 27 | outputSource: clamr/time_log 28 | clamr_movie: 29 | type: File 30 | outputSource: ffmpeg/movie 31 | 32 | steps: 33 | clamr: 34 | run: clamr.cwl 35 | in: 36 | grid_res: grid_resolution 37 | max_levels: max_levels 38 | time_steps: time_steps 39 | output_steps: steps_between_outputs 40 | graphic_steps: steps_between_graphics 41 | graphics_type: graphics_type 42 | out: [stdout, outdir, time_log] 43 | hints: 44 | DockerRequirement: 45 | dockerImport: clamr_img.tar.gz 46 | dockerImageId: clamr 47 | mv_script: 48 | run: mv_script.cwl 49 | in: 50 | script_input: clamr/outdir 51 | out: [stdout] 52 | 53 | ffmpeg: 54 | run: ffmpeg.cwl 55 | in: 56 | input_format: input_format 57 | ffmpeg_input: mv_script/stdout 58 | frame_rate: frame_rate 59 | frame_size: frame_size 60 | pixel_format: pixel_format 61 | output_file: output_filename 62 | out: [movie] 63 | -------------------------------------------------------------------------------- /beeflow/tests/clamr-wf/ffmpeg.cwl: -------------------------------------------------------------------------------- 1 | # -*- mode: YAML; -*- 2 | 3 | class: CommandLineTool 4 | cwlVersion: v1.0 5 | 6 | baseCommand: ffmpeg 7 | inputs: 8 | input_format: 9 | type: string? 
10 | inputBinding: 11 | prefix: -f 12 | position: 1 13 | ffmpeg_input: 14 | type: string 15 | inputBinding: 16 | prefix: -i 17 | position: 2 18 | frame_rate: 19 | type: int? 20 | inputBinding: 21 | prefix: -r 22 | position: 3 23 | frame_size: 24 | type: string? 25 | inputBinding: 26 | prefix: -s 27 | position: 4 28 | pixel_format: 29 | type: string? 30 | inputBinding: 31 | prefix: -pix_fmt 32 | position: 5 33 | output_file: 34 | type: string 35 | inputBinding: 36 | position: 6 37 | 38 | outputs: 39 | movie: 40 | type: File 41 | outputBinding: 42 | glob: $(inputs.output_file) 43 | -------------------------------------------------------------------------------- /beeflow/tests/clamr-wf/mv_script.cwl: -------------------------------------------------------------------------------- 1 | # -*- mode: YAML; -*- 2 | 3 | class: CommandLineTool 4 | cwlVersion: v1.0 5 | 6 | # Script has to be discoverable in PATH 7 | baseCommand: [mv_script.sh] 8 | stdout: cwl.output.json 9 | inputs: 10 | script_input: 11 | type: Directory 12 | inputBinding: 13 | position: 1 14 | 15 | outputs: 16 | stdout: 17 | type: string 18 | -------------------------------------------------------------------------------- /beeflow/tests/clamr-wf/mv_script.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # One argument is given: the absolute path to the 3 | # CLAMR output directory (graphics output) 4 | set -e 5 | 6 | CLAMR_OUTDIR="$1" 7 | CLAMR_BASEDIR=$(basename "$1") 8 | CLAMR_TMPDIR=$(mktemp -d clamr.XXXXX -p /tmp) 9 | 10 | mv "$CLAMR_OUTDIR" "$CLAMR_TMPDIR" 11 | echo "{\"stdout\": \"${CLAMR_TMPDIR}/${CLAMR_BASEDIR}/graph%05d.png\"}" 12 | -------------------------------------------------------------------------------- /beeflow/tests/cwl_files/cat-grep-tar.yml: -------------------------------------------------------------------------------- 1 | input_file: lorem.txt 2 | word0: Vivamus 3 | word1: pulvinar 4 | tarball_fname: out.tgz 5 | 6 | -------------------------------------------------------------------------------- /beeflow/tests/cwl_files/clamr.yml: -------------------------------------------------------------------------------- 1 | max_levels: 3 2 | grid_resolution: 32 3 | steps_between_output: 10 4 | steps_between_graphics: 25 5 | time_steps: 10000 6 | graphics_type: png 7 | checkpoint_disk_interval: 50 8 | 9 | -------------------------------------------------------------------------------- /beeflow/tests/cwl_files/comd.yml: -------------------------------------------------------------------------------- 1 | i: 2 2 | j: 2 3 | k: 2 4 | x: 40 5 | y: 40 6 | z: 40 7 | pot_dir: /CoMD/pots 8 | 9 | -------------------------------------------------------------------------------- /beeflow/tests/cwl_files/task-req.cwl: -------------------------------------------------------------------------------- 1 | cwlVersion: v1.0 2 | class: Workflow 3 | 4 | inputs: 5 | input_file: File 6 | word: string 7 | 8 | outputs: 9 | cat_stderr: 10 | type: File 11 | outputSource: cat/cat_stderr 12 | 13 | steps: 14 | cat: 15 | run: 16 | class: CommandLineTool 17 | baseCommand: cat 18 | stdout: cat.txt 19 | stderr: cat.err 20 | inputs: 21 | input_file: 22 | type: File 23 | inputBinding: 24 | position: 1 25 | outputs: 26 | contents: 27 | type: stdout 28 | cat_stderr: 29 | type: stderr 30 | in: 31 | input_file: input_file 32 | out: [contents, cat_stderr] 33 | hints: 34 | beeflow:TaskRequirement: 35 | workdir: cat_workdir 36 | grep: 37 | run: 38 | class: CommandLineTool 39 | baseCommand: grep 40 | 
stdout: occur.txt 41 | inputs: 42 | word: 43 | type: string 44 | inputBinding: 45 | position: 1 46 | text_file: 47 | type: File 48 | inputBinding: 49 | position: 2 50 | outputs: 51 | occur: 52 | type: stdout 53 | in: 54 | word: word 55 | text_file: cat/contents 56 | out: [occur] 57 | 58 | -------------------------------------------------------------------------------- /beeflow/tests/cwl_files/task-req.yml: -------------------------------------------------------------------------------- 1 | input_file: lorem.txt 2 | word: Vivamus 3 | 4 | -------------------------------------------------------------------------------- /beeflow/tests/gdb.py: -------------------------------------------------------------------------------- 1 | """Helper functions for starting and stopping the GDB.""" 2 | import time 3 | from beeflow.wf_manager.common import dep_manager 4 | from beeflow.wf_manager.resources import wf_utils 5 | 6 | 7 | def start(): 8 | """Start the GDB.""" 9 | # dep_manager.kill_gdb() 10 | # dep_manager.remove_current_run() 11 | try: 12 | dep_manager.create_image() 13 | except dep_manager.NoContainerRuntime: 14 | raise RuntimeError('Charliecloud is not installed') from None 15 | bolt_port = wf_utils.get_open_port() 16 | http_port = wf_utils.get_open_port() 17 | https_port = wf_utils.get_open_port() 18 | pid = dep_manager.start_gdb('/', bolt_port, http_port, https_port) 19 | time.sleep(10) 20 | return pid 21 | 22 | 23 | def stop(pid): 24 | """Stop the GDB.""" 25 | dep_manager.kill_gdb(pid) 26 | # dep_manager.remove_current_run() 27 | -------------------------------------------------------------------------------- /beeflow/tests/test_cloud.py: -------------------------------------------------------------------------------- 1 | """Cloud API tests.""" 2 | from beeflow.common.cloud import provider 3 | 4 | 5 | def test_cloud_api(): 6 | """Simple test to check the cloud provider API.""" 7 | mock = provider.MockProvider() 8 | 9 | mock.setup_cloud('empty config....') 10 | 11 | assert mock.get_ext_ip_addr('some-node') 12 | -------------------------------------------------------------------------------- /beeflow/tests/test_container_path.py: -------------------------------------------------------------------------------- 1 | """Tests for container path conversion.""" 2 | 3 | from beeflow.common.container_path import convert_path 4 | 5 | 6 | def test_empty(): 7 | """Test an empty bind mounts list.""" 8 | assert convert_path('/home/test', {}) == '/home/test' 9 | assert convert_path('/usr', {}) == '/usr' 10 | 11 | 12 | def test_nonempty(): 13 | """Test a non-empty bind mount list.""" 14 | bind_mounts = { 15 | '/prefix/home/test': '/home/test', 16 | '/usr/projects/some-project': '/mnt/0', 17 | '/data': '/mnt/1', 18 | } 19 | 20 | assert convert_path('/prefix/home/test/some/path', bind_mounts) == '/home/test/some/path' 21 | assert convert_path('/usr/projects/some-project/123', bind_mounts) == '/mnt/0/123' 22 | assert convert_path('/data/1/2/3', bind_mounts) == '/mnt/1/1/2/3' 23 | assert convert_path('/some/other/path', bind_mounts) == '/some/other/path' 24 | -------------------------------------------------------------------------------- /beeflow/tests/test_db_client.py: -------------------------------------------------------------------------------- 1 | """Tests of the client database.""" 2 | 3 | # Disable W0621: Pylint complains about redefining 'temp_db' from the outer 4 | # scope. This is how pytest fixtures work. 
5 | # pylint:disable=W0621 6 | 7 | import tempfile 8 | import os 9 | 10 | import pytest 11 | 12 | from beeflow.common.db import client_db 13 | 14 | 15 | @pytest.fixture 16 | def temp_db(): 17 | """Create a fixture for making a temporary database.""" 18 | fname = tempfile.mktemp() 19 | db = client_db.open_db(fname) 20 | yield db 21 | os.remove(fname) 22 | 23 | 24 | def test_empty(temp_db): 25 | """Test the empty database.""" 26 | db = temp_db 27 | 28 | host_name = db.info.get_hostname() 29 | backend_stat = db.info.get_backend_status() 30 | assert host_name == "" 31 | assert backend_stat == "" 32 | 33 | 34 | def test_info(temp_db): 35 | """Test setting the info.""" 36 | db = temp_db 37 | 38 | db.info.set_hostname('front_end_name') 39 | host_name = db.info.get_hostname() 40 | 41 | db.info.set_backend_status('true') 42 | backend_stat = db.info.get_backend_status() 43 | 44 | assert host_name == 'front_end_name' 45 | assert backend_stat == 'true' 46 | -------------------------------------------------------------------------------- /beeflow/tests/test_db_sched.py: -------------------------------------------------------------------------------- 1 | """Tests of the scheduler database.""" 2 | 3 | # Disable W0621: Pylint complains about redefining 'temp_db' from the outer 4 | # scope. This is how pytest fixtures work. 5 | # pylint:disable=W0621 6 | 7 | import tempfile 8 | import os 9 | 10 | import pytest 11 | 12 | from beeflow.common.db import sched_db 13 | 14 | 15 | @pytest.fixture 16 | def temp_db(): 17 | """Create a fixture for making a temporary datbase.""" 18 | fname = tempfile.mktemp() 19 | db = sched_db.open_db(fname) 20 | yield db 21 | os.remove(fname) 22 | 23 | 24 | def test_empty(temp_db): 25 | """Test the empty database.""" 26 | db = temp_db 27 | 28 | assert len(list(db.resources)) == 0 29 | 30 | 31 | def test_extend(temp_db): 32 | """Test setting the resources.""" 33 | db = temp_db 34 | 35 | db.resources.extend([1, 2, 3]) 36 | assert list(db.resources) == [1, 2, 3] 37 | 38 | 39 | def test_clear(temp_db): 40 | """Test clearing the resources.""" 41 | db = temp_db 42 | db.resources.extend([8, 9, 10, 11, 12, 13, 14]) 43 | 44 | db.resources.clear() 45 | assert len(list(db.resources)) == 0 46 | -------------------------------------------------------------------------------- /beeflow/wf_manager/__init__.py: -------------------------------------------------------------------------------- 1 | """beeflow wf_manager module.""" 2 | -------------------------------------------------------------------------------- /beeflow/wf_manager/resources/__init__.py: -------------------------------------------------------------------------------- 1 | """beeflow wf_manager resources module.""" 2 | -------------------------------------------------------------------------------- /beeflow/wf_manager/wf_manager.py: -------------------------------------------------------------------------------- 1 | """Start up the workflow manager connecting all of the endpoints.""" 2 | 3 | import os 4 | from flask import Flask 5 | from celery import Celery 6 | from celery import shared_task # pylint: disable=W0611 # pylint can't find celery imports 7 | from beeflow.common.api import BeeApi 8 | from beeflow.common import paths 9 | from beeflow.wf_manager.resources.wf_list import WFList 10 | from beeflow.wf_manager.resources.wf_actions import WFActions 11 | from beeflow.wf_manager.resources.wf_update import WFUpdate 12 | from beeflow.wf_manager.resources import wf_utils 13 | 14 | 15 | def create_app(): 16 | """Create flask app object 
and add REST endpoints.""" 17 | app = Flask(__name__) 18 | api = BeeApi(app) 19 | 20 | # Add endpoints 21 | api.add_resource(WFList, '/bee_wfm/v1/jobs/') 22 | api.add_resource(WFActions, '/bee_wfm/v1/jobs/') 23 | api.add_resource(WFUpdate, '/bee_wfm/v1/jobs/update/') 24 | 25 | # Initialize celery app 26 | celery_app = Celery(app.name) 27 | redis_socket = os.path.join(paths.redis_root(), paths.redis_sock_fname()) 28 | celery_app.config_from_object({ 29 | 'broker_url': f'redis+socket://{redis_socket}', 30 | 'result_backend': f'db+sqlite://{paths.celery_db()}', 31 | 'task_serializer': 'pickle', 32 | 'accept_content': ['application/json', 'application/x-python-serialize'], 33 | }) 34 | celery_app.set_default() 35 | app.extensions['celery'] = celery_app 36 | 37 | return app 38 | 39 | 40 | if __name__ == '__main__': 41 | flask_app = create_app() 42 | bee_workdir = wf_utils.get_bee_workdir() 43 | -------------------------------------------------------------------------------- /ci/README.md: -------------------------------------------------------------------------------- 1 | # CI code 2 | 3 | This directory contains all the scripts that are needed for configuring and 4 | running BEE on a CI machine. `BEE_WORKER` must be set in the environment by the 5 | workflow to `Slurmrestd`, `SlurmCommands`, or `Flux`. The scripts are as follows: 6 | 7 | * `env.sh`: CI environment set up 8 | * `batch_scheduler.sh`: Install and set up a batch scheduler 9 | * `bee_install.sh`: Install BEE and python dependencies 10 | * `bee_config.sh`: Generate the bee.conf 11 | * `deps_install.sh`: Install external dependencies needed by BEE and batch schedulers 12 | * `flux_install.sh`: Install flux and dependencies 13 | * `inner_integration_test.sh`: Inner script for integration testing and running 14 | with specific batch scheduler 15 | * `integration_test.py`: The actual integration test script; can be run locally 16 | * `integration_test.sh`: Outer script for integration testing called from the 17 | github workflow 18 | * `slurm_start.sh`: Start the Slurm batch scheduler 19 | * `unit_tests.sh`: Run the unit tests 20 | 21 | Note: The only script that you should be able to run locally without problems is 22 | `integration_test.py`. The rest are designed for the CI environment and will 23 | likely not work on a local machine. 24 | 25 | ## Integration tests 26 | 27 | The integration tests are written as a Python script `integration_test.py`. 28 | This test can be run locally after you've started BEE with `beeflow`, by just 29 | launching the script `./ci/integration_test.py`. 30 | -------------------------------------------------------------------------------- /ci/batch_scheduler.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # Set up and start the batch scheduler 3 | 4 | case $BEE_WORKER in 5 | Slurmrestd|SlurmCommands) 6 | ./ci/slurm_start.sh 7 | ;; 8 | Flux) 9 | ./ci/flux_install.sh 10 | ;; 11 | *) 12 | printf "ERROR: Invalid worker type '%s'\n" "$BEE_WORKER" 13 | exit 1 14 | ;; 15 | esac 16 | -------------------------------------------------------------------------------- /ci/bee_install.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # Setup and install BEE. 
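# Note (sketch): like the other scripts in this directory, this one assumes the
# CI variables from ci/env.sh ($PYTHON, $NEO4J_CONTAINER, $REDIS_CONTAINER) are
# already in the environment; a typical invocation from the repository root
# would be something like:
#   . ./ci/env.sh && ./ci/bee_install.sh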
3 | 4 | set -e 5 | 6 | # BEE Containers 7 | printf "\n\n" 8 | printf "**Setting up BEE containers**\n" 9 | printf "\n\n" 10 | mkdir -p $HOME/img 11 | # Build the Neo4j container 12 | ch-image build -t neo4j_image -f ./beeflow/data/dockerfiles/Dockerfile.neo4j ./ci || exit 1 13 | ch-convert -i ch-image -o tar neo4j_image $NEO4J_CONTAINER || exit 1 14 | # Pull the Redis container 15 | ch-image pull redis || exit 1 16 | ch-convert -i ch-image -o tar redis $REDIS_CONTAINER || exit 1 17 | 18 | # BEE install 19 | printf "\n\n" 20 | printf "**Installing BEE**\n" 21 | printf "\n\n" 22 | $PYTHON -m venv venv 23 | . venv/bin/activate 24 | pip install --upgrade pip 25 | # pip install poetry 26 | # TODO: May want to use pip with specific version here 27 | curl -L https://install.python-poetry.org/ > install-poetry.sh 28 | chmod +x install-poetry.sh 29 | ./install-poetry.sh 30 | # Do a poetry install, making sure that all extras are added 31 | poetry install -E cloud_extras || exit 1 32 | -------------------------------------------------------------------------------- /ci/deps_install.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # Install all dependencies for BEE 3 | set -e 4 | 5 | sudo apt-get update 6 | sudo apt-get install -y slurmctld slurmd slurmrestd munge python3 python3-venv \ 7 | curl build-essential zlib1g-dev libncurses5-dev libgdbm-dev libnss3-dev \ 8 | libssl-dev libsqlite3-dev libreadline-dev libffi-dev libbz2-dev \ 9 | libmunge-dev \ 10 | libyaml-dev # needed for PyYAML 11 | 12 | sudo apt-get install -y graphviz libgraphviz-dev 13 | 14 | # Install most recent Charliecloud 15 | curl -O -L https://github.com/hpc/charliecloud/releases/download/v${CHARLIECLOUD_VERSION}/charliecloud-${CHARLIECLOUD_VERSION}.tar.gz 16 | tar -xvf charliecloud-${CHARLIECLOUD_VERSION}.tar.gz 17 | (cd charliecloud-${CHARLIECLOUD_VERSION} 18 | ./configure --prefix=/usr 19 | make 20 | sudo make install) 21 | 22 | # Install Python3 23 | sudo apt-get install -y software-properties-common 24 | sudo apt-get install -y python3 python3-dev python3-venv 25 | -------------------------------------------------------------------------------- /ci/docs.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # Install BEE and build the docs in CI. 
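# Note (sketch): the generated HTML lands under docs/sphinx/_build/html
# (BUILDDIR=_build in docs/sphinx/Makefile), so after this script finishes the
# entry page can be opened locally with, for example:
#   open docs/sphinx/_build/html/index.html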
3 | sudo apt-get update 4 | sudo apt-get install python3 python3-venv curl build-essential \ 5 | zlib1g-dev libncurses5-dev libgdbm-dev libnss3-dev libssl-dev libsqlite3-dev \ 6 | libreadline-dev libffi-dev libbz2-dev libyaml-dev 7 | curl -sSL https://install.python-poetry.org | python3 - 8 | poetry update 9 | poetry install 10 | poetry run make -C docs/sphinx html 11 | -------------------------------------------------------------------------------- /ci/env.sh: -------------------------------------------------------------------------------- 1 | # Environment set up 2 | # Set up environment 3 | export PYTHON=python3 4 | export HOSTNAME=`$PYTHON -c 'import socket; print(socket.gethostname())'` 5 | # Everything is in /tmp for right now 6 | export SLURMCTLD_PID=/tmp/slurmctld.pid 7 | export SLURMD_PID=/tmp/slurmd.pid 8 | export SLURMD_SPOOL_DIR=/tmp/slurm_spool 9 | export LOG_DIR=/tmp/slurm_log 10 | export SLURM_STATE_SAVE_LOCATION=/tmp/slurm_state 11 | mkdir -p $SLURMD_SPOOL_DIR $SLURM_STATE_SAVE_LOCATION $LOG_DIR 12 | export SLURMCTLD_LOG=$LOG_DIR/slurmctld.log 13 | export SLURMD_LOG=$LOG_DIR/slurmd.log 14 | export SLURM_USER=`whoami` 15 | export MUNGE_SOCKET=/tmp/munge.sock 16 | export MUNGE_LOG=/tmp/munge.log 17 | export MUNGE_PID=/tmp/munge.pid 18 | mkdir -p /tmp/munge 19 | export MUNGE_KEY=/tmp/munge/munge.key 20 | export BEE_WORKDIR=$HOME/.beeflow 21 | export NEO4J_CONTAINER=$HOME/img/neo4j.tar.gz 22 | export REDIS_CONTAINER=$HOME/img/redis.tar.gz 23 | mkdir -p $BEE_WORKDIR 24 | export SLURM_CONF=~/slurm.conf 25 | # Flux variables 26 | export FLUX_CORE_VERSION=0.51.0 27 | export FLUX_SECURITY_VERSION=0.9.0 28 | export BEE_CONFIG=$HOME/.config/beeflow/bee.conf 29 | export OPENAPI_VERSION=v0.0.37 30 | export CHARLIECLOUD_VERSION=0.36 31 | -------------------------------------------------------------------------------- /ci/flux_install.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # Install, set up, and start Flux 3 | 4 | set -e 5 | . 
./ci/env.sh 6 | 7 | # Install dependencies as listed in https://github.com/flux-framework/flux-core/blob/master/scripts/install-deps-deb.sh 8 | sudo apt-get install -y \ 9 | autoconf \ 10 | automake \ 11 | libtool \ 12 | make \ 13 | pkg-config \ 14 | libc6-dev \ 15 | libzmq3-dev \ 16 | libczmq-dev \ 17 | uuid-dev \ 18 | libjson-glib-dev \ 19 | libjansson-dev \ 20 | liblz4-dev \ 21 | libarchive-dev \ 22 | libhwloc-dev \ 23 | libsqlite3-dev \ 24 | lua5.1 \ 25 | liblua5.1-dev \ 26 | lua-posix \ 27 | python3-dev \ 28 | python3-cffi \ 29 | python3-ply \ 30 | python3-yaml \ 31 | python3-jsonschema \ 32 | python3-sphinx \ 33 | aspell \ 34 | aspell-en \ 35 | valgrind \ 36 | libmpich-dev \ 37 | jq 38 | 39 | # Install flux-security 40 | git clone --depth 1 -b v${FLUX_SECURITY_VERSION} https://github.com/flux-framework/flux-security.git 41 | (cd flux-security 42 | ./autogen.sh 43 | ./configure --prefix=/usr 44 | make 45 | sudo make install 46 | sudo ldconfig) 47 | 48 | # Install flux-core 49 | git clone --depth 1 -b v${FLUX_CORE_VERSION} https://github.com/flux-framework/flux-core.git 50 | (cd flux-core 51 | ./autogen.sh 52 | ./configure --prefix=/usr 53 | make 54 | sudo make install 55 | sudo ldconfig) 56 | # Install the python API 57 | pip install --user wheel 58 | pip install --user flux-python==$FLUX_CORE_VERSION 59 | -------------------------------------------------------------------------------- /ci/inner_integration_test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # Actual integration test runner 3 | 4 | . ./ci/env.sh 5 | . venv/bin/activate 6 | 7 | set +e 8 | 9 | # BEE needs to be started here in order to access batch scheduler resources 10 | case $BEE_WORKER in 11 | Slurmrestd) 12 | # Slurmrestd will fail by default when running as `SlurmUser` 13 | SLURMRESTD_SECURITY=disable_user_check beeflow core start 14 | ;; 15 | *) 16 | beeflow core start 17 | ;; 18 | esac 19 | sleep 4 20 | 21 | # Start the actual integration tests 22 | ./ci/integration_test.py 23 | # Save the exit code for later 24 | EXIT_CODE=$? 25 | 26 | # Output the status logs 27 | beeflow core status 28 | 29 | for log in $BEE_WORKDIR/logs/*.log; do 30 | printf "### $log ###\n" 31 | cat $log 32 | printf "################################################################################\n" 33 | done 34 | 35 | exit $EXIT_CODE 36 | -------------------------------------------------------------------------------- /ci/integration_test.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """CI workflow run script.""" 3 | import typer 4 | import coverage 5 | 6 | from beeflow.common import integration_test 7 | 8 | 9 | if __name__ == '__main__': 10 | cov = coverage.Coverage(data_file='.coverage.integration', auto_data=True) 11 | cov.start() 12 | typer.run(integration_test.main) 13 | cov.stop() 14 | cov.save() 15 | -------------------------------------------------------------------------------- /ci/integration_test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | . venv/bin/activate 4 | 5 | set +e 6 | 7 | case $BEE_WORKER in 8 | Slurm*) 9 | ./ci/inner_integration_test.sh 10 | EXIT_CODE=$? 11 | ;; 12 | Flux) 13 | flux start --test-size=1 ./ci/inner_integration_test.sh 14 | EXIT_CODE=$? 
15 | ;; 16 | *) 17 | printf "ERROR: Invalid batch scheduler option '%s'\n" "$BEE_WORKER" 18 | ;; 19 | esac 20 | 21 | for log in $SLURMCTLD_LOG $SLURMD_LOG $MUNGE_LOG $BEE_WORKDIR/logs/*; do 22 | printf "\n\n" 23 | printf "#### $log ####\n" 24 | cat $log 25 | printf "#### $log ####\n" 26 | printf "\n\n" 27 | done 28 | 29 | exit $EXIT_CODE 30 | -------------------------------------------------------------------------------- /ci/test_workflows/build-failure/Dockerfile.build-failure: -------------------------------------------------------------------------------- 1 | FROM some_nonexistent_container 2 | 3 | RUN touch /file 4 | -------------------------------------------------------------------------------- /ci/test_workflows/build-failure/input.yml: -------------------------------------------------------------------------------- 1 | fname: /file 2 | -------------------------------------------------------------------------------- /ci/test_workflows/build-failure/workflow.cwl: -------------------------------------------------------------------------------- 1 | # Dummy workflow designed to fail at the container build stage 2 | class: Workflow 3 | cwlVersion: v1.2 4 | 5 | inputs: 6 | fname: string 7 | 8 | outputs: 9 | step0_stdout: 10 | type: File 11 | outputSource: step0/step0_stdout 12 | 13 | steps: 14 | step0: 15 | run: 16 | class: CommandLineTool 17 | baseCommand: ls 18 | stdout: step0_stdout.txt 19 | inputs: 20 | fname: 21 | type: string 22 | inputBinding: {} 23 | outputs: 24 | step0_stdout: 25 | type: stdout 26 | in: 27 | fname: fname 28 | out: [step0_stdout] 29 | hints: 30 | DockerRequirement: 31 | dockerFile: "Dockerfile.build-failure" 32 | beeflow:containerName: "build-failure" 33 | -------------------------------------------------------------------------------- /ci/test_workflows/checkpoint-too-long/Dockerfile: -------------------------------------------------------------------------------- 1 | # Dummy container with program to run forever 2 | FROM alpine 3 | 4 | RUN printf "#!/bin/sh\n" > /usr/bin/checkpoint-program \ 5 | && printf "touch backup0.crx\n" >> /usr/bin/checkpoint-program \ 6 | && printf "sleep 10000000000\n" >> /usr/bin/checkpoint-program \ 7 | && chmod 755 /usr/bin/checkpoint-program 8 | -------------------------------------------------------------------------------- /ci/test_workflows/checkpoint-too-long/input.yml: -------------------------------------------------------------------------------- 1 | # Dummy input 2 | fake_input: 64 3 | -------------------------------------------------------------------------------- /ci/test_workflows/checkpoint-too-long/step0.cwl: -------------------------------------------------------------------------------- 1 | class: CommandLineTool 2 | cwlVersion: v1.0 3 | 4 | baseCommand: /usr/bin/checkpoint-program 5 | stdout: checkpoint_stdout.txt 6 | inputs: 7 | fake_input: 8 | type: int? 9 | inputBinding: 10 | prefix: -f 11 | outputs: 12 | step0_output: 13 | type: stdout 14 | -------------------------------------------------------------------------------- /ci/test_workflows/checkpoint-too-long/workflow.cwl: -------------------------------------------------------------------------------- 1 | # Test workflow that will run forever, causing a failure when it is restarted 2 | # more than 'num_tries'. 
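# Rough reading of the hints below (a sketch based only on the fields used in
# this test, not a full description of beeflow:CheckpointRequirement): files
# matching 'file_regex' under 'file_path' are treated as checkpoints, the step
# is restarted with 'restart_parameters' when the 00:00:10 Slurm time limit
# expires, and 'num_tries: 1' exhausts the restart budget almost immediately,
# which is the failure this workflow is designed to trigger.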
3 | class: Workflow 4 | cwlVersion: v1.0 5 | 6 | inputs: 7 | # Dummy input for the first step 8 | fake_input: int 9 | outputs: 10 | step0_stdout: 11 | type: File 12 | outputSource: step0/step0_stdout 13 | 14 | steps: 15 | step0: 16 | run: step0.cwl 17 | in: 18 | fake_input: fake_input 19 | out: [step0_output] 20 | hints: 21 | beeflow:CheckpointRequirement: 22 | enabled: true 23 | file_path: . 24 | container_path: . 25 | file_regex: backup[0-9]*.crx 26 | restart_parameters: -R 27 | num_tries: 1 28 | beeflow:SlurmRequirement: 29 | timeLimit: 00:00:10 30 | DockerRequirement: 31 | dockerFile: "Dockerfile" 32 | beeflow:containerName: "checkpoint-failure" 33 | -------------------------------------------------------------------------------- /ci/test_workflows/clamr-wf-checkpoint/Dockerfile.clamr-ffmpeg: -------------------------------------------------------------------------------- 1 | # Dockerfile.clamr-ffmpeg 2 | # Developed on Chicoma @lanl 3 | # Patricia Grubel 4 | 5 | FROM debian:11 6 | 7 | 8 | RUN apt-get update && \ 9 | apt-get install -y wget gnupg git cmake ffmpeg g++ make openmpi-bin libopenmpi-dev libpng-dev libpng16-16 libpng-tools imagemagick libmagickwand-6.q16-6 libmagickwand-6.q16-dev 10 | 11 | RUN git clone https://github.com/lanl/CLAMR.git 12 | RUN cd CLAMR && cmake . && make clamr_cpuonly 13 | -------------------------------------------------------------------------------- /ci/test_workflows/clamr-wf-checkpoint/README.md: -------------------------------------------------------------------------------- 1 | # CLAMR - FFMPEG workflow using CWL 2 | 3 | clamr_wf.cwl - the main cwl. 4 | calmr_job.yml - yaml file for values used by the cwl files. 5 | clamr.cwl - cwl file for the clamr step. 6 | ffmpeg.cwl - cwl file for the ffmpeg step. 7 | 8 | The values in these files run on fog a LANL cluster, using the container runtime Charliecloud. Fog uses slurm as the workload scheduler. 
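A typical submission from this directory might look like the sketch below; the
workflow name and output directory are arbitrary placeholders, and the exact
`beeflow submit` argument order should be checked against the BEE documentation:

```
beeflow submit clamr-checkpoint . clamr_wf.cwl clamr_job.yml ./clamr-output
```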
9 | 10 | 11 | -------------------------------------------------------------------------------- /ci/test_workflows/clamr-wf-checkpoint/clamr_job.json: -------------------------------------------------------------------------------- 1 | { 2 | "grid_resolution": 32, 3 | "max_levels": 3, 4 | "time_steps": 10000, 5 | "steps_between_outputs": 10, 6 | "steps_between_graphics": 25, 7 | "graphics_type": "png", 8 | "checkpoint_disk_interval": 50, 9 | 10 | 11 | "input_format": "image2", 12 | "frame_rate": 12, 13 | "frame_size": "800x800", 14 | "pixel_format": "yuv420p", 15 | "output_filename": "CLAMR_movie.mp4" 16 | } 17 | -------------------------------------------------------------------------------- /ci/test_workflows/clamr-wf-checkpoint/clamr_job.yml: -------------------------------------------------------------------------------- 1 | # Inputs for CLAMR 2 | # /clamr/CLAMR-master/clamr_cpuonly -n 32 -l 3 -t 5000 -i 10 -g 25 -G png -c 50 3 | 4 | grid_resolution: 32 5 | max_levels: 3 6 | time_steps: 10000 7 | steps_between_outputs: 10 8 | steps_between_graphics: 25 9 | graphics_type: png 10 | checkpoint_disk_interval: 50 11 | 12 | # Inputs for FFMPEG 13 | #ffmpeg -f image2 -r 12 -s 800x800 -pix_fmt yuv420p CLAMR_movie.mp4 14 | 15 | input_format: image2 16 | frame_rate: 12 17 | frame_size: 800x800 18 | pixel_format: yuv420p 19 | output_filename: CLAMR_movie.mp4 20 | -------------------------------------------------------------------------------- /ci/test_workflows/clamr-wf-checkpoint/clamr_job_long.yml: -------------------------------------------------------------------------------- 1 | { 2 | "grid_resolution": 32, 3 | "max_levels": 3, 4 | "time_steps": 150000, 5 | "steps_between_outputs": 10, 6 | "steps_between_graphics": 25, 7 | "graphics_type": "png", 8 | "checkpoint_disk_interval": 50, 9 | 10 | 11 | "input_format": "image2", 12 | "frame_rate": 12, 13 | "frame_size": "800x800", 14 | "pixel_format": "yuv420p", 15 | "output_filename": "CLAMR_movie.mp4" 16 | } 17 | -------------------------------------------------------------------------------- /ci/test_workflows/clamr-wf-checkpoint/ffmpeg.cwl: -------------------------------------------------------------------------------- 1 | # -*- mode: YAML; -*- 2 | 3 | class: CommandLineTool 4 | cwlVersion: v1.0 5 | 6 | baseCommand: ffmpeg -y 7 | inputs: 8 | input_format: 9 | type: string? 10 | inputBinding: 11 | prefix: -f 12 | position: 1 13 | ffmpeg_input: 14 | type: Directory 15 | inputBinding: 16 | prefix: -i 17 | position: 2 18 | valueFrom: $(self.path + "/graph%05d.png") 19 | frame_rate: 20 | type: int? 21 | inputBinding: 22 | prefix: -r 23 | position: 3 24 | frame_size: 25 | type: string? 26 | inputBinding: 27 | prefix: -s 28 | position: 4 29 | pixel_format: 30 | type: string? 
31 | inputBinding: 32 | prefix: -pix_fmt 33 | position: 5 34 | output_file: 35 | type: string 36 | inputBinding: 37 | position: 6 38 | 39 | outputs: 40 | movie: 41 | type: File 42 | outputBinding: 43 | glob: $(inputs.output_file) 44 | # glob: CLAMR_movie.mp4 45 | -------------------------------------------------------------------------------- /ci/test_workflows/comd-mpi/comd.cwl: -------------------------------------------------------------------------------- 1 | class: CommandLineTool 2 | baseCommand: [/CoMD/bin/CoMD-mpi, -e] 3 | stdout: comd_stdout.txt 4 | inputs: 5 | i: 6 | type: int 7 | inputBinding: 8 | prefix: -i 9 | j: 10 | type: int 11 | inputBinding: 12 | prefix: -j 13 | k: 14 | type: int 15 | inputBinding: 16 | prefix: -k 17 | x: 18 | type: int 19 | inputBinding: 20 | prefix: -x 21 | y: 22 | type: int 23 | inputBinding: 24 | prefix: -y 25 | z: 26 | type: int 27 | inputBinding: 28 | prefix: -z 29 | pot_dir: 30 | type: string 31 | inputBinding: 32 | prefix: --potDir 33 | outputs: 34 | comd_stdout: 35 | type: stdout 36 | -------------------------------------------------------------------------------- /ci/test_workflows/comd-mpi/comd_job.yml: -------------------------------------------------------------------------------- 1 | # Note: i * j * k must equal the total number of tasks 2 | i: 2 3 | j: 2 4 | k: 2 5 | x: 40 6 | y: 40 7 | z: 40 8 | pot_dir: "/CoMD/pots" 9 | -------------------------------------------------------------------------------- /ci/test_workflows/comd-mpi/comd_wf.cwl: -------------------------------------------------------------------------------- 1 | class: Workflow 2 | cwlVersion: v1.0 3 | 4 | inputs: 5 | i: int 6 | j: int 7 | k: int 8 | x: int 9 | y: int 10 | z: int 11 | pot_dir: string 12 | 13 | outputs: 14 | comd_stdout: 15 | type: File 16 | outputSource: comd/comd_stdout 17 | 18 | steps: 19 | comd: 20 | run: comd.cwl 21 | in: 22 | i: i 23 | j: j 24 | k: k 25 | x: x 26 | y: y 27 | z: z 28 | pot_dir: pot_dir 29 | out: [comd_stdout] 30 | hints: 31 | DockerRequirement: 32 | beeflow:containerName: "comd-mpi" 33 | dockerFile: "Dockerfile.comd-x86_64" 34 | beeflow:MPIRequirement: 35 | nodes: 4 36 | ntasks: 8 37 | -------------------------------------------------------------------------------- /ci/test_workflows/failure-dependent-tasks/input.yml: -------------------------------------------------------------------------------- 1 | fname: some_file_that_doesnt_exist 2 | cat_argument: -n 3 | -------------------------------------------------------------------------------- /ci/test_workflows/missing-input/input.yml: -------------------------------------------------------------------------------- 1 | b: 232 2 | -------------------------------------------------------------------------------- /ci/test_workflows/missing-input/workflow.cwl: -------------------------------------------------------------------------------- 1 | class: Workflow 2 | cwlVersion: v1.2 3 | 4 | inputs: 5 | a: int 6 | outputs: {} 7 | 8 | steps: 9 | step0: 10 | run: 11 | class: CommandLineTool 12 | baseCommand: echo 13 | inputs: 14 | a: 15 | type: int 16 | inputBinding: {} 17 | outputs: [] 18 | in: 19 | a: a 20 | out: [] 21 | -------------------------------------------------------------------------------- /ci/test_workflows/partial-fail/cat.cwl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env cwl-runner 2 | 3 | cwlVersion: v1.0 4 | class: CommandLineTool 5 | baseCommand: cat 6 | stdout: cat.txt 7 | inputs: 8 | text_file: 9 | type: File 10 | inputBinding: 
11 | position: 1 12 | outputs: 13 | cat_out: 14 | type: stdout 15 | -------------------------------------------------------------------------------- /ci/test_workflows/partial-fail/grep0.cwl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env cwl-runner 2 | 3 | cwlVersion: v1.0 4 | class: CommandLineTool 5 | baseCommand: grep 6 | stdout: occur0.txt 7 | inputs: 8 | word: 9 | type: string 10 | inputBinding: 11 | position: 1 12 | text_file: 13 | type: File 14 | inputBinding: 15 | position: 2 16 | outputs: 17 | occur0: 18 | type: stdout 19 | -------------------------------------------------------------------------------- /ci/test_workflows/partial-fail/grep1.cwl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env cwl-runner 2 | 3 | cwlVersion: v1.0 4 | class: CommandLineTool 5 | baseCommand: grep 6 | stdout: occur1.txt 7 | inputs: 8 | word: 9 | type: string 10 | inputBinding: 11 | position: 1 12 | text_file: 13 | type: File 14 | inputBinding: 15 | position: 2 16 | outputs: 17 | occur1: 18 | type: stdout 19 | -------------------------------------------------------------------------------- /ci/test_workflows/partial-fail/input.yml: -------------------------------------------------------------------------------- 1 | source: Vivamuspulvinar 2 | word0: Vivamus 3 | text_file: none.txt 4 | -------------------------------------------------------------------------------- /ci/test_workflows/partial-fail/printf.cwl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env cwl-runner 2 | 3 | cwlVersion: v1.0 4 | class: CommandLineTool 5 | baseCommand: printf 6 | stdout: printf.txt 7 | inputs: 8 | source: 9 | type: string 10 | inputBinding: 11 | position: 1 12 | outputs: 13 | contents: 14 | type: stdout 15 | -------------------------------------------------------------------------------- /ci/test_workflows/partial-fail/workflow.cwl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env cwl-runner 2 | 3 | cwlVersion: v1.0 4 | class: Workflow 5 | inputs: 6 | source: string 7 | word0: string 8 | text_file: File 9 | 10 | outputs: 11 | occur0: 12 | type: File 13 | outputSource: grep0/occur0 14 | occur1: 15 | type: File 16 | outputSource: grep1/occur1 17 | 18 | steps: 19 | printf: 20 | run: printf.cwl 21 | in: 22 | source: source 23 | out: [contents] 24 | grep0: 25 | run: grep0.cwl 26 | in: 27 | word: word0 28 | text_file: printf/contents 29 | out: [occur0] 30 | cat: 31 | run: cat.cwl 32 | in: 33 | text_file: text_file 34 | out: [cat_out] 35 | grep1: 36 | run: grep1.cwl 37 | in: 38 | word: word0 39 | text_file: cat/cat_out 40 | out: [occur1] 41 | -------------------------------------------------------------------------------- /ci/test_workflows/pre-post-script/input.yml: -------------------------------------------------------------------------------- 1 | sleep_time: 1 2 | -------------------------------------------------------------------------------- /ci/test_workflows/pre-post-script/post.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | touch post.txt 4 | -------------------------------------------------------------------------------- /ci/test_workflows/pre-post-script/pre.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | touch pre.txt 4 | 
-------------------------------------------------------------------------------- /ci/test_workflows/pre-post-script/workflow.cwl: -------------------------------------------------------------------------------- 1 | class: Workflow 2 | cwlVersion: v1.0 3 | 4 | inputs: 5 | sleep_time: int 6 | 7 | outputs: 8 | step0_stdout: 9 | type: File 10 | outputSource: step0/step0_stdout 11 | 12 | steps: 13 | step0: 14 | run: 15 | class: CommandLineTool 16 | baseCommand: sleep 17 | stdout: step0_stdout.txt 18 | inputs: 19 | sleep_time: 20 | type: int 21 | inputBinding: 22 | position: 0 23 | outputs: 24 | step0_stdout: 25 | type: stdout 26 | in: 27 | sleep_time: sleep_time 28 | out: [step0_stdout] 29 | hints: 30 | beeflow:ScriptRequirement: 31 | enabled: true 32 | pre_script: "pre.sh" 33 | post_script: "post.sh" 34 | shell: "/bin/bash" 35 | -------------------------------------------------------------------------------- /ci/test_workflows/shell_validate/input.yml: -------------------------------------------------------------------------------- 1 | sleep_time: 1 2 | -------------------------------------------------------------------------------- /ci/test_workflows/shell_validate/post.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bashoo 2 | 3 | touch post.txt 4 | -------------------------------------------------------------------------------- /ci/test_workflows/shell_validate/pre.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | touch pre.txt 4 | -------------------------------------------------------------------------------- /ci/test_workflows/shell_validate/workflow.cwl: -------------------------------------------------------------------------------- 1 | class: Workflow 2 | cwlVersion: v1.0 3 | 4 | inputs: 5 | sleep_time: int 6 | 7 | outputs: 8 | step0_stdout: 9 | type: File 10 | outputSource: step0/step0_stdout 11 | 12 | steps: 13 | step0: 14 | run: 15 | class: CommandLineTool 16 | baseCommand: sleep 17 | stdout: step0_stdout.txt 18 | inputs: 19 | sleep_time: 20 | type: int 21 | inputBinding: 22 | position: 0 23 | outputs: 24 | step0_stdout: 25 | type: stdout 26 | in: 27 | sleep_time: sleep_time 28 | out: [step0_stdout] 29 | hints: 30 | beeflow:ScriptRequirement: 31 | enabled: true 32 | pre_script: "pre.sh" 33 | post_script: "post.sh" 34 | shell: "/bin/bash" 35 | -------------------------------------------------------------------------------- /ci/unit_tests.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | . ./venv/bin/activate 4 | 5 | # Needed to run slurmrestd in CI 6 | export SLURMRESTD_SECURITY=disable_user_check 7 | 8 | pytest beeflow/tests/ 9 | 10 | #Get coverage report 11 | pytest --cov=beeflow beeflow/tests/ 12 | -------------------------------------------------------------------------------- /coverage.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | coverage 17 | coverage 18 | 74% 19 | 74% 20 | 21 | 22 | -------------------------------------------------------------------------------- /docs/README.md: -------------------------------------------------------------------------------- 1 | # BEE Documentation 2 | The HTML BEE documentation is stored in the `/doc/sphinx` directory. 3 | Documentation is automatically generated using Sphinx. 
4 | 5 | For users with pip in their environment(may need to install hpc-beeflow, sphinx and sphinx-rtd-theme using pip): 6 | 7 | ``` 8 | cd /docs/sphinx 9 | make clean 10 | make html 11 | ``` 12 | 13 | For developers with a poetry environment and beeflow installed: 14 | 15 | ``` 16 | cd 17 | poetry shell 18 | cd docs/sphinx 19 | make html 20 | ``` 21 | 22 | To view: 23 | ``` 24 | open _build/html/index.html 25 | ``` 26 | 27 | 28 | 29 | -------------------------------------------------------------------------------- /docs/html/.buildinfo: -------------------------------------------------------------------------------- 1 | # Sphinx build info version 1 2 | # This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done. 3 | config: aadefebb72a3faa2b84319fb5cc6d58d 4 | tags: 645f666f9bcd5a90fca523b33c5a78b7 5 | -------------------------------------------------------------------------------- /docs/neo4j/.gitignore: -------------------------------------------------------------------------------- 1 | */*.tar.gz 2 | -------------------------------------------------------------------------------- /docs/neo4j/ch-grow-files/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM neo4j 2 | COPY environment / 3 | COPY ch-run-neo4j.sh / 4 | -------------------------------------------------------------------------------- /docs/neo4j/ch-grow-files/ch-run-neo4j.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | for i in `cat /environment`; do 4 | export $i 5 | done 6 | 7 | neo4j console 8 | -------------------------------------------------------------------------------- /docs/neo4j/ch-grow-files/environment: -------------------------------------------------------------------------------- 1 | PATH=/var/lib/neo4j/bin:/usr/local/openjdk-8/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin 2 | LANG=C.UTF-8 3 | JAVA_HOME=/usr/local/openjdk-8 4 | JAVA_VERSION=8u222 5 | JAVA_BASE_URL=https://github.com/AdoptOpenJDK/openjdk8-upstream-binaries/releases/download/jdk8u222-b10/OpenJDK8U-jre_ 6 | JAVA_URL_VERSION=8u222b10 7 | NEO4J_SHA256=cf0e6c6e9733cda11922a4a060e53269ac05b6e55cb7817c55621e005928f6cf 8 | NEO4J_TARBALL=neo4j-community-3.5.9-unix.tar.gz 9 | NEO4J_EDITION=community 10 | NEO4J_HOME=/var/lib/neo4j 11 | TINI_VERSION=v0.18.0 12 | TINI_SHA256=12d20136605531b09a2c2dac02ccee85e1b874eb322ef6baf7561cd93f93c855 13 | -------------------------------------------------------------------------------- /docs/neo4j/img/5node.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lanl/BEE/d1d94c132dc23cf6f523556c5eb45d51ca0c6aac/docs/neo4j/img/5node.png -------------------------------------------------------------------------------- /docs/neo4j/img/cypher_pane.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lanl/BEE/d1d94c132dc23cf6f523556c5eb45d51ca0c6aac/docs/neo4j/img/cypher_pane.png -------------------------------------------------------------------------------- /docs/neo4j/img/graph_pane.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lanl/BEE/d1d94c132dc23cf6f523556c5eb45d51ca0c6aac/docs/neo4j/img/graph_pane.png -------------------------------------------------------------------------------- /docs/neo4j/img/neo4j_browser.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/lanl/BEE/d1d94c132dc23cf6f523556c5eb45d51ca0c6aac/docs/neo4j/img/neo4j_browser.png -------------------------------------------------------------------------------- /docs/sphinx/.gitignore: -------------------------------------------------------------------------------- 1 | _build -------------------------------------------------------------------------------- /docs/sphinx/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/sphinx/error_logs.rst: -------------------------------------------------------------------------------- 1 | Information and Error Logs 2 | ************************** 3 | 4 | BEE components information and error logs are in the logs subdirectory of the BEE work directory specified in bee.conf as **bee_workdir**. The default location is **$HOME/.beeflow/logs**.There are logs for each component. When you are given a message such as "Check the workflow manager", look for information in the wf_manager.log. The task_manager.log will contain logging information for each step in a workflow including any build logs if a container is built by BEE. 
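As a quick check, assuming the default work directory, the logs can be listed and tailed with standard tools::

    ls ~/.beeflow/logs
    tail -n 50 ~/.beeflow/logs/wf_manager.log
    tail -n 50 ~/.beeflow/logs/task_manager.log

The exact set of log files depends on which components have run.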
5 | 6 | 7 | -------------------------------------------------------------------------------- /docs/sphinx/images/bee-viz.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lanl/BEE/d1d94c132dc23cf6f523556c5eb45d51ca0c6aac/docs/sphinx/images/bee-viz.png -------------------------------------------------------------------------------- /docs/sphinx/images/cat-dag.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lanl/BEE/d1d94c132dc23cf6f523556c5eb45d51ca0c6aac/docs/sphinx/images/cat-dag.png -------------------------------------------------------------------------------- /docs/sphinx/images/clamr-step.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lanl/BEE/d1d94c132dc23cf6f523556c5eb45d51ca0c6aac/docs/sphinx/images/clamr-step.png -------------------------------------------------------------------------------- /docs/sphinx/images/logos/BEEGrey.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lanl/BEE/d1d94c132dc23cf6f523556c5eb45d51ca0c6aac/docs/sphinx/images/logos/BEEGrey.jpg -------------------------------------------------------------------------------- /docs/sphinx/images/logos/BEEGrey.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lanl/BEE/d1d94c132dc23cf6f523556c5eb45d51ca0c6aac/docs/sphinx/images/logos/BEEGrey.png -------------------------------------------------------------------------------- /docs/sphinx/images/logos/BEEYellow.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lanl/BEE/d1d94c132dc23cf6f523556c5eb45d51ca0c6aac/docs/sphinx/images/logos/BEEYellow.jpg -------------------------------------------------------------------------------- /docs/sphinx/images/logos/BEEYellow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lanl/BEE/d1d94c132dc23cf6f523556c5eb45d51ca0c6aac/docs/sphinx/images/logos/BEEYellow.png -------------------------------------------------------------------------------- /docs/sphinx/images/logos/BEE_Symbol.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lanl/BEE/d1d94c132dc23cf6f523556c5eb45d51ca0c6aac/docs/sphinx/images/logos/BEE_Symbol.jpg -------------------------------------------------------------------------------- /docs/sphinx/images/logos/BEE_Symbol.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lanl/BEE/d1d94c132dc23cf6f523556c5eb45d51ca0c6aac/docs/sphinx/images/logos/BEE_Symbol.png -------------------------------------------------------------------------------- /docs/sphinx/images/src/clamr-step.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lanl/BEE/d1d94c132dc23cf6f523556c5eb45d51ca0c6aac/docs/sphinx/images/src/clamr-step.pptx -------------------------------------------------------------------------------- /docs/sphinx/index.rst: -------------------------------------------------------------------------------- 1 | .. BEE: Build and Execution Environment documentation master file, created by 2 | sphinx-quickstart on Wed Aug 14 09:56:07 2019. 
3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | .. include:: ../../README.rst 6 | 7 | .. note:: 8 | 9 | This documentation was built |today|, beeflow version |version|. 10 | 11 | 12 | .. toctree:: :maxdepth: 2 13 | :caption: Contents: 14 | 15 | 16 | installation 17 | examples 18 | bee_cwl 19 | commands 20 | advanced_usage 21 | error_logs 22 | contribute 23 | development 24 | visualization 25 | 26 | .. Commented out 27 | rest_api 28 | 29 | Indices and tables 30 | ================== 31 | 32 | * :ref:`genindex` 33 | * :ref:`modindex` 34 | * :ref:`search` 35 | -------------------------------------------------------------------------------- /docs/sphinx/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=_build 12 | 13 | if "%1" == "" goto help 14 | 15 | %SPHINXBUILD% >NUL 2>NUL 16 | if errorlevel 9009 ( 17 | echo. 18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 19 | echo.installed, then set the SPHINXBUILD environment variable to point 20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 21 | echo.may add the Sphinx directory to PATH. 22 | echo. 23 | echo.If you don't have Sphinx installed, grab it from 24 | echo.http://sphinx-doc.org/ 25 | exit /b 1 26 | ) 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /docs/sphinx/visualization.rst: -------------------------------------------------------------------------------- 1 | .. _workflow-visualization: 2 | 3 | Workflow Visualization 4 | ********************** 5 | 6 | BEE includes a simple command for viewing BEE workflows. By using the ``beeflow 7 | dag $ID $OUTPUT_DIR`` command, you can view the directed acyclic graph (DAG) of any submitted 8 | workflow. 9 | 10 | Creating DAGs 11 | ============= 12 | 13 | The dag command can be run at any point of the workflow, and can 14 | be run multiple times. To see the DAG of a workflow before it runs, submit 15 | the workflow with the ``--no-start`` flag and then use the dag command. The 16 | DAGs are exported in PNG format to $OUTPUT_DIR/$WD_ID-dags by default. If the 17 | ``no-dag-dir`` flag is specified when the dag command is run, the DAG will be 18 | exported to $OUTPUT_DIR. The dag command makes multiple versions of the DAGs. The 19 | most recent version is $WF_ID.png and the others are $WD_ID_v1.png, 20 | $WF_ID_v2.png ... where v1 is the oldest. The graphmls used to make the DAGs are saved 21 | in the workflow archive and are saved with their version number. These graphmls can 22 | be useful for debugging when there are errors creating the DAGs. 23 | 24 | Example DAG 25 | =========== 26 | 27 | The DAG below was created by running the dag command while the cat-grep-tar 28 | example workflow was running. 29 | 30 | .. image:: images/cat-dag.png 31 | 32 | The orange bubbles are inputs, the blue bubbles are task states, the red 33 | bubbles are tasks, and the green bubbles are outputs. The graph is in a 34 | hierarchical format, meaning that tasks that are higher up in the graph 35 | run before the ones below them. 
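For reference, a DAG like the one above can be produced with a minimal invocation of the command form shown earlier (``<wf_id>`` is the workflow ID reported at submission, and the output directory is arbitrary)::

    beeflow dag <wf_id> ./dag-output

The PNG and its versioned copies then appear under the output directory as described above.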
36 | -------------------------------------------------------------------------------- /docs/summit.md: -------------------------------------------------------------------------------- 1 | ### To run on summit: 2 | 3 | ``` 4 | #### Create a new bee.conf for first time use 5 | 6 | bee_cfg new 7 | 8 | You need to know the path of the Charliecloud image that has depdencies for 9 | beeflow. 10 | Summit uses the LSF workload scheduler so when queried for that answer LSF. 11 | 12 | ### Fix user config file for summit 13 | In ~/.config/beeflow/bee.conf (or wherever your configuration file is) 14 | you must change to your account and set the time limit to an appropriate time. 15 | 16 | -------------------------------------------------------------------------------- /docs/unittest_tutorial/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lanl/BEE/d1d94c132dc23cf6f523556c5eb45d51ca0c6aac/docs/unittest_tutorial/__init__.py -------------------------------------------------------------------------------- /docs/unittest_tutorial/test_fixtures.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python3 2 | """Example of unit testing fixtures using the unittest framework.""" 3 | 4 | import unittest 5 | 6 | 7 | class TestFixtures(unittest.TestCase): 8 | """Example of using fixtures in unit testing.""" 9 | 10 | # BEGIN test fixtures 11 | @classmethod 12 | def setUpClass(cls): 13 | """Set-up operation performed once at initilization for the entire test case.""" 14 | cls.example_lst = list() 15 | 16 | @classmethod 17 | def tearDownClass(cls): 18 | """Tear-down operation performed once after all test methods finish executing.""" 19 | # Useful for e.g. closing a connection 20 | # Nothing useful to do in this case, though 21 | 22 | def setUp(self): 23 | """Perform a set-up operation before each test method executes.""" 24 | # Append 1, 2, 3 to the example list 25 | self.example_lst.extend([1, 2, 3]) 26 | 27 | def tearDown(self): 28 | """Perform a tear-down operation before each test method executes.""" 29 | # Clear the example list of all elements 30 | self.example_lst.clear() 31 | # END test fixtures 32 | 33 | def test_list1(self): 34 | """Test that the example list's contents are [1, 2, 3].""" 35 | # One way to test contents of list 36 | self.assertEqual(self.example_lst, [1, 2, 3]) 37 | # Another way specific to lists, will raise exception if given non-list argument 38 | self.assertListEqual(self.example_lst, [1, 2, 3]) 39 | 40 | def test_list2(self): 41 | """Test that the example list's contents are still [1, 2, 3].""" 42 | self.assertListEqual(self.example_lst, [1, 2, 3]) 43 | 44 | 45 | if __name__ == '__main__': 46 | unittest.main() 47 | -------------------------------------------------------------------------------- /docs/unittest_tutorial/test_numbers.py: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/env python3 2 | """Example of unit testing with numbers using the unittest framework.""" 3 | 4 | import unittest 5 | 6 | 7 | class TestNumbers(unittest.TestCase): 8 | """Example of unit testing with integers.""" 9 | 10 | def test_integers(self): 11 | """Testing relations between integers (also works between floats, of course).""" 12 | # Test if 1 == 1 13 | self.assertEqual(1, 1) 14 | 15 | # Test if 1 != 2 16 | self.assertNotEqual(1, 2) 17 | 18 | # Test if 1 > 0 19 | self.assertGreater(1, 0) 20 | 21 | # Test if 1 >= 1 22 | self.assertGreaterEqual(1, 1) 23 | 24 | # Test if 1 < 2 25 | self.assertLess(1, 2) 26 | 27 | # Test if 1 <= 1 28 | self.assertLessEqual(1, 1) 29 | 30 | 31 | class TestFloats(unittest.TestCase): 32 | """Example of unit testing with integers.""" 33 | 34 | def test_floats(self): 35 | """Testing relations between integers (also works between floats, of course).""" 36 | # Test if 1 ~= 1.0001 using rounding to decimal places 37 | self.assertAlmostEqual(1, 1.0001, places=3) 38 | 39 | # Test if 1 ~= 1.0001 using delta threshold 40 | self.assertAlmostEqual(1, 1.0001, delta=0.0001) 41 | 42 | 43 | if __name__ == "__main__": 44 | unittest.main() 45 | -------------------------------------------------------------------------------- /docs/unittest_tutorial/test_skip.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python3 2 | """Example of test skipping in the unittest framework.""" 3 | 4 | import unittest 5 | 6 | 7 | class TestSkip(unittest.TestCase): 8 | """Example of unit test skipping.""" 9 | 10 | @unittest.skip("Demonstrating skipping.") 11 | def test_skip(self): 12 | """This test method will be skipped.""" 13 | self.fail("Shouldn't happen.") 14 | 15 | @unittest.skipIf(True, "Skipped because True.") 16 | def test_skipif(self): 17 | """This test method will also be skipped.""" 18 | self.fail("Also shouldn't happen.") 19 | 20 | @unittest.skipUnless(True, "Not skipped.") 21 | def test_skipunless(self): 22 | """This test method should not be skipped.""" 23 | -------------------------------------------------------------------------------- /docs/unittest_tutorial/test_strings.py: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/env python3 2 | """Example of unit testing with strings using the unittest framework.""" 3 | 4 | import unittest 5 | 6 | 7 | class TestStrings(unittest.TestCase): 8 | """Example of unit testing with strings.""" 9 | 10 | def test_isupper(self): 11 | """Testing if a string is upper case.""" 12 | # Assert that an object is a string 13 | self.assertIsInstance("FOO", str) 14 | 15 | # One way to test if string is upper case, using assertTrue 16 | self.assertTrue("FOO".isupper()) 17 | 18 | # Another way using assert False 19 | self.assertFalse(not "FOO".isupper()) 20 | 21 | # Another way using equality test 22 | self.assertEqual("FOO", "FOO".upper()) 23 | 24 | def test_error(self): 25 | """Testing for error raised.""" 26 | with self.assertRaises(TypeError): 27 | return "FOO" / 5 28 | 29 | 30 | if __name__ == '__main__': 31 | unittest.main() 32 | -------------------------------------------------------------------------------- /examples/.gitignore: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lanl/BEE/d1d94c132dc23cf6f523556c5eb45d51ca0c6aac/examples/.gitignore -------------------------------------------------------------------------------- /examples/cat-grep-tar/cat.cwl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env cwl-runner 2 | 3 | cwlVersion: v1.0 4 | class: CommandLineTool 5 | baseCommand: cat 6 | stdout: cat.txt 7 | stderr: cat.err 8 | inputs: 9 | input_file: 10 | type: File 11 | inputBinding: 12 | position: 1 13 | outputs: 14 | contents: 15 | type: stdout 16 | cat_stderr: 17 | type: stderr 18 | -------------------------------------------------------------------------------- /examples/cat-grep-tar/grep0.cwl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env cwl-runner 2 | 3 | cwlVersion: v1.0 4 | class: CommandLineTool 5 | baseCommand: grep 6 | stdout: occur0.txt 7 | inputs: 8 | word: 9 | type: string 10 | inputBinding: 11 | position: 1 12 | text_file: 13 | type: File 14 | inputBinding: 15 | position: 2 16 | outputs: 17 | occur: 18 | type: stdout 19 | -------------------------------------------------------------------------------- /examples/cat-grep-tar/grep1.cwl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env cwl-runner 2 | 3 | cwlVersion: v1.0 4 | class: CommandLineTool 5 | baseCommand: grep 6 | stdout: occur1.txt 7 | inputs: 8 | word: 9 | type: string 10 | inputBinding: 11 | position: 1 12 | text_file: 13 | type: File 14 | inputBinding: 15 | position: 2 16 | outputs: 17 | occur: 18 | type: stdout 19 | -------------------------------------------------------------------------------- /examples/cat-grep-tar/input.yml: -------------------------------------------------------------------------------- 1 | input_file: lorem.txt 2 | word0: Vivamus 3 | word1: pulvinar 4 | tarball_fname: out.tgz 5 | -------------------------------------------------------------------------------- /examples/cat-grep-tar/tar.cwl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env cwl-runner 2 | 3 | cwlVersion: v1.0 4 | class: CommandLineTool 5 | baseCommand: tar 6 | inputs: 7 | tarball_fname: 8 | type: string 9 | inputBinding: 10 | position: 1 11 | prefix: -cf 12 | file0: 13 | type: File 14 | inputBinding: 15 | position: 2 16 | file1: 17 | type: File 18 | inputBinding: 19 | position: 3 20 | outputs: 21 | tarball: 22 | type: File 23 | 
outputBinding: 24 | glob: $(inputs.tarball_fname) 25 | -------------------------------------------------------------------------------- /examples/cat-grep-tar/workflow.cwl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env cwl-runner 2 | 3 | cwlVersion: v1.0 4 | class: Workflow 5 | inputs: 6 | input_file: File 7 | word0: string 8 | word1: string 9 | tarball_fname: string 10 | 11 | outputs: 12 | tarball: 13 | type: File 14 | outputSource: tar/tarball 15 | cat_stderr: 16 | type: File 17 | outputSource: cat/cat_stderr 18 | 19 | steps: 20 | cat: 21 | run: cat.cwl 22 | in: 23 | input_file: input_file 24 | out: [contents, cat_stderr] 25 | grep0: 26 | run: grep0.cwl 27 | in: 28 | word: word0 29 | text_file: cat/contents 30 | out: [occur] 31 | grep1: 32 | run: grep1.cwl 33 | in: 34 | word: word1 35 | text_file: cat/contents 36 | out: [occur] 37 | tar: 38 | run: tar.cwl 39 | in: 40 | file0: grep0/occur 41 | file1: grep1/occur 42 | tarball_fname: tarball_fname 43 | out: [tarball] 44 | -------------------------------------------------------------------------------- /examples/clamr-checkpoint-cwl/Dockerfile.clamr-ffmpeg: -------------------------------------------------------------------------------- 1 | # Dockerfile.clamr-ffmpeg 2 | # Developed on Chicoma @lanl 3 | # Patricia Grubel 4 | 5 | FROM debian:11 6 | 7 | 8 | RUN apt-get update && \ 9 | apt-get install -y wget gnupg git cmake ffmpeg g++ make openmpi-bin libopenmpi-dev libpng-dev libpng16-16 libpng-tools imagemagick libmagickwand-6.q16-6 libmagickwand-6.q16-dev 10 | 11 | RUN git clone https://github.com/lanl/CLAMR.git 12 | RUN cd CLAMR && cmake . && make clamr_cpuonly 13 | -------------------------------------------------------------------------------- /examples/clamr-ffmpeg-build/Dockerfile.clamr-ffmpeg: -------------------------------------------------------------------------------- 1 | # Dockerfile.clamr-ffmpeg 2 | # Developed on Chicoma @lanl 3 | # Patricia Grubel 4 | 5 | FROM debian:11 6 | 7 | 8 | RUN apt-get update && \ 9 | apt-get install -y wget gnupg git cmake ffmpeg g++ make openmpi-bin libopenmpi-dev libpng-dev libpng16-16 libpng-tools imagemagick libmagickwand-6.q16-6 libmagickwand-6.q16-dev 10 | 11 | RUN git clone https://github.com/lanl/CLAMR.git 12 | RUN cd CLAMR && cmake . && make clamr_cpuonly 13 | -------------------------------------------------------------------------------- /examples/clamr-ffmpeg-build/README.md: -------------------------------------------------------------------------------- 1 | # CLAMR - FFMPEG workflow using CWL 2 | 3 | This workflow uses the DockerRequirements dockerFile and beeflow:containerNameto build the clamr and ffmpeg in a container. 4 | 5 | ``` 6 | clamr_wf.cwl - the main cwl 7 | calmr_job.yml - yaml file for values used by the cwl files 8 | clamr.cwl - cwl file for the clamr step 9 | ffmpeg.cwl - cwl file for the ffmpeg step 10 | ``` 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /examples/clamr-ffmpeg-build/clamr.cwl: -------------------------------------------------------------------------------- 1 | # -*- mode: YAML; -*- 2 | 3 | class: CommandLineTool 4 | cwlVersion: v1.0 5 | 6 | baseCommand: /CLAMR/clamr_cpuonly 7 | # This is the stdout field which makes all stdout be captured in this file 8 | stdout: clamr_stdout.txt 9 | # Arguments to the command 10 | inputs: 11 | amr_type: 12 | # ? means the argument is optional 13 | # All of the ? 
here are legacy from the original CWL 14 | type: string? 15 | # Declare extra options 16 | # We support prefix and position 17 | inputBinding: 18 | # Prefix is the flag for cli command 19 | prefix: -A 20 | grid_res: 21 | type: int? 22 | inputBinding: 23 | prefix: -n 24 | max_levels: 25 | type: int? 26 | inputBinding: 27 | prefix: -l 28 | time_steps: 29 | type: int? 30 | inputBinding: 31 | prefix: -t 32 | output_steps: 33 | type: int? 34 | inputBinding: 35 | prefix: -i 36 | graphic_steps: 37 | type: int? 38 | inputBinding: 39 | prefix: -g 40 | graphics_type: 41 | type: string? 42 | inputBinding: 43 | prefix: -G 44 | rollback_images: 45 | type: int? 46 | inputBinding: 47 | prefix: -b 48 | checkpoint_disk_interval: 49 | type: int? 50 | inputBinding: 51 | prefix: -c 52 | checkpoint_mem_interval: 53 | type: int? 54 | inputBinding: 55 | prefix: -C 56 | hash_method: 57 | type: string? 58 | inputBinding: 59 | prefix: -e 60 | 61 | outputs: 62 | # Captures stdout. Name is arbitrary. 63 | clamr_stdout: 64 | type: stdout 65 | outdir: 66 | type: Directory 67 | outputBinding: 68 | glob: graphics_output/graph%05d.png 69 | time_log: 70 | type: File 71 | outputBinding: 72 | glob: total_execution_time.log 73 | -------------------------------------------------------------------------------- /examples/clamr-ffmpeg-build/clamr_job.json: -------------------------------------------------------------------------------- 1 | { 2 | "grid_resolution": 32, 3 | "max_levels": 3, 4 | "time_steps": 5000, 5 | "steps_between_outputs": 10, 6 | "steps_between_graphics": 25, 7 | "graphics_type": "png", 8 | "input_format": "image2", 9 | "frame_rate": 12, 10 | "frame_size": "800x800", 11 | "pixel_format": "yuv420p", 12 | "output_filename": "CLAMR_movie.mp4" 13 | } 14 | -------------------------------------------------------------------------------- /examples/clamr-ffmpeg-build/clamr_job.yml: -------------------------------------------------------------------------------- 1 | # Inputs for CLAMR 2 | # /CLAMR/clamr_cpuonly -n 32 -l 3 -t 5000 -i 10 -g 25 -G png 3 | 4 | grid_resolution: 32 5 | max_levels: 3 6 | time_steps: 5000 7 | steps_between_outputs: 10 8 | steps_between_graphics: 25 9 | graphics_type: png 10 | 11 | # Inputs for FFMPEG 12 | #ffmpeg -f image2 -r 12 -s 800x800 -pix_fmt yuv420p CLAMR_movie.mp4 13 | 14 | input_format: image2 15 | frame_rate: 12 16 | frame_size: 800x800 17 | pixel_format: yuv420p 18 | output_filename: CLAMR_movie.mp4 19 | -------------------------------------------------------------------------------- /examples/clamr-ffmpeg-build/ffmpeg.cwl: -------------------------------------------------------------------------------- 1 | # -*- mode: YAML; -*- 2 | 3 | class: CommandLineTool 4 | cwlVersion: v1.0 5 | 6 | baseCommand: ffmpeg -y 7 | 8 | stderr: ffmpeg_stderr.txt 9 | 10 | inputs: 11 | input_format: 12 | type: string? 13 | inputBinding: 14 | prefix: -f 15 | position: 1 16 | ffmpeg_input: 17 | type: Directory 18 | inputBinding: 19 | prefix: -i 20 | position: 2 21 | valueFrom: $("/graph%05d.png") 22 | frame_rate: 23 | type: int? 24 | inputBinding: 25 | prefix: -r 26 | position: 3 27 | frame_size: 28 | type: string? 29 | inputBinding: 30 | prefix: -s 31 | position: 4 32 | pixel_format: 33 | type: string? 
34 | inputBinding: 35 | prefix: -pix_fmt 36 | position: 5 37 | output_file: 38 | type: string 39 | inputBinding: 40 | position: 6 41 | 42 | outputs: 43 | movie: 44 | type: File 45 | outputBinding: 46 | glob: $(inputs.output_file) 47 | # glob: CLAMR_movie.mp4 48 | ffmpeg_stderr: 49 | type: stderr 50 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [pylint] 2 | max-line-length = 99 3 | good-names = i,j,k,v,t,m,s,n,x,y,ax,ex,fp,id,tx,fn,pi,wf,db,Run,_ 4 | disable = R0902,R0903,R0904,R0912,R0913,R0914,R0916,W0603,W1203,C0413,E0401,R0917 5 | # R0902: too many instance attributes (default: 8) 6 | # R0903: class has too few public methods (default: 2) 7 | # R0904: class has too many public methods (default: 20) 8 | # R0912: function or method has too many logical branches (default: 12) 9 | # R0913: function or method takes too many arguments (default: 5) 10 | # R0914: function or method has too many local variables (default: 15) 11 | # R0916: if-statement contains too many boolean expressions (default: 5) 12 | # R0917: too-many-positional-arguments 13 | # W0603: global statement used to update a global variable 14 | # W1203: use lazy % formatting in logging functions 15 | # C0413: requires imports at the top of the file 16 | ignore = data,examples 17 | --------------------------------------------------------------------------------
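The cat-grep-tar tools above (`cat.cwl`, `grep0.cwl`, `grep1.cwl`, `workflow.cwl`) all carry a `#!/usr/bin/env cwl-runner` shebang, so that example can also be exercised outside of BEE with a generic CWL runner. A minimal sketch, assuming `cwltool` is installed (it is not part of this repository) and noting that a stock runner usually expects the `File` input in `input.yml` to be written as a `class: File` mapping rather than a bare filename:

```
# Sketch only: validate and run the cat-grep-tar example with a generic CWL runner.
cd examples/cat-grep-tar
cwltool --validate workflow.cwl    # parse and type-check the workflow and its tools
cwltool workflow.cwl input.yml     # run it; on success this produces out.tgz and cat.err
```

Within BEE the same CWL files are driven by the project's own tooling, so the invocation above is only an illustration of how the tool and workflow definitions fit together.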