├── .gitignore
├── CWLDockerfile
├── Contributing.md
├── LICENSE
├── README.md
├── WebDockerfile
├── conduit
├── __init__.py
├── conduit
├── config
│ ├── airflow.cfg
│ ├── aws_config.yml
│ ├── cloudformation.py
│ ├── dockerfile_local_template
│ ├── dockerfile_s3_template
│ ├── pool_config.json
│ ├── saberAirflowGeneral.json
│ └── saberAirflowMICrONS.json
├── dags
│ ├── __init__.py
│ └── template_dag
├── plugins
│ └── customapi.py
├── requirements.txt
├── scripts
│ ├── __init__.py
│ ├── cwl_monitor
│ ├── entrypoint.sh
│ ├── localwrap
│ └── s3wrap
├── tests
│ ├── __init__.py
│ ├── test_awsbatch_operator.py
│ ├── test_commandlist.py
│ ├── test_cwlparser.py
│ ├── test_data
│ │ └── test_parameterization.yml
│ ├── test_datajoint_hook.py
│ ├── test_job_definitions.py
│ ├── test_parameterization.py
│ ├── test_saber_docker_operator.py
│ └── testing_utils.py
└── utils
│ ├── __init__.py
│ ├── awsbatch_operator.py
│ ├── command_list.py
│ ├── cwlparser.py
│ ├── datajoint_hook.py
│ ├── job_definitions.py
│ ├── parameterization.py
│ └── saber_docker_operator.py
├── demos
├── dvid_ffn
│ ├── block_params.yml
│ ├── job_dvid_ffn.yml
│ ├── job_public.yml
│ ├── workflow_dvid_ffn.cwl
│ └── workflow_public.cwl
├── em_pipelines
│ ├── job_i2g.yaml
│ ├── job_synapse_threshold.yaml
│ ├── synapse_threshold.cwl
│ └── workflow_i2g.cwl
└── local_input_example
│ ├── job.yaml
│ └── local_input.cwl
├── docker-compose-tools.yml
├── docker-compose.yml
├── saber
├── boss_access
│ ├── Dockerfile
│ ├── README.md
│ ├── boss_access.py
│ ├── boss_merge_nos3.cwl
│ ├── boss_pull_nos3.cwl
│ ├── boss_push_nos3.cwl
│ └── boss_test
│ │ ├── dummy_workflow.cwl
│ │ ├── job_params.yml
│ │ └── sweep.yml
├── datajoint
│ ├── configure.py
│ └── docker-compose.yml
├── dvid_access
│ ├── Dockerfile
│ ├── README.md
│ ├── dvid_access.py
│ ├── dvid_pull.cwl
│ └── dvid_push.cwl
├── i2g
│ ├── detection
│ │ ├── Dockerfile
│ │ ├── Dockerfile-gpu
│ │ ├── README.md
│ │ ├── cnn_tools.py
│ │ ├── data_tools.py
│ │ ├── deploy_pipeline.py
│ │ ├── membrane_detection.cwl
│ │ ├── membrane_detection_gpu.cwl
│ │ ├── synapse_detection.cwl
│ │ └── synapse_detection_gpu.cwl
│ ├── examples
│ │ ├── I2G_Demo
│ │ │ ├── job.yml
│ │ │ ├── jobPublic.yaml
│ │ │ ├── jobSecondPass.yaml
│ │ │ ├── job_simple.yaml
│ │ │ ├── sweep.yml
│ │ │ ├── workflow.cwl
│ │ │ ├── workflowDetect.cwl
│ │ │ ├── workflow_GPU.cwl
│ │ │ ├── workflow_i2g.cwl
│ │ │ └── workflow_simple.cwl
│ │ ├── I2G_FFN
│ │ │ ├── job_ffn.yml
│ │ │ └── workflow_ffn.cwl
│ │ ├── I2G_Neuroproof
│ │ │ ├── job_neuroproof_deploy.yaml
│ │ │ ├── job_neuroproof_train.yaml
│ │ │ ├── workflow_neuroproof_deploy.cwl
│ │ │ └── workflow_neuroproof_train.cwl
│ │ ├── I2G_Seg_Workflow
│ │ │ ├── job_seg.yml
│ │ │ ├── test_job_seg.yml
│ │ │ ├── workflowSegment.cwl
│ │ │ └── workflowSegmentTest.cwl
│ │ ├── I2G_association
│ │ │ ├── assoc_job.yml
│ │ │ ├── assoc_workflow.cwl
│ │ │ └── workflow.cwl
│ │ └── I2G_gala
│ │ │ ├── ex_gala_job.yml
│ │ │ ├── i2g.cwl
│ │ │ ├── i2g_gala.cwl
│ │ │ └── i2g_graph.cwl
│ ├── ffns
│ │ ├── Dockerfile.base
│ │ ├── Dockerfile.inference
│ │ ├── Dockerfile.train
│ │ ├── README.md
│ │ ├── example_config.pbtxt
│ │ ├── ffn_segmentation.cwl
│ │ ├── ffn_train.cwl
│ │ ├── ffn_workflow.cwl
│ │ ├── inference
│ │ │ ├── config_template.pbtxt
│ │ │ ├── driver.py
│ │ │ ├── get-latest-checkpoint
│ │ │ └── npy2h5.py
│ │ └── train
│ │ │ └── main.sh
│ ├── metric_computation
│ │ ├── Dockerfile
│ │ ├── metric_computation.py
│ │ ├── metrics.cwl
│ │ └── santiago.py
│ ├── neuron_segmentation
│ │ ├── Dockerfile
│ │ ├── README.md
│ │ ├── driver.py
│ │ ├── neuron_segmentation.cwl
│ │ ├── requirements.txt
│ │ └── trained_classifier.pkl
│ ├── neuroproof
│ │ ├── Dockerfile
│ │ ├── README.md
│ │ ├── driver.py
│ │ └── neuroproof.cwl
│ └── seg_syn_association
│ │ ├── Dockerfile
│ │ ├── README.md
│ │ ├── assoc_boss.cwl
│ │ ├── assoc_local.cwl
│ │ ├── santiago.py
│ │ ├── seg_syn_assoc.py
│ │ ├── seg_syn_assoc_aws.py
│ │ └── seg_syn_assoc_boss.py
├── postprocessing
│ ├── blob_detect
│ │ ├── Dockerfile
│ │ ├── README.md
│ │ ├── blob_detect.cwl
│ │ ├── blob_detect.py
│ │ └── test_workflow
│ │ │ └── example_blob_detect.cwl
│ └── threshold
│ │ ├── Dockerfile
│ │ ├── README.md
│ │ ├── test_workflow
│ │ ├── example_job.yaml
│ │ └── example_workflow.cwl
│ │ ├── threshold.cwl
│ │ └── threshold.py
├── preprocessing
│ └── normalization
│ │ ├── Dockerfile
│ │ ├── normalize
│ │ └── normalize.cwl
└── xbrain
│ ├── Dockerfile
│ ├── README.md
│ ├── example
│ ├── example_job.yml
│ └── workflow.cwl
│ ├── example2d
│ ├── job.yml
│ ├── optimization2d.cwl
│ └── params.yml
│ ├── jobs
│ ├── cell_detect
│ │ ├── batch.cwl
│ │ ├── job.yml
│ │ ├── job_simple.yml
│ │ ├── local.cwl
│ │ ├── local_simple.cwl
│ │ └── params.yml
│ ├── full_test
│ │ ├── example-job.yml
│ │ └── parameterization.yml
│ ├── merge_test
│ │ ├── merge_test_xbrain.cwl
│ │ └── parameterization.yml
│ ├── param_sweep
│ │ ├── example_job.yml
│ │ ├── params.yml
│ │ ├── xbrain_unsup_local.cwl
│ │ └── xbrain_unsup_online.cwl
│ └── unet_train_job
│ │ ├── params.yml
│ │ ├── params_sgd.yml
│ │ ├── xbrain_unets_celldetect_train.cwl
│ │ ├── xbrain_unets_ex_job.yml
│ │ ├── xbrain_unets_ex_job_s3.yml
│ │ ├── xbrain_unets_ex_job_sgd.yml
│ │ ├── xbrain_unets_train.cwl
│ │ └── xbrain_unets_train_sgd.cwl
│ ├── process-xbrain.py
│ ├── split_cells.py
│ ├── tools
│ ├── cell_detect_nos3.cwl
│ ├── cell_split.cwl
│ ├── membrane_classify_nos3.cwl
│ ├── membrane_unets_train.cwl
│ ├── optimize_supervised.cwl
│ ├── optimize_unsupervised.cwl
│ ├── unsup_cell_detect_3D_nos3.cwl
│ ├── unsup_cell_detect_nos3.cwl
│ ├── unsup_membrane_classify_3D_nos3.cwl
│ ├── unsup_membrane_classify_nos3.cwl
│ ├── unsup_metrics_nos3.cwl
│ └── vessel_segment_nos3.cwl
│ ├── unets
│ ├── Dockerfile
│ ├── Dockerfile.train
│ ├── cnn_tools.py
│ ├── data_tools.py
│ ├── deploy_unet_docker.py
│ ├── deploy_unets.cwl
│ ├── image_handler.py
│ ├── train_unet_docker.py
│ └── train_unets.cwl
│ ├── unsupervised_celldetect.py
│ ├── workflows
│ ├── parameterization.yml
│ ├── xbrain-example-job.yml
│ ├── xbrain.cwl
│ ├── xbrain_supervised.cwl
│ ├── xbrain_supervised_optimization.cwl
│ ├── xbrain_unets_celldetect_train.cwl
│ ├── xbrain_unets_train.cwl
│ ├── xbrain_unsupervised_optimization.cwl
│ ├── xbrain_unsupervised_optimization3D.cwl
│ └── xbrain_with_boss.cwl
│ └── xbrain.py
└── setup.py
/.gitignore:
--------------------------------------------------------------------------------
1 | #Ignore cache and DS_Store files
2 | *.DS_Store
3 | *.pyc
4 |
5 | #ignore notebooks
6 | *.ipynb
7 |
8 | #Ignore all local volume and test files
9 | volumes/
10 |
11 | #ignore DAGs except template
12 | conduit/dags
13 | !conduit/dags/__init__.py
14 | !conduit/dags/template_dag
--------------------------------------------------------------------------------
/CWLDockerfile:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | #### CWL parser dockerfile ####
16 |
17 | FROM python:3.6-slim
18 | ARG AIRFLOW_VERSION=1.10.4
19 | ENV SLUGIFY_USES_TEXT_UNIDECODE=yes
20 | RUN set -ex \
21 | && buildDeps=' \
22 | python3-dev \
23 | libkrb5-dev \
24 | libsasl2-dev \
25 | libssl-dev \
26 | libffi-dev \
27 | build-essential \
28 | libblas-dev \
29 | liblapack-dev \
30 | libpq-dev \
31 | git \
32 | ' \
33 | && apt-get update -yqq \
34 | && apt-get upgrade -yqq \
35 | && apt-get install -yqq --no-install-recommends \
36 | $buildDeps \
37 | python3-pip \
38 | python3-requests \
39 | default-libmysqlclient-dev \
40 | apt-utils \
41 | curl \
42 | rsync \
43 | netcat \
44 | default-mysql-server \
45 | default-mysql-client \
46 | locales
47 | RUN apt-get update \
48 | && apt-get install -y python3-pip python3-dev \
49 | && cd /usr/local/bin \
50 | && pip3 install --upgrade pip
51 |
52 | RUN pip3 install apache-airflow==$AIRFLOW_VERSION
53 | RUN pip install apache-airflow==$AIRFLOW_VERSION
54 | # COPY awsbatch_operator.py \
55 | # cwl-to-dag.py \
56 | # create_job_definitions.py \
57 | # parameterization.py \
58 | # datajoint_hook.py \
59 | # s3wrap /scripts/
60 | # s3wrap \
61 | # cwl_monitor /scripts/
62 | # ENTRYPOINT [ "cwl_monitor" ]
63 | COPY ./conduit /conduit
64 | COPY ./setup.py /
65 | RUN pip install -e /
66 | ENV PATH="/conduit:${PATH}"
67 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | # SABER
4 |
5 | ## What is SABER?
6 | SABER (Scalable Analytics for Brain Exploration Research) is a library of containerized tools and a workflow deployment system for enabling processing of large neuroimaging datasets. Current workflows include processing of X-ray Microtomography data to produce cell density estimates as well as processing Electron Microscopy images to segment neuron bodies.
7 | This project builds on Apache Airflow (http://airflow.apache.org) to orchestrate workflow deployment. This project interfaces with Datajoint (https://datajoint.io) and bossDB (https://github.com/jhuapl-boss/boss) to enable data storage and reproducible workflows.
8 |
9 | ## Prerequisites
10 | SABER requires docker and docker-compose. Please use the latest versions. To run our example workflows, you will need an AWS account to enable cloud storage through the AWS S3 service (https://aws.amazon.com/account/). To access data for our example workflows you will need an account at https://api.bossdb.org
11 |
12 | ## Installation
13 | Simply clone this repository and run
14 | `docker-compose up -d` inside it!
15 |
16 | ## Execution of workflows
17 |
18 | Please see our [wiki](https://github.com/aplbrain/saber/wiki) for more information!
19 |
20 | ## Data Access
21 |
22 | Please see our [wiki](https://github.com/aplbrain/saber/wiki/Data-Access) for more information about public access to data for testing Electron Microscopy and X-ray Microtomography workflows.
23 |
24 | ## Legal
25 |
26 | Use or redistribution of the SABER system in source and/or binary forms, with or without modification, are permitted provided that the following conditions are met:
27 |
28 | 1. Redistributions of source code or binary forms must adhere to the terms and conditions of any applicable software licenses.
29 | 2. End-user documentation or notices, whether included as part of a redistribution or disseminated as part of a legal or scientific disclosure (e.g. publication) or advertisement, must include the following acknowledgement: The SABER software system was designed and developed by the Johns Hopkins University Applied Physics Laboratory (JHU/APL).
30 | 3. The names "SABER", "JHU/APL", "Johns Hopkins University", "Applied Physics Laboratory" must not be used to endorse or promote products derived from this software without prior written permission. For written permission, please contact BossAdmin@jhuapl.edu.
31 | 4. This source code and library is distributed in the hope that it will be useful, but is provided without any warranty of any kind.
32 |
--------------------------------------------------------------------------------
/WebDockerfile:
--------------------------------------------------------------------------------
1 | #### Conduit airflow server ####
2 |
3 | # Mainly copied from Puckel_ : https://github.com/puckel/docker-airflow
4 |
5 | FROM python:3.6-slim
6 |
7 | # Never prompts the user for choices on installation/configuration of packages
8 | ENV DEBIAN_FRONTEND noninteractive
9 | ENV TERM linux
10 |
11 | # Airflow
12 | ARG AIRFLOW_VERSION=1.10.4
13 | ARG AIRFLOW_HOME=/root
14 |
15 | # Define en_US.
16 | ENV LANGUAGE en_US.UTF-8
17 | ENV LANG en_US.UTF-8
18 | ENV LC_ALL en_US.UTF-8
19 | ENV LC_CTYPE en_US.UTF-8
20 | ENV LC_MESSAGES en_US.UTF-8
21 | ENV AIRFLOW_GPL_UNIDECODE True
22 | RUN set -ex \
23 | && buildDeps=' \
24 | python3-dev \
25 | libkrb5-dev \
26 | libsasl2-dev \
27 | libssl-dev \
28 | libffi-dev \
29 | build-essential \
30 | libblas-dev \
31 | liblapack-dev \
32 | libpq-dev \
33 | git \
34 | ' \
35 | && apt-get update -yqq \
36 | && apt-get upgrade -yqq \
37 | && apt-get install -yqq --no-install-recommends \
38 | $buildDeps \
39 | python3-pip \
40 | python3-requests \
41 | default-libmysqlclient-dev \
42 | apt-utils \
43 | curl \
44 | rsync \
45 | netcat \
46 | locales \
47 | default-mysql-server \
48 | default-mysql-client \
49 | && apt-get install -y git \
50 | && sed -i 's/^# en_US.UTF-8 UTF-8$/en_US.UTF-8 UTF-8/g' /etc/locale.gen \
51 | && locale-gen \
52 | && update-locale LANG=en_US.UTF-8 LC_ALL=en_US.UTF-8 \
53 | && useradd -ms /bin/bash -d ${AIRFLOW_HOME} airflow \
54 | && pip install -U pip setuptools wheel \
55 | && pip install werkzeug==0.16.0 \
56 | && pip install SQLAlchemy==1.3.15 \
57 | && pip install Cython \
58 | && pip install pytz \
59 | && pip install pyOpenSSL \
60 | && pip install ndg-httpsclient \
61 | && pip install pyasn1 \
62 | && pip install apache-airflow[crypto,celery,postgres,hive,jdbc,mysql]==$AIRFLOW_VERSION \
63 | && pip install celery[redis]==4.1.1
64 | # && apt-get purge --auto-remove -yqq $buildDeps \
65 | # && apt-get autoremove -yqq --purge \
66 | # && apt-get clean \
67 | # && rm -rf \
68 | # /var/lib/apt/lists/* \
69 | # /tmp/* \
70 | # /var/tmp/* \
71 | # /usr/share/man \
72 | # /usr/share/doc \
73 | # /usr/share/doc-base
74 |
75 | # RUN git clone https://github.com/aplbrain/cwl-airflow-parser.git \
76 | # && cd cwl-airflow-parser \
77 | # && pip install -U . \
78 | # && cd ../
79 |
80 | COPY ./conduit/scripts/entrypoint.sh /entrypoint.sh
81 | COPY ./conduit/config/airflow.cfg ${AIRFLOW_HOME}/airflow.cfg
82 |
83 | RUN chown -R airflow: ${AIRFLOW_HOME}
84 |
85 | EXPOSE 8080 5555 8793
86 |
87 | # USER airflow
88 | WORKDIR /home
89 | ENTRYPOINT ["/entrypoint.sh"]
90 | COPY ./conduit /conduit
91 | COPY ./setup.py /
92 | RUN pip install -e /
93 |
--------------------------------------------------------------------------------
/conduit/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aplbrain/saber/051b9506fd7356113be013ac3c435a101fd95123/conduit/__init__.py
--------------------------------------------------------------------------------
/conduit/config/aws_config.yml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | # Configuration options for AWS
16 | # IAM configuration
17 | iam:
18 | RoleName: Saber-Airflow-Workflow-ecsTaskWithS3-45XRWHEJJ8DK
19 | # Must start with file:// or be a url (eg. http://)
20 | AssumeRolePolicyDocument: file://ec2-trust-policy.json
21 | Description: Allows ECS tasks to have full access to S3
22 | # Do not change
23 | # PermissionsBoundary: arn:aws:iam::aws:policy/AmazonS3FullAccess
24 |
25 |
26 |
27 | # batch configuration
28 | batch:
29 | computeEnvironmentName: &comp_env xbrain-compute-environment
30 | # Do not change
31 | type: MANAGED
32 | # Do not change
33 | state: ENABLED
34 | computeResources:
35 | type: EC2
36 | # Change as needed
37 | minvCpus: 0
38 | maxvCpus: 256
39 | desiredvCpus: 0
40 | instanceTypes: [optimal]
41 | instanceRole: arn:aws:iam::aws_account_id:instance-profile/ecsInstanceRole
42 | vpc:
43 | CidrBlock: 172.31.0.0/16
44 | # Add additional subnets here
45 | subnets:
46 | - CidrBlock: 172.31.16.0/20
47 | # - CidrBlock: xxx.xx.xx.x/xx
48 |
49 | acl:
50 | # Change these settings to be more specific or secure
51 | Entries:
52 | - CidrBlock: 0.0.0.0/0
53 | Protocol: '-1' # Means all
54 | RuleAction: allow
55 | RuleNumber: 100
56 | Egress: True
57 | - CidrBlock: 0.0.0.0/0
58 | Protocol: '-1'
59 | RuleAction: allow
60 | RuleNumber: 100
61 | Egress: False
62 |
63 | #Don't need to specify if want default
64 | security-group:
65 | Description: Default VPC security group
66 | GroupName: default
67 | job-queue:
68 | jobQueueName: saber-gpu-queue-enhanced-memory
69 | state: ENABLED
70 | priority: 1
71 | computeEnvironmentOrder:
72 | - order: 1
73 | # Default args for job definitions
74 | job-definitions:
75 | type: container
76 | parameters: {}
77 | retryStrategy:
78 | attempts: 1
79 | containerProperties:
80 | vcpus: 2
81 | memory: 4000
82 | jobRoleArn: arn:aws:iam::438004392447:role/Saber-Airflow-Workflow-BatchInstanceRole-1TQSRWFR81Y5O
83 | volumes:
84 | - name: saber-home
85 | host:
86 | sourcePath : '/dev/xvdcz/'
87 | environment: []
88 | mountPoints:
89 | - sourceVolume: saber-home
90 | containerPath: /saber-home
91 | ulimits: []
92 | datajoint:
93 | host: 'datajoint:3306'
94 | user: root
95 | password: airflow
96 |
--------------------------------------------------------------------------------
/conduit/config/dockerfile_local_template:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | COPY ./localwrap /app/
16 | RUN python -m pip install parse
17 | RUN python3 -m pip install parse
18 |
19 | ENV PATH="/app:${PATH}"
20 | ENTRYPOINT []
--------------------------------------------------------------------------------
/conduit/config/dockerfile_s3_template:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | COPY ./s3wrap /app/
16 | RUN python -m pip install boto3
17 | RUN python -m pip install parse
18 | RUN python3 -m pip install boto3
19 | RUN python3 -m pip install parse
20 |
21 | ENV PATH="/app:${PATH}"
22 | ENTRYPOINT []
--------------------------------------------------------------------------------
/conduit/config/pool_config.json:
--------------------------------------------------------------------------------
1 | {
2 | "Local": {
3 | "description": "Local Execution Pool",
4 | "slots": 4
5 | },
6 | "Batch": {
7 | "description": "AWS Batch Execution Pool",
8 | "slots": 100
9 | }
10 | }
11 |
--------------------------------------------------------------------------------
/conduit/dags/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aplbrain/saber/051b9506fd7356113be013ac3c435a101fd95123/conduit/dags/__init__.py
--------------------------------------------------------------------------------
/conduit/dags/template_dag:
--------------------------------------------------------------------------------
1 | import pickle
2 | from airflow import DAG
3 | import os
4 | import sys
5 | from pathlib import Path # if you haven't already done so
6 |
7 | file = Path(__file__).resolve()
8 | parent, root = file.parent, file.parents[1]
9 | sys.path.append(str(root))
10 | from utils.awsbatch_operator import AWSBatchOperator
11 | from utils.datajoint_hook import *
12 |
13 | with open(os.path.join(os.path.dirname(__file__), "{}_dag.pickle"), "rb") as fp:
14 | print(fp)
15 | dag = pickle.load(fp)
16 | dagdag = dag
17 |
--------------------------------------------------------------------------------
/conduit/requirements.txt:
--------------------------------------------------------------------------------
1 | watchdog==0.9.0
2 | parse==1.9.0
3 | boto3==1.9.79
4 | docker==3.7.0
5 | datajoint==0.11.3
6 | cwltool==1.0.20181217162649
--------------------------------------------------------------------------------
/conduit/scripts/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aplbrain/saber/051b9506fd7356113be013ac3c435a101fd95123/conduit/scripts/__init__.py
--------------------------------------------------------------------------------
/conduit/scripts/cwl_monitor:
--------------------------------------------------------------------------------
1 | #! /usr/bin/python
2 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | from watchdog.observers import Observer
17 | from watchdog.events import FileSystemEventHandler, LoggingEventHandler, FileModifiedEvent
18 | from cwl-to-dag import CwlParser
19 | import parse
20 | import sys
21 | import time
22 | import logging
23 | import re
24 | class CWLFileHandler(LoggingEventHandler):
25 | def on_any_event(self,event):
26 | super.on_any_event(event)
27 | if isinstance(event, FileModifiedEvent):
28 | path_s = event.src_path.split('/')
29 | meta_wf_name = path_s[0]
30 | wf_name = path_s[1]
31 | job_name = path_s[2]
32 | if re.match('.*job.*\.yml$', path_s[3]):
33 | # Job file was modified
34 | elif re.match('.*\.cwl$', path_s[3]):
35 | # Workflow file was modified
36 |
37 |
38 | # Need to rebuild scripts under wf_name
39 | def job_modified(self,event):
40 | pass
41 | def workflow_modified(self, event):
42 | pass
43 | def tool_modified(self, event):
44 | pass
45 | def params_modified(self, event):
46 | pass
47 |
48 |
49 |
50 | if __name__ == "__main__":
51 | logging.basicConfig(level=logging.INFO,
52 | format='%(asctime)s - %(message)s',
53 | datefmt='%Y-%m-%d %H:%M:%S')
54 | # path = sys.argv[1] if len(sys.argv) > 1 else '.'
55 | path = '/saber/'
56 | # Find workflows
57 | # Find jobs
58 |
59 |
60 |
61 | c = CWLFileHandler()
62 | observer = Observer()
63 | observer.schedule(c, path, recursive=True)
64 | observer.start()
65 | try:
66 | while True:
67 | time.sleep(1)
68 | except KeyboardInterrupt:
69 | observer.stop()
70 | observer.join()
71 |
--------------------------------------------------------------------------------
/conduit/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aplbrain/saber/051b9506fd7356113be013ac3c435a101fd95123/conduit/tests/__init__.py
--------------------------------------------------------------------------------
/conduit/tests/test_awsbatch_operator.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | class TestAwsBatchOperator(unittest.TestCase):
3 | def setUp(self):
4 | pass
5 | def test_execute(self):
6 | # Test that execution works properly
7 | # Perhaps use a fake AWS server somehow?
8 | # Initialize object and set operator.client to some fake fixture?
9 | pass
10 |
--------------------------------------------------------------------------------
/conduit/tests/test_commandlist.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from conduit.utils.command_list import generate_command_list, generate_io_strings, sub_params
3 | import yaml
4 |
5 | class TestCommandlist(unittest.TestCase):
6 | # TODO needs to be class
7 |
8 | def setUp(self):
9 | pass
10 | def test_generate_command_list(self):
11 | # Test cases:
12 | # 1. Single input, single output (SISO) tool
13 | # 2. Multi input, multi output (MIMO) tool
14 | # 3. MIMO tool with local
15 | # 4. MIMO tool with iteration parameters
16 | # 5. MIMO tool with no file path
17 | # 6. MIMO tool with file path
18 | pass
19 | def test_sub_params(self):
20 | # Test cases:
21 | # 1. Single input
22 | # 2. Multi input
23 | # 3. Edge case
24 | pass
25 | def test_generate_io_strings(self):
26 | # Test cases:
27 | # 1. Empty input
28 | # 2. Single input
29 | # 3. Multi input
30 | # 4. Edge case
31 | pass
32 |
33 |
--------------------------------------------------------------------------------
/conduit/tests/test_cwlparser.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from conduit.utils.cwlparser import CwlParser
3 |
4 | class TestCwlParser(unittest.TestCase):
5 | def setUp(self):
6 | pass
7 | def test_resolve_tools(self):
8 | # Test cases:
9 | # 1. Single tool
10 | # 2. Multiple tools
11 | # 3. Workflow CWL not in current directory
12 | pass
13 | def test_generate_volume_list(self):
14 | # Test cases:
15 | # 1. Test no outputs
16 | # 2. Single output
17 | # 3. Multiple output
18 | # 4. Empty local path
19 | pass
20 | def test_create_job_definitions(self):
21 | # Test cases:
22 | # 1. Single tool
23 | # 2. Multiple tools
24 | # 3. Workflow CWL not in current directory
25 | pass
26 | def test_build_docker_images(self):
27 | # Test cases:
28 | # 1. Single tool
29 | # 2. Multiple tools
30 | # 3. Multiple tools using same image
31 | pass
32 | def test_create_subdag(self):
33 | # Test cases:
34 | # 1. Single tool
35 | # 2. Multiple tools
36 | # 3. No iterations
37 | # 4. Multiple iterations
38 | # 5. Empty update dict
39 |
40 | pass
41 |
42 | def test_generate_dag(self):
43 | # Test cases:
44 | # 1. Single tool
45 | # 2. Multiple tools
46 | # 3. No iterations
47 | # 4. Multiple iterations
48 | # 5. Empty update dict
49 | # 6. Subdag = False
50 | pass
51 | def test_resolve_args(self):
52 | # Test cases:
53 | # 1. Single tool
54 | # 2. Multiple tools
55 | # 3. No iterations
56 | # 4. Multiple iterations
57 | # 5. Empty update dict
58 | pass
59 | def test_resolve_dependencies(self):
60 | # Test cases:
61 | # 1. Single tool
62 | # 2. Multiple tools
63 | # 3. Single tool, no outputs
64 | # 4. Multiple iterations
65 | # 5. Empty update dict
66 | pass
67 | def test_resolve_glob(self):
68 | # Test cases:
69 | # 1. Undefined tool
70 | # 2. Non-parseable glob
71 | # 3. Parseable but non-input glob
72 | pass
73 |
--------------------------------------------------------------------------------
/conduit/tests/test_data/test_parameterization.yml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | metaparam1:
16 | range:
17 | start: 0.0
18 | stop: 1
19 | step: 0.1
20 | parameters:
21 | abs: param1
22 | steps:
23 | - step1
24 | - step2
25 | - step3
26 | metaparam2:
27 | range:
28 | start: 0.0
29 | stop: 0.2
30 | step: 0.1
31 | parameters:
32 | abs: param2
33 | steps:
34 | - step1
35 | metaparam3:
36 | range:
37 | start: 0.0
38 | stop: 1
39 | step: 0.1
40 | parameters:
41 | abs: param2
42 | steps:
43 | - step1
44 |
45 | metaparam4:
46 | range:
47 | start: 0.0
48 | stop: 1
49 | step: 0.1
50 | parameters:
51 | abs: param2
52 | steps:
53 | - step1
54 |
55 | metaparam5:
56 | range:
57 | start: 0.0
58 | stop: 1
59 | step: 0.1
60 | parameters:
61 | abs: param2
62 | steps:
63 | - step1
--------------------------------------------------------------------------------
/conduit/tests/test_datajoint_hook.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | # Wait until refactor here
3 | # from conduit.utils.datajoint_hook import *
4 |
5 | class TestDatajointHook(unittest.TestCase):
6 | def setUp(self):
7 | pass
8 |
--------------------------------------------------------------------------------
/conduit/tests/test_job_definitions.py:
--------------------------------------------------------------------------------
1 | import unittest
2 |
3 | from conduit.utils.job_definitions import (create_and_push_docker_image,
4 | create_job_definition,
5 | create_job_definitions, docker_auth,
6 | docker_login, docker_registry_login,
7 | extract, generate_job_definition,
8 | get_original_docker_name,
9 | make_build_context, make_tag)
10 | class TestJobDefinitions(unittest.TestCase):
11 | # TODO need to make job definitions into a class in order to test properly
12 | def setUp(self):
13 | pass
14 | def test_create_and_push_docker_image(self):
15 | pass
16 | def test_create_job_definition(self):
17 | pass
18 | def test_create_job_definitions(self):
19 | pass
20 | def test_docker_auth(self):
21 | pass
22 | def test_docker_login(self):
23 | pass
24 | def test_docker_registry_login(self):
25 | pass
26 | def test_extract(self):
27 | pass
28 | def test_generate_job_definition(self):
29 | pass
30 | def test_get_original_docker_name(self):
31 | pass
32 | def test_make_build_context(self):
33 | pass
34 | def test_make_tag(self):
35 | pass
36 |
37 |
--------------------------------------------------------------------------------
/conduit/tests/test_parameterization.py:
--------------------------------------------------------------------------------
1 |
2 | import unittest
3 | import yaml
4 | import json
5 | import os
6 | import itertools
7 | import numpy as np
8 | from conduit.utils.parameterization import parameterize
9 | from conduit.tests.testing_utils import load_test_data
class TestParameterization(unittest.TestCase):
    """Tests for ``conduit.utils.parameterization.parameterize``.

    Fixtures come from ``test_data/test_parameterization.yml``; expected
    step dicts are written as format templates (``"{a}"``) and rendered
    with concrete sweep values inside ``assertDictLike``.
    """

    def setUp(self):
        # Shared fixture: metaparameter definitions loaded from YAML.
        self._test_data = load_test_data('test_parameterization.yml')

    def test_parameterize_single(self):
        """One metaparameter yields one parameter set per sweep value."""
        data = {"metaparam1": self._test_data['metaparam1']}
        p = parameterize(data)
        expected_dict_format = {
            "step1": {
                "param1": "{a}"
            },
            "step2": {
                "param1": "{a}"
            },
            "step3": {
                "param1": "{a}"
            }
        }
        # Sweep values are assumed to be 0.0, 0.1, 0.2, ... in order.
        for i, step in enumerate(p):
            self.assertDictLike(expected_dict_format, step, a=0.1 * i)

    def test_parameterize_multiple(self):
        """Two metaparameters yield the cartesian product of their sweeps."""
        data = {
            "metaparam1": self._test_data['metaparam1'],
            "metaparam2": self._test_data['metaparam2'],
        }
        p = parameterize(data)
        expected_dict_format = {
            "step1": {
                "param1": "{a}",
                "param2": "{b}"
            },
            "step2": {
                "param1": "{a}",
            },
            "step3": {
                "param1": "{a}",
            }
        }
        # Expected ordering: product of metaparam1's range [0, 1) step 0.1
        # with metaparam2's range [0, 0.2) step 0.1.
        vals = list(itertools.product(np.arange(0.0, 1, 0.1),
                                      np.arange(0.0, 0.2, 0.1)))
        self.assertEqual(len(p), len(vals))
        for step, (a, b) in zip(p, vals):
            self.assertDictLike(expected_dict_format, step, a=a, b=b)

    def assertDictLike(self, d1, d2, *args, **kwargs):
        """Assert d2 equals d1 after d1's "{...}" templates are filled in.

        Both dicts are round-tripped through YAML so that equivalent
        scalar representations (e.g. 0.1 vs "0.1") compare equal.
        """
        # Disable anchor/alias emission so repeated sub-dicts dump as
        # plain text that str.format can substitute into.
        yaml.Dumper.ignore_aliases = lambda *args: True
        d1str = yaml.dump(d1, default_flow_style=False)
        d2str = yaml.dump(d2, default_flow_style=False)
        d1str = d1str.format(*args, **kwargs)
        # safe_load: these are trusted local strings, but safe_load also
        # avoids the PyYAML >= 5.1 no-Loader deprecation warning.
        d1l = yaml.safe_load(d1str)
        d2l = yaml.safe_load(d2str)
        self.assertEqual(d1l, d2l)
68 |
# Allow running this test module directly (python test_parameterization.py).
if __name__ == "__main__":
    unittest.main()
--------------------------------------------------------------------------------
/conduit/tests/test_saber_docker_operator.py:
--------------------------------------------------------------------------------
1 | import unittest
class TestSaberDockerOperator(unittest.TestCase):
    """Stub test case for ``conduit.utils.saber_docker_operator``."""

    def setUp(self):
        """No fixtures are required yet."""

    def test_execute(self):
        """TODO: cover SaberDockerOperator.execute."""
7 |
--------------------------------------------------------------------------------
/conduit/tests/testing_utils.py:
--------------------------------------------------------------------------------
import os

import yaml
def load_test_data(filename):
    """Load a YAML fixture from this package's ``test_data`` directory.

    Parameters
    ----------
    filename : str
        Name of a file inside ``conduit/tests/test_data/``.

    Returns
    -------
    The deserialized YAML content (typically a dict).
    """
    # Bug fix: this module used `os` without importing it, so every call
    # raised NameError; `import os` is now added at the top of the file.
    fileloc = os.path.dirname(__file__)
    fn = os.path.join(fileloc, 'test_data', filename)
    with open(fn) as fp:
        # safe_load avoids arbitrary object construction and the
        # PyYAML >= 5.1 no-Loader deprecation warning.
        test_data = yaml.safe_load(fp)
    return test_data
--------------------------------------------------------------------------------
/conduit/utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aplbrain/saber/051b9506fd7356113be013ac3c435a101fd95123/conduit/utils/__init__.py
--------------------------------------------------------------------------------
/conduit/utils/saber_docker_operator.py:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | import time
16 | import parse
17 |
18 | from airflow.operators.docker_operator import DockerOperator
19 | from conduit.utils.datajoint_hook import DatajointHook, JobMetadata
20 | from datajoint import DuplicateError
21 |
22 |
class SaberDockerOperator(DockerOperator):
    """Airflow DockerOperator that records each run's wall-clock cost and an
    optionally parsed 'score' into the SABER Datajoint job-metadata table.
    """

    def __init__(self, *args, workflow_id, score_format="", **kwargs):
        """
        Parameters
        ----------
        workflow_id :
            Identifier of the workflow this task belongs to; stored with
            every metadata row inserted by execute().
        score_format : str
            `parse`-style format string used to extract a named 'score'
            field from the container's log output. Empty string disables
            score extraction (score is recorded as None).

        All other args/kwargs are forwarded to DockerOperator. 'task_id'
        must be present in kwargs and is expected to look like
        '<job_id>.<iteration>' — see execute().
        """
        super().__init__(*args, **kwargs)
        self.score_format = score_format
        self.workflow_id = workflow_id
        self.task_id = kwargs["task_id"]
        self.dj_hook = DatajointHook()

    def execute(self, *args, **kwargs):
        """Run the container, then insert a JobMetadata row with the task's
        wall-clock duration ('cost') and parsed score."""
        begin_time = time.time()
        super().execute(*args, **kwargs)
        # Wall-clock duration of the container run, recorded as 'cost'.
        task_time = time.time() - begin_time
        score = self._get_score()
        # task_id is split as '<job_id>.<iteration>'; a task_id without a
        # '.' would raise IndexError here — assumed guaranteed by the DAG
        # builder. TODO confirm.
        iteration = self.task_id.split(".")[1]
        real_task_id = self.task_id.split(".")[0]
        self.log.info(
            "Inserting {} {} {} {} {} into job metadata database".format(
                self.workflow_id, iteration, real_task_id, task_time, score
            )
        )
        self.dj_hook.insert1(
            {
                "iteration": iteration,
                "workflow_id": self.workflow_id,
                "job_id": real_task_id,
                "cost": task_time,
                "score": score,
            },
            JobMetadata,
        )

    def _get_score(self):
        """Scan the container's logs for a line matching score_format and
        return its 'score' field as a float; None if disabled or not found.
        """
        if self.score_format:
            logEvents = self.cli.logs(container=self.container["Id"], stream=True)
            # NOTE(review): the original comment claimed events arrive most
            # recent first, but the docker SDK log stream appears to yield
            # them oldest-first — so this returns the FIRST matching score,
            # not necessarily the latest. Confirm which is intended.
            for logEvent in logEvents:
                parsed_event = parse.parse(self.score_format, logEvent.decode())
                if parsed_event and "score" in parsed_event.named:
                    return float(parsed_event["score"])
            self.log.info("Score format present but no score found in logs...")
        return None
66 |
--------------------------------------------------------------------------------
/demos/dvid_ffn/block_params.yml:
--------------------------------------------------------------------------------
1 | x:
2 | range:
3 | start: 2000
4 | stop: 2512
5 | step: 128
6 | parameters:
7 | min: xmin
8 | max: xmax
9 | steps:
10 | - dvid_pull_raw
11 | - dvid_push_seg
12 | y:
13 | range:
14 | start: 2000
15 | stop: 2512
16 | step: 128
17 | parameters:
18 | min: ymin
19 | max: ymax
20 | steps:
21 | - dvid_pull_raw
22 | - dvid_push_seg
23 | z:
24 | range:
25 | start: 2000
26 | stop: 2512
27 | step: 128
28 | parameters:
29 | min: zmin
30 | max: zmax
31 | steps:
32 | - dvid_pull_raw
33 | - dvid_push_seg
--------------------------------------------------------------------------------
/demos/dvid_ffn/job_dvid_ffn.yml:
--------------------------------------------------------------------------------
1 | # DVID:
2 | host_name: 3.209.156.251:8001
3 | uuid: 56e2e4251774426abdde8cdee4be747e
4 | resource_name: validation
5 | dtype_name: uint8
6 | type: uint8blk
7 | resolution: 0
8 | xmin: 0
9 | xmax: 64
10 | ymin: 0
11 | ymax: 64
12 | zmin: 0
13 | zmax: 64
14 | pull_output_name: medulla7_raw.npy
15 | resource_name_out: ffn_segmentation
16 | dtype_name_out: uint64
17 | type_out: labelblk
18 |
19 | # FFN segmentation
20 | image_mean: 128
21 | image_stddev: 33
22 | depth: 12
23 | fov_size: 33,33,33
24 | deltas: 8,8,8
25 | init_activation: 0.95
26 | pad_value: 0.05
27 | move_threshold: 0.9
28 | min_boundary_dist: 1,1,1
29 | segment_threshold: 0.6
30 | min_segment_size: 1000
31 | bound_start: 0,0,0
32 | bound_stop: 64,64,64
33 | outfile: medulla7_ffn_out.npy
--------------------------------------------------------------------------------
/demos/dvid_ffn/job_public.yml:
--------------------------------------------------------------------------------
1 | # DVID:
2 | host_name: emdata.janelia.org
3 | uuid: 822524777d3048b8bd520043f90c1d28
4 | resource_name: grayscale
5 | dtype_name: uint8
6 | type: uint8blk
7 | resolution: 0
8 | xmin: 2000
9 | xmax: 2256
10 | ymin: 2000
11 | ymax: 2256
12 | zmin: 2000
13 | zmax: 2256
14 | pull_output_name: medulla7_raw.npy
15 |
16 | # FFN segmentation
17 | image_mean: 128
18 | image_stddev: 33
19 | depth: 12
20 | fov_size: 33,33,33
21 | deltas: 8,8,8
22 | init_activation: 0.95
23 | pad_value: 0.05
24 | move_threshold: 0.9
25 | min_boundary_dist: 1,1,1
26 | segment_threshold: 0.6
27 | min_segment_size: 1000
28 | bound_start: 0,0,0
29 | bound_stop: 64,64,64
30 | outfile: medulla7_ffn_out.npy
--------------------------------------------------------------------------------
/demos/em_pipelines/job_i2g.yaml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | # BOSS:
16 | #------------------------------------------------
17 | # general:
18 | host_bossdb: api.bossdb.io
19 | token_bossdb: public
20 | coll_name: Kasthuri
21 | exp_name: ac4
22 | coord_name: ac4-cf
23 | resolution: 0
24 | xmin: 0
25 | xmax: 512
26 | ymin: 0
27 | ymax: 512
28 | zmin: 0
29 | zmax: 50
30 | padding: 0
31 | onesided: 0
32 | #inputs:
33 | in_chan_name_raw: em
34 | itype_name_in: image
35 | dtype_name_in: uint8
36 | #outputs
37 | pull_output_name_raw: pull_output_raw.npy
38 |
39 |
40 | #PROCESSING:
41 | #------------------------------------------------
42 | #General:
43 | width: 512
44 | height: 512
45 | mode: synapse
46 | #Synapse_detection
47 | synapse_output: synapse_output.npy
48 | #Membrane_detection
49 | membrane_output: membrane_output.npy
50 | #neuron_segmentation:
51 | train_file: ./trained_classifier.pkl
52 | neuron_output: neuron_output.npy
# mode: synapse  # NOTE: duplicate of the 'mode' key in the General section above; commented out to avoid YAML duplicate-key ambiguity
54 | neuron_mode: 1
55 | agg_threshold: "0.5"
56 | seeds_cc_threshold: "5"
57 | #syn assoc
58 | assoc_output_name: edge_list.pkl
59 | assoc_output_name_noneu: edge_list_noneu.pkl
60 | _saber_bucket: saber-batch
61 |
62 |
--------------------------------------------------------------------------------
/demos/em_pipelines/job_synapse_threshold.yaml:
--------------------------------------------------------------------------------
1 | #Boss files
2 | host_name: api.bossdb.io
3 | coord: ac4-cf
4 | token: public
5 | coll: Kasthuri
6 | exp: ac4
7 | chan_labels: synapse
8 | chan_img: em
9 | dtype_img: uint8
10 | dtype_lbl: uint64
11 | itype_name: image
12 | res: 0
13 | xmin: 0
14 | xmax: 256
15 | ymin: 0
16 | ymax: 256
17 | zmin: 0
18 | zmax: 50
19 | padding: 0
20 |
21 | #threshold input
22 | threshold: 0.5
23 |
24 | # synapse detection
25 | width: 256
26 | height: 256
27 | mode: synapse
28 |
29 | #Output File Names
30 | raw_pull_output_name: pull_output.npy
31 | anno_pull_output_name: anno_output.npy
32 | synapse_output_name: synapse_output.npy
33 | threshold_output_name: threshold_output.npy
--------------------------------------------------------------------------------
/demos/local_input_example/job.yaml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 |
16 | #General:
17 | width: 160
18 | height: 160
19 | mode: synapse
20 | #Synapse_detection
21 | input:
22 | class: File
23 | path: test_dir/pull_output_raw.npy
24 | synapse_output: synapse_output.npy
25 |
26 |
--------------------------------------------------------------------------------
/demos/local_input_example/local_input.cwl:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | #!/usr/bin/env cwl-runner
16 | ## This workflow will make use of the general synapse and membrane detection cwl files, meaning the processes will happen on CPU rather than on GPU. Does not include Boss push steps.
17 |
18 | cwlVersion: v1.0
19 | class: Workflow
20 | doc: local
21 |
22 | inputs:
23 | #Inputs for processing
24 | width: int?
25 | height: int?
26 | mode: string
27 | input: File
28 |
29 | #Inputs for output names:
30 | synapse_output: string
31 |
32 | outputs:
33 | synapse_detection:
34 | type: File
35 | outputSource: synapse_detection/synapse_detection_out
36 |
37 | steps:
38 |
39 | synapse_detection:
40 | run: ../../../i2g/detection/synapse_detection.cwl
41 | in:
42 | input: input
43 | width: width
44 | height: height
45 | mode: mode
46 | output: synapse_output
47 | hints:
48 | saber:
49 | local: True
50 | file_path: ""
51 | out: [synapse_detection_out]
52 |
--------------------------------------------------------------------------------
/docker-compose-tools.yml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | version: '2.1'
16 | services:
17 | xbrain:
18 | privileged: true
19 | build:
20 | context: ./saber/xbrain
21 | dockerfile: Dockerfile
22 | image: aplbrain/xbrain
23 | xbrain_unets:
24 | privileged: true
25 | build:
26 | context: ./saber/xbrain/unets
27 | dockerfile: Dockerfile
28 | image: aplbrain/unets
29 | boss:
30 | privileged: true
31 | build:
32 | context: ./saber/boss_access
33 | dockerfile: Dockerfile
34 | image: aplbrain/boss-access
35 | # i2gdetect:
36 | # privileged: true
37 | # build:
38 | # context: ./saber/i2g/detection
39 | # dockerfile: Dockerfile
40 | # image: aplbrain/i2gdetect
41 | # i2gdetect_gpu:
42 | # privileged: true
43 | # build:
44 | # context: ./saber/i2g/detection/gpu
45 | # dockerfile: Dockerfile
46 | # image: aplbrain/i2gdetect_gpu
47 | # i2gmetrics:
48 | # privileged: true
49 | # build:
50 | # context: ./saber/i2g/metric_computation
51 | # dockerfile: Dockerfile
52 | # image: aplbrain/i2gmetrics
53 | # i2gseg:
54 | # privileged: true
55 | # build:
56 | # context: ./saber/i2g/neuron_segmentation
57 | # dockerfile: Dockerfile
58 | # image: aplbrain/i2gseg
59 | # i2gseg:
60 | # privileged: true
61 | # build:
62 | # context: ./saber/i2g/neuron_segmentation
63 | # dockerfile: Dockerfile
64 | # image: aplbrain/i2gseg
65 | # i2gassoc:
66 | # privileged: true
67 | # build:
68 | # context: ./saber/i2g/seg_syn_association
69 | # dockerfile: Dockerfile
70 | # image: aplbrain/i2gassoc
71 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | version: '2.1'
16 | services:
17 | postgres:
18 | image: postgres:9.6
19 | environment:
20 | - POSTGRES_USER=airflow
21 | - POSTGRES_PASSWORD=airflow
22 | - POSTGRES_DB=airflow
23 |
24 | webserver:
25 | privileged: true
26 | build:
27 | context: .
28 | dockerfile: WebDockerfile
29 | restart: always
30 | depends_on:
31 | - postgres
32 | - datajoint
33 | environment:
34 | - LOAD_EX=n
35 | - EXECUTOR=Local
36 | volumes:
37 | # - ./dags:/usr/local/airflow/dags
38 | - ./conduit:/conduit
39 | - ~/.aws:/root/.aws
40 | - ./volumes/logs:/root/logs
41 | # Uncomment to include custom plugins
42 | - ./conduit/plugins:/root/plugins
43 | - /var/run/docker.sock:/var/run/docker.sock
44 | ports:
45 | - "8080:8080"
46 | command: webserver
47 | healthcheck:
48 | test: ["CMD-SHELL", "[ -f /root/airflow-webserver.pid ]"]
49 | interval: 30s
50 | timeout: 30s
51 | retries: 3
52 | cwl_parser:
53 | privileged: true
54 | build:
55 | context: .
56 | dockerfile: CWLDockerfile
57 | depends_on:
58 | - webserver
59 | environment:
60 | - DOCKER_CLIENT_TIMEOUT=120
61 | - COMPOSE_HTTP_TIMEOUT=120
62 | volumes:
63 | - ./conduit:/conduit
64 | - ~/.aws:/root/.aws
65 | - ./saber/:/saber
66 | - ./demos:/demos
67 | - /var/run/docker.sock:/var/run/docker.sock
68 |
69 | command: tail -F root
70 | datajoint:
71 | image: datajoint/mysql
72 | ports:
73 | - "3306:3306"
74 | environment:
75 | - MYSQL_ROOT_PASSWORD=airflow
76 | volumes:
77 | - ./volumes/data:/var/lib/mysql
78 |
79 |
80 |
--------------------------------------------------------------------------------
/saber/boss_access/Dockerfile:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | #Use an official Python runtime as a parent image
16 | FROM python:3.6
17 |
18 | # Install any needed packages specified in requirements.txt
19 | RUN pip install numpy
20 | RUN pip install scikit-image
21 | RUN pip install scipy boto3
22 |
23 | # RUN git clone https://github.com/jhuapl-boss/intern.git && cd intern && git checkout RemoteExtension && git pull && python3 setup.py install --user
24 | RUN pip install intern
25 |
26 | RUN mkdir /app
27 | COPY ./boss_access.py /app/
28 | RUN chown -R 1000:100 /app/
29 | ENV PATH /app:$PATH
30 | WORKDIR /app
31 |
32 |
--------------------------------------------------------------------------------
/saber/boss_access/boss_test/dummy_workflow.cwl:
--------------------------------------------------------------------------------
1 |
2 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | cwlVersion: v1.0
17 | class: Workflow
18 | doc: local
19 | inputs:
20 | # Inputs for BOSS
21 | host_name: string
22 | token: string
23 | coll_name: string
24 | exp_name: string
25 | coord_name: string
26 | xmin: int?
27 | xmax: int?
28 | ymin: int?
29 | ymax: int?
30 | zmin: int?
31 | zmax: int?
32 | padding: int?
33 | resolution: int?
34 | output_name: string
35 | dtype_name: string
36 | itype_name: string
37 | ## Boss pull
38 | in_chan_name: string
39 |
40 | outputs:
41 | pull_output:
42 | type: File
43 | outputSource: boss_pull/pull_output
44 | steps:
45 | boss_pull:
46 | run: ../../../../saber/boss_access/boss_pull_nos3.cwl
47 | in:
48 | host_name: host_name
49 | token: token
50 | coll_name: coll_name
51 | exp_name: exp_name
52 | chan_name: in_chan_name
53 | dtype_name: dtype_name
54 | itype_name: itype_name
55 | resolution: resolution
56 | xmin: xmin
57 | xmax: xmax
58 | ymin: ymin
59 | ymax: ymax
60 | zmin: zmin
61 | zmax: zmax
62 | padding: padding
63 | output_name: output_name
64 | coord_name: coord_name
65 | hints:
66 | saber:
67 | local: True
68 | file_path: /Users/xenesd1/Projects/aplbrain/saber/volumes/data/local
69 | out:
70 | [pull_output]
71 |
--------------------------------------------------------------------------------
/saber/boss_access/boss_test/job_params.yml:
--------------------------------------------------------------------------------
1 |
2 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | # Inputs for BOSS
16 | host_name: api.bossdb.io
17 | token: public
18 | coll_name: Kasthuri
19 | exp_name: em
20 | dtype_name: uint8
21 | itype_name: image
22 | coord_name: KasthuriFrame
23 | xmin: 5000
24 | xmax: 6000
25 | ymin: 5000
26 | ymax: 6000
27 | zmin: 600
28 | zmax: 610
29 | padding: 0
30 | resolution: 0
31 | in_chan_name: images
32 | output_name: kasthuri_raw.npy
33 |
--------------------------------------------------------------------------------
/saber/boss_access/boss_test/sweep.yml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | sampler:
16 | method: batch-grid
17 | batch_size: 2
18 | x:
19 | range:
20 | start: 5000
21 | stop: 6000
22 | step: 500
23 | parameters:
24 | min: xmin
25 | max: xmax
26 | steps:
27 | - boss_pull
28 | y:
29 | range:
30 | start: 5000
31 | stop: 6000
32 | step: 500
33 | parameters:
34 | min: ymin
35 | max: ymax
36 | steps:
37 | - boss_pull
38 | # z:
39 | # range:
40 | # start: 631
41 | # stop: 998
42 | # step: 92
43 | # parameters:
44 | # min: zmin
45 | # max: zmax
46 | # steps:
47 | # - boss_pull_raw
48 | # - boss_push_synapses_boss
49 | # - boss_push_synapses_bossdb
50 | # - boss_push_membranes_boss
51 | # - boss_push_membranes_bossdb
52 |
--------------------------------------------------------------------------------
/saber/datajoint/docker-compose.yml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
# Only functional on a machine where port 3306 is not already in use.
16 |
17 | version: '2'
18 |
19 | services:
20 | db:
21 | image: datajoint/mysql
22 | ports:
23 | - "3306:3306"
24 | environment:
25 | - MYSQL_ROOT_PASSWORD=simple
26 | volumes:
27 | - ./data:/var/lib/mysql
--------------------------------------------------------------------------------
/saber/dvid_access/Dockerfile:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | #Use an official Python runtime as a parent image
16 | FROM python:3.6
17 |
18 | RUN pip install boto3==1.11.8
19 | RUN git clone https://github.com/jhuapl-boss/intern.git && cd intern && git checkout RemoteExtension && git pull && python3 setup.py install --user
20 | RUN mkdir /app
21 | COPY ./dvid_access.py /app/
22 | RUN chown -R 1000:100 /app/
23 | ENV PATH /app:$PATH
24 | WORKDIR /app
25 |
26 |
--------------------------------------------------------------------------------
/saber/dvid_access/README.md:
--------------------------------------------------------------------------------
# DVID Access Docker Container
2 |
3 | ## Overview
4 |
5 | This Docker container contains the tools necessary to push and pull data from the DVID Service.
6 |
7 | ## Building
8 |
9 | 1. Navigate to this folder
10 |
11 | ```
cd saber/saber/dvid_access/
13 | ```
14 | 1. Build the docker container
15 |
16 | ```
17 | docker build -t aplbrain/dvid-access .
18 | ```
19 |
20 | ## Running
21 |
22 | One can either run this docker container as a standalone tool, or you can launch an interactive terminal and access the tools via the command line. This is recommended, as you only have to attach volumes once.
23 |
24 | ```
docker run -it -v $(pwd)/data:/data/ aplbrain/dvid-access /bin/bash
26 | ```
27 |
28 | This will launch the container as an interactive terminal and bind `./data` on your local system to `/data/` in the container.
--------------------------------------------------------------------------------
/saber/dvid_access/dvid_pull.cwl:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | #!/usr/bin/env cwl-runner
16 |
17 | cwlVersion: v1.0
18 | class: CommandLineTool
19 | hints:
20 | DockerRequirement:
21 | dockerPull: aplbrain/dvid-access
22 | baseCommand: python
23 | arguments: ['/app/dvid_access.py', 'pull']
24 | inputs:
25 | host_name:
26 | type: string
27 | inputBinding:
28 | position: 1
29 | prefix: --host
30 | uuid:
31 | type: string
32 | inputBinding:
33 | position: 2
34 | prefix: --uuid
35 | dtype_name:
36 | type: string
37 | inputBinding:
38 | position: 3
39 | prefix: --datatype
40 | resource_name:
41 | type: string
42 | inputBinding:
43 | position: 4
44 | prefix: --data_instance
45 | resolution:
46 | type: int?
47 | inputBinding:
48 | prefix: --res
49 | position: 5
50 | xmin:
51 | type: int?
52 | inputBinding:
53 | prefix: --xmin
54 | position: 6
55 | xmax:
56 | type: int?
57 | inputBinding:
58 | prefix: --xmax
59 | position: 7
60 | ymin:
61 | type: int?
62 | inputBinding:
63 | prefix: --ymin
64 | position: 8
65 | ymax:
66 | type: int?
67 | inputBinding:
68 | prefix: --ymax
69 | position: 9
70 | zmin:
71 | type: int?
72 | inputBinding:
73 | prefix: --zmin
74 | position: 10
75 | zmax:
76 | type: int?
77 | inputBinding:
78 | prefix: --zmax
79 | position: 11
80 | output_name:
81 | type: string
82 | inputBinding:
83 | position: 12
84 | prefix: --output
85 | type:
86 | type: string?
87 | inputBinding:
88 | prefix: --type
89 | position: 13
90 | alias:
91 | type: string?
92 | inputBinding:
93 | prefix: --alias
94 | position: 14
95 | outputs:
96 | pull_output:
97 | type: File
98 | outputBinding:
99 | glob: $(inputs.output_name)
100 |
--------------------------------------------------------------------------------
/saber/dvid_access/dvid_push.cwl:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | #!/usr/bin/env cwl-runner
16 |
17 | cwlVersion: v1.0
18 | class: CommandLineTool
19 | hints:
20 | DockerRequirement:
21 | dockerPull: aplbrain/dvid-access
22 | baseCommand: python
23 | arguments: ['/app/dvid_access.py', 'push']
24 | inputs:
25 | input:
26 | type: File
27 | inputBinding:
28 | position: 1
29 | prefix: -i
30 | host_name:
31 | type: string
32 | inputBinding:
33 | position: 2
34 | prefix: --host
35 | uuid:
36 | type: string?
37 | inputBinding:
38 | position: 3
39 | prefix: --uuid
40 | dtype_name:
41 | type: string
42 | inputBinding:
43 | position: 4
44 | prefix: --datatype
45 | resource_name:
46 | type: string
47 | inputBinding:
48 | position: 5
49 | prefix: --data_instance
50 | resolution:
51 | type: int?
52 | inputBinding:
53 | prefix: --res
54 | position: 6
55 | xmin:
56 | type: int?
57 | inputBinding:
58 | prefix: --xmin
59 | position: 7
60 | xmax:
61 | type: int?
62 | inputBinding:
63 | prefix: --xmax
64 | position: 8
65 | ymin:
66 | type: int?
67 | inputBinding:
68 | prefix: --ymin
69 | position: 9
70 | ymax:
71 | type: int?
72 | inputBinding:
73 | prefix: --ymax
74 | position: 10
75 | zmin:
76 | type: int?
77 | inputBinding:
78 | prefix: --zmin
79 | position: 11
80 | zmax:
81 | type: int?
82 | inputBinding:
83 | prefix: --zmax
84 | position: 12
85 | source:
86 | type: string?
87 | inputBinding:
88 | prefix: --source
89 | position: 13
90 | type:
91 | type: string?
92 | inputBinding:
93 | prefix: --type
94 | position: 14
95 | alias:
96 | type: string?
97 | inputBinding:
98 | prefix: --alias
99 |       position: 15
100 | outputs: []
101 |
102 |
--------------------------------------------------------------------------------
/saber/i2g/detection/Dockerfile:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | FROM ermaker/keras
16 |
17 | RUN apt-get clean
18 | RUN apt-get update
19 | RUN apt-get -y upgrade
20 |
21 | RUN apt-get -y install build-essential
22 |
23 | RUN apt-get -y install \
24 | libx11-dev \
25 | libblosc-dev \
26 | libblas-dev \
27 | liblapack-dev \
28 | wget
29 |
30 | RUN apt-get update \
31 | && apt-get install -y python3-pip python3-dev \
32 | && cd /usr/local/bin \
33 | && ln -s /usr/bin/python3 python \
34 | && pip3 install --upgrade pip
35 |
36 | # Setup python packages
37 | RUN pip3 install Theano
38 | RUN pip3 install numpy
39 | RUN pip3 install awscli
40 | RUN pip3 install boto3
41 |
42 | # Install intern
43 | RUN pip3 install intern
44 | RUN mkdir -p /src/weights
45 | RUN wget --directory-prefix /src/weights https://raw.githubusercontent.com/aplbrain/emcv/master/unets/weights/kasthuri/synapse_weights.hdf5
46 | RUN wget --directory-prefix /src/weights https://raw.githubusercontent.com/aplbrain/emcv/master/unets/weights/kasthuri/membrane_weights.hdf5
47 | # Create workspace
48 | # TODO: Re-org this to use git clone and S3
49 | WORKDIR /src
50 | COPY ./*.py /src/
51 |
52 |
53 | ENV KERAS_BACKEND=theano
54 | ENV PATH=/src:$PATH
55 |
56 | RUN mkdir ~/.aws
57 | ENTRYPOINT ["python", "deploy_pipeline.py"]
58 |
--------------------------------------------------------------------------------
/saber/i2g/detection/README.md:
--------------------------------------------------------------------------------
1 | # Detection
2 | This module runs synapse or membrane detection on an EM data block stored on the file system and linked through the bind mount command -v. The instructions below illustrate running on a data block stored in the current directory as ./test_volume.npy. To build and run the example, have the Docker daemon running and enter the following instructions into the command line.
3 |
4 | To build :
5 | `docker build -t i2g:detect .`
6 |
7 | To run :
8 |
9 | `docker run -v $(pwd):/data i2g:detect -i /data/test_volume.npy -o /data/synapse_output.npy -x 256 -y 256 --z_step=4 --mode=synapse`
10 |
11 | or
12 |
13 | `docker run -v $(pwd):/data i2g:detect -i /data/test_volume.npy -o /data/membrane_output.npy -x 256 -y 256 --z_step=4 --mode=membrane`
14 |
15 |
--------------------------------------------------------------------------------
/saber/i2g/detection/membrane_detection.cwl:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | cwlVersion: v1.0
16 | class: CommandLineTool
17 | hints:
18 | DockerRequirement:
19 | dockerPull: aplbrain/i2gdetect
20 | baseCommand: python
21 | arguments: ["deploy_pipeline.py"]
22 | inputs:
23 | input:
24 | type: File
25 | inputBinding:
26 | position: 1
27 | prefix: --input
28 | height:
29 | type: int?
30 | inputBinding:
31 | position: 2
32 | prefix: --height
33 | width:
34 | type: int?
35 | inputBinding:
36 | position: 3
37 | prefix: --width
38 | # z_step:
39 | # type: int?
40 | # inputBinding:
41 | # position: 4
42 | # prefix: --z_step
43 | output:
44 | type: string
45 | inputBinding:
46 | position: 5
47 | prefix: --output
48 | outputs:
49 | membrane_detection_out:
50 | type: File
51 | outputBinding:
52 | glob: $(inputs.output)
53 |
54 |
--------------------------------------------------------------------------------
/saber/i2g/detection/membrane_detection_gpu.cwl:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | cwlVersion: v1.0
16 | class: CommandLineTool
17 | hints:
18 | DockerRequirement:
19 | dockerPull: aplbrain/i2gdetect_gpu
20 | baseCommand: python2
21 | arguments: ["deploy_pipeline.py"]
22 | inputs:
23 | input:
24 | type: File
25 | inputBinding:
26 | position: 1
27 | prefix: --input
28 | height:
29 | type: int?
30 | inputBinding:
31 | position: 2
32 | prefix: --height
33 | width:
34 | type: int?
35 | inputBinding:
36 | position: 3
37 | prefix: --width
38 | # z_step:
39 | # type: int?
40 | # inputBinding:
41 | # position: 4
42 | # prefix: --z_step
43 | output:
44 | type: string
45 | inputBinding:
46 | position: 5
47 | prefix: --output
48 | outputs:
49 | membrane_detection_out:
50 | type: File
51 | outputBinding:
52 | glob: $(inputs.output)
53 |
--------------------------------------------------------------------------------
/saber/i2g/detection/synapse_detection.cwl:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | cwlVersion: v1.0
16 | class: CommandLineTool
17 | hints:
18 | DockerRequirement:
19 | dockerPull: aplbrain/i2gdetect
20 | baseCommand: python
21 | arguments: ["deploy_pipeline.py"]
22 | inputs:
23 | input:
24 | type: File
25 | inputBinding:
26 | position: 1
27 | prefix: --input
28 | height:
29 | type: int?
30 | inputBinding:
31 | position: 2
32 | prefix: --height
33 | width:
34 | type: int?
35 | inputBinding:
36 | position: 3
37 | prefix: --width
38 | # z_step:
39 | # type: int?
40 | # inputBinding:
41 | # position: 4
42 | # prefix: --z_step
43 | output:
44 | type: string
45 | inputBinding:
46 | position: 5
47 | prefix: --output
48 | mode:
49 | type: string
50 | inputBinding:
51 | position: 6
52 | prefix: --mode
53 | outputs:
54 | synapse_detection_out:
55 | type: File
56 | outputBinding:
57 | glob: $(inputs.output)
58 |
59 |
--------------------------------------------------------------------------------
/saber/i2g/detection/synapse_detection_gpu.cwl:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | cwlVersion: v1.0
16 | class: CommandLineTool
17 | hints:
18 | DockerRequirement:
19 | dockerPull: aplbrain/i2gdetect_gpu
20 | baseCommand: python2
21 | arguments: ["deploy_pipeline.py"]
22 | inputs:
23 | input:
24 | type: File
25 | inputBinding:
26 | position: 1
27 | prefix: --input
28 | height:
29 | type: int?
30 | inputBinding:
31 | position: 2
32 | prefix: --height
33 | width:
34 | type: int?
35 | inputBinding:
36 | position: 3
37 | prefix: --width
38 | # z_step:
39 | # type: int?
40 | # inputBinding:
41 | # position: 4
42 | # prefix: --z_step
43 | output:
44 | type: string
45 | inputBinding:
46 | position: 5
47 | prefix: --output
48 | mode:
49 | type: string
50 | inputBinding:
51 | position: 6
52 | prefix: --mode
53 | outputs:
54 | synapse_detection_out:
55 | type: File
56 | outputBinding:
57 | glob: $(inputs.output)
58 |
59 |
--------------------------------------------------------------------------------
/saber/i2g/examples/I2G_Demo/job.yml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | # BOSS:
16 | #------------------------------------------------
17 | # general:
18 | host_boss: api.theboss.io
19 | host_bossdb: api.bossdb.org
20 | token_bossdb:
21 | token_boss:
22 | coll_name: Kasthuri
23 | exp_name: em
24 | coord_name: KasthuriFrame
25 | resolution: 1
26 | xmin: 5990
27 | xmax: 7824
28 | ymin: 6059
29 | ymax: 7892
30 | zmin: 631
31 | zmax: 998
32 | padding: 0
33 | onesided: 0
34 | #inputs:
35 | in_chan_name_raw: images
36 | in_chan_name_ann: ann
37 | itype_name_in: image
38 | dtype_name_in: uint8
39 | #outputs
40 | out_chan_name_syn: I2G_Synapses_i_1
41 | out_chan_name_neu: I2G_Neurons_Seg_1
42 | out_chan_name_mem: I2G_Membranes_i_1
43 | dtype_name_out: uint64
44 | itype_name_out: annotation
45 | pull_output_name_raw: pull_output_raw.npy
46 | pull_output_name_ann: pull_output_ann.npy
47 |
48 | #1760:8960/3680:10560/400:1800
49 | # ```{'x_bounds': [11980, 15648], 'y_bounds': [12118, 15784], 'z_bounds': [631, 998]}```
50 |
51 | #PROCESSING:
52 | #------------------------------------------------
53 | #General:
54 | width: 512
55 | height: 512
56 | #Membrane_detection:
57 | membrane_output: membrane_output.npy
58 | #Synapse_detection
59 | synapse_output: synapse_output.npy
60 | #neuron_segmentation:
61 | train_file: ./trained_classifier.pkl
62 | neuron_output: neuron_output.npy
63 | mode: synapse
64 | neuron_mode: 1
65 | agg_threshold: "0.5"
66 | seeds_cc_threshold: "5"
67 |
68 |
--------------------------------------------------------------------------------
/saber/i2g/examples/I2G_Demo/jobPublic.yaml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | # BOSS:
16 | #------------------------------------------------
17 | # general:
18 | host_bossdb: api.bossdb.org
19 | token_bossdb:  # REDACTED: never commit API tokens; supply at runtime
20 | coll_name: Kasthuri
21 | exp_name: em
22 | exp_name_out: I2G_Saber_pipeline_results
23 | coord_name_out: KasthuriI2GResults
24 | coord_name: KasthuriFrame
25 | resolution: 1
26 | xmin: 5990
27 | xmax: 7824
28 | ymin: 6059
29 | ymax: 7892
30 | zmin: 610
31 | zmax: 620
32 | padding: 0
33 | onesided: 0
34 | #inputs:
35 | in_chan_name_raw: images
36 | in_chan_name_ann: ann
37 | itype_name_in: image
38 | dtype_name_in: uint8
39 | #outputs
40 | out_chan_name_syn: I2G_Synapses_i_1
41 | out_chan_name_neu: I2G_Neurons_Seg_1
42 | out_chan_name_mem: I2G_Membranes_i_1
43 | dtype_name_out: uint64
44 | itype_name_out: annotation
45 | pull_output_name_raw: pull_output_raw.npy
46 | pull_output_name_ann: pull_output_ann.npy
47 |
48 | #1760:8960/3680:10560/400:1800
49 | # ```{'x_bounds': [11980, 15648], 'y_bounds': [12118, 15784], 'z_bounds': [631, 998]}```
50 |
51 | #PROCESSING:
52 | #------------------------------------------------
53 | #General:
54 | width: 512
55 | height: 512
56 | #Membrane_detection:
57 | membrane_output: membrane_output.npy
58 | #Synapse_detection
59 | synapse_output: synapse_output.npy
60 | #neuron_segmentation:
61 | train_file: ./trained_classifier.pkl
62 | neuron_output: neuron_output.npy
63 | mode: synapse
64 | neuron_mode: 1
65 | agg_threshold: "0.5"
66 | seeds_cc_threshold: "5"
67 | _saber_bucket: saber-batch
68 |
69 |
--------------------------------------------------------------------------------
/saber/i2g/examples/I2G_Demo/jobSecondPass.yaml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | # BOSS:
16 | #------------------------------------------------
17 | # general:
18 | host_boss: api.theboss.io
19 | host_bossdb: api.bossdb.org
20 | token_bossdb:
21 | token_boss:
22 | coll_name: Kasthuri
23 | exp_name: em
24 | coord_name: KasthuriFrame
25 | resolution: 0
26 | xmin: 11980
27 | xmax: 12900
28 | ymin: 13958
29 | ymax: 14878
30 | zmin: 631
31 | zmax: 723
32 | padding: 0
33 | onesided: 0
34 | #inputs:
35 | in_chan_name_raw: images
36 | in_chan_name_ann: ann
37 | itype_name_in: image
38 | dtype_name_in: uint8
39 | #outputs
40 | out_chan_name_syn: I2G_Synapses_i_Pub
41 | out_chan_name_neu: I2G_Neuron_Seg_Pub
42 | out_chan_name_mem: I2G_Membranes_i_Pub
43 | dtype_name_out: uint64
44 | itype_name_out: annotation
45 | pull_output_name_raw: pull_output_raw.npy
46 | pull_output_name_ann: pull_output_ann.npy
47 |
48 | #1760:8960/3680:10560/400:1800
49 |
50 | #PROCESSING:
51 | #------------------------------------------------
52 | #General:
53 | width: 512
54 | height: 512
55 | #Membrane_detection:
56 | membrane_output: membrane_output.npy
57 | #Synapse_detection
58 | synapse_output: synapse_output.npy
59 | #neuron_segmentation:
60 | train_file: ./trained_classifier.pkl
61 | neuron_output: neuron_output.npy
62 | mode: synapse
63 | neuron_mode: 1
64 | agg_threshold: "0.5"
65 | seeds_cc_threshold: "5"
66 |
67 |
--------------------------------------------------------------------------------
/saber/i2g/examples/I2G_Demo/job_simple.yaml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | # BOSS:
16 | #------------------------------------------------
17 | # general:
18 | host_bossdb: api.bossdb.io
19 | token_bossdb: <>
20 | coll_name: Kasthuri
21 | exp_name: ac4
22 | coord_name: ac4-cf
23 | resolution: 0
24 | xmin: 0
25 | xmax: 512
26 | ymin: 0
27 | ymax: 512
28 | zmin: 0
29 | zmax: 50
30 | padding: 0
31 | onesided: 0
32 | #inputs:
33 | in_chan_name_raw: em
34 | itype_name_in: image
35 | dtype_name_in: uint8
36 | #outputs
37 | pull_output_name_raw: pull_output_raw.npy
38 |
39 |
40 | #PROCESSING:
41 | #------------------------------------------------
42 | #General:
43 | width: 512
44 | height: 512
45 | mode: synapse
46 | #Synapse_detection
47 | synapse_output: synapse_output.npy
48 | #Membrane_detection
49 | membrane_output: membrane_output.npy
50 | #neuron_segmentation:
51 | train_file: ./trained_classifier.pkl
52 | neuron_output: neuron_output.npy
53 | # (duplicate "mode: synapse" key removed; already defined under #General above)
54 | neuron_mode: 1
55 | agg_threshold: "0.5"
56 | seeds_cc_threshold: "5"
57 | _saber_bucket: saber-batch
58 |
--------------------------------------------------------------------------------
/saber/i2g/examples/I2G_Demo/sweep.yml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | # ```{'x_bounds': [11980, 15648], 'y_bounds': [12118, 15784], 'z_bounds': [631, 998]}```
16 | #res 1:
17 | #{'x_bounds': [5990, 7824], 'y_bounds': [6059, 7892], 'z_bounds': [631, 998]}
18 | x:
19 | range:
20 | start: 5990
21 | stop: 7824
22 | step: 917
23 | parameters:
24 | min: xmin
25 | max: xmax
26 | steps:
27 | - boss_pull_raw
28 | - boss_push_synapses_boss
29 | - boss_push_synapses_bossdb
30 | - boss_push_membranes_boss
31 | - boss_push_membranes_bossdb
32 | - boss_push_neurons_boss
33 | y:
34 | range:
35 | start: 6059
36 | stop: 7892
37 | step: 917
38 | parameters:
39 | min: ymin
40 | max: ymax
41 | steps:
42 | - boss_pull_raw
43 | - boss_push_synapses_boss
44 | - boss_push_synapses_bossdb
45 | - boss_push_membranes_boss
46 | - boss_push_membranes_bossdb
47 | - boss_push_neurons_boss
48 | # z:
49 | # range:
50 | # start: 631
51 | # stop: 998
52 | # step: 92
53 | # parameters:
54 | # min: zmin
55 | # max: zmax
56 | # steps:
57 | # - boss_pull_raw
58 | # - boss_push_synapses_boss
59 | # - boss_push_synapses_bossdb
60 | # - boss_push_membranes_boss
61 | # - boss_push_membranes_bossdb
62 |
--------------------------------------------------------------------------------
/saber/i2g/examples/I2G_FFN/job_ffn.yml:
--------------------------------------------------------------------------------
1 | # BOSS:
2 | #------------------------------------------------
3 | # general:
4 | host_bossdb: api.bossdb.io
5 | token_bossdb: public
6 | coll_name: neuroproof_examples
7 | exp_name: training_sample
8 | coord_name: neuroproof_training_sample_2
9 | resolution: 0
10 | xmin: 0
11 | xmax: 250
12 | ymin: 0
13 | ymax: 250
14 | zmin: 0
15 | zmax: 250
16 | padding: 0
17 | onesided: 0
18 | #inputs:
19 | in_chan_name_raw: images
20 | itype_name_in: image
21 | dtype_name_in: uint8
22 | #outputs
23 | pull_output_name_raw: pull_output_raw.npy
24 |
25 | # FFN segmentation
26 | image_mean: 128
27 | image_stddev: 33
28 | depth: 12
29 | fov_size: 33,33,33
30 | deltas: 8,8,8
31 | init_activation: 0.95
32 | pad_value: 0.05
33 | move_threshold: 0.9
34 | min_boundary_dist: 1,1,1
35 | segment_threshold: 0.6
36 | min_segment_size: 100
37 | bound_start: 0,0,0
38 | bound_stop: 250,250,250
39 | outfile: ffn_seg_out.npy
--------------------------------------------------------------------------------
/saber/i2g/examples/I2G_Neuroproof/job_neuroproof_deploy.yaml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | # BOSS:
16 | #------------------------------------------------
17 | host_bossdb: api.bossdb.io
18 | token_bossdb: public
19 | coll_name: kasthuri2015
20 | exp_name: ac4
21 | coord_name: ac4-cf
22 | resolution: 0
23 | xmin: 256
24 | xmax: 320
25 | ymin: 256
26 | ymax: 320
27 | zmin: 0
28 | zmax: 64
29 | padding: 0
30 | onesided: 0
31 |
32 | #inputs:
33 | in_chan_name_raw: em
34 | itype_name_in: image
35 | dtype_name_in: uint8
36 | #outputs
37 | pull_output_name_raw: pull_output_raw.npy
38 |
39 | #PROCESSING:
40 | #------------------------------------------------
41 | #General:
42 | width: 64
43 | height: 64
44 | mode: synapse
45 | #Membrane_detection
46 | membrane_output: membrane_output.npy
47 | #neuron_segmentation:
48 | train_file: ./trained_classifier.pkl
49 | neuron_output: neuron_output.npy
50 | # (duplicate "mode: synapse" key removed; already defined under #General above)
51 | neuron_mode: 1
52 | agg_threshold: "0.5"
53 | seeds_cc_threshold: "5"
54 | #neuroproof
55 | class_file: ./kasthuri_classifier.xml
56 | neuroproof_output: neuroproof_output.npy
57 |
58 |
--------------------------------------------------------------------------------
/saber/i2g/examples/I2G_Neuroproof/job_neuroproof_train.yaml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | #neuroproof:
16 | mode: 0
17 | ws_file: oversegmented_stack_labels.h5
18 | pred_file: boundary_prediction.h5
19 | gt_file: groundtruth.h5
20 | neuroproof_output: myclassifier.xml
21 |
--------------------------------------------------------------------------------
/saber/i2g/examples/I2G_Neuroproof/workflow_neuroproof_train.cwl:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | #!/usr/bin/env cwl-runner
16 | ## This workflow will make use of the general synapse and membrane detection cwl files, meaning the processes will happen on CPU rather than on GPU. Does not include Boss push steps.
17 |
18 | cwlVersion: v1.0
19 | class: Workflow
20 | doc: local
21 |
22 | inputs:
23 | #Inputs for Neuroproof
24 | mode: string
25 | ws_file: File
26 | pred_file: File
27 | gt_file: File
28 |
29 |
30 | #Inputs for output names:
31 | neuroproof_output: string
32 |
33 | outputs:
34 | neuroproof:
35 | type: File
36 | outputSource: neuroproof/neuroproof_out
37 |
38 | steps:
39 | neuroproof:
40 | run: ../../../../saber/i2g/neuroproof/neuroproof.cwl
41 | in:
42 | mode: mode
43 | ws_file: ws_file
44 | pred_file: pred_file
45 | gt_file: gt_file
46 | outfile: neuroproof_output
47 | hints:
48 | saber:
49 | local: True
50 |       file_path: /Users/xenesd1/Projects/aplbrain/saber/volumes/data/local # TODO: machine-specific path — parameterize before sharing
51 | out: [neuroproof_out]
52 |
--------------------------------------------------------------------------------
/saber/i2g/examples/I2G_Seg_Workflow/job_seg.yml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | # BOSS:
16 | #------------------------------------------------
17 | # general:
18 | host_boss: api.theboss.io
19 | host_bossdb: api.bossdb.org
20 | token_bossdb:
21 | token_boss:
22 | coll_name: Kasthuri
23 | exp_name: em
24 | coord_name: KasthuriFrame
25 | resolution: 1
26 | xmin: 5990
27 | xmax: 7824
28 | ymin: 6059
29 | ymax: 7892
30 | zmin: 631
31 | zmax: 998
32 | padding: 0
33 | onesided: 0
34 | #inputs:
35 | in_chan_name_raw: images
36 | in_chan_name_ann: ann
37 | itype_name_in: image
38 | dtype_name_in: uint8
39 | #outputs
40 | out_chan_name_syn: I2G_Synapses_i_1
41 | out_chan_name_neu: I2G_Neurons_Seg_1
42 | out_chan_name_mem: I2G_Membranes_i_1
43 |
44 | dtype_name_out: uint64
45 | itype_name_out: annotation
46 | pull_output_name_raw: pull_output_raw.npy
47 | pull_output_name_ann: pull_output_ann.npy
48 | pull_output_name_membranes: membrane_boss_output.npy
49 |
50 | #1760:8960/3680:10560/400:1800
51 | # ```{'x_bounds': [11980, 15648], 'y_bounds': [12118, 15784], 'z_bounds': [631, 998]}```
52 |
53 | #PROCESSING:
54 | #------------------------------------------------
55 | #General:
56 | width: 512
57 | height: 512
58 | #Membrane_detection:
59 | membrane_output: membrane_output.npy
60 | #Synapse_detection
61 | synapse_output: synapse_output.npy
62 | #neuron_segmentation:
63 | train_file: ./trained_classifier.pkl
64 | neuron_output: neuron_output.npy
65 | mode: synapse
66 | neuron_mode: 1
67 | agg_threshold: "0.5"
68 | seeds_cc_threshold: "5"
69 |
70 | # BACK-UP:
71 | #------------------------------------------------
72 | _saber_bucket: saber-batch-dev
73 |
--------------------------------------------------------------------------------
/saber/i2g/examples/I2G_Seg_Workflow/test_job_seg.yml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | # BOSS:
16 | #------------------------------------------------
17 | # general:
18 | host_boss: api.theboss.io
19 | host_bossdb: api.bossdb.org
20 | token_bossdb:
21 | token_boss:
22 | coll_name: Kasthuri
23 | exp_name: em
24 | coord_name: KasthuriFrame
25 | coord_name_out: KasthuriFrame
26 | resolution: 0
27 | resolution_out: 1
28 | xmin_out: 11980
29 | xmax_out: 12980
30 | ymin_out: 12118
31 | ymax_out: 13118
32 | xmin: 11980
33 | xmax: 12980
34 | ymin: 12118
35 | ymax: 13118
36 | zmin: 631
37 | zmax: 651
38 | padding: 0
39 | onesided: 0
40 | #inputs:
41 | in_chan_name_raw: I2G_Membranes_i_Pub
42 | in_chan_name_ann: ann
43 | itype_name_in: image
44 | dtype_name_in: uint8
45 | #outputs
46 | out_coll_name: Kasthuri
47 | out_chan_name_syn: I2G_Synapses_i_Pub
48 | out_chan_name_neu: I2G_Neuron_Seg_Pub
49 | out_chan_name_mem: I2G_Membranes_i_Pub
50 | dtype_name_out: uint64
51 | itype_name_out: annotation
52 | pull_output_name_membranes: pull_output_membranes.npy
53 | pull_output_name_ann: pull_output_ann.npy
54 |
55 | #1760:8960/3680:10560/400:1800
56 | # ```{'x_bounds': [11980, 15648], 'y_bounds': [12118, 15784], 'z_bounds': [631, 998]}```
57 |
58 | #PROCESSING:
59 | #------------------------------------------------
60 | #General:
61 | width: 512
62 | height: 512
63 | #Membrane_detection:
64 | membrane_output: membrane_output.npy
65 | #Synapse_detection
66 | synapse_output: synapse_output.npy
 67 | #neuron_segmentation:
68 | train_file: ./trained_classifier.pkl
69 | neuron_output: neuron_output.npy
70 | mode: synapse
71 | neuron_mode: 1
72 | agg_threshold: "0.5"
73 | seeds_cc_threshold: "5"
74 |
75 | # BACK-UP:
76 | #------------------------------------------------
77 | _saber_bucket: saber-batch-dev
78 |
--------------------------------------------------------------------------------
/saber/i2g/examples/I2G_association/assoc_job.yml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | # Shared
16 | # Inputs for BOSS
17 | token:
18 | host_name: api.theboss.io
19 | coll_name: Kasthuri
20 | exp_name: em
21 | seg_chan_name: I2G_Neurons_i
22 | syn_chan_name: I2G_Synapses_i
23 | dtype_seg: uint8
24 | dtype_syn: uint8
25 | itype_name: annotation
26 | coord_name: KasthuriFrame
27 | resolution: 0
28 | xmin: 4820
29 | xmax: 6320
30 | ymin: 7660
31 | ymax: 9160
32 | zmin: 400
33 | zmax: 500
34 | use_boss: 1
35 | #No padding on this one, for optimization
36 | assoc_output_name: assoc_output.pkl
37 | assoc_output_name_noneu: assoc_output_noneu.pkl
38 |
--------------------------------------------------------------------------------
/saber/i2g/examples/I2G_association/assoc_workflow.cwl:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 | #!/usr/bin/env cwl-runner
15 |
16 | cwlVersion: v1.0
17 | class: Workflow
18 | inputs:
19 | # Boss
20 | seg_chan_name: string
21 | syn_chan_name: string
22 | token: string?
23 | coll_name: string
24 | exp_name: string
25 | dtype_seg: string
26 | dtype_syn: string
27 | resolution: int?
28 | xmin: int?
29 | xmax: int?
30 | ymin: int?
31 | ymax: int?
32 | zmin: int?
33 | zmax: int?
34 | coord_name: string
35 | host_name: string
36 | assoc_output_name: string
37 | assoc_output_name_noneu: string
38 | use_boss: int
39 | outputs:
40 | assoc_output:
41 | type: File
42 | outputSource: assoc/assoc_output
43 | assoc_output_noneu:
44 | type: File
45 | outputSource: assoc/assoc_output_noneu
46 | steps:
47 | assoc:
48 | run: ../seg_syn_association/assoc.cwl
49 | in:
50 | use_boss: use_boss
51 | host_name: host_name
52 | token: token
53 | coll_name: coll_name
54 | exp_name: exp_name
55 | chan_seg: seg_chan_name
56 | chan_syn: syn_chan_name
57 | dtype_syn: dtype_syn
58 | dtype_seg: dtype_seg
59 | resolution: resolution
60 | xmin: xmin
61 | xmax: xmax
62 | ymin: ymin
63 | ymax: ymax
64 | zmin: zmin
65 | zmax: zmax
66 | output_name: assoc_output_name
67 | output_name_noneu: assoc_output_name_noneu
68 | coord_name: coord_name
69 | out:
70 | [assoc_output,assoc_output_noneu]
71 | hints:
72 | saber:
73 | local: True
74 |
--------------------------------------------------------------------------------
/saber/i2g/examples/I2G_gala/ex_gala_job.yml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | membrane_detection_output_name: i2g_membranes.npy
16 | synapse_detection_output_name: i2g_synapses.npy
17 | neuron_segmentation_output_name: i2g_segment.npy
18 | data: input.npy
19 | train_file: #trained classifier
20 | class: File
21 | path: ../neuron_segmentation/trained_classifier.pkl
22 | mode: 1
23 | seed_thres: 5
24 | agg_thres: 0.5
25 | height: 256
26 | width: 256
27 | z_step: 1
28 | dilation: 3
29 |
--------------------------------------------------------------------------------
/saber/i2g/examples/I2G_gala/i2g_gala.cwl:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | #!/usr/bin/env cwl-runner
16 | #Deployment pipeline for dense segmentation
17 |
18 | cwlVersion: v1.0
19 | class: Workflow
20 |
21 | inputs:
22 | membrane_detection_output_name: string
23 | synapse_detection_output_name: string
24 | neuron_segmentation_output_name: string
25 | data: File
26 | train_file: File #trained classifier
27 | mode: int
28 | seed_thres: int?
29 | agg_thres: float?
30 | height: int?
31 | width: int?
32 | z_step: int?
33 | dilation: int?
34 |
35 | outputs:
36 | membrane_detection_output:
37 | type: File
38 | outputSource: membrane_detection/membrane_detection_out
39 | synapse_detection_output:
40 | type: File
41 | outputSource: synapse_detection/synapse_detection_out
42 | neuron_segmentation_output:
43 | type: File
44 | outputSource: neuron_segmentation/neuron_segmentation_out
45 |
46 | steps:
47 | membrane_detection:
48 | run: ../tools/membrane_detection.cwl
49 | in:
50 | input: data
51 | height : height
52 | width : width
53 | z_step : z_step
54 | output: membrane_detection_output_name
55 | out: [membrane_detection_out]
56 | synapse_detection:
57 | run: ../tools/synapse_detection.cwl
58 | in:
59 | input: data
60 | height: height
61 | width: width
62 | z_step: z_step
63 | output: synapse_detection_output_name
64 | out: [synapse_detection_out]
65 | neuron_segmentation:
66 | run: ../tools/neuron_segmentation.cwl
67 | in:
68 | mode: mode
69 | prob_file: membrane_detection/membrane_detection_out
70 | train_file: train_file
71 | seeds_cc_threshold: seed_thres
72 | agg_threshold: agg_thres
73 | outfile: neuron_segmentation_output_name
74 | out: [neuron_segmentation_out]
75 |
76 |
--------------------------------------------------------------------------------
/saber/i2g/examples/I2G_gala/i2g_graph.cwl:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | #!/usr/bin/env cwl-runner
16 |
cwlVersion: v1.0
class: Workflow

inputs:
    synapse_detection_input_name: string
    neuron_segmentation_input_name: string
    graph_gen_output_name: string
    dilation: int?

outputs:
    graph_generation_output:
        type: File
        outputSource: graph_generation/graph_generation_out

steps:
    graph_generation:
        run: ../tools/graph_generation.cwl
        in:
            seginput: neuron_segmentation_input_name
            # Fixed dangling reference: there is no "synapse_detection_out"
            # source in this workflow — wire the declared workflow input.
            synapseinput: synapse_detection_input_name
            dilation: dilation
            output_name: graph_gen_output_name
        out: [graph_generation_out]
40 |
41 |
42 |
--------------------------------------------------------------------------------
/saber/i2g/ffns/Dockerfile.base:
--------------------------------------------------------------------------------
1 | FROM tensorflow/tensorflow:1.14.0-gpu
2 |
3 | LABEL maintainer="Jordan Matelsky "
4 |
5 | RUN apt update
6 | RUN apt install -y git
7 | RUN pip install scikit-image scipy numpy tensorflow-gpu h5py pillow absl-py
8 | RUN git clone https://github.com/google/ffn/ \
9 | && cd ffn \
10 | && git checkout 30decd27d9d4f3ef5768f2608c8c4d3350f8232b
11 |
12 |
--------------------------------------------------------------------------------
/saber/i2g/ffns/Dockerfile.inference:
--------------------------------------------------------------------------------
FROM ffn-base

LABEL maintainer="Jordan Matelsky "

RUN apt-get install -yqq \
    python3-dev \
    python3-pip

WORKDIR "ffn"
COPY ./inference/config_template.pbtxt .
COPY ./inference/get-latest-checkpoint .
COPY ./inference/npy2h5.py .
COPY ./inference/driver.py .
COPY ./model /model
RUN mkdir /data
RUN mkdir /latest-model
# BUG FIX: `RUN export LATEST=...` does not persist to later layers — each RUN
# runs in a fresh shell, so $LATEST was empty when the old `RUN cp` executed.
# Resolve the latest checkpoint and copy it within a single layer instead.
RUN LATEST=$(./get-latest-checkpoint) \
    && cp /model/model.ckpt-$LATEST* /latest-model
ENTRYPOINT ["python", "driver.py"]
20 |
--------------------------------------------------------------------------------
/saber/i2g/ffns/Dockerfile.train:
--------------------------------------------------------------------------------
1 | FROM ffn-base
2 |
3 | LABEL maintainer="Jordan Matelsky "
4 |
5 | WORKDIR "ffn"
6 | COPY ./train/main.sh .
7 |
8 | ENTRYPOINT ["bash", "-c", "./main.sh"]
9 |
--------------------------------------------------------------------------------
/saber/i2g/ffns/README.md:
--------------------------------------------------------------------------------
1 | # SABER Floodfill Networks
2 |
3 | ## Building the docker containers
4 |
5 | ```shell
6 | docker build --rm -t ffn-base -f Dockerfile.base .
7 | docker build --rm -t aplbrain/ffn-inference -f Dockerfile.inference .
8 | ```
9 |
10 | ## python driver test
11 | docker run --rm -v $(pwd)/results:/ffn/output/ aplbrain/ffn-inference -c config.pbtxt -bb ['start { x:0 y:0 z:0 } size { x:64 y:64 z:63 }'] -o output/my_seg.npy
12 |
--------------------------------------------------------------------------------
/saber/i2g/ffns/example_config.pbtxt:
--------------------------------------------------------------------------------
1 | image {
2 | hdf5: "/data/raw.h5:raw"
3 | }
4 | image_mean: 128
5 | image_stddev: 33
6 | checkpoint_interval: 1800
7 | seed_policy: "PolicyPeaks"
8 | model_checkpoint_path: "/latest-model/model.ckpt-27465036"
9 | model_name: "convstack_3d.ConvStack3DFFNModel"
10 | model_args: "{\"depth\": 12, \"fov_size\": [33, 33, 33], \"deltas\": [8, 8, 8]}"
11 | segmentation_output_dir: "/results"
12 | inference_options {
13 | init_activation: 0.95
14 | pad_value: 0.05
15 | move_threshold: 0.9
16 | min_boundary_dist { x: 1 y: 1 z: 1}
17 | segment_threshold: 0.6
18 | min_segment_size: 100
19 | }
20 |
--------------------------------------------------------------------------------
/saber/i2g/ffns/ffn_segmentation.cwl:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | cwlVersion: v1.0
16 | class: CommandLineTool
17 | hints:
18 | DockerRequirement:
19 | dockerPull: aplbrain/ffn-inference
20 | baseCommand: python
21 | arguments: ["driver.py"]
22 | inputs:
23 | input:
24 | type: File
25 | inputBinding:
26 | position: 1
27 | prefix: --input_file
28 | image_mean:
29 | type: string
30 | inputBinding:
31 | position: 2
32 | prefix: --image_mean
33 | image_stddev:
34 | type: string
35 | inputBinding:
36 | position: 3
37 | prefix: --image_stddev
38 | depth:
39 | type: string
40 | inputBinding:
41 | position: 4
42 | prefix: --depth
43 | fov_size:
44 | type: string
45 | inputBinding:
46 | position: 5
47 | prefix: --fov_size
48 | deltas:
49 | type: string
50 | inputBinding:
51 | position: 6
52 | prefix: --deltas
53 | init_activation:
54 | type: string
55 | inputBinding:
56 | position: 7
57 | prefix: --init_activation
58 | pad_value:
59 | type: string
60 | inputBinding:
61 | position: 8
62 | prefix: --pad_value
63 | move_threshold:
64 | type: string
65 | inputBinding:
66 | position: 9
67 | prefix: --move_threshold
68 | min_boundary_dist:
69 | type: string
70 | inputBinding:
71 | position: 10
72 | prefix: --min_boundary_dist
73 | segment_threshold:
74 | type: string
75 | inputBinding:
76 | position: 11
77 | prefix: --segment_threshold
78 | min_segment_size:
79 | type: string
80 | inputBinding:
81 | position: 12
82 | prefix: --min_segment_size
83 | bound_start:
84 | type: string
85 | inputBinding:
86 | position: 13
87 | prefix: --bound_start
88 | bound_stop:
89 | type: string
90 | inputBinding:
91 | position: 14
92 | prefix: --bound_stop
93 | outfile:
94 | type: string
95 | inputBinding:
96 | position: 15
97 | prefix: --outfile
98 |
99 | outputs:
100 | ffn_out:
101 | type: File
102 | outputBinding:
103 | glob: $(inputs.outfile)
104 |
--------------------------------------------------------------------------------
/saber/i2g/ffns/ffn_train.cwl:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | cwlVersion: v1.0
16 | class: CommandLineTool
17 | hints:
18 | DockerRequirement:
19 | dockerPull: aplbrain/ffn_train
20 | baseCommand: /bin/bash
21 | arguments: ["main.sh"]
22 | inputs:
23 | input:
24 | type: File
25 | inputBinding:
26 | position: 1
27 | prefix: --input
28 | seg_input:
29 | type: File
30 | inputBinding:
31 | position: 2
32 | prefix: --seg_input
33 | min_thres:
34 | type: float?
35 | inputBinding:
36 | position: 3
37 | prefix: --min_thres
38 | max_thres:
39 | type: float?
40 | inputBinding:
41 | position: 4
42 | prefix: --max_thres
43 | thres_step:
44 | type: float?
45 | inputBinding:
46 | position: 5
47 | prefix: --thres_step
48 | lom_radius:
49 | type: int?
50 | inputBinding:
51 | position: 6
52 | prefix: --lom_radius
53 | min_size:
54 | type: int?
55 | inputBinding:
56 | position: 7
57 | prefix: --min_size
58 | margin:
59 | type: int?
60 | inputBinding:
61 | position: 8
62 | prefix: --margin
63 | model_name:
64 | type: string
65 | inputBinding:
66 | position: 9
67 | prefix: --name
68 | depth:
69 | type: int?
70 | inputBinding:
71 | position: 10
72 | prefix: --depth
73 | fov:
74 | type: int?
75 | inputBinding:
76 | position: 11
77 | prefix: --fov
78 | deltas:
79 | type: int?
80 | inputBinding:
81 | position: 12
82 | prefix: --deltas
83 | image_mean:
84 | type: int?
85 | inputBinding:
86 | position: 13
87 | prefix: --image_mean
88 | image_std:
89 | type: int?
90 | inputBinding:
91 | position: 14
92 | prefix: --image_std
93 | max_steps:
94 | type: int?
95 | inputBinding:
96 | position: 15
97 | prefix: --max_steps
98 | output:
99 | type: string
100 | inputBinding:
101 | position: 16
102 | prefix: --output
103 | outputs:
104 | training_out:
105 | type: File
106 | outputBinding:
107 | glob: $(inputs.output)
108 |
--------------------------------------------------------------------------------
/saber/i2g/ffns/inference/config_template.pbtxt:
--------------------------------------------------------------------------------
1 | image {
2 | hdf5: "/data/raw.h5:raw"
3 | }
4 | image_mean: {}
5 | image_stddev: {}
6 | checkpoint_interval: 1800
7 | seed_policy: "PolicyPeaks"
8 | model_checkpoint_path: "/latest-model/model.ckpt-27465036"
9 | model_name: "convstack_3d.ConvStack3DFFNModel"
10 | model_args: {}
11 | segmentation_output_dir: "/results"
12 | inference_options {
13 | init_activation: {}
14 | pad_value: {}
15 | move_threshold: {}
16 | min_boundary_dist {}
17 | segment_threshold: {}
18 | min_segment_size: {}
19 | }
20 |
--------------------------------------------------------------------------------
/saber/i2g/ffns/inference/get-latest-checkpoint:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import glob
4 |
def get_max_checkpoint(path="/model/model.ckpt-*"):
    """Return the largest checkpoint step number among files matching *path*.

    Checkpoint files look like ``model.ckpt-<step>.<ext>``; the step is read
    from the second-to-last dot-separated component of each matched filename.
    Raises ValueError if nothing matches (max of an empty sequence).
    """
    steps = []
    for fname in glob.glob(path):
        stem = fname.split(".")[-2]  # e.g. "ckpt-12345"
        steps.append(int(stem.split("-")[1]))
    return max(steps)

if __name__ == "__main__":
    # Print the newest checkpoint step so shell callers can capture it.
    print(get_max_checkpoint())
10 |
--------------------------------------------------------------------------------
/saber/i2g/ffns/inference/npy2h5.py:
--------------------------------------------------------------------------------
1 | """
2 | Copyright 2018 The Johns Hopkins University Applied Physics Laboratory.
3 |
4 | Licensed under the Apache License, Version 2.0 (the "License");
5 | you may not use this file except in compliance with the License.
6 | You may obtain a copy of the License at
7 |
8 | http://www.apache.org/licenses/LICENSE-2.0
9 |
10 | Unless required by applicable law or agreed to in writing, software
11 | distributed under the License is distributed on an "AS IS" BASIS,
12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | See the License for the specific language governing permissions and
14 | limitations under the License.
15 | """
16 |
17 |
18 | import numpy as np
19 | import h5py
20 |
def convert(file, path, name):
    """Copy a .npy array into an HDF5 dataset at *path* under key *name*.

    Arrays that are not already uint8 are written with a uint8 dtype
    (h5py casts the data on write).
    """
    arr = np.load(file)
    target_dtype = arr.dtype
    if target_dtype != np.uint8:
        print("Converting to uint8 from " + str(target_dtype))
        target_dtype = np.uint8
    with h5py.File(path, 'w') as fh:
        fh.create_dataset(name, data=arr, dtype=target_dtype)
29 |
30 |
--------------------------------------------------------------------------------
/saber/i2g/ffns/train/main.sh:
--------------------------------------------------------------------------------
1 | echo "Computing partitions... (Go get a coffee.)"
2 |
3 | python ./compute_partitions.py --input_volume /data/seg.h5:seg --output_volume /data/af.h5:af --thresholds 0.025,0.05,0.075,0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9 --lom_radius 24,24,24 --min_size 10000
4 |
5 | echo "Building coordinates file... (Go get lunch.)"
6 | # Takes a very long time:
7 | python build_coordinates.py --partition_volumes val:/data/af.h5:af --coordinate_output /data/tf_record_file --margin 24,24,24
8 |
9 | echo "Training. (Go get a life!)"
10 | python train.py --train_coords /data/tf_record_file --data_volumes val:/data/raw.h5:raw --label_volumes val:/data/seg.h5:seg --model_name convstack_3d.ConvStack3DFFNModel --model_args "{\"depth\": 12, \"fov_size\": [33, 33, 33], \"deltas\": [8, 8, 8]}" --image_mean 128 --image_stddev 33 --train_dir '/model' --max_steps 4000000
11 |
--------------------------------------------------------------------------------
/saber/i2g/metric_computation/Dockerfile:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | #Use an official Python runtime as a parent image
16 | FROM python:3
17 |
18 | # Install any needed packages specified in requirements.txt
19 | RUN pip install scikit-image
20 | RUN pip install numpy
21 | RUN pip install scipy
22 |
23 | RUN mkdir /app
24 | COPY --chown=1000:100 metric_computation.py santiago.py /app/
25 | ENV PATH /app:$PATH
26 | WORKDIR /app
27 |
--------------------------------------------------------------------------------
/saber/i2g/metric_computation/metric_computation.py:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | #!/usr/bin/env python
16 |
17 | import argparse
18 | import sys
19 |
20 | import numpy as np
21 |
22 | from santiago import graph_error
23 |
def metrics(args):
    """Score the pipeline graph against the ground-truth graph and save it.

    Loads both graphs from .npy files, computes the error via
    ``santiago.graph_error``, and writes the score to ``args.output``.
    """
    gt_graph = np.load(args.gtinput)
    pipeline_graph = np.load(args.siminput)
    score = graph_error(gt_graph, pipeline_graph)
    with open(args.output, 'wb') as out_f:
        np.save(out_f, score)
30 |
def main():
    """Parse command-line arguments and run the graph-metric computation.

    Removed dead argparse scaffolding: ``parent_parser`` and ``subparsers``
    were never used (no subcommands are registered), and the
    ``set_defaults(func=...)`` callback was never invoked.
    """
    parser = argparse.ArgumentParser(description='I2G graph generation processing script')
    parser.add_argument('--gtinput', required=True, help='Ground truth graph input file')
    parser.add_argument('--siminput', required=True, help='Pipeline graph input file')
    parser.add_argument('--output', required=True, help='Output file')

    args = parser.parse_args()
    metrics(args)

if __name__ == '__main__':
    main()
--------------------------------------------------------------------------------
/saber/i2g/metric_computation/metrics.cwl:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | #!/usr/bin/env cwl-runner
16 |
cwlVersion: v1.0
class: CommandLineTool
hints:
    DockerRequirement:
        dockerPull: i2g:metrics
baseCommand: python
arguments: ["metric_computation.py"]
inputs:
    groundtruthinput:
        type: File
        inputBinding:
            position: 1
            prefix: --gtinput
    input:
        type: File
        inputBinding:
            position: 2
            prefix: --siminput
    output_name:
        # This is the *name* of the file to create (passed to --output and
        # globbed below), not an existing file — must be a string, matching
        # the sibling tools (e.g. neuron_segmentation.cwl's "outfile").
        type: string
        inputBinding:
            position: 3
            prefix: --output
outputs:
    cell_detect_results:
        type: File
        outputBinding:
            glob: $(inputs.output_name)
45 |
--------------------------------------------------------------------------------
/saber/i2g/neuron_segmentation/Dockerfile:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | FROM ubuntu
16 |
17 | RUN apt-get update
18 | RUN apt-get install -y \
19 | git \
20 | python3-dev \
21 | python3-pip
22 | RUN git clone https://github.com/janelia-flyem/gala.git
23 | COPY ./requirements.txt /gala/requirements.txt
24 | RUN pip3 install --upgrade pip
25 | RUN pip3 install Cython numpy h5py
26 | RUN cd gala && \
27 | pip3 install -r requirements.txt && \
28 | python3 setup.py install && \
29 | python3 setup.py build_ext --inplace && \
30 | cd -
31 | COPY ./driver.py /gala/driver.py
32 | COPY ./trained_classifier.pkl /gala/trained_classifier.pkl
33 | WORKDIR /gala
34 | RUN apt-get install -y python python-pip
35 | #ENTRYPOINT ["python3", "gala-train -I --seed-cc-threshold 5 -o ./train-sample --experiment-name agglom ./example/prediction.h5 ./example/groundtruth"]
36 | ENTRYPOINT ["python3", "driver.py" ]
37 |
--------------------------------------------------------------------------------
/saber/i2g/neuron_segmentation/README.md:
--------------------------------------------------------------------------------
1 | Gala neural segmentation and agglomeration tool
2 |
3 | docker build . -t i2g:gala
4 |
5 | docker run i2g:gala python3 ./bin/gala-train -I --seed-cc-threshold 5 -o ./train-sample --experiment-name agglom ./example/prediction_precomputed.h5 ./example/groundtruth
6 |
7 | Better:
8 | docker run i2g:gala python3 ./bin/gala-segment -I --seed-cc-threshold 5 --output-dir ./train-sample --experiment-name agglom --classifier ./example/agglom.classifier_precomputed.h5 ./example/prediction_precomputed.h5
9 |
10 | Try
11 | docker run -v $(pwd):/data i2g:gala python3 ./driver.py -m 0 -o /data/trained_classifier.pkl --prob_file ./tests/example-data/train-p1.lzf.h5 --gt_file ./tests/example-data/train-gt.lzf.h5 --ws_file ./tests/example-data/train-ws.lzf.h5
12 |
13 | docker run -v $(pwd):/data i2g:gala python3 ./driver.py -m 1 -o /data/annotations.npy --prob_file /data/membrane_output.npy --train_file /data/trained_classifier.pkl --seeds_cc_threshold 5 --agg_threshold 0.5
14 |
15 | Now Try
16 | docker run -v $(pwd):/data i2g:gala python3 ./driver.py -m 0 -o /data/trained_classifier.pkl --prob_file ./tests/example-data/train-p1.lzf.h5 --gt_file ./tests/example-data/train-gt.lzf.h5 --ws_file ./tests/example-data/train-ws.lzf.h5
17 | docker run -v $(pwd):/data i2g:gala python3 ./driver.py -m 1 -o /data/annotations.npy --prob_file /data/membrane_output.npy --train_file /data/trained_classifier.pkl --seeds_cc_threshold 5 --agg_threshold 0.5
18 |
19 |
20 |
21 |
--------------------------------------------------------------------------------
/saber/i2g/neuron_segmentation/neuron_segmentation.cwl:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | cwlVersion: v1.0
16 | class: CommandLineTool
17 | hints:
18 | DockerRequirement:
19 | dockerPull: aplbrain/i2gseg
20 | baseCommand: python3
21 | arguments: ["driver.py"]
22 | inputs:
23 | mode:
24 | type: string
25 | inputBinding:
26 | position: 1
27 | prefix: --mode
28 | prob_file:
29 | type: File
30 | inputBinding:
31 | position: 2
32 | prefix: --prob_file
33 | # gt_file:
34 | # type: File?
35 | # inputBinding:
36 | # position: 3
37 | # prefix: --gt_file
38 | # ws_file:
39 | # type: File?
40 | # inputBinding:
41 | # position: 4
42 | # prefix: --ws_file
43 | train_file:
44 | type: File?
45 | inputBinding:
46 | position: 5
47 | prefix: --train_file
48 | seeds_cc_threshold:
49 | type: string
50 | inputBinding:
51 | position: 6
52 | prefix: --seeds_cc_threshold
53 | agg_threshold:
54 | type: string
55 | inputBinding:
56 | position: 7
57 | prefix: --agg_threshold
58 | outfile:
59 | type: string
60 | inputBinding:
61 | position: 8
62 | prefix: --outfile
63 | outputs:
64 | neuron_segmentation_out:
65 | type: File
66 | outputBinding:
67 | glob: $(inputs.outfile)
68 |
69 |
--------------------------------------------------------------------------------
/saber/i2g/neuron_segmentation/requirements.txt:
--------------------------------------------------------------------------------
1 | pytest>=2.8
2 | setuptools>=19.6
3 | coverage>=4.0
4 | pytest-cov>=2.2
5 | numpy>=1.11
6 | nose>=1.3
7 | numpydoc>=0.5
8 | pillow>=2.7.0
9 | networkx>=1.6,<2.0
10 | h5py>=2.6
11 | scipy==0.19.1
12 | cython>=0.17
13 | viridis>=0.4
14 | scikit-image>=0.12
15 | scikit-learn>=0.15
16 | pyzmq>=14.7
17 | tqdm>=4.7
18 |
--------------------------------------------------------------------------------
/saber/i2g/neuron_segmentation/trained_classifier.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aplbrain/saber/051b9506fd7356113be013ac3c435a101fd95123/saber/i2g/neuron_segmentation/trained_classifier.pkl
--------------------------------------------------------------------------------
/saber/i2g/neuroproof/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM conda/miniconda3
2 |
3 | #Shared Library Dependencies
4 | RUN apt-get -qq update && apt-get install -qq -y \
5 | git \
6 | libgl1-mesa-dev \
7 | libsm6 \
8 | libglib2.0-0 \
9 | libxrender1 \
10 | libxss1 \
11 | libxft2 \
12 | libxt6
13 |
14 | #Neuroproof Installation
15 | RUN conda create -n saber_neuroproof -c flyem neuroproof
16 | ENV PATH=$PATH:"/usr/local/envs/saber_neuroproof/bin"
17 | RUN pip install numpy h5py
18 |
19 | WORKDIR /app
20 | COPY driver.py /app/driver.py
21 | # COPY kasthuri_classifier.xml /app/kasthuri_classifier.xml
22 | #RUN git clone https://github.com/janelia-flyem/neuroproof_examples.git
23 | RUN wget -O /app/kasthuri_classifier.xml https://saber-batch-dev.s3.amazonaws.com/kasthuri_classifier.xml
24 | ENTRYPOINT ["python3", "driver.py" ]
25 |
--------------------------------------------------------------------------------
/saber/i2g/neuroproof/README.md:
--------------------------------------------------------------------------------
1 | ## Example Commands
2 |
3 | Try:
4 |
5 | `docker run -v $(pwd):/app aplbrain/neuroproof python3 ./driver.py -m 0 -o my_classifier.xml --pred_file ./test_data/boundary_prediction.h5 --gt_file ./test_data/groundtruth.h5 --ws_file ./test_data/oversegmented_stack_labels.h5`
--------------------------------------------------------------------------------
/saber/i2g/neuroproof/neuroproof.cwl:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | cwlVersion: v1.0
16 | class: CommandLineTool
17 | hints:
18 | DockerRequirement:
19 | dockerPull: aplbrain/neuroproof
20 | baseCommand: python3
21 | arguments: ["driver.py"]
22 | inputs:
23 | mode:
24 | type: string
25 | inputBinding:
26 | position: 1
27 | prefix: --mode
28 | ws_file:
29 | type: File
30 | inputBinding:
31 | position: 2
32 | prefix: --ws_file
33 |   pred_file:
34 |     type: File
35 |     inputBinding:
36 |       position: 3
37 |       prefix: --pred_file
38 |   gt_file:
39 |     type: File?
40 |     inputBinding:
41 |       position: 4
42 |       prefix: --gt_file
43 |   class_file:
44 |     type: File?
45 |     inputBinding:
46 |       position: 5
47 |       prefix: --train_file
48 |   iterations:
49 |     type: string?
50 |     inputBinding:
51 |       position: 6
52 |       prefix: --num_iterations
53 |   use_mito:
54 |     type: string?
55 |     inputBinding:
56 |       position: 7
57 |       prefix: --use_mito
58 |   outfile:
59 |     type: string
60 |     inputBinding:
61 |       position: 8
62 |       prefix: --outfile
63 | outputs:
64 | neuroproof_out:
65 | type: File
66 | outputBinding:
67 | glob: $(inputs.outfile)
68 |
--------------------------------------------------------------------------------
/saber/i2g/seg_syn_association/Dockerfile:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | #Use an official Python runtime as a parent image
16 | FROM python:3.6
17 |
18 | # Install any needed packages specified in requirements.txt
19 | RUN pip install numpy
20 | RUN pip install scikit-image
21 | RUN pip install scipy boto3
22 | RUN pip install pandas
23 |
24 | # RUN git clone https://github.com/jhuapl-boss/intern.git && cd intern && git checkout RemoteExtension && git pull && python3 setup.py install --user
25 | RUN pip install intern
26 | RUN pip install boto3
27 | RUN pip install botocore
28 |
29 | RUN mkdir /app
30 | COPY ./*.py /app/
31 | RUN chown -R 1000:100 /app/
32 | ENV PATH /app:$PATH
33 | WORKDIR /app
34 |
35 |
--------------------------------------------------------------------------------
/saber/i2g/seg_syn_association/README.md:
--------------------------------------------------------------------------------
1 | docker build . -t aplbrain/i2g:assoc
2 | docker run -v $(pwd)/data:/data aplbrain/i2g:assoc python /app/seg_syn_assoc.py
3 |
--------------------------------------------------------------------------------
/saber/i2g/seg_syn_association/assoc_local.cwl:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | #!/usr/bin/env cwl-runner
16 |
17 | cwlVersion: v1.0
18 | class: CommandLineTool
19 | hints:
20 | DockerRequirement:
21 | dockerPull: aplbrain/i2gassoc
22 | baseCommand: python
23 | arguments: ['/app/seg_syn_assoc.py']
24 | inputs:
25 | seg_file:
26 | type: File
27 | inputBinding:
28 | position: 1
29 | prefix: --seg_file
30 | syn_file:
31 | type: File
32 | inputBinding:
33 | position: 2
34 | prefix: --syn_file
35 | output_name:
36 | type: string
37 | inputBinding:
38 | position: 3
39 | prefix: --output
40 | output_name_noneu:
41 | type: string
42 | inputBinding:
43 | position: 4
44 | prefix: --output_noneu
45 | dilation:
46 | type: string?
47 | inputBinding:
48 | position: 5
49 | prefix: --dilation
50 | threshold:
51 | type: string?
52 | inputBinding:
53 | position: 6
54 | prefix: --threshold
55 | blob:
56 | type: string?
57 | inputBinding:
58 | position: 7
59 | prefix: --blob
60 | outputs:
61 | assoc_output:
62 | type: File
63 | outputBinding:
64 | glob: $(inputs.output_name)
65 | assoc_output_noneu:
66 | type: File
67 | outputBinding:
68 | glob: $(inputs.output_name_noneu)
69 |
--------------------------------------------------------------------------------
/saber/postprocessing/blob_detect/Dockerfile:
--------------------------------------------------------------------------------
1 | # Copyright 2020 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | FROM python:3.7-slim
16 |
17 | RUN pip install numpy \
18 | && pip install scikit-image
19 | WORKDIR /src/
20 | COPY *.py /src/
21 | ENTRYPOINT ["python", "blob_detect.py"]
--------------------------------------------------------------------------------
/saber/postprocessing/blob_detect/README.md:
--------------------------------------------------------------------------------
1 | # Blob Detect Tool
2 | Author: Daniel Xenes
3 | Takes a volume_thresholded volume (binary volume) and finds blobs in it (hopefully cell bodies).
4 |
5 | Inputs:
6 | input - (str) binary map input file
7 | max - (float) maximum area to be counted
8 | min - (float) minimum area to be counted
9 | outfile - (str) output file name
10 |
11 | outputs:
12 |
13 | MxN Array containing centroids where
14 | M is number of blobs and
15 | N is ndim of input array
16 |
17 | ## How to use
18 |
19 | `docker run aplbrain/blob_detect -i INPUT_FILE --min MINIMUM --max MAXIMUM --outfile OUTPUT_FILE`
20 |
21 | Input files must be numpy .npy files.
--------------------------------------------------------------------------------
/saber/postprocessing/blob_detect/blob_detect.cwl:
--------------------------------------------------------------------------------
1 | # Copyright 2020 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | cwlVersion: v1.0
16 | class: CommandLineTool
17 | hints:
18 | DockerRequirement:
19 | dockerPull: aplbrain/blob_detect
20 | baseCommand: python
21 | arguments: ["blob_detect.py"]
22 | inputs:
23 | input:
24 | type: File
25 | inputBinding:
26 | position: 1
27 | prefix: --input
28 | min:
29 | type: string
30 | inputBinding:
31 | position: 2
32 | prefix: --min
33 | max:
34 | type: string
35 | inputBinding:
36 | position: 3
37 | prefix: --max
38 | outfile:
39 | type: string
40 | inputBinding:
41 | position: 4
42 | prefix: --outfile
43 | outputs:
44 | blob_detect_out:
45 | type: File
46 | outputBinding:
47 | glob: $(inputs.outfile)
48 |
--------------------------------------------------------------------------------
/saber/postprocessing/blob_detect/blob_detect.py:
--------------------------------------------------------------------------------
1 | # Copyright 2020 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | import numpy as np
16 | from skimage.measure import label, regionprops
17 | import argparse
18 |
19 |
def get_parser():
    """Build the argument parser for the blob-detect CLI."""
    p = argparse.ArgumentParser(description="Blob Detect Tool")
    # Fallback action: print usage when no handler is attached downstream.
    p.set_defaults(func=lambda _: p.print_help())
    p.add_argument("-i", "--input", required=True, help="Input numpy array file")
    p.add_argument(
        "--min", required=True, help="minimum area for region to be counted"
    )
    p.add_argument(
        "--max", required=True, help="maximum area for region to be counted"
    )
    p.add_argument("-o", "--outfile", required=True, help="Output file")
    return p
32 |
33 |
def blob_detect(dense_map, min, max):
    """Return centroids of labeled regions whose area lies in [min, max].

    Parameters
    ----------
    dense_map : ndarray
        Binary map; connected components are found with skimage's ``label``.
    min, max : str or float
        Inclusive area bounds. Converted to float once up front (the
        original converted them on every loop iteration). The parameter
        names shadow builtins but are kept: ``main`` passes them by keyword.

    Returns
    -------
    ndarray
        Shape (n_blobs, dense_map.ndim), dtype float — one centroid per row.
    """
    lo = float(min)
    hi = float(max)
    regions = regionprops(label(dense_map))
    # Collect centroids in a list and convert once: the original grew the
    # array with np.concatenate per region, which is O(n^2) overall.
    centroids = [r.centroid for r in regions if lo <= r.area <= hi]
    if not centroids:
        return np.empty((0, dense_map.ndim))
    return np.asarray(centroids, dtype=float)
42 |
43 |
def main():
    """CLI entry point: load the input array, detect blobs, save centroids."""
    args = get_parser().parse_args()
    dense = np.load(args.input)
    centroids = blob_detect(dense, min=args.min, max=args.max)
    np.save(args.outfile, centroids)


if __name__ == "__main__":
    main()
54 |
--------------------------------------------------------------------------------
/saber/postprocessing/blob_detect/test_workflow/example_blob_detect.cwl:
--------------------------------------------------------------------------------
1 | # Copyright 2020 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | cwlVersion: v1.0
16 | class: Workflow
17 | doc: local
18 |
21 | inputs:
22 | input: File
23 | min: string
24 | max: string
25 | outfile: string
26 |
27 | outputs:
28 | blob_detect_output:
29 | type: File
30 | outputSource: blob_detect/blob_detect_out
31 | steps:
32 | blob_detect:
33 | run: ../blob_detect.cwl
34 | in:
35 | input: input
36 | min: min
37 | max: max
38 | outfile: outfile
39 | out:
40 | [blob_detect_out]
41 | hints:
42 | saber:
43 | local: True
44 | file_path: /Users/xenesd1/Projects/aplbrain/saber/output
--------------------------------------------------------------------------------
/saber/postprocessing/threshold/Dockerfile:
--------------------------------------------------------------------------------
1 | # Copyright 2020 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 | FROM python:3.7-slim
15 | RUN pip install numpy
16 | WORKDIR /src/
17 | COPY *.py /src/
18 | ENTRYPOINT ["python", "threshold.py"]
19 |
--------------------------------------------------------------------------------
/saber/postprocessing/threshold/README.md:
--------------------------------------------------------------------------------
1 | # Volume Threshold Tool
2 | Author: Daniel Xenes
3 | Applies a threshold to a volume and outputs the binarized array. Prints an F1 score if groundtruth is provided.
4 |
5 | ## How to use
6 |
7 | `docker run aplbrain/threshold -i INPUT_FILE -t THRESHOLD -o OUTPUT_FILE -gt GROUNDTRUTH_FILE`
8 |
9 | Input files must be numpy .npy files.
10 |
--------------------------------------------------------------------------------
/saber/postprocessing/threshold/test_workflow/example_job.yaml:
--------------------------------------------------------------------------------
1 | #Boss files
2 | host_name: api.bossdb.io
3 | coord: GT-VS0172
4 | token:
5 | coll: GTXrayData
6 | exp: VS0172
7 | chan_labels: ilastik_dense_c_pixel_ahb
8 | chan_img: full_vol
9 | dtype_img: uint8
10 | dtype_lbl: uint64
11 | itype_name: image
12 | res: 0
13 | xmin: 4400
14 | xmax: 4656
15 | ymin: 343
16 | ymax: 599
17 | zmin: 211
18 | zmax: 300
19 | padding: 0
20 |
21 | #threshold input
22 | threshold: 0.5
23 |
24 | #Output File Names
25 | raw_pull_output_name: pull_output.npy
26 | anno_pull_output_name: anno_output.npy
27 | threshold_output_name: threshold_output.npy
--------------------------------------------------------------------------------
/saber/postprocessing/threshold/threshold.cwl:
--------------------------------------------------------------------------------
1 | # Copyright 2020 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | cwlVersion: v1.0
16 | class: CommandLineTool
17 | hints:
18 | DockerRequirement:
19 | dockerPull: aplbrain/threshold
20 | baseCommand: python
21 | arguments: ["threshold.py"]
22 | inputs:
23 | input:
24 | type: File
25 | inputBinding:
26 | position: 1
27 | prefix: --input
28 | groundtruth:
29 | type: File?
30 | inputBinding:
31 | position: 2
32 | prefix: --groundtruth
33 | threshold:
34 | type: string
35 | inputBinding:
36 | position: 3
37 | prefix: --threshold
38 | outfile:
39 | type: string
40 | inputBinding:
41 | position: 4
42 | prefix: --outfile
43 | outputs:
44 | threshold_out:
45 | type: File
46 | outputBinding:
47 | glob: $(inputs.outfile)
48 |
--------------------------------------------------------------------------------
/saber/preprocessing/normalization/Dockerfile:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | #Use an official Python runtime as a parent image
16 | FROM python:3
17 |
18 | # Install any needed packages specified in requirements.txt
19 | RUN pip install scikit-image
20 | RUN pip install numpy
21 | RUN pip install scipy
22 |
23 | RUN mkdir /app
24 | COPY --chown=1000:100 normalize /app/
25 | ENV PATH /app:$PATH
26 | WORKDIR /app
27 |
28 |
--------------------------------------------------------------------------------
/saber/preprocessing/normalization/normalize:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | #!/usr/bin/env python
16 |
17 | import argparse
18 | import sys
19 |
20 | import numpy as np
21 |
22 | from skimage import data, img_as_float
23 | from skimage import exposure
24 |
def normalize(args):
    """Apply per-slice contrast correction to a volume and save the result.

    Loads ``args.input`` (a .npy file), corrects each z-slice in place —
    gamma correction when ``args.mode == 1``, logarithmic otherwise — and
    writes the volume to ``args.output``.

    NOTE(review): gamma is hard-coded to 2 and the log gain to 1; the
    parsed --gamma/--scale arguments are not consulted here — confirm
    whether that is intended.
    """
    volume = np.load(args.input)
    depth = volume.shape[2]
    for z in range(depth):
        current = volume[:, :, z]
        if args.mode == 1:
            # Gamma correction
            volume[:, :, z] = exposure.adjust_gamma(current, 2)
        else:
            # Logarithmic correction (saved in place)
            volume[:, :, z] = exposure.adjust_log(current, 1)

    with open(args.output, 'wb') as out:
        np.save(out, volume)
40 |
def main():
    """Parse command-line arguments and run :func:`normalize`.

    Bug fix: the original constructed the descriptive parser and then
    immediately replaced it with a bare ``ArgumentParser(add_help=False)``,
    discarding the description and disabling ``--help``. A single parser is
    built instead. ``-i``/``-o`` are now required: normalize() uses both
    unconditionally, so omitting them only deferred the crash to np.load(None).
    """
    parser = argparse.ArgumentParser(description='boss processing script')

    parser.add_argument('-m', '--mode', required=True, type=int, help='Mode: Logarithmic(0) Gamma(1)')
    parser.add_argument('-o', '--output', required=True, help='Output file')
    parser.add_argument('-i', '--input', required=True, help='Input file')
    # NOTE(review): --gamma and --scale are accepted but normalize() currently
    # hard-codes gamma=2 and log gain=1 — confirm intended before wiring up.
    parser.add_argument('--gamma', type=float, default=1.0, help='Non negative real number. Default value is 1.')
    parser.add_argument('--scale', type=float, default=1.0, help='The constant multiplier. Default value is 1')

    args = parser.parse_args()
    normalize(args)


if __name__ == '__main__':
    main()
55 |
--------------------------------------------------------------------------------
/saber/preprocessing/normalization/normalize.cwl:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | #!/usr/bin/env cwl-runner
16 |
17 | cwlVersion: v1.0
18 | class: CommandLineTool
19 | hints:
20 | DockerRequirement:
21 | dockerPull: saber:preprocess
22 | baseCommand: [python, /app/normalize]
23 | inputs:
24 | mode:
25 | type: int?
26 | inputBinding:
27 | prefix: -m
28 | position: 1
29 | output_name:
30 | type: string
31 | inputBinding:
32 | position: 2
33 | prefix: -o
34 | input:
35 | type: File
36 | inputBinding:
37 | position: 3
38 | prefix: -i
39 | gamma:
40 | type: float?
41 | inputBinding:
42 | prefix: --gamma
43 | position: 4
44 | scale:
45 | type: float?
46 | inputBinding:
47 | prefix: --scale
48 | position: 5
49 | outputs:
50 | cell_detect_results:
51 | type: File
52 | outputBinding:
53 | glob: $(inputs.output_name)
54 |
--------------------------------------------------------------------------------
/saber/xbrain/Dockerfile:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | FROM jupyter/scipy-notebook:17aba6048f44
16 | RUN conda install --yes -c ilastik-forge/label/cf201901 ilastik-dependencies-no-solvers-no-gui \
17 | && conda clean -y --all
18 | RUN conda install --yes --force libgfortran scipy \
19 | && conda clean -y --all
20 |
21 | #For supervised only
22 | # User must be responsible for supplying their own classifier
23 | # ADD ./classifier/xbrain_vessel_seg_v7.ilp /classifier/xbrain.ilp
24 |
25 | RUN pip install --no-cache-dir mahotas
26 | RUN pip install --no-cache-dir ndparse
27 | RUN pip install --no-cache-dir nibabel
28 | RUN pip install --no-cache-dir blosc==1.4.4
29 | RUN mkdir app
30 | RUN git clone https://github.com/jhuapl-boss/intern.git && cd intern && git checkout RemoteExtension && git pull && python3 setup.py install --user
31 | ADD ./unsupervised_celldetect.py /app/unsupervised_celldetect.py
32 | #Necessary to use for galaxy
33 | ADD --chown=1000:100 ./xbrain.py ./process-xbrain.py ./split_cells.py /app/
34 | ENV PATH /app:$PATH
35 | USER root
36 |
--------------------------------------------------------------------------------
/saber/xbrain/example/example_job.yml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | # Shared
16 |
17 | # Inputs for BOSS
18 | host_name: api.bossdb.org
19 | token:
20 | coll_name: XBrainGTech5
21 | exp_name: Proj4
22 | dtype_name: uint8
23 | itype_name: image
24 | coord_name: XBrain_ingest_coord2
25 | xmin: 1100
26 | xmax: 1400
27 | ymin: 1000
28 | ymax: 1300
29 | zmin: 300
30 | zmax: 600
31 | padding: 0
32 | resolution: 0
33 | ## Boss pull
34 | in_chan_name: full_vol
35 | ## Boss push (membranes)
36 | mem_chan_name: membranes_Pub
37 | ## Boss push (vessels)
38 | ves_chan_name: vessels_Pub
39 | ## Boss push (cells)
40 | cell_chan_name: cells_Pub
41 |
42 | # Membrane classify
43 | classifier:
44 | class: File
45 | path: xbrain_vessel_seg_v7.ilp
46 | ram_amount: 2000
47 | num_threads: -1
48 |
49 |
50 | # Cell detect
51 | detect_threshold: 0.2
52 | stop: 0.47
53 | initial_template_size: 18
54 | detect_dilation: 8
55 | max_cells: 1000
56 | cell_index: 2
57 |
58 | # Vessel segment
59 | segment_threshold: 0.68
60 | segment_dilation: 3
61 | minimum: 4000
62 | vessel_index: 1
63 |
64 |
65 |
66 |
67 |
68 |
--------------------------------------------------------------------------------
/saber/xbrain/example2d/job.yml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | data:
16 | class: File
17 | path: /your/path/to/data.nii
18 | cell_gt:
19 | class: File
20 | path: /your/path/to/tj_anno.nii
21 | detect_threshold: 0.67
22 | stop: 0.47
23 | initial_template_size: 18
24 | detect_dilation: 2
25 | erode: 1
26 | max_cells: 400
27 | num_samp: 500000
28 | num_comp: 2
29 | _saber_bucket: saber-batch
30 |
--------------------------------------------------------------------------------
/saber/xbrain/example2d/optimization2d.cwl:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | #!/usr/bin/env cwl-runner
16 |
17 | cwlVersion: v1.0
18 | class: Workflow
19 | doc: local
20 |
21 | inputs:
22 | data: File
23 | cell_gt: File
24 | detect_threshold: float?
25 | stop: float?
26 | initial_template_size: int?
27 | detect_dilation: int?
28 | max_cells: int?
29 | num_samp: int?
30 | num_comp: int?
31 | erode: int?
32 |
33 | outputs:
34 | membrane_classify_output:
35 | type: File
36 | outputSource: membrane_classify/membrane_probability_map
37 | cell_detect_output:
38 | type: File
39 | outputSource: cell_detect/cell_detect_results
40 | metrics_output:
41 | type: File
42 | outputSource: metrics/metrics
43 | steps:
44 | membrane_classify:
45 | run: ../tools/unsup_membrane_classify_nos3.cwl
46 | in:
47 | input: data
48 | output_name:
49 | default: 'optiout.npy'
50 | num_samp: num_samp
51 | num_comp: num_comp
52 | erodesz: erode
53 | hints:
54 | saber:
55 | local: True
56 | out: [membrane_probability_map]
57 | cell_detect:
58 | run: ../tools/unsup_cell_detect_nos3.cwl
59 | in:
60 | input: membrane_classify/membrane_probability_map
61 | output_name:
62 | default: 'optiout.npy'
63 | threshold: detect_threshold
64 | stop: stop
65 | initial_template_size: initial_template_size
66 | dilation: detect_dilation
67 | max_cells: max_cells
68 | hints:
69 | saber:
70 | local: True
71 | out: [cell_detect_results]
72 | metrics:
73 | run: ../tools/unsup_metrics_nos3.cwl
74 | in:
75 | input: cell_detect/cell_detect_results
76 | output_name:
77 | default: 'optiout.npy'
78 | initial_template_size: initial_template_size
79 | ground_truth: cell_gt
80 | hints:
81 | saber:
82 | local: True
83 | score_format: "F1: {score}"
84 | out: [metrics]
85 |
--------------------------------------------------------------------------------
/saber/xbrain/example2d/params.yml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | stop:
16 | range:
17 | start: 0.2
18 | stop: 0.7
19 | step: 0.1
20 | parameters:
21 | abs: stop
22 | steps:
23 | - cell_detect
24 | initial_template_size:
25 | range:
26 | start: 4
27 | stop: 24
28 | step: 1
29 | parameters:
30 | abs: initial_template_size
31 | steps:
32 | - cell_detect
33 | - metrics
34 | dilation:
35 | range:
36 | start: 2
37 | stop: 14
38 | step: 1
39 | parameters:
40 | abs: dilation
41 | steps:
42 | - cell_detect
43 | erode:
44 | range:
45 | start: 0
46 | stop: 4
47 | step: 1
48 | parameters:
49 | abs: erodesz
50 | steps:
51 | - membrane_classify
52 | detect_threshold:
53 | range:
54 | start: 0.2
55 | stop: 0.7
56 | step: 0.1
57 | parameters:
58 | abs: threshold
59 | steps:
60 | - cell_detect
--------------------------------------------------------------------------------
/saber/xbrain/jobs/cell_detect/job.yml:
--------------------------------------------------------------------------------
1 |
2 | #Boss files
3 | host_name: api.bossdb.io
4 | coord: GT-VS0172
5 | token:
6 | coll: GTXrayData
7 | exp: VS0172
8 | chan_labels: ilastik_dense_c_pixel_ahb
9 | chan_img: full_vol
10 | dtype_img: uint8
11 | dtype_lbl: uint64
12 | itype_name: image
13 | res: 0
14 | xmin: 4400
15 | xmax: 4656
16 | ymin: 343
17 | ymax: 599
18 | zmin: 211
19 | zmax: 300
20 | padding: 0
21 |
22 | # Unet Train
23 | use_boss: 1
24 | train_pct: 0.7
25 | n_epochs: 5
26 | mb_size: 4
27 | n_mb_per_epoch: 3
28 | learning_rate: 0.01
29 | use_adam: False
30 | beta1: 0.9
31 | beta2: 0.999
32 | momentum: 0.99
33 | decay: 0.000001
34 | save_freq: 50
35 | do_warp: False
36 | tile_size: 256
37 | _saber_bucket: saber-batch
38 |
39 | detect_threshold: 0.2
40 | stop: 0.47
41 | initial_template_size: 18
42 | detect_dilation: 8
43 | #max_cells: 500
44 |
45 |
46 | #Output File Names
47 | raw_pull_output_name: pull_output.npy
48 | anno_pull_output_name: anno_output.npy
49 | optimize_output: new_weights.h5
50 | score_out: f1_score.npy
51 | classify_output_name: probability_map.npy
52 | detect_output_name: detect_output.npy
53 | dense_output_name: dense_output.npy
54 | metrics_out: metrics.npy
--------------------------------------------------------------------------------
/saber/xbrain/jobs/cell_detect/job_simple.yml:
--------------------------------------------------------------------------------
1 | #Boss files
2 | host_name: api.bossdb.io
3 | coord: GT-VS0172
4 | token:
5 | coll: GTXrayData
6 | exp: VS0172
7 | chan_labels: ilastik_dense_c_pixel_ahb
8 | chan_img: full_vol
9 | dtype_img: uint8
10 | dtype_lbl: uint64
11 | itype_name: image
12 | res: 0
13 | padding: 0
14 |
15 | # Coordinates
16 | test_xmin: 4400
17 | test_xmax: 4656
18 | test_ymin: 343
19 | test_ymax: 599
20 | test_zmin: 211
21 | test_zmax: 300
22 |
23 | train_xmin: 4400
24 | train_xmax: 5168
25 | train_ymin: 472
26 | train_ymax: 1240
27 | train_zmin: 211
28 | train_zmax: 300
29 |
30 |
31 | # Unet Train
32 | use_boss: 1
33 | train_pct: 0.9
34 | n_epochs: 150
35 | mb_size: 6
36 | n_mb_per_epoch: 3
37 | learning_rate: 0.0001
38 | use_adam: False
39 | beta1: 0.9
40 | beta2: 0.999
41 | momentum: 0.99
42 | decay: 0.000001
43 | save_freq: 50
44 | do_warp: False
45 | tile_size: 256
46 | _saber_bucket: saber-batch
47 |
48 | threshold: 0.5
49 | min: 10
50 | max: 10000
51 | initial_template_size: 18
52 |
53 |
54 |
55 | #Output File Names
56 | raw_pull_output_name: pull_output.npy
57 | anno_pull_output_name: anno_output.npy
58 | optimize_output: new_weights.h5
59 | score_out: f1_score.npy
60 | classify_output_name: probability_map.npy
61 | threshold_output_name: threshold_output.npy
62 | blob_detect_output_name: centroid.npy
63 | metrics_out: metrics.npy
--------------------------------------------------------------------------------
/saber/xbrain/jobs/cell_detect/params.yml:
--------------------------------------------------------------------------------
1 | n_epochs:
2 | range:
3 | start: 5
4 | stop: 10
5 | step: 1
6 | parameters:
7 | abs: n_epochs
8 | steps:
9 | - optimize
10 | learning_rate:
11 | range:
12 | start: 0.005
13 | stop: 0.02
14 | step: 0.005
15 | parameters:
16 | abs: learning_rate
17 | steps:
18 | - optimize
19 | threshold:
20 | range:
21 | start: 0.2
22 | stop: 0.5
23 | step: 0.1
24 | parameters:
25 | abs: detect_threshold
26 | steps:
27 | - cell_detect
28 | stop:
29 | range:
30 | start: 0.4
31 | stop: 0.6
32 | step: 0.05
33 | parameters:
34 | abs: stop
35 | steps:
36 | - cell_detect
37 | # mb_size:
38 | # range:
39 | # start: 2
40 | # stop: 8
41 | # step: 2
42 | # parameters:
43 | # abs: mb_size
44 | # steps:
45 | # - optimize
46 | # n_mb_per_epoch:
47 | # range:
48 | # start: 2
49 | # stop: 4
50 | # step: 1
51 | # parameters:
52 | # abs: n_mb_per_epoch
53 | # steps:
54 | # - optimize
55 | # tile_size:
56 | # range:
57 | # start: 64
58 | # stop: 256
59 | # step: 64
60 | # parameters:
61 | # abs: tile_size
62 | # steps:
63 | # - optimize
64 | # decay:
65 | # range:
66 | # start: 0.0000005
67 | # stop: 0.000002
68 | # step: 0.0000005
69 | # parameters:
70 | # abs: decay
71 | # steps:
72 | # - optimize
73 | # momentum:
74 | # range:
75 | # start: 0.95
76 | # stop: 0.99
77 | # step: 0.01
78 | # parameters:
79 | # abs: momentum
80 | # steps:
81 | # - optimize
--------------------------------------------------------------------------------
/saber/xbrain/jobs/full_test/example-job.yml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | # Shared
16 | # Inputs for BOSS
17 | host_name: api.bossdb.org
18 | token: TODO
19 | coll_name: GTXrayData
20 | exp_name: VS0172
21 | in_chan_name: full_vol
22 | out_chan_name: cell_test
23 | dtype_name: uint8
24 | itype_name: image
25 | coord_name: GT-VS0172
26 | resolution: 0
27 | xmin: 1177
28 | xmax: 1891
29 | ymin: 1692
30 | ymax: 2093
31 | zmin: 0
32 | zmax: 609
33 | padding: 5
34 | onesided: 1
35 | # Inputs for steps
36 | pull_output_name: pull_output.npy
37 | classifier:
38 | class: File
39 | path: xbrain_vessel_seg_v8.ilp
40 | membrane_classify_output_name: membrane_classify_output.npy
41 | cell_detect_output_name: cell_detect_output.npy
42 | vessel_segment_output_name: vessel_segment_output.npy
43 | ram_amount: 2000
44 | num_threads: -1
45 | detect_threshold: 0.2
46 | stop: 0.47
47 | initial_template_size: 18
48 | detect_dilation: 8
49 | max_cells: 500
50 | segment_threshold: 0.68
51 | segment_dilation: 3
52 | minimum: 4000
53 |
54 | cell_index: 1
55 | vessel_index: 0
56 |
57 |
58 | # Inputs for merge
59 | template_size: 19
60 | map_output_name: cell_detect_map.npy
61 | list_output_name: cell_detect_list.npy
62 | centroid_volume_output_name: centroid_volume.npy
63 |
64 | out_dtype_name: uint64
65 | out_itype_name: annotation
66 |
67 |
--------------------------------------------------------------------------------
/saber/xbrain/jobs/full_test/parameterization.yml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | x:
16 | range:
17 | start: 4300
18 | stop: 5800
19 | step: 500
20 | parameters:
21 | min: xmin
22 | max: xmax
23 | steps:
24 | - boss_merge
25 | - boss_pull
26 | y:
27 | range:
28 | start: 0
29 | stop: 1400
30 | step: 500
31 | parameters:
32 | min: ymin
33 | max: ymax
34 | steps:
35 | - boss_merge
36 | - boss_pull
37 | z:
38 | range:
39 | start: 0
40 | stop: 719
41 | step: 300
42 | parameters:
43 | min: zmin
44 | max: zmax
45 | steps:
46 | - boss_merge
47 | - boss_pull
48 |
--------------------------------------------------------------------------------
/saber/xbrain/jobs/merge_test/parameterization.yml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | x:
16 | range:
17 | start: 970
18 | stop: 1589
19 | step: 350
20 | parameters:
21 | min: xmin
22 | max: xmax
23 | steps:
24 | - boss_merge
25 | - boss_pull
26 | y:
27 | range:
28 | start: 858
29 | stop: 1533
30 | step: 350
31 | parameters:
32 | min: ymin
33 | max: ymax
34 | steps:
35 | - boss_merge
36 | - boss_pull
37 | z:
38 | range:
39 | start: 0
40 | stop: 500
41 | step: 250
42 | parameters:
43 | min: zmin
44 | max: zmax
45 | steps:
46 | - boss_merge
47 | - boss_pull
48 |
49 | initial_template_size:
50 | range:
51 | [19, 20, 34, 12, 16]
52 | parameters:
53 | abs: initial_template_size
54 | steps:
55 | - cell_detect
--------------------------------------------------------------------------------
/saber/xbrain/jobs/param_sweep/example_job.yml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | # Shared
16 | # Inputs for BOSS
17 | host_name: api.bossdb.org
18 | token:
19 | coll_name: GTXrayData
20 | exp_name: VS0172
21 | im_chan_name: full_vol
22 | an_chan_name: ilastik_dense_c_pixel_ahb
23 | dtype_name: uint8
24 | itype_name: image
25 | coord_name: GT-VS0172
26 | resolution: 0
27 | xmin: 4700
28 | xmax: 5000
29 | ymin: 600
30 | ymax: 900
31 | zmin: 210
32 | zmax: 310
33 | padding: 5
34 | onesided: 1
35 | # host_name: api.bossdb.org  # duplicate of the host_name key on line 17; commented out to avoid YAML duplicate-key ambiguity
36 | # Inputs for steps
37 | pull_output_name: pull_output.npy
38 | anno_output_name: anno_output.npy
39 |
40 | ram_amount: 2000
41 | num_threads: -1
42 | detect_threshold: 0.2
43 | stop: 0.1
44 | initial_template_size: 22
45 | detect_dilation: 0
46 | max_cells: 1500
47 | segment_threshold: 0.68
48 | segment_dilation: 3
49 | minimum: 4000
50 |
51 | cell_index: 1
52 | vessel_index: 0
53 |
54 |
55 | # Inputs for merge
56 | template_size: 19
57 | map_output_name: cell_detect_map.npy
58 | list_output_name: cell_detect_list.npy
59 | centroid_volume_output_name: centroid_volume.npy
60 |
61 | out_dtype_name: uint64
62 | out_itype_name: annotation
63 |
64 | membrane_output_name: membrane_out.npy
65 | detect_output_name: detect_out.npy
66 | metrics_output_name: metrics.npy
67 |
68 | num_samp: 500000
69 | num_comp: 2
70 | erode: 1
71 | vessel_thres: 0.8
72 | min_size: 0.6
73 | cell_class: 1
74 |
75 | dense_output_name: dense_output.npy
76 |
77 |
--------------------------------------------------------------------------------
/saber/xbrain/jobs/param_sweep/params.yml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | stop:
16 | range:
17 | start: 0.1
18 | stop: 0.47
19 | step: 0.15
20 | parameters:
21 | abs: stop
22 | steps:
23 | - cell_detect
24 | initial_template_size:
25 | range:
26 | start: 12
27 | stop: 40
28 | step: 10
29 | parameters:
30 | abs: initial_template_size
31 | steps:
32 | - cell_detect
33 | dilation:
34 | range:
35 | start: 0
36 | stop: 6
37 | step: 2
38 | parameters:
39 | abs: dilation
40 | steps:
41 | - cell_detect
--------------------------------------------------------------------------------
/saber/xbrain/jobs/unet_train_job/params.yml:
--------------------------------------------------------------------------------
1 | n_epochs:
2 | range:
3 | start: 200
4 | stop: 300
5 | step: 25
6 | parameters:
7 | abs: n_epochs
8 | steps:
9 | - optimize
10 | mb_size:
11 | range:
12 | start: 3
13 | stop: 5
14 | step: 1
15 | parameters:
16 | abs: mb_size
17 | steps:
18 | - optimize
19 | n_mb_per_epoch:
20 | range:
21 | start: 2
22 | stop: 4
23 | step: 1
24 | parameters:
25 | abs: n_mb_per_epoch
26 | steps:
27 | - optimize
28 | learning_rate:
29 | range:
30 | start: 0.0001
31 | stop: 0.0004
32 | step: 0.0001
33 | parameters:
34 | abs: learning_rate
35 | steps:
36 | - optimize
37 | beta1:
38 | range:
39 | start: 0.88
40 | stop: 0.92
41 | step: 0.01
42 | parameters:
43 | abs: beta1
44 | steps:
45 | - optimize
46 | beta2:
47 | range:
48 | start: 0.997
49 | stop: 0.999
50 | step: 0.001
51 | parameters:
52 | abs: beta2
53 | steps:
54 | - optimize
55 |
--------------------------------------------------------------------------------
/saber/xbrain/jobs/unet_train_job/params_sgd.yml:
--------------------------------------------------------------------------------
1 | n_epochs:
2 | range:
3 | start: 5
4 | stop: 10
5 | step: 1
6 | parameters:
7 | abs: n_epochs
8 | steps:
9 | - optimize
10 | mb_size:
11 | range:
12 | start: 2
13 | stop: 8
14 | step: 2
15 | parameters:
16 | abs: mb_size
17 | steps:
18 | - optimize
19 | n_mb_per_epoch:
20 | range:
21 | start: 2
22 | stop: 4
23 | step: 1
24 | parameters:
25 | abs: n_mb_per_epoch
26 | steps:
27 | - optimize
28 | #tile_size:
29 | # range:
30 | # start: 64
31 | # stop: 256
32 | # step: 64
33 | # parameters:
34 | # abs: tile_size
35 | # steps:
36 | # - optimize
37 | #learning_rate:
38 | # range:
39 | # start: 0.005
40 | # stop: 0.02
41 | # step: 0.005
42 | # parameters:
43 | # abs: learning_rate
44 | # steps:
45 | # - optimize
46 | #decay:
47 | # range:
48 | # start: 0.0000005
49 | # stop: 0.000002
50 | # step: 0.0000005
51 | # parameters:
52 | # abs: decay
53 | # steps:
54 | # - optimize
55 | # momentum:
56 | # range:
57 | # start: 0.95
58 | # stop: 0.99
59 | # step: 0.01
60 | # parameters:
61 | # abs: momentum
62 | # steps:
63 | # - optimize
--------------------------------------------------------------------------------
/saber/xbrain/jobs/unet_train_job/xbrain_unets_ex_job.yml:
--------------------------------------------------------------------------------
1 | # Unets
2 | use_boss: 1
3 | #Boss files
4 | coord: GT-VS0172
5 | img_file:
6 | lbl_file:
7 | token:
8 | coll: GTXrayData
9 | exp: VS0172
10 | chan_labels: ilastik_dense_c_pixel_ahb
11 | chan_img: full_vol
12 | dtype_img: uint8
13 | dtype_lbl: uint64
14 | res: 0
15 | xmin: 4400
16 | xmax: 4656
17 | ymin: 343
18 | ymax: 599
19 | zmin: 211
20 | zmax: 300
21 | train_pct: 0.7
22 | n_epochs: 250
23 | mb_size: 4
24 | n_mb_per_epoch: 3
25 | learning_rate: 0.0001
26 | beta1: 0.9
27 | beta2: 0.999
28 | save_freq: 50
29 | do_warp: False
30 | tile_size: 256
31 | weights_file:
32 | output: out_weights.h5
33 | score_out: f1_score.npy
34 |
--------------------------------------------------------------------------------
/saber/xbrain/jobs/unet_train_job/xbrain_unets_ex_job_s3.yml:
--------------------------------------------------------------------------------
1 | # Unets
2 | use_boss: 1
3 | #Boss files
4 | coord: GT-VS0172
5 | img_file: merp
6 | lbl_file: merp
7 | token:
8 | coll: GTXrayData
9 | exp: VS0172
10 | chan_labels: ilastik_dense_c_pixel_ahb
11 | chan_img: full_vol
12 | dtype_img: uint8
13 | dtype_lbl: uint64
14 | res: 0
15 | xmin: 4400
16 | xmax: 4656
17 | ymin: 343
18 | ymax: 599
19 | zmin: 211
20 | zmax: 300
21 | train_pct: 0.7
22 | n_epochs: 250
23 | mb_size: 4
24 | n_mb_per_epoch: 3
25 | learning_rate: 0.0001
26 | beta1: 0.9
27 | beta2: 0.999
28 | save_freq: 50
29 | do_warp: False
30 | tile_size: 256
31 | weights_file: None
32 | output: new_weights.h5
33 | score_out: f1_score.npy
34 | _saber_bucket: saber-batch
35 |
--------------------------------------------------------------------------------
/saber/xbrain/jobs/unet_train_job/xbrain_unets_ex_job_sgd.yml:
--------------------------------------------------------------------------------
1 | # Unets
2 | use_boss: 1
3 | #Boss files
4 | coord: GT-VS0172
5 | token:
6 | coll: GTXrayData
7 | exp: VS0172
8 | chan_labels: ilastik_dense_c_pixel_ahb
9 | chan_img: full_vol
10 | dtype_img: uint8
11 | dtype_lbl: uint64
12 | res: 0
13 | xmin: 4400
14 | xmax: 4656
15 | ymin: 343
16 | ymax: 599
17 | zmin: 211
18 | zmax: 300
19 | train_pct: 0.7
20 | n_epochs: 5
21 | mb_size: 4
22 | n_mb_per_epoch: 3
23 | learning_rate: 0.01
24 | use_adam: False
25 | beta1: 0.9
26 | beta2: 0.999
27 | momentum: 0.99
28 | decay: 0.000001
29 | save_freq: 50
30 | do_warp: False
31 | tile_size: 256
32 | output: new_weights.h5
33 | score_out: f1_score.npy
34 | _saber_bucket: saber-batch
35 |
--------------------------------------------------------------------------------
/saber/xbrain/jobs/unet_train_job/xbrain_unets_train.cwl:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env cwl-runner
2 |
3 | cwlVersion: v1.0
4 | class: Workflow
5 | inputs:
6 | use_boss: int
7 | img_file: File?
8 | lbl_file: File?
9 | coord: string?
10 | token: string?
11 | coll: string?
12 | exp: string?
13 | chan_labels: string?
14 | chan_img: string?
15 | dtype_img: string?
16 | dtype_lbl: string?
17 | res: int?
18 | xmin: int?
19 | xmax: int?
20 | ymin: int?
21 | ymax: int?
22 | zmin: int?
23 | zmax: int?
24 | train_pct: float?
25 | n_epochs: int?
26 | mb_size: int?
27 | n_mb_per_epoch: int?
28 | learning_rate: float?
29 | use_adam: boolean?
30 | beta1: float?
31 | beta2: float?
32 | decay: float?
33 | momentum: float?
34 | save_freq: int?
35 | do_warp: boolean?
36 | tile_size: int?
37 | weights_file: File?
38 | output: string
39 | score_out: string
40 | outputs:
41 | train_output:
42 | type: File
43 | outputSource: optimize/classifier_weights
44 | steps:
45 | optimize:
46 | run: ../../tools/membrane_unets_train.cwl
47 | in:
48 | use_boss: use_boss
49 | img_file: img_file
50 | lbl_file: lbl_file
51 | coord: coord
52 | token: token
53 | coll: coll
54 | exp: exp
55 | chan_labels: chan_labels
56 | chan_img: chan_img
57 | dtype_img: dtype_img
58 | dtype_lbl: dtype_lbl
59 | res: res
60 | xmin: xmin
61 | xmax: xmax
62 | ymin: ymin
63 | ymax: ymax
64 | zmin: zmin
65 | zmax: zmax
66 | train_pct: train_pct
67 | n_epochs: n_epochs
68 | mb_size: mb_size
69 | n_mb_per_epoch: n_mb_per_epoch
70 | learning_rate: learning_rate
71 | use_adam: use_adam
72 | beta1: beta1
73 | beta2: beta2
74 | decay: decay
75 | momentum: momentum
76 | save_freq: save_freq
77 | do_warp: do_warp
78 | tile_size: tile_size
79 | weights_file: weights_file
80 | output: output
81 | score_out: score_out
82 | out: [classifier_weights,scores]
83 | hints:
84 | saber:
85 | score_format: "F1: {score}\n"
86 | local: True
87 |
--------------------------------------------------------------------------------
/saber/xbrain/jobs/unet_train_job/xbrain_unets_train_sgd.cwl:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env cwl-runner
2 |
3 | cwlVersion: v1.0
4 | class: Workflow
5 | doc: local
6 | inputs:
7 | use_boss: int
8 | coord: string?
9 | token: string?
10 | coll: string?
11 | exp: string?
12 | chan_labels: string?
13 | chan_img: string?
14 | dtype_img: string?
15 | dtype_lbl: string?
16 | res: int?
17 | xmin: int?
18 | xmax: int?
19 | ymin: int?
20 | ymax: int?
21 | zmin: int?
22 | zmax: int?
23 | train_pct: float?
24 | n_epochs: int?
25 | mb_size: int?
26 | n_mb_per_epoch: int?
27 | learning_rate: float?
28 | use_adam: boolean?
29 | beta1: float?
30 | beta2: float?
31 | decay: float?
32 | momentum: float?
33 | save_freq: int?
34 | do_warp: boolean?
35 | tile_size: int?
36 | output: string
37 | score_out: string
38 | outputs:
39 | train_output:
40 | type: File
41 | outputSource: optimize/classifier_weights
42 | steps:
43 | optimize:
44 | run: ../../tools/membrane_unets_train.cwl
45 | in:
46 | use_boss: use_boss
47 | coord: coord
48 | token: token
49 | coll: coll
50 | exp: exp
51 | chan_labels: chan_labels
52 | chan_img: chan_img
53 | dtype_img: dtype_img
54 | dtype_lbl: dtype_lbl
55 | res: res
56 | xmin: xmin
57 | xmax: xmax
58 | ymin: ymin
59 | ymax: ymax
60 | zmin: zmin
61 | zmax: zmax
62 | train_pct: train_pct
63 | n_epochs: n_epochs
64 | mb_size: mb_size
65 | n_mb_per_epoch: n_mb_per_epoch
66 | learning_rate: learning_rate
67 | use_adam: use_adam
68 | beta1: beta1
69 | beta2: beta2
70 | decay: decay
71 | momentum: momentum
72 | save_freq: save_freq
73 | do_warp: do_warp
74 | tile_size: tile_size
75 | output: output
76 | score_out: score_out
77 | out: [classifier_weights,scores]
78 | hints:
79 | saber:
80 | score_format: "F1: {score}\n"
81 | local: True
82 | file_path: /home/ubuntu/saber/volumes/data/local
83 |
--------------------------------------------------------------------------------
/saber/xbrain/split_cells.py:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | #!/usr/bin/env python
16 |
17 | import argparse
18 | import sys
19 | import itertools
20 | import numpy as np
21 | sphere_radius = 5
22 | #Take output of cell detect step, split into two streams- one list of cells, the other the map of cells
def split_cells(args):
    """Split cell-detect output into a cell map, a cell list, and a volume
    with a sphere rasterized at each detected cell centroid.

    Args:
        args: argparse.Namespace with attributes:
            input: path to a .npy file whose element 0 is the detected-cell
                list (each row starts with the 3 centroid coordinates) and
                whose element 1 is the cell map volume.
            map_output: output path for the cell map (.npy).
            list_output: output path for the cell list (.npy).
            centroid_volume_output: output path for the centroid-sphere
                volume (.npy).
            sphere_size: radius, in voxels, of each rasterized sphere.
    """
    # allow_pickle is required under numpy >= 1.16.3 to load the two-element
    # heterogeneous (object-dtype) array produced by the cell-detect step
    # (a list and a volume cannot share one shape) -- input is trusted
    # pipeline output, not user-supplied data.
    cells = np.load(args.input, allow_pickle=True)
    cell_map = cells[1]
    cell_list = cells[0]
    with open(args.map_output, 'wb') as f:
        np.save(f, cell_map)

    # Rasterize each centroid as a sphere of radius sphere_size, clipped
    # to the volume bounds.
    cell_centroid_volume = np.zeros(cell_map.shape)
    for cell in cell_list:
        axes_range = [[], [], []]
        for i, axes in enumerate(cell[:3]):
            min_range = max(int(axes - args.sphere_size), 0)
            max_range = min(int(axes + args.sphere_size), cell_map.shape[i] - 1)
            # +1 because range() excludes its stop: max_range is an
            # *inclusive* cap, and the original code silently dropped the
            # voxels at +sphere_size along each axis.
            axes_range[i] = range(min_range, max_range + 1)
        for pixel in itertools.product(*axes_range):
            # Keep only bounding-box voxels inside the Euclidean sphere.
            if np.linalg.norm(np.array(cell[:3]) - np.array(pixel)) <= args.sphere_size:
                cell_centroid_volume[pixel] = 1
    with open(args.list_output, 'wb') as f:
        np.save(f, cell_list)
    with open(args.centroid_volume_output, 'wb') as f:
        np.save(f, cell_centroid_volume)
46 |
47 |
def main():
    """Entry point: parse command-line options and split the cell-detect output."""
    cli = argparse.ArgumentParser(description='cell results splitting script')
    # Stash a help-printing callable as the default `func` attribute.
    cli.set_defaults(func=lambda _: cli.print_help())

    cli.add_argument('-i', '--input', required=True, help='Input file')
    cli.add_argument('--map_output', required=True, help='Map Output file')
    cli.add_argument('--list_output', required=True, help='List Output file')
    cli.add_argument('--centroid_volume_output', required=True,
                     help='Output volume with spheres')
    cli.add_argument('--sphere_size', required=False, default=5, type=int,
                     help='Size of the spheres in the centroids volume')

    split_cells(cli.parse_args())
59 |
60 | if __name__ == '__main__':
61 | main()
62 |
--------------------------------------------------------------------------------
/saber/xbrain/tools/cell_detect_nos3.cwl:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | #!/usr/bin/env cwl-runner
16 |
17 | cwlVersion: v1.0
18 | class: CommandLineTool
19 | hints:
20 | DockerRequirement:
21 | # dockerPull: xbrain:airflow-docker
22 | dockerPull: aplbrain/xbrain:latest
23 | baseCommand: process-xbrain.py
24 | arguments: ["detect"]
25 | inputs:
26 | input:
27 | type: File
28 | inputBinding:
29 | position: 1
30 | prefix: -i
31 | output_name:
32 | type: string
33 | inputBinding:
34 | position: 2
35 | prefix: -o
36 | threshold:
37 | type: float?
38 | inputBinding:
39 | prefix: --threshold
40 | position: 4
41 | stop:
42 | type: float?
43 | inputBinding:
44 | prefix: --stop
45 | position: 5
46 | initial_template_size:
47 | type: int?
48 | inputBinding:
49 | prefix: --initial-template-size
50 | position: 6
51 | dilation:
52 | type: int?
53 | inputBinding:
54 | prefix: --dilation
55 | position: 7
56 | max_cells:
57 | type: int?
58 | inputBinding:
59 | prefix: --max-cells
60 | position: 8
61 | cell_index:
62 | type: int?
63 | inputBinding:
64 | prefix: --cell-index
65 | position: 9
66 | outputs:
67 | cell_detect_results:
68 | type: File
69 | outputBinding:
70 | glob: $(inputs.output_name)
71 |
--------------------------------------------------------------------------------
/saber/xbrain/tools/cell_split.cwl:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | #!/usr/bin/env cwl-runner
16 |
17 | cwlVersion: v1.0
18 | class: CommandLineTool
19 | hints:
20 | DockerRequirement:
21 | dockerPull: aplbrain/xbrain:latest
22 | baseCommand: split_cells.py
23 | arguments: []
24 | inputs:
25 | input:
26 | type: File
27 | inputBinding:
28 | position: 1
29 | prefix: -i
30 | map_output_name:
31 | type: string
32 | inputBinding:
33 | position: 2
34 | prefix: --map_output
35 | list_output_name:
36 | type: string
37 | inputBinding:
38 | position: 3
39 | prefix: --list_output
40 | centroid_volume_output_name:
41 | type: string
42 | inputBinding:
43 | position: 4
44 | prefix: --centroid_volume_output
45 | # bucket:
46 | # type: string
47 | outputs:
48 | cell_map:
49 | type: File
50 | outputBinding:
51 | glob: $(inputs.map_output_name)
52 | cell_list:
53 | type: File
54 | outputBinding:
55 | glob: $(inputs.list_output_name)
56 | centroid_volume:
57 | type: File
58 | outputBinding:
59 | glob: $(inputs.centroid_volume_output_name)
60 |
--------------------------------------------------------------------------------
/saber/xbrain/tools/membrane_classify_nos3.cwl:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | #!/usr/bin/env cwl-runner
16 |
17 | cwlVersion: v1.0
18 | class: CommandLineTool
19 | hints:
20 | DockerRequirement:
21 | # dockerPull: xbrain:airflow-docker
22 | dockerPull: aplbrain/xbrain:latest
23 | baseCommand: python
24 | arguments: ["/app/process-xbrain.py", "classify"]
25 | inputs:
26 | input:
27 | type: File
28 | inputBinding:
29 | position: 1
30 | prefix: -i
31 | output_name:
32 | type: string
33 | inputBinding:
34 | position: 2
35 | prefix: -o
36 | classifier:
37 | type: File
38 | inputBinding:
39 | position: 3
40 | prefix: -c
41 | ram_amount:
42 | type: int?
43 | inputBinding:
44 | position: 4
45 | prefix: --ram
46 | num_threads:
47 | type: int?
48 | inputBinding:
49 | position: 5
50 | prefix: --threads
51 | outputs:
52 | membrane_probability_map:
53 | type: File
54 | outputBinding:
55 | glob: $(inputs.output_name)
56 |
--------------------------------------------------------------------------------
/saber/xbrain/tools/optimize_supervised.cwl:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | #!/usr/bin/env cwl-runner
16 |
17 | cwlVersion: v1.0
18 | class: CommandLineTool
19 | hints:
20 | DockerRequirement:
21 | dockerPull: xbrain:airflow-docker
22 | baseCommand: python
23 | arguments: ["process-xbrain.py","optimize"]
24 | inputs:
25 | input:
26 | type: File
27 | inputBinding:
28 | position: 1
29 | prefix: -i
30 | output_name:
31 | type: string
32 | inputBinding:
33 | position: 2
34 | prefix: -o
35 | classifier:
36 | type: File?
37 | inputBinding:
38 | position: 3
39 | prefix: -c
40 | groundtruth:
41 | type: File
42 | inputBinding:
43 | position: 4
44 | prefix: --cellgt
45 | threshold:
46 | type: float?
47 | inputBinding:
48 | prefix: --threshold
49 | position: 5
50 | stop:
51 | type: float?
52 | inputBinding:
53 | prefix: --stop
54 | position: 6
55 | initial_template_size:
56 | type: int?
57 | inputBinding:
58 | prefix: --initial-template-size
59 | position: 7
60 | dilation:
61 | type: int?
62 | inputBinding:
      # NOTE(review): spelled '--dialation' (sic) while vessel_segment_nos3.cwl
      # uses '--dilation' -- confirm against process-xbrain.py's optimize flags.
      prefix: --dialation
64 | position: 8
65 | max_cells:
66 | type: int?
67 | inputBinding:
68 | prefix: --max-cells
69 | position: 9
70 | outputs:
71 | metric_score:
72 | type: File
73 | outputBinding:
74 | glob: $(inputs.output_name)
75 |
--------------------------------------------------------------------------------
/saber/xbrain/tools/optimize_unsupervised.cwl:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | #!/usr/bin/env cwl-runner
16 |
17 | cwlVersion: v1.0
18 | class: CommandLineTool
19 | hints:
20 | DockerRequirement:
21 | dockerPull: xbrain:airflow-docker
22 | baseCommand: python
23 | arguments: ["unsup_celldetect.py"]
24 | inputs:
25 | input:
26 | type: File
27 | inputBinding:
28 | position: 1
29 | prefix: -i
30 | output_name:
31 | type: string
32 | inputBinding:
33 | position: 2
34 | prefix: -o
35 | groundtruth:
36 | type: File?
37 | inputBinding:
38 | position: 3
39 | prefix: --groundtruth
40 | threshold:
41 | type: float?
42 | inputBinding:
43 | prefix: --pthreshold
44 | position: 4
45 | stop:
46 | type: float?
47 | inputBinding:
48 | prefix: --presidual
49 | position: 5
50 | initial_template_size:
51 | type: int?
52 | inputBinding:
53 | prefix: --spheresz
54 | position: 6
55 | dilation:
56 | type: int?
57 | inputBinding:
58 | prefix: --dialationsz
59 | position: 8
60 | max_cells:
61 | type: int?
62 | inputBinding:
63 | prefix: --maxnumcells
64 | position: 9
65 | num_samp:
66 | type: int?
67 | inputBinding:
68 | prefix: --numsamp
69 | position: 10
70 | num_comp:
71 | type: int?
72 | inputBinding:
73 | prefix: --numcomp
74 | position: 11
75 | erode:
76 | type: int?
77 | inputBinding:
78 | prefix: --erodesz
79 | position: 12
80 | metrics:
81 | type: int?
82 | inputBinding:
83 | prefix: --metrics
84 | position: 13
85 | outputs:
86 | metric_score:
87 | type: File
88 | outputBinding:
89 | glob: $(inputs.output_name)
90 |
--------------------------------------------------------------------------------
/saber/xbrain/tools/unsup_cell_detect_3D_nos3.cwl:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | #!/usr/bin/env cwl-runner
16 |
17 | cwlVersion: v1.0
18 | class: CommandLineTool
19 | hints:
20 | DockerRequirement:
21 | # dockerPull: xbrain:airflow-docker
22 | dockerPull: aplbrain/xbrain:latest
23 | baseCommand: python
24 | arguments: ["/app/unsupervised_celldetect.py","detect3D"]
25 | inputs:
26 | input:
27 | type: File
28 | inputBinding:
29 | position: 1
30 | prefix: -i
31 | output_name:
32 | type: string
33 | inputBinding:
34 | position: 2
35 | prefix: -o
36 | dense_output_name:
37 | type: string
38 | inputBinding:
39 | position: 3
40 | prefix: --denseoutput
41 | threshold:
42 | type: float?
43 | inputBinding:
44 | prefix: --pthreshold
45 | position: 4
46 | stop:
47 | type: float?
48 | inputBinding:
49 | prefix: --presidual
50 | position: 5
51 | initial_template_size:
52 | type: int?
53 | inputBinding:
54 | prefix: --spheresz
55 | position: 6
56 | dilation:
57 | type: int?
58 | inputBinding:
59 | prefix: --dilationsz
60 | position: 7
61 | max_cells:
62 | type: int?
63 | inputBinding:
64 | prefix: --maxnumcells
65 | position: 8
66 | outputs:
67 | cell_detect_results:
68 | type: File
69 | outputBinding:
70 | glob: $(inputs.output_name)
71 | dense_output:
72 | type: File
73 | outputBinding:
74 | glob: $(inputs.dense_output_name)
75 |
--------------------------------------------------------------------------------
/saber/xbrain/tools/unsup_cell_detect_nos3.cwl:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | #!/usr/bin/env cwl-runner
16 |
17 | cwlVersion: v1.0
18 | class: CommandLineTool
19 | hints:
20 | DockerRequirement:
21 | # dockerPull: xbrain:airflow-docker
22 | dockerPull: aplbrain/xbrain:latest
23 | baseCommand: python
24 | arguments: ["/app/unsupervised_celldetect.py","detect"]
25 | inputs:
26 | input:
27 | type: File
28 | inputBinding:
29 | position: 1
30 | prefix: -i
31 | output_name:
32 | type: string
33 | inputBinding:
34 | position: 2
35 | prefix: -o
36 | threshold:
37 | type: float?
38 | inputBinding:
39 | prefix: --pthreshold
40 | position: 4
41 | stop:
42 | type: float?
43 | inputBinding:
44 | prefix: --presidual
45 | position: 5
46 | initial_template_size:
47 | type: int?
48 | inputBinding:
49 | prefix: --spheresz
50 | position: 6
51 | dilation:
52 | type: int?
53 | inputBinding:
54 | prefix: --dilationsz
55 | position: 7
56 | max_cells:
57 | type: int?
58 | inputBinding:
59 | prefix: --maxnumcells
60 | position: 8
61 | outputs:
62 | cell_detect_results:
63 | type: File
64 | outputBinding:
65 | glob: $(inputs.output_name)
66 |
67 |
--------------------------------------------------------------------------------
/saber/xbrain/tools/unsup_membrane_classify_3D_nos3.cwl:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | #!/usr/bin/env cwl-runner
16 |
17 | cwlVersion: v1.0
18 | class: CommandLineTool
19 | hints:
20 | DockerRequirement:
21 | # dockerPull: xbrain:airflow-docker
22 | dockerPull: aplbrain/xbrain:latest
23 | baseCommand: python
24 | arguments: ["/app/unsupervised_celldetect.py","classify3D"]
25 | inputs:
26 | input:
27 | type: File
28 | inputBinding:
29 | position: 1
30 | prefix: -i
31 | output_name:
32 | type: string
33 | inputBinding:
34 | position: 2
35 | prefix: -o
36 | num_samp:
37 | type: int?
38 | inputBinding:
39 | position: 4
40 | prefix: --numsamp
41 | num_comp:
42 | type: int?
43 | inputBinding:
44 | position: 5
45 | prefix: --numcomp
46 | vessel_thres:
47 | type: float?
48 | inputBinding:
49 | position: 6
50 | prefix: --vesselthres
51 | min_size:
52 | type: float?
53 | inputBinding:
54 | position: 7
55 | prefix: --minsize
56 | cell_class:
57 | type: int?
58 | inputBinding:
59 | position: 8
60 | prefix: --cellclass
61 |
62 | outputs:
63 | membrane_probability_map:
64 | type: File
65 | outputBinding:
66 | glob: $(inputs.output_name)
67 |
68 |
--------------------------------------------------------------------------------
/saber/xbrain/tools/unsup_membrane_classify_nos3.cwl:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | #!/usr/bin/env cwl-runner
16 |
17 | cwlVersion: v1.0
18 | class: CommandLineTool
19 | hints:
20 | DockerRequirement:
21 | # dockerPull: xbrain:airflow-docker
22 | dockerPull: aplbrain/xbrain:latest
23 | baseCommand: python
24 | arguments: ["/app/unsupervised_celldetect.py","classify"]
25 | inputs:
26 | input:
27 | type: File
28 | inputBinding:
29 | position: 1
30 | prefix: -i
31 | output_name:
32 | type: string
33 | inputBinding:
34 | position: 2
35 | prefix: -o
36 | num_samp:
37 | type: int?
38 | inputBinding:
39 | position: 4
40 | prefix: --numsamp
41 | num_comp:
42 | type: int?
43 | inputBinding:
44 | position: 5
45 | prefix: --numcomp
46 | erodesz:
47 | type: int?
48 | inputBinding:
      # was position 5, duplicating num_comp's slot; bumped to keep bindings ordered
      position: 6
50 | prefix: --erodesz
51 |
52 | outputs:
53 | membrane_probability_map:
54 | type: File
55 | outputBinding:
56 | glob: $(inputs.output_name)
57 |
--------------------------------------------------------------------------------
/saber/xbrain/tools/unsup_metrics_nos3.cwl:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | #!/usr/bin/env cwl-runner
16 |
17 | cwlVersion: v1.0
18 | class: CommandLineTool
19 | hints:
20 | DockerRequirement:
21 | # dockerPull: xbrain:airflow-docker
22 | dockerPull: aplbrain/xbrain:latest
23 | baseCommand: python
24 | arguments: ["/app/unsupervised_celldetect.py","metrics3D"]
25 | inputs:
26 | input:
27 | type: File
28 | inputBinding:
29 | position: 1
30 | prefix: -i
31 | output_name:
32 | type: string
33 | inputBinding:
34 | position: 2
35 | prefix: -o
36 | ground_truth:
37 | type: File
38 | inputBinding:
39 | position: 3
40 | prefix: --groundtruth
41 | initial_template_size:
42 | type: int
43 | inputBinding:
44 | position: 4
45 | prefix: --spheresz
46 | outputs:
47 | metrics:
48 | type: File
49 | outputBinding:
50 | glob: $(inputs.output_name)
51 |
52 |
--------------------------------------------------------------------------------
/saber/xbrain/tools/vessel_segment_nos3.cwl:
--------------------------------------------------------------------------------
1 | # Copyright 2019 The Johns Hopkins University Applied Physics Laboratory
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | #!/usr/bin/env cwl-runner
16 |
17 | cwlVersion: v1.0
18 | class: CommandLineTool
19 | hints:
20 | DockerRequirement:
21 | # dockerPull: xbrain:airflow-docker
22 | dockerPull: aplbrain/xbrain:latest
# Invoke via the python interpreter with the image's absolute script path,
# matching the other aplbrain/xbrain:latest tools in this directory.
baseCommand: python
arguments: ["/app/process-xbrain.py", "segment"]
25 | inputs:
26 | input:
27 | type: File
28 | inputBinding:
29 | position: 1
30 | prefix: -i
31 | output_name:
32 | type: string
33 | inputBinding:
34 | position: 2
35 | prefix: -o
36 | threshold:
37 | type: float?
38 | inputBinding:
39 | prefix: --threshold
40 | position: 4
41 | dilation:
42 | type: int?
43 | inputBinding:
44 | prefix: --dilation
45 | position: 5
46 | minimum:
47 | type: int?
48 | inputBinding:
49 | prefix: --minimum
50 | position: 6
51 | vessel_index:
52 | type: int?
53 | inputBinding:
54 | prefix: --vessel-index
55 | position: 7
56 | outputs:
57 | vessel_segment_results:
58 | type: File
59 | outputBinding:
60 | glob: $(inputs.output_name)
61 |
--------------------------------------------------------------------------------
/saber/xbrain/unets/Dockerfile:
--------------------------------------------------------------------------------
1 | # Copyright 2018 The Johns Hopkins University Applied Physics Laboratory.
2 | # Licensed under the Apache License, Version 2.0 (the "License");
3 | # you may not use this file except in compliance with the License.
4 | # You may obtain a copy of the License at
5 | # http://www.apache.org/licenses/LICENSE-2.0
6 | # Unless required by applicable law or agreed to in writing, software
7 | # distributed under the License is distributed on an "AS IS" BASIS,
8 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
9 | # See the License for the specific language governing permissions and
10 | # limitations under the License.
11 |
12 | FROM kaixhin/cuda-theano:8.0
13 |
14 | # Install dependencies
15 | RUN apt-get update && apt-get install -y \
16 | libhdf5-dev \
17 | python-h5py \
18 | python-yaml \
19 | python3-pip \
20 | vim
21 |
22 | RUN pip install --upgrade pip
23 | # Upgrade six
24 | RUN pip install --upgrade six
25 |
26 | # Clone Keras repo and move into it
27 | #RUN cd /root && git clone https://github.com/fchollet/keras.git && cd keras && \
28 | # # Install
29 | # python setup.py install
30 | RUN pip install --ignore-installed keras
31 | #==1.2.2
32 | #RUN pip install --ignore-installed pygpu
33 | #added
34 |
35 | # Install intern
36 | RUN pip install --ignore-installed intern
37 | #RUN pip install awscli
38 | #RUN pip install boto3
39 | RUN pip install --ignore-installed SimpleITK
40 |
41 | #Default python2 doesn't include enum34
42 | RUN pip install enum34
43 |
44 | #Theano needs numpy ver < 1.16.0 to work
45 | RUN pip install numpy==1.15.4
46 |
47 | # Create workspace
48 | # TODO: Re-org this to use git clone and S3
49 | WORKDIR /src
50 | #COPY ./weights/*.hdf5 /src/weights/
51 | #COPY ./aws-batch/setup/startup.sh /src/
52 | #COPY ./*.json /src/
53 | COPY ./*.py /src/
54 |
55 | ENV KERAS_BACKEND=theano
56 | ENV PATH=/src:$PATH
57 |
58 | ENV THEANO_FLAGS="device=cuda0"
59 | #ENV THEANO_FLAGS='device=cuda,lib.cnmem=1'
60 |
61 | CMD ["python", "train_unet_docker.py"]
62 |
--------------------------------------------------------------------------------
/saber/xbrain/unets/Dockerfile.train:
--------------------------------------------------------------------------------
1 | # Copyright 2018 The Johns Hopkins University Applied Physics Laboratory.
2 | # Licensed under the Apache License, Version 2.0 (the "License");
3 | # you may not use this file except in compliance with the License.
4 | # You may obtain a copy of the License at
5 | # http://www.apache.org/licenses/LICENSE-2.0
6 | # Unless required by applicable law or agreed to in writing, software
7 | # distributed under the License is distributed on an "AS IS" BASIS,
8 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
9 | # See the License for the specific language governing permissions and
10 | # limitations under the License.
11 |
12 | FROM kaixhin/cuda-theano:7.5
13 |
14 | # Install dependencies
15 | RUN apt-get update && apt-get install -y \
16 | libhdf5-dev \
17 | python-h5py \
18 | python-yaml \
19 | vim
20 |
21 | RUN pip install --upgrade pip
22 | # Upgrade six
23 | RUN pip install --upgrade six
24 |
25 | # Clone Keras repo and move into it
26 | #RUN cd /root && git clone https://github.com/fchollet/keras.git && cd keras && \
27 | # # Install
28 | # python setup.py install
29 | RUN pip install --ignore-installed keras
30 | #==1.2.2
31 | #RUN pip install --ignore-installed pygpu
32 | #added
33 |
34 | # Install intern
35 | RUN pip install --ignore-installed intern
36 | #RUN pip install awscli
37 | #RUN pip install boto3
38 | RUN pip install --ignore-installed SimpleITK
39 |
40 | # Create workspace
41 | # TODO: Re-org this to use git clone and S3
42 | WORKDIR /src
43 | #COPY ./weights/*.hdf5 /src/weights/
44 | #COPY ./aws-batch/setup/startup.sh /src/
45 | #COPY ./*.json /src/
46 | COPY ./*.py /src/
47 |
48 | ENV KERAS_BACKEND=theano
49 | ENV PATH=/src:$PATH
50 |
51 | #BLAS FOR THEANO
52 | RUN apt-get install -y libatlas-base-dev
53 | #ENV THEANO_FLAGS=blas.ldflags='-lf77blas -latlas -lgfortran'
54 |
55 | ENV DEVICE="cuda0"
56 | ENV GPUARRAY_CUDA_VERSION=75
57 | ENV THEANO_FLAGS="device=cuda0,blas.ldflags='-lf77blas -latlas -lgfortran',dnn.include_path=/usr/local/cuda/include"
58 | #ENV THEANO_FLAGS='device=cuda,lib.cnmem=1'
59 |
60 | RUN apt-get install -y python3-pip
61 |
62 | CMD ["python", "train_unet_docker.py"]
63 |
--------------------------------------------------------------------------------
/saber/xbrain/unets/deploy_unet_docker.py:
--------------------------------------------------------------------------------
1 | """
2 | Copyright 2018 The Johns Hopkins University Applied Physics Laboratory.
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 | http://www.apache.org/licenses/LICENSE-2.0
7 | Unless required by applicable law or agreed to in writing, software
8 | distributed under the License is distributed on an "AS IS" BASIS,
9 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
10 | See the License for the specific language governing permissions and
11 | limitations under the License.
12 | """
13 |
import os
import sys
import time
import numpy as np
import argparse

import image_handler as ih

from cnn_tools import *
from data_tools import *

# Keras 2.x replacement for K.set_image_dim_ordering('th'): the unet weights
# assume channels-first tensors (slice, channel, Y, X).
K.set_image_data_format('channels_first')


def _parse_args():
    """Build the CLI parser for unet deployment and return parsed args."""
    parser = argparse.ArgumentParser(
        description='Deploying Unets for Probability Mapping')
    parser.add_argument(
        '--img_file',
        required=True,
        help='Local image file'
    )
    parser.add_argument(
        '--lbl_file',
        required=False,
        help='Groundtruth image file'
    )
    parser.add_argument(
        '--weights_file',
        required=True,
        help='Weights file to deploy'
    )
    parser.add_argument(
        '--tile_size',
        required=False,
        type=int,
        default=256,
        help='Size of image chunks processed by network'
    )
    parser.add_argument(
        '--output',
        required=True,
        help='Inference output file (npy)'
    )
    return parser.parse_args()


if __name__ == '__main__':
    # -------------------------------------------------------------------------
    args = _parse_args()

    # Load the (X, Y, Z) volume and reorder to per-slice (Z, Y, X) layout.
    y_data = np.load(args.img_file)
    y_data = np.transpose(y_data)
    print('Input data has shape: {}'.format(y_data.shape))
    # Insert a channel axis -> (Z, chan, Y, X) and rescale 8-bit data to [0, 1].
    y_data = y_data[:, np.newaxis, :, :].astype(np.float32)
    y_data /= 255.

    # create_unet / deploy_model come from cnn_tools (star import above).
    tile_size = [args.tile_size, args.tile_size]
    model = create_unet((1, tile_size[0], tile_size[1]))
    model.load_weights(args.weights_file)
    print('Deploying model...')
    y_hat = deploy_model(y_data, model)
    np.save(args.output, y_hat)

    # Optional: score the inference against a groundtruth volume.
    # NOTE: the "F1: {score}" line format is parsed downstream (see the
    # saber score_format hint in xbrain_unets_train.cwl) -- keep it stable.
    if args.lbl_file:
        y_hat = np.transpose(np.squeeze(y_hat))  # X, Y, Z
        y_true = np.load(args.lbl_file)  # X, Y, Z
        print("Output data has shape: {}".format(y_hat.shape))
        print('Groundtruth data has shape: {}'.format(y_true.shape))
        f1 = f1_score(y_true, y_hat)
        print("F1: {}".format(f1))
--------------------------------------------------------------------------------
/saber/xbrain/unets/deploy_unets.cwl:
--------------------------------------------------------------------------------
1 | cwlVersion: v1.0
2 | class: CommandLineTool
3 | hints:
4 | DockerRequirement:
5 | dockerPull: aplbrain/unets
6 | baseCommand: python
7 | arguments: ["/src/deploy_unet_docker.py"]
8 | inputs:
9 | img_file:
10 | type: File
11 | inputBinding:
12 | position: 1
13 | prefix: --img_file
14 | lbl_file:
15 | type: File?
16 | inputBinding:
17 | position: 2
18 | prefix: --lbl_file
19 | weights_file:
20 | type: File
21 | inputBinding:
22 | prefix: --weights_file
23 | position: 3
24 | tile_size:
25 | type: int?
26 | inputBinding:
27 | prefix: --tile_size
28 | position: 4
29 | output:
30 | type: string
31 | inputBinding:
32 | prefix: --output
33 | position: 5
34 |
35 | outputs:
36 | membrane_probability_map:
37 | type: File
38 | outputBinding:
39 | glob: $(inputs.output)
--------------------------------------------------------------------------------
/saber/xbrain/workflows/parameterization.yml:
--------------------------------------------------------------------------------
1 | x:
2 | range:
3 | start: 0
4 | stop: 10000
5 | step: 10
6 | parameters:
7 | - xmin
8 | - xmax
9 | y:
10 | range:
11 | start: 0
12 | stop: 10000
13 | step: 10
14 | parameters:
15 | - ymin
16 | - ymax
17 | z:
18 | range:
19 | start: 0
20 | stop: 10000
21 | step: 10
22 | parameters:
23 | - zmin
24 | - zmax
25 |
--------------------------------------------------------------------------------
/saber/xbrain/workflows/xbrain-example-job.yml:
--------------------------------------------------------------------------------
1 |
2 |
3 | data:
4 | class: File
5 | path: V2_imgdata_gt.npy
6 | classifier:
7 | class: File
8 | path: xbrain_vessel_seg_v7.ilp
9 | membrane_classify_output_name: membrane_classify_output.npy
10 | cell_detect_output_name: cell_detect_output.npy
11 | vessel_segment_output_name: vessel_segment_output.npy
12 | bucket: saber-batch
13 | # ram_amount: int?
14 | # num_threads: int?
15 | # detect_threshold: float?
16 | # stop: float?
17 | # initial_template_size: int?
18 | # detect_dilation: int?
19 | # max_cells: int?
20 | # segment_threshold: float?
21 | # segment_dilation: int?
22 | # minimum: int?
--------------------------------------------------------------------------------
/saber/xbrain/workflows/xbrain.cwl:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env cwl-runner
2 |
3 | cwlVersion: v1.0
4 | class: Workflow
5 | inputs:
6 | # i/o
7 | data: File
8 | membrane_classify_output_name: string
9 | cell_detect_output_name: string
10 | vessel_segment_output_name: string
11 | bucket: string
12 |
13 | # Cell detect
14 | detect_threshold: float?
15 | stop: float?
16 | initial_template_size: int?
17 | detect_dilation: int?
18 | max_cells: int?
19 | classifier: File
20 | # Membrane classify
21 | ram_amount: int?
22 | num_threads: int?
23 | segment_threshold: float?
24 | segment_dilation: int?
25 | minimum: int?
26 | outputs:
27 | membrane_classify_output:
28 | type: File
29 | outputSource: membrane_classify/membrane_probability_map
30 | cell_detect_output:
31 | type: File
32 | outputSource: cell_detect/cell_detect_results
33 | vessel_segment_output:
34 | type: File
35 | outputSource: vessel_segment/vessel_segment_results
36 |
37 | steps:
38 | membrane_classify:
39 | run: ../tools/membrane_classify.cwl
40 | in:
41 | bucket: bucket
42 | input: data
43 | output_name: membrane_classify_output_name
44 | classifier: classifier
45 | ram_amount: ram_amount
46 | num_threads: num_threads
47 | out: [membrane_probability_map]
48 | cell_detect:
49 | run: ../tools/cell_detect.cwl
50 | in:
51 | bucket: bucket
52 | input: membrane_classify/membrane_probability_map
53 | output_name: cell_detect_output_name
54 | threshold: detect_threshold
55 | stop: stop
56 | initial_template_size: initial_template_size
57 | dilation: detect_dilation
58 | max_cells: max_cells
59 | out: [cell_detect_results]
60 | vessel_segment:
61 | run: ../tools/vessel_segment.cwl
62 | in:
63 | bucket: bucket
64 | input: membrane_classify/membrane_probability_map
65 | output_name: vessel_segment_output_name
66 | threshold: segment_threshold
67 | dilation: segment_dilation
68 | minimum: minimum
69 | out: [vessel_segment_results]
70 |
--------------------------------------------------------------------------------
/saber/xbrain/workflows/xbrain_supervised.cwl:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env cwl-runner
2 |
3 | cwlVersion: v1.0
4 | class: Workflow
5 | inputs:
6 | data: File
7 | classifier: File
8 | membrane_classify_output_name: string
9 | cell_detect_output_name: string
10 | vessel_segment_output_name: string
11 | ram_amount: int?
12 | num_threads: int?
13 | detect_threshold: float?
14 | stop: float?
15 | initial_template_size: int?
16 | detect_dilation: int?
17 | max_cells: int?
18 | segment_threshold: float?
19 | segment_dilation: int?
20 | minimum: int?
21 | outputs:
22 | membrane_classify_output:
23 | type: File
24 | outputSource: membrane_classify/membrane_probability_map
25 | cell_detect_output:
26 | type: File
27 | outputSource: cell_detect/cell_detect_results
28 | vessel_segment_output:
29 | type: File
30 | outputSource: vessel_segment/vessel_segment_results
31 |
32 | steps:
33 | membrane_classify:
34 | run: ../tools/membrane_classify.cwl
35 | in:
36 | input: data
37 | output_name: membrane_classify_output_name
38 | classifier: classifier
39 | ram_amount: ram_amount
40 | num_threads: num_threads
41 | out: [membrane_probability_map]
42 | cell_detect:
43 | run: ../tools/cell_detect.cwl
44 | in:
45 | input: membrane_classify/membrane_probability_map
46 | output_name: cell_detect_output_name
47 | classifier: classifier
48 | threshold: detect_threshold
49 | stop: stop
50 | initial_template_size: initial_template_size
51 | dilation: detect_dilation
52 | max_cells: max_cells
53 | out: [cell_detect_results]
54 | vessel_segment:
55 | run: ../tools/vessel_segment.cwl
56 | in:
57 | input: membrane_classify/membrane_probability_map
58 | output_name: vessel_segment_output_name
59 | classifier: classifier
60 | threshold: segment_threshold
61 | dilation: segment_dilation
62 | minimum: minimum
63 | out: [vessel_segment_results]
64 |
--------------------------------------------------------------------------------
/saber/xbrain/workflows/xbrain_supervised_optimization.cwl:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env cwl-runner
2 |
3 | cwlVersion: v1.0
4 | class: Workflow
5 | inputs:
6 | data: File
7 | classifier: File?
8 | cell_gt: File
9 | optimize_output_name: string
10 | ram_amount: int?
11 | num_threads: int?
12 | detect_threshold: float?
13 | stop: float?
14 | initial_template_size: int?
15 | detect_dilation: int?
16 | max_cells: int?
17 |
18 | outputs:
19 | metrics_output:
20 | type: File
    # optimize_supervised.cwl's output id is 'metric_score', not 'metrics'
    outputSource: optimize/metric_score
steps:
  optimize:
    run: ../tools/optimize_supervised.cwl
    in:
      input: data
      output_name: optimize_output_name
      classifier: classifier
      # tool's groundtruth input id is 'groundtruth' (prefix --cellgt)
      groundtruth: cell_gt
      # NOTE(review): optimize_supervised.cwl declares no ram_amount/num_threads
      # inputs, so these connections were invalid and are disabled.
      # ram_amount: ram_amount
      # num_threads: num_threads
      threshold: detect_threshold
      stop: stop
      initial_template_size: initial_template_size
      dilation: detect_dilation
      max_cells: max_cells
    out: [metric_score]
38 |
--------------------------------------------------------------------------------
/saber/xbrain/workflows/xbrain_unets_train.cwl:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env cwl-runner
2 |
3 | cwlVersion: v1.0
4 | class: Workflow
5 | inputs:
6 | use_boss: int
7 | img_file: File?
8 | lbl_file: File?
9 | coord: string?
10 | token: string?
11 | coll: string?
12 | exp: string?
13 | chan_labels: string?
14 | chan_img: string?
15 | dtype_img: string?
16 | dtype_lbl: string?
17 | res: int?
18 | xmin: int?
19 | xmax: int?
20 | ymin: int?
21 | ymax: int?
22 | zmin: int?
23 | zmax: int?
24 | train_pct: float?
25 | n_epochs: int?
26 | mb_size: int?
27 | n_mb_per_epoch: int?
28 | learning_rate: float?
29 | beta1: float?
30 | beta2: float?
31 | save_freq: int?
32 | do_warp: boolean?
33 | tile_size: int?
34 | weights_file: File?
35 | output: string
36 | score_out: string
37 | outputs:
38 | train_output:
39 | type: File
40 | outputSource: optimize/classifier_weights
41 | steps:
42 | optimize:
43 | run: ../tools/membrane_unets_train.cwl
44 | in:
45 | use_boss: use_boss
46 | img_file: img_file
47 | lbl_file: lbl_file
48 | coord: coord
49 | token: token
50 | coll: coll
51 | exp: exp
52 | chan_labels: chan_labels
53 | chan_img: chan_img
54 | dtype_img: dtype_img
55 | dtype_lbl: dtype_lbl
56 | res: res
57 | xmin: xmin
58 | xmax: xmax
59 | ymin: ymin
60 | ymax: ymax
61 | zmin: zmin
62 | zmax: zmax
63 | train_pct: train_pct
64 | n_epochs: n_epochs
65 | mb_size: mb_size
66 | n_mb_per_epoch: n_mb_per_epoch
67 | learning_rate: learning_rate
68 | beta1: beta1
69 | beta2: beta2
70 | save_freq: save_freq
71 | do_warp: do_warp
72 | tile_size: tile_size
73 | weights_file: weights_file
74 | output: output
75 | score_out: score_out
76 | out: [classifier_weights,scores]
77 | hints:
78 | saber:
79 | score_format: "F1: {score}"
80 | local: True
81 |
--------------------------------------------------------------------------------
/saber/xbrain/workflows/xbrain_unsupervised_optimization.cwl:
--------------------------------------------------------------------------------
#!/usr/bin/env cwl-runner

# Workflow: unsupervised xbrain optimization (2D pipeline).
# Three-stage chain: membrane classification -> cell detection -> metrics
# against ground truth. Each stage's File output is also exported.
cwlVersion: v1.0
class: Workflow
inputs:
  data: File                      # input image volume
  cell_gt: File                   # ground-truth cells for the metrics step
  optimize_output_name: string    # shared output_name passed to every step
  detect_threshold: float?
  stop: float?
  initial_template_size: int?
  detect_dilation: int?
  max_cells: int?
  num_samp: int?
  num_comp: int?
  erode: int?
  # NOTE(review): declared but never wired to any step input; kept so existing
  # job files that set it remain valid.
  metrics: int?

outputs:
  membrane_classify_output:
    type: File
    outputSource: membrane_classify/membrane_probability_map
  cell_detect_output:
    type: File
    outputSource: cell_detect/cell_detect_results
  metrics_output:
    type: File
    # BUG FIX: was `optimize/metrics`, but no step named `optimize` exists in
    # this workflow — the metrics file is produced by the `metrics` step.
    outputSource: metrics/metrics
steps:
  membrane_classify:
    run: ../tools/unsup_membrane_nos3.cwl
    in:
      input: data
      output_name: optimize_output_name
      num_samp: num_samp
      num_comp: num_comp
      erode: erode
    out: [membrane_probability_map]
  cell_detect:
    run: ../tools/unsup_cell_detect_nos3.cwl
    in:
      input: membrane_classify/membrane_probability_map
      output_name: optimize_output_name
      threshold: detect_threshold
      stop: stop
      initial_template_size: initial_template_size
      dilation: detect_dilation
      max_cells: max_cells
    out: [cell_detect_results]
  metrics:
    run: ../tools/unsup_metrics_nos3.cwl
    in:
      input: cell_detect/cell_detect_results
      output_name: optimize_output_name
      groundtruth: cell_gt
    out: [metrics]
57 |
--------------------------------------------------------------------------------
/saber/xbrain/workflows/xbrain_unsupervised_optimization3D.cwl:
--------------------------------------------------------------------------------
#!/usr/bin/env cwl-runner

# Workflow: unsupervised xbrain optimization (3D pipeline).
# Identical structure to the 2D variant but runs the *_3D_nos3 tools:
# membrane classification -> cell detection -> metrics vs. ground truth.
cwlVersion: v1.0
class: Workflow
inputs:
  data: File                      # input image volume
  cell_gt: File                   # ground-truth cells for the metrics step
  optimize_output_name: string    # shared output_name passed to every step
  detect_threshold: float?
  stop: float?
  initial_template_size: int?
  detect_dilation: int?
  max_cells: int?
  num_samp: int?
  num_comp: int?
  erode: int?
  # NOTE(review): declared but never wired to any step input; kept so existing
  # job files that set it remain valid.
  metrics: int?

outputs:
  membrane_classify_output:
    type: File
    outputSource: membrane_classify/membrane_probability_map
  cell_detect_output:
    type: File
    outputSource: cell_detect/cell_detect_results
  metrics_output:
    type: File
    # BUG FIX: was `optimize/metrics`, but no step named `optimize` exists in
    # this workflow — the metrics file is produced by the `metrics` step.
    outputSource: metrics/metrics
steps:
  membrane_classify:
    run: ../tools/unsup_membrane_3D_nos3.cwl
    in:
      input: data
      output_name: optimize_output_name
      num_samp: num_samp
      num_comp: num_comp
      erode: erode
    out: [membrane_probability_map]
  cell_detect:
    run: ../tools/unsup_cell_detect_3D_nos3.cwl
    in:
      input: membrane_classify/membrane_probability_map
      output_name: optimize_output_name
      threshold: detect_threshold
      stop: stop
      initial_template_size: initial_template_size
      dilation: detect_dilation
      max_cells: max_cells
    out: [cell_detect_results]
  metrics:
    run: ../tools/unsup_metrics_3D_nos3.cwl
    in:
      input: cell_detect/cell_detect_results
      output_name: optimize_output_name
      groundtruth: cell_gt
    out: [metrics]
57 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python

"""Packaging script for the SABER ``conduit`` tool.

BUG FIX: this previously used ``distutils.core.setup``, which does not
understand ``install_requires``/``setup_requires`` (it warns "Unknown
distribution option" and drops them), so the pinned dependencies were never
enforced on install. ``setuptools.setup`` is the documented replacement
(distutils itself was removed from the stdlib in Python 3.12) and honors
``install_requires``.
"""

from setuptools import setup

# Runtime dependencies, pinned to the versions the project was written
# against. Normalized from the legacy distutils ``pkg(==ver)`` requirement
# syntax to the standard PEP 508 ``pkg==ver`` form.
deps = [
    "watchdog==0.9.0",
    "parse==1.9.0",
    "boto3==1.9.79",
    "docker==3.7.0",
    "datajoint==0.11.3",
    "cwltool==2.0",
]

setup(
    name='conduit',
    version='1.0',
    description='Conduit tool for SABER',
    author='Raphael Norman-Tenazas',
    author_email='raphael.norman-tenazas@jhuapl.edu',
    url='https://github.com/aplbrain/saber',
    packages=['conduit', 'conduit.utils'],
    scripts=['conduit/conduit'],
    install_requires=deps,
    # NOTE: the old file also passed ``setup_requires=deps``. Runtime
    # dependencies are not needed to *build* this package, and listing them
    # there forces an unnecessary egg install at build time, so it is dropped.
)
24 |
--------------------------------------------------------------------------------