├── .dockerignore
├── .gitattributes
├── .gitignore
├── INSTALLATION.md
├── MANIFEST.in
├── README.md
├── analyze_folder.py
├── deepknee-backend-broker
│   ├── .dockerignore
│   ├── package-lock.json
│   ├── package.json
│   └── server.js
├── deepknee-frontend
│   ├── .dockerignore
│   ├── .gitignore
│   ├── README.md
│   ├── package-lock.json
│   ├── package.json
│   ├── public
│   │   ├── favicon.png
│   │   ├── index.html
│   │   └── manifest.json
│   └── src
│       ├── App.css
│       ├── App.js
│       ├── App.test.js
│       ├── FileUploader.js
│       ├── Footer.js
│       ├── ProgressCircularBar.js
│       ├── SIOClient.js
│       ├── index.js
│       └── registerServiceWorker.js
├── deploy.sh
├── docker
│   ├── BrokerDockerfile
│   ├── Dockerfile.cpu
│   ├── Dockerfile.gpu
│   ├── UIDockerfile
│   ├── docker-compose-cpu.yml
│   └── docker-compose-gpu.yml
├── fetch_snapshots.sh
├── ouludeepknee
│   ├── __init__.py
│   ├── data
│   │   ├── __init__.py
│   │   └── utils.py
│   ├── inference
│   │   ├── __init__.py
│   │   ├── app.py
│   │   ├── pipeline.py
│   │   └── utils.py
│   └── train
│       ├── __init__.py
│       ├── augmentation.py
│       ├── dataset.py
│       ├── model.py
│       ├── train.py
│       ├── train_utils.py
│       └── val_utils.py
├── pacs-integration
│   ├── change_polling.py
│   └── orthanc.json
├── pics
│   ├── 15_2_R_1_1_1_3_1_0_own.jpg
│   ├── 235_2_R_3_3_0_0_1_1_own.jpg
│   ├── 77_2_R_2_0_0_0_0_1_own.jpg
│   ├── deepknee-architecture.png
│   └── deepkneeui.png
├── rebuild_docker_images.sh
├── run_deepknee_backend.sh
├── run_deepknee_backend_broker.sh
├── run_deepknee_ui.sh
├── run_kneel.sh
└── setup.py
/.dockerignore:
--------------------------------------------------------------------------------
1 | snapshots_knee_grading/*
2 | *.egg-info
3 | snapshots_release_kneel/*
4 | snapshots_release/*
5 | deepknee-frontend/*
6 | deepknee-backend-broker/*
7 | pacs-integration/*
8 | pics/*
9 | logs/*
10 | .git/
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | *.pth filter=lfs diff=lfs merge=lfs -text
2 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | deepknee-backend-broker/node_modules/
3 | __pycache__/
4 | *.py[cod]
5 | *$py.class
6 | .DS_Store
7 | *~
8 | # C extensions
9 | *.so
10 | .idea/*
11 | ./snapshots_knee_grading/*
12 | ./snapshots_release_kneel/*
13 | snapshots_knee_grading/
14 | snapshots_release_kneel/
15 | frontend/deepknee-backend-broker/node_modules/
16 |
17 | # Distribution / packaging
18 | .Python
19 | env/
20 | build/
21 | develop-eggs/
22 | dist/
23 | downloads/
24 | eggs/
25 | .eggs/
26 | lib/
27 | lib64/
28 | parts/
29 | sdist/
30 | var/
31 | wheels/
32 | *.egg-info/
33 | .installed.cfg
34 | *.egg
35 |
36 | # PyInstaller
37 | # Usually these files are written by a python script from a template
38 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
39 | *.manifest
40 | *.spec
41 |
42 | # Installer logs
43 | pip-log.txt
44 | pip-delete-this-directory.txt
45 |
46 | # Unit test / coverage reports
47 | htmlcov/
48 | .tox/
49 | .coverage
50 | .coverage.*
51 | .cache
52 | nosetests.xml
53 | coverage.xml
54 | *.cover
55 | .hypothesis/
56 |
57 | # Translations
58 | *.mo
59 | *.pot
60 |
61 | # Django stuff:
62 | *.log
63 | local_settings.py
64 |
65 | # Flask stuff:
66 | instance/
67 | .webassets-cache
68 |
69 | # Scrapy stuff:
70 | .scrapy
71 |
72 | # Sphinx documentation
73 | docs/_build/
74 |
75 | # PyBuilder
76 | target/
77 |
78 | # Jupyter Notebook
79 | .ipynb_checkpoints
80 |
81 | # pyenv
82 | .python-version
83 |
84 | # celery beat schedule file
85 | celerybeat-schedule
86 |
87 | # SageMath parsed files
88 | *.sage.py
89 |
90 | # dotenv
91 | .env
92 |
93 | # virtualenv
94 | .venv
95 | venv/
96 | ENV/
97 |
98 | # Spyder project settings
99 | .spyderproject
100 | .spyproject
101 |
102 | # Rope project settings
103 | .ropeproject
104 |
105 | # mkdocs documentation
106 | /site
107 |
108 | # mypy
109 | .mypy_cache/
110 |
--------------------------------------------------------------------------------
/INSTALLATION.md:
--------------------------------------------------------------------------------
1 | # A detailed installation process of DeepKnee
2 | Tested on a fresh Ubuntu 18.04 LTS
3 |
4 | ## Installing Docker
5 |
6 | 1. Install the prerequisites for `apt` and add the Docker repository to your list of repositories.
7 | ```
8 | sudo apt update
9 | sudo apt install -y apt-transport-https ca-certificates curl software-properties-common
10 | curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
11 | sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu bionic stable"
12 | sudo apt update
13 | ```
14 | Please note that if you use Linux Mint (also Ubuntu-based), you might need to add the repository differently.
15 | In that case, replace the last two lines of the script above with the following:
16 |
17 | ```
18 | echo -e "\ndeb [arch=amd64] https://download.docker.com/linux/ubuntu bionic stable" | sudo tee -a /etc/apt/sources.list
19 | sudo apt update
20 | ```
21 |
22 | 2. Install Docker and check that it works:
23 | ```
24 | sudo apt install -y docker-ce
25 | sudo docker run hello-world
26 | ```
27 |
28 | The lines above will install Docker itself and run the test `hello-world` container.
29 |
30 | 3. Make sure that Docker can be executed without root:
31 | ```
32 | sudo usermod -aG docker ${USER}
33 | newgrp docker
34 | ```
35 |
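To verify that the group change took effect, you can re-run the test container without `sudo` (a quick sanity check, not part of the original steps):

```
docker run hello-world
```
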
36 | 4. Install docker-compose. The easiest path is to use Miniconda:
37 | ```
38 | cd
39 | wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O ~/miniconda.sh
40 | chmod +x miniconda.sh
41 | ./miniconda.sh
42 | ```
43 |
44 | Note: if you use zsh instead of bash, you need to modify your `.zshrc` before proceeding by adding the following at the end of the file (replace `/home/lext` with your own home directory):
45 |
46 | ```
47 | # >>> conda initialize >>>
48 | # !! Contents within this block are managed by 'conda init' !!
49 | __conda_setup="$('/home/lext/miniconda3/bin/conda' 'shell.bash' 'hook' 2> /dev/null)"
50 | if [ $? -eq 0 ]; then
51 | eval "$__conda_setup"
52 | else
53 | if [ -f "/home/lext/miniconda3/etc/profile.d/conda.sh" ]; then
54 | . "/home/lext/miniconda3/etc/profile.d/conda.sh"
55 | else
56 | export PATH="/home/lext/miniconda3/bin:$PATH"
57 | fi
58 | fi
59 | unset __conda_setup
60 | # <<< conda initialize <<<
61 | ```
62 |
63 | Exit your terminal and open it again. Now you should have a `base` conda environment activated.
64 | Install docker-compose:
65 |
66 | ```
67 | pip install docker-compose
68 | ```
69 | 5. Install DeepKnee (this must be run from the root of this repo):
70 | ```
71 | sh deploy.sh cpu
72 | ```
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include README.md LICENSE.txt MANIFEST.in
2 |
3 | recursive-include ouludeepknee *.npy *.pth *.pkl *.csv
4 | recursive-include ouludeepknee *.ipynb *.md
5 | include create_conda_env.sh
6 | include run_experiments_*.sh
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # About
2 | The software in this branch implements an automatic pipeline for osteoarthritis severity assessment from plain radiographs.
3 | It can be used via REST over HTTP or via the DICOM protocol. Furthermore, we have built an easy-to-use web UI.
4 |
5 | The core machine learning pipeline is backed by two papers:
6 | 1. Tiulpin, Aleksei, Iaroslav Melekhov, and Simo Saarakkala.
7 | "KNEEL: Knee Anatomical Landmark Localization Using Hourglass Networks."
8 | Proceedings of the IEEE International Conference on Computer Vision Workshops. 2019.
9 | 2. Tiulpin, Aleksei, et al.
10 | "Automatic knee osteoarthritis diagnosis from plain radiographs: A deep learning-based approach."
11 | Scientific reports 8.1 (2018): 1727.
12 |
13 | The technology was also validated on external data in this paper:
14 | - Vaattovaara, E., Panfilov, E., Tiulpin, A., Niinimäki, T., Niinimäki, J., Saarakkala, S., & Nevalainen, M. T. (2025). Kellgren-Lawrence Grading of Knee Osteoarthritis using Deep Learning: Diagnostic Performance with External Dataset and Comparison with Four Readers. Osteoarthritis and Cartilage Open, 100580.
15 |
16 | # Authors
17 | The whole package was built by Aleksei Tiulpin (@lext). Egor Panfilov (@soupault)
18 | built most of DeepKnee v1: https://github.com/MIPT-Oulu/DeepKnee-web (archived).
19 | The current version of DeepKnee uses only the UI from the previous version.
20 |
21 | # Disclaimer
22 | This software is not a medical device and is intended for use in research settings only.
23 |
24 | ### Running the software
25 | This code requires recent versions of Docker and docker-compose to be installed.
26 | You can follow [INSTALLATION.md](INSTALLATION.md) to install
27 | DeepKnee and its dependencies.
28 |
29 | **Be careful: this app ships with all of its dependencies and weighs around 20GB in total!**
30 |
31 | Execute `sh deploy.sh cpu` to deploy the app on CPU. If you have installed `nvidia-docker`,
32 | you can also deploy on GPU, where inference is 3 times faster. To deploy on GPU, run `sh deploy.sh gpu`.
33 | To test the software, open `localhost:5003` in your browser. Here is an example of the result of processing
34 | a single knee X-ray via the UI:
35 |
36 | ![DeepKnee UI](pics/deepkneeui.png)
37 |
38 |
39 |
40 | A YouTube video describing the process of using the UI is available here: https://youtu.be/4wcnmsxvim8.
41 | The video was recorded for DeepKnee v1, but nothing has really changed since then.
42 |
43 | # Technical documentation
44 | The software is currently composed of six separate, loosely coupled services. Specifically, these are:
45 |
46 | 1. `KNEEL` - Knee joint and landmark localization (https://arxiv.org/abs/1907.12237). REST microservice, port 5000.
47 | 2. `DeepKnee` - Automatic KL grading (this work, https://www.nature.com/articles/s41598-018-20132-7). REST microservice running on port 5001.
48 | 3. `Backend broker` - a NodeJS microservice implementing asynchronous communication between the microservices and the UI (socket.io). It runs on port 5002.
49 | 4. `Orthanc PACS` - An embedded PACS that serves as a DICOM layer for clinical workflow integration.
50 | 5. `Change polling` - A service that tracks what arrives in Orthanc and then forwards those data to DeepKnee, as well as
51 | to the PACS where the user wants to store the results. By default, we use the embedded Orthanc PACS as the remote PACS. However, this store is not
52 | persistent and will be emptied upon restart. It is highly recommended to specify a persistent remote PACS.
53 | 6. `UI` - User interface implemented in ReactJS. This part runs on port 5003; however, the port can be changed in the docker-compose files.
54 |
55 |
56 | The platform is designed so that it is possible to use `KNEEL` and `DeepKnee` separately. Both microservices expect
57 | a `JSON` payload of the form `{dicom: <data>}`, where `<data>` is the DICOM file encoded in `base64`. Requests to either of the services
58 | need to be made to `/kneel/predict/bilateral` or `/deepknee/predict/bilateral` for `KNEEL` and `DeepKnee`, respectively.
59 |
60 | An example script that uses the platform can be found in the file `analyze_folder.py`.
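For a quick test, you can also call the API directly. Below is a minimal Python sketch mirroring what `analyze_folder.py` does; the file path is a placeholder, and the host and port assume a local deployment:

```
import base64
import requests

# Read a DICOM file and encode it as base64 ASCII
with open('knee.dcm', 'rb') as f:  # placeholder path
    dicom_base64 = base64.b64encode(f.read()).decode('ascii')

# POST it to the DeepKnee microservice (use port 5000 and /kneel/... for KNEEL)
response = requests.post('http://127.0.0.1:5001/deepknee/predict/bilateral',
                         json={'dicom': dicom_base64})
res = response.json()

# Per knee ('L' and 'R'), the response carries base64-encoded images
# ('img', 'hm', 'preds_bar') and the predicted KL grade ('kl')
for knee in 'LR':
    print(knee, res[knee]['kl'])
```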
61 |
62 | Below you can see a schematic representation of the software architecture:
63 |
64 | ![DeepKnee architecture](pics/deepknee-architecture.png)
65 |
66 |
67 |
68 | ## A few words about PACS integration
69 | To deploy this software in your network with a persistent PACS, you need to modify the docker-compose file which is used
70 | to run DeepKnee. Specifically, you need to change the entry point parameters of the `dicom-router` service,
71 | modifying the `--remote_pacs_addr` and `--remote_pacs_port` parameters. The software creates an exact copy of the X-ray that
72 | came via DICOM, creates a new Instance ID, and then stores the KL grades in the `(0040, A160)` DICOM field.
73 | DeepKnee stores neither heatmaps nor softmax outputs in DICOM. The AET for DeepKnee is `DEEPKNEE` :-).
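For illustration, such an override could look like the sketch below. This is a hypothetical snippet rather than the repository's actual compose file: the image name and entrypoint layout are assumptions based on the description above and the `pacs-integration` folder, and the address/port values are placeholders.

```
services:
  dicom-router:
    image: deepknee-dicom-router   # assumed image name
    entrypoint:
      - python
      - change_polling.py          # the polling script from pacs-integration/
      - --remote_pacs_addr
      - pacs.example.org           # your persistent PACS host
      - --remote_pacs_port
      - "4242"                     # your persistent PACS DICOM port
```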
74 |
75 | ## License
76 | This code is freely available only for research purposes. Commercial use is not allowed by any means.
77 | The provided software is not cleared for diagnostic purposes.
78 |
79 | ## How to cite
80 | ```
81 | @article{tiulpin2018automatic,
82 | title={Automatic Knee Osteoarthritis Diagnosis from Plain Radiographs: A Deep Learning-Based Approach},
83 | author={Tiulpin, Aleksei and Thevenot, J{\'e}r{\^o}me and Rahtu, Esa and Lehenkari, Petri and Saarakkala, Simo},
84 | journal={Scientific reports},
85 | volume={8},
86 | number={1},
87 | pages={1727},
88 | year={2018},
89 | publisher={Nature Publishing Group}
90 | }
91 | ```
92 |
--------------------------------------------------------------------------------
/analyze_folder.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | import glob
4 | import argparse
5 | import requests
6 | import os
7 | import base64
8 | import cv2
9 | import pandas as pd
10 | import numpy as np
11 | from tqdm import tqdm
12 |
13 |
14 | def base64tonumpy(buffer):
15 |     binary = base64.b64decode(buffer)
16 |     # np.frombuffer replaces the deprecated np.fromstring
17 |     img = cv2.imdecode(np.frombuffer(binary, dtype=np.uint8), cv2.IMREAD_COLOR)
18 |     return img
19 |
20 |
21 | if __name__ == "__main__":
22 | parser = argparse.ArgumentParser()
23 | parser.add_argument('--deepknee_host', default='http://127.0.0.1', help='Host on which deepknee is running.')
24 | parser.add_argument('--deepknee_port', type=int, default=5001, help='Port of deepknee.')
25 | parser.add_argument('--img_dir', default='', help='Directory with images.')
26 |     parser.add_argument('--patient_level', action='store_true',
27 |                         help='Whether the images lie in per-patient directories. '
28 |                              'If set, the script assumes that img_dir '
29 |                              'contains folders that contain images.')
30 | parser.add_argument('--save_results', default='/tmp/deepknee', help='Folder where to save the results.')
31 | args = parser.parse_args()
32 |
33 | url = f'{args.deepknee_host}:{args.deepknee_port}/deepknee/predict/bilateral'
34 | os.makedirs(args.save_results, exist_ok=True)
35 | output_csv = []
36 | if not args.patient_level:
37 | flist = glob.glob(os.path.join(args.img_dir, '*'))
38 | else:
39 | flist = glob.glob(os.path.join(args.img_dir, '*', '*'))
40 |
41 | for idx, img_path in tqdm(enumerate(flist), total=len(flist)):
42 | # Encoding the DICOM as base64 and sending the request to the server
43 | with open(img_path, 'rb') as f:
44 | data_base64 = base64.b64encode(f.read()).decode('ascii')
45 | response = requests.post(url, json={'dicom': data_base64})
46 | res = response.json()
47 | # Parsing the response
48 | result = {}
49 | for knee in 'LR':
50 | # You can also access the localized image, heatmaps and the probability maps
51 | result[knee] = {'img': base64tonumpy(res[knee]['img']),
52 | 'hm': base64tonumpy(res[knee]['hm']),
53 | 'probs_bar': base64tonumpy(res[knee]['preds_bar']),
54 | 'kl': res[knee]['kl']}
55 | output_csv.append({'File': img_path, 'Side': knee, 'KL': result[knee]['kl']})
56 | cv2.imwrite(os.path.join(args.save_results, f'{idx}_{knee}_img.png'), result[knee]['img'])
57 | cv2.imwrite(os.path.join(args.save_results, f'{idx}_{knee}_hm.png'), result[knee]['hm'])
58 | cv2.imwrite(os.path.join(args.save_results, f'{idx}_{knee}_probs.png'), result[knee]['probs_bar'])
59 |
60 | df = pd.DataFrame(data=output_csv)
61 |     df.to_csv(os.path.join(args.save_results, 'deepknee.csv'), index=False)
62 |
--------------------------------------------------------------------------------
/deepknee-backend-broker/.dockerignore:
--------------------------------------------------------------------------------
1 | node_modules
2 | npm-debug.log
--------------------------------------------------------------------------------
/deepknee-backend-broker/package-lock.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "deepknee-srv",
3 | "version": "1.0.0",
4 | "lockfileVersion": 1,
5 | "requires": true,
6 | "dependencies": {
7 | "accepts": {
8 | "version": "1.3.7",
9 | "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.7.tgz",
10 | "integrity": "sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA==",
11 | "requires": {
12 | "mime-types": "~2.1.24",
13 | "negotiator": "0.6.2"
14 | }
15 | },
16 | "after": {
17 | "version": "0.8.2",
18 | "resolved": "https://registry.npmjs.org/after/-/after-0.8.2.tgz",
19 | "integrity": "sha1-/ts5T58OAqqXaOcCvaI7UF+ufh8="
20 | },
21 | "ajv": {
22 | "version": "6.10.2",
23 | "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.10.2.tgz",
24 | "integrity": "sha512-TXtUUEYHuaTEbLZWIKUr5pmBuhDLy+8KYtPYdcV8qC+pOZL+NKqYwvWSRrVXHn+ZmRRAu8vJTAznH7Oag6RVRw==",
25 | "requires": {
26 | "fast-deep-equal": "^2.0.1",
27 | "fast-json-stable-stringify": "^2.0.0",
28 | "json-schema-traverse": "^0.4.1",
29 | "uri-js": "^4.2.2"
30 | }
31 | },
32 | "ansi-regex": {
33 | "version": "2.1.1",
34 | "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz",
35 | "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8="
36 | },
37 | "ansi-styles": {
38 | "version": "2.2.1",
39 | "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz",
40 | "integrity": "sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4="
41 | },
42 | "array-find-index": {
43 | "version": "1.0.2",
44 | "resolved": "https://registry.npmjs.org/array-find-index/-/array-find-index-1.0.2.tgz",
45 | "integrity": "sha1-3wEKoSh+Fku9pvlyOwqWoexBh6E="
46 | },
47 | "array-flatten": {
48 | "version": "1.1.1",
49 | "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz",
50 | "integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI="
51 | },
52 | "arraybuffer.slice": {
53 | "version": "0.0.7",
54 | "resolved": "https://registry.npmjs.org/arraybuffer.slice/-/arraybuffer.slice-0.0.7.tgz",
55 | "integrity": "sha512-wGUIVQXuehL5TCqQun8OW81jGzAWycqzFF8lFp+GOM5BXLYj3bKNsYC4daB7n6XjCqxQA/qgTJ+8ANR3acjrog=="
56 | },
57 | "asn1": {
58 | "version": "0.2.4",
59 | "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz",
60 | "integrity": "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==",
61 | "requires": {
62 | "safer-buffer": "~2.1.0"
63 | }
64 | },
65 | "assert-plus": {
66 | "version": "1.0.0",
67 | "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz",
68 | "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU="
69 | },
70 | "async-limiter": {
71 | "version": "1.0.1",
72 | "resolved": "https://registry.npmjs.org/async-limiter/-/async-limiter-1.0.1.tgz",
73 | "integrity": "sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ=="
74 | },
75 | "async-request": {
76 | "version": "1.2.0",
77 | "resolved": "https://registry.npmjs.org/async-request/-/async-request-1.2.0.tgz",
78 | "integrity": "sha1-tAevi5ctApKvZZ8ZAJfxId+mrJo=",
79 | "requires": {
80 | "lodash": "^3.5.0",
81 | "request": "^2.53.0",
82 | "tough-cookie": "^0.12.1"
83 | },
84 | "dependencies": {
85 | "lodash": {
86 | "version": "3.10.1",
87 | "resolved": "https://registry.npmjs.org/lodash/-/lodash-3.10.1.tgz",
88 | "integrity": "sha1-W/Rejkm6QYnhfUgnid/RW9FAt7Y="
89 | }
90 | }
91 | },
92 | "asynckit": {
93 | "version": "0.4.0",
94 | "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
95 | "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k="
96 | },
97 | "aws-sign2": {
98 | "version": "0.7.0",
99 | "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz",
100 | "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg="
101 | },
102 | "aws4": {
103 | "version": "1.9.0",
104 | "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.9.0.tgz",
105 | "integrity": "sha512-Uvq6hVe90D0B2WEnUqtdgY1bATGz3mw33nH9Y+dmA+w5DHvUmBgkr5rM/KCHpCsiFNRUfokW/szpPPgMK2hm4A=="
106 | },
107 | "axios": {
108 | "version": "0.19.0",
109 | "resolved": "https://registry.npmjs.org/axios/-/axios-0.19.0.tgz",
110 | "integrity": "sha512-1uvKqKQta3KBxIz14F2v06AEHZ/dIoeKfbTRkK1E5oqjDnuEerLmYTgJB5AiQZHJcljpg1TuRzdjDR06qNk0DQ==",
111 | "requires": {
112 | "follow-redirects": "1.5.10",
113 | "is-buffer": "^2.0.2"
114 | }
115 | },
116 | "backo2": {
117 | "version": "1.0.2",
118 | "resolved": "https://registry.npmjs.org/backo2/-/backo2-1.0.2.tgz",
119 | "integrity": "sha1-MasayLEpNjRj41s+u2n038+6eUc="
120 | },
121 | "base64-arraybuffer": {
122 | "version": "0.1.5",
123 | "resolved": "https://registry.npmjs.org/base64-arraybuffer/-/base64-arraybuffer-0.1.5.tgz",
124 | "integrity": "sha1-c5JncZI7Whl0etZmqlzUv5xunOg="
125 | },
126 | "base64id": {
127 | "version": "2.0.0",
128 | "resolved": "https://registry.npmjs.org/base64id/-/base64id-2.0.0.tgz",
129 | "integrity": "sha512-lGe34o6EHj9y3Kts9R4ZYs/Gr+6N7MCaMlIFA3F1R2O5/m7K06AxfSeO5530PEERE6/WyEg3lsuyw4GHlPZHog=="
130 | },
131 | "bcrypt-pbkdf": {
132 | "version": "1.0.2",
133 | "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz",
134 | "integrity": "sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=",
135 | "requires": {
136 | "tweetnacl": "^0.14.3"
137 | }
138 | },
139 | "better-assert": {
140 | "version": "1.0.2",
141 | "resolved": "https://registry.npmjs.org/better-assert/-/better-assert-1.0.2.tgz",
142 | "integrity": "sha1-QIZrnhueC1W0gYlDEeaPr/rrxSI=",
143 | "requires": {
144 | "callsite": "1.0.0"
145 | }
146 | },
147 | "blob": {
148 | "version": "0.0.5",
149 | "resolved": "https://registry.npmjs.org/blob/-/blob-0.0.5.tgz",
150 | "integrity": "sha512-gaqbzQPqOoamawKg0LGVd7SzLgXS+JH61oWprSLH+P+abTczqJbhTR8CmJ2u9/bUYNmHTGJx/UEmn6doAvvuig=="
151 | },
152 | "body-parser": {
153 | "version": "1.19.0",
154 | "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.19.0.tgz",
155 | "integrity": "sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw==",
156 | "requires": {
157 | "bytes": "3.1.0",
158 | "content-type": "~1.0.4",
159 | "debug": "2.6.9",
160 | "depd": "~1.1.2",
161 | "http-errors": "1.7.2",
162 | "iconv-lite": "0.4.24",
163 | "on-finished": "~2.3.0",
164 | "qs": "6.7.0",
165 | "raw-body": "2.4.0",
166 | "type-is": "~1.6.17"
167 | }
168 | },
169 | "bytes": {
170 | "version": "3.1.0",
171 | "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.0.tgz",
172 | "integrity": "sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg=="
173 | },
174 | "callsite": {
175 | "version": "1.0.0",
176 | "resolved": "https://registry.npmjs.org/callsite/-/callsite-1.0.0.tgz",
177 | "integrity": "sha1-KAOY5dZkvXQDi28JBRU+borxvCA="
178 | },
179 | "camelcase": {
180 | "version": "2.1.1",
181 | "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-2.1.1.tgz",
182 | "integrity": "sha1-fB0W1nmhu+WcoCys7PsBHiAfWh8="
183 | },
184 | "camelcase-keys": {
185 | "version": "2.1.0",
186 | "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-2.1.0.tgz",
187 | "integrity": "sha1-MIvur/3ygRkFHvodkyITyRuPkuc=",
188 | "requires": {
189 | "camelcase": "^2.0.0",
190 | "map-obj": "^1.0.0"
191 | }
192 | },
193 | "caseless": {
194 | "version": "0.12.0",
195 | "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz",
196 | "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw="
197 | },
198 | "chalk": {
199 | "version": "1.1.3",
200 | "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz",
201 | "integrity": "sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg=",
202 | "requires": {
203 | "ansi-styles": "^2.2.1",
204 | "escape-string-regexp": "^1.0.2",
205 | "has-ansi": "^2.0.0",
206 | "strip-ansi": "^3.0.0",
207 | "supports-color": "^2.0.0"
208 | }
209 | },
210 | "combined-stream": {
211 | "version": "1.0.8",
212 | "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
213 | "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
214 | "requires": {
215 | "delayed-stream": "~1.0.0"
216 | }
217 | },
218 | "component-bind": {
219 | "version": "1.0.0",
220 | "resolved": "https://registry.npmjs.org/component-bind/-/component-bind-1.0.0.tgz",
221 | "integrity": "sha1-AMYIq33Nk4l8AAllGx06jh5zu9E="
222 | },
223 | "component-emitter": {
224 | "version": "1.2.1",
225 | "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.2.1.tgz",
226 | "integrity": "sha1-E3kY1teCg/ffemt8WmPhQOaUJeY="
227 | },
228 | "component-inherit": {
229 | "version": "0.0.3",
230 | "resolved": "https://registry.npmjs.org/component-inherit/-/component-inherit-0.0.3.tgz",
231 | "integrity": "sha1-ZF/ErfWLcrZJ1crmUTVhnbJv8UM="
232 | },
233 | "console-stamp": {
234 | "version": "0.2.9",
235 | "resolved": "https://registry.npmjs.org/console-stamp/-/console-stamp-0.2.9.tgz",
236 | "integrity": "sha512-jtgd1Fx3Im+pWN54mF269ptunkzF5Lpct2LBTbtyNoK2A4XjcxLM+TQW+e+XE/bLwLQNGRqPqlxm9JMixFntRA==",
237 | "requires": {
238 | "chalk": "^1.1.1",
239 | "dateformat": "^1.0.11",
240 | "merge": "^1.2.0"
241 | }
242 | },
243 | "content-disposition": {
244 | "version": "0.5.3",
245 | "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.3.tgz",
246 | "integrity": "sha512-ExO0774ikEObIAEV9kDo50o+79VCUdEB6n6lzKgGwupcVeRlhrj3qGAfwq8G6uBJjkqLrhT0qEYFcWng8z1z0g==",
247 | "requires": {
248 | "safe-buffer": "5.1.2"
249 | }
250 | },
251 | "content-type": {
252 | "version": "1.0.4",
253 | "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz",
254 | "integrity": "sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA=="
255 | },
256 | "cookie": {
257 | "version": "0.4.0",
258 | "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.4.0.tgz",
259 | "integrity": "sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg=="
260 | },
261 | "cookie-signature": {
262 | "version": "1.0.6",
263 | "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz",
264 | "integrity": "sha1-4wOogrNCzD7oylE6eZmXNNqzriw="
265 | },
266 | "core-util-is": {
267 | "version": "1.0.2",
268 | "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz",
269 | "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac="
270 | },
271 | "currently-unhandled": {
272 | "version": "0.4.1",
273 | "resolved": "https://registry.npmjs.org/currently-unhandled/-/currently-unhandled-0.4.1.tgz",
274 | "integrity": "sha1-mI3zP+qxke95mmE2nddsF635V+o=",
275 | "requires": {
276 | "array-find-index": "^1.0.1"
277 | }
278 | },
279 | "dashdash": {
280 | "version": "1.14.1",
281 | "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz",
282 | "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=",
283 | "requires": {
284 | "assert-plus": "^1.0.0"
285 | }
286 | },
287 | "dateformat": {
288 | "version": "1.0.12",
289 | "resolved": "https://registry.npmjs.org/dateformat/-/dateformat-1.0.12.tgz",
290 | "integrity": "sha1-nxJLZ1lMk3/3BpMuSmQsyo27/uk=",
291 | "requires": {
292 | "get-stdin": "^4.0.1",
293 | "meow": "^3.3.0"
294 | }
295 | },
296 | "debug": {
297 | "version": "2.6.9",
298 | "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
299 | "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
300 | "requires": {
301 | "ms": "2.0.0"
302 | }
303 | },
304 | "decamelize": {
305 | "version": "1.2.0",
306 | "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz",
307 | "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA="
308 | },
309 | "delayed-stream": {
310 | "version": "1.0.0",
311 | "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
312 | "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk="
313 | },
314 | "depd": {
315 | "version": "1.1.2",
316 | "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz",
317 | "integrity": "sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak="
318 | },
319 | "destroy": {
320 | "version": "1.0.4",
321 | "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.0.4.tgz",
322 | "integrity": "sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA="
323 | },
324 | "ecc-jsbn": {
325 | "version": "0.1.2",
326 | "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz",
327 | "integrity": "sha1-OoOpBOVDUyh4dMVkt1SThoSamMk=",
328 | "requires": {
329 | "jsbn": "~0.1.0",
330 | "safer-buffer": "^2.1.0"
331 | }
332 | },
333 | "ee-first": {
334 | "version": "1.1.1",
335 | "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz",
336 | "integrity": "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0="
337 | },
338 | "encodeurl": {
339 | "version": "1.0.2",
340 | "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz",
341 | "integrity": "sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k="
342 | },
343 | "engine.io": {
344 | "version": "3.4.0",
345 | "resolved": "https://registry.npmjs.org/engine.io/-/engine.io-3.4.0.tgz",
346 | "integrity": "sha512-XCyYVWzcHnK5cMz7G4VTu2W7zJS7SM1QkcelghyIk/FmobWBtXE7fwhBusEKvCSqc3bMh8fNFMlUkCKTFRxH2w==",
347 | "requires": {
348 | "accepts": "~1.3.4",
349 | "base64id": "2.0.0",
350 | "cookie": "0.3.1",
351 | "debug": "~4.1.0",
352 | "engine.io-parser": "~2.2.0",
353 | "ws": "^7.1.2"
354 | },
355 | "dependencies": {
356 | "cookie": {
357 | "version": "0.3.1",
358 | "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz",
359 | "integrity": "sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s="
360 | },
361 | "debug": {
362 | "version": "4.1.1",
363 | "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz",
364 | "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==",
365 | "requires": {
366 | "ms": "^2.1.1"
367 | }
368 | },
369 | "ms": {
370 | "version": "2.1.2",
371 | "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
372 | "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
373 | }
374 | }
375 | },
376 | "engine.io-client": {
377 | "version": "3.4.0",
378 | "resolved": "https://registry.npmjs.org/engine.io-client/-/engine.io-client-3.4.0.tgz",
379 | "integrity": "sha512-a4J5QO2k99CM2a0b12IznnyQndoEvtA4UAldhGzKqnHf42I3Qs2W5SPnDvatZRcMaNZs4IevVicBPayxYt6FwA==",
380 | "requires": {
381 | "component-emitter": "1.2.1",
382 | "component-inherit": "0.0.3",
383 | "debug": "~4.1.0",
384 | "engine.io-parser": "~2.2.0",
385 | "has-cors": "1.1.0",
386 | "indexof": "0.0.1",
387 | "parseqs": "0.0.5",
388 | "parseuri": "0.0.5",
389 | "ws": "~6.1.0",
390 | "xmlhttprequest-ssl": "~1.5.4",
391 | "yeast": "0.1.2"
392 | },
393 | "dependencies": {
394 | "debug": {
395 | "version": "4.1.1",
396 | "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz",
397 | "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==",
398 | "requires": {
399 | "ms": "^2.1.1"
400 | }
401 | },
402 | "ms": {
403 | "version": "2.1.2",
404 | "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
405 | "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
406 | },
407 | "ws": {
408 | "version": "6.1.4",
409 | "resolved": "https://registry.npmjs.org/ws/-/ws-6.1.4.tgz",
410 | "integrity": "sha512-eqZfL+NE/YQc1/ZynhojeV8q+H050oR8AZ2uIev7RU10svA9ZnJUddHcOUZTJLinZ9yEfdA2kSATS2qZK5fhJA==",
411 | "requires": {
412 | "async-limiter": "~1.0.0"
413 | }
414 | }
415 | }
416 | },
417 | "engine.io-parser": {
418 | "version": "2.2.0",
419 | "resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-2.2.0.tgz",
420 | "integrity": "sha512-6I3qD9iUxotsC5HEMuuGsKA0cXerGz+4uGcXQEkfBidgKf0amsjrrtwcbwK/nzpZBxclXlV7gGl9dgWvu4LF6w==",
421 | "requires": {
422 | "after": "0.8.2",
423 | "arraybuffer.slice": "~0.0.7",
424 | "base64-arraybuffer": "0.1.5",
425 | "blob": "0.0.5",
426 | "has-binary2": "~1.0.2"
427 | }
428 | },
429 | "error-ex": {
430 | "version": "1.3.2",
431 | "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz",
432 | "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==",
433 | "requires": {
434 | "is-arrayish": "^0.2.1"
435 | }
436 | },
437 | "escape-html": {
438 | "version": "1.0.3",
439 | "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz",
440 | "integrity": "sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg="
441 | },
442 | "escape-string-regexp": {
443 | "version": "1.0.5",
444 | "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz",
445 | "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ="
446 | },
447 | "etag": {
448 | "version": "1.8.1",
449 | "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz",
450 | "integrity": "sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc="
451 | },
452 | "express": {
453 | "version": "4.17.1",
454 | "resolved": "https://registry.npmjs.org/express/-/express-4.17.1.tgz",
455 | "integrity": "sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g==",
456 | "requires": {
457 | "accepts": "~1.3.7",
458 | "array-flatten": "1.1.1",
459 | "body-parser": "1.19.0",
460 | "content-disposition": "0.5.3",
461 | "content-type": "~1.0.4",
462 | "cookie": "0.4.0",
463 | "cookie-signature": "1.0.6",
464 | "debug": "2.6.9",
465 | "depd": "~1.1.2",
466 | "encodeurl": "~1.0.2",
467 | "escape-html": "~1.0.3",
468 | "etag": "~1.8.1",
469 | "finalhandler": "~1.1.2",
470 | "fresh": "0.5.2",
471 | "merge-descriptors": "1.0.1",
472 | "methods": "~1.1.2",
473 | "on-finished": "~2.3.0",
474 | "parseurl": "~1.3.3",
475 | "path-to-regexp": "0.1.7",
476 | "proxy-addr": "~2.0.5",
477 | "qs": "6.7.0",
478 | "range-parser": "~1.2.1",
479 | "safe-buffer": "5.1.2",
480 | "send": "0.17.1",
481 | "serve-static": "1.14.1",
482 | "setprototypeof": "1.1.1",
483 | "statuses": "~1.5.0",
484 | "type-is": "~1.6.18",
485 | "utils-merge": "1.0.1",
486 | "vary": "~1.1.2"
487 | }
488 | },
489 | "extend": {
490 | "version": "3.0.2",
491 | "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz",
492 | "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g=="
493 | },
494 | "extsprintf": {
495 | "version": "1.3.0",
496 | "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz",
497 | "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU="
498 | },
499 | "fast-deep-equal": {
500 | "version": "2.0.1",
501 | "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz",
502 | "integrity": "sha1-ewUhjd+WZ79/Nwv3/bLLFf3Qqkk="
503 | },
504 | "fast-json-stable-stringify": {
505 | "version": "2.0.0",
506 | "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz",
507 | "integrity": "sha1-1RQsDK7msRifh9OnYREGT4bIu/I="
508 | },
509 | "finalhandler": {
510 | "version": "1.1.2",
511 | "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.2.tgz",
512 | "integrity": "sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA==",
513 | "requires": {
514 | "debug": "2.6.9",
515 | "encodeurl": "~1.0.2",
516 | "escape-html": "~1.0.3",
517 | "on-finished": "~2.3.0",
518 | "parseurl": "~1.3.3",
519 | "statuses": "~1.5.0",
520 | "unpipe": "~1.0.0"
521 | }
522 | },
523 | "find-up": {
524 | "version": "1.1.2",
525 | "resolved": "https://registry.npmjs.org/find-up/-/find-up-1.1.2.tgz",
526 | "integrity": "sha1-ay6YIrGizgpgq2TWEOzK1TyyTQ8=",
527 | "requires": {
528 | "path-exists": "^2.0.0",
529 | "pinkie-promise": "^2.0.0"
530 | }
531 | },
532 | "follow-redirects": {
533 | "version": "1.5.10",
534 | "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.5.10.tgz",
535 | "integrity": "sha512-0V5l4Cizzvqt5D44aTXbFZz+FtyXV1vrDN6qrelxtfYQKW0KO0W2T/hkE8xvGa/540LkZlkaUjO4ailYTFtHVQ==",
536 | "requires": {
537 | "debug": "=3.1.0"
538 | },
539 | "dependencies": {
540 | "debug": {
541 | "version": "3.1.0",
542 | "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz",
543 | "integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==",
544 | "requires": {
545 | "ms": "2.0.0"
546 | }
547 | }
548 | }
549 | },
550 | "forever-agent": {
551 | "version": "0.6.1",
552 | "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz",
553 | "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE="
554 | },
555 | "form-data": {
556 | "version": "3.0.0",
557 | "resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.0.tgz",
558 | "integrity": "sha512-CKMFDglpbMi6PyN+brwB9Q/GOw0eAnsrEZDgcsH5Krhz5Od/haKHAX0NmQfha2zPPz0JpWzA7GJHGSnvCRLWsg==",
559 | "requires": {
560 | "asynckit": "^0.4.0",
561 | "combined-stream": "^1.0.8",
562 | "mime-types": "^2.1.12"
563 | }
564 | },
565 | "forwarded": {
566 | "version": "0.1.2",
567 | "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.2.tgz",
568 | "integrity": "sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ="
569 | },
570 | "fresh": {
571 | "version": "0.5.2",
572 | "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz",
573 | "integrity": "sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac="
574 | },
575 | "get-stdin": {
576 | "version": "4.0.1",
577 | "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-4.0.1.tgz",
578 | "integrity": "sha1-uWjGsKBDhDJJAui/Gl3zJXmkUP4="
579 | },
580 | "getpass": {
581 | "version": "0.1.7",
582 | "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz",
583 | "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=",
584 | "requires": {
585 | "assert-plus": "^1.0.0"
586 | }
587 | },
588 | "graceful-fs": {
589 | "version": "4.2.3",
590 | "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.3.tgz",
591 | "integrity": "sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ=="
592 | },
593 | "har-schema": {
594 | "version": "2.0.0",
595 | "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz",
596 | "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI="
597 | },
598 | "har-validator": {
599 | "version": "5.1.3",
600 | "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.3.tgz",
601 | "integrity": "sha512-sNvOCzEQNr/qrvJgc3UG/kD4QtlHycrzwS+6mfTrrSq97BvaYcPZZI1ZSqGSPR73Cxn4LKTD4PttRwfU7jWq5g==",
602 | "requires": {
603 | "ajv": "^6.5.5",
604 | "har-schema": "^2.0.0"
605 | }
606 | },
607 | "has-ansi": {
608 | "version": "2.0.0",
609 | "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz",
610 | "integrity": "sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE=",
611 | "requires": {
612 | "ansi-regex": "^2.0.0"
613 | }
614 | },
615 | "has-binary2": {
616 | "version": "1.0.3",
617 | "resolved": "https://registry.npmjs.org/has-binary2/-/has-binary2-1.0.3.tgz",
618 | "integrity": "sha512-G1LWKhDSvhGeAQ8mPVQlqNcOB2sJdwATtZKl2pDKKHfpf/rYj24lkinxf69blJbnsvtqqNU+L3SL50vzZhXOnw==",
619 | "requires": {
620 | "isarray": "2.0.1"
621 | }
622 | },
623 | "has-cors": {
624 | "version": "1.1.0",
625 | "resolved": "https://registry.npmjs.org/has-cors/-/has-cors-1.1.0.tgz",
626 | "integrity": "sha1-XkdHk/fqmEPRu5nCPu9J/xJv/zk="
627 | },
628 | "hosted-git-info": {
629 | "version": "2.8.5",
630 | "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.5.tgz",
631 | "integrity": "sha512-kssjab8CvdXfcXMXVcvsXum4Hwdq9XGtRD3TteMEvEbq0LXyiNQr6AprqKqfeaDXze7SxWvRxdpwE6ku7ikLkg=="
632 | },
633 | "http-errors": {
634 | "version": "1.7.2",
635 | "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.7.2.tgz",
636 | "integrity": "sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg==",
637 | "requires": {
638 | "depd": "~1.1.2",
639 | "inherits": "2.0.3",
640 | "setprototypeof": "1.1.1",
641 | "statuses": ">= 1.5.0 < 2",
642 | "toidentifier": "1.0.0"
643 | }
644 | },
645 | "http-signature": {
646 | "version": "1.2.0",
647 | "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz",
648 | "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=",
649 | "requires": {
650 | "assert-plus": "^1.0.0",
651 | "jsprim": "^1.2.2",
652 | "sshpk": "^1.7.0"
653 | }
654 | },
655 | "iconv-lite": {
656 | "version": "0.4.24",
657 | "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",
658 | "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==",
659 | "requires": {
660 | "safer-buffer": ">= 2.1.2 < 3"
661 | }
662 | },
663 | "indent-string": {
664 | "version": "2.1.0",
665 | "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-2.1.0.tgz",
666 | "integrity": "sha1-ji1INIdCEhtKghi3oTfppSBJ3IA=",
667 | "requires": {
668 | "repeating": "^2.0.0"
669 | }
670 | },
671 | "indexof": {
672 | "version": "0.0.1",
673 | "resolved": "https://registry.npmjs.org/indexof/-/indexof-0.0.1.tgz",
674 | "integrity": "sha1-gtwzbSMrkGIXnQWrMpOmYFn9Q10="
675 | },
676 | "inherits": {
677 | "version": "2.0.3",
678 | "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz",
679 | "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4="
680 | },
681 | "ipaddr.js": {
682 | "version": "1.9.0",
683 | "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.0.tgz",
684 | "integrity": "sha512-M4Sjn6N/+O6/IXSJseKqHoFc+5FdGJ22sXqnjTpdZweHK64MzEPAyQZyEU3R/KRv2GLoa7nNtg/C2Ev6m7z+eA=="
685 | },
686 | "is-arrayish": {
687 | "version": "0.2.1",
688 | "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz",
689 | "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0="
690 | },
691 | "is-buffer": {
692 | "version": "2.0.4",
693 | "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.4.tgz",
694 | "integrity": "sha512-Kq1rokWXOPXWuaMAqZiJW4XxsmD9zGx9q4aePabbn3qCRGedtH7Cm+zV8WETitMfu1wdh+Rvd6w5egwSngUX2A=="
695 | },
696 | "is-finite": {
697 | "version": "1.0.2",
698 | "resolved": "https://registry.npmjs.org/is-finite/-/is-finite-1.0.2.tgz",
699 | "integrity": "sha1-zGZ3aVYCvlUO8R6LSqYwU0K20Ko=",
700 | "requires": {
701 | "number-is-nan": "^1.0.0"
702 | }
703 | },
704 | "is-typedarray": {
705 | "version": "1.0.0",
706 | "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz",
707 | "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo="
708 | },
709 | "is-utf8": {
710 | "version": "0.2.1",
711 | "resolved": "https://registry.npmjs.org/is-utf8/-/is-utf8-0.2.1.tgz",
712 | "integrity": "sha1-Sw2hRCEE0bM2NA6AeX6GXPOffXI="
713 | },
714 | "isarray": {
715 | "version": "2.0.1",
716 | "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.1.tgz",
717 | "integrity": "sha1-o32U7ZzaLVmGXJ92/llu4fM4dB4="
718 | },
719 | "isstream": {
720 | "version": "0.1.2",
721 | "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz",
722 | "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo="
723 | },
724 | "jsbn": {
725 | "version": "0.1.1",
726 | "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz",
727 | "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM="
728 | },
729 | "json-schema": {
730 | "version": "0.2.3",
731 | "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz",
732 | "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM="
733 | },
734 | "json-schema-traverse": {
735 | "version": "0.4.1",
736 | "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
737 | "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg=="
738 | },
739 | "json-stringify-safe": {
740 | "version": "5.0.1",
741 | "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz",
742 | "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus="
743 | },
744 | "jsprim": {
745 | "version": "1.4.1",
746 | "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz",
747 | "integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=",
748 | "requires": {
749 | "assert-plus": "1.0.0",
750 | "extsprintf": "1.3.0",
751 | "json-schema": "0.2.3",
752 | "verror": "1.10.0"
753 | }
754 | },
755 | "load-json-file": {
756 | "version": "1.1.0",
757 | "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-1.1.0.tgz",
758 | "integrity": "sha1-lWkFcI1YtLq0wiYbBPWfMcmTdMA=",
759 | "requires": {
760 | "graceful-fs": "^4.1.2",
761 | "parse-json": "^2.2.0",
762 | "pify": "^2.0.0",
763 | "pinkie-promise": "^2.0.0",
764 | "strip-bom": "^2.0.0"
765 | }
766 | },
767 | "lodash": {
768 | "version": "4.17.15",
769 | "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz",
770 | "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A=="
771 | },
772 | "loud-rejection": {
773 | "version": "1.6.0",
774 | "resolved": "https://registry.npmjs.org/loud-rejection/-/loud-rejection-1.6.0.tgz",
775 | "integrity": "sha1-W0b4AUft7leIcPCG0Eghz5mOVR8=",
776 | "requires": {
777 | "currently-unhandled": "^0.4.1",
778 | "signal-exit": "^3.0.0"
779 | }
780 | },
781 | "map-obj": {
782 | "version": "1.0.1",
783 | "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-1.0.1.tgz",
784 | "integrity": "sha1-2TPOuSBdgr3PSIb2dCvcK03qFG0="
785 | },
786 | "media-typer": {
787 | "version": "0.3.0",
788 | "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz",
789 | "integrity": "sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g="
790 | },
791 | "meow": {
792 | "version": "3.7.0",
793 | "resolved": "https://registry.npmjs.org/meow/-/meow-3.7.0.tgz",
794 | "integrity": "sha1-cstmi0JSKCkKu/qFaJJYcwioAfs=",
795 | "requires": {
796 | "camelcase-keys": "^2.0.0",
797 | "decamelize": "^1.1.2",
798 | "loud-rejection": "^1.0.0",
799 | "map-obj": "^1.0.1",
800 | "minimist": "^1.1.3",
801 | "normalize-package-data": "^2.3.4",
802 | "object-assign": "^4.0.1",
803 | "read-pkg-up": "^1.0.1",
804 | "redent": "^1.0.0",
805 | "trim-newlines": "^1.0.0"
806 | }
807 | },
808 | "merge": {
809 | "version": "1.2.1",
810 | "resolved": "https://registry.npmjs.org/merge/-/merge-1.2.1.tgz",
811 | "integrity": "sha512-VjFo4P5Whtj4vsLzsYBu5ayHhoHJ0UqNm7ibvShmbmoz7tGi0vXaoJbGdB+GmDMLUdg8DpQXEIeVDAe8MaABvQ=="
812 | },
813 | "merge-descriptors": {
814 | "version": "1.0.1",
815 | "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz",
816 | "integrity": "sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E="
817 | },
818 | "methods": {
819 | "version": "1.1.2",
820 | "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz",
821 | "integrity": "sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4="
822 | },
823 | "mime": {
824 | "version": "1.6.0",
825 | "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz",
826 | "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg=="
827 | },
828 | "mime-db": {
829 | "version": "1.42.0",
830 | "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.42.0.tgz",
831 | "integrity": "sha512-UbfJCR4UAVRNgMpfImz05smAXK7+c+ZntjaA26ANtkXLlOe947Aag5zdIcKQULAiF9Cq4WxBi9jUs5zkA84bYQ=="
832 | },
833 | "mime-types": {
834 | "version": "2.1.25",
835 | "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.25.tgz",
836 | "integrity": "sha512-5KhStqB5xpTAeGqKBAMgwaYMnQik7teQN4IAzC7npDv6kzeU6prfkR67bc87J1kWMPGkoaZSq1npmexMgkmEVg==",
837 | "requires": {
838 | "mime-db": "1.42.0"
839 | }
840 | },
841 | "minimist": {
842 | "version": "1.2.0",
843 | "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz",
844 | "integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ="
845 | },
846 | "ms": {
847 | "version": "2.0.0",
848 | "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
849 | "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g="
850 | },
851 | "negotiator": {
852 | "version": "0.6.2",
853 | "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.2.tgz",
854 | "integrity": "sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw=="
855 | },
856 | "normalize-package-data": {
857 | "version": "2.5.0",
858 | "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz",
859 | "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==",
860 | "requires": {
861 | "hosted-git-info": "^2.1.4",
862 | "resolve": "^1.10.0",
863 | "semver": "2 || 3 || 4 || 5",
864 | "validate-npm-package-license": "^3.0.1"
865 | }
866 | },
867 | "number-is-nan": {
868 | "version": "1.0.1",
869 | "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz",
870 | "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0="
871 | },
872 | "oauth-sign": {
873 | "version": "0.9.0",
874 | "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz",
875 | "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ=="
876 | },
877 | "object-assign": {
878 | "version": "4.1.1",
879 | "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
880 | "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM="
881 | },
882 | "object-component": {
883 | "version": "0.0.3",
884 | "resolved": "https://registry.npmjs.org/object-component/-/object-component-0.0.3.tgz",
885 | "integrity": "sha1-8MaapQ78lbhmwYb0AKM3acsvEpE="
886 | },
887 | "on-finished": {
888 | "version": "2.3.0",
889 | "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz",
890 | "integrity": "sha1-IPEzZIGwg811M3mSoWlxqi2QaUc=",
891 | "requires": {
892 | "ee-first": "1.1.1"
893 | }
894 | },
895 | "parse-json": {
896 | "version": "2.2.0",
897 | "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz",
898 | "integrity": "sha1-9ID0BDTvgHQfhGkJn43qGPVaTck=",
899 | "requires": {
900 | "error-ex": "^1.2.0"
901 | }
902 | },
903 | "parseqs": {
904 | "version": "0.0.5",
905 | "resolved": "https://registry.npmjs.org/parseqs/-/parseqs-0.0.5.tgz",
906 | "integrity": "sha1-1SCKNzjkZ2bikbouoXNoSSGouJ0=",
907 | "requires": {
908 | "better-assert": "~1.0.0"
909 | }
910 | },
911 | "parseuri": {
912 | "version": "0.0.5",
913 | "resolved": "https://registry.npmjs.org/parseuri/-/parseuri-0.0.5.tgz",
914 | "integrity": "sha1-gCBKUNTbt3m/3G6+J3jZDkvOMgo=",
915 | "requires": {
916 | "better-assert": "~1.0.0"
917 | }
918 | },
919 | "parseurl": {
920 | "version": "1.3.3",
921 | "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz",
922 | "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ=="
923 | },
924 | "path-exists": {
925 | "version": "2.1.0",
926 | "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-2.1.0.tgz",
927 | "integrity": "sha1-D+tsZPD8UY2adU3V77YscCJ2H0s=",
928 | "requires": {
929 | "pinkie-promise": "^2.0.0"
930 | }
931 | },
932 | "path-parse": {
933 | "version": "1.0.6",
934 | "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz",
935 | "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw=="
936 | },
937 | "path-to-regexp": {
938 | "version": "0.1.7",
939 | "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz",
940 | "integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w="
941 | },
942 | "path-type": {
943 | "version": "1.1.0",
944 | "resolved": "https://registry.npmjs.org/path-type/-/path-type-1.1.0.tgz",
945 | "integrity": "sha1-WcRPfuSR2nBNpBXaWkBwuk+P5EE=",
946 | "requires": {
947 | "graceful-fs": "^4.1.2",
948 | "pify": "^2.0.0",
949 | "pinkie-promise": "^2.0.0"
950 | }
951 | },
952 | "performance-now": {
953 | "version": "2.1.0",
954 | "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz",
955 | "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns="
956 | },
957 | "pify": {
958 | "version": "2.3.0",
959 | "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz",
960 | "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw="
961 | },
962 | "pinkie": {
963 | "version": "2.0.4",
964 | "resolved": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz",
965 | "integrity": "sha1-clVrgM+g1IqXToDnckjoDtT3+HA="
966 | },
967 | "pinkie-promise": {
968 | "version": "2.0.1",
969 | "resolved": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz",
970 | "integrity": "sha1-ITXW36ejWMBprJsXh3YogihFD/o=",
971 | "requires": {
972 | "pinkie": "^2.0.0"
973 | }
974 | },
975 | "proxy-addr": {
976 | "version": "2.0.5",
977 | "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.5.tgz",
978 | "integrity": "sha512-t/7RxHXPH6cJtP0pRG6smSr9QJidhB+3kXu0KgXnbGYMgzEnUxRQ4/LDdfOwZEMyIh3/xHb8PX3t+lfL9z+YVQ==",
979 | "requires": {
980 | "forwarded": "~0.1.2",
981 | "ipaddr.js": "1.9.0"
982 | }
983 | },
984 | "psl": {
985 | "version": "1.6.0",
986 | "resolved": "https://registry.npmjs.org/psl/-/psl-1.6.0.tgz",
987 | "integrity": "sha512-SYKKmVel98NCOYXpkwUqZqh0ahZeeKfmisiLIcEZdsb+WbLv02g/dI5BUmZnIyOe7RzZtLax81nnb2HbvC2tzA=="
988 | },
989 | "punycode": {
990 | "version": "2.1.1",
991 | "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz",
992 | "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A=="
993 | },
994 | "qs": {
995 | "version": "6.7.0",
996 | "resolved": "https://registry.npmjs.org/qs/-/qs-6.7.0.tgz",
997 | "integrity": "sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ=="
998 | },
999 | "range-parser": {
1000 | "version": "1.2.1",
1001 | "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz",
1002 | "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg=="
1003 | },
1004 | "raw-body": {
1005 | "version": "2.4.0",
1006 | "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.4.0.tgz",
1007 | "integrity": "sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q==",
1008 | "requires": {
1009 | "bytes": "3.1.0",
1010 | "http-errors": "1.7.2",
1011 | "iconv-lite": "0.4.24",
1012 | "unpipe": "1.0.0"
1013 | }
1014 | },
1015 | "read-pkg": {
1016 | "version": "1.1.0",
1017 | "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-1.1.0.tgz",
1018 | "integrity": "sha1-9f+qXs0pyzHAR0vKfXVra7KePyg=",
1019 | "requires": {
1020 | "load-json-file": "^1.0.0",
1021 | "normalize-package-data": "^2.3.2",
1022 | "path-type": "^1.0.0"
1023 | }
1024 | },
1025 | "read-pkg-up": {
1026 | "version": "1.0.1",
1027 | "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-1.0.1.tgz",
1028 | "integrity": "sha1-nWPBMnbAZZGNV/ACpX9AobZD+wI=",
1029 | "requires": {
1030 | "find-up": "^1.0.0",
1031 | "read-pkg": "^1.0.0"
1032 | }
1033 | },
1034 | "redent": {
1035 | "version": "1.0.0",
1036 | "resolved": "https://registry.npmjs.org/redent/-/redent-1.0.0.tgz",
1037 | "integrity": "sha1-z5Fqsf1fHxbfsggi3W7H9zDCr94=",
1038 | "requires": {
1039 | "indent-string": "^2.1.0",
1040 | "strip-indent": "^1.0.1"
1041 | }
1042 | },
1043 | "repeating": {
1044 | "version": "2.0.1",
1045 | "resolved": "https://registry.npmjs.org/repeating/-/repeating-2.0.1.tgz",
1046 | "integrity": "sha1-UhTFOpJtNVJwdSf7q0FdvAjQbdo=",
1047 | "requires": {
1048 | "is-finite": "^1.0.0"
1049 | }
1050 | },
1051 | "request": {
1052 | "version": "2.88.0",
1053 | "resolved": "https://registry.npmjs.org/request/-/request-2.88.0.tgz",
1054 | "integrity": "sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg==",
1055 | "requires": {
1056 | "aws-sign2": "~0.7.0",
1057 | "aws4": "^1.8.0",
1058 | "caseless": "~0.12.0",
1059 | "combined-stream": "~1.0.6",
1060 | "extend": "~3.0.2",
1061 | "forever-agent": "~0.6.1",
1062 | "form-data": "~2.3.2",
1063 | "har-validator": "~5.1.0",
1064 | "http-signature": "~1.2.0",
1065 | "is-typedarray": "~1.0.0",
1066 | "isstream": "~0.1.2",
1067 | "json-stringify-safe": "~5.0.1",
1068 | "mime-types": "~2.1.19",
1069 | "oauth-sign": "~0.9.0",
1070 | "performance-now": "^2.1.0",
1071 | "qs": "~6.5.2",
1072 | "safe-buffer": "^5.1.2",
1073 | "tough-cookie": "~2.4.3",
1074 | "tunnel-agent": "^0.6.0",
1075 | "uuid": "^3.3.2"
1076 | },
1077 | "dependencies": {
1078 | "form-data": {
1079 | "version": "2.3.3",
1080 | "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz",
1081 | "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==",
1082 | "requires": {
1083 | "asynckit": "^0.4.0",
1084 | "combined-stream": "^1.0.6",
1085 | "mime-types": "^2.1.12"
1086 | }
1087 | },
1088 | "punycode": {
1089 | "version": "1.4.1",
1090 | "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz",
1091 | "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4="
1092 | },
1093 | "qs": {
1094 | "version": "6.5.2",
1095 | "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz",
1096 | "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA=="
1097 | },
1098 | "tough-cookie": {
1099 | "version": "2.4.3",
1100 | "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.4.3.tgz",
1101 | "integrity": "sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ==",
1102 | "requires": {
1103 | "psl": "^1.1.24",
1104 | "punycode": "^1.4.1"
1105 | }
1106 | }
1107 | }
1108 | },
1109 | "resolve": {
1110 | "version": "1.13.1",
1111 | "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.13.1.tgz",
1112 | "integrity": "sha512-CxqObCX8K8YtAhOBRg+lrcdn+LK+WYOS8tSjqSFbjtrI5PnS63QPhZl4+yKfrU9tdsbMu9Anr/amegT87M9Z6w==",
1113 | "requires": {
1114 | "path-parse": "^1.0.6"
1115 | }
1116 | },
1117 | "safe-buffer": {
1118 | "version": "5.1.2",
1119 | "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz",
1120 | "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="
1121 | },
1122 | "safer-buffer": {
1123 | "version": "2.1.2",
1124 | "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
1125 | "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
1126 | },
1127 | "semver": {
1128 | "version": "5.7.1",
1129 | "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz",
1130 | "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ=="
1131 | },
1132 | "send": {
1133 | "version": "0.17.1",
1134 | "resolved": "https://registry.npmjs.org/send/-/send-0.17.1.tgz",
1135 | "integrity": "sha512-BsVKsiGcQMFwT8UxypobUKyv7irCNRHk1T0G680vk88yf6LBByGcZJOTJCrTP2xVN6yI+XjPJcNuE3V4fT9sAg==",
1136 | "requires": {
1137 | "debug": "2.6.9",
1138 | "depd": "~1.1.2",
1139 | "destroy": "~1.0.4",
1140 | "encodeurl": "~1.0.2",
1141 | "escape-html": "~1.0.3",
1142 | "etag": "~1.8.1",
1143 | "fresh": "0.5.2",
1144 | "http-errors": "~1.7.2",
1145 | "mime": "1.6.0",
1146 | "ms": "2.1.1",
1147 | "on-finished": "~2.3.0",
1148 | "range-parser": "~1.2.1",
1149 | "statuses": "~1.5.0"
1150 | },
1151 | "dependencies": {
1152 | "ms": {
1153 | "version": "2.1.1",
1154 | "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz",
1155 | "integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg=="
1156 | }
1157 | }
1158 | },
1159 | "serve-static": {
1160 | "version": "1.14.1",
1161 | "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.14.1.tgz",
1162 | "integrity": "sha512-JMrvUwE54emCYWlTI+hGrGv5I8dEwmco/00EvkzIIsR7MqrHonbD9pO2MOfFnpFntl7ecpZs+3mW+XbQZu9QCg==",
1163 | "requires": {
1164 | "encodeurl": "~1.0.2",
1165 | "escape-html": "~1.0.3",
1166 | "parseurl": "~1.3.3",
1167 | "send": "0.17.1"
1168 | }
1169 | },
1170 | "setprototypeof": {
1171 | "version": "1.1.1",
1172 | "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.1.tgz",
1173 | "integrity": "sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw=="
1174 | },
1175 | "signal-exit": {
1176 | "version": "3.0.2",
1177 | "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz",
1178 | "integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0="
1179 | },
1180 | "socket.io": {
1181 | "version": "2.3.0",
1182 | "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-2.3.0.tgz",
1183 | "integrity": "sha512-2A892lrj0GcgR/9Qk81EaY2gYhCBxurV0PfmmESO6p27QPrUK1J3zdns+5QPqvUYK2q657nSj0guoIil9+7eFg==",
1184 | "requires": {
1185 | "debug": "~4.1.0",
1186 | "engine.io": "~3.4.0",
1187 | "has-binary2": "~1.0.2",
1188 | "socket.io-adapter": "~1.1.0",
1189 | "socket.io-client": "2.3.0",
1190 | "socket.io-parser": "~3.4.0"
1191 | },
1192 | "dependencies": {
1193 | "debug": {
1194 | "version": "4.1.1",
1195 | "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz",
1196 | "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==",
1197 | "requires": {
1198 | "ms": "^2.1.1"
1199 | }
1200 | },
1201 | "ms": {
1202 | "version": "2.1.2",
1203 | "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
1204 | "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
1205 | }
1206 | }
1207 | },
1208 | "socket.io-adapter": {
1209 | "version": "1.1.2",
1210 | "resolved": "https://registry.npmjs.org/socket.io-adapter/-/socket.io-adapter-1.1.2.tgz",
1211 | "integrity": "sha512-WzZRUj1kUjrTIrUKpZLEzFZ1OLj5FwLlAFQs9kuZJzJi5DKdU7FsWc36SNmA8iDOtwBQyT8FkrriRM8vXLYz8g=="
1212 | },
1213 | "socket.io-client": {
1214 | "version": "2.3.0",
1215 | "resolved": "https://registry.npmjs.org/socket.io-client/-/socket.io-client-2.3.0.tgz",
1216 | "integrity": "sha512-cEQQf24gET3rfhxZ2jJ5xzAOo/xhZwK+mOqtGRg5IowZsMgwvHwnf/mCRapAAkadhM26y+iydgwsXGObBB5ZdA==",
1217 | "requires": {
1218 | "backo2": "1.0.2",
1219 | "base64-arraybuffer": "0.1.5",
1220 | "component-bind": "1.0.0",
1221 | "component-emitter": "1.2.1",
1222 | "debug": "~4.1.0",
1223 | "engine.io-client": "~3.4.0",
1224 | "has-binary2": "~1.0.2",
1225 | "has-cors": "1.1.0",
1226 | "indexof": "0.0.1",
1227 | "object-component": "0.0.3",
1228 | "parseqs": "0.0.5",
1229 | "parseuri": "0.0.5",
1230 | "socket.io-parser": "~3.3.0",
1231 | "to-array": "0.1.4"
1232 | },
1233 | "dependencies": {
1234 | "debug": {
1235 | "version": "4.1.1",
1236 | "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz",
1237 | "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==",
1238 | "requires": {
1239 | "ms": "^2.1.1"
1240 | }
1241 | },
1242 | "ms": {
1243 | "version": "2.1.2",
1244 | "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
1245 | "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
1246 | },
1247 | "socket.io-parser": {
1248 | "version": "3.3.0",
1249 | "resolved": "https://registry.npmjs.org/socket.io-parser/-/socket.io-parser-3.3.0.tgz",
1250 | "integrity": "sha512-hczmV6bDgdaEbVqhAeVMM/jfUfzuEZHsQg6eOmLgJht6G3mPKMxYm75w2+qhAQZ+4X+1+ATZ+QFKeOZD5riHng==",
1251 | "requires": {
1252 | "component-emitter": "1.2.1",
1253 | "debug": "~3.1.0",
1254 | "isarray": "2.0.1"
1255 | },
1256 | "dependencies": {
1257 | "debug": {
1258 | "version": "3.1.0",
1259 | "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz",
1260 | "integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==",
1261 | "requires": {
1262 | "ms": "2.0.0"
1263 | }
1264 | },
1265 | "ms": {
1266 | "version": "2.0.0",
1267 | "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
1268 | "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g="
1269 | }
1270 | }
1271 | }
1272 | }
1273 | },
1274 | "socket.io-parser": {
1275 | "version": "3.4.0",
1276 | "resolved": "https://registry.npmjs.org/socket.io-parser/-/socket.io-parser-3.4.0.tgz",
1277 | "integrity": "sha512-/G/VOI+3DBp0+DJKW4KesGnQkQPFmUCbA/oO2QGT6CWxU7hLGWqU3tyuzeSK/dqcyeHsQg1vTe9jiZI8GU9SCQ==",
1278 | "requires": {
1279 | "component-emitter": "1.2.1",
1280 | "debug": "~4.1.0",
1281 | "isarray": "2.0.1"
1282 | },
1283 | "dependencies": {
1284 | "debug": {
1285 | "version": "4.1.1",
1286 | "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz",
1287 | "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==",
1288 | "requires": {
1289 | "ms": "^2.1.1"
1290 | }
1291 | },
1292 | "ms": {
1293 | "version": "2.1.2",
1294 | "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
1295 | "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
1296 | }
1297 | }
1298 | },
1299 | "spdx-correct": {
1300 | "version": "3.1.0",
1301 | "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.0.tgz",
1302 | "integrity": "sha512-lr2EZCctC2BNR7j7WzJ2FpDznxky1sjfxvvYEyzxNyb6lZXHODmEoJeFu4JupYlkfha1KZpJyoqiJ7pgA1qq8Q==",
1303 | "requires": {
1304 | "spdx-expression-parse": "^3.0.0",
1305 | "spdx-license-ids": "^3.0.0"
1306 | }
1307 | },
1308 | "spdx-exceptions": {
1309 | "version": "2.2.0",
1310 | "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.2.0.tgz",
1311 | "integrity": "sha512-2XQACfElKi9SlVb1CYadKDXvoajPgBVPn/gOQLrTvHdElaVhr7ZEbqJaRnJLVNeaI4cMEAgVCeBMKF6MWRDCRA=="
1312 | },
1313 | "spdx-expression-parse": {
1314 | "version": "3.0.0",
1315 | "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.0.tgz",
1316 | "integrity": "sha512-Yg6D3XpRD4kkOmTpdgbUiEJFKghJH03fiC1OPll5h/0sO6neh2jqRDVHOQ4o/LMea0tgCkbMgea5ip/e+MkWyg==",
1317 | "requires": {
1318 | "spdx-exceptions": "^2.1.0",
1319 | "spdx-license-ids": "^3.0.0"
1320 | }
1321 | },
1322 | "spdx-license-ids": {
1323 | "version": "3.0.5",
1324 | "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.5.tgz",
1325 | "integrity": "sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q=="
1326 | },
1327 | "sshpk": {
1328 | "version": "1.16.1",
1329 | "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz",
1330 | "integrity": "sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg==",
1331 | "requires": {
1332 | "asn1": "~0.2.3",
1333 | "assert-plus": "^1.0.0",
1334 | "bcrypt-pbkdf": "^1.0.0",
1335 | "dashdash": "^1.12.0",
1336 | "ecc-jsbn": "~0.1.1",
1337 | "getpass": "^0.1.1",
1338 | "jsbn": "~0.1.0",
1339 | "safer-buffer": "^2.0.2",
1340 | "tweetnacl": "~0.14.0"
1341 | }
1342 | },
1343 | "statuses": {
1344 | "version": "1.5.0",
1345 | "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz",
1346 | "integrity": "sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow="
1347 | },
1348 | "strip-ansi": {
1349 | "version": "3.0.1",
1350 | "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz",
1351 | "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=",
1352 | "requires": {
1353 | "ansi-regex": "^2.0.0"
1354 | }
1355 | },
1356 | "strip-bom": {
1357 | "version": "2.0.0",
1358 | "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-2.0.0.tgz",
1359 | "integrity": "sha1-YhmoVhZSBJHzV4i9vxRHqZx+aw4=",
1360 | "requires": {
1361 | "is-utf8": "^0.2.0"
1362 | }
1363 | },
1364 | "strip-indent": {
1365 | "version": "1.0.1",
1366 | "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-1.0.1.tgz",
1367 | "integrity": "sha1-DHlipq3vp7vUrDZkYKY4VSrhoKI=",
1368 | "requires": {
1369 | "get-stdin": "^4.0.1"
1370 | }
1371 | },
1372 | "supports-color": {
1373 | "version": "2.0.0",
1374 | "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz",
1375 | "integrity": "sha1-U10EXOa2Nj+kARcIRimZXp3zJMc="
1376 | },
1377 | "to-array": {
1378 | "version": "0.1.4",
1379 | "resolved": "https://registry.npmjs.org/to-array/-/to-array-0.1.4.tgz",
1380 | "integrity": "sha1-F+bBH3PdTz10zaek/zI46a2b+JA="
1381 | },
1382 | "toidentifier": {
1383 | "version": "1.0.0",
1384 | "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.0.tgz",
1385 | "integrity": "sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw=="
1386 | },
1387 | "tough-cookie": {
1388 | "version": "0.12.1",
1389 | "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-0.12.1.tgz",
1390 | "integrity": "sha1-giDH4hq9WxPZaAQlS9WoHr8sfWI=",
1391 | "requires": {
1392 | "punycode": ">=0.2.0"
1393 | }
1394 | },
1395 | "trim-newlines": {
1396 | "version": "1.0.0",
1397 | "resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-1.0.0.tgz",
1398 | "integrity": "sha1-WIeWa7WCpFA6QetST301ARgVphM="
1399 | },
1400 | "tunnel-agent": {
1401 | "version": "0.6.0",
1402 | "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz",
1403 | "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=",
1404 | "requires": {
1405 | "safe-buffer": "^5.0.1"
1406 | }
1407 | },
1408 | "tweetnacl": {
1409 | "version": "0.14.5",
1410 | "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz",
1411 | "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q="
1412 | },
1413 | "type-is": {
1414 | "version": "1.6.18",
1415 | "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz",
1416 | "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==",
1417 | "requires": {
1418 | "media-typer": "0.3.0",
1419 | "mime-types": "~2.1.24"
1420 | }
1421 | },
1422 | "unpipe": {
1423 | "version": "1.0.0",
1424 | "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz",
1425 | "integrity": "sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw="
1426 | },
1427 | "uri-js": {
1428 | "version": "4.2.2",
1429 | "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.2.2.tgz",
1430 | "integrity": "sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ==",
1431 | "requires": {
1432 | "punycode": "^2.1.0"
1433 | }
1434 | },
1435 | "utils-merge": {
1436 | "version": "1.0.1",
1437 | "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz",
1438 | "integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM="
1439 | },
1440 | "uuid": {
1441 | "version": "3.3.3",
1442 | "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.3.tgz",
1443 | "integrity": "sha512-pW0No1RGHgzlpHJO1nsVrHKpOEIxkGg1xB+v0ZmdNH5OAeAwzAVrCnI2/6Mtx+Uys6iaylxa+D3g4j63IKKjSQ=="
1444 | },
1445 | "validate-npm-package-license": {
1446 | "version": "3.0.4",
1447 | "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz",
1448 | "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==",
1449 | "requires": {
1450 | "spdx-correct": "^3.0.0",
1451 | "spdx-expression-parse": "^3.0.0"
1452 | }
1453 | },
1454 | "vary": {
1455 | "version": "1.1.2",
1456 | "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz",
1457 | "integrity": "sha1-IpnwLG3tMNSllhsLn3RSShj2NPw="
1458 | },
1459 | "verror": {
1460 | "version": "1.10.0",
1461 | "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz",
1462 | "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=",
1463 | "requires": {
1464 | "assert-plus": "^1.0.0",
1465 | "core-util-is": "1.0.2",
1466 | "extsprintf": "^1.2.0"
1467 | }
1468 | },
1469 | "ws": {
1470 | "version": "7.2.0",
1471 | "resolved": "https://registry.npmjs.org/ws/-/ws-7.2.0.tgz",
1472 | "integrity": "sha512-+SqNqFbwTm/0DC18KYzIsMTnEWpLwJsiasW/O17la4iDRRIO9uaHbvKiAS3AHgTiuuWerK/brj4O6MYZkei9xg==",
1473 | "requires": {
1474 | "async-limiter": "^1.0.0"
1475 | }
1476 | },
1477 | "xmlhttprequest-ssl": {
1478 | "version": "1.5.5",
1479 | "resolved": "https://registry.npmjs.org/xmlhttprequest-ssl/-/xmlhttprequest-ssl-1.5.5.tgz",
1480 | "integrity": "sha1-wodrBhaKrcQOV9l+gRkayPQ5iz4="
1481 | },
1482 | "yarn": {
1483 | "version": "1.21.0",
1484 | "resolved": "https://registry.npmjs.org/yarn/-/yarn-1.21.0.tgz",
1485 | "integrity": "sha512-g9cvrdKXPZlz1eJYpKanQm3eywEmecudeyDkwiVWeswBrpHK3nJFBholkphHF9eNc8y/FNEhSQ8Et5ZAx4XyLw=="
1486 | },
1487 | "yeast": {
1488 | "version": "0.1.2",
1489 | "resolved": "https://registry.npmjs.org/yeast/-/yeast-0.1.2.tgz",
1490 | "integrity": "sha1-AI4G2AlDIMNy28L47XagymyKxBk="
1491 | }
1492 | }
1493 | }
1494 |
--------------------------------------------------------------------------------
/deepknee-backend-broker/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "deepknee-srv",
3 | "version": "1.0.0",
4 | "description": "Broker for DeepKnee platform.",
5 | "repository": "https://github.com/MIPT-Oulu/DeepKnee",
6 | "main": "index.js",
7 | "scripts": {
8 | "test": "echo \"Error: no test specified\" && exit 1"
9 | },
10 | "author": "Aleksei Tiulpin",
11 | "license": "MIT",
12 | "dependencies": {
13 | "async-request": "^1.2.0",
14 | "axios": "^0.19.0",
15 | "console-stamp": "^0.2.9",
16 | "express": "^4.17.1",
17 | "form-data": "^3.0.0",
18 | "lodash": "^4.17.15",
19 | "socket.io": "^2.3.0",
20 | "yarn": "^1.21.0"
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/deepknee-backend-broker/server.js:
--------------------------------------------------------------------------------
1 |
2 | const express = require('express');
3 | const http = require('http');
4 | const socketIO = require('socket.io');
5 | let request = require('async-request');
6 |
7 | require('console-stamp')(console, '[HH:MM:ss.l]');
8 |
9 | const deploy_host = process.env.DEPLOY_HOST || '0.0.0.0';
10 | const port = process.env.DEPLOY_PORT || 4001;
11 |
12 | const kneel_addr = process.env.KNEEL_ADDR || "http://127.0.0.1";
13 | const kneel_port = process.env.KNEEL_PORT || 5000;
14 | const kneel_url_bilateral = `${kneel_addr}:${kneel_port}/kneel/predict/bilateral`;
15 |
16 | const deepknee_addr = process.env.DEEPKNEE_ADDR || `http://127.0.0.1`;
17 | const deepknee_port = process.env.DEEPKNEE_PORT || 5001;
18 | const deepknee_url_bilateral = `${deepknee_addr}:${deepknee_port}/deepknee/predict/bilateral`;
19 |
20 | const app = express();
21 | const server = http.createServer(app);
22 | const io = socketIO(server);
23 |
24 | io.on('connection', async (socket) => {
25 | console.log('User connected');
26 |
27 | socket.on('disconnect', () => {
28 | console.log('User disconnected');
29 | });
30 |
31 | socket.on('dicom_submission', async (dicom_base64) => {
32 | console.log('Got a DICOM file');
33 | socket.emit('dicom_received', {});
34 | socket.emit('processing_by_kneel', {});
35 |
36 | let base64_processed = dicom_base64.file_blob.split(',').pop();
37 |
38 | let response = await request(kneel_url_bilateral, {
39 | method: 'POST',
40 | headers: {'Content-Type': 'application/json'},
41 | data: JSON.stringify({dicom: base64_processed})
42 | });
43 | socket.emit('kneel_finished', {});
44 | console.log('KNEEL finished the inference');
45 |
46 | let landmarks = JSON.parse(response.body);
47 | socket.emit('processing_by_deepknee', {});
48 |
49 | response = await request(deepknee_url_bilateral, {
50 | method: 'POST',
51 | headers: {'Content-Type': 'application/json'},
52 | data: JSON.stringify({dicom: base64_processed, landmarks: landmarks})
53 | });
54 | let deepknee_result = JSON.parse(response.body);
55 | // Before sending the results to the UI, we must prepend 'data:image/png;base64,' to every base64 string
56 | // so that the browser can display them. Besides, the UI expects a slightly different API than the backend,
57 | // so a new JSON message needs to be prepared.
58 | socket.emit('dicom_processed', {
59 | image_1st_raw: 'data:image/png;base64,'+ deepknee_result.R.img,
60 | image_2nd_raw: 'data:image/png;base64,'+ deepknee_result.L.img,
61 | image_1st_heatmap: 'data:image/png;base64,'+ deepknee_result.R.hm,
62 | image_2nd_heatmap: 'data:image/png;base64,'+ deepknee_result.L.hm,
63 | special_1st: 'data:image/png;base64,'+ deepknee_result.R.preds_bar,
64 | special_2nd: 'data:image/png;base64,'+ deepknee_result.L.preds_bar,
65 | });
66 | console.log('The results have been sent back to the UI.');
67 |
68 | });
69 |
70 | });
71 |
72 |
73 | server.listen(port, deploy_host, () => console.log(`Listening on ${deploy_host}:${port}`));
--------------------------------------------------------------------------------
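Note: the two REST hops that server.js chains together can also be replayed outside the broker. Below is a minimal Python sketch of the same flow, assuming KNEEL and DeepKnee listen on their default ports from this repository (5000 and 5001); xray.dcm is a hypothetical input file.

    # Sketch of the broker's two-hop flow: KNEEL localizes the landmarks,
    # then DeepKnee grades both knees using them.
    import base64
    import requests

    with open('xray.dcm', 'rb') as f:
        dicom_b64 = base64.b64encode(f.read()).decode('ascii')

    landmarks = requests.post('http://127.0.0.1:5000/kneel/predict/bilateral',
                              json={'dicom': dicom_b64}).json()
    result = requests.post('http://127.0.0.1:5001/deepknee/predict/bilateral',
                           json={'dicom': dicom_b64, 'landmarks': landmarks}).json()

    # Per knee ('L'/'R'): base64 PNGs under 'img', 'hm' and 'preds_bar',
    # and the KL grade under 'kl' (see ouludeepknee/inference/app.py).
    print(result['L']['kl'], result['R']['kl'])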
/deepknee-frontend/.dockerignore:
--------------------------------------------------------------------------------
1 | build/
2 | node_modules/
--------------------------------------------------------------------------------
/deepknee-frontend/.gitignore:
--------------------------------------------------------------------------------
1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
2 |
3 | # dependencies
4 | /node_modules
5 | /.pnp
6 | .pnp.js
7 | ./build
8 | # testing
9 | /coverage
10 |
11 | # production
12 | /build
13 |
14 | # misc
15 | .DS_Store
16 | .env.local
17 | .env.development.local
18 | .env.test.local
19 | .env.production.local
20 |
21 | npm-debug.log*
22 | yarn-debug.log*
23 | yarn-error.log*
24 |
--------------------------------------------------------------------------------
/deepknee-frontend/README.md:
--------------------------------------------------------------------------------
1 | # React Frontend for DeepKnee
2 |
3 | (c) Egor Panfilov, Aleksei Tiulpin, University of Oulu, 2018-2019
--------------------------------------------------------------------------------
/deepknee-frontend/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "deepknee-frontend",
3 | "version": "0.1.0",
4 | "private": true,
5 | "dependencies": {
6 | "@material-ui/core": "^3.1.0",
7 | "ajv": "^6.5.3",
8 | "bootstrap": "^4.1.1",
9 | "css-loader": "^3.2.1",
10 | "jquery": "^3.3.1",
11 | "npm": "^6.4.1",
12 | "popper.js": "^1.14.4",
13 | "react": "^16.4.2",
14 | "react-dom": "^16.4.2",
15 | "react-scripts": "^2.0.0",
16 | "react-toolbox": "^2.0.0-beta.13",
17 | "socket.io-client": "^2.1.1"
18 | },
19 | "scripts": {
20 | "start": "react-scripts start",
21 | "build": "react-scripts build",
22 | "test": "react-scripts test",
23 | "eject": "react-scripts eject"
24 | },
25 | "eslintConfig": {
26 | "extends": "react-app"
27 | },
28 | "browserslist": {
29 | "production": [
30 | ">0.2%",
31 | "not op_mini all"
32 | ],
33 | "development": [
34 | "last 1 chrome version",
35 | "last 1 firefox version",
36 | "last 1 safari version"
37 | ]
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/deepknee-frontend/public/favicon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/imedslab/DeepKnee/2eee4645f632d04ed5847b35f675421b7dd4dfdb/deepknee-frontend/public/favicon.png
--------------------------------------------------------------------------------
/deepknee-frontend/public/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
11 |
12 |
13 |
20 | DeepKnee
21 |
22 |
23 |
24 | You need to enable JavaScript to run this app.
25 |
26 |
27 |
37 |
38 |
39 |
40 |
--------------------------------------------------------------------------------
/deepknee-frontend/public/manifest.json:
--------------------------------------------------------------------------------
1 | {
2 | "short_name": "React App",
3 | "name": "Create React App Sample",
4 | "icons": [
5 | {
6 | "src": "favicon.png",
7 | "sizes": "64x64 32x32 24x24 16x16",
8 | "type": "image/x-icon"
9 | }
10 | ],
11 | "start_url": "./index.html",
12 | "display": "standalone",
13 | "theme_color": "#000000",
14 | "background_color": "#ffffff"
15 | }
16 |
--------------------------------------------------------------------------------
/deepknee-frontend/src/App.css:
--------------------------------------------------------------------------------
1 | .App {
2 | text-align: center;
3 | }
4 |
5 | .App-logo {
6 | animation: App-logo-spin infinite 20s linear;
7 | height: 80px;
8 | }
9 |
10 | .App-header {
11 | background-color: #222;
12 | height: 150px;
13 | padding: 20px;
14 | color: white;
15 | }
16 |
17 | .App-title {
18 | font-size: 1.5em;
19 | }
20 |
21 | .App-intro {
22 | font-size: large;
23 | }
24 |
25 | @keyframes App-logo-spin {
26 | from { transform: rotate(0deg); }
27 | to { transform: rotate(360deg); }
28 | }
29 |
--------------------------------------------------------------------------------
/deepknee-frontend/src/App.js:
--------------------------------------------------------------------------------
1 | import React, {Component} from 'react';
2 | import SIOClient from './SIOClient';
3 | import FileUploader from './FileUploader';
4 | import ProgressCircularBar from './ProgressCircularBar';
5 | import Footer from './Footer';
6 |
7 | class App extends Component {
8 | constructor(props) {
9 | super(props);
10 | this.state = {
11 | server_connected: false,
12 | server_status: null,
13 | server_response: null,
14 |
15 | file_name: null,
16 | file_blob: null,
17 |
18 | image_1st_raw: null,
19 | image_2nd_raw: null,
20 | image_1st_heatmap: null,
21 | image_2nd_heatmap: null,
22 | special_1st: null,
23 | special_2nd: null,
24 | };
25 |
26 | this.handleFileSubmission = this.handleFileSubmission.bind(this);
27 |
28 | this.handleDicomSent = this.handleDicomSent.bind(this);
29 | this.handleDicomReceived = this.handleDicomReceived.bind(this);
30 | this.handleDicomProcessed = this.handleDicomProcessed.bind(this);
31 | this.handleServerConnected = this.handleServerConnected.bind(this);
32 | this.handleServerDisconnected = this.handleServerDisconnected.bind(this);
33 | }
34 |
35 | handleFileSubmission(data) {
36 | this.setState({
37 | is_waiting_response: true,
38 |
39 | file_name: data.file_name,
40 | file_blob: data.file_blob
41 | });
42 | }
43 |
44 | handleDicomSent(data) {
45 | this.setState({
46 | server_status: "dicom_sent",
47 | });
48 | console.log("Sent");
49 | }
50 |
51 | handleDicomReceived(data) {
52 | this.setState({
53 | server_status: "dicom_received",
54 | });
55 | console.log("Received");
56 | }
57 |
58 | handleDicomProcessed(data) {
59 | this.setState({
60 | server_status: "dicom_processed",
61 | server_response: data.server_response,
62 |
63 | image_1st_raw: data.image_1st_raw,
64 | image_2nd_raw: data.image_2nd_raw,
65 | image_1st_heatmap: data.image_1st_heatmap,
66 | image_2nd_heatmap: data.image_2nd_heatmap,
67 | special_1st: data.special_1st,
68 | special_2nd: data.special_2nd,
69 | });
70 | console.log("Processed");
71 | }
72 |
73 | handleServerConnected() {
74 | this.setState({
75 | server_connected: true,
76 | server_status: 'standby',
77 | });
78 | }
79 |
80 | handleServerDisconnected() {
81 | this.setState({
82 | server_connected: false,
83 | server_status: 'standby',
84 | });
85 | }
86 |
87 | render() {
88 | const state = this.state;
89 | return (
90 |
91 |
102 |
103 |
106 |
107 |
108 |
109 | {state.server_status === "dicom_sent" &&
110 |
}
111 |
112 | {state.server_status === "dicom_received" &&
113 |
}
114 |
115 | {state.server_status === "dicom_processed" &&
116 |
117 |
125 |
126 |
127 |
128 |
129 |
130 |
131 |
132 |
133 |
134 |
135 |
136 |
137 |
138 |
139 |
140 |
141 |
142 |
143 |
144 |
145 |
146 |
147 |
148 | }
149 |
150 |
151 |
152 | );
153 | }
154 | }
155 |
156 | export default App;
157 |
--------------------------------------------------------------------------------
/deepknee-frontend/src/App.test.js:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import ReactDOM from 'react-dom';
3 | import App from './App';
4 |
5 | it('renders without crashing', () => {
6 | const div = document.createElement('div');
7 |   ReactDOM.render(<App />, div);
8 | ReactDOM.unmountComponentAtNode(div);
9 | });
10 |
--------------------------------------------------------------------------------
/deepknee-frontend/src/FileUploader.js:
--------------------------------------------------------------------------------
1 | import React, {Component} from "react";
2 |
3 | class FileUploader extends Component {
4 | constructor(props) {
5 | super(props);
6 | this.state = {file_name: ''};
7 |
8 | this.handleFileChosen = this.handleFileChosen.bind(this);
9 | this.handleFileSubmitted = this.handleFileSubmitted.bind(this);
10 | }
11 |
12 | extractFilename = function (fullname) {
13 | return fullname.split('\\').pop().split('/').pop();
14 | };
15 |
16 | handleFileChosen(event) {
17 | const file_name = this.extractFilename(event.target.value);
18 | this.setState({file_name: file_name});
19 | }
20 |
21 | handleFileSubmitted(event) {
22 | let file_input = document.querySelector('input[id=inputGroupFile]');
23 | let file = file_input.files[0];
24 |
25 | let reader = new FileReader();
26 |
27 | reader.onloadend = () => {
28 | let blob = {
29 | file_name: this.state.file_name,
30 | file_blob: reader.result,
31 | };
32 | console.log('File loaded');
33 | this.props.onFileSubmission(blob);
34 |
35 | file_input.value = "";
36 | this.setState({file_name: ''});
37 | };
38 | // Read file as base64 string
39 | reader.readAsDataURL(file);
40 | }
41 |
42 | render() {
43 | return (
44 |
45 |
46 |
48 | Submit DICOM
49 |
50 |
51 |
52 |
54 |
55 | {this.state.file_name ? this.state.file_name : Choose file }
56 |
57 |
58 |
59 | )};
60 | }
61 |
62 | export default FileUploader;
63 |
--------------------------------------------------------------------------------
/deepknee-frontend/src/Footer.js:
--------------------------------------------------------------------------------
1 | import {Component} from "react";
2 | import React from "react";
3 |
4 |
5 | class Footer extends Component {
6 | render() {
7 | return (
8 |
9 | © 2018-2019 Aleksei Tiulpin & Egor Panfilov.
10 | | Saarakkala's group | University of Oulu. This software is not cleared for diagnostic purposes.
11 |
12 | );
13 | }
14 | }
15 |
16 | export default Footer;
--------------------------------------------------------------------------------
/deepknee-frontend/src/ProgressCircularBar.js:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import PropTypes from 'prop-types';
3 | import { withStyles } from '@material-ui/core/styles';
4 | import CircularProgress from '@material-ui/core/CircularProgress';
5 | import Paper from '@material-ui/core/Paper';
6 | import green from '@material-ui/core/colors/green';
7 | import blue from '@material-ui/core/colors/lightBlue';
8 | import purple from '@material-ui/core/colors/purple';
9 |
10 |
11 | const styles = theme => ({
12 | progress: {
13 | margin: theme.spacing.unit * 2,
14 | },
15 | });
16 |
17 | const colors = {
18 | "blue": blue[500],
19 | "green": green[500],
20 | "purple": purple[500]
21 | };
22 |
23 | function ProgressCircularBar(props) {
24 | const { classes } = props;
25 |
26 | return (
27 |
36 | );
37 | }
38 |
39 | ProgressCircularBar.propTypes = {
40 | classes: PropTypes.object.isRequired,
41 | };
42 |
43 | export default withStyles(styles)(ProgressCircularBar);
--------------------------------------------------------------------------------
/deepknee-frontend/src/SIOClient.js:
--------------------------------------------------------------------------------
1 | import React, {Component} from "react";
2 | import openSocket from 'socket.io-client';
3 |
4 | class SIOClient extends Component {
5 | constructor(props) {
6 | super(props);
7 | const port = process.env.REACT_APP_BROKER_PORT || "4001";
8 | const host = process.env.REACT_APP_BROKER_ADDR || "http://127.0.0.1";
9 |
10 | this.state = {
11 | endpoint: `${host}:${port}/`
12 | };
13 | this.socket = openSocket(
14 | this.state.endpoint,
15 | {path: `/socket.io`,
16 | timeout: 120000,
17 | pingTimeout: 120000,
18 | pingInterval: 120000,
19 | }
20 | );
21 | }
22 |
23 | componentDidMount() {
24 | this.socket.on("dicom_received", data => this.props.onDicomReceived(data));
25 | this.socket.on("dicom_processed", data => this.props.onDicomProcessed(data));
26 | this.socket.on("connect", () => this.props.onServerConnected());
27 | this.socket.on("disconnect", () => this.props.onServerDisconnected());
28 | }
29 |
30 | componentDidUpdate(prevProps, prevState) {
31 | // Input file updated
32 | if (this.props.file_blob !== prevProps.file_blob) {
33 | let blob = {
34 | file_name: this.props.file_name,
35 | file_blob: this.props.file_blob,
36 | };
37 |
38 | this.socket.emit('dicom_submission', blob);
39 | this.props.onDicomSent();
40 | }
41 | }
42 |
43 | render() {
44 | return (
45 | this.props.connected ? null :
46 |
47 | Server is not connected
48 |
49 | );
50 | }
51 | }
52 |
53 | export default SIOClient;
54 |
--------------------------------------------------------------------------------
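The Socket.IO contract implemented above (emit 'dicom_submission' with a data-URL blob; listen for 'dicom_received' and 'dicom_processed') can also be exercised from a script. A minimal sketch with the python-socketio client, assuming a client version compatible with the Socket.IO 2.x server used here (e.g. python-socketio 4.x); xray.dcm is a hypothetical input file.

    import base64
    import socketio

    sio = socketio.Client()

    @sio.on('dicom_processed')
    def on_processed(data):
        # data carries the 'data:image/png;base64,...' strings prepared by
        # server.js: image_{1st,2nd}_raw, image_{1st,2nd}_heatmap, special_{1st,2nd}.
        print(sorted(data.keys()))
        sio.disconnect()

    sio.connect('http://127.0.0.1:4001')
    with open('xray.dcm', 'rb') as f:
        b64 = base64.b64encode(f.read()).decode('ascii')
    # The broker keeps only what follows the comma, so a data-URL prefix is expected.
    sio.emit('dicom_submission', {'file_name': 'xray.dcm',
                                  'file_blob': 'data:application/dicom;base64,' + b64})
    sio.wait()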
/deepknee-frontend/src/index.js:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import ReactDOM from 'react-dom';
3 | import App from './App';
4 | import registerServiceWorker from './registerServiceWorker';
5 | import 'bootstrap/dist/css/bootstrap.css';
6 |
7 |
8 | ReactDOM.render(
9 |     <App />,
10 | document.getElementById('root')
11 | );
12 | registerServiceWorker();
13 |
--------------------------------------------------------------------------------
/deepknee-frontend/src/registerServiceWorker.js:
--------------------------------------------------------------------------------
1 | // In production, we register a service worker to serve assets from local cache.
2 |
3 | // This lets the app load faster on subsequent visits in production, and gives
4 | // it offline capabilities. However, it also means that developers (and users)
5 | // will only see deployed updates on the "N+1" visit to a page, since previously
6 | // cached resources are updated in the background.
7 |
8 | // To learn more about the benefits of this model, read https://goo.gl/KwvDNy.
9 | // This link also includes instructions on opting out of this behavior.
10 |
11 | const isLocalhost = Boolean(
12 | window.location.hostname === 'localhost' ||
13 | // [::1] is the IPv6 localhost address.
14 | window.location.hostname === '[::1]' ||
15 | // 127.0.0.1/8 is considered localhost for IPv4.
16 | window.location.hostname.match(
17 | /^127(?:\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}$/
18 | )
19 | );
20 |
21 | export default function register() {
22 | if (process.env.NODE_ENV === 'production' && 'serviceWorker' in navigator) {
23 | // The URL constructor is available in all browsers that support SW.
24 | const publicUrl = new URL(process.env.PUBLIC_URL, window.location);
25 | if (publicUrl.origin !== window.location.origin) {
26 | // Our service worker won't work if PUBLIC_URL is on a different origin
27 | // from what our page is served on. This might happen if a CDN is used to
28 | // serve assets; see https://github.com/facebookincubator/create-react-app/issues/2374
29 | return;
30 | }
31 |
32 | window.addEventListener('load', () => {
33 | const swUrl = `${process.env.PUBLIC_URL}/service-worker.js`;
34 |
35 | if (isLocalhost) {
36 |         // This is running on localhost. Let's check if a service worker still exists or not.
37 | checkValidServiceWorker(swUrl);
38 |
39 | // Add some additional logging to localhost, pointing developers to the
40 | // service worker/PWA documentation.
41 | navigator.serviceWorker.ready.then(() => {
42 | console.log(
43 | 'This web app is being served cache-first by a service ' +
44 | 'worker. To learn more, visit https://goo.gl/SC7cgQ'
45 | );
46 | });
47 | } else {
48 | // Is not local host. Just register service worker
49 | registerValidSW(swUrl);
50 | }
51 | });
52 | }
53 | }
54 |
55 | function registerValidSW(swUrl) {
56 | navigator.serviceWorker
57 | .register(swUrl)
58 | .then(registration => {
59 | registration.onupdatefound = () => {
60 | const installingWorker = registration.installing;
61 | installingWorker.onstatechange = () => {
62 | if (installingWorker.state === 'installed') {
63 | if (navigator.serviceWorker.controller) {
64 | // At this point, the old content will have been purged and
65 | // the fresh content will have been added to the cache.
66 | // It's the perfect time to display a "New content is
67 | // available; please refresh." message in your web app.
68 | console.log('New content is available; please refresh.');
69 | } else {
70 | // At this point, everything has been precached.
71 | // It's the perfect time to display a
72 | // "Content is cached for offline use." message.
73 | console.log('Content is cached for offline use.');
74 | }
75 | }
76 | };
77 | };
78 | })
79 | .catch(error => {
80 | console.error('Error during service worker registration:', error);
81 | });
82 | }
83 |
84 | function checkValidServiceWorker(swUrl) {
85 |   // Check if the service worker can be found. If it can't, reload the page.
86 | fetch(swUrl)
87 | .then(response => {
88 | // Ensure service worker exists, and that we really are getting a JS file.
89 | if (
90 | response.status === 404 ||
91 | response.headers.get('content-type').indexOf('javascript') === -1
92 | ) {
93 | // No service worker found. Probably a different app. Reload the page.
94 | navigator.serviceWorker.ready.then(registration => {
95 | registration.unregister().then(() => {
96 | window.location.reload();
97 | });
98 | });
99 | } else {
100 | // Service worker found. Proceed as normal.
101 | registerValidSW(swUrl);
102 | }
103 | })
104 | .catch(() => {
105 | console.log(
106 | 'No internet connection found. App is running in offline mode.'
107 | );
108 | });
109 | }
110 |
111 | export function unregister() {
112 | if ('serviceWorker' in navigator) {
113 | navigator.serviceWorker.ready.then(registration => {
114 | registration.unregister();
115 | });
116 | }
117 | }
118 |
--------------------------------------------------------------------------------
/deploy.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | if [ ! -d "snapshots_release_kneel" ]; then
4 | wget http://mipt-ml.oulu.fi/models/KNEEL/snapshots_release.tar.xz
5 | tar -xvf snapshots_release.tar.xz
6 | rm snapshots_release.tar.xz
7 | mv snapshots_release snapshots_release_kneel
8 | fi
9 |
10 | if [ ! -d "snapshots_knee_grading" ]; then
11 | sh ./fetch_snapshots.sh
12 | fi
13 | mkdir -p logs
14 |
15 | docker-compose -f ./docker/docker-compose-$1.yml down
16 | docker-compose -f ./docker/docker-compose-$1.yml build
17 | docker-compose -f ./docker/docker-compose-$1.yml up --remove-orphans
--------------------------------------------------------------------------------
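Usage note: the script takes the compose flavour as its first argument, matching the files under docker/ (docker-compose-cpu.yml and docker-compose-gpu.yml), i.e. ./deploy.sh cpu or ./deploy.sh gpu.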
/docker/BrokerDockerfile:
--------------------------------------------------------------------------------
1 | FROM node:10
2 |
3 | WORKDIR /usr/src/app
4 | COPY package*.json ./
5 |
6 | RUN npm install
7 | COPY . .
--------------------------------------------------------------------------------
/docker/Dockerfile.cpu:
--------------------------------------------------------------------------------
1 | # KNEEL and DeepKnee inference packages share the same docker image.
2 | FROM miptmloulu/kneel:cpu
3 |
4 | MAINTAINER Aleksei Tiulpin, University of Oulu, Version 1.0
5 |
6 | RUN pip install pynetdicom
7 |
8 | RUN mkdir -p /opt/pkg-deepknee/
9 | COPY . /opt/pkg-deepknee/
10 | RUN pip install /opt/pkg-deepknee/
--------------------------------------------------------------------------------
/docker/Dockerfile.gpu:
--------------------------------------------------------------------------------
1 | # KNEEL and DeepKnee inference packages share the same docker image.
2 | FROM miptmloulu/kneel:gpu
3 |
4 | MAINTAINER Aleksei Tiulpin, University of Oulu, Version 1.0
5 |
6 | RUN pip install pynetdicom
7 |
8 | RUN mkdir -p /opt/pkg-deepknee/
9 | COPY . /opt/pkg-deepknee/
10 | RUN pip install /opt/pkg-deepknee/
--------------------------------------------------------------------------------
/docker/UIDockerfile:
--------------------------------------------------------------------------------
1 | FROM node:10
2 |
3 | ARG REACT_APP_BROKER_PORT
4 | ARG REACT_APP_BROKER_ADDR
5 |
6 | ENV REACT_APP_BROKER_PORT $REACT_APP_BROKER_PORT
7 | ENV REACT_APP_BROKER_ADDR $REACT_APP_BROKER_ADDR
8 |
9 | WORKDIR /usr/src/app
10 | COPY . .
11 |
12 | RUN npm install -g serve
13 | RUN npm install
14 | RUN npm run build
15 |
16 |
--------------------------------------------------------------------------------
/docker/docker-compose-cpu.yml:
--------------------------------------------------------------------------------
1 | version: "2.3"
2 | services:
3 | kneel:
4 | image: "miptmloulu/kneel:cpu"
5 | ports:
6 | - "5000:5000"
7 | container_name: kneel
8 | volumes:
9 | - type: bind
10 | source: ../snapshots_release_kneel # The snapshots are stored in the root directory
11 | target: /snapshots/
12 | read_only: true
13 | - type: bind
14 | source: ../logs
15 | target: /logs/
16 | entrypoint: ["python", "-u", "-m", "kneel.inference.app",
17 | "--lc_snapshot_path", "/snapshots/lext-devbox_2019_07_14_16_04_41",
18 | "--hc_snapshot_path", "/snapshots/lext-devbox_2019_07_14_19_25_40",
19 | "--refine", "True", "--mean_std_path", "/snapshots/mean_std.npy",
20 | "--deploy", "True", "--device", "cpu", "--port", "5000", "--logs", "/logs/kneel-cpu.log"]
21 | deepknee-backend:
22 | depends_on:
23 | - kneel
24 | image: "miptmloulu/deepknee:cpu"
25 | ports:
26 | - "5001:5001"
27 | container_name: deepknee
28 | volumes:
29 | - type: bind
30 | source: ../snapshots_knee_grading/ # The snapshots are stored in the root directory
31 | target: /snapshots/
32 | read_only: true
33 | - type: bind
34 | source: ../logs
35 | target: /logs/
36 | environment:
37 | - KNEEL_ADDR=http://kneel:5000
38 | entrypoint: ["python", "-m", "ouludeepknee.inference.app",
39 | "--snapshots_path", "/snapshots/", "--device", "cpu", "--deploy", "True",
40 | "--port", "5001",
41 | "--logs", "/logs/deepknee-cpu.log"]
42 | orthanc-pacs:
43 | depends_on:
44 | - kneel
45 | - deepknee-backend
46 | image: "jodogne/orthanc"
47 | container_name: orthanc-pacs
48 | ports:
49 | - "6000:4242"
50 | - "6001:8042"
51 | volumes:
52 | - type: bind
53 | source: ../pacs-integration/orthanc.json
54 | target: /etc/orthanc/orthanc.json
55 | dicom-router:
56 | depends_on:
57 | kneel:
58 | condition: service_started
59 | deepknee-backend:
60 | condition: service_started
61 | orthanc-pacs:
62 | condition: service_started
63 | image: "miptmloulu/deepknee:cpu"
64 | container_name: dicom-router
65 | volumes:
66 | - type: bind
67 | source: ../pacs-integration/change_polling.py
68 | target: /opt/change_polling.py
69 | entrypoint: ["python", "-u", "/opt/change_polling.py",
70 | "--deepknee_addr", "http://deepknee",
71 | "--deepknee_port", "5001",
72 | "--orthanc_addr", "http://orthanc-pacs",
73 | "--orthanc_http_port", "8042",
74 | "--orthanc_dicom_port", "4242",
75 | "--remote_pacs_addr", "orthanc-pacs",
76 | "--remote_pacs_port", "4242"]
77 | backend-broker:
78 | depends_on:
79 | - kneel
80 | - deepknee-backend
81 | image: "miptmloulu/deepknee:broker"
82 | container_name: backend-broker
83 | ports:
84 | - "5002:5002"
85 | environment:
86 | - DEPLOY_HOST=0.0.0.0
87 | - DEPLOY_PORT=5002
88 | - KNEEL_ADDR=http://kneel
89 | - KNEEL_PORT=5000
90 | - DEEPKNEE_ADDR=http://deepknee-backend
91 | - DEEPKNEE_PORT=5001
92 | entrypoint: ["node", "/usr/src/app/server.js"]
93 | ui:
94 | depends_on:
95 | - kneel
96 | - deepknee-backend
97 | - backend-broker
98 | image: "miptmloulu/deepknee:ui"
99 | container_name: ui
100 | ports:
101 | - "5003:5003"
102 | working_dir: /usr/src/app/
103 | entrypoint: ["serve", "-l", "5003", "-s", "/usr/src/app/build"]
104 |
--------------------------------------------------------------------------------
/docker/docker-compose-gpu.yml:
--------------------------------------------------------------------------------
1 | version: "2.3"
2 | services:
3 | kneel:
4 | runtime: nvidia
5 | image: "miptmloulu/kneel:gpu"
6 | ports:
7 | - "5000:5000"
8 | container_name: kneel
9 | volumes:
10 | - type: bind
11 | source: ../snapshots_release_kneel # The snapshots are stored in the root directory
12 | target: /snapshots/
13 | read_only: true
14 | - type: bind
15 | source: ../logs
16 | target: /logs/
17 | entrypoint: ["python", "-u", "-m", "kneel.inference.app",
18 | "--lc_snapshot_path", "/snapshots/lext-devbox_2019_07_14_16_04_41",
19 | "--hc_snapshot_path", "/snapshots/lext-devbox_2019_07_14_19_25_40",
20 | "--refine", "True", "--mean_std_path", "/snapshots/mean_std.npy",
21 | "--deploy", "True", "--device", "cuda",
22 | "--port", "5000", "--logs", "/logs/kneel-gpu.log"]
23 | deepknee-backend:
24 | runtime: nvidia
25 | depends_on:
26 | - kneel
27 | image: "miptmloulu/deepknee:gpu"
28 | ports:
29 | - "5001:5001"
30 | container_name: deepknee
31 | volumes:
32 | - type: bind
33 | source: ../snapshots_knee_grading/ # The snapshots are stored in the root directory
34 | target: /snapshots/
35 | read_only: true
36 | - type: bind
37 | source: ../logs
38 | target: /logs/
39 | environment:
40 | - KNEEL_ADDR=http://kneel:5000
41 | entrypoint: ["python", "-m", "ouludeepknee.inference.app",
42 | "--snapshots_path", "/snapshots/",
43 | "--device", "cuda", "--deploy", "True",
44 | "--port", "5001", "--deploy_addr", "0.0.0.0",
45 | "--logs", "/logs/deepknee-gpu.log"]
46 | orthanc-pacs:
47 | depends_on:
48 | - kneel
49 | - deepknee-backend
50 | image: "jodogne/orthanc"
51 | container_name: orthanc-pacs
52 | ports:
53 | - "6000:4242"
54 | - "6001:8042"
55 | volumes:
56 | - type: bind
57 | source: ../pacs-integration/orthanc.json
58 | target: /etc/orthanc/orthanc.json
59 | dicom-router:
60 | depends_on:
61 | kneel:
62 | condition: service_started
63 | deepknee-backend:
64 | condition: service_started
65 | orthanc-pacs:
66 | condition: service_started
67 | image: "miptmloulu/deepknee:cpu"
68 | container_name: dicom-router
69 | volumes:
70 | - type: bind
71 | source: ../pacs-integration/change_polling.py
72 | target: /opt/change_polling.py
73 | entrypoint: ["python", "-u", "/opt/change_polling.py",
74 | "--deepknee_addr", "http://deepknee",
75 | "--deepknee_port", "5001",
76 | "--orthanc_addr", "http://orthanc-pacs",
77 | "--orthanc_http_port", "8042",
78 | "--orthanc_dicom_port", "4242",
79 | "--remote_pacs_addr", "orthanc-pacs",
80 | "--remote_pacs_port", "4242"]
81 | backend-broker:
82 | depends_on:
83 | - kneel
84 | - deepknee-backend
85 | image: "miptmloulu/deepknee:broker"
86 | container_name: backend-broker
87 | ports:
88 | - "5002:5002"
89 | environment:
90 | - DEPLOY_HOST=0.0.0.0
91 | - DEPLOY_PORT=5002
92 | - KNEEL_ADDR=http://kneel
93 | - KNEEL_PORT=5000
94 | - DEEPKNEE_ADDR=http://deepknee-backend
95 | - DEEPKNEE_PORT=5001
96 | entrypoint: ["node", "/usr/src/app/server.js"]
97 | ui:
98 | depends_on:
99 | - kneel
100 | - deepknee-backend
101 | - backend-broker
102 | image: "miptmloulu/deepknee:ui"
103 | container_name: ui
104 | ports:
105 | - "5003:5003"
106 | entrypoint: ["serve", "-l", "5003", "-s", "/usr/src/app/build"]
107 |
--------------------------------------------------------------------------------
/fetch_snapshots.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | wget http://mipt-ml.oulu.fi/models/DeepKnee/deepknee_snapshots.tar.gz
4 | tar -xvf deepknee_snapshots.tar.gz
5 | rm deepknee_snapshots.tar.gz*
6 |
--------------------------------------------------------------------------------
/ouludeepknee/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/imedslab/DeepKnee/2eee4645f632d04ed5847b35f675421b7dd4dfdb/ouludeepknee/__init__.py
--------------------------------------------------------------------------------
/ouludeepknee/data/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/imedslab/DeepKnee/2eee4645f632d04ed5847b35f675421b7dd4dfdb/ouludeepknee/data/__init__.py
--------------------------------------------------------------------------------
/ouludeepknee/data/utils.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | import cv2
4 | import numpy as np
5 | import pydicom as dicom
6 |
7 |
8 | def isfloat(value):
9 | try:
10 | float(value)
11 | return True
12 | except ValueError:
13 | return False
14 |
15 |
16 | def dicom_img_spacing(data):
17 | spacing = None
18 |
19 | for spacing_param in ["Imager Pixel Spacing", "ImagerPixelSpacing", "PixelSpacing", "Pixel Spacing"]:
20 | if hasattr(data, spacing_param):
21 | spacing_attr_value = getattr(data, spacing_param)
22 | if isinstance(spacing_attr_value, str):
23 | if isfloat(spacing_attr_value):
24 | spacing = float(spacing_attr_value)
25 | else:
26 | spacing = float(spacing_attr_value.split()[0])
27 | elif isinstance(spacing_attr_value, dicom.multival.MultiValue):
28 | if len(spacing_attr_value) != 2:
29 | return None
30 | spacing = list(map(lambda x: float(x), spacing_attr_value))[0]
31 | elif isinstance(spacing_attr_value, float):
32 | spacing = spacing_attr_value
33 | else:
34 | continue
35 |
36 | if spacing is not None:
37 | break
38 | return spacing
39 |
40 |
41 | def read_dicom(filename, spacing_none_mode=True):
42 | """
43 | Reads a dicom file
44 | Parameters
45 | ----------
46 | filename : str or pydicom.dataset.FileDataset
47 | Full path to the image
48 |     spacing_none_mode: bool
49 |         If True (default), a UserWarning is raised when the spacing info cannot be read.
50 |         When False, the returned spacing is simply None in that case.
51 | Returns
52 | -------
53 | out : tuple
54 |         Image itself as float64, spacing, and the DICOM metadata
55 | """
56 |
57 | if isinstance(filename, str):
58 | try:
59 | data = dicom.read_file(filename)
60 |         except Exception:
61 |             raise UserWarning('Failed to read the dicom.')
62 |
63 | elif isinstance(filename, dicom.dataset.FileDataset):
64 | data = filename
65 | else:
66 |         raise TypeError('Unknown type of the filename. Must be either a string or pydicom.dataset.FileDataset.')
67 |
68 | img = np.frombuffer(data.PixelData, dtype=np.uint16).copy().astype(np.float64)
69 |
70 | if data.PhotometricInterpretation == 'MONOCHROME1':
71 | img = img.max() - img
72 | try:
73 | img = img.reshape((data.Rows, data.Columns))
74 |     except Exception:
75 |         raise UserWarning('Could not reshape the image while reading!')
76 |
77 |
78 | spacing = dicom_img_spacing(data)
79 | if spacing_none_mode:
80 | if spacing is not None:
81 | return img, spacing, data
82 | else:
83 |             raise UserWarning('Could not read the spacing information!')
84 |
85 |
86 | return img, spacing, data
87 |
88 |
89 | def process_xray(img, cut_min=5, cut_max=99, multiplier=255):
90 | """
91 | This function changes the histogram of the image by doing global contrast normalization
92 |
93 | Parameters
94 | ----------
95 | img : array_like
96 | Image
97 | cut_min : int
98 | Low percentile to trim
99 | cut_max : int
100 |         High percentile to trim
101 | multiplier : int
102 | Multiplier to apply after global contrast normalization
103 |
104 | Returns
105 | -------
106 | array_like
107 | Returns a processed image
108 |
109 | """
110 |
111 | img = img.copy()
112 | lim1, lim2 = np.percentile(img, [cut_min, cut_max])
113 | img[img < lim1] = lim1
114 | img[img > lim2] = lim2
115 |
116 | img -= lim1
117 | img /= img.max()
118 | img *= multiplier
119 |
120 | return img
121 |
--------------------------------------------------------------------------------
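To make the trimming in process_xray concrete, here is a self-contained sketch on synthetic data (not part of the package): intensities below the cut_min-th and above the cut_max-th percentile are clipped, and the remaining range is rescaled to [0, multiplier].

    import numpy as np

    from ouludeepknee.data.utils import process_xray

    rng = np.random.default_rng(0)
    img = rng.integers(0, 65535, size=(350, 350)).astype(np.float64)

    out = process_xray(img, cut_min=5, cut_max=99, multiplier=255)
    print(out.min(), out.max())  # 0.0 and 255.0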
/ouludeepknee/inference/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/imedslab/DeepKnee/2eee4645f632d04ed5847b35f675421b7dd4dfdb/ouludeepknee/inference/__init__.py
--------------------------------------------------------------------------------
/ouludeepknee/inference/app.py:
--------------------------------------------------------------------------------
1 | """
2 | This micro-service takes a DICOM (bilateral X-ray) or PNG image (single knee, localized)
3 | and makes KL grade predictions with GradCAM.
4 |
5 | (c) Aleksei Tiulpin, University of Oulu, 2019
6 | """
7 | import argparse
8 | import base64
9 | import glob
10 | import logging
11 | import os
12 |
13 | import cv2
14 | from flask import Flask, request
15 | from flask import jsonify, make_response
16 | from gevent.pywsgi import WSGIServer
17 |
18 | from ouludeepknee.inference.pipeline import KneeNetEnsemble
19 |
20 |
21 | def numpy2base64(img):
22 | _, buffer = cv2.imencode('.png', img)
23 | return base64.b64encode(buffer).decode('ascii')
24 |
25 |
26 | app = Flask(__name__)
27 |
28 |
29 | def call_pipeline(dicom_raw, landmarks=None):
30 | # Localization of ROIs and their conversion into 8-bit 140x140mm images
31 | res_bilateral = net.predict_draw_bilateral(dicom_raw, args.sizemm, args.pad,
32 | kneel_addr=os.environ['KNEEL_ADDR'], landmarks=landmarks)
33 | if res_bilateral is None:
34 | return make_response(jsonify({'msg': 'Could not localize the landmarks'}), 400)
35 |
36 | img_l, img_hm_l, preds_bar_l, pred_l, img_r, img_hm_r, preds_bar_r, pred_r = res_bilateral
37 |
38 | response = {'L': {'img': numpy2base64(img_l),
39 | 'hm': numpy2base64(img_hm_l),
40 | 'preds_bar': numpy2base64(preds_bar_l),
41 | 'kl': str(pred_l)},
42 | 'R': {'img': numpy2base64(img_r),
43 | 'hm': numpy2base64(img_hm_r),
44 | 'preds_bar': numpy2base64(preds_bar_r),
45 | 'kl': str(pred_r)}}
46 | return response
47 |
48 |
49 | @app.route('/deepknee/predict/bilateral', methods=['POST'])
50 | def analyze_knee():
51 | logger = logging.getLogger(f'deepknee-backend:app')
52 | request_json = request.get_json(force=True)
53 | dicom_base64 = request_json['dicom']
54 | dicom_binary = base64.b64decode(dicom_base64)
55 | if 'landmarks' in request_json:
56 | landmarks = request_json['landmarks']
57 | else:
58 | landmarks = None
59 | logger.log(logging.INFO, f'Received DICOM')
60 |
61 |     if os.environ.get('KNEEL_ADDR', '') == '':
62 | return make_response(jsonify({'msg': 'KNEEL microservice address is not defined'}), 500)
63 |
64 | response = call_pipeline(dicom_binary, landmarks)
65 |
66 | return make_response(response, 200)
67 |
68 |
69 | if __name__ == '__main__':
70 | parser = argparse.ArgumentParser()
71 | parser.add_argument('--snapshots_path', default='')
72 | parser.add_argument('--deploy_addr', default='0.0.0.0')
73 | parser.add_argument('--device', default='cuda')
74 | parser.add_argument('--port', type=int, default=5001)
75 | parser.add_argument('--sizemm', type=int, default=140)
76 | parser.add_argument('--pad', type=int, default=300)
77 |     parser.add_argument('--deploy', type=lambda s: s.lower() == 'true', default=False)
78 | parser.add_argument('--logs', type=str, default='/tmp/deepknee.log')
79 | args = parser.parse_args()
80 | logging.basicConfig(filename=args.logs, filemode='a',
81 | format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.DEBUG)
82 |
83 | logger = logging.getLogger(f'deepknee-backend:app')
84 |
85 | net = KneeNetEnsemble(glob.glob(os.path.join(args.snapshots_path, "*", '*.pth')),
86 | mean_std_path=os.path.join(args.snapshots_path, 'mean_std.npy'),
87 | device=args.device)
88 |
89 | if args.deploy:
90 | http_server = WSGIServer((args.deploy_addr, args.port), app, log=logger)
91 | logger.log(logging.INFO, f'Starting WSGI server')
92 | http_server.serve_forever()
93 | else:
94 | app.run(host=args.deploy_addr, port=args.port, debug=True)
95 |
--------------------------------------------------------------------------------
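Since 'landmarks' is optional in the request JSON above, the endpoint can also be called with the DICOM alone, in which case the pipeline queries the KNEEL service itself via KNEEL_ADDR. A minimal sketch (xray.dcm is a hypothetical input file):

    import base64
    import requests

    with open('xray.dcm', 'rb') as f:
        payload = {'dicom': base64.b64encode(f.read()).decode('ascii')}

    r = requests.post('http://127.0.0.1:5001/deepknee/predict/bilateral', json=payload)
    r.raise_for_status()
    print({side: r.json()[side]['kl'] for side in ('L', 'R')})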
/ouludeepknee/inference/pipeline.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import glob
3 | import io
4 | import os
5 | from copy import deepcopy
6 |
7 | import matplotlib
8 |
9 | matplotlib.use('Agg')
10 |
11 | import cv2
12 | import matplotlib.pyplot as plt
13 | import numpy as np
14 | import torch
15 | import torch.nn as nn
16 | import torch.nn.functional as F
17 | import torchvision.transforms as transforms
18 | from PIL import Image
19 | from sklearn.preprocessing import OneHotEncoder
20 | from tqdm import tqdm
21 | import requests
22 | from pydicom import dcmread
23 | from pydicom.filebase import DicomBytesIO
24 | import logging
25 | import base64
26 |
27 | from ouludeepknee.data.utils import read_dicom, process_xray
28 |
29 | from ouludeepknee.inference.utils import fuse_bn_recursively
30 | from ouludeepknee.train.augmentation import CenterCrop
31 | from ouludeepknee.train.dataset import get_pair
32 | from ouludeepknee.train.model import KneeNet
33 |
34 |
35 | def smooth_edge_mask(s, w):
36 | res = np.zeros((s + w * 2, s + w * 2))
37 | res[w:w + s, w:w + s] = 1
38 | res = cv2.blur(res, (2 * w, 2 * w))
39 |
40 | return res[w:w + s, w:w + s]
41 |
42 |
43 | def inverse_pair_mapping(l, m, s, ps=128, smoothing=7):
44 | pad = int(np.floor(s / 3))
45 |
46 |     l = cv2.resize(l, (ps, ps), interpolation=cv2.INTER_CUBIC)
47 | l *= smooth_edge_mask(l.shape[0], smoothing)
48 |
49 |     m = cv2.resize(m, (ps, ps), interpolation=cv2.INTER_CUBIC)
50 | m *= smooth_edge_mask(m.shape[0], smoothing)
51 |
52 | hm = np.zeros((s, s))
53 | hm[pad:pad + ps, 0:ps] = l
54 | hm[pad:pad + ps, s - ps:] = m
55 |
56 | return hm
57 |
58 |
59 | class KneeNetEnsemble(nn.Module):
60 | def __init__(self, snapshots_paths, mean_std_path, device=None):
61 | super().__init__()
62 | self.states = []
63 | self.logger = logging.getLogger(f'deepknee-backend:pipeline')
64 | if device is None:
65 | if torch.cuda.is_available():
66 | device = 'cuda'
67 | else:
68 | device = 'cpu'
69 | self.device = device
70 | for snap_path in snapshots_paths:
71 | self.states.append(torch.load(snap_path, map_location=self.device))
72 | self.logger.log(logging.INFO, f'Loaded weights from {snap_path}')
73 |
74 | self.cropper = CenterCrop(300)
75 | self.ohe = OneHotEncoder(sparse=False, categories=[range(5)])
76 |
77 | mean_vector, std_vector = np.load(mean_std_path)
78 | self.patch_transform = transforms.Compose([
79 | transforms.ToTensor(),
80 | lambda x: x.float(),
81 | transforms.Normalize(mean_vector, std_vector)
82 | ])
83 |
84 | self.grads_l1 = None
85 | self.grads_m1 = None
86 |
87 | self.grads_l2 = None
88 | self.grads_m2 = None
89 |
90 | self.grads_l3 = None
91 | self.grads_m3 = None
92 | self.sm = torch.nn.Softmax(1)
93 | self.mean_std_path = mean_std_path
94 |
95 | self.init_networks_from_states()
96 |
97 | def empty_gradient_arrays(self):
98 | # Initializing arrays for storing the gradients
99 | self.grads_l1, self.grads_m1 = [], []
100 | self.grads_l2, self.grads_m2 = [], []
101 | self.grads_l3, self.grads_m3 = [], []
102 | self.logger.log(logging.INFO, f'Gradient arrays have been emptied')
103 |
104 | def init_networks_from_states(self):
105 | models = {}
106 | for idx, state in enumerate(self.states):
107 | # Data Parallel was accidentally stored back in 2017.
108 | model = nn.DataParallel(KneeNet(64, 0.2, False)).to(self.device)
109 | model.load_state_dict(state)
110 | self.logger.log(logging.INFO, f'Model {idx} state has been loaded')
111 | # Converting data parallel into a regular model
112 | model = model.module
113 | # Removing the dropout
114 | model.final = model.final[1]
115 |             # Fusing BatchNorm
116 |             # We need to re-assemble the architecture so that we are able to extract features.
117 |             # Note that after model.branch, the result must still be average-pooled (see decompose_forward_avg)
118 | branch = nn.Sequential(model.branch.block1,
119 | nn.MaxPool2d(2),
120 | model.branch.block2,
121 | nn.MaxPool2d(2),
122 | model.branch.block3)
123 | branch = fuse_bn_recursively(branch)
124 | model.branch = branch
125 | models[f'net{idx + 1}'] = deepcopy(model)
126 | self.logger.log(logging.INFO, f'Model {idx} has been initialized')
127 |
128 | self.__dict__['_modules'].update(models)
129 | self.to(self.device)
130 | self.logger.log(logging.INFO, f'The whole pipeline has been moved to {self.device}')
131 |
132 | def load_picture(self, fname, nbits=16, flip_left=False):
133 |         """Loads an image and pre-processes it into the lateral and medial patches.
134 |
135 |         :param fname: str or numpy.ndarray
136 |             Either a full path to the image or a pre-loaded numpy array
137 |         :return: tuple of (cropped PIL image, lateral tensor, medial tensor)
138 |         """
139 | self.logger.log(logging.DEBUG, f'Processing {nbits} bit {"left" if flip_left else "right"} image')
140 | if isinstance(fname, str):
141 | img = Image.open(fname)
142 | elif isinstance(fname, np.ndarray):
143 | img = fname
144 | if nbits == 16:
145 | img = Image.fromarray(np.uint8(255 * (img / 65535.)))
146 | elif nbits == 8:
147 | if img.dtype != np.uint8:
148 | raise TypeError
149 | img = Image.fromarray(img)
150 | else:
151 | raise TypeError
152 | else:
153 | raise TypeError
154 |
155 | width, height = img.size
156 |
157 | if width != 350 or height != 350:
158 | img = img.resize((350, 350), Image.BICUBIC)
159 |
160 | if flip_left:
161 | img = img.transpose(Image.FLIP_LEFT_RIGHT)
162 |
163 | img_cropped = self.cropper(img)
164 | lateral, medial = get_pair(img_cropped)
165 |
166 | lateral = self.patch_transform(lateral).to(self.device)
167 | medial = self.patch_transform(medial).to(self.device)
168 | self.logger.log(logging.DEBUG, f'Image pre-processing has been finished')
169 | return img_cropped, lateral.view(1, 1, 128, 128), medial.view(1, 1, 128, 128)
170 |
171 | def decompose_forward_avg(self, net, l, m):
172 | # Reducing the memory footprint.
173 | # We don't really need gradients to compute the features
174 | self.logger.log(logging.INFO, f'Forward pass started for {hex(id(net))}')
175 | with torch.no_grad():
176 | l_o = net.branch(l)
177 | m_o = net.branch(m)
178 | l_o_avg = F.adaptive_avg_pool2d(l_o, (1, 1))
179 | m_o_avg = F.adaptive_avg_pool2d(m_o, (1, 1))
180 | self.logger.log(logging.DEBUG, f'Features have been extracted')
181 |         # These variables require gradients, as they initiate the forward pass through the FC layer
182 |         # from which we will obtain the gradients for GradCAM
183 | l_o_avg.requires_grad = True
184 | m_o_avg.requires_grad = True
185 | # A normal forward pass. Concatenating the outputs from the lateral and the medial sides
186 | self.logger.log(logging.DEBUG, f'Pushing the feature maps through FC layer')
187 | concat = torch.cat([l_o_avg, m_o_avg], 1)
188 | # Passing the results through an FC layer
189 | o = net.final(concat.view(l.size(0), net.final.in_features))
190 | self.logger.log(logging.INFO, f'Model {hex(id(net))} finished predictions')
191 | return l_o, m_o, l_o_avg, m_o_avg, o
192 |
193 | def weigh_maps(self, weights, maps):
194 | maps = maps.squeeze()
195 | weights = weights.squeeze()
196 |
197 | res = torch.zeros(maps.size()[-2:]).to(self.device)
198 |
199 | for i, w in enumerate(weights):
200 | res += w * maps[i]
201 | return res
202 |
203 | def extract_gradcam_weighted_maps(self, o_l, o_m, wl, wm):
204 | self.logger.log(logging.DEBUG, f'GradCAM-based weighing started')
205 | # After extracting the features, we weigh them based on the provided weights
206 | o_l = self.weigh_maps(wl, o_l)
207 | o_m = self.weigh_maps(wm, o_m)
208 | return F.relu(o_l), F.relu(o_m)
209 |
210 | def compute_gradcam(self, features, img_size, ps, smoothing=7):
211 | self.logger.log(logging.INFO, f'GradCAM computation has been started')
212 | w_lateral, w_medial = self.grads_l1[0].data, self.grads_m1[0].data
213 | ol1, om1 = self.extract_gradcam_weighted_maps(features['net1'][0], features['net1'][1], w_lateral, w_medial)
214 |
215 | w_lateral, w_medial = self.grads_l2[0].data, self.grads_m2[0].data
216 | ol2, om2 = self.extract_gradcam_weighted_maps(features['net2'][0], features['net2'][1], w_lateral, w_medial)
217 |
218 | w_lateral, w_medial = self.grads_l3[0].data, self.grads_m3[0].data
219 | ol3, om3 = self.extract_gradcam_weighted_maps(features['net3'][0], features['net3'][1], w_lateral, w_medial)
220 |
221 | l_out = (ol1 + ol2 + ol3) / 3.
222 | m_out = (om1 + om2 + om3) / 3.
223 | self.logger.log(logging.INFO, f'Creating the heatmap')
224 | heatmap = inverse_pair_mapping(l_out.detach().to('cpu').numpy(),
225 | np.fliplr(m_out.detach().to('cpu').numpy()),
226 | img_size, ps, smoothing)
227 | heatmap -= heatmap.min()
228 | heatmap /= heatmap.max()
229 | return heatmap
230 |
231 | def forward(self, l, m):
232 | self.logger.log(logging.INFO, f'Forward pass started')
233 | self.empty_gradient_arrays()
234 |
235 | # Producing the branch outputs and registering the corresponding hooks for attention maps
236 | # Net 1
237 | l_o1, m_o1, l_o1_avg, m_o1_avg, o1 = self.decompose_forward_avg(self.net1, l, m)
238 | l_o1_avg.register_hook(lambda grad: self.grads_l1.append(grad))
239 | m_o1_avg.register_hook(lambda grad: self.grads_m1.append(grad))
240 | # Net 2
241 | l_o2, m_o2, l_o2_avg, m_o2_avg, o2 = self.decompose_forward_avg(self.net2, l, m)
242 | l_o2_avg.register_hook(lambda grad: self.grads_l2.append(grad))
243 | m_o2_avg.register_hook(lambda grad: self.grads_m2.append(grad))
244 | # Net 3
245 | l_o3, m_o3, l_o3_avg, m_o3_avg, o3 = self.decompose_forward_avg(self.net3, l, m)
246 | l_o3_avg.register_hook(lambda grad: self.grads_l3.append(grad))
247 | m_o3_avg.register_hook(lambda grad: self.grads_m3.append(grad))
248 |
249 | features = {'net1': (l_o1, m_o1), 'net2': (l_o2, m_o2), 'net3': (l_o3, m_o3)}
250 |
251 | return o1 + o2 + o3, features
252 |
253 | def predict(self, x, nbits=16, flip_left=False):
254 | """Makes a prediction from file or a pre-loaded image
255 |
256 | :param x: str or numpy.array
257 | Image. Should be 130x130mm with the pixel spacing of 0.3mm (300x300 pixels).
258 | :param nbits: int
259 |             By default we load 16-bit images produced by the CropROI object and convert them to 8 bit.
260 | :param flip_left: bool
261 | Whether to flip image. Done for the left knees
262 | :return: tuple
263 | Image, Heatmap, probabilities
264 | """
265 | img, l, m = self.load_picture(x, nbits=nbits, flip_left=flip_left)
266 | self.empty_gradient_arrays()
267 | self.train(True)
268 | self.zero_grad()
269 |
270 | out, features = self.forward(l, m)
271 |
272 | probs = self.sm(out).to('cpu').detach().numpy()
273 | index = np.argmax(out.detach().to('cpu').numpy(), axis=1).reshape(-1, 1)
274 | out.backward(torch.from_numpy(self.ohe.fit_transform(index)).float().to(self.device))
275 | gradcam_heatmap = self.compute_gradcam(features, 300, 128, 7)
276 |
277 | return img, gradcam_heatmap, probs.squeeze()
278 |
279 | def predict_draw(self, fileobj_in, nbits=16, fname_suffix=None, path_dir_out=None, flip_left=False):
280 | """Makes a prediction from file or a pre-loaded image
281 |
282 | :param fileobj_in: str or numpy.array
283 | Image. Should be 130x130mm with the pixel spacing of 0.3mm (300x300 pixels).
284 | :param nbits: int
285 |             By default we load 16-bit images produced by the CropROI object and convert them to 8 bit.
286 | :param fname_suffix: str or None
287 | Base filename used to save the results
288 | :param path_dir_out: str or None
289 | Where to save the heatmap and the softmax barplot
290 | :param flip_left: bool
291 | Whether to flip image. Done for the left knees
292 | :return: tuple
293 | Image, Heatmap, probabilities
294 | """
295 | self.logger.log(logging.INFO, f'Prediction started')
296 |         # Derive the suffix from the input file name unless one was provided
297 |         if fname_suffix is None:
298 |             if isinstance(fileobj_in, str):
299 |                 fname_suffix = os.path.splitext(os.path.basename(fileobj_in))[0]
300 |             else:
301 |                 fname_suffix = ''
302 |
303 | img, heatmap, probs = self.predict(x=fileobj_in, nbits=nbits, flip_left=flip_left)
304 | self.logger.log(logging.INFO, f'Drawing the heatmap')
305 | img = np.asarray(img)
306 | img = cv2.cvtColor(img, cv2.COLOR_GRAY2BGR)
307 | if flip_left:
308 | img = np.fliplr(img)
309 | heatmap = np.fliplr(heatmap)
310 | # overlay with original image
311 | heatmap = cv2.applyColorMap(np.uint8(255 * heatmap), cv2.COLORMAP_JET)
312 | img_overlayed = cv2.addWeighted(img, 0.7, heatmap, 0.3, 0)
313 | # If path provided, we save the heatmap. Otherwise, this is skipped
314 | if path_dir_out is not None:
315 | tmp_fname = os.path.join(path_dir_out, f'heatmap_{fname_suffix}.png')
316 | cv2.imwrite(tmp_fname, img_overlayed)
317 |
318 | # Making a bar plot for displaying probabilities
319 | self.logger.log(logging.INFO, f'Drawing the vector with probabilities')
320 | plt.figure(figsize=(6, 1))
321 | for kl in range(5):
322 | plt.text(kl - 0.2, 0.35, "%.2f" % np.round(probs[kl], 2), fontsize=15)
323 | plt.bar(np.array([0, 1, 2, 3, 4]), probs, color='red', align='center',
324 | tick_label=['KL0', 'KL1', 'KL2', 'KL3', 'KL4'], alpha=0.3)
325 | plt.ylim(0, 1)
326 | plt.yticks([])
327 | # Saving the figure to a BytesIO object.
328 | buf = io.BytesIO()
329 | plt.savefig(buf, format="png", bbox_inches='tight', dpi=100, pad_inches=0)
330 | buf.seek(0)
331 | probs_bar_arr = np.frombuffer(buf.getvalue(), dtype=np.uint8)
332 | buf.close()
333 | plt.close()
334 | # Now decoding the result from the bytes object
335 | probs_bar = cv2.imdecode(probs_bar_arr, 1)
336 | if path_dir_out is not None:
337 | tmp_fname = os.path.join(path_dir_out, f'prob_{fname_suffix}.png')
338 | cv2.imwrite(tmp_fname, probs_bar)
339 | self.logger.log(logging.INFO, f'Sending the results back to the user')
340 | return img, img_overlayed, probs_bar, probs.squeeze().argmax()
341 |
342 |     def request_landmarks(self, kneel_addr, file):
343 |         if kneel_addr is None:
344 |             kneel_addr = os.environ["KNEEL_ADDR"]
345 |         self.logger.log(logging.INFO, f'Sending the image to KNEEL: {kneel_addr}')
346 |
347 | response = requests.post(f'{kneel_addr}/kneel/predict/bilateral', json=file)
348 | landmarks = response.json()
349 | return landmarks
350 |
351 | def localize_bilateral(self, dicom_raw, sizemm, pad, kneel_addr=None, landmarks=None):
352 | if landmarks is None:
353 | landmarks = self.request_landmarks(kneel_addr, {'dicom': base64.b64encode(dicom_raw).decode()})
354 |
355 | if landmarks['R'] is None:
356 | self.logger.log(logging.INFO, f'Landmarks are not found. Returning None')
357 | return None
358 |
359 | self.logger.log(logging.INFO, f'Image decoding and pre-processing started')
360 | raw = DicomBytesIO(dicom_raw)
361 | dicom_data = dcmread(raw)
362 | img, spacing, dicom_data = read_dicom(dicom_data)
363 | img = process_xray(img, 5, 99, 255).astype(np.uint8)
364 | sizepx = int(np.round(sizemm / spacing))
365 | self.logger.log(logging.DEBUG, f'Padding the image')
366 | row, col = img.shape
367 | tmp = np.zeros((row + 2 * pad, col + 2 * pad))
368 | tmp[pad:pad + row, pad:pad + col] = img
369 | img = tmp
370 |
371 | landmarks_l = np.array(landmarks['L']) + pad
372 | landmarks_r = np.array(landmarks['R']) + pad
373 | # Extracting center landmarks
374 | lcx, lcy = landmarks_l[4]
375 | rcx, rcy = landmarks_r[4]
376 |
377 | img_left = img[(lcy - sizepx // 2):(lcy + sizepx // 2),
378 | (lcx - sizepx // 2):(lcx + sizepx // 2)].astype(np.uint8)
379 |
380 | img_right = img[(rcy - sizepx // 2):(rcy + sizepx // 2),
381 | (rcx - sizepx // 2):(rcx + sizepx // 2)].astype(np.uint8)
382 |
383 | self.logger.log(logging.INFO, f'Returning localized left and right knees')
384 | return img_left, img_right
385 |
386 | def predict_draw_bilateral(self, dicom_raw, sizemm, pad, kneel_addr=None, landmarks=None):
387 | res_landmarks = self.localize_bilateral(dicom_raw, sizemm, pad, kneel_addr, landmarks)
388 | if res_landmarks is None:
389 | return None
390 |
391 | img_left, img_right = res_landmarks
392 |
393 | img_l, img_hm_l, preds_bar_l, pred_l = self.predict_draw(fileobj_in=img_left,
394 | nbits=8,
395 | path_dir_out=None,
396 | flip_left=True)
397 |
398 | img_r, img_hm_r, preds_bar_r, pred_r = self.predict_draw(fileobj_in=img_right,
399 | nbits=8,
400 | path_dir_out=None,
401 | flip_left=False)
402 | return img_l, img_hm_l, preds_bar_l, pred_l, img_r, img_hm_r, preds_bar_r, pred_r
403 |
404 |
405 | def parse_args():
406 |     parser = argparse.ArgumentParser()
407 |     parser.add_argument('--snapshots_path', default='../../snapshots_knee_grading')
408 |     parser.add_argument('--images', type=str, default='')
409 |     # Boolean CLI flags: type=bool would turn any non-empty string into True
410 |     parser.add_argument('--write_heatmaps', action='store_true')
411 |     parser.add_argument('--nbits', type=int, default=16)
412 |     parser.add_argument('--device', type=str, default='cpu')
413 |     parser.add_argument('--flip_left', action='store_true')
414 |     parser.add_argument('--output_dir', default='../../../deepknee_test_output', help='Stores heatmaps')
415 |     parser.add_argument('--output_csv', default='../../../deepknee_test_output/preds.csv', help='Stores predictions')
416 |
417 |     args = parser.parse_args()
418 |     return args
419 |
420 |
421 | if __name__ == '__main__':
422 |     print('Version of PyTorch:', torch.__version__)
423 |
424 |     args = parse_args()
425 |
426 |     net = KneeNetEnsemble(glob.glob(os.path.join(args.snapshots_path, "*", '*.pth')),
427 |                           mean_std_path=os.path.join(args.snapshots_path, 'mean_std.npy'),
428 |                           device=args.device)
429 |
430 |     paths_test_files = glob.glob(os.path.join(args.images, '*.png'))
431 |
432 |     os.makedirs(args.output_dir, exist_ok=True)
433 |
434 |     with open(args.output_csv, 'w') as f:
435 |         f.write('IMG,KL_R,KL_L\n')
436 |         for path_test_file in tqdm(paths_test_files, total=len(paths_test_files)):
437 |             with open(path_test_file, 'rb') as fdicom:
438 |                 dicom_raw_local = fdicom.read()
439 |
440 |             res_bilateral = net.predict_draw_bilateral(dicom_raw_local, 140, 300)
441 |             if res_bilateral is None:
442 |                 print('Could not localize the landmarks!')
443 |                 continue
444 |             img_l, img_hm_l, preds_bar_l, pred_l, img_r, img_hm_r, preds_bar_r, pred_r = res_bilateral
445 |
446 |             line = '{},{},{}\n'.format(path_test_file.split('/')[-1], pred_r, pred_l)
447 |             f.write(line)
448 |
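449 | # A minimal usage sketch for grading one already-localized knee ROI, assuming
450 | # the snapshot layout from the CLI above; the image path is an assumption.
451 | #
452 | #   import glob, os
453 | #   snaps = 'snapshots_knee_grading'
454 | #   net = KneeNetEnsemble(glob.glob(os.path.join(snaps, '*', '*.pth')),
455 | #                         mean_std_path=os.path.join(snaps, 'mean_std.npy'),
456 | #                         device='cpu')
457 | #   img, heatmap, probs = net.predict('knee_roi.png', nbits=16)
458 | #   print('KL grade:', int(probs.argmax()))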
--------------------------------------------------------------------------------
/ouludeepknee/inference/utils.py:
--------------------------------------------------------------------------------
1 | import torch
2 | import torch.nn as nn
3 |
4 |
5 | def fuse_bn_sequential(block):
6 | """
7 | This function takes a sequential block and fuses the batch normalization with convolution
8 | :param block: nn.Sequential.
9 | :return: nn.Sequential. Converted block
10 | """
11 | if not isinstance(block, nn.Sequential):
12 | return block
13 | stack = []
14 | for m in block.children():
15 | if isinstance(m, nn.BatchNorm2d):
16 | if isinstance(stack[-1], nn.Conv2d):
17 | bn_st_dict = m.state_dict()
18 | conv_st_dict = stack[-1].state_dict()
19 |
20 | # BatchNorm params
21 | eps = m.eps
22 | mu = bn_st_dict['running_mean']
23 | var = bn_st_dict['running_var']
24 | gamma = bn_st_dict['weight']
25 |
26 | if 'bias' in bn_st_dict:
27 | beta = bn_st_dict['bias']
28 | else:
29 | beta = torch.zeros(gamma.size(0)).float().to(gamma.device)
30 |
31 | # Conv params
32 | W = conv_st_dict['weight']
33 | if 'bias' in conv_st_dict:
34 | bias = conv_st_dict['bias']
35 | else:
36 | bias = torch.zeros(W.size(0)).float().to(gamma.device)
37 |
38 | denom = torch.sqrt(var + eps)
39 | b = beta - gamma.mul(mu).div(denom)
40 | A = gamma.div(denom)
41 | bias *= A
42 | A = A.expand_as(W.transpose(0, -1)).transpose(0, -1)
43 |
44 | W.mul_(A)
45 | bias.add_(b)
46 |
47 | stack[-1].weight.data.copy_(W)
48 | if stack[-1].bias is None:
49 | stack[-1].bias = torch.nn.Parameter(bias)
50 | else:
51 | stack[-1].bias.data.copy_(bias)
52 |
53 | else:
54 | stack.append(m)
55 |
56 | if len(stack) > 1:
57 | return nn.Sequential(*stack)
58 | else:
59 | return stack[0]
60 |
61 |
62 | def fuse_bn_recursively(model: torch.nn.Module):
63 | for module_name in model._modules:
64 | model._modules[module_name] = fuse_bn_sequential(model._modules[module_name])
65 | if len(model._modules[module_name]._modules) > 0:
66 | fuse_bn_recursively(model._modules[module_name])
67 |
68 | return model
69 |
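70 | # A minimal sanity check, assuming only this module and torch: fusing BatchNorm
71 | # into the preceding convolution should leave a block's eval-mode output intact.
72 | #
73 | #   block = nn.Sequential(nn.Conv2d(1, 4, 3), nn.BatchNorm2d(4), nn.ReLU())
74 | #   block.eval()
75 | #   x = torch.randn(2, 1, 16, 16)
76 | #   with torch.no_grad():
77 | #       before = block(x)
78 | #       after = fuse_bn_sequential(block)(x)
79 | #   assert torch.allclose(before, after, atol=1e-5)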
--------------------------------------------------------------------------------
/ouludeepknee/train/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/imedslab/DeepKnee/2eee4645f632d04ed5847b35f675421b7dd4dfdb/ouludeepknee/train/__init__.py
--------------------------------------------------------------------------------
/ouludeepknee/train/augmentation.py:
--------------------------------------------------------------------------------
1 | """
2 | Data augmentations
3 |
4 | (c) Aleksei Tiulpin, University of Oulu, 2017
5 | """
6 |
7 | import random
8 | from PIL import Image, ImageEnhance
9 | import numbers
10 | import numpy as np
11 |
12 |
13 | class CenterCrop(object):
14 | """
15 | Performs center crop of an image of a certain size.
16 | Modified version from torchvision
17 |
18 | """
19 |
20 | def __init__(self, size):
21 | if isinstance(size, numbers.Number):
22 | self.size = (int(size), int(size))
23 | else:
24 | self.size = size
25 |
26 | def __call__(self, img):
27 | """
28 | Args:
29 | img (PIL.Image): Image to be cropped.
30 | Returns:
31 | PIL.Image: Cropped image.
32 | """
33 | w, h = img.size
34 |         tw, th = self.size
35 | x1 = int(round((w - tw) / 2.))
36 | y1 = int(round((h - th) / 2.))
37 | return img.crop((x1, y1, x1 + tw, y1 + th))
38 |
39 |
40 | def correct_gamma16(img, gamma):
41 | """
42 | Gamma correction of a 16-bit image
43 | """
44 | img = np.array(img).astype(np.float64)
45 | img = (img/65535.)**gamma
46 | img = np.uint16(img*65535)
47 | img = Image.fromarray(img)
48 | return img
49 |
50 |
51 | def correct_gamma8(img, gamma):
52 | """
53 | Gamma correction of an 8-bit image
54 | """
55 | img = np.array(img).astype(np.float64)
56 | img = (img/255.)**gamma
57 | img = np.uint8(img*255)
58 | img = Image.fromarray(img)
59 | return img
60 |
61 |
62 | class CorrectGamma(object):
63 | """
64 | Does random gamma correction
65 |
66 | """
67 | def __init__(self, g_min, g_max, res=8):
68 | self.g_min = g_min
69 | self.g_max = g_max
70 | self.res = res
71 |
72 | def __call__(self, img):
73 |         gamma = random.uniform(self.g_min, self.g_max)
74 | if self.res == 8:
75 | return correct_gamma8(img, gamma)
76 | return correct_gamma16(img, gamma)
77 |
78 |
79 | class Jitter(object):
80 | """
81 | Makes a crop of a fixed size with random offset
82 |
83 | """
84 | def __init__(self, crop_size, j_min, j_max):
85 | self.crop_size = crop_size
86 | self.j_min = j_min
87 | self.j_max = j_max
88 |
89 | def __call__(self, img):
90 | x1 = random.randint(self.j_min, self.j_max)
91 | y1 = random.randint(self.j_min, self.j_max)
92 | return img.crop([x1, y1, x1+self.crop_size, y1+self.crop_size])
93 |
94 |
95 | class Rotate(object):
96 | """
97 | Performs random rotation
98 |
99 | """
100 | def __init__(self, a_min, a_max, interp=Image.BICUBIC):
101 | self.a_min = a_min
102 | self.a_max = a_max
103 | self.interp = interp
104 |
105 | def __call__(self, img):
106 | angle = random.uniform(self.a_min, self.a_max)
107 |         return img.rotate(angle, resample=self.interp)
108 |
109 |
110 | class CorrectBrightness(object):
111 | """
112 | Performs random brightness change
113 |
114 | """
115 | def __init__(self, b_min, b_max):
116 | self.b_min = b_min
117 | self.b_max = b_max
118 |
119 | def __call__(self, img):
120 | enhancer = ImageEnhance.Brightness(img)
121 | factor = random.uniform(self.b_min, self.b_max)
122 | return enhancer.enhance(factor)
123 |
124 |
125 | class CorrectContrast(object):
126 | """
127 | Performs random contrast change
128 |
129 | """
130 | def __init__(self, b_min, b_max):
131 | self.b_min = b_min
132 | self.b_max = b_max
133 |
134 | def __call__(self, img):
135 | enhancer = ImageEnhance.Contrast(img)
136 | factor = random.uniform(self.b_min, self.b_max)
137 | return enhancer.enhance(factor)
138 |
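139 | # A minimal usage sketch chaining the transforms the way train.py does; the
140 | # parameter values mirror the training script and the input image is synthetic.
141 | #
142 | #   import torchvision.transforms as transforms
143 | #   augment = transforms.Compose([
144 | #       CorrectBrightness(0.7, 1.3),
145 | #       CorrectContrast(0.7, 1.3),
146 | #       Rotate(-15, 15),
147 | #       CorrectGamma(0.5, 2.5),
148 | #       Jitter(300, 6, 20),
149 | #   ])
150 | #   img = Image.fromarray(np.zeros((350, 350), dtype=np.uint8))
151 | #   out = augment(img)   # a 300x300 PIL image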
--------------------------------------------------------------------------------
/ouludeepknee/train/dataset.py:
--------------------------------------------------------------------------------
1 | """
2 | Dataset tools
3 |
4 |
5 | (c) Aleksei Tiulpin, University of Oulu, 2017
6 | """
7 |
8 | import torch.utils.data as data
9 | import torch
10 | import numpy as np
11 | from PIL import Image
12 | import os
13 |
14 |
15 | def get_pair(I):
16 |     """
17 |     Generates a pair of 128x128 patches (lateral and medial) from the knee joint
18 |     image; the medial patch is mirrored so both sides share the same orientation.
19 |     """
20 | s = I.size[0]
21 | pad = int(np.floor(s/3))
22 | ps = 128
23 |
24 | l = I.crop([0, pad, ps, pad+ps])
25 | m = I.crop([s-ps, pad, s, pad+ps])
26 | m = m.transpose(Image.FLIP_LEFT_RIGHT)
27 |
28 | return l, m
29 |
30 |
31 | class KneeGradingDataset(data.Dataset):
32 | """
33 | Dataset class.
34 | """
35 | def __init__(self, dataset, split, transform, augment, stage='train'):
36 | self.dataset = dataset
37 | self.names = split
38 | self.transform = transform
39 | self.augment = augment
40 |         self.stage = stage
41 |
42 | def __getitem__(self, index):
43 | fname = os.path.join(self.dataset, self.stage, self.names[index])
44 | target = int(fname.split('/')[-1].split('_')[1])
45 |
46 | if self.stage == 'train':
47 | fname = os.path.join(self.dataset, self.stage, str(target), self.names[index])
48 |
49 | img = Image.open(fname)
50 | # We will use 8bit
51 | tmp = np.array(img, dtype=float)
52 | img = Image.fromarray(np.uint8(255*(tmp/65535.)))
53 |
54 | img = self.augment(img)
55 |
56 | l, m = get_pair(img)
57 |
58 | l = self.transform(l)
59 | m = self.transform(m)
60 |
61 | return l, m, target, fname
62 |
63 | def __len__(self):
64 | return len(self.names)
65 |
66 |
67 | class LimitedRandomSampler(data.sampler.Sampler):
68 | """
69 | Allows to use limited number of batches in the training
70 | """
71 | def __init__(self, data_source, nb, bs):
72 | self.data_source = data_source
73 | self.n_batches = nb
74 | self.bs = bs
75 |
76 | def __iter__(self):
77 | return iter(torch.randperm(len(self.data_source)).long()[:self.n_batches*self.bs])
78 |
79 | def __len__(self):
80 | return self.n_batches*self.bs
81 |
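82 | # A minimal sketch of get_pair() on a synthetic 300x300 crop: it yields the
83 | # 128x128 lateral and medial patches (the medial one mirrored).
84 | #
85 | #   img = Image.fromarray(np.zeros((300, 300), dtype=np.uint8))
86 | #   lateral, medial = get_pair(img)
87 | #   assert lateral.size == (128, 128) and medial.size == (128, 128)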
--------------------------------------------------------------------------------
/ouludeepknee/train/model.py:
--------------------------------------------------------------------------------
1 | """
2 | Network architecture class
3 |
4 | Aleksei Tiulpin, University of Oulu, 2017 (c).
5 |
6 | """
7 |
8 | import torch
9 | import torch.nn as nn
10 | import torch.nn.functional as F
11 |
12 |
13 | def ConvBlock3(inp, out, stride, pad):
14 | """
15 |     3x3 ConvNet building block: convolution, batch normalization and ReLU.
16 |
17 |     Aleksei Tiulpin, University of Oulu, 2017 (c).
18 |
19 | """
20 | return nn.Sequential(
21 | nn.Conv2d(inp, out, kernel_size=3, stride=stride, padding=pad),
22 | nn.BatchNorm2d(out, eps=1e-3),
23 | nn.ReLU(inplace=True)
24 | )
25 |
26 |
27 | def weights_init_uniform(m):
28 | """
29 |     Initializes convolutional and linear weights with the Kaiming uniform method.
30 |
31 | """
32 | if isinstance(m, nn.Conv2d):
33 |         nn.init.kaiming_uniform_(m.weight.data)
34 | m.bias.data.fill_(0)
35 |
36 | if isinstance(m, nn.Linear):
37 |         nn.init.kaiming_uniform_(m.weight.data)
38 | m.bias.data.fill_(0)
39 |
40 |
41 | class Branch(nn.Module):
42 | def __init__(self, bw):
43 | super().__init__()
44 | self.block1 = nn.Sequential(ConvBlock3(1, bw, 2, 0),
45 | ConvBlock3(bw, bw, 1, 0),
46 | ConvBlock3(bw, bw, 1, 0),
47 | )
48 |
49 | self.block2 = nn.Sequential(ConvBlock3(bw, bw * 2, 1, 0),
50 | ConvBlock3(bw * 2, bw * 2, 1, 0),
51 | )
52 |
53 | self.block3 = ConvBlock3(bw * 2, bw * 4, 1, 0)
54 |
55 | def forward(self, x):
56 | o1 = F.max_pool2d(self.block1(x), 2)
57 | o2 = F.max_pool2d(self.block2(o1), 2)
58 | return F.avg_pool2d(self.block3(o2), 10).view(x.size(0), -1)
59 |
60 |
61 | def set_requires_grad(module, val):
62 | for p in module.parameters():
63 | p.requires_grad = val
64 |
65 |
66 | class KneeNet(nn.Module):
67 | """
68 | Siamese Net to automatically grade osteoarthritis
69 |
70 |     Aleksei Tiulpin, University of Oulu, 2017 (c).
71 |
72 | """
73 |
74 | def __init__(self, bw, drop, use_w_init=True):
75 | super().__init__()
76 | self.branch = Branch(bw)
77 |
78 | if drop > 0:
79 | self.final = nn.Sequential(nn.Dropout(p=drop), nn.Linear(2 * bw * 4, 5))
80 | else:
81 | self.final = nn.Linear(2 * bw * 4, 5)
82 |
83 | # Custom weights initialization
84 | if use_w_init:
85 | self.apply(weights_init_uniform)
86 |
87 | def forward(self, x1, x2):
88 | # Shared weights
89 | o1 = self.branch(x1)
90 | o2 = self.branch(x2)
91 | feats = torch.cat([o1, o2], 1)
92 |
93 | return self.final(feats)
94 |
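95 | # A minimal shape check: KneeNet consumes a pair of 1x128x128 patches and
96 | # emits 5 KL-grade logits. bw=64 and drop=0.2 match the inference pipeline;
97 | # the batch size of 2 is arbitrary.
98 | #
99 | #   net = KneeNet(bw=64, drop=0.2, use_w_init=False)
100 | #   l, m = torch.randn(2, 1, 128, 128), torch.randn(2, 1, 128, 128)
101 | #   assert net(l, m).shape == (2, 5)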
--------------------------------------------------------------------------------
/ouludeepknee/train/train.py:
--------------------------------------------------------------------------------
1 | """
2 | Main training script
3 |
4 | (c) Aleksei Tiulpin, University of Oulu, 2017
5 |
6 | """
7 |
8 | from __future__ import print_function
9 |
10 | import argparse
11 | import os
12 | import gc
13 | import pickle
14 | import time
15 |
16 | from termcolor import colored
17 |
18 | from tqdm import tqdm
19 | import numpy as np
20 | import torch
21 | import torch.nn as nn
22 | import torch.nn.functional as F
23 | import torch.optim as optim
24 | import torchvision.transforms as transforms
25 | import torch.utils.data as data
26 | import torch.backends.cudnn as cudnn
27 | from sklearn.metrics import confusion_matrix, mean_squared_error, cohen_kappa_score
28 |
29 | from visdom import Visdom
30 |
31 | cudnn.benchmark = True
32 |
33 | from ouludeepknee.train.dataset import KneeGradingDataset, LimitedRandomSampler
34 | from ouludeepknee.train.train_utils import train_epoch, adjust_learning_rate
35 | from ouludeepknee.train.val_utils import validate_epoch
36 | from ouludeepknee.train.model import KneeNet
37 | from ouludeepknee.train.augmentation import (CenterCrop, CorrectGamma, Jitter,
38 | Rotate, CorrectBrightness, CorrectContrast)
39 |
40 |
41 | SNAPSHOTS_KNEE_GRADING = os.path.abspath(os.path.join(
42 | os.path.dirname(__file__), '../snapshots_knee_grading'))
43 |
44 |
45 | if __name__ == '__main__':
46 |
47 | parser = argparse.ArgumentParser()
48 |     parser.add_argument('--dataset', default='../../KL_data')
49 | parser.add_argument('--snapshots', default=SNAPSHOTS_KNEE_GRADING)
50 | parser.add_argument('--experiment', default='own_net')
51 | parser.add_argument('--patch_size', type=int, default=130)
52 | parser.add_argument('--base_width', type=int, default=32)
53 | parser.add_argument('--start_val', type=int, default=-1)
54 | parser.add_argument('--lr', type=float, default=1e-3)
55 | parser.add_argument('--lr_drop', type=int, default=20)
56 | parser.add_argument('--lr_min', type=float, default=1e-5)
57 | parser.add_argument('--wd', type=float, default=5e-5)
58 | parser.add_argument('--drop', type=float, default=0.2)
59 | parser.add_argument('--bs', type=int, default=32)
60 | parser.add_argument('--val_bs', type=int, default=8)
61 | parser.add_argument('--n_epoch', type=int, default=20)
62 | parser.add_argument('--bootstrap', type=int, default=1)
63 | parser.add_argument('--n_batches', type=int, default=-1)
64 | parser.add_argument('--n_threads', type=int, default=20)
65 |     parser.add_argument('--use_visdom', action='store_true')
66 | parser.add_argument('--seed', type=int, default=42)
67 | args = parser.parse_args()
68 | cur_lr = args.lr
69 |
70 | torch.manual_seed(args.seed)
71 | torch.cuda.manual_seed(args.seed)
72 |
73 | if not os.path.isdir(args.snapshots):
74 | os.mkdir(args.snapshots)
75 |
76 | cur_snapshot = time.strftime('%Y_%m_%d_%H_%M_%S')
77 | os.mkdir(os.path.join(args.snapshots, cur_snapshot))
78 | with open(os.path.join(args.snapshots, cur_snapshot, 'args.pkl'), 'wb') as f:
79 | pickle.dump(args, f)
80 |
81 |     # Getting the names of the training and validation samples.
82 |     # We oversample the training set so that all KL grades are balanced.
83 | train_cats_length = []
84 | for kl in range(5):
85 | train_cats_length.append(len(os.listdir(
86 | os.path.join(args.dataset, 'train', str(kl))
87 | )))
88 |
89 | oversample_size = int(sum(train_cats_length) / 5)
90 | train_files = []
91 |     print('Oversample size per class:', oversample_size)
92 | np.random.seed(args.seed)
93 | for kl in range(5):
94 | files = np.array(os.listdir(
95 | os.path.join(args.dataset, 'train', str(kl))
96 | ))
97 | train_files.extend(
98 | np.random.choice(
99 | files, size=oversample_size, replace=True
100 | ).tolist()
101 | )
102 |
103 | train_files = np.array(train_files)
104 | np.random.shuffle(train_files)
105 | val_files = np.array(os.listdir(os.path.join(args.dataset,'val')))
106 |
107 | if os.path.isfile(os.path.join(args.snapshots, 'mean_std.npy')):
108 | tmp = np.load(os.path.join(args.snapshots, 'mean_std.npy'))
109 | mean_vector, std_vector = tmp
110 | else:
111 |
112 | transf_tens= transforms.Compose([
113 | transforms.ToTensor(),
114 | lambda x: x.float()
115 | ])
116 |
117 | train_ds = KneeGradingDataset(args.dataset,
118 | train_files.tolist(),
119 | transform=transf_tens,
120 | augment=CenterCrop(300),
121 | stage='train')
122 |
123 | train_loader = data.DataLoader(train_ds, batch_size=args.bs, num_workers=args.n_threads)
124 |
125 | mean_vector = np.zeros(1)
126 | std_vector = np.zeros(1)
127 |
128 | print(colored('==> ', 'green')+'Estimating the mean')
129 | pbar = tqdm(total=len(train_loader))
130 | for entry in train_loader:
131 | batch_l = entry[0]
132 |             batch_m = entry[1]
133 | for j in range(mean_vector.shape[0]):
134 |                 mean_vector[j] += (batch_l[:, j, :, :].mean()+batch_m[:, j, :, :].mean())/2.
135 | std_vector[j] += (batch_l[:, j, :, :].std()+batch_m[:, j, :, :].std())/2.
136 | pbar.update()
137 | mean_vector /= len(train_loader)
138 | std_vector /= len(train_loader)
139 | np.save(os.path.join(args.snapshots, 'mean_std.npy'), [mean_vector, std_vector])
140 | pbar.close()
141 | print(colored('==> ', 'green')+'Mean: ', mean_vector)
142 | print(colored('==> ', 'green')+'Std: ', std_vector)
143 |
144 | # Defining the transforms
145 | # This is the transformation for each patch
146 | normTransform = transforms.Normalize(mean_vector, std_vector)
147 | patch_transform = transforms.Compose([
148 | transforms.ToTensor(),
149 | lambda x: x.float(),
150 | normTransform,
151 | ])
152 |
153 | # This we will use to globally augment the image
154 | augment_transforms = transforms.Compose([
155 | CorrectBrightness(0.7,1.3),
156 | CorrectContrast(0.7,1.3),
157 | Rotate(-15,15),
158 | CorrectGamma(0.5,2.5),
159 | Jitter(300, 6,20),
160 | ])
161 |
162 | # Validation set
163 | val_ds = KneeGradingDataset(args.dataset,
164 | val_files.tolist(),
165 | transform=patch_transform,
166 | augment=CenterCrop(300),
167 | stage='val'
168 | )
169 |
170 | val_loader = data.DataLoader(val_ds,
171 | batch_size=args.val_bs,
172 | num_workers=args.n_threads
173 | )
174 |
175 | print(colored('==> ', 'blue')+'Initialized the loaders....')
176 |
177 | # Network
178 | net = nn.DataParallel(KneeNet(args.base_width, args.drop, True))
179 | net.cuda()
180 | # Optimizer
181 | optimizer = optim.Adam(net.parameters(), lr=args.lr, weight_decay=args.wd)
182 |
183 | #optimizer = optim.SGD(filter(lambda p: p.requires_grad, net.parameters()),
184 | # lr=args.lr, weight_decay=args.wd, momentum=0.9)
185 | # Criterion
186 | criterion = F.cross_entropy
187 |     # Visualizer-related variables
188 |     vis = Visdom() if args.use_visdom else None
189 |     win = None
190 |     win_metrics = None
191 |
192 | train_losses = []
193 | val_losses = []
194 | val_mse = []
195 | val_kappa = []
196 | val_acc = []
197 |
198 |     best_kappa = 0
199 | prev_model = None
200 |
201 | train_started = time.time()
202 | for epoch in range(args.n_epoch):
203 |
204 |         # On each epoch we re-sample the training data with class-balanced
205 |         # oversampling, so that a different subset of each KL grade is drawn
206 |         train_files = []
207 |         np.random.seed(args.seed + epoch)
208 | for kl in range(5):
209 | files = np.array(os.listdir(os.path.join(args.dataset,'train',str(kl))))
210 | train_files.extend(
211 | np.random.choice(
212 | files, size=oversample_size*args.bootstrap, replace=True
213 | ).tolist()
214 | )
215 |
216 | train_files = np.array(train_files)
217 |
218 | train_ds = KneeGradingDataset(args.dataset,
219 | train_files.tolist(),
220 | transform=patch_transform,
221 | augment=augment_transforms
222 | )
223 | N_batches = None
224 | if args.n_batches > 0:
225 | N_batches = args.n_batches
226 |
227 | if N_batches is not None:
228 | train_loader = data.DataLoader(train_ds, batch_size=args.bs,
229 | num_workers=args.n_threads,
230 | sampler=LimitedRandomSampler(train_ds, N_batches, args.bs)
231 | )
232 | else:
233 | train_loader = data.DataLoader(train_ds,
234 | batch_size=args.bs,
235 | num_workers=args.n_threads,
236 | shuffle=True
237 | )
238 |
239 | print(colored('==> ', 'blue')+'Epoch:', epoch+1, cur_snapshot)
240 | # Adjusting learning rate using the scheduler
241 | optimizer, cur_lr = adjust_learning_rate(optimizer, epoch+1, args)
242 | print(colored('==> ', 'red')+'LR:', cur_lr)
243 | # Training one epoch and measure the time
244 | start = time.time()
245 | train_loss = train_epoch(epoch, net, optimizer, train_loader, criterion, args.n_epoch)
246 | epoch_time = np.round(time.time() - start,4)
247 | print(colored('==> ', 'green')+'Epoch training time: {} s.'.format(epoch_time))
248 |         # If it is time to start the validation, we will do it
249 |         # args.start_val can be used to avoid time-consuming validation
250 |         # at the beginning of the training
251 | if epoch >= args.start_val:
252 | start = time.time()
253 | val_loss, probs, truth, _ = validate_epoch(net, val_loader, criterion)
254 |
255 | preds = probs.argmax(1)
256 | # Validation metrics
257 | cm = confusion_matrix(truth, preds)
258 | kappa = np.round(cohen_kappa_score(truth, preds, weights="quadratic"),4)
259 | acc = np.round(np.mean(cm.diagonal().astype(float)/cm.sum(axis=1)),4)
260 | mse = np.round(mean_squared_error(truth, preds), 4)
261 | val_time = np.round(time.time() - start, 4)
262 | #Displaying the results
263 | print(colored('==> ', 'green')+'Kappa:', kappa)
264 | print(colored('==> ', 'green')+'Avg. class accuracy', acc)
265 | print(colored('==> ', 'green')+'MSE', mse)
266 | print(colored('==> ', 'green')+'Val loss:', val_loss)
267 | print(colored('==> ', 'green')+'Epoch val time: {} s.'.format(val_time))
268 | # Storing the logs
269 | train_losses.append(train_loss)
270 | val_losses.append(val_loss)
271 | val_mse.append(mse)
272 | val_acc.append(acc)
273 | val_kappa.append(kappa)
274 |
275 | # Displaying the results in Visdom
276 | if epoch > args.start_val+1 and args.use_visdom:
277 | # Train/Val window
278 | if win is None:
279 | win = vis.line(
280 | X=np.column_stack((np.arange(epoch, epoch+2),np.arange(epoch, epoch+2))),
281 | Y=np.column_stack((np.array(train_losses[-2:]), np.array(val_losses[-2:]))),
282 | opts=dict(title='[{}]\nTrain / val loss [{}]'.format(args.experiment, cur_snapshot),
283 | legend=['Train', 'Validation'])
284 | )
285 |
286 | else:
287 | vis.line(
288 | X=np.column_stack((np.arange(epoch, epoch+2),np.arange(epoch, epoch+2))),
289 | Y=np.column_stack((np.array(train_losses[-2:]), np.array(val_losses[-2:]))),
290 | win=win,
291 | update='append'
292 | )
293 | # Metrics
294 | if win_metrics is None:
295 | win_metrics = vis.line(
296 | X=np.column_stack((np.arange(epoch, epoch+2),np.arange(epoch, epoch+2),np.arange(epoch, epoch+2))),
297 | Y=np.column_stack((1-np.array(val_mse[-2:]), np.array(val_kappa[-2:]),np.array(val_acc[-2:]))),
298 | opts=dict(title='[{}]\nMetrics[{}]'.format(args.experiment, cur_snapshot),
299 | legend=['1-MSE', 'Kappa','Accuracy'])
300 | )
301 |
302 | else:
303 | vis.line(
304 | X=np.column_stack((np.arange(epoch, epoch+2),np.arange(epoch, epoch+2),np.arange(epoch, epoch+2))),
305 | Y=np.column_stack((1-np.array(val_mse[-2:]), np.array(val_kappa[-2:]),np.array(val_acc[-2:]))),
306 | win=win_metrics,
307 | update='append'
308 | )
309 |
310 | # Making logs backup
311 | np.save(os.path.join(args.snapshots, cur_snapshot, 'logs.npy'),
312 | [train_losses,val_losses, val_mse, val_acc, val_kappa])
313 |
314 | if epoch > args.start_val:
315 |             # We keep only the snapshot with the highest quadratic kappa on the validation set
316 | cur_snapshot_name = os.path.join(args.snapshots, cur_snapshot, 'epoch_{}.pth'.format(epoch+1))
317 | if prev_model is None:
318 | torch.save(net.state_dict(), cur_snapshot_name)
319 | prev_model = cur_snapshot_name
320 | best_kappa = kappa
321 | else:
322 | if kappa > best_kappa:
323 | os.remove(prev_model)
324 | best_kappa = kappa
325 | print('Saved snapshot:',cur_snapshot_name)
326 | torch.save(net.state_dict(), cur_snapshot_name)
327 | prev_model = cur_snapshot_name
328 |
329 | gc.collect()
330 |
331 |
332 | print(args.seed, 'Training took:', time.time()-train_started, 'seconds')
333 |
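334 | # Expected data layout, inferred from the os.listdir()/os.path.join() calls
335 | # above (the root folder name follows the --dataset default):
336 | #
337 | #   KL_data/
338 | #       train/0/ ... train/4/   # one folder of images per KL grade
339 | #       val/                    # flat; the grade is the second '_'-separated
340 | #                               # token of each file name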
--------------------------------------------------------------------------------
/ouludeepknee/train/train_utils.py:
--------------------------------------------------------------------------------
1 | """
2 | This file contains the training utils
3 |
4 | (c) Aleksei Tiulpin, University of Oulu, 2017
5 | """
6 |
7 | from __future__ import print_function
8 |
9 | import gc
10 |
11 | import torch
12 | from torch.autograd import Variable
13 |
14 |
15 | def adjust_learning_rate(optimizer, epoch, args):
16 | """
17 |     Decreases the initial LR by a factor of 10 every lr_drop epochs,
18 |     clamping it at lr_min. All parameter groups receive the same LR.
19 | """
20 | lr = args.lr * (0.1 ** (epoch // args.lr_drop))
21 | if lr < args.lr_min:
22 |         lr = args.lr_min
23 | for param_group in optimizer.param_groups:
24 | param_group['lr'] = lr
25 |
26 | return optimizer, lr
27 |
28 |
29 | def train_epoch(epoch, net, optimizer, train_loader, criterion, max_ep):
30 |
31 | net.train(True)
32 |
33 | running_loss = 0.0
34 | n_batches = len(train_loader)
35 | for i, (batch_l, batch_m, targets, names) in enumerate(train_loader):
36 | optimizer.zero_grad()
37 |
38 | # forward + backward + optimize
39 | labels = Variable(targets.long().cuda())
40 | inputs_l = Variable(batch_l.cuda())
41 | inputs_m = Variable(batch_m.cuda())
42 |
43 | outputs = net(inputs_l, inputs_m)
44 |
45 | if batch_l.size(0) != torch.cuda.device_count():
46 | outputs = outputs.squeeze()
47 |
48 | loss = criterion(outputs, labels)
49 |
50 | loss.backward()
51 | optimizer.step()
52 |
53 |         running_loss += loss.item()
54 |         print('[%d | %d, %5d / %d] | Running loss: %.3f / loss %.3f' %
55 |               (epoch + 1, max_ep, i + 1, n_batches, running_loss / (i+1), loss.item()))
56 | gc.collect()
57 | gc.collect()
58 |
59 | return running_loss/n_batches
60 |
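61 | # A minimal sketch of the schedule under the train.py defaults (lr=1e-3,
62 | # lr_drop=20, lr_min=1e-5): 1e-3 for epochs 1-19, 1e-4 for 20-39, 1e-5 for
63 | # 40-59, then clamped at lr_min.
64 | #
65 | #   from argparse import Namespace
66 | #   args = Namespace(lr=1e-3, lr_drop=20, lr_min=1e-5)
67 | #   opt = torch.optim.SGD([torch.nn.Parameter(torch.zeros(1))], lr=args.lr)
68 | #   for epoch in (1, 20, 40, 60):
69 | #       opt, lr = adjust_learning_rate(opt, epoch, args)
70 | #       print(epoch, lr)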
--------------------------------------------------------------------------------
/ouludeepknee/train/val_utils.py:
--------------------------------------------------------------------------------
1 | """
2 | Validation utils
3 |
4 | (c) Aleksei Tiulpin, University of Oulu, 2017
5 | """
6 |
7 | import gc
8 | from tqdm import tqdm
9 |
10 | import numpy as np
11 | import torch
12 | import torch.nn as nn
13 | from torch.autograd import Variable
14 |
15 |
16 | def validate_epoch(net, val_loader, criterion):
17 |
18 | net.train(False)
19 |
20 | running_loss = 0.0
21 | n_batches = len(val_loader)
22 |     sm = nn.Softmax(1)  # softmax over the class dimension
23 |
24 | truth = []
25 | preds = []
26 | bar = tqdm(total=len(val_loader),desc='Processing', ncols=90)
27 | names_all = []
28 | for i, (batch_l, batch_m, targets, names) in enumerate(val_loader):
29 | labels = Variable(targets.long().cuda())
30 |
31 | inputs_l = Variable(batch_l.cuda())
32 | inputs_m = Variable(batch_m.cuda())
33 |
34 | outputs = net(inputs_l, inputs_m)
35 |
36 | if batch_l.size(0) != torch.cuda.device_count():
37 | outputs = outputs.squeeze()
38 |
39 | loss = criterion(outputs, labels)
40 | probs = sm(outputs).data.cpu().numpy()
41 | preds.append(probs)
42 | truth.append(targets.cpu().numpy())
43 | names_all.extend(names)
44 |
45 |         running_loss += loss.item()
46 | bar.update(1)
47 | gc.collect()
48 | gc.collect()
49 | bar.close()
50 | preds = np.vstack(preds)
51 | truth = np.hstack(truth)
52 |
53 | return running_loss/n_batches, preds, truth, names_all
54 |
--------------------------------------------------------------------------------
/pacs-integration/change_polling.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | import argparse
3 | import base64
4 | import logging
5 | import queue
6 | import time
7 |
8 | import requests
9 | from pydicom import dcmread
10 | from pydicom.filebase import DicomBytesIO
11 | from pydicom.uid import ImplicitVRLittleEndian
12 | from pydicom.uid import generate_uid
13 | from pynetdicom import AE, VerificationPresentationContexts
14 |
15 | queue = queue.Queue()
16 |
17 |
18 | def ingestion_loop():
19 | logger.log(logging.INFO, 'Creating application entity...')
20 | ae = AE(ae_title=b'DEEPKNEE')
21 | ae.requested_contexts = VerificationPresentationContexts
22 | ae.add_requested_context('1.2.840.10008.5.1.4.1.1.1.1', transfer_syntax=ImplicitVRLittleEndian)
23 |
24 | current = 0
25 | base_url = f'{args.orthanc_addr}:{args.orthanc_http_port}'
26 | response = requests.get(f'{base_url}/changes?since={current}&limit=10', auth=('deepknee', 'deepknee'))
27 | if response.status_code == 200:
28 | logger.log(logging.INFO, 'Connection to Orthanc via REST is healthy')
29 |
30 | # Orthanc addr must have http, but DICOM communicates via sockets
31 | assoc = ae.associate(args.orthanc_addr.split('http://')[1], args.orthanc_dicom_port)
32 | if assoc.is_established:
33 | logger.log(logging.INFO, 'Connection to Orthanc via DICOM is healthy')
34 | assoc.release()
35 |
36 | assoc = ae.associate(args.remote_pacs_addr, args.remote_pacs_port)
37 | if assoc.is_established:
38 | logger.log(logging.INFO, 'Connection to Remote PACS via DICOM is healthy')
39 | assoc.release()
40 |
41 | while True:
42 |         response = requests.get(f'{base_url}/changes?since={current}&limit=10', auth=('deepknee', 'deepknee'))
43 |         changes = response.json()
44 |         for change in changes['Changes']:
45 | # We must also filter by the imaged body part in the future
46 | if change['ChangeType'] == 'NewInstance':
47 | logger.log(logging.INFO, 'Identified new received instance in Orthanc. '
48 | 'Checking if it has been created by DeepKnee...')
49 | # We should not analyze the instances if they are produced by DeepKnee
50 | # Checking if it was verified by DeepKnee
51 | resp_verifier = requests.get(f'{base_url}/instances/{change["ID"]}/content/0040-a027',
52 | auth=('deepknee', 'deepknee'))
53 | resp_verifier.encoding = 'utf-8'
54 | resp_content = requests.get(f'{base_url}/instances/{change["ID"]}/content/0070-0080',
55 | auth=('deepknee', 'deepknee'))
56 |
57 | resp_content.encoding = 'utf-8'
58 |
59 | if resp_verifier.text.strip("\x00 ") == 'UniOulu-DeepKnee' and \
60 | resp_content.text.strip("\x00 ") == 'DEEPKNEE-XRAY':
61 | continue
62 |
63 |                 # Once we are sure that the instance is new, we can go ahead with the analysis
64 | response = requests.get(f'{base_url}/instances/{change["ID"]}/file', auth=('deepknee', 'deepknee'))
65 |
66 | logger.log(logging.INFO, 'Instance has been retrieved from Orthanc')
67 | dicom_raw_bytes = response.content
68 | dcm = dcmread(DicomBytesIO(dicom_raw_bytes))
69 |
70 | dicom_base64 = base64.b64encode(dicom_raw_bytes).decode('ascii')
71 | logger.log(logging.INFO, 'Sending API request to DeepKnee core')
72 | url = f'{args.deepknee_addr}:{args.deepknee_port}/deepknee/predict/bilateral'
73 | response_deepknee = requests.post(url, json={'dicom': dicom_base64})
74 |
75 | if response_deepknee.status_code != 200:
76 | logger.log(logging.INFO, 'DeepKnee analysis has failed')
77 | else:
78 | logger.log(logging.INFO, 'Getting rid of the instance in Orthanc')
79 |                     if args.orthanc_addr.split('http://')[1] != args.remote_pacs_addr or \
80 |                             args.orthanc_dicom_port != args.remote_pacs_port:
81 | response = requests.delete(f'{base_url}/instances/{change["ID"]}',
82 | auth=('deepknee', 'deepknee'))
83 | if response.status_code == 200:
84 | logger.log(logging.INFO, 'Instance has been removed from the Orthanc')
85 | else:
86 | logger.log(logging.INFO, 'Remote PACS is DeepKnee. The instance will not be removed.')
87 |
88 | logger.log(logging.INFO, 'DeepKnee has successfully analyzed the image. Routing...')
89 |
90 | # Report
91 | deepknee_json = response_deepknee.json()
92 | dcm.add_new([0x40, 0xa160], 'LO', 'KL_right: {}, KL_left: {}'.format(deepknee_json['R']['kl'],
93 | deepknee_json['L']['kl']))
94 | # Verifier
95 | dcm.add_new([0x40, 0xa027], 'LO', 'UniOulu-DeepKnee')
96 | # Content label
97 | dcm.add_new([0x70, 0x80], 'CS', 'DEEPKNEE-XRAY')
98 |
99 | dcm[0x08, 0x8].value = 'DERIVED'
100 | # Instance_UUID
101 | current_uuid = dcm[0x08, 0x18].value
102 | dcm[0x08, 0x18].value = generate_uid(prefix='.'.join(current_uuid.split('.')[:-1])+'.')
103 | # Series UUID
104 | current_uuid = dcm[0x20, 0x0e].value
105 | dcm[0x20, 0x0e].value = generate_uid(prefix='.'.join(current_uuid.split('.')[:-1])+'.')
106 | logger.log(logging.INFO, 'Connecting to Orthanc over DICOM')
107 | assoc = ae.associate(args.remote_pacs_addr, args.remote_pacs_port)
108 | if assoc.is_established:
109 | logger.log(logging.INFO, 'Association with Orthanc has been established. Routing..')
110 | routing_status = assoc.send_c_store(dcm)
111 | logger.log(logging.INFO, f'Routing finished. Status: {routing_status}')
112 | assoc.release()
113 |
114 | else:
115 | # Here there should be a code to remove the change from the pacs
116 | # Now nothing is done here
117 | pass
118 |         current = changes['Last']  # advance the cursor past this batch (Orthanc reports it as "Last")
119 | time.sleep(1)
120 |
121 |
122 | if __name__ == "__main__":
123 | parser = argparse.ArgumentParser()
124 | parser.add_argument('--deepknee_addr', default='http://127.0.0.1', help='DeepKnee address')
125 | parser.add_argument('--deepknee_port', default=5001, help='DeepKnee backend port')
126 |
127 | parser.add_argument('--orthanc_addr', default='http://127.0.0.1', help='The host address that runs Orthanc')
128 | parser.add_argument('--orthanc_http_port', type=int, default=6001, help='Orthanc REST API port')
129 | parser.add_argument('--orthanc_dicom_port', type=int, default=6000, help='Orthanc DICOM port')
130 |
131 |     parser.add_argument('--remote_pacs_addr', default='127.0.0.1', help='Remote PACS IP address (no http:// scheme)')
132 | parser.add_argument('--remote_pacs_port', type=int, default=6000, help='Remote PACS port')
133 | args = parser.parse_args()
134 |
135 | logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO)
136 |     logger = logging.getLogger('dicom-router')
137 |
138 | ingestion_loop()
139 |
140 |
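141 | # A minimal one-off poll against a local Orthanc, mirroring the loop above;
142 | # the credentials and the HTTP port match the defaults in this repository.
143 | #
144 | #   r = requests.get('http://127.0.0.1:6001/changes?since=0&limit=10',
145 | #                    auth=('deepknee', 'deepknee'))
146 | #   for change in r.json()['Changes']:
147 | #       print(change['Seq'], change['ChangeType'], change['ID'])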
--------------------------------------------------------------------------------
/pacs-integration/orthanc.json:
--------------------------------------------------------------------------------
1 | {
2 | /**
3 | * General configuration of Orthanc
4 | **/
5 |
6 | // The logical name of this instance of Orthanc. This one is
7 | // displayed in Orthanc Explorer and at the URI "/system".
8 | "Name" : "DEEPKNEE",
9 |
10 | // Path to the directory that holds the heavyweight files (i.e. the
11 | // raw DICOM instances). Backslashes must be either escaped by
12 | // doubling them, or replaced by forward slashes "/".
13 | "StorageDirectory" : "OrthancStorage",
14 |
15 | // Path to the directory that holds the SQLite index (if unset, the
16 | // value of StorageDirectory is used). This index could be stored on
17 | // a RAM-drive or a SSD device for performance reasons.
18 | "IndexDirectory" : "OrthancStorage",
19 |
20 | // Path to the directory where Orthanc stores its large temporary
21 | // files. The content of this folder can be safely deleted once
22 | // Orthanc is stopped. The folder must exist. The corresponding
23 | // filesystem must be properly sized, given that for instance a ZIP
24 | // archive of DICOM images created by a job can weigh several GBs,
25 | // and that there might be up to "min(JobsHistorySize,
26 | // MediaArchiveSize)" archives to be stored simultaneously. If not
27 | // set, Orthanc will use the default temporary folder of the
28 | // operating system (such as "/tmp/" on UNIX-like systems, or
29 | // "C:/Temp" on Microsoft Windows).
30 | // "TemporaryDirectory" : "/tmp/Orthanc/",
31 |
32 | // Enable the transparent compression of the DICOM instances
33 | "StorageCompression" : false,
34 |
35 | // Maximum size of the storage in MB (a value of "0" indicates no
36 | // limit on the storage size)
37 | "MaximumStorageSize" : 0,
38 |
39 | // Maximum number of patients that can be stored at a given time
40 | // in the storage (a value of "0" indicates no limit on the number
41 | // of patients)
42 | "MaximumPatientCount" : 0,
43 |
44 | // List of paths to the custom Lua scripts that are to be loaded
45 | // into this instance of Orthanc
46 | "LuaScripts" : [
47 | ],
48 |
49 | // List of paths to the plugins that are to be loaded into this
50 | // instance of Orthanc (e.g. "./libPluginTest.so" for Linux, or
51 | // "./PluginTest.dll" for Windows). These paths can refer to
52 | // folders, in which case they will be scanned non-recursively to
53 | // find shared libraries. Backslashes must be either escaped by
54 | // doubling them, or replaced by forward slashes "/".
55 | "Plugins" : [
56 | ],
57 |
58 | // Maximum number of processing jobs that are simultaneously running
59 | // at any given time. A value of "0" indicates to use all the
60 | // available CPU logical cores. To emulate Orthanc <= 1.3.2, set
61 | // this value to "1".
62 | "ConcurrentJobs" : 2,
63 |
64 |
65 | /**
66 | * Configuration of the HTTP server
67 | **/
68 |
69 | // Enable the HTTP server. If this parameter is set to "false",
70 | // Orthanc acts as a pure DICOM server. The REST API and Orthanc
71 | // Explorer will not be available.
72 | "HttpServerEnabled" : true,
73 |
74 | // HTTP port for the REST services and for the GUI
75 | "HttpPort" : 8042,
76 |
77 | // When the following option is "true", if an error is encountered
78 | // while calling the REST API, a JSON message describing the error
79 | // is put in the HTTP answer. This feature can be disabled if the
80 | // HTTP client does not properly handle such answers.
81 | "HttpDescribeErrors" : true,
82 |
83 | // Enable HTTP compression to improve network bandwidth utilization,
84 | // at the expense of more computations on the server. Orthanc
85 | // supports the "gzip" and "deflate" HTTP encodings.
86 | "HttpCompressionEnabled" : true,
87 |
88 |
89 |
90 | /**
91 | * Configuration of the DICOM server
92 | **/
93 |
94 | // Enable the DICOM server. If this parameter is set to "false",
95 | // Orthanc acts as a pure REST server. It will not be possible to
96 | // receive files or to do query/retrieve through the DICOM protocol.
97 | "DicomServerEnabled" : true,
98 |
99 | // The DICOM Application Entity Title
100 | "DicomAet" : "DEEPKNEE",
101 |
102 | // Check whether the called AET corresponds to the AET of Orthanc
103 | // during an incoming DICOM SCU request
104 | "DicomCheckCalledAet" : false,
105 |
106 | // The DICOM port
107 | "DicomPort" : 4242,
108 |
109 | // The default encoding that is assumed for DICOM files without
110 | // "SpecificCharacterSet" DICOM tag, and that is used when answering
111 | // C-Find requests (including worklists). The allowed values are
112 | // "Ascii", "Utf8", "Latin1", "Latin2", "Latin3", "Latin4",
113 | // "Latin5", "Cyrillic", "Windows1251", "Arabic", "Greek", "Hebrew",
114 | // "Thai", "Japanese", "Chinese", "JapaneseKanji", "Korean", and
115 | // "SimplifiedChinese".
116 | "DefaultEncoding" : "Latin1",
117 |
118 | // The transfer syntaxes that are accepted by Orthanc C-Store SCP
119 | "DeflatedTransferSyntaxAccepted" : true,
120 | "JpegTransferSyntaxAccepted" : true,
121 | "Jpeg2000TransferSyntaxAccepted" : true,
122 | "JpegLosslessTransferSyntaxAccepted" : true,
123 | "JpipTransferSyntaxAccepted" : true,
124 | "Mpeg2TransferSyntaxAccepted" : true,
125 | "RleTransferSyntaxAccepted" : true,
126 |
127 | // Whether Orthanc accepts to act as C-Store SCP for unknown storage
128 | // SOP classes (aka. "promiscuous mode")
129 | "UnknownSopClassAccepted" : false,
130 |
131 | // Set the timeout (in seconds) after which the DICOM associations
132 | // are closed by the Orthanc SCP (server) if no further DIMSE
133 | // command is received from the SCU (client).
134 | "DicomScpTimeout" : 30,
135 |
136 |
137 |
138 | /**
139 | * Security-related options for the HTTP server
140 | **/
141 |
142 | // Whether remote hosts can connect to the HTTP server
143 | "RemoteAccessAllowed" : true,
144 |
145 | // Whether or not SSL is enabled
146 | "SslEnabled" : false,
147 |
148 | // Path to the SSL certificate in the PEM format (meaningful only if
149 | // SSL is enabled)
150 | "SslCertificate" : "certificate.pem",
151 |
152 | // Whether or not the password protection is enabled (using HTTP
153 | // basic access authentication). Starting with Orthanc 1.5.8, if
154 | // "AuthenticationEnabled" is not explicitly set, authentication is
155 | // enabled iff. remote access is allowed (i.e. the default value of
156 | // "AuthenticationEnabled" equals that of "RemoteAccessAllowed").
157 | /**
158 | "AuthenticationEnabled" : false,
159 | **/
160 |
161 | // The list of the registered users. Because Orthanc uses HTTP
162 | // Basic Authentication, the passwords are stored as plain text.
163 | "RegisteredUsers" : {
164 | "deepknee" : "deepknee"
165 | },
166 |
167 | /**
168 | * Network topology
169 | **/
170 |
171 | // The list of the known DICOM modalities
172 | "DicomModalities" : {
173 | /**
174 | * Uncommenting the following line would enable Orthanc to
175 | * connect to an instance of the "storescp" open-source DICOM
176 | * store (shipped in the DCMTK distribution) started by the
177 | * command line "storescp 2000".
178 | **/
179 | "remote-pacs" : [ "STORESCP", "127.0.0.1", 2000]
180 |
181 | /**
182 | * A fourth parameter is available to enable patches for
183 | * specific PACS manufacturers. The allowed values are currently:
184 | * - "Generic" (default value),
185 | * - "GenericNoWildcardInDates" (to replace "*" by "" in date fields
186 | * in outgoing C-Find requests originating from Orthanc),
187 | * - "GenericNoUniversalWildcard" (to replace "*" by "" in all fields
188 | * in outgoing C-Find SCU requests originating from Orthanc),
189 | * - "StoreScp" (storescp tool from DCMTK),
190 | * - "ClearCanvas",
191 | * - "Dcm4Chee",
192 | * - "Vitrea",
193 | * - "GE" (Enterprise Archive, MRI consoles and Advantage Workstation
194 | * from GE Healthcare).
195 | *
196 | * This parameter is case-sensitive.
197 | **/
198 | // "clearcanvas" : [ "CLEARCANVAS", "192.168.1.1", 104, "ClearCanvas" ]
199 |
200 | /**
201 | * By default, the Orthanc SCP accepts all DICOM commands (C-ECHO,
202 | * C-STORE, C-FIND, C-MOVE) issued by the registered remote SCU
203 | * modalities. Starting with Orthanc 1.5.0, it is possible to
204 | * specify which DICOM commands are allowed, separately for each
205 | * remote modality, using the syntax below. The "AllowEcho" (resp.
206 |      * "AllowStore") option only has an effect if the global option
207 |      * "DicomAlwaysAllowEcho" (resp. "DicomAlwaysAllowStore")
208 | * is set to false.
209 | **/
210 | //"untrusted" : {
211 | // "AET" : "ORTHANC",
212 | // "Port" : 104,
213 | // "Host" : "127.0.0.1",
214 | // "AllowEcho" : false,
215 | // "AllowFind" : false,
216 | // "AllowMove" : false,
217 | // "AllowStore" : true
218 | //}
219 | },
220 |
221 | // Whether to store the DICOM modalities in the Orthanc database
222 | // instead of in this configuration file (new in Orthanc 1.5.0)
223 | "DicomModalitiesInDatabase" : false,
224 |
225 | // Whether the Orthanc SCP allows incoming C-Echo requests, even
226 | // from SCU modalities it does not know about (i.e. that are not
227 | // listed in the "DicomModalities" option above). Orthanc 1.3.0
228 | // is the only version to behave as if this argument was set to "false".
229 | "DicomAlwaysAllowEcho" : true,
230 |
231 | // Whether the Orthanc SCP allows incoming C-Store requests, even
232 | // from SCU modalities it does not know about (i.e. that are not
233 | // listed in the "DicomModalities" option above)
234 | "DicomAlwaysAllowStore" : true,
235 |
236 | // Whether Orthanc checks the IP/hostname address of the remote
237 | // modality initiating a DICOM connection (as listed in the
238 | // "DicomModalities" option above). If this option is set to
239 | // "false", Orthanc only checks the AET of the remote modality.
240 | "DicomCheckModalityHost" : false,
241 |
242 | // The timeout (in seconds) after which the DICOM associations are
243 | // considered as closed by the Orthanc SCU (client) if the remote
244 | // DICOM SCP (server) does not answer.
245 | "DicomScuTimeout" : 10,
246 |
247 | // The list of the known Orthanc peers
248 | "OrthancPeers" : {
249 | /**
250 | * Each line gives the base URL of an Orthanc peer, possibly
251 | * followed by the username/password pair (if the password
252 | * protection is enabled on the peer).
253 | **/
254 | // "peer" : [ "http://127.0.0.1:8043/", "alice", "alicePassword" ]
255 | // "peer2" : [ "http://127.0.0.1:8044/" ]
256 |
257 | /**
258 | * This is another, more advanced format to define Orthanc
259 |      * peers. It notably permits specifying HTTP headers, an HTTPS
260 |      * client certificate in the PEM format (as in the "--cert" option
261 |      * of curl), or enabling PKCS#11 authentication for smart cards.
262 | **/
263 | // "peer" : {
264 | // "Url" : "http://127.0.0.1:8043/",
265 | // "Username" : "alice",
266 | // "Password" : "alicePassword",
267 | // "HttpHeaders" : { "Token" : "Hello world" },
268 | // "CertificateFile" : "client.crt",
269 | // "CertificateKeyFile" : "client.key",
270 | // "CertificateKeyPassword" : "certpass",
271 | // "Pkcs11" : false
272 | // }
273 | },
274 |
275 | // Whether to store the Orthanc peers in the Orthanc database
276 | // instead of in this configuration file (new in Orthanc 1.5.0)
277 | "OrthancPeersInDatabase" : false,
278 |
279 | // Parameters of the HTTP proxy to be used by Orthanc. If set to the
280 | // empty string, no HTTP proxy is used. For instance:
281 | // "HttpProxy" : "192.168.0.1:3128"
282 | // "HttpProxy" : "proxyUser:proxyPassword@192.168.0.1:3128"
283 | "HttpProxy" : "",
284 |
285 | // If set to "true", debug messages from libcurl will be issued
286 | // whenever Orthanc makes an outgoing HTTP request. This is notably
287 | // useful to debug HTTPS-related problems.
288 | "HttpVerbose" : false,
289 |
290 | // Set the timeout for HTTP requests issued by Orthanc (in seconds).
291 | "HttpTimeout" : 60,
292 |
293 | // Enable the verification of the peers during HTTPS requests. This
294 | // option must be set to "false" if using self-signed certificates.
295 |   // Be aware that setting this option to "false" results in
296 | // security risks!
297 | // Reference: http://curl.haxx.se/docs/sslcerts.html
298 | "HttpsVerifyPeers" : true,
299 |
300 | // Path to the CA (certification authority) certificates to validate
301 | // peers in HTTPS requests. From curl documentation ("--cacert"
302 | // option): "Tells curl to use the specified certificate file to
303 | // verify the peers. The file may contain multiple CA
304 | // certificates. The certificate(s) must be in PEM format." On
305 | // Debian-based systems, this option can be set to
306 | // "/etc/ssl/certs/ca-certificates.crt"
307 | "HttpsCACertificates" : "",
308 |
309 |
310 |
311 | /**
312 | * Advanced options
313 | **/
314 |
315 | // Dictionary of symbolic names for the user-defined metadata. Each
316 |   // entry must map a unique string to a unique number between 1024
317 | // and 65535. Reserved values:
318 | // - The Orthanc whole-slide imaging plugin uses metadata 4200
319 | "UserMetadata" : {
320 | // "Sample" : 1024
321 | },
322 |
323 | // Dictionary of symbolic names for the user-defined types of
324 |   // attached files. Each entry must map a unique string to a unique
325 |   // number between 1024 and 65535. Optionally, a second argument can
326 |   // be provided to specify a MIME content type for the attachment.
327 | "UserContentType" : {
328 | // "sample" : 1024
329 | // "sample2" : [ 1025, "application/pdf" ]
330 | },
331 |
332 | // Number of seconds without receiving any instance before a
333 | // patient, a study or a series is considered as stable.
334 | "StableAge" : 60,
335 |
336 |   // By default, Orthanc compares AETs (Application Entity Titles) in a
337 | // case-insensitive way. Setting this option to "true" will enable
338 | // case-sensitive matching.
339 | "StrictAetComparison" : false,
340 |
341 | // When the following option is "true", the MD5 of the DICOM files
342 | // will be computed and stored in the Orthanc database. This
343 | // information can be used to detect disk corruption, at the price
344 | // of a small performance overhead.
345 | "StoreMD5ForAttachments" : true,
346 |
347 | // The maximum number of results for a single C-FIND request at the
348 | // Patient, Study or Series level. Setting this option to "0" means
349 | // no limit.
350 | "LimitFindResults" : 0,
351 |
352 | // The maximum number of results for a single C-FIND request at the
353 | // Instance level. Setting this option to "0" means no limit.
354 | "LimitFindInstances" : 0,
355 |
356 | // The maximum number of active jobs in the Orthanc scheduler. When
357 | // this limit is reached, the addition of new jobs is blocked until
358 | // some job finishes.
359 | "LimitJobs" : 10,
360 |
361 | // If this option is set to "true" (default behavior until Orthanc
362 | // 1.3.2), Orthanc will log the resources that are exported to other
363 | // DICOM modalities or Orthanc peers, inside the URI
364 |   // "/exports". Setting this option to "false" is useful to prevent
365 |   // the index from growing indefinitely in auto-routing tasks (this is the
366 | // default behavior since Orthanc 1.4.0).
367 | "LogExportedResources" : false,
368 |
369 | // Enable or disable HTTP Keep-Alive (persistent HTTP
370 | // connections). Setting this option to "true" prevents Orthanc
371 | // issue #32 ("HttpServer does not support multiple HTTP requests in
372 | // the same TCP stream"), but can possibly slow down HTTP clients
373 | // that do not support persistent connections. The default behavior
374 | // used to be "false" in Orthanc <= 1.5.1. Setting this option to
375 | // "false" is also recommended if Orthanc is compiled against
376 | // Mongoose.
377 | "KeepAlive" : true,
378 |
379 | // Enable or disable Nagle's algorithm. Only taken into
380 | // consideration if Orthanc is compiled to use CivetWeb. Experiments
381 | // show that best performance can be obtained by setting both
382 | // "KeepAlive" and "TcpNoDelay" to "true". Beware however of
383 | // caveats: https://eklitzke.org/the-caveats-of-tcp-nodelay
384 | "TcpNoDelay" : true,
385 |
386 | // Number of threads that are used by the embedded HTTP server.
387 | "HttpThreadsCount" : 50,
388 |
389 | // If this option is set to "false", Orthanc will run in index-only
390 | // mode. The DICOM files will not be stored on the drive. Note that
391 | // this option might prevent the upgrade to newer versions of Orthanc.
392 | "StoreDicom" : true,
393 |
394 | // DICOM associations initiated by Lua scripts are kept open as long
395 | // as new DICOM commands are issued. This option sets the number of
396 | // seconds of inactivity to wait before automatically closing a
397 | // DICOM association used by Lua. If set to 0, the connection is
398 | // closed immediately.
399 | "DicomAssociationCloseDelay" : 5,
400 |
401 | // Maximum number of query/retrieve DICOM requests that are
402 | // maintained by Orthanc. The least recently used requests get
403 | // deleted as new requests are issued.
404 | "QueryRetrieveSize" : 100,
405 |
406 | // When handling a C-Find SCP request, setting this flag to "true"
407 | // will enable case-sensitive match for PN value representation
408 | // (such as PatientName). By default, the search is
409 | // case-insensitive, which does not follow the DICOM standard.
410 | "CaseSensitivePN" : false,
411 |
412 | // Configure PKCS#11 to use hardware security modules (HSM) and
413 |   // smart cards when carrying out HTTPS client authentication.
414 | /**
415 | "Pkcs11" : {
416 | "Module" : "/usr/local/lib/libbeidpkcs11.so",
417 | "Module" : "C:/Windows/System32/beidpkcs11.dll",
418 | "Pin" : "1234",
419 | "Verbose" : true
420 | }
421 | **/
422 |
423 | // If set to "false", Orthanc will not load its default dictionary
424 | // of private tags. This might be necessary if you cannot import a
425 | // DICOM file encoded using the Implicit VR Endian transfer syntax,
426 | // and containing private tags: Such an import error might stem from
427 | // a bad dictionary. You can still list your private tags of
428 | // interest in the "Dictionary" configuration option below.
429 | "LoadPrivateDictionary" : true,
430 |
431 |   // Locale to be used by Orthanc. Currently, it is only used when
432 |   // comparing strings in a case-insensitive way. It should be safe to
433 |   // keep this value undefined, which lets Orthanc autodetect a suitable locale.
434 | // "Locale" : "en_US.UTF-8",
435 |
436 | // Register a new tag in the dictionary of DICOM tags that are known
437 | // to Orthanc. Each line must contain the tag (formatted as 2
438 |   // hexadecimal numbers), the value representation (2 uppercase
439 |   // characters), a nickname for the tag, possibly the minimum
440 |   // multiplicity (> 0, defaults to 1), possibly the maximum
441 | // multiplicity (0 means arbitrary multiplicity, defaults to 1), and
442 | // possibly the Private Creator (for private tags).
443 | "Dictionary" : {
444 | // "0014,1020" : [ "DA", "ValidationExpiryDate", 1, 1 ]
445 | // "00e1,10c2" : [ "UI", "PET-CT Multi Modality Name", 1, 1, "ELSCINT1" ]
446 | // "7053,1003" : [ "ST", "Original Image Filename", 1, 1, "Philips PET Private Group" ]
447 | // "2001,5f" : [ "SQ", "StackSequence", 1, 1, "Philips Imaging DD 001" ]
448 | },
449 |
450 | // Whether to run DICOM C-Move operations synchronously. If set to
451 | // "false" (asynchronous mode), each incoming C-Move request results
452 | // in the creation of a new background job. Up to Orthanc 1.3.2, the
453 | // implicit behavior was to use synchronous C-Move ("true"). Between
454 | // Orthanc 1.4.0 and 1.4.2, the default behavior was set to
455 | // asynchronous C-Move ("false"). Since Orthanc 1.5.0, the default
456 | // behavior is back to synchronous C-Move ("true", which ensures
457 | // backward compatibility with Orthanc <= 1.3.2).
458 | "SynchronousCMove" : true,
459 |
460 | // Maximum number of completed jobs that are kept in memory. A
461 | // processing job is considered as complete once it is tagged as
462 | // "Success" or "Failure". Since Orthanc 1.5.0, a value of "0"
463 |   // means that no jobs are kept in memory (i.e. jobs are removed from
464 | // the history as soon as they are completed), which prevents the
465 | // use of some features of Orthanc (typically, synchronous mode in
466 |   // REST API) and should be avoided by non-developers.
467 | "JobsHistorySize" : 10,
468 |
469 | // Whether to save the jobs into the Orthanc database. If this
470 | // option is set to "true", the pending/running/completed jobs are
471 | // automatically reloaded from the database if Orthanc is stopped
472 | // then restarted (except if the "--no-jobs" command-line argument
473 | // is specified). This option should be set to "false" if multiple
474 | // Orthanc servers are using the same database (e.g. if PostgreSQL
475 | // or MariaDB/MySQL is used).
476 | "SaveJobs" : true,
477 |
478 | // Specifies how Orthanc reacts when it receives a DICOM instance
479 | // whose SOPInstanceUID is already stored. If set to "true", the new
480 | // instance replaces the old one. If set to "false", the new
481 | // instance is discarded and the old one is kept. Up to Orthanc
482 | // 1.4.1, the implicit behavior corresponded to "false".
483 | "OverwriteInstances" : false,
484 |
485 | // Maximum number of ZIP/media archives that are maintained by
486 | // Orthanc, as a response to the asynchronous creation of archives.
487 | // The least recently used archives get deleted as new archives are
488 | // generated. This option was introduced in Orthanc 1.5.0, and has
489 | // no effect on the synchronous generation of archives.
490 | "MediaArchiveSize" : 1,
491 |
492 | // Performance setting to specify how Orthanc accesses the storage
493 | // area during C-FIND. Three modes are available: (1) "Always"
494 |   // allows Orthanc to read the storage area as soon as it needs
495 |   // information that is not present in its database (slowest mode),
496 |   // (2) "Never" prevents Orthanc from accessing the storage area, and
497 |   // makes it use its database exclusively (fastest mode), and (3)
498 | // "Answers" allows Orthanc to read the storage area to generate its
499 | // answers, but not to filter the DICOM resources (balance between
500 | // the two modes). By default, the mode is "Always", which
501 | // corresponds to the behavior of Orthanc <= 1.5.0.
502 | "StorageAccessOnFind" : "Always",
503 |
504 | // Whether Orthanc monitors its metrics (new in Orthanc 1.5.4). If
505 | // set to "true", the metrics can be retrieved at
506 |   // "/tools/metrics-prometheus", formatted using the Prometheus
507 | // text-based exposition format.
508 | "MetricsEnabled" : true,
509 |
510 |   // Whether calls to the URI "/tools/execute-script" are enabled. Starting
511 | // with Orthanc 1.5.8, this URI is disabled by default for security.
512 | "ExecuteLuaEnabled" : false,
513 |
514 | // Set the timeout for HTTP requests, in seconds. This corresponds
515 | // to option "request_timeout_ms" of Mongoose/Civetweb. It will set
516 | // the socket options "SO_RCVTIMEO" and "SO_SNDTIMEO" to the
517 | // specified value.
518 | "HttpRequestTimeout" : 30
519 | }
--------------------------------------------------------------------------------
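A note on how this configuration is consumed: the "RegisteredUsers" entry above is the account that the polling client in pacs-integration/change_polling.py can authenticate with. As a rough sketch of the mechanism (not the project's actual client code), the following Python snippet polls Orthanc's documented /changes REST endpoint over HTTP basic authentication; the base URL assumes Orthanc's default HTTP port 8042, which may be overridden elsewhere in this configuration file.

# Sketch: poll Orthanc for newly received DICOM instances, logging in
# as the "deepknee" user from "RegisteredUsers". The endpoint and the
# response fields ("Changes", "Last", "Done") follow the Orthanc REST API.
import time
import requests

ORTHANC_URL = "http://127.0.0.1:8042"   # assumption: default Orthanc HTTP port
AUTH = ("deepknee", "deepknee")         # from "RegisteredUsers" above


def poll_changes(since=0):
    while True:
        r = requests.get(f"{ORTHANC_URL}/changes",
                         params={"since": since, "limit": 100},
                         auth=AUTH, timeout=10)
        r.raise_for_status()
        body = r.json()
        for change in body["Changes"]:
            if change["ChangeType"] == "NewInstance":
                print("new instance:", change["ID"])
        since = body["Last"]
        if body["Done"]:
            time.sleep(1)   # queue drained; back off before the next poll


if __name__ == "__main__":
    poll_changes()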
/pics/15_2_R_1_1_1_3_1_0_own.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/imedslab/DeepKnee/2eee4645f632d04ed5847b35f675421b7dd4dfdb/pics/15_2_R_1_1_1_3_1_0_own.jpg
--------------------------------------------------------------------------------
/pics/235_2_R_3_3_0_0_1_1_own.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/imedslab/DeepKnee/2eee4645f632d04ed5847b35f675421b7dd4dfdb/pics/235_2_R_3_3_0_0_1_1_own.jpg
--------------------------------------------------------------------------------
/pics/77_2_R_2_0_0_0_0_1_own.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/imedslab/DeepKnee/2eee4645f632d04ed5847b35f675421b7dd4dfdb/pics/77_2_R_2_0_0_0_0_1_own.jpg
--------------------------------------------------------------------------------
/pics/deepknee-architecture.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/imedslab/DeepKnee/2eee4645f632d04ed5847b35f675421b7dd4dfdb/pics/deepknee-architecture.png
--------------------------------------------------------------------------------
/pics/deepkneeui.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/imedslab/DeepKnee/2eee4645f632d04ed5847b35f675421b7dd4dfdb/pics/deepkneeui.png
--------------------------------------------------------------------------------
/rebuild_docker_images.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | # The Docker images must be built on a Linux machine
3 |
4 | # Build all DeepKnee images: CPU/GPU backends, frontend UI, and backend broker
5 | docker build -t miptmloulu/deepknee:gpu -f docker/Dockerfile.gpu .
6 | docker build -t miptmloulu/deepknee:cpu -f docker/Dockerfile.cpu .
7 | docker build --build-arg REACT_APP_BROKER_PORT=5002 -t miptmloulu/deepknee:ui -f docker/UIDockerfile deepknee-frontend
8 | docker build -t miptmloulu/deepknee:broker -f docker/BrokerDockerfile deepknee-backend-broker
9 |
10 | # Push all images to Docker Hub
11 | docker push miptmloulu/deepknee:cpu && docker push miptmloulu/deepknee:gpu
12 | docker push miptmloulu/deepknee:broker && docker push miptmloulu/deepknee:ui
13 |
14 |
--------------------------------------------------------------------------------
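The same build/push sequence can also be driven from Python with the docker-py SDK. The sketch below mirrors only the CPU and GPU backend builds, because the UI and broker builds point -f at a Dockerfile outside the build context (a docker CLI feature that docker-py handles differently). It assumes docker-py is installed, the Docker daemon is reachable, and docker login has already been run.

# Sketch: rebuild and push the backend images programmatically via docker-py.
import docker

client = docker.from_env()

for dockerfile, tag in [("docker/Dockerfile.cpu", "cpu"),
                        ("docker/Dockerfile.gpu", "gpu")]:
    # Equivalent of `docker build -t miptmloulu/deepknee:<tag> -f <dockerfile> .`
    image, build_log = client.images.build(
        path=".", dockerfile=dockerfile, tag=f"miptmloulu/deepknee:{tag}")
    print("built", image.tags)
    # Equivalent of `docker push miptmloulu/deepknee:<tag>`
    for line in client.images.push("miptmloulu/deepknee", tag=tag,
                                   stream=True, decode=True):
        if "status" in line:
            print(line["status"])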
/run_deepknee_backend.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | mkdir -p logs
3 | KNEEL_ADDR=http://127.0.0.1:5000 python -m ouludeepknee.inference.app \
4 |     --snapshots_path snapshots_knee_grading/ \
5 |     --device cpu --deploy True \
6 |     --port 5001 \
7 |     --logs logs/deepknee-cpu.log
8 |
--------------------------------------------------------------------------------
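Once started, the service answers knee-grading requests over REST on the port passed above, delegating landmark localization to the KNEEL service at KNEEL_ADDR. A hypothetical client sketch follows; the route name and payload key are placeholders, and the actual API is defined in ouludeepknee/inference/app.py.

# Hypothetical client for the DeepKnee REST service. The route
# "/deepknee/predict/bitmap" and the "dicom" payload key are placeholder
# names; consult ouludeepknee/inference/app.py for the real contract.
import base64
import requests

DEEPKNEE_URL = "http://127.0.0.1:5001"   # must match the --port passed above

with open("knee.dcm", "rb") as f:        # any local knee radiograph in DICOM
    payload = {"dicom": base64.b64encode(f.read()).decode()}

resp = requests.post(f"{DEEPKNEE_URL}/deepknee/predict/bitmap",
                     json=payload, timeout=120)
resp.raise_for_status()
print(resp.json())                       # e.g. per-knee KL grades and heatmaps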
/run_deepknee_backend_broker.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | export DEPLOY_HOST=0.0.0.0
3 | export DEPLOY_PORT=5002
4 | export KNEEL_ADDR=http://localhost
5 | export KNEEL_PORT=5000
6 | export DEEPKNEE_ADDR=http://localhost
7 | export DEEPKNEE_PORT=5001
8 |
9 | nodemon deepknee-backend-broker/server.js
--------------------------------------------------------------------------------
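The environment variables above encode the expected topology: KNEEL on port 5000, the DeepKnee backend on 5001, and the broker itself on 5002 (matching REACT_APP_BROKER_PORT in the UI). A minimal sketch, assuming all services run on localhost, to sanity-check that the three ports are listening before starting the UI:

# Probe the TCP ports that the broker's environment variables wire together.
import socket

SERVICES = {"kneel": 5000, "deepknee-backend": 5001, "broker": 5002}

for name, port in SERVICES.items():
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        s.settimeout(1.0)
        status = "up" if s.connect_ex(("127.0.0.1", port)) == 0 else "DOWN"
    print(f"{name:18s} port {port}: {status}")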
/run_deepknee_ui.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | export REACT_APP_BROKER_PORT=5002
3 |
4 | cd deepknee-frontend
5 | npm start
--------------------------------------------------------------------------------
/run_kneel.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | mkdir -p logs
3 | python -u -m kneel.inference.app --lc_snapshot_path snapshots_release/lext-devbox_2019_07_14_16_04_41 \
4 |     --hc_snapshot_path snapshots_release/lext-devbox_2019_07_14_19_25_40 \
5 |     --refine True --mean_std_path snapshots_release/mean_std.npy \
6 |     --deploy True --device cpu --port 5000 --logs logs/kneel-cpu.log
7 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup, find_packages
2 |
3 |
4 | setup(
5 | name='ouludeepknee',
6 | version='0.1',
7 | author='Aleksei Tiulpin',
8 | author_email='aleksei.tiulpin@oulu.fi',
9 | packages=find_packages(),
10 | include_package_data=True,
11 | license='LICENSE.txt',
12 | long_description=open('README.md').read(),
13 | )
14 |
--------------------------------------------------------------------------------
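A brief usage note: after an editable install (pip install -e .) the packages listed in the tree resolve like any installed distribution, which is how the run scripts invoke the inference service as a module. A minimal import check, using module names taken from the tree above:

# Verify the editable install; these modules exist in the repository tree.
from ouludeepknee.inference import pipeline  # inference pipeline module
from ouludeepknee.train import model         # model definitions

# The REST service is then launchable as a module, as run_deepknee_backend.sh does:
#   python -m ouludeepknee.inference.app --help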