├── .gitignore
├── .travis.yml
├── LICENSE
├── README.rst
├── compose.yml
├── data
│   ├── pgadmin
│   │   └── .gitignore
│   ├── postgres
│   │   └── .gitignore
│   └── redis
│       └── .gitignore
├── docker
│   ├── api
│   │   └── Dockerfile
│   └── worker
│       └── Dockerfile
├── envs
│   ├── all-dev.env
│   ├── core-dev.env
│   ├── dev.env
│   ├── drf-dev.env
│   ├── k8-debug.env
│   ├── k8-splunk-debug.env
│   ├── k8-splunk.env
│   ├── k8.env
│   ├── minimal.env
│   ├── only-core-dev.env
│   ├── openshift-dev.env
│   ├── openshift-no-hostnames.env
│   ├── pgadmin-dev.env
│   ├── pipeline-dev.env
│   ├── postgres-dev.env
│   ├── prod.env
│   └── splunk.env
├── full-stack-dev.yml
├── get_user_jwt_token.sh
├── install.sh
├── log
│   ├── colors-logging.json
│   ├── console.json
│   └── logging.json
├── only-rest.yml
├── openshift
│   ├── README.rst
│   ├── api
│   │   ├── deployment.yaml
│   │   ├── log_to_splunk_deployment.yaml
│   │   └── service.yaml
│   ├── core
│   │   ├── deployment.yaml
│   │   └── log_to_splunk_deployment.yaml
│   ├── create-user.sh
│   ├── deploy.sh
│   ├── get-api-url.sh
│   ├── get-token.sh
│   ├── jupyter
│   │   ├── deployment.yaml
│   │   ├── log_to_splunk_deployment.yaml
│   │   └── service.yaml
│   ├── logs-api.sh
│   ├── logs-core.sh
│   ├── logs-jupyter.sh
│   ├── logs-pipeline.sh
│   ├── logs-worker.sh
│   ├── pgadmin4
│   │   ├── crunchy-template-http.json
│   │   └── persistent-volume.json
│   ├── pipeline
│   │   ├── deployment.yaml
│   │   └── log_to_splunk_deployment.yaml
│   ├── postgres
│   │   ├── crunchy-template.json
│   │   ├── deployment.yaml
│   │   ├── persistent-volume.json
│   │   ├── service.yaml
│   │   └── template.json
│   ├── primary-db.sh
│   ├── redis
│   │   ├── deployment.yaml
│   │   ├── persistent-volume.json
│   │   ├── pv.yml
│   │   ├── pvc.yml
│   │   └── service.yaml
│   ├── remove-all.sh
│   ├── run-pgadmin4.sh
│   ├── show-create-db.sh
│   ├── show-migrate-cmds.sh
│   ├── show-urls.sh
│   ├── ssh-api.sh
│   ├── ssh-core.sh
│   ├── ssh-jupyter.sh
│   ├── ssh-pipeline.sh
│   ├── ssh-worker.sh
│   ├── stop-all.sh
│   ├── test-pg-deploy.sh
│   ├── test-redis-deploy.sh
│   ├── test-rh-deploy.sh
│   ├── tools
│   │   ├── delete-and-fix-volumes.sh
│   │   └── drop-database.sh
│   ├── users
│   │   └── user_1.sh
│   └── worker
│       ├── deployment.yaml
│       └── log_to_splunk_deployment.yaml
├── redis.yml
├── requirements.txt
├── run-all.sh
├── run-celery-task.py
├── run-django.sh
├── run-migrations.sh
├── run-redis.sh
├── run-stack.sh
├── run-tests.sh
├── run-worker.sh
├── secure.yml
├── splunk-secure.yml
├── start.sh
├── stop-all.sh
├── stop-stack.sh
├── tail-api.sh
├── tail-core.sh
├── tail-pipeline.sh
├── tail-worker.sh
├── tests
│   ├── antinex-train-and-predict.py
│   ├── build-new-dataset.py
│   ├── celery
│   │   └── task_get_user.json
│   ├── create-keras-dnn.py
│   ├── create-user.sh
│   ├── django-antinex.json
│   ├── django-deep-antinex.json
│   ├── get-a-job.py
│   ├── get-a-prepared-dataset.py
│   ├── get-a-result.py
│   ├── get-api-url.sh
│   ├── get-recent-datasets.py
│   ├── get-recent-jobs.py
│   ├── get-recent-results.py
│   ├── images
│   │   └── django-rest-framework-with-swagger-and-jwt-trains-a-deep-neural-network-using-keras-and-tensorflow-with-83-percent-accuracy.gif
│   ├── only-publish-predict-rows-simple.json
│   ├── only-publish-scaler-full-django.json
│   ├── predict-rows-scaler-django-simple.json
│   ├── predict-rows-scaler-full-django.json
│   ├── prepare-new-dataset.json
│   ├── readme-predict-demo-1.json
│   ├── regression-spy.sh
│   ├── run-user-sim.py
│   ├── scaler-django-antinex-simple.json
│   ├── scaler-full-django-antinex-simple.json
│   ├── scaler-regression.json
│   ├── send-worker-get-user.py
│   ├── send-worker-publish-to-core.py
│   ├── simulations
│   │   ├── sim_user1.json
│   │   ├── sim_user2.json
│   │   ├── sim_user3.json
│   │   ├── user1
│   │   │   ├── prepare1.json
│   │   │   └── train1.json
│   │   ├── user2
│   │   │   ├── prepare1.json
│   │   │   └── train1.json
│   │   └── user3
│   │       ├── prepare1.json
│   │       └── train1.json
│   ├── stocks
│   │   ├── dnn-spy.json
│   │   └── spy.json
│   ├── test-keras-dnn.json
│   └── users
│       └── user_1.sh
├── tox.ini
└── webapp
    ├── Pipfile
    ├── Pipfile.lock
    ├── Procfile
    ├── README.md
    ├── __init__.py
    ├── build-docs.sh
    ├── collect-statics.sh
    ├── create-super-user.sh
    ├── django-gunicorn.py
    ├── django-uwsgi.ini
    ├── drf_network_pipeline
    │   ├── __init__.py
    │   ├── api
    │   │   ├── __init__.py
    │   │   ├── ml.py
    │   │   └── user.py
    │   ├── celery_config.py
    │   ├── docs
    │   │   ├── Makefile
    │   │   ├── doc-requirements.txt
    │   │   ├── make.bat
    │   │   └── source
    │   │       ├── antinex-api.rst
    │   │       ├── antinex-client.rst
    │   │       ├── antinex-core.rst
    │   │       ├── antinex-utils.rst
    │   │       ├── api-examples.rst
    │   │       ├── conf.py
    │   │       ├── deploy-antinex-on-kubernetes.rst
    │   │       ├── deploy-antinex-on-openshift-container-platform.rst
    │   │       ├── faq.rst
    │   │       ├── index.rst
    │   │       ├── job_utils.rst
    │   │       ├── modules
    │   │       │   └── models.rst
    │   │       ├── network-pipeline.rst
    │   │       ├── pipeline.rst
    │   │       ├── prepare-antinex-dataset-django.rst
    │   │       ├── prepare-antinex-dataset-flask-restplus.rst
    │   │       ├── prepare-antinex-dataset-react-redux.rst
    │   │       ├── prepare-antinex-dataset-spring.rst
    │   │       ├── prepare-antinex-dataset-vue.rst
    │   │       └── serializers.rst
    │   ├── index.py
    │   ├── job_utils
    │   │   ├── __init__.py
    │   │   ├── build_task_request.py
    │   │   ├── build_task_response.py
    │   │   ├── handle_task_method.py
    │   │   └── run_task.py
    │   ├── log
    │   │   ├── __init__.py
    │   │   ├── colors-logging.json
    │   │   ├── logging.json
    │   │   └── setup_logging.py
    │   ├── pipeline
    │   │   ├── __init__.py
    │   │   ├── build_worker_result_node.py
    │   │   ├── consts.py
    │   │   ├── create_ml_job_record.py
    │   │   ├── create_ml_prepare_record.py
    │   │   ├── models.py
    │   │   ├── process_worker_results.py
    │   │   ├── tasks.py
    │   │   └── utils.py
    │   ├── settings.py
    │   ├── sz
    │   │   ├── __init__.py
    │   │   ├── ml.py
    │   │   └── user.py
    │   ├── templates
    │   │   ├── home.html
    │   │   └── rest_registration
    │   │       ├── register
    │   │       │   ├── body.txt
    │   │       │   └── subject.txt
    │   │       ├── register_email
    │   │       │   ├── body.txt
    │   │       │   └── subject.txt
    │   │       └── reset_password
    │   │           ├── body.txt
    │   │           └── subject.txt
    │   ├── tests
    │   │   ├── __init__.py
    │   │   ├── datasets
    │   │   │   ├── cleaned_attack_scans.csv
    │   │   │   ├── cleaned_metadata.json
    │   │   │   ├── fulldata_attack_scans.csv
    │   │   │   ├── fulldata_metadata.json
    │   │   │   └── stock.csv
    │   │   ├── ml_models
    │   │   │   ├── ml_weights_job_1_result_1.h5
    │   │   │   ├── model_and_weights.json
    │   │   │   └── prediction_manifest.json
    │   │   ├── prepare
    │   │   │   ├── django
    │   │   │   │   ├── netdata-2018-01-28-23-06-05.csv
    │   │   │   │   └── packets-netdata-2018-01-28-23-06-05.json
    │   │   │   ├── flask-restplus
    │   │   │   │   ├── netdata-2018-01-29-11-30-02.csv
    │   │   │   │   └── packets-netdata-2018-01-29-11-30-02.json
    │   │   │   └── react-redux
    │   │   │       ├── netdata-2018-01-29-13-36-35.csv
    │   │   │       └── packets-netdata-2018-01-29-13-36-35.json
    │   │   ├── pubsub
    │   │   │   ├── get-user.json
    │   │   │   └── publish-to-core.json
    │   │   ├── test_ml.py
    │   │   └── test_user.py
    │   ├── urls.py
    │   ├── users
    │   │   ├── __init__.py
    │   │   ├── admin.py
    │   │   ├── db_lookup_user.py
    │   │   ├── models.py
    │   │   └── tasks.py
    │   └── wsgi.py
    ├── manage.py
    ├── send-worker-get-user.py
    ├── send-worker-publish-to-core.py
    └── staticfiles
        └── .gitignore
/.gitignore: -------------------------------------------------------------------------------- 1 | # Created by https://www.gitignore.io 2 | 3 | ### OSX ### 4 | .DS_Store 5 | .AppleDouble 6 | .LSOverride 7 | 8 | # Icon must end with two \r 9 | Icon 10 | 11 | 12 | # Thumbnails 13 | ._* 14 | 15 | # Files that might appear on external disk 16 | .Spotlight-V100 17 | .Trashes 18 | 19 | # Directories potentially created on remote AFP share 20 | .AppleDB 21 | .AppleDesktop 22 | Network Trash Folder 23 | Temporary Items 24 | .apdisk 25 | 26 | 27 | ### Python ### 28 | # Byte-compiled / optimized / DLL files 29 | __pycache__/ 30 | *.py[cod] 31 | 32 | # C extensions 33 | *.so 34 | 35 | # Distribution / packaging 36 | .Python 37 |
env/ 38 | ./build/* 39 | develop-eggs/ 40 | dist/ 41 | downloads/ 42 | eggs/ 43 | lib/ 44 | lib64/ 45 | parts/ 46 | sdist/ 47 | var/ 48 | *.egg-info/ 49 | .installed.cfg 50 | *.egg 51 | 52 | # PyInstaller 53 | # Usually these files are written by a python script from a template 54 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 55 | *.manifest 56 | *.spec 57 | 58 | # Installer logs 59 | pip-log.txt 60 | pip-delete-this-directory.txt 61 | 62 | # Unit test / coverage reports 63 | htmlcov/ 64 | .tox/ 65 | .coverage 66 | .cache 67 | nosetests.xml 68 | coverage.xml 69 | 70 | # Translations 71 | *.mo 72 | *.pot 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | 78 | ### Django ### 79 | *.log 80 | *.pot 81 | *.pyc 82 | __pycache__/ 83 | local_settings.py 84 | migrations 85 | 86 | .env 87 | db.sqlite3 88 | 89 | *.pyc 90 | .idea 91 | .eggs 92 | .tox 93 | *.swp 94 | *.swo 95 | *.o 96 | *.pickle 97 | *.zlib 98 | *.gz 99 | *.tar 100 | *.doctree 101 | *.bin 102 | *.retry 103 | *.egg-info 104 | *.tgz 105 | Error 106 | 107 | dist 108 | venv 109 | venv2 110 | py2venv 111 | py3venv 112 | 113 | *.log 114 | *.log.1* 115 | *.log.2* 116 | *.log.3* 117 | *.log.4* 118 | *.log.5* 119 | *.log.6* 120 | *.log.7* 121 | *.log.8* 122 | *.log.9* 123 | 124 | *.jpg 125 | *.jpeg 126 | *.tiff 127 | *.gif 128 | *.tgz 129 | *.gz 130 | 131 | dev-compose.yml 132 | prod-compose.yml 133 | 134 | nohup.out 135 | db.sqlite3-journal 136 | webapp/drf_network_pipeline/docs/build/html 137 | webapp/drf_network_pipeline/docs/build/doctrees 138 | webapp/drf_network_pipeline/docs/latest.log 139 | webapp/staticfiles 140 | openshift/.pgdeployment 141 | .pytest_cache 142 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | 3 | python: 4 | - 3.6 5 | 6 | services: 7 | - docker 8 | 9 | env: 10 | - DJANGO=2.0 11 | 12 | before_install: 13 | - export DJANGO_SETTINGS_MODULE=drf_network_pipeline.settings 14 | - export PYTHONPATH=$HOME/builds/jay-johnson/train-ai-with-django-swagger-jwt 15 | - export PIP_USE_MIRRORS=true 16 | - export REPO_API=jayjohnson/antinex-api 17 | - export REPO_WORKER=jayjohnson/antinex-worker 18 | - echo "" 19 | - echo "Testing Docker Hub credentials username=${DOCKER_USERNAME}" 20 | - docker login --username $DOCKER_USERNAME --password $DOCKER_PASSWORD 21 | - echo "Docker Hub credentials are working" 22 | - export ENVIRONMENT=Development 23 | - export DJANGO_CONFIGURATION=Development 24 | - export DJANGO_SECRET_KEY=supersecret 25 | - export DJANGO_DEBUG=yes 26 | - export DJANGO_TEMPLATE_DEBUG=yes 27 | - export COLLECT_STATICS=1 28 | - export POSTGRES_HOST=0.0.0.0 29 | - export POSTGRES_PORT=5432 30 | - export POSTGRES_USER=postgres 31 | - export POSTGRES_PASSWORD=postgres 32 | - export POSTGRES_DB=webapp 33 | - export CELERY_ENABLED=0 34 | - export CACHEOPS_ENABLED=0 35 | - export USE_ENV=drf-dev 36 | - export NUM_WORKERS=1 37 | - export ANTINEX_WORKER_ENABLED=0 38 | - export ANTINEX_WORKER_ONLY=0 39 | - export ANTINEX_DELIVERY_MODE=persistent 40 | - export ANTINEX_AUTH_URL=redis://localhost:6379/6 41 | - export ANTINEX_EXCHANGE_NAME=webapp.predict.requests 42 | - export ANTINEX_EXCHANGE_TYPE=topic 43 | - export ANTINEX_QUEUE_NAME=webapp.predict.requests 44 | - export ANTINEX_WORKER_SSL_ENABLED=0 45 | - export ANTINEX_CA_CERTS= 46 | - export ANTINEX_KEYFILE= 47 | - export ANTINEX_CERTFILE= 48 | - export ANTINEX_TLS_PROTOCOL= 49 | - export 
ANTINEX_KEYFILE= 50 | 51 | install: 52 | - pip install -r requirements.txt 53 | 54 | script: 55 | - echo "Starting Linting Tests" 56 | - flake8 57 | - pycodestyle 58 | - echo "Preparing DB" 59 | - . envs/dev.env && echo "Running Tests for Environment=${ENVIRONMENT} DB=${POSTGRES_DB} with antinex.env=${USE_ENV}" && cd webapp && pip list && python manage.py migrate --run-syncdb 60 | - echo "Starting Unit Tests" 61 | - python manage.py test 62 | 63 | after_success: 64 | - export USE_VERSION=1.3.11 65 | - echo "Building Docker images with tag=${USE_VERSION}" 66 | - echo "Test Success - Branch=${TRAVIS_BRANCH} Version=${USE_VERSION} Pull Request=${TRAVIS_PULL_REQUEST} Tag=${TRAVIS_TAG}" 67 | - if [[ "$TRAVIS_BRANCH" == "master" ]]; then echo -e "Push Container to Docker Hub"; fi 68 | - echo "Logging into Docker Hub" 69 | - docker login --username $DOCKER_USERNAME --password $DOCKER_PASSWORD 70 | - export TAG=`if [ "$TRAVIS_BRANCH" == "master" ]; then echo "latest"; else echo $TRAVIS_BRANCH ; fi` 71 | - export REPO=${REPO_API} 72 | - echo "starting: cd ../docker/api && docker build -f Dockerfile -t ${REPO} ." 73 | - cd ../docker/api 74 | - docker build -f Dockerfile -t $REPO . 75 | - echo "docker tag ${REPO} ${REPO}:${USE_VERSION}" 76 | - docker tag $REPO $REPO:$USE_VERSION 77 | - echo "docker push ${REPO}" 78 | - docker push $REPO:latest 79 | - echo "docker push ${REPO}:${USE_VERSION}" 80 | - docker push $REPO:$USE_VERSION 81 | - export REPO=${REPO_WORKER} 82 | - echo "starting: cd ../worker && docker build -f Dockerfile -t ${REPO} ." 83 | - cd ../worker 84 | - docker build -f Dockerfile -t $REPO . 85 | - echo "docker tag ${REPO} ${REPO}:${USE_VERSION}" 86 | - docker tag $REPO $REPO:$USE_VERSION 87 | - echo "docker push ${REPO}" 88 | - docker push $REPO:latest 89 | - echo "docker push ${REPO}:${USE_VERSION}" 90 | - docker push $REPO:$USE_VERSION 91 | -------------------------------------------------------------------------------- /data/pgadmin/.gitignore: -------------------------------------------------------------------------------- 1 | config 2 | sessions 3 | storage 4 | -------------------------------------------------------------------------------- /data/postgres/.gitignore: -------------------------------------------------------------------------------- 1 | pgdata 2 | -------------------------------------------------------------------------------- /data/redis/.gitignore: -------------------------------------------------------------------------------- 1 | *.rdb 2 | *.log 3 | -------------------------------------------------------------------------------- /docker/api/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM jayjohnson/ai-core:latest 2 | 3 | RUN echo "preparing image and building" \ 4 | && mkdir -p -m 777 /var/log/antinex/api \ 5 | && mkdir -p -m 777 /opt/antinex \ 6 | && chmod 777 /var/log/antinex/api \ 7 | && touch /var/log/antinex/api/worker.log \ 8 | && touch /var/log/antinex/api/api.log \ 9 | && chmod 777 /var/log/antinex/api/worker.log \ 10 | && chmod 777 /var/log/antinex/api/api.log \ 11 | && echo "updating repo" \ 12 | && cd /opt/antinex/api \ 13 | && git checkout master \ 14 | && git pull \ 15 | && echo "checking repo in container" \ 16 | && ls -l /opt/antinex/api \ 17 | && echo "activating venv" \ 18 | && . 
/opt/venv/bin/activate \ 19 | && cd /opt/antinex/api \ 20 | && echo "installing pip upgrades" \ 21 | && pip install --upgrade -r /opt/antinex/api/requirements.txt \ 22 | && echo "building docs" \ 23 | && cd /opt/antinex/api/webapp/drf_network_pipeline/docs \ 24 | && pip install -r /opt/antinex/api/webapp/drf_network_pipeline/docs/doc-requirements.txt 25 | 26 | RUN echo "Downgrading numpy and setuptools for tensorflow" \ 27 | && . /opt/venv/bin/activate \ 28 | && pip install --upgrade numpy==1.14.5 \ 29 | && pip install --upgrade setuptools==39.1.0 30 | 31 | RUN echo "Making Sphinx docs" \ 32 | && . /opt/venv/bin/activate \ 33 | && cd /opt/antinex/api/webapp/drf_network_pipeline/docs \ 34 | && ls -l \ 35 | && make html 36 | 37 | ENV PROJECT_NAME="api" \ 38 | SHARED_LOG_CFG="/opt/antinex/core/antinex_core/log/debug-openshift-logging.json" \ 39 | DEBUG_SHARED_LOG_CFG="0" \ 40 | LOG_LEVEL="DEBUG" \ 41 | LOG_FILE="/var/log/antinex/api/worker.log" \ 42 | USE_ENV="drf-dev" \ 43 | USE_VENV="/opt/venv" \ 44 | API_USER="trex" \ 45 | API_PASSWORD="123321" \ 46 | API_EMAIL="bugs@antinex.com" \ 47 | API_FIRSTNAME="Guest" \ 48 | API_LASTNAME="Guest" \ 49 | API_URL="http://api.antinex.com:8010" \ 50 | API_VERBOSE="true" \ 51 | API_DEBUG="false" \ 52 | USE_FILE="false" \ 53 | SILENT="-s" \ 54 | RUN_API="/opt/antinex/api/run-django.sh" \ 55 | RUN_WORKER="/opt/antinex/api/run-worker.sh" 56 | 57 | WORKDIR /opt/antinex/api 58 | 59 | # set for anonymous user access in the container 60 | RUN find /opt/antinex/api -type d -exec chmod 777 {} \; 61 | RUN find /var/log/antinex -type d -exec chmod 777 {} \; 62 | 63 | ENTRYPOINT . /opt/venv/bin/activate \ 64 | && cd /opt/antinex/api \ 65 | && ${RUN_API} 66 | -------------------------------------------------------------------------------- /docker/worker/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM jayjohnson/ai-core:latest 2 | 3 | RUN echo "preparing image and building" \ 4 | && mkdir -p -m 777 /var/log/antinex/api \ 5 | && mkdir -p -m 777 /opt/antinex \ 6 | && chmod 777 /var/log/antinex/api \ 7 | && touch /var/log/antinex/api/worker.log \ 8 | && touch /var/log/antinex/api/api.log \ 9 | && chmod 777 /var/log/antinex/api/worker.log \ 10 | && chmod 777 /var/log/antinex/api/api.log \ 11 | && echo "updating repo" \ 12 | && cd /opt/antinex/api \ 13 | && git checkout master \ 14 | && git pull \ 15 | && echo "checking repo in container" \ 16 | && ls -l /opt/antinex/api \ 17 | && echo "activating venv" \ 18 | && . /opt/venv/bin/activate \ 19 | && cd /opt/antinex/api \ 20 | && echo "installing pip upgrades" \ 21 | && pip install --upgrade -r /opt/antinex/api/requirements.txt \ 22 | && echo "building docs" \ 23 | && cd /opt/antinex/api/webapp/drf_network_pipeline/docs \ 24 | && pip install -r /opt/antinex/api/webapp/drf_network_pipeline/docs/doc-requirements.txt 25 | 26 | RUN echo "Downgrading numpy and setuptools for tensorflow" \ 27 | && . /opt/venv/bin/activate \ 28 | && pip install --upgrade numpy==1.14.5 \ 29 | && pip install --upgrade setuptools==39.1.0 30 | 31 | RUN echo "Making Sphinx docs" \ 32 | && . 
/opt/venv/bin/activate \ 33 | && cd /opt/antinex/api/webapp/drf_network_pipeline/docs \ 34 | && ls -l \ 35 | && make html 36 | 37 | ENV PROJECT_NAME="api" \ 38 | SHARED_LOG_CFG="/opt/antinex/core/antinex_core/log/debug-openshift-logging.json" \ 39 | DEBUG_SHARED_LOG_CFG="0" \ 40 | LOG_LEVEL="DEBUG" \ 41 | LOG_FILE="/var/log/antinex/api/worker.log" \ 42 | USE_ENV="drf-dev" \ 43 | USE_VENV="/opt/venv" \ 44 | API_USER="trex" \ 45 | API_PASSWORD="123321" \ 46 | API_EMAIL="bugs@antinex.com" \ 47 | API_FIRSTNAME="Guest" \ 48 | API_LASTNAME="Guest" \ 49 | API_URL="http://api.antinex.com:8010" \ 50 | API_VERBOSE="true" \ 51 | API_DEBUG="false" \ 52 | USE_FILE="false" \ 53 | SILENT="-s" \ 54 | RUN_API="/opt/antinex/api/run-django.sh" \ 55 | RUN_WORKER="/opt/antinex/api/run-worker.sh" 56 | 57 | WORKDIR /opt/antinex/api 58 | 59 | # set for anonymous user access in the container 60 | RUN find /opt/antinex/api -type d -exec chmod 777 {} \; 61 | RUN find /var/log/antinex -type d -exec chmod 777 {} \; 62 | 63 | ENTRYPOINT . /opt/venv/bin/activate \ 64 | && cd /opt/antinex/api \ 65 | && ${RUN_WORKER} 66 | -------------------------------------------------------------------------------- /envs/all-dev.env: -------------------------------------------------------------------------------- 1 | ENVIRONMENT=Development 2 | DJANGO_CONFIGURATION=Development 3 | DJANGO_SECRET_KEY=supersecret 4 | DJANGO_DEBUG=yes 5 | DJANGO_TEMPLATE_DEBUG=yes 6 | WEBAPP_HOST=0.0.0.0 7 | WEBAPP_PORT=8010 8 | LOG_LEVEL=INFO 9 | COLLECT_STATICS=1 10 | POSTGRES_HOST=0.0.0.0 11 | POSTGRES_PORT=5432 12 | POSTGRES_USER=postgres 13 | POSTGRES_PASSWORD=postgres 14 | POSTGRES_DB=webapp 15 | CELERY_ENABLED=1 16 | CACHEOPS_ENABLED=1 17 | USE_ENV=all-dev 18 | USE_VENV=/opt/venv 19 | NUM_WORKERS=4 20 | ANTINEX_WORKER_ENABLED=1 21 | ANTINEX_WORKER_ONLY=0 22 | ANTINEX_DELIVERY_MODE=persistent 23 | ANTINEX_AUTH_URL=redis://localhost:6379/6 24 | ANTINEX_EXCHANGE_NAME=webapp.predict.requests 25 | ANTINEX_EXCHANGE_TYPE=topic 26 | ANTINEX_QUEUE_NAME=webapp.predict.requests 27 | ANTINEX_WORKER_SSL_ENABLED=0 28 | ANTINEX_CA_CERTS= 29 | ANTINEX_KEYFILE= 30 | ANTINEX_CERTFILE= 31 | ANTINEX_TLS_PROTOCOL= 32 | ANTINEX_KEYFILE= 33 | DJANGO_ALLOWED_HOSTS=* 34 | -------------------------------------------------------------------------------- /envs/core-dev.env: -------------------------------------------------------------------------------- 1 | export ENVIRONMENT="Development" 2 | export DJANGO_CONFIGURATION="${ENVIRONMENT}" 3 | export DJANGO_SECRET_KEY="supersecret" 4 | export DJANGO_DEBUG="yes" 5 | export DJANGO_TEMPLATE_DEBUG="yes" 6 | export COLLECT_STATICS="1" 7 | export POSTGRES_HOST="0.0.0.0" 8 | export POSTGRES_PORT="5432" 9 | export POSTGRES_USER="postgres" 10 | export POSTGRES_PASSWORD="postgres" 11 | export POSTGRES_DB="webapp" 12 | export CELERY_ENABLED="1" 13 | export CACHEOPS_ENABLED="1" 14 | export USE_ENV="core-dev" 15 | export NUM_WORKERS="3" 16 | export ANTINEX_WORKER_ENABLED="1" 17 | export ANTINEX_WORKER_ONLY="0" 18 | export ANTINEX_DELIVERY_MODE="persistent" 19 | export ANTINEX_AUTH_URL="redis://localhost:6379/6" 20 | export ANTINEX_EXCHANGE_NAME="webapp.predict.requests" 21 | export ANTINEX_EXCHANGE_TYPE="topic" 22 | export ANTINEX_QUEUE_NAME="webapp.predict.requests" 23 | export ANTINEX_WORKER_SSL_ENABLED="0" 24 | export ANTINEX_CA_CERTS= 25 | export ANTINEX_KEYFILE= 26 | export ANTINEX_CERTFILE= 27 | export ANTINEX_TLS_PROTOCOL= 28 | export ANTINEX_KEYFILE= 29 | --------------------------------------------------------------------------------
/envs/dev.env: -------------------------------------------------------------------------------- 1 | export ENVIRONMENT="Development" 2 | export DJANGO_CONFIGURATION="${ENVIRONMENT}" 3 | export DJANGO_SECRET_KEY="supersecret" 4 | export DJANGO_DEBUG="yes" 5 | export DJANGO_TEMPLATE_DEBUG="yes" 6 | export WEBAPP_HOST="0.0.0.0" 7 | export WEBAPP_PORT="8010" 8 | export COLLECT_STATICS="0" 9 | export CELERY_ENABLED="0" 10 | export CACHEOPS_ENABLED="0" 11 | export USE_ENV="dev" 12 | export ANTINEX_WORKER_ENABLED="0" 13 | export ANTINEX_WORKER_ONLY="0" 14 | unset POSTGRES_DB 15 | -------------------------------------------------------------------------------- /envs/drf-dev.env: -------------------------------------------------------------------------------- 1 | export ENVIRONMENT="Development" 2 | export DJANGO_CONFIGURATION="${ENVIRONMENT}" 3 | export DJANGO_SECRET_KEY="supersecret" 4 | export DJANGO_DEBUG="yes" 5 | export DJANGO_TEMPLATE_DEBUG="yes" 6 | export WEBAPP_HOST="0.0.0.0" 7 | export WEBAPP_PORT="8010" 8 | export COLLECT_STATICS="1" 9 | export POSTGRES_HOST="0.0.0.0" 10 | export POSTGRES_PORT="5432" 11 | export POSTGRES_USER="postgres" 12 | export POSTGRES_PASSWORD="postgres" 13 | export POSTGRES_DB="webapp" 14 | export CELERY_ENABLED="1" 15 | export CACHEOPS_ENABLED="1" 16 | export USE_ENV="drf-dev" 17 | export NUM_WORKERS="1" 18 | export ANTINEX_WORKER_ENABLED="0" 19 | export ANTINEX_WORKER_ONLY="0" 20 | export ANTINEX_DELIVERY_MODE="persistent" 21 | export ANTINEX_AUTH_URL="redis://localhost:6379/6" 22 | export ANTINEX_EXCHANGE_NAME="webapp.predict.requests" 23 | export ANTINEX_EXCHANGE_TYPE="topic" 24 | export ANTINEX_QUEUE_NAME="webapp.predict.requests" 25 | export ANTINEX_WORKER_SSL_ENABLED="0" 26 | export ANTINEX_CA_CERTS= 27 | export ANTINEX_KEYFILE= 28 | export ANTINEX_CERTFILE= 29 | export ANTINEX_TLS_PROTOCOL= 30 | export ANTINEX_KEYFILE= 31 | -------------------------------------------------------------------------------- /envs/k8-debug.env: -------------------------------------------------------------------------------- 1 | export POSTGRES_HOST=postgres-primary 2 | export POSTGRES_PORT=5432 3 | export REDIS_HOST=redis-master 4 | export REDIS_PORT=6379 5 | export WEBAPP_HOST=0.0.0.0 6 | export WEBAPP_PORT=8010 7 | export JUPYTER_PORT=8888 8 | export ANTINEX_URL=https://api-svc:8010 9 | export ANTINEX_API_URL=https://api-svc:8010 10 | export ANTINEX_AUTH_URL=redis://redis-master:6379/6 11 | export ANTINEX_RESULT_AUTH_URL=redis://redis-master:6379/9 12 | export ANTINEX_CORE_BROKER_URL=redis://redis-master:6379/6 13 | export FORWARD_BROKER_URL=redis://redis-master:6379/0 14 | export ENVIRONMENT=Development 15 | export DJANGO_CONFIGURATION=Development 16 | export DJANGO_SECRET_KEY=supersecret 17 | export DJANGO_DEBUG=yes 18 | export DJANGO_TEMPLATE_DEBUG=yes 19 | export HTTP_X_FORWARDED_PROTOCOL_KEY=HTTP_X_FORWARDED_PROTO 20 | export COLLECT_STATICS=1 21 | export CELERY_ENABLED=1 22 | export CACHEOPS_ENABLED=1 23 | export ANTINEX_API_NUM_WORKERS=4 24 | export ANTINEX_WORKER_ENABLED=1 25 | export ANTINEX_WORKER_ONLY=0 26 | export ANTINEX_WORKER_SSL_ENABLED=0 27 | export ANTINEX_DELIVERY_MODE=persistent 28 | export ANTINEX_EXCHANGE_NAME=webapp.predict.requests 29 | export ANTINEX_EXCHANGE_TYPE=topic 30 | export ANTINEX_QUEUE_NAME=webapp.predict.requests 31 | export ANTINEX_CA_CERTS= 32 | export ANTINEX_KEYFILE= 33 | export ANTINEX_CERTFILE= 34 | export ANTINEX_TLS_PROTOCOL= 35 | export ANTINEX_KEYFILE= 36 | export ANTINEX_CORE_NUM_WORKERS=1 37 | export DJANGO_ALLOWED_HOSTS=* 38 | 
export USE_ENV=k8 39 | export USE_VENV=/opt/venv 40 | export SHARED_DIR=/opt/data 41 | export SHARED_LOG_CFG=/opt/antinex/core/antinex_core/log/k8-logging.json 42 | -------------------------------------------------------------------------------- /envs/k8-splunk-debug.env: -------------------------------------------------------------------------------- 1 | export POSTGRES_HOST=postgres-primary 2 | export POSTGRES_PORT=5432 3 | export REDIS_HOST=redis-master 4 | export REDIS_PORT=6379 5 | export WEBAPP_HOST=0.0.0.0 6 | export WEBAPP_PORT=8010 7 | export JUPYTER_PORT=8888 8 | export ANTINEX_URL=https://api-svc:8010 9 | export ANTINEX_API_URL=https://api-svc:8010 10 | export ANTINEX_AUTH_URL=redis://redis-master:6379/6 11 | export ANTINEX_RESULT_AUTH_URL=redis://redis-master:6379/9 12 | export ANTINEX_CORE_BROKER_URL=redis://redis-master:6379/6 13 | export FORWARD_BROKER_URL=redis://redis-master:6379/0 14 | export ENVIRONMENT=Development 15 | export DJANGO_CONFIGURATION=Development 16 | export DJANGO_SECRET_KEY=supersecret 17 | export DJANGO_DEBUG=yes 18 | export DJANGO_TEMPLATE_DEBUG=yes 19 | export HTTP_X_FORWARDED_PROTOCOL_KEY=HTTP_X_FORWARDED_PROTO 20 | export COLLECT_STATICS=1 21 | export CELERY_ENABLED=1 22 | export CACHEOPS_ENABLED=1 23 | export ANTINEX_API_NUM_WORKERS=4 24 | export ANTINEX_WORKER_ENABLED=1 25 | export ANTINEX_WORKER_ONLY=0 26 | export ANTINEX_WORKER_SSL_ENABLED=0 27 | export ANTINEX_DELIVERY_MODE=persistent 28 | export ANTINEX_EXCHANGE_NAME=webapp.predict.requests 29 | export ANTINEX_EXCHANGE_TYPE=topic 30 | export ANTINEX_QUEUE_NAME=webapp.predict.requests 31 | export ANTINEX_CA_CERTS= 32 | export ANTINEX_KEYFILE= 33 | export ANTINEX_CERTFILE= 34 | export ANTINEX_TLS_PROTOCOL= 35 | export ANTINEX_KEYFILE= 36 | export ANTINEX_CORE_NUM_WORKERS=1 37 | export DJANGO_ALLOWED_HOSTS=* 38 | export USE_ENV=k8-splunk 39 | export USE_VENV=/opt/venv 40 | export SHARED_DIR=/opt/data 41 | export SHARED_LOG_CFG=/opt/antinex/core/antinex_core/log/k8-logging-splunk.json 42 | export SPLUNK_USER=trex 43 | export SPLUNK_PASSWORD=123321 44 | export SPLUNK_INDEX=antinex 45 | export SPLUNK_ADDRESS=splunk-svc:8088 46 | export SPLUNK_API_ADDRESS=splunk-svc:8089 47 | export SPLUNK_TCP_ADDRESS=splunk-tcp-svc:1514 48 | export SPLUNK_HANDLER_NAME=splunk 49 | export SPLUNK_VERIFY=1 50 | export SPLUNK_DEBUG=0 51 | -------------------------------------------------------------------------------- /envs/k8-splunk.env: -------------------------------------------------------------------------------- 1 | export POSTGRES_HOST=postgres-primary 2 | export POSTGRES_PORT=5432 3 | export REDIS_HOST=redis-master 4 | export REDIS_PORT=6379 5 | export WEBAPP_HOST=0.0.0.0 6 | export WEBAPP_PORT=8010 7 | export JUPYTER_PORT=8888 8 | export ANTINEX_URL=https://api-svc:8010 9 | export ANTINEX_API_URL=https://api-svc:8010 10 | export ANTINEX_AUTH_URL=redis://redis-master:6379/6 11 | export ANTINEX_RESULT_AUTH_URL=redis://redis-master:6379/9 12 | export ANTINEX_CORE_BROKER_URL=redis://redis-master:6379/6 13 | export FORWARD_BROKER_URL=redis://redis-master:6379/0 14 | export ENVIRONMENT=Development 15 | export DJANGO_CONFIGURATION=Development 16 | export DJANGO_DEBUG=no 17 | export DJANGO_TEMPLATE_DEBUG=no 18 | export HTTP_X_FORWARDED_PROTOCOL_KEY=HTTP_X_FORWARDED_PROTO 19 | export COLLECT_STATICS=1 20 | export CELERY_ENABLED=1 21 | export CACHEOPS_ENABLED=1 22 | export ANTINEX_API_NUM_WORKERS=4 23 | export ANTINEX_WORKER_ENABLED=1 24 | export ANTINEX_WORKER_ONLY=0 25 | export ANTINEX_WORKER_SSL_ENABLED=0 26 | export 
ANTINEX_DELIVERY_MODE=persistent 27 | export ANTINEX_EXCHANGE_NAME=webapp.predict.requests 28 | export ANTINEX_EXCHANGE_TYPE=topic 29 | export ANTINEX_QUEUE_NAME=webapp.predict.requests 30 | export ANTINEX_CA_CERTS= 31 | export ANTINEX_KEYFILE= 32 | export ANTINEX_CERTFILE= 33 | export ANTINEX_TLS_PROTOCOL= 34 | export ANTINEX_KEYFILE= 35 | export ANTINEX_CORE_NUM_WORKERS=1 36 | export DJANGO_ALLOWED_HOSTS=* 37 | export USE_ENV=k8-splunk 38 | export USE_VENV=/opt/venv 39 | export SHARED_DIR=/opt/data 40 | export SHARED_LOG_CFG=/opt/antinex/core/antinex_core/log/k8-logging-splunk.json 41 | export SPLUNK_USER=trex 42 | export SPLUNK_PASSWORD=123321 43 | export SPLUNK_INDEX=antinex 44 | export SPLUNK_ADDRESS=splunk-svc:8088 45 | export SPLUNK_API_ADDRESS=splunk-svc:8089 46 | export SPLUNK_TCP_ADDRESS=splunk-tcp-svc:1514 47 | export SPLUNK_HANDLER_NAME=splunk 48 | export SPLUNK_VERIFY=1 49 | export SPLUNK_DEBUG=0 50 | -------------------------------------------------------------------------------- /envs/k8.env: -------------------------------------------------------------------------------- 1 | export POSTGRES_HOST=postgres-primary 2 | export POSTGRES_PORT=5432 3 | export REDIS_HOST=redis-master 4 | export REDIS_PORT=6379 5 | export WEBAPP_HOST=0.0.0.0 6 | export WEBAPP_PORT=8010 7 | export JUPYTER_PORT=8888 8 | export ANTINEX_URL=https://api-svc:8010 9 | export ANTINEX_API_URL=https://api-svc:8010 10 | export ANTINEX_AUTH_URL=redis://redis-master:6379/6 11 | export ANTINEX_RESULT_AUTH_URL=redis://redis-master:6379/9 12 | export ANTINEX_CORE_BROKER_URL=redis://redis-master:6379/6 13 | export FORWARD_BROKER_URL=redis://redis-master:6379/0 14 | export ENVIRONMENT=Development 15 | export DJANGO_CONFIGURATION=Development 16 | export DJANGO_DEBUG=no 17 | export DJANGO_TEMPLATE_DEBUG=no 18 | export HTTP_X_FORWARDED_PROTOCOL_KEY=HTTP_X_FORWARDED_PROTO 19 | export COLLECT_STATICS=1 20 | export CELERY_ENABLED=1 21 | export CACHEOPS_ENABLED=1 22 | export ANTINEX_API_NUM_WORKERS=4 23 | export ANTINEX_WORKER_ENABLED=1 24 | export ANTINEX_WORKER_ONLY=0 25 | export ANTINEX_WORKER_SSL_ENABLED=0 26 | export ANTINEX_DELIVERY_MODE=persistent 27 | export ANTINEX_EXCHANGE_NAME=webapp.predict.requests 28 | export ANTINEX_EXCHANGE_TYPE=topic 29 | export ANTINEX_QUEUE_NAME=webapp.predict.requests 30 | export ANTINEX_CA_CERTS= 31 | export ANTINEX_KEYFILE= 32 | export ANTINEX_CERTFILE= 33 | export ANTINEX_TLS_PROTOCOL= 34 | export ANTINEX_KEYFILE= 35 | export ANTINEX_CORE_NUM_WORKERS=1 36 | export DJANGO_ALLOWED_HOSTS=* 37 | export USE_ENV=k8 38 | export USE_VENV=/opt/venv 39 | export SHARED_DIR=/opt/data 40 | export SHARED_LOG_CFG=/opt/antinex/core/antinex_core/log/k8-logging.json 41 | -------------------------------------------------------------------------------- /envs/minimal.env: -------------------------------------------------------------------------------- 1 | ENVIRONMENT=Development 2 | DJANGO_CONFIGURATION=Development 3 | DJANGO_SECRET_KEY=supersecret 4 | DJANGO_DEBUG=yes 5 | DJANGO_TEMPLATE_DEBUG=yes 6 | WEBAPP_HOST=0.0.0.0 7 | WEBAPP_PORT=8010 8 | COLLECT_STATICS=1 9 | POSTGRES_PORT=5432 10 | REDIS_PORT=6379 11 | CELERY_ENABLED=1 12 | CACHEOPS_ENABLED=1 13 | NUM_WORKERS=4 14 | ANTINEX_WORKER_ENABLED=1 15 | ANTINEX_WORKER_ONLY=0 16 | ANTINEX_DELIVERY_MODE=persistent 17 | ANTINEX_API_URL=http://api:8010 18 | ANTINEX_AUTH_URL=redis://redis:6379/6 19 | ANTINEX_EXCHANGE_NAME=webapp.predict.requests 20 | ANTINEX_EXCHANGE_TYPE=topic 21 | ANTINEX_QUEUE_NAME=webapp.predict.requests 22 | ANTINEX_WORKER_SSL_ENABLED=0 
23 | ANTINEX_CA_CERTS= 24 | ANTINEX_KEYFILE= 25 | ANTINEX_CERTFILE= 26 | ANTINEX_TLS_PROTOCOL= 27 | ANTINEX_KEYFILE= 28 | DJANGO_ALLOWED_HOSTS=* 29 | USE_VENV=/opt/venv 30 | -------------------------------------------------------------------------------- /envs/only-core-dev.env: -------------------------------------------------------------------------------- 1 | export ENVIRONMENT="Development" 2 | export DJANGO_CONFIGURATION="${ENVIRONMENT}" 3 | export DJANGO_SECRET_KEY="supersecret" 4 | export DJANGO_DEBUG="yes" 5 | export DJANGO_TEMPLATE_DEBUG="yes" 6 | export COLLECT_STATICS="1" 7 | export CELERY_ENABLED="0" 8 | export CACHEOPS_ENABLED="0" 9 | export USE_ENV="only-core-dev" 10 | export ANTINEX_WORKER_ENABLED="1" 11 | export ANTINEX_WORKER_ONLY="1" 12 | export ANTINEX_DELIVERY_MODE="persistent" 13 | export ANTINEX_AUTH_URL="redis://localhost:6379/6" 14 | export ANTINEX_EXCHANGE_NAME="webapp.predict.requests" 15 | export ANTINEX_EXCHANGE_TYPE="topic" 16 | export ANTINEX_QUEUE_NAME="webapp.predict.requests" 17 | export ANTINEX_WORKER_SSL_ENABLED="0" 18 | export ANTINEX_CA_CERTS= 19 | export ANTINEX_KEYFILE= 20 | export ANTINEX_CERTFILE= 21 | export ANTINEX_TLS_PROTOCOL= 22 | export ANTINEX_KEYFILE= 23 | unset POSTGRES_DB 24 | -------------------------------------------------------------------------------- /envs/openshift-dev.env: -------------------------------------------------------------------------------- 1 | ENVIRONMENT=Development 2 | DJANGO_CONFIGURATION=Development 3 | DJANGO_SECRET_KEY=supersecret 4 | DJANGO_DEBUG=yes 5 | DJANGO_TEMPLATE_DEBUG=yes 6 | WEBAPP_HOST=0.0.0.0 7 | WEBAPP_PORT=8010 8 | COLLECT_STATICS=1 9 | POSTGRES_HOST=postgres 10 | POSTGRES_PORT=5432 11 | REDIS_HOST=redis 12 | REDIS_PORT=6379 13 | CELERY_ENABLED=1 14 | CACHEOPS_ENABLED=1 15 | NUM_WORKERS=4 16 | ANTINEX_WORKER_ENABLED=1 17 | ANTINEX_WORKER_ONLY=0 18 | ANTINEX_DELIVERY_MODE=persistent 19 | ANTINEX_API_URL=http://api:8010 20 | ANTINEX_AUTH_URL=redis://redis:6379/6 21 | ANTINEX_EXCHANGE_NAME=webapp.predict.requests 22 | ANTINEX_EXCHANGE_TYPE=topic 23 | ANTINEX_QUEUE_NAME=webapp.predict.requests 24 | ANTINEX_WORKER_SSL_ENABLED=0 25 | ANTINEX_CA_CERTS= 26 | ANTINEX_KEYFILE= 27 | ANTINEX_CERTFILE= 28 | ANTINEX_TLS_PROTOCOL= 29 | ANTINEX_KEYFILE= 30 | DJANGO_ALLOWED_HOSTS=* 31 | USE_ENV=openshift-dev 32 | USE_VENV=/opt/venv 33 | -------------------------------------------------------------------------------- /envs/openshift-no-hostnames.env: -------------------------------------------------------------------------------- 1 | ENVIRONMENT=Development 2 | DJANGO_CONFIGURATION=Development 3 | DJANGO_SECRET_KEY=supersecret 4 | DJANGO_DEBUG=yes 5 | DJANGO_TEMPLATE_DEBUG=yes 6 | WEBAPP_HOST=0.0.0.0 7 | WEBAPP_PORT=8010 8 | COLLECT_STATICS=1 9 | POSTGRES_PORT=5432 10 | REDIS_PORT=6379 11 | CELERY_ENABLED=1 12 | CACHEOPS_ENABLED=1 13 | NUM_WORKERS=4 14 | ANTINEX_WORKER_ENABLED=1 15 | ANTINEX_WORKER_ONLY=0 16 | ANTINEX_DELIVERY_MODE=persistent 17 | ANTINEX_API_URL=http://api:8010 18 | ANTINEX_AUTH_URL=redis://redis:6379/6 19 | ANTINEX_EXCHANGE_NAME=webapp.predict.requests 20 | ANTINEX_EXCHANGE_TYPE=topic 21 | ANTINEX_QUEUE_NAME=webapp.predict.requests 22 | ANTINEX_WORKER_SSL_ENABLED=0 23 | ANTINEX_CA_CERTS= 24 | ANTINEX_KEYFILE= 25 | ANTINEX_CERTFILE= 26 | ANTINEX_TLS_PROTOCOL= 27 | ANTINEX_KEYFILE= 28 | DJANGO_ALLOWED_HOSTS=* 29 | USE_ENV=openshift-no-hostnames 30 | USE_VENV=/opt/venv 31 | -------------------------------------------------------------------------------- /envs/pgadmin-dev.env: 
-------------------------------------------------------------------------------- 1 | SERVER_MODE=True 2 | DEFAULT_SERVER_PORT=5050 3 | DEFAULT_SERVER_HOST=0.0.0.0 4 | POSTGRES_HOSTNAME=postgres 5 | POSTGRES_PORT=5432 6 | PGADMIN_SETUP_EMAIL=admin@email.com 7 | PGADMIN_SETUP_PASSWORD=postgres 8 | -------------------------------------------------------------------------------- /envs/pipeline-dev.env: -------------------------------------------------------------------------------- 1 | ANTINEX_PUBLISH_ENABLED=1 2 | ANTINEX_URL=http://localhost:8010 3 | ANTINEX_USER=root 4 | ANTINEX_EMAIL=123321 5 | ANTINEX_PASSWORD=123321 6 | ANTINEX_PUBLISH_TO_CORE=1 7 | ANTINEX_PUBLISH_REQUEST_FILE=/opt/antinex/client/examples/predict-rows-scaler-full-django.json 8 | ANTINEX_FEATURES_TO_PROCESS=idx,arp_hwlen,arp_hwtype,arp_id,arp_op,arp_plen,arp_ptype,dns_default_aa,dns_default_ad,dns_default_an,dns_default_ancount,dns_default_ar,dns_default_arcount,dns_default_cd,dns_default_id,dns_default_length,dns_default_ns,dns_default_nscount,dns_default_opcode,dns_default_qd,dns_default_qdcount,dns_default_qr,dns_default_ra,dns_default_rcode,dns_default_rd,dns_default_tc,dns_default_z,dns_id,eth_id,eth_type,icmp_addr_mask,icmp_code,icmp_gw,icmp_id,icmp_ptr,icmp_seq,icmp_ts_ori,icmp_ts_rx,icmp_ts_tx,icmp_type,icmp_unused,ip_id,ip_ihl,ip_len,ip_tos,ip_version,ipv6_fl,ipv6_hlim,ipv6_nh,ipv6_plen,ipv6_tc,ipv6_version,ipvsix_id,pad_id,tcp_dport,tcp_fields_options.MSS,tcp_fields_options.NOP,tcp_fields_options.SAckOK,tcp_fields_options.Timestamp,tcp_fields_options.WScale,tcp_id,tcp_seq,tcp_sport,udp_dport,udp_id,udp_len,udp_sport 9 | ANTINEX_IGNORE_FEATURES= 10 | ANTINEX_SORT_VALUES= 11 | ANTINEX_ML_TYPE=classification 12 | ANTINEX_USE_MODEL_NAME=Full-Django-AntiNex-Simple-Scaler-DNN 13 | ANTINEX_PREDICT_FEATURE=label_value 14 | ANTINEX_SEED=42 15 | ANTINEX_TEST_SIZE=0.2 16 | ANTINEX_BATCH_SIZE=32 17 | ANTINEX_EPOCHS=15 18 | ANTINEX_NUM_SPLITS=2 19 | ANTINEX_LOSS=binary_crossentropy 20 | ANTINEX_OPTIMIZER=adam 21 | ANTINEX_METRICS=accuracy 22 | ANTINEX_HISTORIES=val_loss,val_acc,loss,acc 23 | ANTINEX_VERSION=1 24 | ANTINEX_CONVERT_DATA=1 25 | ANTINEX_CONVERT_DATA_TYPE=float 26 | ANTINEX_MISSING_VALUE=-1.0 27 | ANTINEX_INCLUDE_FAILED_CONVERSIONS=false 28 | ANTINEX_CLIENT_VERBOSE=1 29 | ANTINEX_CLIENT_DEBUG=0 30 | USE_VENV=/opt/venv 31 | -------------------------------------------------------------------------------- /envs/postgres-dev.env: -------------------------------------------------------------------------------- 1 | POSTGRES_USER=postgres 2 | POSTGRES_PASSWORD=postgres 3 | POSTGRES_DB=webapp 4 | PGDATA=/var/lib/postgresql/data/pgdata 5 | -------------------------------------------------------------------------------- /envs/prod.env: -------------------------------------------------------------------------------- 1 | export ENVIRONMENT="Production" 2 | export DJANGO_CONFIGURATION="${ENVIRONMENT}" 3 | export DJANGO_SECRET_KEY="supersecret" 4 | export DJANGO_DEBUG="no" 5 | export DJANGO_TEMPLATE_DEBUG="no" 6 | export POSTGRES_HOST="0.0.0.0" 7 | export POSTGRES_PORT="5432" 8 | export POSTGRES_USER="postgres" 9 | export POSTGRES_PASSWORD="postgres" 10 | export POSTGRES_DB="webapp" 11 | export PGDATA="/var/lib/postgresql/data/pgdata" 12 | export CELERY_ENABLED="1" 13 | -------------------------------------------------------------------------------- /envs/splunk.env: -------------------------------------------------------------------------------- 1 | ENVIRONMENT=Development 2 | DJANGO_CONFIGURATION=Development 3 | 
DJANGO_SECRET_KEY=supersecret 4 | DJANGO_DEBUG=yes 5 | DJANGO_TEMPLATE_DEBUG=yes 6 | WEBAPP_HOST=0.0.0.0 7 | WEBAPP_PORT=8010 8 | LOG_LEVEL=INFO 9 | COLLECT_STATICS=1 10 | POSTGRES_HOST=0.0.0.0 11 | POSTGRES_PORT=5432 12 | POSTGRES_USER=postgres 13 | POSTGRES_PASSWORD=postgres 14 | POSTGRES_DB=webapp 15 | CELERY_ENABLED=1 16 | CACHEOPS_ENABLED=1 17 | USE_ENV=all-dev 18 | USE_VENV=/opt/venv 19 | NUM_WORKERS=4 20 | ANTINEX_WORKER_ENABLED=1 21 | ANTINEX_WORKER_ONLY=0 22 | ANTINEX_DELIVERY_MODE=persistent 23 | ANTINEX_AUTH_URL=redis://localhost:6379/6 24 | ANTINEX_EXCHANGE_NAME=webapp.predict.requests 25 | ANTINEX_EXCHANGE_TYPE=topic 26 | ANTINEX_QUEUE_NAME=webapp.predict.requests 27 | ANTINEX_WORKER_SSL_ENABLED=0 28 | ANTINEX_CA_CERTS= 29 | ANTINEX_KEYFILE= 30 | ANTINEX_CERTFILE= 31 | ANTINEX_TLS_PROTOCOL= 32 | ANTINEX_KEYFILE= 33 | DJANGO_ALLOWED_HOSTS=* 34 | SHARED_LOG_CFG=/opt/spylunking/spylunking/log/shared-logging.json 35 | SPLUNK_USER=trex 36 | SPLUNK_PASSWORD=123321 37 | SPLUNK_INDEX=antinex 38 | SPLUNK_ADDRESS=splunkenterprise:8088 39 | SPLUNK_API_ADDRESS=splunkenterprise:8089 40 | SPLUNK_TCP_ADDRESS=splunkenterprise:1514 41 | SPLUNK_HANDLER_NAME=splunk 42 | SPLUNK_VERIFY=1 43 | SPLUNK_DEBUG=0 44 | -------------------------------------------------------------------------------- /full-stack-dev.yml: -------------------------------------------------------------------------------- 1 | version: '2' 2 | 3 | services: 4 | 5 | # PostgreSQL database 6 | postgres: 7 | container_name: "postgres" 8 | image: postgres:10.2-alpine 9 | hostname: postgres 10 | env_file: 11 | - ./envs/postgres-dev.env 12 | ports: 13 | - "5432:5432" 14 | volumes: 15 | - ./data/postgres:/var/lib/postgresql/data 16 | 17 | # pgAdmin 18 | pgadmin: 19 | container_name: "pgadmin" 20 | image: jayjohnson/pgadmin4:1.0.0 21 | hostname: pgadmin 22 | env_file: 23 | - ./envs/pgadmin-dev.env 24 | ports: 25 | - "83:5050" 26 | volumes: 27 | - ./data/pgadmin:/data 28 | 29 | # Redis 30 | redis: 31 | container_name: "redis" 32 | hostname: redis 33 | image: redis:4.0.5-alpine 34 | ports: 35 | - "6379:6379" 36 | -------------------------------------------------------------------------------- /get_user_jwt_token.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | curl -s -X POST --header 'Content-Type: application/json' --header 'Accept: application/json' -d '{ "username": "root", "password": "123321" }' 'http://0.0.0.0:8010/api-token-auth/' 4 | echo "" 5 | -------------------------------------------------------------------------------- /install.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | venv=~/.venvs/venvdrfpipeline 4 | env_name=dev 5 | 6 | if [[ ! -e ${venv} ]]; then 7 | mkdir -p -m 755 ~/.venvs >> /dev/null 2>&1 8 | virtualenv -p python3 ${venv} 9 | fi 10 | 11 | if [[ ! -e ${venv}/bin/activate ]]; then 12 | echo "" 13 | echo "Failed to create virtualenv: virtualenv -p python3 ${venv}" 14 | echo "" 15 | exit 1 16 | fi 17 | 18 | if [[ "${USE_ENV}" != "" ]]; then 19 | env_name="${USE_ENV}" 20 | fi 21 | 22 | if [[ ! 
-e ./envs/${env_name}.env ]]; then 23 | echo "" 24 | echo "Failed to find env file: envs/${env_name}.env" 25 | echo "" 26 | exit 1 27 | fi 28 | 29 | echo "Activating and installing pips" 30 | source ${venv}/bin/activate && pip install --upgrade -r ./requirements.txt 31 | echo "" 32 | 33 | ./run-migrations.sh 34 | 35 | echo "" 36 | echo "Run ./start.sh to run" 37 | echo "" 38 | -------------------------------------------------------------------------------- /log/colors-logging.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": 1, 3 | "disable_existing_loggers": false, 4 | "formatters": { 5 | "simple": { 6 | "()": "colorlog.ColoredFormatter", 7 | "format": "%(log_color)s%(asctime)s - %(name)s - %(levelname)s - %(message)s%(reset)s" 8 | } 9 | }, 10 | "handlers": { 11 | "console": { 12 | "class": "logging.StreamHandler", 13 | "level": "INFO", 14 | "formatter": "simple", 15 | "stream": "ext://sys.stdout" 16 | }, 17 | "info_file_handler": { 18 | "class": "logging.handlers.RotatingFileHandler", 19 | "level": "INFO", 20 | "formatter": "simple", 21 | "filename": "latest.log", 22 | "maxBytes": 1048576, 23 | "backupCount": 2, 24 | "encoding": "utf8" 25 | } 26 | }, 27 | "loggers": { 28 | "my_module": { 29 | "level": "ERROR", 30 | "handlers": ["console"], 31 | "propagate": false 32 | } 33 | }, 34 | "root": { 35 | "level": "INFO", 36 | "handlers": ["console", "info_file_handler"] 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /log/console.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": 1, 3 | "disable_existing_loggers": false, 4 | "formatters": { 5 | "simple": { 6 | "()": "colorlog.ColoredFormatter", 7 | "format": "%(log_color)s%(asctime)s - %(name)s - %(levelname)s - %(message)s%(reset)s" 8 | } 9 | }, 10 | "handlers": { 11 | "console": { 12 | "class": "logging.StreamHandler", 13 | "level": "INFO", 14 | "formatter": "simple", 15 | "stream": "ext://sys.stdout" 16 | } 17 | }, 18 | "loggers": { 19 | "my_module": { 20 | "level": "INFO", 21 | "handlers": ["console"], 22 | "propagate": false 23 | } 24 | }, 25 | "root": { 26 | "level": "INFO", 27 | "handlers": ["console"] 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /log/logging.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": 1, 3 | "disable_existing_loggers": false, 4 | "formatters": { 5 | "simple": { 6 | "()": "colorlog.ColoredFormatter", 7 | "format": "%(log_color)s%(asctime)s - %(name)s - %(levelname)s - %(message)s%(reset)s" 8 | } 9 | }, 10 | "handlers": { 11 | "console": { 12 | "class": "logging.StreamHandler", 13 | "level": "INFO", 14 | "formatter": "simple", 15 | "stream": "ext://sys.stdout" 16 | }, 17 | "info_file_handler": { 18 | "class": "logging.handlers.RotatingFileHandler", 19 | "level": "INFO", 20 | "formatter": "simple", 21 | "filename": "latest.log", 22 | "maxBytes": 1048576, 23 | "backupCount": 2, 24 | "encoding": "utf8" 25 | } 26 | }, 27 | "loggers": { 28 | "my_module": { 29 | "level": "ERROR", 30 | "handlers": ["console"], 31 | "propagate": false 32 | } 33 | }, 34 | "root": { 35 | "level": "INFO", 36 | "handlers": ["console", "info_file_handler"] 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /only-rest.yml: -------------------------------------------------------------------------------- 1 | version: '2' 2 | 3 | 
services: 4 | 5 | # PostgreSQL database 6 | postgres: 7 | container_name: "postgres" 8 | image: postgres:10.2-alpine 9 | hostname: postgres 10 | env_file: 11 | - ./envs/postgres-dev.env 12 | ports: 13 | - "5432:5432" 14 | volumes: 15 | - ./data/postgres:/var/lib/postgresql/data 16 | 17 | # pgAdmin 18 | pgadmin: 19 | container_name: "pgadmin" 20 | image: jayjohnson/pgadmin4:1.0.0 21 | hostname: pgadmin 22 | env_file: 23 | - ./envs/pgadmin-dev.env 24 | ports: 25 | - "83:5050" 26 | volumes: 27 | - ./data/pgadmin:/data 28 | 29 | # Redis 30 | redis: 31 | container_name: "redis" 32 | hostname: redis 33 | image: redis:4.0.5-alpine 34 | ports: 35 | - "6379:6379" 36 | 37 | # Jupyter notebooks, converted notebooks as presentation html slides, and tensorboard 38 | jupyter: 39 | container_name: "jupyter" 40 | hostname: jupyter 41 | image: jayjohnson/ai-core:latest 42 | network_mode: "host" 43 | environment: 44 | - JUPYTER_PASS=admin 45 | - SHARED_DIR=/opt/data 46 | - BROKER_URL=redis://0.0.0.0:6379/6 47 | ports: 48 | - "8888:8888" 49 | - "8889:8889" 50 | - "8890:8890" 51 | - "6006:6006" 52 | entrypoint: "/opt/antinex/core/docker/jupyter/start-container.sh" 53 | 54 | # AntiNex Celery Worker for using pre-trained models 55 | core: 56 | container_name: "core" 57 | hostname: core 58 | image: jayjohnson/ai-core:latest 59 | network_mode: "host" 60 | environment: 61 | - SHARED_DIR=/opt/data 62 | - BROKER_URL=redis://0.0.0.0:6379/6 63 | depends_on: 64 | - postgres 65 | - redis 66 | volumes: 67 | - /tmp:/tmp 68 | entrypoint: "/bin/sh -c 'cd /opt/antinex/api && 69 | /opt/antinex/core/run-antinex-core.sh'" 70 | 71 | # Django Rest Framework + JWT + Swagger 72 | api: 73 | container_name: "api" 74 | hostname: api 75 | image: jayjohnson/ai-core:latest 76 | network_mode: "host" 77 | env_file: 78 | - ./envs/all-dev.env 79 | ports: 80 | - "8010:8010" 81 | depends_on: 82 | - postgres 83 | - redis 84 | volumes: 85 | - /tmp:/tmp 86 | entrypoint: "/bin/sh -c 'cd /opt/antinex/api && 87 | /opt/antinex/api/run-django.sh'" 88 | 89 | # Django Rest Framework Celery Worker 90 | worker: 91 | container_name: "worker" 92 | hostname: worker 93 | image: jayjohnson/ai-core:latest 94 | network_mode: "host" 95 | env_file: 96 | - ./envs/all-dev.env 97 | depends_on: 98 | - api 99 | - postgres 100 | - redis 101 | volumes: 102 | - /tmp:/tmp 103 | entrypoint: "/bin/sh -c 'cd /opt/antinex/api && 104 | /opt/antinex/api/run-worker.sh'" 105 | -------------------------------------------------------------------------------- /openshift/api/service.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | annotations: 5 | description: AntiNex API - Django REST Framework with JWT and Swagger - Celery Producer 6 | runtime: python3 7 | kompose.cmd: kompose convert -f compose.yml 8 | kompose.version: 1.13.0 (84fa826) 9 | creationTimestamp: null 10 | labels: 11 | antinex: api 12 | purpose: rest 13 | layer: rest 14 | auth: jwt 15 | toolchain: swagger 16 | messaging: redis 17 | cache: redis 18 | pubsub: publisher 19 | name: api 20 | spec: 21 | ports: 22 | - name: "8010" 23 | port: 8010 24 | targetPort: 8010 25 | selector: 26 | antinex: api 27 | purpose: rest 28 | layer: rest 29 | auth: jwt 30 | toolchain: swagger 31 | messaging: redis 32 | cache: redis 33 | pubsub: publisher 34 | status: 35 | loadBalancer: {} 36 | -------------------------------------------------------------------------------- /openshift/core/deployment.yaml: 
-------------------------------------------------------------------------------- 1 | apiVersion: extensions/v1beta1 2 | kind: Deployment 3 | metadata: 4 | annotations: 5 | description: AntiNex - AI Core - Celery Worker for Training and Making Predictions 6 | runtime: python3 7 | kompose.cmd: kompose convert -f compose.yml 8 | kompose.version: 1.13.0 (84fa826) 9 | creationTimestamp: null 10 | labels: 11 | antinex: core 12 | purpose: worker 13 | layer: backend 14 | ai.framework: keras 15 | ai.backend: tensorflow 16 | layer: backend 17 | name: core 18 | spec: 19 | replicas: 1 20 | strategy: {} 21 | template: 22 | metadata: 23 | creationTimestamp: null 24 | labels: 25 | antinex: core 26 | purpose: worker 27 | layer: backend 28 | ai.framework: keras 29 | ai.backend: tensorflow 30 | layer: backend 31 | spec: 32 | containers: 33 | - command: 34 | - /bin/bash 35 | - -c 36 | - cd /opt/antinex/core && . /opt/venv/bin/activate && source /opt/antinex/api/envs/openshift-no-hostnames.env && env | sort && /opt/antinex/core/run-antinex-core.sh 37 | env: 38 | - name: POSTGRES_HOST 39 | value: primary 40 | - name: POSTGRES_PORT 41 | value: "5432" 42 | - name: REDIS_HOST 43 | value: redis 44 | - name: REDIS_PORT 45 | value: "6379" 46 | - name: ANTINEX_AUTH_URL 47 | value: redis://redis:6379/6 48 | - name: ANTINEX_RESULT_AUTH_URL 49 | value: redis://redis:6379/9 50 | - name: BROKER_URL 51 | value: redis://redis:6379/6 52 | - name: SHARED_DIR 53 | value: /opt/data 54 | - name: USE_ENV 55 | value: openshift-no-hostnames 56 | - name: USE_VENV 57 | value: /opt/venv 58 | image: jayjohnson/antinex-core:latest 59 | name: core 60 | resources: {} 61 | hostname: core 62 | restartPolicy: Always 63 | status: {} 64 | -------------------------------------------------------------------------------- /openshift/core/log_to_splunk_deployment.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: extensions/v1beta1 2 | kind: Deployment 3 | metadata: 4 | annotations: 5 | description: AntiNex - AI Core - Celery Worker for Training and Making Predictions 6 | runtime: python3 7 | kompose.cmd: kompose convert -f compose.yml 8 | kompose.version: 1.13.0 (84fa826) 9 | creationTimestamp: null 10 | labels: 11 | antinex: core 12 | purpose: worker 13 | layer: backend 14 | ai.framework: keras 15 | ai.backend: tensorflow 16 | layer: backend 17 | name: core 18 | spec: 19 | replicas: 1 20 | strategy: {} 21 | template: 22 | metadata: 23 | creationTimestamp: null 24 | labels: 25 | antinex: core 26 | purpose: worker 27 | layer: backend 28 | ai.framework: keras 29 | ai.backend: tensorflow 30 | layer: backend 31 | spec: 32 | containers: 33 | - command: 34 | - /bin/bash 35 | - -c 36 | - cd /opt/antinex/core && . 
/opt/venv/bin/activate && source /opt/antinex/api/envs/openshift-no-hostnames.env && env | sort && /opt/antinex/core/run-antinex-core.sh 37 | env: 38 | - name: POSTGRES_HOST 39 | value: primary 40 | - name: POSTGRES_PORT 41 | value: "5432" 42 | - name: REDIS_HOST 43 | value: redis 44 | - name: REDIS_PORT 45 | value: "6379" 46 | - name: ANTINEX_AUTH_URL 47 | value: redis://redis:6379/6 48 | - name: ANTINEX_RESULT_AUTH_URL 49 | value: redis://redis:6379/9 50 | - name: BROKER_URL 51 | value: redis://redis:6379/6 52 | - name: SHARED_DIR 53 | value: /opt/data 54 | - name: USE_ENV 55 | value: openshift-no-hostnames 56 | - name: USE_VENV 57 | value: /opt/venv 58 | - name: DEPLOY_CONFIG 59 | value: log_to_splunk 60 | - name: LOG_NAME 61 | value: core 62 | - name: ENV_NAME 63 | value: ocp 64 | - name: SHARED_LOG_CFG 65 | value: /opt/spylunking/spylunking/log/shared-logging.json 66 | - name: LOG_HANDLER_NAME 67 | value: console 68 | - name: SPLUNK_HANDLER_NAME 69 | value: splunk 70 | - name: SPLUNK_INDEX 71 | value: antinex 72 | - name: SPLUNK_ADDRESS 73 | value: "192.168.0.21:8088" 74 | - name: SPLUNK_API_ADDRESS 75 | value: "192.168.0.21:8089" 76 | - name: SPLUNK_TCP_ADDRESS 77 | value: "192.168.0.21:1514" 78 | - name: SPLUNK_USER 79 | value: trex 80 | - name: SPLUNK_PASSWORD 81 | value: "123321" 82 | - name: SPLUNK_TOKEN 83 | value: 84 | - name: SPLUNK_VERIFY 85 | value: "0" 86 | image: jayjohnson/antinex-core:latest 87 | name: core 88 | resources: {} 89 | hostname: core 90 | restartPolicy: Always 91 | status: {} 92 | -------------------------------------------------------------------------------- /openshift/create-user.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | user_file="./users/user_1.sh" 4 | if [[ "${1}" != "" ]]; then 5 | if [[ -e ${1} ]]; then 6 | user_file="${1}" 7 | else 8 | echo "Did not find user_file: ${1}" 9 | exit 1 10 | fi 11 | fi 12 | 13 | echo "Loading user env: ${user_file}" 14 | source ${user_file} 15 | 16 | # View the AntiNex credentials for debugging: 17 | # echo "" 18 | # env | grep ANTINEX_ | sort 19 | # echo "" 20 | 21 | export ANTINEX_URL=$(./get-api-url.sh) 22 | ../tests/create-user.sh 23 | if [[ "${?}" != "0" ]]; then 24 | echo "Failed creating OpenShift user: ${ANTINEX_USER}" 25 | exit 1 26 | fi 27 | 28 | exit 0 29 | -------------------------------------------------------------------------------- /openshift/get-api-url.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | api_url=$(oc status | grep api | grep http | awk '{print $1}') 4 | echo "${api_url}" 5 | 6 | exit 0 7 | -------------------------------------------------------------------------------- /openshift/get-token.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | user="root" 4 | pw="123321" 5 | 6 | if [[ "${1}" != "" ]]; then 7 | user="${1}" 8 | fi 9 | 10 | if [[ "${ANTINEX_USER}" != "" ]]; then 11 | user="${ANTINEX_USER}" 12 | fi 13 | 14 | if [[ "${2}" != "" ]]; then 15 | pw="${2}" 16 | fi 17 | 18 | if [[ "${ANTINEX_PASSWORD}" != "" ]]; then 19 | pw="${ANTINEX_PASSWORD}" 20 | fi 21 | 22 | user_login_dict="{ \"username\": \"${user}\", \"password\": \"${pw}\" }" 23 | 24 | api_url=$(oc status | grep api | grep http | awk '{print $1}') 25 | if [[ "${api_url}" == "" ]]; then 26 | api_url="${ANTINEX_URL}" 27 | fi 28 | 29 | # echo "curl -s -X POST --header 'Content-Type: application/json' --header 'Accept: application/json' -d 
'${user_login_dict}' ${api_url}/api-token-auth/" 30 | curl -s -X POST --header 'Content-Type: application/json' --header 'Accept: application/json' -d "${user_login_dict}" "${api_url}/api-token-auth/" 31 | echo "" 32 | -------------------------------------------------------------------------------- /openshift/jupyter/deployment.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: extensions/v1beta1 2 | kind: Deployment 3 | metadata: 4 | annotations: 5 | description: Jupyter - Web Application for IPython Notebooks Data Science and Analysis 6 | runtime: python3 7 | kompose.cmd: kompose convert -f compose.yml 8 | kompose.version: 1.13.0 (84fa826) 9 | creationTimestamp: null 10 | labels: 11 | antinex: jupyter 12 | purpose: analysis 13 | layer: web 14 | users: data-scientists 15 | auth: basic 16 | messaging: redis 17 | cache: redis 18 | pubsub: publisher 19 | name: jupyter 20 | spec: 21 | replicas: 1 22 | strategy: {} 23 | template: 24 | metadata: 25 | creationTimestamp: null 26 | labels: 27 | antinex: jupyter 28 | purpose: analysis 29 | layer: web 30 | users: data-scientists 31 | auth: basic 32 | messaging: redis 33 | cache: redis 34 | pubsub: publisher 35 | spec: 36 | containers: 37 | - command: 38 | - /bin/bash 39 | - -c 40 | - cd /opt/antinex/core/docker/jupyter && export HOME=/opt/antinex/core/docker/jupyter && . /opt/venv/bin/activate && source /opt/antinex/api/envs/openshift-dev.env && env | sort && /opt/antinex/core/docker/jupyter/start-container.sh && tail -f /var/log/antinex/api/api.log 41 | env: 42 | - name: ANTINEX_URL 43 | value: http://api:8010 44 | - name: ANTINEX_USER 45 | value: root 46 | - name: ANTINEX_PASSWORD 47 | value: "123321" 48 | - name: BROKER_URL 49 | value: redis://redis:6379/6 50 | - name: JUPYTER_PORT 51 | value: "8888" 52 | - name: JUPYTER_PASS 53 | value: admin 54 | - name: SHARED_DIR 55 | value: /opt/data 56 | image: jayjohnson/antinex-jupyter:latest 57 | name: jupyter 58 | ports: 59 | - containerPort: 8888 60 | - containerPort: 8889 61 | - containerPort: 8890 62 | - containerPort: 6006 63 | resources: {} 64 | hostname: jupyter 65 | restartPolicy: Always 66 | status: {} 67 | -------------------------------------------------------------------------------- /openshift/jupyter/log_to_splunk_deployment.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: extensions/v1beta1 2 | kind: Deployment 3 | metadata: 4 | annotations: 5 | description: Jupyter - Web Application for IPython Notebooks Data Science and Analysis 6 | runtime: python3 7 | kompose.cmd: kompose convert -f compose.yml 8 | kompose.version: 1.13.0 (84fa826) 9 | creationTimestamp: null 10 | labels: 11 | antinex: jupyter 12 | purpose: analysis 13 | layer: web 14 | users: data-scientists 15 | auth: basic 16 | messaging: redis 17 | cache: redis 18 | pubsub: publisher 19 | name: jupyter 20 | spec: 21 | replicas: 1 22 | strategy: {} 23 | template: 24 | metadata: 25 | creationTimestamp: null 26 | labels: 27 | antinex: jupyter 28 | purpose: analysis 29 | layer: web 30 | users: data-scientists 31 | auth: basic 32 | messaging: redis 33 | cache: redis 34 | pubsub: publisher 35 | spec: 36 | containers: 37 | - command: 38 | - /bin/bash 39 | - -c 40 | - cd /opt/antinex/core/docker/jupyter && export HOME=/opt/antinex/core/docker/jupyter && . 
/opt/venv/bin/activate && source /opt/antinex/api/envs/openshift-dev.env && env | sort && /opt/antinex/core/docker/jupyter/start-container.sh && tail -f /var/log/antinex/api/api.log 41 | env: 42 | - name: ANTINEX_URL 43 | value: http://api:8010 44 | - name: ANTINEX_USER 45 | value: root 46 | - name: ANTINEX_PASSWORD 47 | value: "123321" 48 | - name: BROKER_URL 49 | value: redis://redis:6379/6 50 | - name: JUPYTER_PORT 51 | value: "8888" 52 | - name: JUPYTER_PASS 53 | value: admin 54 | - name: SHARED_DIR 55 | value: /opt/data 56 | - name: DEPLOY_CONFIG 57 | value: log_to_splunk 58 | - name: LOG_NAME 59 | value: jupyter 60 | - name: ENV_NAME 61 | value: ocp 62 | - name: SHARED_LOG_CFG 63 | value: /opt/spylunking/spylunking/log/shared-logging.json 64 | - name: LOG_HANDLER_NAME 65 | value: console 66 | - name: SPLUNK_HANDLER_NAME 67 | value: splunk 68 | - name: SPLUNK_INDEX 69 | value: antinex 70 | - name: SPLUNK_ADDRESS 71 | value: "192.168.0.21:8088" 72 | - name: SPLUNK_API_ADDRESS 73 | value: "192.168.0.21:8089" 74 | - name: SPLUNK_TCP_ADDRESS 75 | value: "192.168.0.21:1514" 76 | - name: SPLUNK_USER 77 | value: trex 78 | - name: SPLUNK_PASSWORD 79 | value: "123321" 80 | - name: SPLUNK_TOKEN 81 | value: 82 | - name: SPLUNK_VERIFY 83 | value: "0" 84 | image: jayjohnson/antinex-jupyter:latest 85 | name: jupyter 86 | ports: 87 | - containerPort: 8888 88 | - containerPort: 8889 89 | - containerPort: 8890 90 | - containerPort: 6006 91 | resources: {} 92 | hostname: jupyter 93 | restartPolicy: Always 94 | status: {} 95 | -------------------------------------------------------------------------------- /openshift/jupyter/service.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | annotations: 5 | description: Jupyter - Web Application for IPython Notebooks Data Science and Analysis 6 | runtime: python3 7 | kompose.cmd: kompose convert -f compose.yml 8 | kompose.version: 1.13.0 (84fa826) 9 | creationTimestamp: null 10 | labels: 11 | antinex: jupyter 12 | purpose: analysis 13 | layer: web 14 | users: data-scientists 15 | auth: basic 16 | messaging: redis 17 | cache: redis 18 | pubsub: publisher 19 | name: jupyter 20 | spec: 21 | ports: 22 | - name: "8888" 23 | port: 8888 24 | targetPort: 8888 25 | - name: "8889" 26 | port: 8889 27 | targetPort: 8889 28 | - name: "8890" 29 | port: 8890 30 | targetPort: 8890 31 | - name: "6006" 32 | port: 6006 33 | targetPort: 6006 34 | selector: 35 | antinex: jupyter 36 | status: 37 | loadBalancer: {} 38 | -------------------------------------------------------------------------------- /openshift/logs-api.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "Tailing API Logs:" 4 | oc logs -f deployment/api 5 | 6 | exit 0 7 | -------------------------------------------------------------------------------- /openshift/logs-core.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "Tailing Core Logs:" 4 | oc logs -f deployment/core 5 | 6 | exit 0 7 | -------------------------------------------------------------------------------- /openshift/logs-jupyter.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "Tailing Jupyter Logs:" 4 | oc logs -f deployment/jupyter 5 | 6 | exit 0 7 | --------------------------------------------------------------------------------
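# NOTE (added annotation, not a file in this repository): logs-api.sh, logs-core.sh, and logs-jupyter.sh above, like the logs-pipeline.sh and logs-worker.sh helpers that follow, all wrap the same one-line "oc logs -f deployment/<name>" call. A minimal generic sketch, assuming "oc" is logged in and the antinex project is selected:
#
#   #!/bin/bash
#   # Tail the logs of any antinex deployment by name, e.g.: ./logs.sh api
#   name="${1:-api}"
#   echo "Tailing ${name} Logs:"
#   oc logs -f "deployment/${name}"
#
# This only illustrates the shared pattern; the per-service scripts above and below are the helpers the repo actually ships.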
/openshift/logs-pipeline.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "Tailing Pipeline Logs:" 4 | oc logs -f deployment/pipeline 5 | 6 | exit 0 7 | -------------------------------------------------------------------------------- /openshift/logs-worker.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "Tailing Worker Logs:" 4 | oc logs -f deployment/worker 5 | 6 | exit 0 7 | -------------------------------------------------------------------------------- /openshift/pgadmin4/crunchy-template-http.json: -------------------------------------------------------------------------------- 1 | { 2 | "kind": "Service", 3 | "apiVersion": "v1", 4 | "metadata": { 5 | "name": "pgadmin4-http", 6 | "labels": { 7 | "name": "pgadmin4-http" 8 | } 9 | }, 10 | "spec": { 11 | "ports": [{ 12 | "name": "pgadmin4-http", 13 | "protocol": "TCP", 14 | "port": 5050, 15 | "targetPort": 5050, 16 | "nodePort": 30000 17 | }], 18 | "selector": { 19 | "name": "pgadmin4-http" 20 | }, 21 | "type": "NodePort", 22 | "sessionAffinity": "None" 23 | } 24 | } 25 | 26 | { 27 | "kind": "Pod", 28 | "apiVersion": "v1", 29 | "metadata": { 30 | "name": "pgadmin4-http", 31 | "labels": { 32 | "name": "pgadmin4-http" 33 | } 34 | }, 35 | "spec": { 36 | "securityContext": { 37 | $CCP_SECURITY_CONTEXT 38 | }, 39 | "volumes": [ 40 | { 41 | "name": "pgadmin", 42 | "persistentVolumeClaim": { 43 | "claimName": "pgadmin4-http-data" 44 | } 45 | }, 46 | { 47 | "name": "run", 48 | "emptyDir": {} 49 | } 50 | ], 51 | "containers": [ 52 | { 53 | "name": "pgadmin", 54 | "image": "$CCP_IMAGE_PREFIX/crunchy-pgadmin4:$CCP_PGADMIN_IMAGE_TAG", 55 | "ports": [ 56 | { 57 | "containerPort": 5050, 58 | "protocol": "TCP" 59 | } 60 | ], 61 | "env": [ 62 | { 63 | "name": "PGADMIN_SETUP_EMAIL", 64 | "value": "$PGADMIN_SETUP_EMAIL" 65 | }, 66 | { 67 | "name": "PGADMIN_SETUP_PASSWORD", 68 | "value": "$PGADMIN_SETUP_PASSWORD" 69 | } 70 | ], 71 | "volumeMounts": [ 72 | { 73 | "mountPath": "/var/lib/pgadmin", 74 | "name": "pgadmin", 75 | "readOnly": false 76 | }, 77 | { 78 | "mountPath": "/run/httpd", 79 | "name": "run", 80 | "readOnly": false 81 | } 82 | ] 83 | } 84 | ] 85 | } 86 | } 87 | -------------------------------------------------------------------------------- /openshift/pgadmin4/persistent-volume.json: -------------------------------------------------------------------------------- 1 | { 2 | "apiVersion": "v1", 3 | "kind": "PersistentVolume", 4 | "metadata": { 5 | "annotations": { 6 | "pv.kubernetes.io/bound-by-controller": "yes" 7 | }, 8 | "creationTimestamp": null, 9 | "name": "pgadmin-antinex-volume" 10 | }, 11 | "spec": { 12 | "accessModes": [ 13 | "ReadWriteOnce" 14 | ], 15 | "capacity": { 16 | "storage": "10G" 17 | }, 18 | "nfs": { 19 | "path": "/exports/pgadmin-antinex", 20 | "server": "localhost" 21 | }, 22 | "persistentVolumeReclaimPolicy": "Retain", 23 | "volumeName": "pgadmin-antinex-volume" 24 | }, 25 | "status": {} 26 | } 27 | -------------------------------------------------------------------------------- /openshift/postgres/crunchy-template.json: -------------------------------------------------------------------------------- 1 | { 2 | "kind": "Service", 3 | "apiVersion": "v1", 4 | "metadata": { 5 | "name": "primary", 6 | "labels": { 7 | "name": "primary" 8 | } 9 | }, 10 | "spec": { 11 | "ports": [{ 12 | "protocol": "TCP", 13 | "port": 5432, 14 | "targetPort": 5432, 15 | "nodePort": 0 16 | }], 17 | "selector": { 18 | 
"name": "primary" 19 | }, 20 | "type": "ClusterIP", 21 | "sessionAffinity": "None" 22 | } 23 | } 24 | 25 | { 26 | "kind": "Pod", 27 | "apiVersion": "v1", 28 | "metadata": { 29 | "name": "primary", 30 | "labels": { 31 | "name": "primary" 32 | } 33 | }, 34 | "spec": { 35 | "securityContext": { 36 | $CCP_SECURITY_CONTEXT 37 | }, 38 | "containers": [{ 39 | "name": "postgres", 40 | "image": "$CCP_IMAGE_PREFIX/crunchy-postgres:$CCP_IMAGE_TAG", 41 | "ports": [{ 42 | "containerPort": 5432, 43 | "protocol": "TCP" 44 | }], 45 | "env": [{ 46 | "name": "PGHOST", 47 | "value": "/tmp" 48 | }, { 49 | "name": "PG_PRIMARY_USER", 50 | "value": "primaryuser" 51 | }, { 52 | "name": "PG_PRIMARY_PORT", 53 | "value": "5432" 54 | }, { 55 | "name": "PG_MODE", 56 | "value": "primary" 57 | }, { 58 | "name": "PG_PRIMARY_PASSWORD", 59 | "value": "123321" 60 | }, { 61 | "name": "PG_USER", 62 | "value": "antinex" 63 | }, { 64 | "name": "PG_PASSWORD", 65 | "value": "antinex" 66 | }, { 67 | "name": "PG_DATABASE", 68 | "value": "webapp" 69 | }, { 70 | "name": "PG_ROOT_PASSWORD", 71 | "value": "123321" 72 | }], 73 | "volumeMounts": [{ 74 | "mountPath": "/pgdata", 75 | "name": "pgdata", 76 | "readOnly": false 77 | }, { 78 | "mountPath": "/backup", 79 | "name": "backup", 80 | "readOnly": true 81 | }] 82 | }], 83 | "volumes": [{ 84 | "name": "pgdata", 85 | "persistentVolumeClaim": { 86 | "claimName": "primary-pgdata" 87 | } 88 | }, { 89 | "name": "backup", 90 | "emptyDir": {} 91 | }] 92 | } 93 | } 94 | -------------------------------------------------------------------------------- /openshift/postgres/deployment.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: extensions/v1beta1 2 | kind: Deployment 3 | metadata: 4 | annotations: 5 | description: Postgres - Database 6 | runtime: python3 7 | kompose.cmd: kompose convert -f compose.yml 8 | kompose.version: 1.13.0 (84fa826) 9 | creationTimestamp: null 10 | labels: 11 | antinex: postgres 12 | purpose: database 13 | layer: data 14 | name: postgres 15 | spec: 16 | replicas: 1 17 | strategy: {} 18 | template: 19 | metadata: 20 | creationTimestamp: null 21 | labels: 22 | antinex: postgres 23 | spec: 24 | containers: 25 | - env: 26 | - name: POSTGRESQL_USER 27 | value: antinex 28 | - name: POSTGRESQL_PASSWORD 29 | value: antinex 30 | - name: POSTGRESQL_DATABASE 31 | value: webapp 32 | image: centos/postgresql-96-centos7 33 | name: postgres 34 | ports: 35 | - containerPort: 5432 36 | resources: {} 37 | hostname: postgres 38 | restartPolicy: Always 39 | status: {} 40 | -------------------------------------------------------------------------------- /openshift/postgres/persistent-volume.json: -------------------------------------------------------------------------------- 1 | { 2 | "apiVersion": "v1", 3 | "kind": "PersistentVolume", 4 | "metadata": { 5 | "annotations": { 6 | "pv.kubernetes.io/bound-by-controller": "yes" 7 | }, 8 | "creationTimestamp": null, 9 | "name": "postgres-antinex-volume" 10 | }, 11 | "spec": { 12 | "accessModes": [ 13 | "ReadWriteOnce" 14 | ], 15 | "capacity": { 16 | "storage": "10G" 17 | }, 18 | "nfs": { 19 | "path": "/exports/postgres-antinex", 20 | "server": "localhost" 21 | }, 22 | "persistentVolumeReclaimPolicy": "Retain", 23 | "volumeName": "postgres-antinex-volume" 24 | }, 25 | "status": {} 26 | } 27 | -------------------------------------------------------------------------------- /openshift/postgres/service.yaml: -------------------------------------------------------------------------------- 1 | 
apiVersion: v1 2 | kind: Service 3 | metadata: 4 | annotations: 5 | description: Postgres - Database 6 | runtime: python3 7 | kompose.cmd: kompose convert -f compose.yml 8 | kompose.version: 1.13.0 (84fa826) 9 | creationTimestamp: null 10 | labels: 11 | antinex: postgres 12 | purpose: database 13 | layer: data 14 | name: postgres 15 | spec: 16 | ports: 17 | - name: "5432" 18 | port: 5432 19 | targetPort: 5432 20 | selector: 21 | antinex: postgres 22 | status: 23 | loadBalancer: {} 24 | -------------------------------------------------------------------------------- /openshift/primary-db.sh: -------------------------------------------------------------------------------- 1 | # https://crunchydata.github.io/crunchy-containers/getting-started/kubernetes-and-openshift/#_single_primary 2 | # sudo apt install golang-go 3 | # go get github.com/blang/expenv 4 | # mkdir -p -m 777 /opt/antinex 5 | # git clone https://github.com/CrunchyData/crunchy-containers.git /opt/antinex/crunchy 6 | # on ubuntu 18.04: 7 | # export GOPATH=$HOME/go 8 | # export PATH=$PATH:$GOROOT/bin:$GOPATH/bin 9 | 10 | export CCP_IMAGE_PREFIX="crunchydata" 11 | export CCP_IMAGE_TAG="centos7-10.4-1.8.3" 12 | export CCP_PGADMIN_IMAGE_TAG="centos7-10.3-1.8.2" 13 | export CCP_CLI="oc" 14 | export CCP_NAMESPACE="antinex" 15 | export CCPROOT="/opt/antinex/api/openshift/.pgdeployment/" 16 | export CCP_SECURITY_CONTEXT='"supplementalGroups": [65534]' 17 | export CCP_STORAGE_PATH="/exports/postgres-antinex" 18 | export CCP_NFS_IP="192.168.0.35" 19 | export CCP_STORAGE_MODE="ReadWriteMany" 20 | export CCP_STORAGE_CAPACITY="400M" 21 | 22 | export PROJECT="antinex" 23 | export PG_USER="antinex" 24 | export PG_PASSWORD="antinex" 25 | export PG_DATABASE="webapp" 26 | export PG_PRIMARY_PASSWORD="123321" 27 | export PG_SVC_NAME="primary" 28 | export PG_DEPLOYMENT_DIR="./.pgdeployment" 29 | export PG_REPO="https://github.com/jay-johnson/crunchy-containers.git" 30 | export PGADMIN_REPO="https://github.com/jay-johnson/crunchy-containers.git" 31 | export PGADMIN_SVC_NAME="pgadmin4-http" 32 | export PGADMIN_DEPLOYMENT_DIR="./.pgdeployment" 33 | export PGADMIN_SETUP_EMAIL="admin@admin.com" 34 | export PGADMIN_SETUP_PASSWORD="123321" 35 | -------------------------------------------------------------------------------- /openshift/redis/deployment.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: extensions/v1beta1 2 | kind: Deployment 3 | metadata: 4 | annotations: 5 | description: Redis - Database for Caching and Publisher-Subscribe Messaging 6 | runtime: python3 7 | kompose.cmd: kompose convert -f compose.yml 8 | kompose.version: 1.13.0 (84fa826) 9 | creationTimestamp: null 10 | labels: 11 | antinex: redis 12 | purpose: database 13 | layer: data 14 | name: redis 15 | spec: 16 | replicas: 1 17 | strategy: {} 18 | template: 19 | metadata: 20 | creationTimestamp: null 21 | labels: 22 | antinex: redis 23 | spec: 24 | containers: 25 | - image: redis:4.0.5-alpine 26 | name: redis 27 | ports: 28 | - containerPort: 6379 29 | resources: {} 30 | hostname: redis 31 | restartPolicy: Always 32 | status: {} 33 | -------------------------------------------------------------------------------- /openshift/redis/persistent-volume.json: -------------------------------------------------------------------------------- 1 | { 2 | "apiVersion": "v1", 3 | "kind": "PersistentVolume", 4 | "metadata": { 5 | "annotations": { 6 | "pv.kubernetes.io/bound-by-controller": "yes" 7 | }, 8 | "creationTimestamp": null, 9 | "name": 
"redis-antinex-pv" 10 | }, 11 | "spec": { 12 | "accessModes": [ 13 | "ReadWriteOnce" 14 | ], 15 | "capacity": { 16 | "storage": "10G" 17 | }, 18 | "nfs": { 19 | "path": "/exports/redis-antinex", 20 | "server": "localhost" 21 | }, 22 | "persistentVolumeReclaimPolicy": "Retain", 23 | "volumeName": "redis-antinex-pv" 24 | }, 25 | "status": {} 26 | } 27 | -------------------------------------------------------------------------------- /openshift/redis/pv.yml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: PersistentVolume 3 | metadata: 4 | annotations: 5 | pv.kubernetes.io/bound-by-controller: "yes" 6 | creationTimestamp: 2018-05-27T20:06:05Z 7 | name: redis-antinex-pv 8 | spec: 9 | accessModes: 10 | - ReadWriteOnce 11 | capacity: 12 | storage: 10G 13 | nfs: 14 | path: /exports/redis-antinex 15 | server: localhost 16 | persistentVolumeReclaimPolicy: Retain 17 | -------------------------------------------------------------------------------- /openshift/redis/pvc.yml: -------------------------------------------------------------------------------- 1 | apiVersion: "v1" 2 | kind: "PersistentVolumeClaim" 3 | metadata: 4 | name: "redis-antinex-pvc" 5 | spec: 6 | accessModes: 7 | - "ReadWriteOnce" 8 | resources: 9 | requests: 10 | storage: "10G" 11 | volumeName: "redis-antinex-pv" 12 | -------------------------------------------------------------------------------- /openshift/redis/service.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | annotations: 5 | description: Redis - Database for Caching and Publisher-Subscribe Messaging 6 | runtime: python3 7 | kompose.cmd: kompose convert -f compose.yml 8 | kompose.version: 1.13.0 (84fa826) 9 | creationTimestamp: null 10 | labels: 11 | antinex: redis 12 | purpose: database 13 | layer: data 14 | name: redis 15 | spec: 16 | ports: 17 | - name: "6379" 18 | port: 6379 19 | targetPort: 6379 20 | selector: 21 | antinex: redis 22 | status: 23 | loadBalancer: {} 24 | -------------------------------------------------------------------------------- /openshift/remove-all.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | project="antinex" 4 | project_exists=$(oc get projects | grep ${project} | wc -l) 5 | 6 | if [[ "${project}" == "default" ]]; then 7 | echo "" 8 | echo "Unable to delete the 'default' project" 9 | echo "" 10 | exit 1 11 | fi 12 | 13 | echo "" 14 | 15 | if [[ "${project_exists}" != "0" ]]; then 16 | echo "Deleting everything under the ${project} project" 17 | oc delete all --all -n ${project} 18 | 19 | echo "Deleting all Secrets" 20 | oc delete secrets $(oc get secrets | grep -i opaque | awk '{print $1}') 21 | 22 | echo "Deleting ${project} project" 23 | oc delete project ${project} 24 | 25 | echo "Checking Cluster Status" 26 | oc status 27 | echo "" 28 | 29 | echo "Changing to the default project" 30 | oc project default 31 | else 32 | echo "There is no project named: ${project}" 33 | echo "" 34 | 35 | echo "Current project" 36 | oc project 37 | echo "" 38 | 39 | echo "Listing available projects" 40 | oc get projects 41 | 42 | echo "" 43 | fi 44 | 45 | not_done="1" 46 | while [[ "${not_done}" == "1" ]]; do 47 | project_exists=$(oc get projects | grep ${project} | wc -l) 48 | 49 | if [[ "${project_exists}" == "0" ]]; then 50 | echo "" 51 | echo "Successfully deleted: ${project}" 52 | echo "" 53 | not_done="0" 54 | fi 55 | sleep 1 56 | done 57 
| 58 | exit 0 59 | -------------------------------------------------------------------------------- /openshift/run-pgadmin4.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | project="antinex" 4 | db_file="./primary-db.sh" 5 | svc_name="pgadmin4-http" 6 | deployment_dir="./.pgdeployment" 7 | repo="https://github.com/jay-johnson/crunchy-containers.git" 8 | first_time_deploy="0" 9 | 10 | if [[ "${1}" != "" ]]; then 11 | db_file="${1}" 12 | fi 13 | 14 | if [[ -e ${db_file} ]]; then 15 | echo "Loading db: ${db_file}" 16 | . ${db_file} 17 | else 18 | echo "Missing db file: ${db_file}" 19 | exit 1 20 | fi 21 | 22 | project="${PROJECT}" 23 | repo="${PGADMIN_REPO}" 24 | svc_name="${PGADMIN_SVC_NAME}" 25 | deployment_dir="${PGADMIN_DEPLOYMENT_DIR}" 26 | test_exists=$(oc project | grep ${project} | wc -l) 27 | 28 | if [[ "${test_exists}" == "0" ]]; then 29 | oc new-project ${project} 30 | first_time_deploy="1" 31 | fi 32 | 33 | env | sort | grep PGADMIN 34 | 35 | if [[ ! -e ${deployment_dir} ]]; then 36 | echo "Cloning Crunchy repo:" 37 | echo "git clone ${repo} ${deployment_dir}" 38 | git clone ${repo} ${deployment_dir} 39 | if [[ ! -e ${deployment_dir} ]]; then 40 | echo "Failed cloning Crunchy repo:" 41 | echo "git clone ${repo} ${deployment_dir}" 42 | exit 1 43 | fi 44 | fi 45 | 46 | echo "Creating project" 47 | oc project ${project} 48 | 49 | test_svc_exists=$(oc status | grep "svc/${svc_name}" | wc -l) 50 | 51 | echo "" 52 | echo "Getting status" 53 | oc status 54 | echo "" 55 | 56 | echo "Deploying Crunchy pgAdmin4 web application" 57 | if [[ "${test_svc_exists}" == "0" ]]; then 58 | echo " - checking file: ${deployment_dir}/examples/kube/${svc_name}/pgadmin4-http.json" 59 | if [[ ! -e ${deployment_dir}/examples/kube/${svc_name}/pgadmin4-http.json ]]; then 60 | echo "Installing Crunchy Containers Repository with command:" 61 | echo "git clone ${repo} ${deployment_dir}" 62 | git clone ${repo} ${deployment_dir} 63 | if [[ ! 
-e ${deployment_dir}/examples/kube/${svc_name}/pgadmin4-http.json ]]; then 64 | echo "Failed to clone Crunchy pgAdmin4 Deployment repository to: ${deployment_dir} - please confirm it exists" 65 | ls -lrt ${deployment_dir} 66 | echo "" 67 | echo "Tried cloning repository to deployment directory with command:" 68 | echo "git clone ${repo} ${deployment_dir}" 69 | echo "" 70 | exit 1 71 | else 72 | echo "Installed Crunchy Containers" 73 | fi 74 | else 75 | pushd ${deployment_dir} 76 | git checkout ./examples/kube/${svc_name}/pgadmin4-http.json 77 | git pull 78 | popd 79 | fi 80 | 81 | echo "${svc_name} - installing deployment" 82 | cp pgadmin4/crunchy-template-http.json ${deployment_dir}/examples/kube/${svc_name}/pgadmin4-http.json 83 | pushd ${deployment_dir}/examples/kube/${svc_name} 84 | echo "" 85 | echo "--------------------------------------------------" 86 | echo "${svc_name} - starting deployment directory: $(pwd)" 87 | ./run.sh 88 | echo "" 89 | echo "${svc_name} - end deployment" 90 | echo "--------------------------------------------------" 91 | popd 92 | 93 | test_route=$(oc status | grep "route/${svc_name}" | wc -l) 94 | if [[ "${test_route}" == "0" ]]; then 95 | echo "Exposing Service: svc/${svc_name}" 96 | oc expose svc/${svc_name} 97 | echo "" 98 | fi 99 | else 100 | echo "Detected running Crunchy ${svc_name}: svc/${svc_name}" 101 | fi 102 | -------------------------------------------------------------------------------- /openshift/show-create-db.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | db=webapp 4 | 5 | db_pod=$(oc get pods | grep postgres | awk '{print $1}' | tail -1) 6 | 7 | echo "" 8 | echo "Create the initial database with:" 9 | echo "oc rsh ${db_pod}" 10 | echo "createdb ${db}" 11 | echo "exit" 12 | echo "" 13 | 14 | exit 0 15 | -------------------------------------------------------------------------------- /openshift/show-migrate-cmds.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | user=antinex 4 | pw=antinex 5 | db=webapp 6 | 7 | api_pod=$(oc get pods | grep api | awk '{print $1}') 8 | 9 | if [[ "${ANTINEX_SILENT}" != "1" ]]; then 10 | echo "" 11 | echo "Run a migration with:" 12 | fi 13 | 14 | echo "oc rsh ${api_pod}" 15 | echo "/bin/bash" 16 | echo ". 
/opt/venv/bin/activate && cd /opt/antinex/api && source /opt/antinex/api/envs/openshift-no-hostnames.env && export POSTGRES_HOST=primary && export POSTGRES_DB=${db} && export POSTGRES_USER=${user} && export POSTGRES_PASSWORD=${pw} && ./run-migrations.sh" 17 | echo "exit;" 18 | echo "exit;" 19 | echo "" 20 | 21 | exit 0 22 | -------------------------------------------------------------------------------- /openshift/show-urls.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | api_url=$(oc status | grep api | grep http | awk '{print $1}') 4 | 5 | echo "Testing API Response:" 6 | echo "" 7 | echo "curl -k -i -X GET ${api_url}/swagger/" 8 | curl -k -i -X GET ${api_url}/swagger/ 9 | echo "" 10 | 11 | exit 0 12 | -------------------------------------------------------------------------------- /openshift/ssh-api.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "Logging into deployment/api" 4 | oc rsh deployment/api /bin/bash 5 | -------------------------------------------------------------------------------- /openshift/ssh-core.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "Logging into deployment/core" 4 | oc rsh deployment/core /bin/bash 5 | -------------------------------------------------------------------------------- /openshift/ssh-jupyter.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "Logging into deployment/jupyter" 4 | oc rsh deployment/jupyter /bin/bash 5 | -------------------------------------------------------------------------------- /openshift/ssh-pipeline.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "Logging into deployment/pipeline" 4 | oc rsh deployment/pipeline /bin/bash 5 | -------------------------------------------------------------------------------- /openshift/ssh-worker.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "Logging into deployment/worker" 4 | oc rsh deployment/worker /bin/bash 5 | -------------------------------------------------------------------------------- /openshift/stop-all.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "Deleting API" 4 | oc delete -f api/service.yaml -f api/deployment.yaml 5 | echo "" 6 | 7 | echo "Deleting Workers:" 8 | oc delete -f worker/deployment.yaml 9 | echo "" 10 | 11 | echo "Deleting Core:" 12 | oc delete -f core/deployment.yaml 13 | echo "" 14 | 15 | echo "Deleting Pipeline:" 16 | oc delete -f pipeline/deployment.yaml 17 | echo "" 18 | 19 | echo "Deleting Jupyter:" 20 | oc delete -f jupyter/deployment.yaml -f jupyter/service.yaml 21 | echo "" 22 | 23 | echo "Deleting Redis:" 24 | oc delete svc/redis dc/redis 25 | echo "" 26 | 27 | echo "Deleting Postgres:" 28 | oc delete svc/postgres dc/postgres 29 | echo "" 30 | 31 | echo "Checking Cluster Status:" 32 | oc status 33 | echo "" 34 | 35 | exit 0 36 | -------------------------------------------------------------------------------- /openshift/test-pg-deploy.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | project=antinex 4 | test_exists=$(oc project | grep ${project} | wc -l) 5 | first_time_deploy="0" 6 | if [[ "${test_exists}" == "0" ]]; then 7 | oc new-project ${project} 
8 | first_time_deploy="1" 9 | fi 10 | echo "" 11 | echo "Creating project" 12 | oc project ${project} 13 | 14 | echo "" 15 | echo "Getting status" 16 | oc status 17 | 18 | echo "Deploying Postgres" 19 | oc new-app postgres/template.json \ 20 | -p DATABASE_SERVICE_NAME=postgres \ 21 | -p POSTGRESQL_USER=antinex \ 22 | -p POSTGRESQL_PASSWORD=antinex \ 23 | -p POSTGRESQL_DATABASE=webapp 24 | echo "" 25 | 26 | echo "Waiting for app to register" 27 | sleep 5 28 | 29 | echo "Creating Postgres - persistent volume" 30 | oc apply -f postgres/persistent-volume.json 31 | 32 | echo "Waiting for volume to register" 33 | sleep 5 34 | 35 | echo "Creating Postgres persistent volume claim" 36 | # map to template - objects[1].spec.volumes[0].persistentVolumeClaim.claimName 37 | pvc_claim_name=postgres-pvc 38 | # volume name (passed to oc volume --name) 39 | pvc_name=postgres-pvc 40 | # what path is this volume mounting into the container 41 | pvc_mount_path=/var/lib/pgsql/data 42 | 43 | oc volume \ 44 | dc/postgres \ 45 | --add \ 46 | --claim-size 10G \ 47 | --claim-name ${pvc_claim_name} \ 48 | --name ${pvc_name} \ 49 | --mount-path ${pvc_mount_path} 50 | -------------------------------------------------------------------------------- /openshift/test-redis-deploy.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | project="antinex" 4 | # map to template - objects[1].spec.volumes[0].persistentVolumeClaim.claimName 5 | redis_pvc_name="redis-antinex-pvc" 6 | # volume name 7 | redis_pv_name="redis-antinex-pv" 8 | # what path is this volume mounting into the container 9 | redis_pvc_mount_path="/bitnami" 10 | 11 | test_exists=$(oc project | grep ${project} | wc -l) 12 | first_time_deploy="0" 13 | if [[ "${test_exists}" == "0" ]]; then 14 | oc new-project ${project} 15 | first_time_deploy="1" 16 | fi 17 | echo "" 18 | echo "Creating ${project} project" 19 | oc project ${project} 20 | 21 | echo "" 22 | echo "Getting Status" 23 | oc status 24 | 25 | echo "Deploying Redis" 26 | oc new-app \ 27 | --name=redis \ 28 | ALLOW_EMPTY_PASSWORD=yes \ 29 | --docker-image=bitnami/redis 30 | echo "" 31 | 32 | echo "Creating Redis - persistent volume" 33 | oc apply -f redis/persistent-volume.json 34 | echo "" 35 | 36 | echo "Waiting for volumes to register" 37 | sleep 5 38 | 39 | echo "Creating Redis persistent volume claim" 40 | oc apply -f redis/pvc.yml 41 | # oc volume \ 42 | # dc/redis \ 43 | # --add \ 44 | # --claim-size 10G \ 45 | # --claim-name ${redis_pvc_name} \ 46 | # --mount-path ${redis_pvc_mount_path} \ 47 | # --name ${redis_pv_name} 48 | echo "" 49 | 50 | echo "Exposing Redis service" 51 | oc expose svc/redis 52 | 53 | exit 0 54 | -------------------------------------------------------------------------------- /openshift/tools/delete-and-fix-volumes.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | target=ocp39.homelab.com 4 | 5 | oc delete pv primary-pgdata 6 | oc delete pvc primary-pgdata 7 | oc delete pv redis-antinex-pv 8 | oc delete pvc redis-antinex-pvc 9 | 10 | ssh root@${target} "rm -rf /exports/postgres-antinex; rm -rf /exports/redis-antinex; exportfs -rf; mkdir /exports/postgres-antinex; mkdir /pgdata; mkdir /exports/redis-antinex; chown nfsnobody:nfsnobody /exports -R;chown nfsnobody:nfsnobody /pgdata -R; chmod 777 /exports -R; chmod 777 /pgdata -R; exportfs -rf;echo '';showmount -e;echo '';cat /etc/hosts" 11 |
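12 | 13 | # Added annotation (these trailing comments are not in the original script): 14 | # usage sketch, assuming passwordless ssh as root to ${target} and an NFS 15 | # server exporting /exports. Running it destroys any data under 16 | # /exports/postgres-antinex and /exports/redis-antinex before re-exporting 17 | # the shares, so it is only meant for resetting a broken homelab deployment: 18 | # 19 | # ./delete-and-fix-volumes.sh 20 | # oc status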
-------------------------------------------------------------------------------- /openshift/tools/drop-database.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | db=webapp 4 | 5 | db_pod=$(oc get pods | grep primary | awk '{print $1}' | tail -1) 6 | 7 | echo "" 8 | echo "Drop the database with:" 9 | echo "oc rsh ${db_pod}" 10 | echo "psql -U postgres" 11 | echo "drop database ${db};" 12 | echo "\q" 13 | echo "createdb -U postgres ${db}" 14 | echo "exit" 15 | echo "" 16 | 17 | export ANTINEX_SILENT="1" 18 | if [[ -e ./show-migrate-cmds.sh ]]; then 19 | ./show-migrate-cmds.sh 20 | else 21 | ../show-migrate-cmds.sh 22 | fi 23 | 24 | exit 0 25 | -------------------------------------------------------------------------------- /openshift/users/user_1.sh: -------------------------------------------------------------------------------- 1 | export API_USER="trex" 2 | export API_PASSWORD="123321" 3 | export API_EMAIL="bugs@antinex.com" 4 | export API_FIRSTNAME="Guest" 5 | export API_LASTNAME="Guest" 6 | export API_URL="http://api-antinex.apps.homelab.com" 7 | export API_VERBOSE="true" 8 | export API_DEBUG="false" 9 | export OCP_URL="https://ocp39.homelab.com:8443" 10 | 11 | if [[ "${ANTINEX_USER}" != "" ]]; then 12 | export API_USER="${ANTINEX_USER}" 13 | else 14 | export ANTINEX_USER="${API_USER}" 15 | fi 16 | 17 | if [[ "${ANTINEX_PASSWORD}" != "" ]]; then 18 | export API_PASSWORD="${ANTINEX_PASSWORD}" 19 | else 20 | export ANTINEX_PASSWORD="${API_PASSWORD}" 21 | fi 22 | 23 | if [[ "${ANTINEX_URL}" != "" ]]; then 24 | export API_URL="${ANTINEX_URL}" 25 | else 26 | export ANTINEX_URL="${API_URL}" 27 | fi 28 | if [[ "${ANTINEX_EMAIL}" != "" ]]; then 29 | export API_EMAIL="${ANTINEX_EMAIL}" 30 | else 31 | export ANTINEX_EMAIL="${API_EMAIL}" 32 | fi 33 | if [[ "${ANTINEX_FIRSTNAME}" != "" ]]; then 34 | export API_FIRSTNAME="${ANTINEX_FIRSTNAME}" 35 | else 36 | export ANTINEX_FIRSTNAME="${API_FIRSTNAME}" 37 | fi 38 | if [[ "${ANTINEX_LASTNAME}" != "" ]]; then 39 | export API_LASTNAME="${ANTINEX_LASTNAME}" 40 | else 41 | export ANTINEX_LASTNAME="${API_LASTNAME}" 42 | fi 43 | if [[ "${ANTINEX_VERBOSE}" != "" ]]; then 44 | export API_VERBOSE="${ANTINEX_VERBOSE}" 45 | else 46 | export ANTINEX_VERBOSE="${API_VERBOSE}" 47 | fi 48 | if [[ "${ANTINEX_DEBUG}" != "" ]]; then 49 | export API_DEBUG="${ANTINEX_DEBUG}" 50 | else 51 | export ANTINEX_DEBUG="${API_DEBUG}" 52 | fi 53 | -------------------------------------------------------------------------------- /redis.yml: -------------------------------------------------------------------------------- 1 | version: '2' 2 | 3 | services: 4 | 5 | # Redis 6 | nexredis1: 7 | hostname: redis1 8 | image: redis:4.0.5-alpine 9 | labels: 10 | NAME: "nexredis1" 11 | ports: 12 | - "6379:6379" 13 | container_name: "nexredis1" 14 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | antinex-utils 2 | celery>=4.1.0 3 | celery-connectors 4 | celery-loaders 5 | celery[redis] 6 | colorlog 7 | dj-database-url==0.4.2 8 | Django==2.0 9 | django-admin 10 | django-cacheops 11 | django-celery-results 12 | django-configurations==2.0.0 13 | django-debug-toolbar==1.9.1 14 | django-extensions==1.9.7 15 | django-redis 16 | django-redis-cache 17 | django-redis-sessions 18 | django-rest-registration 19 | django-rest-swagger 20 | django-six 21 | djangorestframework 22 | djangorestframework-jwt 23 | 
flake8<=3.4.1 24 | gunicorn 25 | h5py 26 | jsonfield 27 | inotify 28 | keras 29 | matplotlib 30 | newrelic 31 | numpy 32 | pandas 33 | psycopg2==2.7.3.2 34 | pycodestyle==2.3.1 35 | pydocstyle 36 | requests 37 | recommonmark 38 | scikit-learn 39 | sphinx 40 | sphinx-autobuild 41 | sphinx_bootstrap_theme 42 | sphinx_rtd_theme 43 | spylunking 44 | tables 45 | tensorflow 46 | uwsgi 47 | virtualenvwrapper 48 | whitenoise==4.0b4 49 | Werkzeug==0.12.2 50 | -------------------------------------------------------------------------------- /run-all.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | compose="compose.yml" 4 | if [[ "$1" != "" ]]; then 5 | if [[ ! -e "$1" ]]; then 6 | echo "Missing compose file: ${1}" 7 | exit 1 8 | else 9 | compose="$1" 10 | fi 11 | fi 12 | 13 | echo "Starting all containers with: ${compose}" 14 | docker-compose -f $compose up -d 15 | 16 | exit 0 17 | -------------------------------------------------------------------------------- /run-celery-task.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import os 4 | import sys 5 | import json 6 | import argparse 7 | from spylunking.log.setup_logging import build_colorized_logger 8 | from celery import signals 9 | from celery_loaders.work_tasks.get_celery_app import get_celery_app 10 | 11 | 12 | name = 'run-celery-task' 13 | log = build_colorized_logger( 14 | name=name) 15 | 16 | parser = argparse.ArgumentParser(description="sending Celery task data") 17 | parser.add_argument( 18 | "-f", 19 | help="task data file: path to data file", 20 | required=True, 21 | dest="data_file") 22 | parser.add_argument( 23 | "-t", 24 | help="task name: drf_network_pipeline.users.tasks.task_get_user", 25 | required=True, 26 | dest="task_name") 27 | args = parser.parse_args() 28 | 29 | 30 | task_name = args.task_name 31 | task_data = None 32 | file_contents = None 33 | if args.data_file: 34 | if os.path.exists(args.data_file): 35 | file_contents = json.loads(open(args.data_file).read()) 36 | task_data = { 37 | "celery_enabled": True, 38 | "cache_key": None, 39 | "use_cache": False, 40 | "data": file_contents 41 | } 42 | # end of loading the data to send 43 | 44 | if not task_data: 45 | log.error(("Please provide a " 46 | "path to task_data file with -f ")) 47 | sys.exit(1) 48 | # end of checking if there is data to send 49 | 50 | 51 | # Disable celery log hijacking 52 | # https://github.com/celery/celery/issues/2509 53 | @signals.setup_logging.connect 54 | def setup_celery_logging(**kwargs): 55 | pass 56 | 57 | 58 | log.info(("start - {}") 59 | .format(name)) 60 | 61 | broker_url = os.getenv( 62 | "BROKER_URL", 63 | "redis://localhost:6379/9").strip().lstrip() 64 | backend_url = os.getenv( 65 | "BACKEND_URL", 66 | "redis://localhost:6379/10").strip().lstrip() 67 | 68 | # comma delimited 69 | tasks_str = os.getenv( 70 | "INCLUDE_TASKS", 71 | "drf_network_pipeline.users.tasks") 72 | include_tasks = tasks_str.split(",") 73 | 74 | log.info(("connecting Celery={} " 75 | "broker={} backend={} tasks={}") 76 | .format( 77 | name, 78 | broker_url, 79 | backend_url, 80 | include_tasks)) 81 | 82 | # Get the Celery app using the celery-loaders api 83 | app = get_celery_app( 84 | name, 85 | auth_url=broker_url, 86 | backend_url=backend_url, 87 | include_tasks=include_tasks) 88 | 89 | log.info(("app.broker_url={} calling task={} data={}") 90 | .format( 91 | app.conf.broker_url, 92 | task_name, 93 | task_data)) 94 | task_job = 
app.send_task( 95 | task_name, 96 | (task_data,)) 97 | log.info(("calling task={} - started job_id={}") 98 | .format( 99 | task_name, 100 | task_job.id)) 101 | task_result = task_job.get() 102 | log.info(("calling task={} - success " 103 | "job_id={} task_result={}") 104 | .format( 105 | task_name, 106 | task_job.id, 107 | task_result)) 108 | 109 | log.info(("end - {}") 110 | .format(name)) 111 | 112 | sys.exit(0) 113 | -------------------------------------------------------------------------------- /run-django.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | venv=~/.venvs/venvdrfpipeline 4 | env_name=drf-dev 5 | webapp_host="localhost" 6 | webapp_port="8010" 7 | 8 | # support for using venv in other locations 9 | if [[ "${USE_VENV}" != "" ]]; then 10 | if [[ -e ${USE_VENV}/bin/activate ]]; then 11 | echo "Using custom virtualenv: ${USE_VENV}" 12 | venv=${USE_VENV} 13 | else 14 | echo "Did not find custom virtualenv: ${USE_VENV}" 15 | exit 1 16 | fi 17 | fi 18 | 19 | if [[ "${USE_ENV}" != "" ]]; then 20 | env_name="${USE_ENV}" 21 | fi 22 | 23 | if [[ ! -e ./envs/${env_name}.env ]]; then 24 | echo "" 25 | echo "Failed to find env file: envs/${env_name}.env" 26 | echo "" 27 | exit 1 28 | fi 29 | 30 | if [[ "${WEBAPP_HOST}" != "" ]]; then 31 | webapp_host="${WEBAPP_HOST}" 32 | fi 33 | 34 | if [[ "${WEBAPP_PORT}" != "" ]]; then 35 | webapp_port="${WEBAPP_PORT}" 36 | fi 37 | 38 | echo "Activating pips: ${venv}/bin/activate" 39 | . ${venv}/bin/activate 40 | echo "" 41 | 42 | echo "Sourcing: ./envs/${env_name}.env" 43 | source ./envs/${env_name}.env 44 | echo "" 45 | 46 | cd webapp 47 | 48 | echo "" 49 | which python 50 | pip list 51 | echo "" 52 | echo "" 53 | env | grep DJANGO | sort 54 | echo "" 55 | 56 | # do not run these in the container in openshift 57 | if [[ "${SKIP_BUILD_DOCS}" != "1" ]]; then 58 | echo "" 59 | echo "Deploying Sphinx docs" 60 | ./build-docs.sh 61 | echo "" 62 | fi 63 | 64 | # do not run these in the container in openshift 65 | if [[ "${SKIP_COLLECT_STATICS}" != "1" ]]; then 66 | echo "" 67 | echo "Deploying Statics" 68 | ./collect-statics.sh 69 | echo "" 70 | fi 71 | 72 | if [[ "${SHARED_LOG_CFG}" != "" ]]; then 73 | echo "" 74 | echo "Logging config: ${SHARED_LOG_CFG}" 75 | echo "" 76 | fi 77 | 78 | echo "" 79 | echo "Starting Django listening on TCP port ${webapp_port}" 80 | echo "http://${webapp_host}:${webapp_port}/swagger" 81 | echo "" 82 | # runserver has issues with 83 | # threads which break keras 84 | # python ./manage.py runserver 0.0.0.0:8010 85 | 86 | if [[ "${APP_SERVER}" == "uwsgi" ]]; then 87 | uwsgi ./django-uwsgi.ini --thunder-lock 88 | else 89 | if [[ "${DJANGO_DEBUG}" == "yes" ]]; then 90 | gunicorn -c ./django-gunicorn.py drf_network_pipeline.wsgi 91 | else 92 | gunicorn -c ./django-gunicorn.py drf_network_pipeline.wsgi 93 | fi 94 | fi 95 | -------------------------------------------------------------------------------- /run-migrations.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | venv=~/.venvs/venvdrfpipeline 4 | env_name=dev 5 | 6 | # support for using venv in other locations 7 | if [[ "${USE_VENV}" != "" ]]; then 8 | if [[ -e ${USE_VENV}/bin/activate ]]; then 9 | echo "Using custom virtualenv: ${USE_VENV}" 10 | venv=${USE_VENV} 11 | else 12 | echo "Did not find custom virtualenv: ${USE_VENV}" 13 | exit 1 14 | fi 15 | fi 16 | 17 | if [[ ! 
-e ${venv}/bin/activate ]]; then 18 | echo "" 19 | echo "Failed to create virtualenv: virtualenv -p python3 ${venv}" 20 | echo "" 21 | exit 1 22 | fi 23 | 24 | if [[ "${USE_ENV}" != "" ]]; then 25 | env_name="${USE_ENV}" 26 | fi 27 | 28 | if [[ ! -e ./envs/${env_name}.env ]]; then 29 | echo "" 30 | echo "Failed to find env file: envs/${env_name}.env" 31 | echo "" 32 | exit 1 33 | fi 34 | 35 | echo "Activating pips: ${venv}/bin/activate" 36 | . ${venv}/bin/activate 37 | echo "" 38 | 39 | echo "Sourcing: ./envs/${env_name}.env" 40 | source ./envs/${env_name}.env 41 | echo "" 42 | 43 | cd webapp 44 | 45 | echo "Migrating" 46 | python manage.py makemigrations users 47 | python manage.py makemigrations pipeline 48 | 49 | echo "Syncing db" 50 | python manage.py migrate --run-syncdb 51 | echo "" 52 | 53 | echo "Running makemigrations" 54 | python manage.py makemigrations 55 | echo "" 56 | 57 | echo "Running initial migration" 58 | python manage.py migrate --noinput 59 | echo "" 60 | 61 | echo "Creating super user - this should only run once" 62 | ./create-super-user.sh 63 | echo "" 64 | 65 | cd .. 66 | 67 | exit 0 68 | -------------------------------------------------------------------------------- /run-redis.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # this assumes docker is running and docker-compose is installed 4 | 5 | echo "Starting redis" 6 | docker-compose -f redis.yml up -d 7 | 8 | exit 0 9 | -------------------------------------------------------------------------------- /run-stack.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | compose="full-stack-dev.yml" 4 | 5 | echo "Starting stack: ${compose}" 6 | docker-compose -f $compose up -d 7 | 8 | exit 0 9 | -------------------------------------------------------------------------------- /run-tests.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | venv=~/.venvs/venvdrfpipeline 4 | env_name=dev 5 | 6 | # support for using venv in other locations 7 | if [[ "${USE_VENV}" != "" ]]; then 8 | if [[ -e ${USE_VENV}/bin/activate ]]; then 9 | echo "Using custom virtualenv: ${USE_VENV}" 10 | venv=${USE_VENV} 11 | else 12 | echo "Did not find custom virtualenv: ${USE_VENV}" 13 | exit 1 14 | fi 15 | fi 16 | 17 | if [[ "${USE_ENV}" != "" ]]; then 18 | env_name="${USE_ENV}" 19 | fi 20 | 21 | if [[ ! -e ./envs/${env_name}.env ]]; then 22 | echo "" 23 | echo "Failed to find env file: envs/${env_name}.env" 24 | echo "" 25 | exit 1 26 | fi 27 | 28 | echo "Activating pips: ${venv}/bin/activate" 29 | . ${venv}/bin/activate 30 | echo "" 31 | 32 | echo "Sourcing: ./envs/${env_name}.env" 33 | source ./envs/${env_name}.env 34 | echo "" 35 | 36 | if [[ "${SHARED_LOG_CFG}" != "" ]]; then 37 | echo "" 38 | echo "Logging config: ${SHARED_LOG_CFG}" 39 | echo "" 40 | fi 41 | 42 | cd webapp 43 | 44 | echo "" 45 | echo "Running unit tests" 46 | python manage.py test 47 | last_status=$? 48 | if [[ "${last_status}" == "0" ]]; then 49 | echo "" 50 | echo "PASSED - unit tests" 51 | echo "" 52 | cd .. 53 | exit 0 54 | else 55 | echo "" 56 | echo "" 57 | echo "FAILED - unit tests - run manually with:" 58 | echo "" 59 | echo "cd webapp; python manage.py test" 60 | echo "" 61 | cd .. 
62 | exit 1 63 | fi 64 | -------------------------------------------------------------------------------- /run-worker.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | venv=~/.venvs/venvdrfpipeline/bin/activate 4 | env_name=drf-dev 5 | 6 | # support for using venv in other locations 7 | if [[ "${USE_VENV}" != "" ]]; then 8 | if [[ -e ${USE_VENV}/bin/activate ]]; then 9 | echo "Using custom virtualenv: ${USE_VENV}" 10 | venv=${USE_VENV} 11 | else 12 | echo "Did not find custom virtualenv: ${USE_VENV}" 13 | exit 1 14 | fi 15 | fi 16 | 17 | if [[ "${USE_ENV}" != "" ]]; then 18 | env_name="${USE_ENV}" 19 | fi 20 | 21 | if [[ ! -e ./envs/${env_name}.env ]]; then 22 | echo "" 23 | echo "Failed to find env file: envs/${env_name}.env" 24 | echo "" 25 | exit 1 26 | fi 27 | 28 | echo "Activating pips: ${venv}/bin/activate" 29 | . ${venv}/bin/activate 30 | echo "" 31 | 32 | echo "Sourcing: ./envs/${env_name}.env" 33 | source ./envs/${env_name}.env 34 | echo "" 35 | 36 | cd webapp 37 | 38 | echo "" 39 | which python 40 | pip list 41 | echo "" 42 | echo "" 43 | env | grep -E "DJANGO|CELERY" | sort 44 | echo "" 45 | 46 | echo "" 47 | echo "Loading Celery environment variables" 48 | echo "" 49 | 50 | num_workers=4 51 | log_level=INFO 52 | log_file=/tmp/worker.log 53 | worker_module=drf_network_pipeline 54 | worker_name="default@%h" 55 | 56 | if [[ "${NUM_WORKERS}" != "" ]]; then 57 | num_workers=$NUM_WORKERS 58 | fi 59 | if [[ "${LOG_LEVEL}" != "" ]]; then 60 | log_level=$LOG_LEVEL 61 | fi 62 | if [[ "${LOG_FILE}" != "" ]]; then 63 | log_file=$LOG_FILE 64 | fi 65 | if [[ "${WORKER_MODULE}" != "" ]]; then 66 | worker_module=$WORKER_MODULE 67 | fi 68 | if [[ "${WORKER_NAME}" != "" ]]; then 69 | worker_name=$WORKER_NAME 70 | fi 71 | 72 | custom_queues="celery,drf_network_pipeline.users.tasks.task_get_user,drf_network_pipeline.pipeline.tasks.task_ml_process_results,drf_network_pipeline.pipeline.tasks.task_publish_to_core,drf_network_pipeline.pipeline.tasks.task_ml_prepare,drf_network_pipeline.pipeline.tasks.task_ml_job" 73 | 74 | if [[ "${SHARED_LOG_CFG}" != "" ]]; then 75 | echo "" 76 | echo "Logging config: ${SHARED_LOG_CFG}" 77 | echo "" 78 | fi 79 | 80 | if [[ "${ANTINEX_API_NUM_WORKERS}" != "" ]]; then 81 | num_workers=${ANTINEX_API_NUM_WORKERS} 82 | fi 83 | 84 | # Use the WORKER_EXTRA_ARGS to pass in specific args: 85 | # http://docs.celeryproject.org/en/latest/reference/celery.bin.worker.html 86 | # 87 | # example args from 4.2.0: 88 | # --without-heartbeat 89 | # --heartbeat-interval N 90 | # --without-gossip 91 | # --without-mingle 92 | 93 | if [[ "${ANTINEX_API_WORKER_ARGS}" != "" ]]; then 94 | echo "Launching custom api worker=${ANTINEX_API_WORKER_ARGS}" 95 | celery worker ${ANTINEX_API_WORKER_ARGS} 96 | elif [[ "${num_workers}" == "1" ]]; then 97 | echo "Starting worker=${worker_module}" 98 | echo "celery worker -A ${worker_module} -c ${num_workers} -l ${log_level} -n ${worker_name} -Q ${custom_queues} ${WORKER_EXTRA_ARGS}" 99 | celery worker -A $worker_module -c ${num_workers} -l ${log_level} -n ${worker_name} -Q $custom_queues ${WORKER_EXTRA_ARGS} 100 | else 101 | echo "Starting workers=${worker_module}" 102 | echo "celery worker -A ${worker_module} -c ${num_workers} -l ${log_level} -n ${worker_name} --logfile=${log_file} -Q ${custom_queues} ${WORKER_EXTRA_ARGS}" 103 | celery worker -A $worker_module -c ${num_workers} -l ${log_level} -n ${worker_name} --logfile=${log_file} -Q ${custom_queues} ${WORKER_EXTRA_ARGS} 104 | fi 105 | echo "" 
106 | -------------------------------------------------------------------------------- /start.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | venv=~/.venvs/venvdrfpipeline 4 | env_name=dev 5 | 6 | if [[ "${USE_ENV}" != "" ]]; then 7 | env_name="${USE_ENV}" 8 | fi 9 | 10 | if [[ ! -e ./envs/${env_name}.env ]]; then 11 | echo "" 12 | echo "Failed to find env file: envs/${env_name}.env" 13 | echo "" 14 | exit 1 15 | fi 16 | 17 | echo "Activating and installing pips" 18 | source ${venv}/bin/activate 19 | echo "" 20 | 21 | echo "Sourcing: ./envs/${env_name}.env" 22 | source ./envs/${env_name}.env 23 | echo "" 24 | 25 | cd webapp 26 | 27 | echo "" 28 | which python 29 | pip list 30 | echo "" 31 | echo "" 32 | env | grep DJANGO | sort 33 | echo "" 34 | 35 | echo "" 36 | echo "Deploying Sphinx docs" 37 | ./build-docs.sh 38 | echo "" 39 | 40 | echo "" 41 | echo "Deploying Statics" 42 | ./collect-statics.sh 43 | echo "" 44 | 45 | echo "" 46 | echo "Starting Django listening on TCP port 8010" 47 | echo "http://localhost:8010/swagger" 48 | echo "" 49 | # runserver has issues with 50 | # threads which break keras 51 | # python ./manage.py runserver 0.0.0.0:8010 52 | 53 | if [[ "${APP_SERVER}" == "uwsgi" ]]; then 54 | uwsgi ./django-uwsgi.ini --thunder-lock 55 | else 56 | if [[ "${DJANGO_DEBUG}" == "yes" ]]; then 57 | gunicorn -c ./django-gunicorn.py drf_network_pipeline.wsgi 58 | else 59 | gunicorn -c ./django-gunicorn.py drf_network_pipeline.wsgi 60 | fi 61 | fi 62 | -------------------------------------------------------------------------------- /stop-all.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | compose="compose.yml" 4 | 5 | echo "Stopping stack: ${compose}" 6 | docker-compose -f $compose stop 7 | docker stop postgres pgadmin jupyter redis core api worker pipeline >> /dev/null 2>&1 8 | docker rm postgres pgadmin jupyter redis core api worker pipeline >> /dev/null 2>&1 9 | 10 | exit 0 11 | -------------------------------------------------------------------------------- /stop-stack.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | compose="full-stack-dev.yml" 4 | 5 | echo "Stopping stack: ${compose}" 6 | docker-compose -f $compose stop 7 | docker stop postgres redis pgadmin >> /dev/null 2>&1 8 | docker rm postgres redis pgadmin >> /dev/null 2>&1 9 | 10 | exit 0 11 | -------------------------------------------------------------------------------- /tail-api.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | docker logs -f api 4 | 5 | -------------------------------------------------------------------------------- /tail-core.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | docker logs -f core 4 | 5 | -------------------------------------------------------------------------------- /tail-pipeline.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | docker logs -f pipeline 4 | 5 | -------------------------------------------------------------------------------- /tail-worker.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | docker logs -f worker 4 | 5 | -------------------------------------------------------------------------------- /tests/celery/task_get_user.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "user_id": 2 3 | } 4 | -------------------------------------------------------------------------------- /tests/create-user.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | user="trex" 4 | pw="123321" 5 | email="bugs@antinex.com" 6 | firstname="Guest" 7 | lastname="Guest" 8 | auth_url="http://0.0.0.0:8010/users/" 9 | 10 | if [[ "${1}" != "" ]]; then 11 | user=${1} 12 | fi 13 | if [[ "${ANTINEX_USER}" != "" ]]; then 14 | user=${ANTINEX_USER} 15 | fi 16 | if [[ "${API_USER}" != "" ]]; then 17 | user=${API_USER} 18 | fi 19 | 20 | if [[ "${2}" != "" ]]; then 21 | pw=${2} 22 | fi 23 | if [[ "${ANTINEX_PASSWORD}" != "" ]]; then 24 | pw=${ANTINEX_PASSWORD} 25 | fi 26 | if [[ "${API_PASSWORD}" != "" ]]; then 27 | pw=${API_PASSWORD} 28 | fi 29 | 30 | if [[ "${3}" != "" ]]; then 31 | email=${3} 32 | fi 33 | if [[ "${ANTINEX_EMAIL}" != "" ]]; then 34 | email=${ANTINEX_EMAIL} 35 | fi 36 | if [[ "${API_EMAIL}" != "" ]]; then 37 | email=${API_EMAIL} 38 | fi 39 | 40 | if [[ "${4}" != "" ]]; then 41 | firstname=${4} 42 | fi 43 | if [[ "${ANTINEX_FIRSTNAME}" != "" ]]; then 44 | firstname=${ANTINEX_FIRSTNAME} 45 | fi 46 | if [[ "${API_FIRSTNAME}" != "" ]]; then 47 | firstname=${API_FIRSTNAME} 48 | fi 49 | 50 | if [[ "${5}" != "" ]]; then 51 | lastname=${5} 52 | fi 53 | if [[ "${ANTINEX_LASTNAME}" != "" ]]; then 54 | lastname=${ANTINEX_LASTNAME} 55 | fi 56 | if [[ "${API_LASTNAME}" != "" ]]; then 57 | lastname=${API_LASTNAME} 58 | fi 59 | 60 | if [[ "${ANTINEX_URL}" != "" ]]; then 61 | auth_url="${ANTINEX_URL}/users/" 62 | fi 63 | 64 | user_login_dict="{\"username\":\"${user}\",\"password\":\"${pw}\",\"email\":\"${email}\",\"first\":\"${firstname}\",\"last\":\"${lastname}\"}" 65 | 66 | echo "" 67 | echo "Creating user: ${user} on ${auth_url}" 68 | curl -s -X POST --header 'Content-Type: application/json' --header 'Accept: application/json' -d "${user_login_dict}" ${auth_url} 69 | last_status=$? 70 | if [[ "${last_status}" != 0 ]]; then 71 | echo "" 72 | echo "Failed adding user ${user} with command:" 73 | echo "curl -s -X POST --header 'Content-Type: application/json' --header 'Accept: application/json' -d \"${user_login_dict}\" ${auth_url}" 74 | echo "" 75 | exit 1 76 | fi 77 | 78 | echo "" 79 | echo "Getting token for user: ${user}" 80 | curl -s -X POST --header 'Content-Type: application/json' --header 'Accept: application/json' -d "${user_login_dict}" "${ANTINEX_URL}/api-token-auth/" 81 | last_status=$? 
82 | if [[ "${last_status}" != 0 ]]; then 83 | echo "" 84 | echo "Failed getting user ${user} token with command:" 85 | echo "curl -s -ii -X POST --header 'Content-Type: application/json' --header 'Accept: application/json' -d \"${user_login_dict}\" ${ANTINEX_URL}/api-token-auth/" 86 | echo "" 87 | exit 1 88 | fi 89 | 90 | echo "" 91 | 92 | exit 0 93 | -------------------------------------------------------------------------------- /tests/django-antinex.json: -------------------------------------------------------------------------------- 1 | { 2 | "csv_file": "/opt/antinex/antinex-datasets/v1/webapps/django/training-ready/v1_django_cleaned.csv", 3 | "meta_file": "/opt/antinex/antinex-datasets/v1/webapps/django/training-ready/cleaned_v1_django_metadata.json", 4 | "title": "AntiNex Django Defensive Neural Network", 5 | "desc": "build a deep neural network to defend django applications", 6 | "ml_type": "classification", 7 | "seed": 42, 8 | "test_size": 0.2, 9 | "batch_size": 32, 10 | "epochs": 5, 11 | "num_splits": 5, 12 | "loss": "binary_crossentropy", 13 | "optimizer": "adam", 14 | "model_desc": { 15 | "layers": [ 16 | { 17 | "num_neurons": 250, 18 | "init": "uniform", 19 | "activation": "relu" 20 | }, 21 | { 22 | "num_neurons": 200, 23 | "init": "uniform", 24 | "activation": "relu" 25 | }, 26 | { 27 | "num_neurons": 150, 28 | "init": "uniform", 29 | "activation": "relu" 30 | }, 31 | { 32 | "num_neurons": 100, 33 | "init": "uniform", 34 | "activation": "relu" 35 | }, 36 | { 37 | "num_neurons": 50, 38 | "init": "uniform", 39 | "activation": "relu" 40 | }, 41 | { 42 | "num_neurons": 1, 43 | "init": "uniform", 44 | "activation": "sigmoid" 45 | } 46 | ] 47 | }, 48 | "metrics": [ 49 | "accuracy" 50 | ], 51 | "histories": [ 52 | "val_loss", 53 | "val_acc", 54 | "loss", 55 | "acc" 56 | ], 57 | "ds_name": "cleaned", 58 | "algo_name": "dnn", 59 | "predict_feature": "label_value", 60 | "training_data": "{}", 61 | "pre_proc": "{}", 62 | "post_proc": "{}", 63 | "meta_data": "{ \"label_rules\": { \"set_if_above\": 85, \"labels\": [\"not_attack\", \"attack\"], \"label_values\": [0, 1] } }", 64 | "label_rules": { 65 | "set_if_above": 85, 66 | "labels": [ 67 | "not_attack", 68 | "attack" 69 | ], 70 | "label_values": [ 71 | 0, 72 | 1 73 | ] 74 | }, 75 | "version": 1 76 | } 77 | -------------------------------------------------------------------------------- /tests/django-deep-antinex.json: -------------------------------------------------------------------------------- 1 | { 2 | "csv_file": "/opt/antinex/antinex-datasets/v1/webapps/django/training-ready/v1_django_cleaned.csv", 3 | "meta_file": "/opt/antinex/antinex-datasets/v1/webapps/django/training-ready/cleaned_v1_django_metadata.json", 4 | "title": "AntiNex Django Defensive Neural Network", 5 | "desc": "build a deep neural network to defend django applications", 6 | "ml_type": "classification", 7 | "seed": 42, 8 | "test_size": 0.2, 9 | "batch_size": 32, 10 | "epochs": 5, 11 | "num_splits": 5, 12 | "loss": "binary_crossentropy", 13 | "optimizer": "adam", 14 | "model_desc": { 15 | "layers": [ 16 | { 17 | "num_neurons": 50, 18 | "init": "uniform", 19 | "activation": "relu" 20 | }, 21 | { 22 | "num_neurons": 45, 23 | "init": "uniform", 24 | "activation": "relu" 25 | }, 26 | { 27 | "num_neurons": 40, 28 | "init": "uniform", 29 | "activation": "relu" 30 | }, 31 | { 32 | "num_neurons": 35, 33 | "init": "uniform", 34 | "activation": "relu" 35 | }, 36 | { 37 | "num_neurons": 30, 38 | "init": "uniform", 39 | "activation": "relu" 40 | }, 41 | { 42 | 
"num_neurons": 25, 43 | "init": "uniform", 44 | "activation": "relu" 45 | }, 46 | { 47 | "num_neurons": 20, 48 | "init": "uniform", 49 | "activation": "relu" 50 | }, 51 | { 52 | "num_neurons": 15, 53 | "init": "uniform", 54 | "activation": "relu" 55 | }, 56 | { 57 | "num_neurons": 10, 58 | "init": "uniform", 59 | "activation": "relu" 60 | }, 61 | { 62 | "num_neurons": 9, 63 | "init": "uniform", 64 | "activation": "relu" 65 | }, 66 | { 67 | "num_neurons": 7, 68 | "init": "uniform", 69 | "activation": "relu" 70 | }, 71 | { 72 | "num_neurons": 5, 73 | "init": "uniform", 74 | "activation": "relu" 75 | }, 76 | { 77 | "num_neurons": 3, 78 | "init": "uniform", 79 | "activation": "relu" 80 | }, 81 | { 82 | "num_neurons": 1, 83 | "init": "uniform", 84 | "activation": "sigmoid" 85 | } 86 | ] 87 | }, 88 | "metrics": [ 89 | "accuracy" 90 | ], 91 | "histories": [ 92 | "val_loss", 93 | "val_acc", 94 | "loss", 95 | "acc" 96 | ], 97 | "ds_name": "cleaned", 98 | "algo_name": "dnn", 99 | "predict_feature": "label_value", 100 | "training_data": "{}", 101 | "pre_proc": "{}", 102 | "post_proc": "{}", 103 | "meta_data": "{ \"label_rules\": { \"set_if_above\": 85, \"labels\": [\"not_attack\", \"attack\"], \"label_values\": [0, 1] } }", 104 | "label_rules": { 105 | "set_if_above": 85, 106 | "labels": [ 107 | "not_attack", 108 | "attack" 109 | ], 110 | "label_values": [ 111 | 0, 112 | 1 113 | ] 114 | }, 115 | "version": 1 116 | } 117 | -------------------------------------------------------------------------------- /tests/get-a-job.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import os 4 | import sys 5 | import json 6 | import argparse 7 | import requests 8 | from spylunking.log.setup_logging import build_colorized_logger 9 | from antinex_utils.utils import ppj 10 | 11 | 12 | name = 'get-a-job' 13 | log = build_colorized_logger( 14 | name=name) 15 | 16 | 17 | parser = argparse.ArgumentParser(description="get a MLJobResult") 18 | parser.add_argument( 19 | "-i", 20 | help="MLJob.id for your user", 21 | required=False, 22 | dest="result_id") 23 | args = parser.parse_args() 24 | 25 | 26 | url = os.getenv( 27 | "ANTINEX_URL", 28 | "http://localhost:8010") 29 | username = os.getenv( 30 | "API_USER", 31 | "root") 32 | password = os.getenv( 33 | "API_PASS", 34 | "123321") 35 | 36 | # must be owned by the user logging in 37 | object_id = os.getenv( 38 | "JOB_ID", 39 | "1") 40 | 41 | # allow cli args to set the id 42 | if args.result_id: 43 | object_id = int(args.result_id) 44 | 45 | auth_url = "{}/api-token-auth/".format(url) 46 | resource_url = ("{}/ml/{}").format( 47 | url, 48 | object_id) 49 | use_headers = { 50 | "Content-type": "application/json" 51 | } 52 | login_data = { 53 | "username": username, 54 | "password": password 55 | } 56 | 57 | # Login as the user: 58 | log.info("Logging in user url={}".format(auth_url)) 59 | post_response = requests.post(auth_url, 60 | data=json.dumps(login_data), 61 | headers=use_headers) 62 | 63 | user_token = "" 64 | if post_response.status_code == 200: 65 | user_token = json.loads(post_response.text)["token"] 66 | 67 | if user_token == "": 68 | log.error(("Failed logging in as user={} - stopping" 69 | "post_response={}") 70 | .format(username, 71 | post_response.text)) 72 | sys.exit(1) 73 | else: 74 | log.info(("logged in user={} token={}") 75 | .format(username, 76 | user_token)) 77 | # end if/else 78 | 79 | log.info("building get data") 80 | 81 | use_headers = { 82 | "Content-type": "application/json", 83 | 
"Authorization": "JWT {}".format(user_token) 84 | } 85 | 86 | log.info(("Getting a Job url={}") 87 | .format(resource_url)) 88 | get_response = requests.get(resource_url, 89 | headers=use_headers) 90 | 91 | if get_response.status_code != 201 \ 92 | and get_response.status_code != 200: 93 | log.error(("Failed with GET response status={} reason={}") 94 | .format(get_response.status_code, 95 | get_response.reason)) 96 | log.error("Details:\n{}".format(get_response.text)) 97 | sys.exit(1) 98 | else: 99 | log.info(("SUCCESS - GET Response status={} reason={}") 100 | .format(get_response.status_code, 101 | get_response.reason)) 102 | 103 | as_json = True 104 | record = {} 105 | if as_json: 106 | record = json.loads(get_response.text) 107 | log.info(ppj(record)) 108 | # end of post for running an ML Job 109 | 110 | sys.exit(0) 111 | -------------------------------------------------------------------------------- /tests/get-a-prepared-dataset.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import os 4 | import sys 5 | import json 6 | import argparse 7 | import requests 8 | from spylunking.log.setup_logging import build_colorized_logger 9 | from antinex_utils.utils import ppj 10 | 11 | 12 | name = 'get-a-prepared-dataset' 13 | log = build_colorized_logger( 14 | name=name) 15 | 16 | 17 | parser = argparse.ArgumentParser(description="get a MLJobResult") 18 | parser.add_argument( 19 | "-i", 20 | help="MLJob.id for your user", 21 | required=False, 22 | dest="result_id") 23 | args = parser.parse_args() 24 | 25 | 26 | url = os.getenv( 27 | "ANTINEX_URL", 28 | "http://localhost:8010") 29 | username = os.getenv( 30 | "API_USER", 31 | "root") 32 | password = os.getenv( 33 | "API_PASS", 34 | "123321") 35 | 36 | # must be owned by the user logging in 37 | object_id = os.getenv( 38 | "PREPARE_JOB_ID", 39 | "1") 40 | 41 | # allow cli args to set the id 42 | if args.result_id: 43 | object_id = int(args.result_id) 44 | 45 | auth_url = "{}/api-token-auth/".format(url) 46 | resource_url = ("{}/mlprepare/{}").format( 47 | url, 48 | object_id) 49 | use_headers = { 50 | "Content-type": "application/json" 51 | } 52 | login_data = { 53 | "username": username, 54 | "password": password 55 | } 56 | 57 | # Login as the user: 58 | log.info("Logging in user url={}".format(auth_url)) 59 | post_response = requests.post(auth_url, 60 | data=json.dumps(login_data), 61 | headers=use_headers) 62 | 63 | user_token = "" 64 | if post_response.status_code == 200: 65 | user_token = json.loads(post_response.text)["token"] 66 | 67 | if user_token == "": 68 | log.error(("Failed logging in as user={} - stopping" 69 | "post_response={}") 70 | .format(username, 71 | post_response.text)) 72 | sys.exit(1) 73 | else: 74 | log.info(("logged in user={} token={}") 75 | .format(username, 76 | user_token)) 77 | # end if/else 78 | 79 | log.info("building get data") 80 | 81 | use_headers = { 82 | "Content-type": "application/json", 83 | "Authorization": "JWT {}".format(user_token) 84 | } 85 | 86 | log.info(("Getting a Prepare Dataset url={}") 87 | .format(resource_url)) 88 | get_response = requests.get(resource_url, 89 | headers=use_headers) 90 | 91 | if get_response.status_code != 201 \ 92 | and get_response.status_code != 200: 93 | log.error(("Failed with GET response status={} reason={}") 94 | .format(get_response.status_code, 95 | get_response.reason)) 96 | log.error("Details:\n{}".format(get_response.text)) 97 | sys.exit(1) 98 | else: 99 | log.info(("SUCCESS - GET Response 
status={} reason={}") 100 | .format(get_response.status_code, 101 | get_response.reason)) 102 | 103 | as_json = True 104 | record = {} 105 | if as_json: 106 | record = json.loads(get_response.text) 107 | log.info(ppj(record)) 108 | # end of post for running an ML Job 109 | 110 | sys.exit(0) 111 | -------------------------------------------------------------------------------- /tests/get-api-url.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | api_url=$(oc status | grep api | grep http | awk '{print $1}') 4 | echo "${api_url}" 5 | 6 | exit 0 7 | -------------------------------------------------------------------------------- /tests/get-recent-datasets.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import os 4 | import sys 5 | import json 6 | import requests 7 | from spylunking.log.setup_logging import build_colorized_logger 8 | from antinex_utils.utils import ppj 9 | 10 | 11 | name = 'get-recent-datasets' 12 | log = build_colorized_logger( 13 | name=name) 14 | 15 | 16 | url = os.getenv( 17 | "ANTINEX_URL", 18 | "http://localhost:8010") 19 | username = os.getenv( 20 | "API_USER", 21 | "root") 22 | password = os.getenv( 23 | "API_PASS", 24 | "123321") 25 | 26 | auth_url = "{}/api-token-auth/".format(url) 27 | resource_url = ("{}/mlprepare").format( 28 | url) 29 | use_headers = { 30 | "Content-type": "application/json" 31 | } 32 | login_data = { 33 | "username": username, 34 | "password": password 35 | } 36 | 37 | # Login as the user: 38 | log.info("Logging in user url={}".format(auth_url)) 39 | post_response = requests.post(auth_url, 40 | data=json.dumps(login_data), 41 | headers=use_headers) 42 | 43 | user_token = "" 44 | if post_response.status_code == 200: 45 | user_token = json.loads(post_response.text)["token"] 46 | 47 | if user_token == "": 48 | log.error(("Failed logging in as user={} - stopping" 49 | "post_response={}") 50 | .format(username, 51 | post_response.text)) 52 | sys.exit(1) 53 | else: 54 | log.info(("logged in user={} token={}") 55 | .format(username, 56 | user_token)) 57 | # end if/else 58 | 59 | log.info("building get data") 60 | 61 | use_headers = { 62 | "Content-type": "application/json", 63 | "Authorization": "JWT {}".format(user_token) 64 | } 65 | 66 | log.info(("Getting Recent Prepares url={}") 67 | .format(resource_url)) 68 | get_response = requests.get(resource_url, 69 | headers=use_headers) 70 | 71 | if get_response.status_code != 201 \ 72 | and get_response.status_code != 200: 73 | log.error(("Failed with GET response status={} reason={}") 74 | .format(get_response.status_code, 75 | get_response.reason)) 76 | log.error("Details:\n{}".format(get_response.text)) 77 | sys.exit(1) 78 | else: 79 | log.info(("SUCCESS - GET Response status={} reason={}") 80 | .format(get_response.status_code, 81 | get_response.reason)) 82 | 83 | as_json = True 84 | record = {} 85 | if as_json: 86 | record = json.loads(get_response.text) 87 | log.info(ppj(record)) 88 | # end of get 89 | 90 | sys.exit(0) 91 | -------------------------------------------------------------------------------- /tests/get-recent-jobs.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import os 4 | import sys 5 | import json 6 | import requests 7 | from spylunking.log.setup_logging import build_colorized_logger 8 | from antinex_utils.utils import ppj 9 | 10 | 11 | name = 'get-recent-jobs' 12 | log = 
build_colorized_logger( 13 | name=name) 14 | 15 | 16 | url = os.getenv( 17 | "ANTINEX_URL", 18 | "http://localhost:8010") 19 | username = os.getenv( 20 | "API_USER", 21 | "root") 22 | password = os.getenv( 23 | "API_PASS", 24 | "123321") 25 | 26 | auth_url = "{}/api-token-auth/".format(url) 27 | resource_url = ("{}/ml").format( 28 | url) 29 | use_headers = { 30 | "Content-type": "application/json" 31 | } 32 | login_data = { 33 | "username": username, 34 | "password": password 35 | } 36 | 37 | # Login as the user: 38 | log.info("Logging in user url={}".format(auth_url)) 39 | post_response = requests.post(auth_url, 40 | data=json.dumps(login_data), 41 | headers=use_headers) 42 | 43 | user_token = "" 44 | if post_response.status_code == 200: 45 | user_token = json.loads(post_response.text)["token"] 46 | 47 | if user_token == "": 48 | log.error(("Failed logging in as user={} - stopping " 49 | "post_response={}") 50 | .format(username, 51 | post_response.text)) 52 | sys.exit(1) 53 | else: 54 | log.info(("logged in user={} token={}") 55 | .format(username, 56 | user_token)) 57 | # end if/else 58 | 59 | log.info("building get data") 60 | 61 | use_headers = { 62 | "Content-type": "application/json", 63 | "Authorization": "JWT {}".format(user_token) 64 | } 65 | 66 | log.info(("Getting Recent Jobs url={}") 67 | .format(resource_url)) 68 | get_response = requests.get(resource_url, 69 | headers=use_headers) 70 | 71 | if get_response.status_code != 201 \ 72 | and get_response.status_code != 200: 73 | log.error(("Failed with GET response status={} reason={}") 74 | .format(get_response.status_code, 75 | get_response.reason)) 76 | log.error("Details:\n{}".format(get_response.text)) 77 | sys.exit(1) 78 | else: 79 | log.info(("SUCCESS - GET Response status={} reason={}") 80 | .format(get_response.status_code, 81 | get_response.reason)) 82 | 83 | as_json = True 84 | record = {} 85 | if as_json: 86 | record = json.loads(get_response.text) 87 | log.info(ppj(record)) 88 | # end of get 89 | 90 | sys.exit(0) 91 | -------------------------------------------------------------------------------- /tests/get-recent-results.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import os 4 | import sys 5 | import json 6 | import requests 7 | from spylunking.log.setup_logging import build_colorized_logger 8 | from antinex_utils.utils import ppj 9 | 10 | 11 | name = 'get-recent-results' 12 | log = build_colorized_logger( 13 | name=name) 14 | 15 | 16 | url = os.getenv( 17 | "ANTINEX_URL", 18 | "http://localhost:8010") 19 | username = os.getenv( 20 | "API_USER", 21 | "root") 22 | password = os.getenv( 23 | "API_PASS", 24 | "123321") 25 | 26 | auth_url = "{}/api-token-auth/".format(url) 27 | resource_url = ("{}/mlresults").format( 28 | url) 29 | use_headers = { 30 | "Content-type": "application/json" 31 | } 32 | login_data = { 33 | "username": username, 34 | "password": password 35 | } 36 | 37 | # Login as the user: 38 | log.info("Logging in user url={}".format(auth_url)) 39 | post_response = requests.post(auth_url, 40 | data=json.dumps(login_data), 41 | headers=use_headers) 42 | 43 | user_token = "" 44 | if post_response.status_code == 200: 45 | user_token = json.loads(post_response.text)["token"] 46 | 47 | if user_token == "": 48 | log.error(("Failed logging in as user={} - stopping " 49 | "post_response={}") 50 | .format(username, 51 | post_response.text)) 52 | sys.exit(1) 53 | else: 54 | log.info(("logged in user={} token={}") 55 | .format(username, 56 |
user_token)) 57 | # end if/else 58 | 59 | log.info("building get data") 60 | 61 | use_headers = { 62 | "Content-type": "application/json", 63 | "Authorization": "JWT {}".format(user_token) 64 | } 65 | 66 | log.info(("Getting Recent Results url={}") 67 | .format(resource_url)) 68 | get_response = requests.get(resource_url, 69 | headers=use_headers) 70 | 71 | if get_response.status_code != 201 \ 72 | and get_response.status_code != 200: 73 | log.error(("Failed with GET response status={} reason={}") 74 | .format(get_response.status_code, 75 | get_response.reason)) 76 | log.error("Details:\n{}".format(get_response.text)) 77 | sys.exit(1) 78 | else: 79 | log.info(("SUCCESS - GET Response status={} reason={}") 80 | .format(get_response.status_code, 81 | get_response.reason)) 82 | 83 | as_json = True 84 | record = {} 85 | if as_json: 86 | record = json.loads(get_response.text) 87 | log.info(ppj(record)) 88 | # end of get 89 | 90 | sys.exit(0) 91 | -------------------------------------------------------------------------------- /tests/images/django-rest-framework-with-swagger-and-jwt-trains-a-deep-neural-network-using-keras-and-tensorflow-with-83-percent-accuracy.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jay-johnson/train-ai-with-django-swagger-jwt/3cdd798709b02047eadebccbe2fc9b855cfcac4f/tests/images/django-rest-framework-with-swagger-and-jwt-trains-a-deep-neural-network-using-keras-and-tensorflow-with-83-percent-accuracy.gif -------------------------------------------------------------------------------- /tests/only-publish-predict-rows-simple.json: -------------------------------------------------------------------------------- 1 | { 2 | "label": "Django-AntiNex-Simple-Scaler-DNN", 3 | "predict_rows": [ 4 | { 5 | "_dataset_index": 10, 6 | "eth_type": 2041.0, 7 | "idx": 40.0, 8 | "ip_ihl": 5.0, 9 | "ip_len": 100.0, 10 | "ip_tos": 0.0, 11 | "ip_version": 4.0, 12 | "label_value": 1.0, 13 | "tcp_dport": 8080.0, 14 | "tcp_fields_options.MSS": 65495.0, 15 | "tcp_fields_options.Timestamp": 1653838672.0, 16 | "tcp_fields_options.WScale": 7.0, 17 | "tcp_seq": 65072.0, 18 | "tcp_sport": 38587.0 19 | }, 20 | { 21 | "_dataset_index": 11, 22 | "eth_type": 2041.0, 23 | "idx": 41.0, 24 | "ip_ihl": 5.0, 25 | "ip_len": 100.0, 26 | "ip_tos": 0.0, 27 | "ip_version": 4.0, 28 | "label_value": 1.0, 29 | "tcp_dport": 8080.0, 30 | "tcp_fields_options.MSS": 65495.0, 31 | "tcp_fields_options.Timestamp": 1653838672.0, 32 | "tcp_fields_options.WScale": 7.0, 33 | "tcp_seq": 65072.0, 34 | "tcp_sport": 38587.0 35 | } 36 | ], 37 | "ml_type": "classification", 38 | "predict_feature": "label_value", 39 | "publish_to_core": true, 40 | "apply_scaler": true, 41 | "features_to_process": [ 42 | "eth_type", 43 | "idx", 44 | "ip_ihl", 45 | "ip_len", 46 | "ip_tos", 47 | "ip_version", 48 | "tcp_dport", 49 | "tcp_fields_options.MSS", 50 | "tcp_fields_options.Timestamp", 51 | "tcp_fields_options.WScale", 52 | "tcp_seq", 53 | "tcp_sport" 54 | ], 55 | "ignore_features": [ 56 | ], 57 | "sort_values": [ 58 | ], 59 | "seed": 42, 60 | "test_size": 0.2, 61 | "batch_size": 32, 62 | "epochs": 10, 63 | "num_splits": 2, 64 | "loss": "binary_crossentropy", 65 | "optimizer": "adam", 66 | "metrics": [ 67 | "accuracy" 68 | ], 69 | "histories": [ 70 | "val_loss", 71 | "val_acc", 72 | "loss", 73 | "acc" 74 | ], 75 | "model_desc": { 76 | "layers": [ 77 | { 78 | "num_neurons": 250, 79 | "init": "uniform", 80 | "activation": "relu" 81 | }, 82 | { 83 |
"num_neurons": 1, 84 | "init": "uniform", 85 | "activation": "sigmoid" 86 | } 87 | ] 88 | }, 89 | "label_rules": { 90 | "labels": [ 91 | "not_attack", 92 | "not_attack", 93 | "attack" 94 | ], 95 | "label_values": [ 96 | -1, 97 | 0, 98 | 1 99 | ] 100 | }, 101 | "version": 1 102 | } 103 | -------------------------------------------------------------------------------- /tests/only-publish-scaler-full-django.json: -------------------------------------------------------------------------------- 1 | { 2 | "label": "Full-Django-AntiNex-Simple-Scaler-DNN", 3 | "dataset": "/opt/antinex/antinex-datasets/v1/webapps/django/training-ready/v1_django_cleaned.csv", 4 | "ml_type": "classification", 5 | "publish_to_core": true, 6 | "predict_feature": "label_value", 7 | "features_to_process": [ 8 | "idx", 9 | "arp_hwlen", 10 | "arp_hwtype", 11 | "arp_id", 12 | "arp_op", 13 | "arp_plen", 14 | "arp_ptype", 15 | "dns_default_aa", 16 | "dns_default_ad", 17 | "dns_default_an", 18 | "dns_default_ancount", 19 | "dns_default_ar", 20 | "dns_default_arcount", 21 | "dns_default_cd", 22 | "dns_default_id", 23 | "dns_default_length", 24 | "dns_default_ns", 25 | "dns_default_nscount", 26 | "dns_default_opcode", 27 | "dns_default_qd", 28 | "dns_default_qdcount", 29 | "dns_default_qr", 30 | "dns_default_ra", 31 | "dns_default_rcode", 32 | "dns_default_rd", 33 | "dns_default_tc", 34 | "dns_default_z", 35 | "dns_id", 36 | "eth_id", 37 | "eth_type", 38 | "icmp_addr_mask", 39 | "icmp_code", 40 | "icmp_gw", 41 | "icmp_id", 42 | "icmp_ptr", 43 | "icmp_seq", 44 | "icmp_ts_ori", 45 | "icmp_ts_rx", 46 | "icmp_ts_tx", 47 | "icmp_type", 48 | "icmp_unused", 49 | "ip_id", 50 | "ip_ihl", 51 | "ip_len", 52 | "ip_tos", 53 | "ip_version", 54 | "ipv6_fl", 55 | "ipv6_hlim", 56 | "ipv6_nh", 57 | "ipv6_plen", 58 | "ipv6_tc", 59 | "ipv6_version", 60 | "ipvsix_id", 61 | "pad_id", 62 | "tcp_dport", 63 | "tcp_fields_options.MSS", 64 | "tcp_fields_options.NOP", 65 | "tcp_fields_options.SAckOK", 66 | "tcp_fields_options.Timestamp", 67 | "tcp_fields_options.WScale", 68 | "tcp_id", 69 | "tcp_seq", 70 | "tcp_sport", 71 | "udp_dport", 72 | "udp_id", 73 | "udp_len", 74 | "udp_sport" 75 | ], 76 | "ignore_features": [], 77 | "sort_values": [], 78 | "seed": 42, 79 | "test_size": 0.2, 80 | "batch_size": 32, 81 | "epochs": 15, 82 | "num_splits": 2, 83 | "loss": "binary_crossentropy", 84 | "optimizer": "adam", 85 | "metrics": [ 86 | "accuracy" 87 | ], 88 | "histories": [ 89 | "val_loss", 90 | "val_acc", 91 | "loss", 92 | "acc" 93 | ], 94 | "model_desc": { 95 | "layers": [ 96 | { 97 | "num_neurons": 200, 98 | "init": "uniform", 99 | "activation": "relu" 100 | }, 101 | { 102 | "num_neurons": 1, 103 | "init": "uniform", 104 | "activation": "sigmoid" 105 | } 106 | ] 107 | }, 108 | "label_rules": { 109 | "labels": [ 110 | "not_attack", 111 | "not_attack", 112 | "attack" 113 | ], 114 | "label_values": [ 115 | -1, 116 | 0, 117 | 1 118 | ] 119 | }, 120 | "version": 1 121 | } 122 | -------------------------------------------------------------------------------- /tests/predict-rows-scaler-django-simple.json: -------------------------------------------------------------------------------- 1 | { 2 | "label": "Django-AntiNex-Simple-Scaler-DNN", 3 | "predict_rows": [ 4 | { 5 | "_dataset_index": 10, 6 | "eth_type": 2041.0, 7 | "idx": 40.0, 8 | "ip_ihl": 5.0, 9 | "ip_len": 100.0, 10 | "ip_tos": 0.0, 11 | "ip_version": 4.0, 12 | "label_value": 1.0, 13 | "tcp_dport": 8080.0, 14 | "tcp_fields_options.MSS": 65495.0, 15 | "tcp_fields_options.Timestamp": 1653838672.0, 16 | 
"tcp_fields_options.WScale": 7.0, 17 | "tcp_seq": 65072.0, 18 | "tcp_sport": 38587.0 19 | }, 20 | { 21 | "_dataset_index": 11, 22 | "eth_type": 2041.0, 23 | "idx": 41.0, 24 | "ip_ihl": 5.0, 25 | "ip_len": 100.0, 26 | "ip_tos": 0.0, 27 | "ip_version": 4.0, 28 | "label_value": 1.0, 29 | "tcp_dport": 8080.0, 30 | "tcp_fields_options.MSS": 65495.0, 31 | "tcp_fields_options.Timestamp": 1653838672.0, 32 | "tcp_fields_options.WScale": 7.0, 33 | "tcp_seq": 65072.0, 34 | "tcp_sport": 38587.0 35 | } 36 | ], 37 | "ml_type": "classification", 38 | "predict_feature": "label_value", 39 | "features_to_process": [ 40 | "eth_type", 41 | "idx", 42 | "ip_ihl", 43 | "ip_len", 44 | "ip_tos", 45 | "ip_version", 46 | "tcp_dport", 47 | "tcp_fields_options.MSS", 48 | "tcp_fields_options.Timestamp", 49 | "tcp_fields_options.WScale", 50 | "tcp_seq", 51 | "tcp_sport" 52 | ], 53 | "ignore_features": [ 54 | ], 55 | "sort_values": [ 56 | ], 57 | "seed": 42, 58 | "test_size": 0.2, 59 | "batch_size": 32, 60 | "epochs": 10, 61 | "num_splits": 2, 62 | "loss": "binary_crossentropy", 63 | "optimizer": "adam", 64 | "metrics": [ 65 | "accuracy" 66 | ], 67 | "histories": [ 68 | "val_loss", 69 | "val_acc", 70 | "loss", 71 | "acc" 72 | ], 73 | "model_desc": { 74 | "layers": [ 75 | { 76 | "num_neurons": 250, 77 | "init": "uniform", 78 | "activation": "relu" 79 | }, 80 | { 81 | "num_neurons": 1, 82 | "init": "uniform", 83 | "activation": "sigmoid" 84 | } 85 | ] 86 | }, 87 | "label_rules": { 88 | "labels": [ 89 | "not_attack", 90 | "not_attack", 91 | "attack" 92 | ], 93 | "label_values": [ 94 | -1, 95 | 0, 96 | 1 97 | ] 98 | }, 99 | "version": 1 100 | } 101 | -------------------------------------------------------------------------------- /tests/prepare-new-dataset.json: -------------------------------------------------------------------------------- 1 | { 2 | "title": "Prepare new Dataset from recordings", 3 | "desc": "", 4 | "ds_name": "new_recording", 5 | "full_file": "/tmp/fulldata_attack_scans.csv", 6 | "clean_file": "/tmp/cleaned_attack_scans.csv", 7 | "meta_suffix": "metadata.json", 8 | "output_dir": "/tmp/", 9 | "ds_dir": "/opt/antinex/datasets", 10 | "ds_glob_path": "/opt/antinex/datasets/*/*.csv", 11 | "pipeline_files": { 12 | "attack_files": [] 13 | }, 14 | "meta_data": {}, 15 | "post_proc": { 16 | "drop_columns": [ 17 | "src_file", 18 | "raw_id", 19 | "raw_load", 20 | "raw_hex_load", 21 | "raw_hex_field_load", 22 | "pad_load", 23 | "eth_dst", 24 | "eth_src", 25 | "ip_dst", 26 | "ip_src" 27 | ], 28 | "predict_feature": "label_name" 29 | }, 30 | "label_rules": { 31 | "set_if_above": 85, 32 | "labels": [ 33 | "not_attack", 34 | "attack" 35 | ], 36 | "label_values": [ 37 | 0, 38 | 1 39 | ] 40 | }, 41 | "version": 1 42 | } 43 | -------------------------------------------------------------------------------- /tests/readme-predict-demo-1.json: -------------------------------------------------------------------------------- 1 | { 2 | "label": "Prediction-Model-Prototyping", 3 | "predict_rows": [ 4 | { 5 | "_dataset_index": 1, 6 | "label_value": 1, 7 | "more_keys": 54.0 8 | }, 9 | { 10 | "_dataset_index": 2, 11 | "label_value": 1, 12 | "more_keys": 24.0 13 | }, 14 | { 15 | "_dataset_index": 2, 16 | "label_value": 0, 17 | "more_keys": 33.0 18 | } 19 | ], 20 | "ml_type": "classification", 21 | "predict_feature": "label_value", 22 | "features_to_process": [ 23 | "more_keys" 24 | ], 25 | "ignore_features": [ 26 | ], 27 | "sort_values": [ 28 | ], 29 | "seed": 42, 30 | "test_size": 0.2, 31 | "batch_size": 32, 32 | "epochs": 15, 33 | 
"num_splits": 2, 34 | "loss": "binary_crossentropy", 35 | "optimizer": "adam", 36 | "metrics": [ 37 | "accuracy" 38 | ], 39 | "histories": [ 40 | "val_loss", 41 | "val_acc", 42 | "loss", 43 | "acc" 44 | ], 45 | "model_desc": { 46 | "layers": [ 47 | { 48 | "num_neurons": 200, 49 | "init": "uniform", 50 | "activation": "relu" 51 | }, 52 | { 53 | "num_neurons": 1, 54 | "init": "uniform", 55 | "activation": "sigmoid" 56 | } 57 | ] 58 | }, 59 | "label_rules": { 60 | "labels": [ 61 | "not_attack", 62 | "not_attack", 63 | "attack" 64 | ], 65 | "label_values": [ 66 | -1, 67 | 0, 68 | 1 69 | ] 70 | }, 71 | "version": 1 72 | } 73 | -------------------------------------------------------------------------------- /tests/regression-spy.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | venv=~/.venvs/venvdrfpipeline 4 | env_name=dev 5 | 6 | if [[ "${USE_ENV}" != "" ]]; then 7 | env_name="${USE_ENV}" 8 | fi 9 | 10 | if [[ ! -e ../envs/${env_name}.env ]]; then 11 | echo "" 12 | echo "Failed to find env file: envs/${env_name}.env" 13 | echo "" 14 | exit 1 15 | fi 16 | 17 | echo "Activating and installing pips" 18 | source ${venv}/bin/activate 19 | echo "" 20 | 21 | echo "Sourcing: ../envs/${env_name}.env" 22 | source ../envs/${env_name}.env 23 | echo "" 24 | 25 | echo "Building dataset" 26 | ./build-new-dataset.py -f stocks/spy.json 27 | echo "Creating dnn" 28 | ./create-keras-dnn.py -f stocks/dnn-spy.json 29 | -------------------------------------------------------------------------------- /tests/scaler-django-antinex-simple.json: -------------------------------------------------------------------------------- 1 | { 2 | "label": "Django-AntiNex-Simple-Scaler-DNN", 3 | "dataset": "./drf_network_pipeline/tests/datasets/cleaned_attack_scans.csv", 4 | "ml_type": "classification", 5 | "predict_feature": "label_value", 6 | "features_to_process": [ 7 | "eth_type", 8 | "idx", 9 | "ip_ihl", 10 | "ip_len", 11 | "ip_tos", 12 | "ip_version", 13 | "tcp_dport", 14 | "tcp_fields_options.MSS", 15 | "tcp_fields_options.Timestamp", 16 | "tcp_fields_options.WScale", 17 | "tcp_seq", 18 | "tcp_sport" 19 | ], 20 | "ignore_features": [ 21 | ], 22 | "sort_values": [ 23 | ], 24 | "seed": 42, 25 | "test_size": 0.2, 26 | "batch_size": 32, 27 | "epochs": 10, 28 | "num_splits": 2, 29 | "loss": "binary_crossentropy", 30 | "optimizer": "adam", 31 | "metrics": [ 32 | "accuracy" 33 | ], 34 | "histories": [ 35 | "val_loss", 36 | "val_acc", 37 | "loss", 38 | "acc" 39 | ], 40 | "model_desc": { 41 | "layers": [ 42 | { 43 | "num_neurons": 250, 44 | "init": "uniform", 45 | "activation": "relu" 46 | }, 47 | { 48 | "num_neurons": 1, 49 | "init": "uniform", 50 | "activation": "sigmoid" 51 | } 52 | ] 53 | }, 54 | "label_rules": { 55 | "labels": [ 56 | "not_attack", 57 | "not_attack", 58 | "attack" 59 | ], 60 | "label_values": [ 61 | -1, 62 | 0, 63 | 1 64 | ] 65 | }, 66 | "version": 1 67 | } 68 | -------------------------------------------------------------------------------- /tests/scaler-full-django-antinex-simple.json: -------------------------------------------------------------------------------- 1 | { 2 | "label": "Full-Django-AntiNex-Simple-Scaler-DNN", 3 | "dataset": "/opt/antinex/antinex-datasets/v1/webapps/django/training-ready/v1_django_cleaned.csv", 4 | "ml_type": "classification", 5 | "predict_feature": "label_value", 6 | "features_to_process": [ 7 | "idx", 8 | "arp_hwlen", 9 | "arp_hwtype", 10 | "arp_id", 11 | "arp_op", 12 | "arp_plen", 13 | "arp_ptype", 14 | "dns_default_aa", 
15 | "dns_default_ad", 16 | "dns_default_an", 17 | "dns_default_ancount", 18 | "dns_default_ar", 19 | "dns_default_arcount", 20 | "dns_default_cd", 21 | "dns_default_id", 22 | "dns_default_length", 23 | "dns_default_ns", 24 | "dns_default_nscount", 25 | "dns_default_opcode", 26 | "dns_default_qd", 27 | "dns_default_qdcount", 28 | "dns_default_qr", 29 | "dns_default_ra", 30 | "dns_default_rcode", 31 | "dns_default_rd", 32 | "dns_default_tc", 33 | "dns_default_z", 34 | "dns_id", 35 | "eth_id", 36 | "eth_type", 37 | "icmp_addr_mask", 38 | "icmp_code", 39 | "icmp_gw", 40 | "icmp_id", 41 | "icmp_ptr", 42 | "icmp_seq", 43 | "icmp_ts_ori", 44 | "icmp_ts_rx", 45 | "icmp_ts_tx", 46 | "icmp_type", 47 | "icmp_unused", 48 | "ip_id", 49 | "ip_ihl", 50 | "ip_len", 51 | "ip_tos", 52 | "ip_version", 53 | "ipv6_fl", 54 | "ipv6_hlim", 55 | "ipv6_nh", 56 | "ipv6_plen", 57 | "ipv6_tc", 58 | "ipv6_version", 59 | "ipvsix_id", 60 | "pad_id", 61 | "tcp_dport", 62 | "tcp_fields_options.MSS", 63 | "tcp_fields_options.NOP", 64 | "tcp_fields_options.SAckOK", 65 | "tcp_fields_options.Timestamp", 66 | "tcp_fields_options.WScale", 67 | "tcp_id", 68 | "tcp_seq", 69 | "tcp_sport", 70 | "udp_dport", 71 | "udp_id", 72 | "udp_len", 73 | "udp_sport" 74 | ], 75 | "ignore_features": [], 76 | "sort_values": [], 77 | "seed": 42, 78 | "test_size": 0.2, 79 | "batch_size": 32, 80 | "epochs": 15, 81 | "num_splits": 2, 82 | "loss": "binary_crossentropy", 83 | "optimizer": "adam", 84 | "metrics": [ 85 | "accuracy" 86 | ], 87 | "histories": [ 88 | "val_loss", 89 | "val_acc", 90 | "loss", 91 | "acc" 92 | ], 93 | "model_desc": { 94 | "layers": [ 95 | { 96 | "num_neurons": 200, 97 | "init": "uniform", 98 | "activation": "relu" 99 | }, 100 | { 101 | "num_neurons": 1, 102 | "init": "uniform", 103 | "activation": "sigmoid" 104 | } 105 | ] 106 | }, 107 | "label_rules": { 108 | "labels": [ 109 | "not_attack", 110 | "not_attack", 111 | "attack" 112 | ], 113 | "label_values": [ 114 | -1, 115 | 0, 116 | 1 117 | ] 118 | }, 119 | "version": 1 120 | } 121 | -------------------------------------------------------------------------------- /tests/scaler-regression.json: -------------------------------------------------------------------------------- 1 | { 2 | "label": "Scaler-Close-Regression", 3 | "dataset": "/opt/antinex/api/webapp/drf_network_pipeline/tests/datasets/stock.csv", 4 | "ml_type": "regression", 5 | "predict_feature": "close", 6 | "features_to_process": [ 7 | "high", 8 | "low", 9 | "open", 10 | "volume" 11 | ], 12 | "ignore_features": [], 13 | "sort_values": [], 14 | "seed": 7, 15 | "test_size": 0.2, 16 | "batch_size": 32, 17 | "epochs": 50, 18 | "num_splits": 2, 19 | "loss": "mse", 20 | "optimizer": "adam", 21 | "metrics": [ 22 | "accuracy" 23 | ], 24 | "model_desc": { 25 | "layers": [ 26 | { 27 | "activation": "relu", 28 | "init": "uniform", 29 | "num_neurons": 200 30 | }, 31 | { 32 | "activation": null, 33 | "init": "uniform", 34 | "num_neurons": 1 35 | } 36 | ] 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /tests/send-worker-get-user.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import os 4 | import json 5 | from celery import Celery 6 | from spylunking.log.setup_logging import build_colorized_logger 7 | from antinex_utils.utils import ppj 8 | 9 | 10 | name = 'send-worker-get-user' 11 | log = build_colorized_logger( 12 | name=name) 13 | 14 | log.info("creating celery app") 15 | app = 
Celery("test-decoupled-app") 16 | 17 | broker_settings = { 18 | "broker_url": os.getenv( 19 | "ANTINEX_REST_API_BROKER_URL", 20 | "redis://localhost:6379/9"), 21 | "result_backend": os.getenv( 22 | "ANTINEX_REST_API_BACKEND_URL", 23 | "redis://localhost:6379/10") 24 | } 25 | app.conf.update(**broker_settings) 26 | 27 | datafile = "../webapp/drf_network_pipeline/tests/pubsub/get-user.json" 28 | data = {} 29 | with open(datafile, "r") as f: 30 | data = json.loads(f.read()) 31 | 32 | # Celery task routing and queue 33 | parent_route = "drf_network_pipeline.users.tasks" 34 | task_name = ("{}.task_get_user").format( 35 | parent_route) 36 | queue_name = ("{}.task_get_user").format( 37 | parent_route) 38 | 39 | log.info(("sending args={} to broker={} task={}") 40 | .format( 41 | ppj(data), 42 | app.conf["BROKER_URL"], 43 | task_name)) 44 | 45 | task_res = app.send_task( 46 | task_name, 47 | args=[data], 48 | queue=queue_name) 49 | 50 | log.info(("task={} task.id={} result={}") 51 | .format( 52 | task_name, 53 | task_res.id, 54 | ppj(task_res.get()))) 55 | -------------------------------------------------------------------------------- /tests/send-worker-publish-to-core.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import os 4 | import json 5 | from celery import Celery 6 | from spylunking.log.setup_logging import build_colorized_logger 7 | from antinex_utils.utils import ppj 8 | 9 | 10 | name = 'send-worker-publish-to-core' 11 | log = build_colorized_logger( 12 | name=name) 13 | 14 | log.info("creating celery app") 15 | app = Celery("test-decoupled-app") 16 | 17 | broker_settings = { 18 | "broker_url": os.getenv( 19 | "ANTINEX_REST_API_BROKER_URL", 20 | "redis://localhost:6379/9") 21 | } 22 | app.conf.update(**broker_settings) 23 | 24 | datafile = "../webapp/drf_network_pipeline/tests/pubsub/publish-to-core.json" 25 | data = {} 26 | with open(datafile, "r") as f: 27 | data = json.loads(f.read()) 28 | 29 | # Celery task routing and queue 30 | parent_route = "drf_network_pipeline.pipeline.tasks" 31 | task_name = ("{}.task_publish_to_core").format( 32 | parent_route) 33 | queue_name = ("{}.task_publish_to_core").format( 34 | parent_route) 35 | 36 | log.info(("sending args={} to broker={} task={}") 37 | .format( 38 | ppj(data), 39 | app.conf["BROKER_URL"], 40 | task_name)) 41 | 42 | app.send_task( 43 | task_name, 44 | args=[data], 45 | queue=queue_name) 46 | -------------------------------------------------------------------------------- /tests/simulations/sim_user1.json: -------------------------------------------------------------------------------- 1 | { 2 | "num_loops": 5, 3 | "user": { 4 | "username": "ricksanchez", 5 | "password": "WubbaLubbaDubDub", 6 | "email": "rick@c137dimension.com", 7 | "alias": "user1", 8 | "unique": true 9 | }, 10 | "sim_requests": [ 11 | { 12 | "name": "login", 13 | "data": "user" 14 | }, 15 | { 16 | "name": "prepare", 17 | "unique_names": true, 18 | "file": "./simulations/{}/prepare1.json" 19 | }, 20 | { 21 | "name": "train", 22 | "unique_names": true, 23 | "file": "./simulations/{}/train1.json" 24 | }, 25 | { 26 | "name": "getjob", 27 | "pk": "lasttrain" 28 | }, 29 | { 30 | "name": "getjobresult", 31 | "pk": "lasttrain" 32 | }, 33 | { 34 | "name": "recentjobs", 35 | "pk": "lasttrain" 36 | }, 37 | { 38 | "name": "recentresults", 39 | "pk": "lasttrain" 40 | }, 41 | { 42 | "name": "train", 43 | "unique_names": true, 44 | "file": "./simulations/{}/train1.json" 45 | } 46 | ] 47 | } 48 | 
-------------------------------------------------------------------------------- /tests/simulations/sim_user2.json: -------------------------------------------------------------------------------- 1 | { 2 | "num_loops": 5, 3 | "user": { 4 | "username": "mortysmith", 5 | "password": "notevilmorty", 6 | "email": "morty@aol.com", 7 | "alias": "user2", 8 | "unique": true 9 | }, 10 | "sim_requests": [ 11 | { 12 | "name": "login", 13 | "data": "user" 14 | }, 15 | { 16 | "name": "prepare", 17 | "unique_names": true, 18 | "file": "./simulations/{}/prepare1.json" 19 | }, 20 | { 21 | "name": "train", 22 | "unique_names": true, 23 | "file": "./simulations/{}/train1.json" 24 | }, 25 | { 26 | "name": "getjob", 27 | "pk": "lasttrain" 28 | }, 29 | { 30 | "name": "getjobresult", 31 | "pk": "lasttrain" 32 | }, 33 | { 34 | "name": "recentjobs", 35 | "pk": "lasttrain" 36 | }, 37 | { 38 | "name": "recentresults", 39 | "pk": "lasttrain" 40 | }, 41 | { 42 | "name": "train", 43 | "unique_names": true, 44 | "file": "./simulations/{}/train1.json" 45 | } 46 | ] 47 | } 48 | -------------------------------------------------------------------------------- /tests/simulations/sim_user3.json: -------------------------------------------------------------------------------- 1 | { 2 | "num_loops": 5, 3 | "user": { 4 | "username": "jerrysmith", 5 | "password": "plutoisaplanet", 6 | "email": "jerryholeinone@aol.com", 7 | "alias": "user3", 8 | "unique": true 9 | }, 10 | "sim_requests": [ 11 | { 12 | "name": "login", 13 | "data": "user" 14 | }, 15 | { 16 | "name": "prepare", 17 | "unique_names": true, 18 | "file": "./simulations/{}/prepare1.json" 19 | }, 20 | { 21 | "name": "train", 22 | "unique_names": true, 23 | "file": "./simulations/{}/train1.json" 24 | }, 25 | { 26 | "name": "getjob", 27 | "pk": "lasttrain" 28 | }, 29 | { 30 | "name": "getjobresult", 31 | "pk": "lasttrain" 32 | }, 33 | { 34 | "name": "recentjobs", 35 | "pk": "lasttrain" 36 | }, 37 | { 38 | "name": "recentresults", 39 | "pk": "lasttrain" 40 | }, 41 | { 42 | "name": "train", 43 | "unique_names": true, 44 | "file": "./simulations/{}/train1.json" 45 | } 46 | ] 47 | } 48 | -------------------------------------------------------------------------------- /tests/simulations/user1/prepare1.json: -------------------------------------------------------------------------------- 1 | { 2 | "title": "Prepare User 1 Dataset from recordings", 3 | "desc": "", 4 | "ds_name": "new_recording", 5 | "full_file": "/tmp/{}_full_user1.csv", 6 | "clean_file": "/tmp/{}_clean_user1.csv", 7 | "meta_suffix": "{}_user1_metadata.json", 8 | "output_dir": "/tmp/", 9 | "ds_dir": "/opt/antinex/datasets", 10 | "ds_glob_path": "/opt/antinex/datasets/*/*.csv", 11 | "pipeline_files": "{\"attack_files\": []}", 12 | "meta_data": "{}", 13 | "post_proc": "{ \"drop_columns\": [ \"src_file\", \"raw_id\", \"raw_load\", \"raw_hex_load\", \"raw_hex_field_load\", \"pad_load\", \"eth_dst\", \"eth_src\", \"ip_dst\", \"ip_src\" ], \"predict_feature\": \"label_name\" }", 14 | "label_rules": "{ \"set_if_above\": 85, \"labels\": [\"not_attack\", \"attack\"], \"label_values\": [0, 1] }", 15 | "version": 1 16 | } 17 | -------------------------------------------------------------------------------- /tests/simulations/user1/train1.json: -------------------------------------------------------------------------------- 1 | { 2 | "csv_file": "/tmp/cleaned_{}_attack_scans.csv", 3 | "meta_file": "/tmp/user1_{}_cleaned_metadata.json", 4 | "title": "Keras DNN - User 1 Training Result", 5 | "desc": "Tensorflow backend with 
simulated data", 6 | "ds_name": "cleaned", 7 | "algo_name": "dnn", 8 | "ml_type": "classification", 9 | "predict_feature": "label_value", 10 | "training_data": "{}", 11 | "pre_proc": "{}", 12 | "post_proc": "{}", 13 | "meta_data": "{}", 14 | "version": 1 15 | } 16 | -------------------------------------------------------------------------------- /tests/simulations/user2/prepare1.json: -------------------------------------------------------------------------------- 1 | { 2 | "title": "Prepare User 2 Dataset from recordings", 3 | "desc": "", 4 | "ds_name": "new_recording", 5 | "full_file": "/tmp/{}_full_user2.csv", 6 | "clean_file": "/tmp/{}_clean_user2.csv", 7 | "meta_suffix": "{}_user2_metadata.json", 8 | "output_dir": "/tmp/", 9 | "ds_dir": "/opt/antinex/datasets", 10 | "ds_glob_path": "/opt/antinex/datasets/*/*.csv", 11 | "pipeline_files": "{\"attack_files\": []}", 12 | "meta_data": "{}", 13 | "post_proc": "{ \"drop_columns\": [ \"src_file\", \"raw_id\", \"raw_load\", \"raw_hex_load\", \"raw_hex_field_load\", \"pad_load\", \"eth_dst\", \"eth_src\", \"ip_dst\", \"ip_src\" ], \"predict_feature\": \"label_name\" }", 14 | "label_rules": "{ \"set_if_above\": 85, \"labels\": [\"not_attack\", \"attack\"], \"label_values\": [0, 1] }", 15 | "version": 1 16 | } 17 | -------------------------------------------------------------------------------- /tests/simulations/user2/train1.json: -------------------------------------------------------------------------------- 1 | { 2 | "csv_file": "/tmp/cleaned_{}_attack_scans.csv", 3 | "meta_file": "/tmp/user2_{}_cleaned_metadata.json", 4 | "title": "Keras DNN - User 2 Training Result", 5 | "desc": "Tensorflow backend with simulated data", 6 | "ds_name": "cleaned", 7 | "algo_name": "dnn", 8 | "ml_type": "classification", 9 | "predict_feature": "label_value", 10 | "training_data": "{}", 11 | "pre_proc": "{}", 12 | "post_proc": "{}", 13 | "meta_data": "{}", 14 | "version": 1 15 | } 16 | -------------------------------------------------------------------------------- /tests/simulations/user3/prepare1.json: -------------------------------------------------------------------------------- 1 | { 2 | "title": "Prepare User 3 Dataset from recordings", 3 | "desc": "", 4 | "ds_name": "new_recording", 5 | "full_file": "/tmp/{}_full_user3.csv", 6 | "clean_file": "/tmp/{}_clean_user3.csv", 7 | "meta_suffix": "{}_user3_metadata.json", 8 | "output_dir": "/tmp/", 9 | "ds_dir": "/opt/antinex/datasets", 10 | "ds_glob_path": "/opt/antinex/datasets/*/*.csv", 11 | "pipeline_files": "{\"attack_files\": []}", 12 | "meta_data": "{}", 13 | "post_proc": "{ \"drop_columns\": [ \"src_file\", \"raw_id\", \"raw_load\", \"raw_hex_load\", \"raw_hex_field_load\", \"pad_load\", \"eth_dst\", \"eth_src\", \"ip_dst\", \"ip_src\" ], \"predict_feature\": \"label_name\" }", 14 | "label_rules": "{ \"set_if_above\": 85, \"labels\": [\"not_attack\", \"attack\"], \"label_values\": [0, 1] }", 15 | "version": 1 16 | } 17 | -------------------------------------------------------------------------------- /tests/simulations/user3/train1.json: -------------------------------------------------------------------------------- 1 | { 2 | "csv_file": "/tmp/cleaned_{}_attack_scans.csv", 3 | "meta_file": "/tmp/user3_{}_cleaned_metadata.json", 4 | "title": "Keras DNN - User 3 Training Result", 5 | "desc": "Tensorflow backend with simulated data", 6 | "ds_name": "cleaned", 7 | "algo_name": "dnn", 8 | "ml_type": "classification", 9 | "predict_feature": "label_value", 10 | "training_data": "{}", 11 | "pre_proc": "{}", 12 
| "post_proc": "{}", 13 | "meta_data": "{}", 14 | "version": 1 15 | } 16 | -------------------------------------------------------------------------------- /tests/stocks/dnn-spy.json: -------------------------------------------------------------------------------- 1 | { 2 | "csv_file": "/tmp/cleaned_spy.csv", 3 | "meta_file": "/tmp/cleaned_spy_metadata.json", 4 | "predict_feature": "close", 5 | "title": "Keras DNN - SPY", 6 | "desc": "Tensorflow backend with simulated data", 7 | "ml_type": "regression", 8 | "seed": 42, 9 | "test_size": 0.2, 10 | "batch_size": 32, 11 | "epochs": 2, 12 | "num_splits": 2, 13 | "loss": "mse", 14 | "optimizer": "adam", 15 | "model_desc": { 16 | "layers": [ 17 | { 18 | "num_neurons": 250, 19 | "init": "normal", 20 | "activation": "relu" 21 | }, 22 | { 23 | "num_neurons": 20, 24 | "init": "normal", 25 | "activation": "relu" 26 | }, 27 | { 28 | "num_neurons": 1, 29 | "init": "uniform", 30 | "activation": null 31 | } 32 | ] 33 | }, 34 | "metrics": [ 35 | "mse", 36 | "mae", 37 | "mape", 38 | "cosine" 39 | ], 40 | "histories": [ 41 | "val_loss", 42 | "val_acc", 43 | "loss", 44 | "acc" 45 | ], 46 | "ds_name": "cleaned", 47 | "ds_name": "SPY", 48 | "algo_name": "SPY DNN", 49 | "image_file": "/media/sf_shared/spy_dnn_plots.png", 50 | "training_data": "{}", 51 | "pre_proc": "{}", 52 | "post_proc": "{}", 53 | "meta_data": "{}", 54 | "version": 1 55 | } 56 | -------------------------------------------------------------------------------- /tests/stocks/spy.json: -------------------------------------------------------------------------------- 1 | { 2 | "title": "Prepare SPY dataset", 3 | "desc": "", 4 | "ds_name": "SPY Analysis", 5 | "full_file": "/tmp/full_spy.csv", 6 | "clean_file": "/tmp/cleaned_spy.csv", 7 | "meta_suffix": "spy_metadata.json", 8 | "output_dir": "/tmp/", 9 | "ds_dir": "/opt/antinex/datasets", 10 | "ds_glob_path": "/opt/stocks/SPY/*.csv", 11 | "pipeline_files": "{}", 12 | "meta_data": "{}", 13 | "post_proc": "{ \"drop_columns\": [ \"src_file\", \"ticker\", \"date\", \"exp_date\" ] }", 14 | "label_rules": "{}", 15 | "version": 1 16 | } 17 | -------------------------------------------------------------------------------- /tests/test-keras-dnn.json: -------------------------------------------------------------------------------- 1 | { 2 | "csv_file": "/tmp/cleaned_attack_scans.csv", 3 | "meta_file": "/tmp/cleaned_metadata.json", 4 | "title": "Keras DNN - network-pipeline==1.0.10", 5 | "desc": "Tensorflow backend with simulated data", 6 | "ds_name": "cleaned", 7 | "algo_name": "dnn", 8 | "ml_type": "classification", 9 | "predict_feature": "label_value", 10 | "training_data": "{}", 11 | "pre_proc": "{}", 12 | "post_proc": "{}", 13 | "meta_data": "{}", 14 | "version": 1 15 | } 16 | -------------------------------------------------------------------------------- /tests/users/user_1.sh: -------------------------------------------------------------------------------- 1 | export API_USER="trex" 2 | export API_PASSWORD="123321" 3 | export API_EMAIL="bugs@antinex.com" 4 | export API_FIRSTNAME="Guest" 5 | export API_LASTNAME="Guest" 6 | export API_URL="http://localhost:8010" 7 | export API_VERBOSE="true" 8 | export API_DEBUG="false" 9 | 10 | if [[ "${ANTINEX_USER}" != "" ]]; then 11 | export API_USER="${ANTINEX_USER}" 12 | else 13 | export ANTINEX_USER="${API_USER}" 14 | fi 15 | 16 | if [[ "${ANTINEX_PASSWORD}" != "" ]]; then 17 | export API_PASSWORD="${ANTINEX_PASSWORD}" 18 | else 19 | export ANTINEX_PASSWORD="${API_PASSWORD}" 20 | fi 21 | 22 | if [[ "${ANTINEX_URL}" 
!= "" ]]; then 23 | export API_URL="${ANTINEX_URL}" 24 | else 25 | export ANTINEX_URL="${API_URL}" 26 | fi 27 | if [[ "${ANTINEX_EMAIL}" != "" ]]; then 28 | export API_EMAIL="${ANTINEX_EMAIL}" 29 | else 30 | export ANTINEX_EMAIL="${API_EMAIL}" 31 | fi 32 | if [[ "${ANTINEX_FIRSTNAME}" != "" ]]; then 33 | export API_FIRSTNAME="${ANTINEX_FIRSTNAME}" 34 | else 35 | export ANTINEX_FIRSTNAME="${API_FIRSTNAME}" 36 | fi 37 | if [[ "${ANTINEX_LASTNAME}" != "" ]]; then 38 | export API_LASTNAME="${ANTINEX_LASTNAME}" 39 | else 40 | export ANTINEX_LASTNAME="${API_LASTNAME}" 41 | fi 42 | if [[ "${ANTINEX_VERBOSE}" != "" ]]; then 43 | export API_VERBOSE="${ANTINEX_VERBOSE}" 44 | else 45 | export ANTINEX_VERBOSE="${API_VERBOSE}" 46 | fi 47 | if [[ "${ANTINEX_DEBUG}" != "" ]]; then 48 | export API_DEBUG="${ANTINEX_DEBUG}" 49 | else 50 | export ANTINEX_DEBUG="${API_DEBUG}" 51 | fi 52 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = 3 | 3.5 4 | 3.6 5 | flake8 6 | flakeplus 7 | configcheck 8 | pydocstyle 9 | 10 | basepython = 11 | 3.5: python3.5 12 | 3.6,flake8,flakeplus,configcheck,pydocstyle: python3 13 | 14 | [flake8] 15 | max-line-length = 80 16 | exclude = .tox/*,./build/*,./venv/*,./.eggs/*,./webapp/drf_network_pipeline/*/migrations,./openshift/ 17 | 18 | [pycodestyle] 19 | exclude = .tox/*,./build/*,./venv/*,./.eggs/*,./webapp/drf_network_pipeline/*/migrations,./openshift/ 20 | 21 | [testenv:lint] 22 | deps = flake8 23 | commands = flake8 24 | 25 | -------------------------------------------------------------------------------- /webapp/Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | 3 | url = "https://pypi.python.org/simple" 4 | verify_ssl = true 5 | name = "pypi" 6 | 7 | 8 | [dev-packages] 9 | 10 | Werkzeug = "==0.12.2" 11 | 12 | 13 | [packages] 14 | 15 | dj-database-url = "==0.4.2" 16 | django-configurations = "==2.0.0" 17 | django-debug-toolbar = "==1.9.1" 18 | django-extensions = "==1.9.7" 19 | Django = "==2.0" 20 | "psycopg2" = "==2.7.3.2" 21 | gunicorn = "==19.7.1" 22 | newrelic = "==2.98.0.81" 23 | whitenoise = "==4.0b4" 24 | 25 | 26 | [requires] 27 | 28 | python_version = "3.6" 29 | -------------------------------------------------------------------------------- /webapp/Procfile: -------------------------------------------------------------------------------- 1 | web: newrelic-admin run-program gunicorn -b "0.0.0.0:8010" -w 3 drf_network_pipeline.wsgi 2 | -------------------------------------------------------------------------------- /webapp/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jay-johnson/train-ai-with-django-swagger-jwt/3cdd798709b02047eadebccbe2fc9b855cfcac4f/webapp/__init__.py -------------------------------------------------------------------------------- /webapp/build-docs.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "" 4 | use_doc_theme="alabaster" 5 | if [[ "${DOC_THEME}" != "" ]]; then 6 | use_doc_theme="${DOC_THEME}" 7 | else 8 | export DOC_THEME=${use_doc_theme} 9 | fi 10 | 11 | if [[ ! -e ./staticfiles ]]; then 12 | mkdir -p -m 775 ./staticfiles 13 | fi 14 | 15 | echo "Making HTML Documentation with theme: ${use_doc_theme}" 16 | doc_path=drf_network_pipeline/docs 17 | cd $doc_path 18 | make html 19 | cd ../.. 
20 | 21 | echo "" 22 | echo "Fixing static url links for Django" 23 | fix_these_files=$(find drf_network_pipeline/docs/build/html -name "*.html" | grep -v "/rest_framework_swagger/") 24 | for html in $fix_these_files; do 25 | echo "fixing ${html}" 26 | sed -i 's|src="_static|src="/static|g' $html 27 | sed -i 's|href="_static|href="/static|g' $html 28 | sed -i 's|href="_images|href="/static/_images|g' $html 29 | sed -i 's|href="_sources|href="/static/_sources|g' $html 30 | sed -i 's|href="_downloads|href="/static/_downloads|g' $html 31 | done 32 | 33 | echo "" 34 | echo "Fixing static url for one-off cases" 35 | sed -i 's|/static/_sources/index.rst.txt|/docs/_sources/index.rst.txt|' drf_network_pipeline/docs/build/html/index.html 36 | 37 | exit 0 38 | -------------------------------------------------------------------------------- /webapp/collect-statics.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | venv=~/.venvs/venvdrfpipeline 4 | 5 | # support for using venv in other locations 6 | if [[ "${USE_VENV}" != "" ]]; then 7 | if [[ -e ${USE_VENV}/bin/activate ]]; then 8 | echo "Using custom virtualenv: ${USE_VENV}" 9 | venv=${USE_VENV} 10 | else 11 | echo "Did not find custom virtualenv: ${USE_VENV}" 12 | exit 1 13 | fi 14 | fi 15 | 16 | if [[ -e ${venv}/bin/activate ]]; then 17 | echo "Activating and pips: ${venv}/bin/activate" 18 | . ${venv}/bin/activate 19 | echo "" 20 | else 21 | if [[ -e /opt/venv/bin/activate ]]; then 22 | echo "Activating /opt/venv and pips" 23 | . /opt/venv/bin/activate 24 | echo "" 25 | fi 26 | fi 27 | 28 | if [[ "${COLLECT_STATICS}" == "1" ]]; then 29 | echo "Collecting static files" 30 | python manage.py collectstatic --noinput >> /dev/null 31 | echo "" 32 | else 33 | echo "Collect static disabled COLLECT_STATICS=${COLLECT_STATICS}" 34 | fi 35 | 36 | exit 0 37 | -------------------------------------------------------------------------------- /webapp/create-super-user.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | email="root@email.com" 4 | username="root" 5 | password="123321" 6 | 7 | echo "Creating Super User" 8 | echo "from django.contrib.auth import get_user_model; from django.contrib.auth.models import User; User = get_user_model(); User.objects.filter(email='${email}').delete(); User.objects.create_superuser('${username}', '${email}', '${password}')" | python manage.py shell 9 | echo "Done Creating Super User: ${username}" 10 | 11 | exit 0 12 | -------------------------------------------------------------------------------- /webapp/django-uwsgi.ini: -------------------------------------------------------------------------------- 1 | [uwsgi] 2 | http = 0.0.0.0:8010 3 | chdir = %v 4 | wsgi-file = ./drf_network_pipeline/wsgi.py 5 | log-format = %(ltime) %(addr) - %(user) "%(method) %(uri) %(proto)" %(status) %(size) "%(referer)" "%(uagent)" 6 | processes = 5 7 | threads = 1 8 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, unicode_literals 2 | 3 | # http://docs.celeryproject.org/en/master/django/first-steps-with-django.html#using-celery-with-django 4 | 5 | # This will make sure the app is always imported when 6 | # Django starts so that shared_task will use this app. 
7 | from drf_network_pipeline.celery_config import app as celery_app  # noqa 8 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/api/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jay-johnson/train-ai-with-django-swagger-jwt/3cdd798709b02047eadebccbe2fc9b855cfcac4f/webapp/drf_network_pipeline/api/__init__.py -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/celery_config.py: -------------------------------------------------------------------------------- 1 | import os 2 | from celery import Celery 3 | from celery import signals 4 | from spylunking.log.setup_logging import build_colorized_logger 5 | 6 | 7 | # Disable celery log hijacking 8 | # https://github.com/celery/celery/issues/2509 9 | @signals.setup_logging.connect 10 | def setup_celery_logging(**kwargs): 11 | pass 12 | 13 | 14 | name = 'worker' 15 | log = build_colorized_logger( 16 | name=name) 17 | 18 | 19 | # Required load order for backend workers 20 | import configurations # noqa 21 | os.environ.setdefault( 22 | "DJANGO_SETTINGS_MODULE", 23 | "drf_network_pipeline.settings") 24 | os.environ.setdefault( 25 | "DJANGO_CONFIGURATION", 26 | "Development") 27 | configurations.setup() # noqa 28 | import django # noqa 29 | 30 | app = Celery( 31 | "drf_network_pipeline") 32 | 33 | CELERY_TIMEZONE = "UTC" 34 | 35 | app.config_from_object( 36 | "django.conf:settings", 37 | namespace="CELERY") 38 | app.autodiscover_tasks() 39 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | SPHINXPROJ = AntiNex-DeepNeuralNetworksforDefense 8 | SOURCEDIR = ./source 9 | BUILDDIR = build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/docs/doc-requirements.txt: -------------------------------------------------------------------------------- 1 | antinex-utils 2 | celery[redis] 3 | colorlog 4 | dj-database-url==0.4.2 5 | Django==2.0 6 | django-admin 7 | django-cacheops 8 | django-celery-results 9 | django-configurations==2.0.0 10 | django-debug-toolbar==1.9.1 11 | django-extensions==1.9.7 12 | django-redis 13 | django-redis-cache 14 | django-redis-sessions 15 | django-rest-registration 16 | django-rest-swagger 17 | django-six 18 | djangorestframework 19 | djangorestframework-jwt 20 | flake8<=3.4.1 21 | gunicorn 22 | jsonfield 23 | inotify 24 | keras 25 | matplotlib 26 | newrelic 27 | pandas 28 | numpy 29 | psycopg2==2.7.3.2 30 | pycodestyle==2.3.1 31 | pydocstyle 32 | requests 33 | recommonmark 34 | scikit-learn 35 | sphinx 36 | sphinx-autobuild 37 | sphinx_bootstrap_theme 38 | sphinx_rtd_theme 39 | spylunking 40 | uwsgi 41 | whitenoise==4.0b4 42 | Werkzeug==0.12.2 43 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=source 11 | set BUILDDIR=build 12 | set SPHINXPROJ=AntiNex-DeepNeuralNetworksforDefense 13 | 14 | if "%1" == "" goto help 15 | 16 | %SPHINXBUILD% >NUL 2>NUL 17 | if errorlevel 9009 ( 18 | echo. 19 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 20 | echo.installed, then set the SPHINXBUILD environment variable to point 21 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 22 | echo.may add the Sphinx directory to PATH. 23 | echo. 24 | echo.If you don't have Sphinx installed, grab it from 25 | echo.http://sphinx-doc.org/ 26 | exit /b 1 27 | ) 28 | 29 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 30 | goto end 31 | 32 | :help 33 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 34 | 35 | :end 36 | popd 37 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/docs/source/faq.rst: -------------------------------------------------------------------------------- 1 | Frequently Asked Questions 2 | ========================== 3 | 4 | What AntiNex is Not and Disclaimers 5 | ----------------------------------- 6 | 7 | There are a lot of moving pieces in AI, and I wanted to be clear about what is currently not supported: 8 | 9 | #. Custom layers or custom Deep Neural Network models - only Keras Sequential neural networks, KerasRegressor, KerasClassifier, Stratified Kfolds, cross-validation scoring, Scalers, Add and Dropout are supported. PR's are always welcomed! 10 | #. Able to tell what your applications are doing today that is good, non-attack traffic out of the box. AntiNex requires recording how the network is being used in normal operation + identifying what you want to protect (do you want tcp traffic only? or a combination of tcp + udp + arp?). It uses the captured traffic to build the initial training dataset. 11 | #. Exotic attacks - The network pipeline includes the Zed Attack Proxy (ZAP) for OWASP dynamic security analysis. 
This tool runs fuzzing attacks against web applications. ZAP was used to generate the latest attack datasets, and there is no guarantee the latest DNNs will always be effective against attacks I have not seen yet. Please share your findings and reach out if you know how to generate new, better attack simulations to help us all. PR's are always welcomed! 12 | #. Image predictions and Convolutional Neural Networks - it only works on numeric datasets. 13 | #. Recurrent Neural Networks - I plan on adding LSTM support into the antinex-utils, but the scores were already good enough to release this first build. 14 | #. Embedding Layers - I want to add payload deserialization to the packet processing with support for decrypting traffic, but the DNN scores were good enough to skip this feature for now. 15 | #. Adversarial Neural Networks - I plan on creating attack neural networks from the datasets to beat up the trained ones, but this is a 2.0 feature at this point. 16 | #. Saving models to disk is broken - I have commented out the code and found a keras issue that looks like the same problem I am hitting... I hope it is resolved so we can share model files via S3. 17 | 18 | Why the name? 19 | ------------- 20 | 21 | I was describing what this did and my sister-in-law said it reminded her of antivirus but for network defense. So instead of calling it **Anti-Network Exploits** it's just **AntiNex** or **anex** for short. Thanks Alli for the name! 22 | 23 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/docs/source/job_utils.rst: -------------------------------------------------------------------------------- 1 | Source Code - Job Helpers 2 | ========================= 3 | 4 | These are the helper methods for abstracting celery calls from the Django REST Framework Serializers. These are optional for most users; I just find them helpful because the serializers all examine a common dictionary structure instead of custom ones all over the code. The response structure is: 5 | 6 | :: 7 | 8 | task_response_node = { 9 | "status": status, 10 | "err": err, 11 | "task_name": task_name, 12 | "data": data, 13 | "celery_enabled": celery_enabled, 14 | "use_cache": use_cache, 15 | "cache_key": cache_key 16 | } 17 | 18 | #. **status** will be a const value from the **drf_network_pipeline.pipeline.consts** 19 | 20 | **Response Status Codes** 21 | 22 | :: 23 | 24 | SUCCESS = 0 25 | FAILED = 1 26 | ERR = 2 27 | EX = 3 28 | NOTRUN = 4 29 | INVALID = 5 30 | NOTDONE = 6 31 | 32 | #. **err** will be an empty string on **SUCCESS** and not-empty if there was a problem 33 | #. **data** is the result from the Celery worker (if it was used instead of **python manage.py runserver 0.0.0.0:8010**) 34 | #. **use_cache** is a flag meaning the results are also cached in the **cache_key** for **django-cacheops** to use (this is not supported yet) 35 | #. **task_name** is a human-readable task label for debugging in the logs 36 | 37 | Build Task Request 38 | ------------------ 39 | 40 | .. automodule:: drf_network_pipeline.job_utils.build_task_request 41 | :members: 42 | 43 | Build Task Response 44 | ------------------- 45 | 46 | .. automodule:: drf_network_pipeline.job_utils.build_task_response 47 | :members: 48 | 49 | Handle Task Method 50 | ------------------ 51 | 52 | .. automodule:: drf_network_pipeline.job_utils.handle_task_method 53 | :members: 54 | 55 | Run Task 56 | -------- 57 | 58 | .. 
automodule:: drf_network_pipeline.job_utils.run_task 59 | :members: 60 | 61 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/docs/source/modules/models.rst: -------------------------------------------------------------------------------- 1 | Source Code - Database Models 2 | ============================= 3 | 4 | AntiNex DB Models 5 | ----------------- 6 | 7 | Here are the ``MLJob``, ``MLJobResult`` and ``MLPrepare`` classes. 8 | 9 | .. automodule:: drf_network_pipeline.pipeline.models 10 | :members: 11 | 12 | User DB Model 13 | ------------- 14 | 15 | .. automodule:: drf_network_pipeline.users.models 16 | :members: 17 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/docs/source/pipeline.rst: -------------------------------------------------------------------------------- 1 | Source Code - ML Pipeline 2 | ========================= 3 | 4 | These are the methods for developing with the current ML Pipeline app within the Django REST Framework. 5 | 6 | Constants 7 | --------- 8 | 9 | Constants for the ML pipeline 10 | 11 | .. automodule:: drf_network_pipeline.pipeline.consts 12 | :members: 13 | 14 | Building a Response Dictionary 15 | ------------------------------ 16 | 17 | This builds a dictionary that is published to the AntiNex Core within the MLJob's prediction manifest. This dictionary describes how to send the results back to the core, which allows an environment to run many REST APIs and reuse the same core workers. 18 | 19 | .. automodule:: drf_network_pipeline.pipeline.build_worker_result_node 20 | :members: 21 | 22 | Creating ML Job Stub Records for Tracking Purposes 23 | -------------------------------------------------- 24 | 25 | Creates the initial ``MLJob`` and ``MLJobResult`` record stubs in the database 26 | 27 | .. automodule:: drf_network_pipeline.pipeline.create_ml_job_record 28 | :members: 29 | 30 | Creating New Training Datasets 31 | ------------------------------ 32 | 33 | Creates an initial ``MLPrepare`` record stub in the database 34 | 35 | .. automodule:: drf_network_pipeline.pipeline.create_ml_prepare_record 36 | :members: 37 | 38 | Process AntiNex Core Worker Results 39 | ----------------------------------- 40 | 41 | Fills in the ``MLJob`` and ``MLJobResult`` records with the JSON response from the AntiNex Core. 42 | 43 | .. automodule:: drf_network_pipeline.pipeline.process_worker_results 44 | :members: handle_worker_results_message,process_worker_results 45 | 46 | Celery Tasks 47 | ------------ 48 | 49 | Celery tasks that are handled within the Django REST API worker when the environment variable ``CELERY_ENABLED`` is set to ``1`` 50 | 51 | .. autotask:: drf_network_pipeline.pipeline.tasks.task_ml_job 52 | .. autotask:: drf_network_pipeline.pipeline.tasks.task_ml_prepare 53 | .. autotask:: drf_network_pipeline.pipeline.tasks.task_ml_process_results 54 | .. autotask:: drf_network_pipeline.pipeline.tasks.task_publish_to_core 55 | 56 | Utility Methods 57 | --------------- 58 | 59 | Utility methods used across the pipeline app 60 | 61 | ..
automodule:: drf_network_pipeline.pipeline.utils 62 | :members: 63 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/docs/source/serializers.rst: -------------------------------------------------------------------------------- 1 | Source Code - Django Rest Framework Serializers 2 | =============================================== 3 | 4 | User Serializers 5 | ---------------- 6 | 7 | These are the current User Serializers 8 | 9 | .. automodule:: drf_network_pipeline.sz.user 10 | :members: UserSerializer 11 | 12 | ML Serializers 13 | -------------- 14 | 15 | These are the current ML Serializers 16 | 17 | .. automodule:: drf_network_pipeline.sz.ml 18 | :members: MLPrepareSerializer,MLJobsSerializer,MLJobResultsSerializer 19 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/index.py: -------------------------------------------------------------------------------- 1 | from django.conf import settings 2 | from django.shortcuts import render 3 | 4 | 5 | def handle_sphinx_doc_index( 6 | request, 7 | data=None): 8 | """handle_sphinx_doc_index 9 | 10 | Generic handler for sending the browser to the 11 | sphinx documentation index: 12 | 13 | /webapp/drf_network_pipeline/docs/build/html/index.html 14 | 15 | :param request: HttpRequest 16 | :param data: extra data 17 | """ 18 | return render( 19 | request, 20 | settings.DEFAULT_DOC_INDEX_HTML) 21 | # end of handle_sphinx_doc_index 22 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/job_utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jay-johnson/train-ai-with-django-swagger-jwt/3cdd798709b02047eadebccbe2fc9b855cfcac4f/webapp/drf_network_pipeline/job_utils/__init__.py -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/job_utils/build_task_request.py: -------------------------------------------------------------------------------- 1 | from django.conf import settings 2 | from drf_network_pipeline.pipeline.consts import NOTRUN 3 | 4 | 5 | def build_task_request( 6 | status=NOTRUN, 7 | err="not-set", 8 | task_name="", 9 | data=None, 10 | job_id=None, 11 | celery_enabled=settings.CELERY_ENABLED, 12 | use_cache=settings.CACHEOPS_ENABLED, 13 | cache_record=False, 14 | cache_key=None): 15 | """build_task_request 16 | 17 | Builds a common request dictionary for all Celery tasks 18 | being wrapped with the utils framework 19 | 20 | :param status: task return status code 21 | :param err: task error message for debugging 22 | :param task_name: task label for debugging 23 | :param data: task return data 24 | :param job_id: task job id 25 | :param celery_enabled: control flag for testing celery tasks 26 | :param use_cache: use the cached record if available 27 | :param cache_record: cache the result in redis after done 28 | :param cache_key: cache the result in this redis key 29 | """ 30 | 31 | task_node = { 32 | "status": status, 33 | "err": err, 34 | "task_name": task_name, 35 | "data": data, 36 | "job_id": job_id, 37 | "celery_enabled": celery_enabled, 38 | "use_cache": use_cache, 39 | "cache_record": cache_record, 40 | "cache_key": cache_key 41 | } 42 | 43 | return task_node 44 | # end of build_task_request 45 | --------------------------------------------------------------------------------
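Taken together, ``build_task_request`` (above) and ``build_task_response`` (next file) bracket every Celery task wrapped by the job_utils framework. A minimal usage sketch, not a file from this repo, assuming Django settings are already configured and reusing the ``user_id`` payload from ``tests/pubsub/get-user.json``:

::

    from drf_network_pipeline.job_utils.build_task_request import \
        build_task_request
    from drf_network_pipeline.job_utils.build_task_response import \
        build_task_response
    from drf_network_pipeline.pipeline.consts import SUCCESS

    # build the common request dictionary a wrapped Celery task expects
    req_node = build_task_request(
        task_name="task_get_user",
        data={"user_id": 1},
        cache_key="test_task_get_user")

    # the task (or handle_task_method) answers with the common response
    # structure documented in job_utils.rst - serializers only ever
    # inspect these keys; the user payload here is hypothetical
    res_node = build_task_response(
        status=SUCCESS,
        err="",
        task_name=req_node["task_name"],
        data={"id": 1, "username": "user1", "email": "user1@example.com"},
        celery_enabled=req_node["celery_enabled"],
        use_cache=req_node["use_cache"],
        cache_key=req_node["cache_key"])

    if res_node["status"] == SUCCESS:
        print(res_node["data"])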
/webapp/drf_network_pipeline/job_utils/build_task_response.py: -------------------------------------------------------------------------------- 1 | from django.conf import settings 2 | from drf_network_pipeline.pipeline.consts import NOTRUN 3 | 4 | 5 | def build_task_response( 6 | status=NOTRUN, 7 | err="not-set", 8 | task_name="", 9 | data=None, 10 | celery_enabled=settings.CELERY_ENABLED, 11 | use_cache=settings.CACHEOPS_ENABLED, 12 | cache_key=None): 13 | """build_task_response 14 | 15 | Builds a common response dictionary for all Celery tasks 16 | being wrapped with the utils framework 17 | 18 | :param status: task return status code 19 | :param err: task error message for debugging 20 | :param task_name: task label for debugging 21 | :param data: task return data 22 | :param celery_enabled: control flag for testing celery tasks 23 | :param use_cache: use the cached record if available 24 | :param cache_key: cache the result in this redis key 25 | """ 26 | 27 | task_response_node = { 28 | "status": status, 29 | "err": err, 30 | "task_name": task_name, 31 | "data": data, 32 | "celery_enabled": celery_enabled, 33 | "use_cache": use_cache, 34 | "cache_key": cache_key 35 | } 36 | 37 | return task_response_node 38 | # end of build_task_response 39 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/log/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jay-johnson/train-ai-with-django-swagger-jwt/3cdd798709b02047eadebccbe2fc9b855cfcac4f/webapp/drf_network_pipeline/log/__init__.py -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/log/colors-logging.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": 1, 3 | "disable_existing_loggers": false, 4 | "formatters": { 5 | "simple": { 6 | "()": "colorlog.ColoredFormatter", 7 | "format": "%(log_color)s%(asctime)s - %(name)s - %(levelname)s - %(message)s%(reset)s" 8 | } 9 | }, 10 | "handlers": { 11 | "console": { 12 | "class": "logging.StreamHandler", 13 | "level": "INFO", 14 | "formatter": "simple", 15 | "stream": "ext://sys.stdout" 16 | } 17 | }, 18 | "loggers": { 19 | "my_module": { 20 | "level": "ERROR", 21 | "handlers": [ 22 | "console" 23 | ], 24 | "propagate": "no" 25 | } 26 | }, 27 | "root": { 28 | "level": "INFO", 29 | "handlers": [ 30 | "console" 31 | ] 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/log/logging.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": 1, 3 | "disable_existing_loggers": false, 4 | "formatters": { 5 | "simple": { 6 | "()": "colorlog.ColoredFormatter", 7 | "format": "%(log_color)s%(asctime)s - %(name)s - %(levelname)s - %(message)s%(reset)s" 8 | } 9 | }, 10 | "handlers": { 11 | "console": { 12 | "class": "logging.StreamHandler", 13 | "level": "INFO", 14 | "formatter": "simple", 15 | "stream": "ext://sys.stdout" 16 | } 17 | }, 18 | "loggers": { 19 | "my_module": { 20 | "level": "ERROR", 21 | "handlers": [ 22 | "console" 23 | ], 24 | "propagate": "no" 25 | } 26 | }, 27 | "root": { 28 | "level": "INFO", 29 | "handlers": [ 30 | "console" 31 | ] 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/pipeline/__init__.py: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/jay-johnson/train-ai-with-django-swagger-jwt/3cdd798709b02047eadebccbe2fc9b855cfcac4f/webapp/drf_network_pipeline/pipeline/__init__.py -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/pipeline/build_worker_result_node.py: -------------------------------------------------------------------------------- 1 | from django.conf import settings 2 | 3 | 4 | def build_worker_result_node( 5 | req=None): 6 | """build_worker_result_node 7 | 8 | :param req: incoming request dictionary - not used right now 9 | """ 10 | 11 | api_node = None 12 | if settings.ANTINEX_WORKER_ENABLED: 13 | api_node = { 14 | "source": settings.ANTINEX_API_NAME, 15 | "auth_url": settings.ANTINEX_RESULT_AUTH_URL, 16 | "ssl_options": settings.ANTINEX_RESULT_SSL_OPTIONS, 17 | "exchange": settings.ANTINEX_RESULT_EXCHANGE_NAME, 18 | "exchange_type": settings.ANTINEX_RESULT_EXCHANGE_TYPE, 19 | "routing_key": settings.ANTINEX_RESULT_ROUTING_KEY, 20 | "queue": settings.ANTINEX_RESULT_QUEUE_NAME, 21 | "delivery_mode": settings.ANTINEX_RESULT_DELIVERY_MODE, 22 | "task_name": settings.ANTINEX_RESULT_TASK_NAME, 23 | "manifest": req 24 | } 25 | # end of setting up the general api for responses back from the core 26 | 27 | return api_node 28 | # end of build_worker_result_node 29 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/pipeline/consts.py: -------------------------------------------------------------------------------- 1 | SUCCESS = 0 2 | FAILED = 1 3 | ERR = 2 4 | EX = 3 5 | NOTRUN = 4 6 | INVALID = 5 7 | NOTDONE = 6 8 | 9 | VALID = 0 10 | INVALID = 1 11 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/pipeline/utils.py: -------------------------------------------------------------------------------- 1 | 2 | 3 | def convert_to_date( 4 | value=None, 5 | format="%Y-%m-%d %H:%M:%S"): 6 | """convert_to_date 7 | 8 | :param value: datetime object to format 9 | :param format: strftime format string 10 | """ 11 | 12 | if value: 13 | return value.strftime(format) 14 | 15 | return "" 16 | # end of convert_to_date 17 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/sz/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jay-johnson/train-ai-with-django-swagger-jwt/3cdd798709b02047eadebccbe2fc9b855cfcac4f/webapp/drf_network_pipeline/sz/__init__.py -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/templates/home.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Hello world! 6 | 7 | 8 | 9 |
Running Django 2.0+
10 | 11 | 12 | 13 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/templates/rest_registration/register/body.txt: -------------------------------------------------------------------------------- 1 | Please verify your account by clicking on this link: 2 | 3 | {{ verification_url | safe }} 4 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/templates/rest_registration/register/subject.txt: -------------------------------------------------------------------------------- 1 | Please verify your account 2 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/templates/rest_registration/register_email/body.txt: -------------------------------------------------------------------------------- 1 | You can verify the email {{ email }} by clicking on this link: 2 | 3 | {{ verification_url | safe }} 4 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/templates/rest_registration/register_email/subject.txt: -------------------------------------------------------------------------------- 1 | E-mail verification link was sent 2 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/templates/rest_registration/reset_password/body.txt: -------------------------------------------------------------------------------- 1 | You can reset your password by clicking on this link: 2 | 3 | {{ verification_url | safe }} 4 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/templates/rest_registration/reset_password/subject.txt: -------------------------------------------------------------------------------- 1 | Reset password link was sent 2 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jay-johnson/train-ai-with-django-swagger-jwt/3cdd798709b02047eadebccbe2fc9b855cfcac4f/webapp/drf_network_pipeline/tests/__init__.py -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/tests/datasets/cleaned_attack_scans.csv: -------------------------------------------------------------------------------- 1 | eth_type,idx,ip_ihl,ip_len,ip_tos,ip_version,label_value,tcp_dport,tcp_fields_options.MSS,tcp_fields_options.Timestamp,tcp_fields_options.WScale,tcp_seq,tcp_sport 2 | 2048,0,5,4078,0,4,0,8080,65495.0,1653831073.0,7.0,65072,48408 3 | 2048,1,5,4078,0,4,0,8080,65495.0,1653831073.0,7.0,65072,48408 4 | 2048,2,5,15546,0,4,0,48408,65495.0,1653831073.0,7.0,65072,8080 5 | 2048,3,5,15546,0,4,0,48408,65495.0,1653831073.0,7.0,65072,8080 6 | 2048,20,5,20225,0,4,0,8080,65495.0,1653836233.0,7.0,65072,48410 7 | 2048,21,5,20225,0,4,0,8080,65495.0,1653836233.0,7.0,65072,48410 8 | 2048,22,5,15546,0,4,0,48410,65495.0,1653836233.0,7.0,65072,8080 9 | 2048,23,5,15546,0,4,1,48410,65495.0,1653836233.0,7.0,65072,8080 10 | 2048,40,5,23779,0,4,0,8080,65495.0,1653838672.0,7.0,65072,38587 11 | 2048,41,5,23779,0,4,0,8080,65495.0,1653838672.0,7.0,65072,38587 12 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/tests/datasets/cleaned_metadata.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "created": "2018-02-05 08:57:44", 3 | "feature_to_predict": "label_name", 4 | "features_to_process": [ 5 | "eth_type", 6 | "idx", 7 | "ip_ihl", 8 | "ip_len", 9 | "ip_tos", 10 | "ip_version", 11 | "label_value", 12 | "tcp_dport", 13 | "tcp_fields_options.MSS", 14 | "tcp_fields_options.Timestamp", 15 | "tcp_fields_options.WScale", 16 | "tcp_seq", 17 | "tcp_sport" 18 | ], 19 | "headers": [ 20 | "eth_type", 21 | "idx", 22 | "ip_ihl", 23 | "ip_len", 24 | "ip_tos", 25 | "ip_version", 26 | "label_value", 27 | "tcp_dport", 28 | "tcp_fields_options.MSS", 29 | "tcp_fields_options.Timestamp", 30 | "tcp_fields_options.WScale", 31 | "tcp_seq", 32 | "tcp_sport" 33 | ], 34 | "ignore_features": [ 35 | "label_name" 36 | ], 37 | "label_rules": { 38 | "label_values": [ 39 | 0, 40 | 1 41 | ], 42 | "labels": [ 43 | "not_attack", 44 | "attack" 45 | ], 46 | "set_if_above": 85 47 | }, 48 | "output_type": "clean", 49 | "pipeline_files": [ 50 | "/opt/antinex/datasets/react-redux/netdata-2018-01-29-13-36-35.csv", 51 | "/opt/antinex/datasets/spring/netdata-2018-01-29-15-00-12.csv", 52 | "/opt/antinex/datasets/vue/netdata-2018-01-29-14-12-44.csv", 53 | "/opt/antinex/datasets/django/netdata-2018-01-28-23-12-13.csv", 54 | "/opt/antinex/datasets/django/netdata-2018-01-28-23-06-05.csv", 55 | "/opt/antinex/datasets/flask-restplus/netdata-2018-01-29-11-30-02.csv" 56 | ], 57 | "post_proc_rules": { 58 | "drop_columns": [ 59 | "src_file", 60 | "raw_id", 61 | "raw_load", 62 | "raw_hex_load", 63 | "raw_hex_field_load", 64 | "pad_load", 65 | "eth_dst", 66 | "eth_src", 67 | "ip_dst", 68 | "ip_src" 69 | ], 70 | "predict_feature": "label_name" 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/tests/datasets/fulldata_attack_scans.csv: -------------------------------------------------------------------------------- 1 | arp_id,dns_id,eth_dst,eth_id,eth_src,eth_type,icmp_id,idx,ip_dst,ip_id,ip_ihl,ip_len,ip_src,ip_tos,ip_version,ipvsix_id,label_name,label_value,pad_id,pad_load,raw_hex_field_load,raw_id,raw_load,src_file,tcp_dport,tcp_fields_options.MSS,tcp_fields_options.NOP,tcp_fields_options.SAckOK,tcp_fields_options.Timestamp,tcp_fields_options.WScale,tcp_id,tcp_seq,tcp_sport,udp_id 2 | ,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,0,127.0.0.1,,5,4078,127.0.0.1,0,4,,not_attack,0,,,,,,/opt/antinex/datasets/react-redux/netdata-2018-01-29-13-36-35.csv,8080,65495.0,,,1653831073.0,7.0,,65072,48408, 3 | ,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,1,127.0.0.1,,5,4078,127.0.0.1,0,4,,not_attack,0,,,,,,/opt/antinex/datasets/react-redux/netdata-2018-01-29-13-36-35.csv,8080,65495.0,,,1653831073.0,7.0,,65072,48408, 4 | ,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,2,127.0.0.1,,5,15546,127.0.0.1,0,4,,not_attack,0,,,,,,/opt/antinex/datasets/react-redux/netdata-2018-01-29-13-36-35.csv,48408,65495.0,,,1653831073.0,7.0,,65072,8080, 5 | ,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,3,127.0.0.1,,5,15546,127.0.0.1,0,4,,not_attack,0,,,,,,/opt/antinex/datasets/react-redux/netdata-2018-01-29-13-36-35.csv,48408,65495.0,,,1653831073.0,7.0,,65072,8080, 6 | ,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,4,127.0.0.1,,5,4085,127.0.0.1,0,4,,not_attack,0,,,,,,/opt/antinex/datasets/react-redux/netdata-2018-01-29-13-36-35.csv,8080,,,,1653831073.0,,,65064,48408, 7 | 
,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,5,127.0.0.1,,5,4085,127.0.0.1,0,4,,not_attack,0,,,,,,/opt/antinex/datasets/react-redux/netdata-2018-01-29-13-36-35.csv,8080,,,,1653831073.0,,,65064,48408, 8 | ,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,6,127.0.0.1,,5,3939,127.0.0.1,0,4,,attack,1,,,,,474554202f20485454502f312e310d0a486f73743a203132372e302e302e313a383038300d0a436f6e6e656374696f6e3a206b6565702d616c6976650d0a4163636570742d456e636f64696e673a20677a69702c206465666c6174650d0a4163636570743a202a2f2a0d0a557365722d4167656e743a20707974686f6e2d72657175657374732f322e31332e300d0a0d0a,/opt/antinex/datasets/react-redux/netdata-2018-01-29-13-36-35.csv,8080,,,,1653831073.0,,,65209,48408, 9 | ,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,7,127.0.0.1,,5,3939,127.0.0.1,0,4,,not_attack,0,,,,,474554202f20485454502f312e310d0a486f73743a203132372e302e302e313a383038300d0a436f6e6e656374696f6e3a206b6565702d616c6976650d0a4163636570742d456e636f64696e673a20677a69702c206465666c6174650d0a4163636570743a202a2f2a0d0a557365722d4167656e743a20707974686f6e2d72657175657374732f322e31332e300d0a0d0a,/opt/antinex/datasets/react-redux/netdata-2018-01-29-13-36-35.csv,8080,,,,1653831073.0,,,65209,48408, 10 | ,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,8,127.0.0.1,,5,33405,127.0.0.1,0,4,,not_attack,0,,,,,,/opt/antinex/datasets/react-redux/netdata-2018-01-29-13-36-35.csv,48408,,,,1653831073.0,,,65064,8080, 11 | ,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,9,127.0.0.1,,5,33405,127.0.0.1,0,4,,attack,1,,,,,,/opt/antinex/datasets/react-redux/netdata-2018-01-29-13-36-35.csv,48408,,,,1653831073.0,,,65064,8080, 12 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/tests/datasets/fulldata_metadata.json: -------------------------------------------------------------------------------- 1 | { 2 | "created": "2018-02-05 08:57:44", 3 | "feature_to_predict": "label_name", 4 | "features_to_process": [ 5 | "arp_id", 6 | "dns_id", 7 | "eth_id", 8 | "eth_type", 9 | "icmp_id", 10 | "idx", 11 | "ip_id", 12 | "ip_ihl", 13 | "ip_len", 14 | "ip_tos", 15 | "ip_version", 16 | "ipvsix_id", 17 | "label_value", 18 | "pad_id", 19 | "tcp_dport", 20 | "tcp_fields_options.MSS", 21 | "tcp_fields_options.NOP", 22 | "tcp_fields_options.SAckOK", 23 | "tcp_fields_options.Timestamp", 24 | "tcp_fields_options.WScale", 25 | "tcp_id", 26 | "tcp_seq", 27 | "tcp_sport", 28 | "udp_id" 29 | ], 30 | "headers": [ 31 | "arp_id", 32 | "dns_id", 33 | "eth_dst", 34 | "eth_id", 35 | "eth_src", 36 | "eth_type", 37 | "icmp_id", 38 | "idx", 39 | "ip_dst", 40 | "ip_id", 41 | "ip_ihl", 42 | "ip_len", 43 | "ip_src", 44 | "ip_tos", 45 | "ip_version", 46 | "ipvsix_id", 47 | "label_name", 48 | "label_value", 49 | "pad_id", 50 | "pad_load", 51 | "raw_hex_field_load", 52 | "raw_id", 53 | "raw_load", 54 | "src_file", 55 | "tcp_dport", 56 | "tcp_fields_options.MSS", 57 | "tcp_fields_options.NOP", 58 | "tcp_fields_options.SAckOK", 59 | "tcp_fields_options.Timestamp", 60 | "tcp_fields_options.WScale", 61 | "tcp_id", 62 | "tcp_seq", 63 | "tcp_sport", 64 | "udp_id" 65 | ], 66 | "ignore_features": [ 67 | "label_name", 68 | "src_file", 69 | "raw_id", 70 | "raw_load", 71 | "raw_hex_field_load", 72 | "pad_load", 73 | "eth_dst", 74 | "eth_src", 75 | "ip_dst", 76 | "ip_src" 77 | ], 78 | "label_rules": { 79 | "label_values": [ 80 | 0, 81 | 1 82 | ], 83 | "labels": [ 84 | "not_attack", 85 | "attack" 86 | ], 87 | "set_if_above": 85 88 | }, 89 | "output_type": "fulldata", 90 | "pipeline_files": [ 91 | 
"/opt/antinex/datasets/react-redux/netdata-2018-01-29-13-36-35.csv", 92 | "/opt/antinex/datasets/spring/netdata-2018-01-29-15-00-12.csv", 93 | "/opt/antinex/datasets/vue/netdata-2018-01-29-14-12-44.csv", 94 | "/opt/antinex/datasets/django/netdata-2018-01-28-23-12-13.csv", 95 | "/opt/antinex/datasets/django/netdata-2018-01-28-23-06-05.csv", 96 | "/opt/antinex/datasets/flask-restplus/netdata-2018-01-29-11-30-02.csv" 97 | ], 98 | "post_proc_rules": { 99 | "drop_columns": [ 100 | "src_file", 101 | "raw_id", 102 | "raw_load", 103 | "raw_hex_load", 104 | "raw_hex_field_load", 105 | "pad_load", 106 | "eth_dst", 107 | "eth_src", 108 | "ip_dst", 109 | "ip_src" 110 | ], 111 | "predict_feature": "label_name" 112 | } 113 | } 114 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/tests/datasets/stock.csv: -------------------------------------------------------------------------------- 1 | close,high,low,open,volume 2 | 220.15,220.18,219.00,219.17,72402638 3 | 232.77,233.07,232.05,232.08,55182050 4 | 237.81,237.86,237.24,237.62,57256824 5 | 247.84,248.33,247.67,247.92,62006989 6 | 208.55,209.89,208.38,208.91,109122059 7 | 246.06,247.52,244.95,247.26,91398777 8 | 265.51,265.52,264.03,264.99,76563873 9 | 242.19,242.55,240.85,242.16,50354614 10 | 246.90,247.28,246.23,246.84,57916867 11 | 241.80,242.71,241.57,242.28,86820694 12 | 235.20,236.00,234.64,235.15,74412311 13 | 248.93,249.55,248.08,249.15,57064357 14 | 221.70,221.74,220.66,221.22,59877377 15 | 243.21,243.98,243.12,243.34,50375430 16 | 234.28,234.61,233.05,233.77,97569204 17 | 243.13,243.51,242.47,242.91,66986801 18 | 238.98,239.42,238.67,239.09,53912730 19 | 242.37,242.80,241.76,241.95,36663274 20 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/tests/ml_models/ml_weights_job_1_result_1.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jay-johnson/train-ai-with-django-swagger-jwt/3cdd798709b02047eadebccbe2fc9b855cfcac4f/webapp/drf_network_pipeline/tests/ml_models/ml_weights_job_1_result_1.h5 -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/tests/ml_models/prediction_manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "job_id": 1, 3 | "result_id": 1, 4 | "csv_file": "/tmp/cleaned_attack_scans.csv", 5 | "meta_file": "/tmp/cleaned_metadata.json", 6 | "model_weights_file": "./drf_network_pipeline/tests/ml_models/ml_weights_job_1_result_1.h5", 7 | "test_size": 0.2, 8 | "epochs": 5, 9 | "batch_size": 32, 10 | "seed": 42, 11 | "loss": "binary_crossentropy", 12 | "metrics": [ 13 | "accuracy" 14 | ], 15 | "optimizer": "adam", 16 | "training_data": {}, 17 | "histories": [ 18 | "val_loss", 19 | "val_acc", 20 | "loss", 21 | "acc" 22 | ], 23 | "verbose": 1, 24 | "predict_feature": "label_value", 25 | "features_to_process": [ 26 | "eth_type", 27 | "idx", 28 | "ip_ihl", 29 | "ip_len", 30 | "ip_tos", 31 | "ip_version", 32 | "tcp_dport", 33 | "tcp_fields_options.MSS", 34 | "tcp_fields_options.Timestamp", 35 | "tcp_fields_options.WScale", 36 | "tcp_seq", 37 | "tcp_sport" 38 | ], 39 | "ignore_features": [ 40 | "label_name", 41 | "ip_src", 42 | "ip_dst", 43 | "eth_src", 44 | "eth_dst", 45 | "src_file", 46 | "raw_id", 47 | "raw_load", 48 | "raw_hex_load", 49 | "raw_hex_field_load", 50 | "pad_load" 51 | ], 52 | "label_rules": { 53 | "label_values": [ 54 | 0, 55 | 1 
56 | ], 57 | "labels": [ 58 | "not_attack", 59 | "attack" 60 | ], 61 | "set_if_above": 85 62 | }, 63 | "post_proc_rules": { 64 | "drop_columns": [ 65 | "src_file", 66 | "raw_id", 67 | "raw_load", 68 | "raw_hex_load", 69 | "raw_hex_field_load", 70 | "pad_load", 71 | "eth_dst", 72 | "eth_src", 73 | "ip_dst", 74 | "ip_src" 75 | ], 76 | "predict_feature": "label_name" 77 | }, 78 | "version": 1 79 | } 80 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/tests/prepare/django/netdata-2018-01-28-23-06-05.csv: -------------------------------------------------------------------------------- 1 | idx,arp_id,dns_id,eth_dst,eth_id,eth_src,eth_type,icmp_id,ip_dst,ip_id,ip_ihl,ip_len,ip_src,ip_tos,ip_version,ipvsix_id,pad_id,pad_load,raw_id,raw_load,tcp_dport,tcp_fields_options.MSS,tcp_fields_options.NOP,tcp_fields_options.SAckOK,tcp_fields_options.Timestamp,tcp_fields_options.WScale,tcp_id,tcp_seq,tcp_sport,udp_id 2 | 0,,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,127.0.0.1,,5,4050,127.0.0.1,0,4,,,,,,54050,,,,3269735087.0,,,65064,8080, 3 | 1,,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,127.0.0.1,,5,4050,127.0.0.1,0,4,,,,,,54050,,,,3269735087.0,,,65064,8080, 4 | 2,,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,127.0.0.1,,5,4032,127.0.0.1,0,4,,,,,485454502f312e3120323030204f4b0d0a,54050,,,,3269735105.0,,,65081,8080, 5 | 3,,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,127.0.0.1,,5,4032,127.0.0.1,0,4,,,,,485454502f312e3120323030204f4b0d0a,54050,,,,3269735105.0,,,65081,8080, 6 | 4,,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,127.0.0.1,,5,22796,127.0.0.1,0,4,,,,,,8080,,,,3269735105.0,,,65064,54050, 7 | 5,,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,127.0.0.1,,5,22796,127.0.0.1,0,4,,,,,,8080,,,,3269735105.0,,,65064,54050, 8 | 6,,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,127.0.0.1,,5,3973,127.0.0.1,0,4,,,,,446174653a204d6f6e2c203239204a616e20323031382030363a35363a343920474d540d0a5365727665723a20575347495365727665722f302e322043507974686f6e2f332e362e330d0a,54050,,,,3269735105.0,,,65139,8080, 9 | 7,,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,127.0.0.1,,5,3973,127.0.0.1,0,4,,,,,446174653a204d6f6e2c203239204a616e20323031382030363a35363a343920474d540d0a5365727665723a20575347495365727665722f302e322043507974686f6e2f332e362e330d0a,54050,,,,3269735105.0,,,65139,8080, 10 | 8,,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,127.0.0.1,,5,22795,127.0.0.1,0,4,,,,,,8080,,,,3269735105.0,,,65064,54050, 11 | 9,,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,127.0.0.1,,5,22795,127.0.0.1,0,4,,,,,,8080,,,,3269735105.0,,,65064,54050, 12 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/tests/prepare/flask-restplus/netdata-2018-01-29-11-30-02.csv: -------------------------------------------------------------------------------- 1 | idx,arp_id,dns_id,eth_dst,eth_id,eth_src,eth_type,icmp_id,ip_dst,ip_id,ip_ihl,ip_len,ip_src,ip_tos,ip_version,ipvsix_id,pad_id,raw_id,raw_load,tcp_dport,tcp_fields_options.MSS,tcp_fields_options.NOP,tcp_fields_options.SAckOK,tcp_fields_options.Timestamp,tcp_fields_options.WScale,tcp_id,tcp_seq,tcp_sport,udp_id 2 | 0,,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,127.0.0.1,,5,11347,127.0.0.1,0,4,,,,,8080,65495.0,,,3540511654,7.0,,65072,36704, 3 | 1,,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,127.0.0.1,,5,11347,127.0.0.1,0,4,,,,,8080,65495.0,,,3540511654,7.0,,65072,36704, 4 | 
2,,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,127.0.0.1,,5,15546,127.0.0.1,0,4,,,,,36704,65495.0,,,3540511654,7.0,,65072,8080, 5 | 3,,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,127.0.0.1,,5,15546,127.0.0.1,0,4,,,,,36704,65495.0,,,3540511654,7.0,,65072,8080, 6 | 4,,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,127.0.0.1,,5,11354,127.0.0.1,0,4,,,,,8080,,,,3540511654,,,65064,36704, 7 | 5,,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,127.0.0.1,,5,11354,127.0.0.1,0,4,,,,,8080,,,,3540511654,,,65064,36704, 8 | 6,,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,127.0.0.1,,5,11208,127.0.0.1,0,4,,,,474554202f20485454502f312e310d0a486f73743a203132372e302e302e313a383038300d0a436f6e6e656374696f6e3a206b6565702d616c6976650d0a4163636570742d456e636f64696e673a20677a69702c206465666c6174650d0a4163636570743a202a2f2a0d0a557365722d4167656e743a20707974686f6e2d72657175657374732f322e31332e300d0a0d0a,8080,,,,3540511654,,,65209,36704, 9 | 7,,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,127.0.0.1,,5,11208,127.0.0.1,0,4,,,,474554202f20485454502f312e310d0a486f73743a203132372e302e302e313a383038300d0a436f6e6e656374696f6e3a206b6565702d616c6976650d0a4163636570742d456e636f64696e673a20677a69702c206465666c6174650d0a4163636570743a202a2f2a0d0a557365722d4167656e743a20707974686f6e2d72657175657374732f322e31332e300d0a0d0a,8080,,,,3540511654,,,65209,36704, 10 | 8,,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,127.0.0.1,,5,39326,127.0.0.1,0,4,,,,,36704,,,,3540511654,,,65064,8080, 11 | 9,,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,127.0.0.1,,5,39326,127.0.0.1,0,4,,,,,36704,,,,3540511654,,,65064,8080, 12 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/tests/prepare/react-redux/netdata-2018-01-29-13-36-35.csv: -------------------------------------------------------------------------------- 1 | idx,arp_id,dns_id,eth_dst,eth_id,eth_src,eth_type,icmp_id,ip_dst,ip_id,ip_ihl,ip_len,ip_src,ip_tos,ip_version,ipvsix_id,pad_id,raw_hex_field_load,raw_id,raw_load,tcp_dport,tcp_fields_options.MSS,tcp_fields_options.NOP,tcp_fields_options.SAckOK,tcp_fields_options.Timestamp,tcp_fields_options.WScale,tcp_id,tcp_seq,tcp_sport,udp_id 2 | 0,,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,127.0.0.1,,5,4078,127.0.0.1,0,4,,,,,,8080,65495.0,,,1653831073,7.0,,65072,48408, 3 | 1,,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,127.0.0.1,,5,4078,127.0.0.1,0,4,,,,,,8080,65495.0,,,1653831073,7.0,,65072,48408, 4 | 2,,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,127.0.0.1,,5,15546,127.0.0.1,0,4,,,,,,48408,65495.0,,,1653831073,7.0,,65072,8080, 5 | 3,,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,127.0.0.1,,5,15546,127.0.0.1,0,4,,,,,,48408,65495.0,,,1653831073,7.0,,65072,8080, 6 | 4,,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,127.0.0.1,,5,4085,127.0.0.1,0,4,,,,,,8080,,,,1653831073,,,65064,48408, 7 | 5,,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,127.0.0.1,,5,4085,127.0.0.1,0,4,,,,,,8080,,,,1653831073,,,65064,48408, 8 | 6,,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,127.0.0.1,,5,3939,127.0.0.1,0,4,,,,,474554202f20485454502f312e310d0a486f73743a203132372e302e302e313a383038300d0a436f6e6e656374696f6e3a206b6565702d616c6976650d0a4163636570742d456e636f64696e673a20677a69702c206465666c6174650d0a4163636570743a202a2f2a0d0a557365722d4167656e743a20707974686f6e2d72657175657374732f322e31332e300d0a0d0a,8080,,,,1653831073,,,65209,48408, 9 | 
7,,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,127.0.0.1,,5,3939,127.0.0.1,0,4,,,,,474554202f20485454502f312e310d0a486f73743a203132372e302e302e313a383038300d0a436f6e6e656374696f6e3a206b6565702d616c6976650d0a4163636570742d456e636f64696e673a20677a69702c206465666c6174650d0a4163636570743a202a2f2a0d0a557365722d4167656e743a20707974686f6e2d72657175657374732f322e31332e300d0a0d0a,8080,,,,1653831073,,,65209,48408, 10 | 8,,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,127.0.0.1,,5,33405,127.0.0.1,0,4,,,,,,48408,,,,1653831073,,,65064,8080, 11 | 9,,,00:00:00:00:00:00,,00:00:00:00:00:00,2048,,127.0.0.1,,5,33405,127.0.0.1,0,4,,,,,,48408,,,,1653831073,,,65064,8080, 12 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/tests/pubsub/get-user.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "user_id": 1 4 | }, 5 | "celery_enabled": true, 6 | "cache_key": "test_task_get_user" 7 | } 8 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/urls.py: -------------------------------------------------------------------------------- 1 | from django.conf import settings 2 | from django.contrib import admin 3 | from django.urls import include 4 | from django.urls import path 5 | from django.urls import re_path 6 | from django.views.static import serve 7 | from rest_framework import routers 8 | from rest_framework_swagger.views import get_swagger_view 9 | from rest_framework_jwt.views import obtain_jwt_token 10 | from drf_network_pipeline.api.user import UserViewSet 11 | 12 | import drf_network_pipeline.api.ml as ml_api 13 | import drf_network_pipeline.index 14 | 15 | 16 | schema_view = get_swagger_view(title="DRF Swagger with JWT") 17 | 18 | 19 | # Routers provide an easy way of automatically determining the URL conf. 
20 | router = routers.DefaultRouter() 21 | router.register(r"users", UserViewSet) 22 | router.register(r"mlprepare", ml_api.MLPrepareViewSet) 23 | router.register(r"ml", ml_api.MLJobViewSet) 24 | router.register(r"mlresults", ml_api.MLJobResultViewSet) 25 | 26 | 27 | urlpatterns = [ 28 | path("admin/", 29 | admin.site.urls, 30 | name="admin"), 31 | path("api-auth/", 32 | include("rest_framework.urls")), 33 | path("api-token-auth/", 34 | obtain_jwt_token), 35 | path("swagger/", 36 | schema_view), 37 | path("", 38 | include(router.urls)), 39 | path("accounts/", 40 | include("rest_registration.api.urls"), 41 | name="account-create"), 42 | ] 43 | 44 | if settings.DEBUG: 45 | import debug_toolbar # noqa 46 | urlpatterns = [ 47 | path("__debug__/", 48 | include(debug_toolbar.urls)), 49 | ] + urlpatterns 50 | 51 | if settings.INCLUDE_DOCS: 52 | # could also make the user login required: 53 | # noqa https://stackoverflow.com/questions/20386445/integrating-sphinx-and-django-in-order-to-require-users-to-log-in-to-see-the-doc 54 | urlpatterns = [ 55 | path( 56 | 'docs/', 57 | drf_network_pipeline.index.handle_sphinx_doc_index, 58 | name="sphinx_doc_index"), 59 | re_path( 60 | r'^docs/(?P.*)', 61 | serve, 62 | { 63 | 'document_root': settings.DOCS_ROOT 64 | }, 65 | name="sphinx_all_docs"), 66 | ] + urlpatterns 67 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/users/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jay-johnson/train-ai-with-django-swagger-jwt/3cdd798709b02047eadebccbe2fc9b855cfcac4f/webapp/drf_network_pipeline/users/__init__.py -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/users/admin.py: -------------------------------------------------------------------------------- 1 | from django.contrib import admin 2 | from django.contrib.auth.admin import UserAdmin 3 | from .models import User 4 | 5 | admin.site.register(User, UserAdmin) 6 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/users/db_lookup_user.py: -------------------------------------------------------------------------------- 1 | from spylunking.log.setup_logging import build_colorized_logger 2 | from django.contrib.auth import get_user_model 3 | from django.contrib.auth.models import User 4 | from django.db.models import Q 5 | from drf_network_pipeline.pipeline.consts import SUCCESS 6 | from drf_network_pipeline.pipeline.consts import FAILED 7 | from drf_network_pipeline.pipeline.consts import NOTRUN 8 | 9 | 10 | User = get_user_model() # noqa 11 | 12 | name = 'user_tasks' 13 | log = build_colorized_logger( 14 | name=name) 15 | 16 | 17 | def db_lookup_user( 18 | user_id=None, 19 | use_cache=False): 20 | """db_lookup_user 21 | 22 | :param user_id: user id 23 | :param use_cache: cache data 24 | """ 25 | 26 | res = { 27 | "status": NOTRUN, 28 | "err": "not-run", 29 | "data": None, 30 | "user_obj": None, 31 | "profile_obj": None 32 | } 33 | 34 | if not user_id: 35 | log.error(("db_lookup_user user_id={}") 36 | .format( 37 | user_id)) 38 | return res 39 | # check user_id is there 40 | 41 | db_query = (Q(id=user_id)) 42 | log.info(("finding user={} cache={}") 43 | .format( 44 | user_id, 45 | use_cache)) 46 | qset = None 47 | 48 | if use_cache: 49 | qset = User.objects.select_related().filter( 50 | db_query).cache() 51 | else: 52 | qset = 
User.objects.select_related().filter( 53 | db_query) 54 | # end of if caching all records or not 55 | 56 | if len(qset) == 0: 57 | res["err"] = ("failed to find user={}").format( 58 | user_id) 59 | log.error(res["err"]) 60 | res["status"] = FAILED 61 | res["data"] = None 62 | res["user_obj"] = None 63 | else: 64 | obj = qset[0] 65 | log.info(("found user.id={} name={}") 66 | .format( 67 | obj.id, 68 | obj.username)) 69 | # remember to only send json-serializables back 70 | res["data"] = { 71 | "id": obj.id, 72 | "username": obj.username, 73 | "email": obj.email 74 | } 75 | res["user_obj"] = obj 76 | res["profile_obj"] = None 77 | res["status"] = SUCCESS 78 | # if the query worked 79 | 80 | return res 81 | # end of db_lookup_user 82 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/users/models.py: -------------------------------------------------------------------------------- 1 | from django.contrib.auth.models import AbstractUser 2 | 3 | 4 | class User(AbstractUser): 5 | pass 6 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/users/tasks.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, unicode_literals 2 | from celery import shared_task 3 | from spylunking.log.setup_logging import build_colorized_logger 4 | from django.conf import settings 5 | from drf_network_pipeline.pipeline.consts import SUCCESS 6 | from drf_network_pipeline.pipeline.consts import FAILED 7 | from drf_network_pipeline.pipeline.consts import ERR 8 | from drf_network_pipeline.job_utils.build_task_response import \ 9 | build_task_response 10 | from drf_network_pipeline.users.db_lookup_user import \ 11 | db_lookup_user 12 | 13 | 14 | log = build_colorized_logger( 15 | name='users.tasks') 16 | 17 | 18 | # allow tasks to be sent straight to the worker 19 | @shared_task( 20 | name=("drf_network_pipeline.users.tasks." 21 | "task_get_user"), 22 | queue=("drf_network_pipeline.users.tasks."
23 | "task_get_user"), 24 | bind=True) 25 | def task_get_user( 26 | self=None, 27 | req_node=None): 28 | """task_get_user 29 | 30 | :param self: parent task object for bind=True 31 | :param req_node: dictionary for lookup values 32 | """ 33 | 34 | label = "task_get_user" 35 | 36 | log.info(("task - {} - start " 37 | "req_node={}") 38 | .format( 39 | label, 40 | req_node)) 41 | 42 | req_data = req_node.get("data", {}) 43 | use_cache = req_node.get("use_cache", settings.CACHEOPS_ENABLED) 44 | 45 | # create the response node from request 46 | res = build_task_response( 47 | use_cache=use_cache, 48 | celery_enabled=req_node["celery_enabled"], 49 | cache_key=req_node["cache_key"]) 50 | 51 | user_id = req_data.get("user_id", None) 52 | if user_id: 53 | full_user_res = db_lookup_user( 54 | user_id=user_id, 55 | use_cache=use_cache) 56 | if full_user_res["status"] == SUCCESS: 57 | res["status"] = SUCCESS 58 | res["err"] = "" 59 | res["data"] = full_user_res.get("data", None) 60 | else: 61 | res["err"] = ("did not find user_id={}").format( 62 | user_id) 63 | log.info(res["err"]) 64 | res["status"] = FAILED 65 | res["data"] = None 66 | # end of looking up user from db 67 | else: 68 | res["err"] = ("no user_id in data={}").format( 69 | req_data) 70 | log.info(res["err"]) 71 | res["status"] = ERR 72 | res["data"] = None 73 | # end of if user_id found 74 | 75 | log.info(("task - {} result={} - done") 76 | .format( 77 | label, 78 | res)) 79 | 80 | return res 81 | # end of task_get_user 82 | -------------------------------------------------------------------------------- /webapp/drf_network_pipeline/wsgi.py: -------------------------------------------------------------------------------- 1 | """ 2 | WSGI config for drf_network_pipeline project. 3 | It exposes the WSGI callable as a module-level variable named ``application``. 4 | For more information on this file, see 5 | https://docs.djangoproject.com/en/{{ docs_version }}/howto/deployment/wsgi/ 6 | """ 7 | import os 8 | 9 | 10 | configuration = os.getenv( 11 | "ENVIRONMENT", 12 | "development").title() 13 | os.environ.setdefault( 14 | "DJANGO_SETTINGS_MODULE", 15 | "drf_network_pipeline.settings") 16 | os.environ.setdefault( 17 | "DJANGO_CONFIGURATION", 18 | configuration) 19 | 20 | from configurations.wsgi import get_wsgi_application # noqa 21 | 22 | application = get_wsgi_application() 23 | -------------------------------------------------------------------------------- /webapp/manage.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import os 3 | import sys 4 | 5 | 6 | if __name__ == "__main__": 7 | configuration = os.getenv( 8 | "ENVIRONMENT", 9 | "development").title() 10 | os.environ.setdefault( 11 | "DJANGO_SETTINGS_MODULE", 12 | "drf_network_pipeline.settings") 13 | os.environ.setdefault( 14 | "DJANGO_CONFIGURATION", 15 | configuration) 16 | 17 | try: 18 | from configurations.management import execute_from_command_line 19 | except ImportError as exc: 20 | raise ImportError( 21 | "Couldn\'t import Django. Are you sure it\'s installed and " 22 | "available on your PYTHONPATH environment variable? Did you " 23 | "forget to activate a virtual environment? 
ex={}".format( 24 | exc)) 25 | execute_from_command_line(sys.argv) 26 | -------------------------------------------------------------------------------- /webapp/send-worker-get-user.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import os 4 | import json 5 | from celery import Celery 6 | from django.conf import settings 7 | from spylunking.log.setup_logging import build_colorized_logger 8 | from antinex_utils.utils import ppj 9 | 10 | 11 | name = 'send-worker-get-user' 12 | log = build_colorized_logger( 13 | name=name) 14 | 15 | os.environ.setdefault( 16 | "DJANGO_SETTINGS_MODULE", 17 | "drf_network_pipeline.settings") 18 | 19 | log.info("creating celery app") 20 | app = Celery("test-app") 21 | 22 | app.config_from_object( 23 | "django.conf:settings", 24 | namespace="CELERY") 25 | 26 | app.autodiscover_tasks( 27 | lambda: settings.INSTALLED_APPS) 28 | 29 | datafile = "./drf_network_pipeline/tests/pubsub/get-user.json" 30 | data = {} 31 | with open(datafile, "r") as f: 32 | data = json.loads(f.read()) 33 | 34 | # Celery task routing and queue 35 | parent_route = "drf_network_pipeline.users.tasks" 36 | task_name = ("{}.task_get_user").format( 37 | parent_route) 38 | queue_name = ("{}.task_get_user").format( 39 | parent_route) 40 | 41 | log.info(("sending args={} to broker={} task={}") 42 | .format( 43 | ppj(data), 44 | app.conf["BROKER_URL"], 45 | task_name)) 46 | 47 | task_res = app.send_task( 48 | task_name, 49 | args=[data], 50 | queue=queue_name) 51 | 52 | log.info(("task={} task.id={} result={}") 53 | .format( 54 | task_name, 55 | task_res.id, 56 | ppj(task_res.get()))) 57 | -------------------------------------------------------------------------------- /webapp/send-worker-publish-to-core.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import os 4 | import json 5 | from celery import Celery 6 | from django.conf import settings 7 | from spylunking.log.setup_logging import build_colorized_logger 8 | from antinex_utils.utils import ppj 9 | 10 | 11 | name = 'send-worker-publish-to-core' 12 | log = build_colorized_logger( 13 | name=name) 14 | 15 | os.environ.setdefault( 16 | "DJANGO_SETTINGS_MODULE", 17 | "drf_network_pipeline.settings") 18 | 19 | log.info("creating celery app") 20 | app = Celery("test-app") 21 | 22 | app.config_from_object( 23 | "django.conf:settings", 24 | namespace="CELERY") 25 | 26 | app.autodiscover_tasks( 27 | lambda: settings.INSTALLED_APPS) 28 | 29 | datafile = "./drf_network_pipeline/tests/pubsub/publish-to-core.json" 30 | data = {} 31 | with open(datafile, "r") as f: 32 | data = json.loads(f.read()) 33 | 34 | # Celery task routing and queue 35 | parent_route = "drf_network_pipeline.pipeline.tasks" 36 | task_name = ("{}.task_publish_to_core").format( 37 | parent_route) 38 | queue_name = ("{}.task_publish_to_core").format( 39 | parent_route) 40 | 41 | log.info(("sending args={} to broker={} task={}") 42 | .format( 43 | ppj(data), 44 | app.conf["BROKER_URL"], 45 | task_name)) 46 | 47 | app.send_task( 48 | task_name, 49 | args=[data], 50 | queue=queue_name) 51 | -------------------------------------------------------------------------------- /webapp/staticfiles/.gitignore: -------------------------------------------------------------------------------- 1 | *.log 2 | *.tff 3 | *.tiff 4 | *.js 5 | *.json 6 | *.png 7 | *.jpg 8 | *.jpeg 9 | *.bmp 10 | *.gif 11 | *.gifv 12 | *.ipynb 13 | *.pdf 14 | *.css 15 | *.less 16 | *.woff 17 | 
*.eot 18 | admin/* 19 | rest_framework/* 20 | rest_framework_swagger/* 21 | debug_toolbar/* 22 | django_extensions/* 23 | --------------------------------------------------------------------------------
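The ``urls.py`` above wires djangorestframework-jwt's ``obtain_jwt_token`` view at ``api-token-auth/``. A short sketch, not a file from this repo, of logging in and calling a router-registered endpoint with ``requests`` - it assumes the dev server is running via ``python manage.py runserver 0.0.0.0:8010`` and that the user already exists (the credentials here are placeholders):

::

    import requests

    base = "http://localhost:8010"

    # obtain a JWT - djangorestframework-jwt's obtain_jwt_token view
    # accepts a username/password pair and returns {"token": "..."}
    auth = requests.post(
        "{}/api-token-auth/".format(base),
        json={"username": "user1", "password": "not-a-real-password"})
    token = auth.json()["token"]

    # call the ML jobs endpoint registered on the DefaultRouter;
    # djangorestframework-jwt expects the "JWT " header prefix by default
    jobs = requests.get(
        "{}/ml/".format(base),
        headers={"Authorization": "JWT {}".format(token)})
    print(jobs.status_code, jobs.json())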