├── ml
├── ml
│ ├── __init__.py
│ ├── models
│ │ └── __init__.py
│ ├── feature_selection
│ │ ├── __init__.py
│ │ ├── wrapper.py
│ │ ├── filter.py
│ │ ├── lgb.py
│ │ └── xgb.py
│ └── utils
│ │ ├── __init__.py
│ │ ├── input_data_report.py
│ │ ├── feature_exploration.py
│ │ └── metric.py
├── results
│ └── Strat-b85b15d0-b674-11e8-bedc-0242ac120006
│ │ ├── errors.txt
│ │ ├── summary_plot.png
│ │ ├── feature_exploration
│ │ ├── LIGHTGBM_0_analyze_gain_features.png
│ │ ├── LIGHTGBM_0_analyze_shap_features.png
│ │ ├── XGBOOST_0_analyze_cover_features.png
│ │ ├── XGBOOST_0_analyze_gain_features.png
│ │ ├── XGBOOST_0_analyze_shap_features.png
│ │ ├── XGBOOST_50_analyze_gain_features.png
│ │ ├── XGBOOST_50_analyze_shap_features.png
│ │ ├── LIGHTGBM_0_analyze_split_features.png
│ │ ├── LIGHTGBM_100_analyze_gain_features.png
│ │ ├── LIGHTGBM_100_analyze_shap_features.png
│ │ ├── LIGHTGBM_100_analyze_split_features.png
│ │ ├── LIGHTGBM_150_analyze_gain_features.png
│ │ ├── LIGHTGBM_150_analyze_shap_features.png
│ │ ├── LIGHTGBM_150_analyze_split_features.png
│ │ ├── LIGHTGBM_50_analyze_gain_features.png
│ │ ├── LIGHTGBM_50_analyze_shap_features.png
│ │ ├── LIGHTGBM_50_analyze_split_features.png
│ │ ├── XGBOOST_0_analyze_weight_features.png
│ │ ├── XGBOOST_100_analyze_cover_features.png
│ │ ├── XGBOOST_100_analyze_gain_features.png
│ │ ├── XGBOOST_100_analyze_shap_features.png
│ │ ├── XGBOOST_100_analyze_weight_features.png
│ │ ├── XGBOOST_150_analyze_cover_features.png
│ │ ├── XGBOOST_150_analyze_gain_features.png
│ │ ├── XGBOOST_150_analyze_shap_features.png
│ │ ├── XGBOOST_150_analyze_weight_features.png
│ │ ├── XGBOOST_50_analyze_cover_features.png
│ │ └── XGBOOST_50_analyze_weight_features.png
│ │ ├── metrics
│ │ ├── xgboost_confussion_matrix.txt
│ │ └── lightgbm_confussion_matrix.txt
│ │ └── backtest_summary.csv
├── Pipfile
├── cloudbuild.yaml
├── test.py
├── ml.yaml
├── requirements.txt
├── Dockerfile
└── README.md
├── app
├── app
│ ├── __init__.py
│ ├── utils
│ │ ├── __init__.py
│ │ ├── form_utils.py
│ │ └── build.py
│ ├── api
│ │ ├── __init__.py
│ │ └── views.py
│ ├── bot
│ │ ├── __init__.py
│ │ ├── dev-agent.zip
│ │ └── response.py
│ ├── forms
│ │ └── __init__.py
│ ├── web
│ │ ├── account
│ │ │ └── __init__.py
│ │ ├── public
│ │ │ ├── __init__.py
│ │ │ └── views.py
│ │ ├── strategy
│ │ │ └── __init__.py
│ │ └── __init__.py
│ ├── models
│ │ ├── migrations
│ │ │ ├── README
│ │ │ ├── script.py.mako
│ │ │ ├── versions
│ │ │ │ ├── 6c6917015a3c_.py
│ │ │ │ ├── 9d05939446cd_.py
│ │ │ │ ├── 1d39fb6ab24a_.py
│ │ │ │ ├── d6722da119b0_.py
│ │ │ │ ├── b060ded0ae81_.py
│ │ │ │ ├── f3458b927eba_.py
│ │ │ │ ├── 4814369ad5cc_basic_auth_model.py
│ │ │ │ ├── 2d71155a98ec_.py
│ │ │ │ ├── c5ca0f546394_.py
│ │ │ │ ├── 4d7d3d759e88_.py
│ │ │ │ ├── 5e29ad8a0e89_.py
│ │ │ │ ├── e3202524923f_.py
│ │ │ │ └── 7377a3591690_.py
│ │ │ ├── alembic.ini
│ │ │ └── env.py
│ │ └── __init__.py
│ ├── static
│ │ ├── kryptos-banner.jpg
│ │ └── images
│ │ │ ├── kryptos-icon.jpg
│ │ │ ├── WIP7BT6N_400x400.jpg
│ │ │ └── kryptos-background.jpg
│ ├── templates
│ │ ├── account
│ │ │ ├── strategies.html
│ │ │ ├── dashboard.html
│ │ │ ├── strategy_list.html
│ │ │ ├── telegram_auth.html
│ │ │ ├── user_exchanges.html
│ │ │ └── strategy_status.html
│ │ ├── flask_user
│ │ │ ├── emails
│ │ │ │ ├── base_message.txt
│ │ │ │ └── base_message.html
│ │ │ └── _common_base.html
│ │ ├── navbar.html
│ │ ├── _macros.html
│ │ ├── public
│ │ │ └── landing.html
│ │ ├── strategy
│ │ │ ├── signals.html
│ │ │ └── trading.html
│ │ └── base.html
│ ├── extensions.py
│ └── app.py
├── Procfile
├── autoapp.py
├── .dockerignore
├── Dockerfile
├── cloudbuild.yaml
├── Pipfile
├── app.yaml
├── updater.py
└── requirements.txt
├── core
├── kryptos
│ ├── analysis
│ │ ├── __init__.py
│ │ └── utils
│ │ │ └── __init__.py
│ ├── data
│ │ ├── __init__.py
│ │ ├── clients
│ │ │ ├── __init__.py
│ │ │ └── quandl_client.py
│ │ └── csv_data
│ │ │ ├── __init__.py
│ │ │ └── quandl
│ │ │ └── BCHAIN-datasets-codes.csv
│ ├── scripts
│ │ ├── __init__.py
│ │ ├── docker_shell.py
│ │ ├── strat.py
│ │ ├── trends.py
│ │ ├── stream_logs.py
│ │ ├── kill_strat.py
│ │ ├── bchain_activity.py
│ │ └── run_all_ta.py
│ ├── strategies
│ │ ├── __init__.py
│ │ ├── mean_reversion_simple.json
│ │ ├── dual_moving_average.json
│ │ ├── bbands_psar.json
│ │ ├── dynamic_rebalance.py
│ │ ├── buy_and_hodl.py
│ │ ├── bear_market.py
│ │ ├── bbands_psar.py
│ │ ├── mean_reversion_simple.py
│ │ └── buy_low_sell_high.py
│ ├── strategy
│ │ ├── signals
│ │ │ ├── __init_.py
│ │ │ └── utils.py
│ │ ├── __init__.py
│ │ ├── config.json
│ │ └── indicators
│ │ │ ├── basic.py
│ │ │ ├── ml.py
│ │ │ └── __init__.py
│ ├── __init__.py
│ ├── worker
│ │ ├── __init__.py
│ │ └── jobs.py
│ ├── utils
│ │ ├── __init__.py
│ │ ├── tasks.py
│ │ ├── load.py
│ │ └── auth.py
│ ├── logger.py
│ └── settings.py
├── Procfile
├── .dockerignore
├── cloudbuild.yaml
├── Pipfile
├── worker.yaml
├── monitor.py
├── Dockerfile
├── setup.py
├── requirements.txt
└── ingester.py
├── examples
├── paper_trade_example.json
├── obv.json
├── multiple_ta.json
├── sma_crossover.py
├── macdfix.py
├── rsi_test.json
├── sma_crossover.json
├── mystrategy.json
├── api_example.json
├── api_example.py
├── signals2.json
└── signals.json
├── LICENSE
├── .gitignore
├── ROADMAP.md
├── deployment.md
├── docker-compose.yaml
└── README.md
/ml/ml/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/app/app/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/app/app/utils/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/ml/ml/models/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/core/kryptos/analysis/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/core/kryptos/data/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/core/kryptos/scripts/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/core/kryptos/data/clients/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/core/kryptos/data/csv_data/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/core/kryptos/strategies/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/ml/ml/feature_selection/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/core/kryptos/analysis/utils/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/core/kryptos/strategy/signals/__init_.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/app/app/api/__init__.py:
--------------------------------------------------------------------------------
1 | from . import views
2 |
--------------------------------------------------------------------------------
/app/app/bot/__init__.py:
--------------------------------------------------------------------------------
1 | from . import assistant
2 |
--------------------------------------------------------------------------------
/app/app/forms/__init__.py:
--------------------------------------------------------------------------------
1 | from . import forms
2 |
--------------------------------------------------------------------------------
/app/app/web/account/__init__.py:
--------------------------------------------------------------------------------
1 | from . import views
2 |
--------------------------------------------------------------------------------
/app/app/web/public/__init__.py:
--------------------------------------------------------------------------------
1 | from . import views
2 |
--------------------------------------------------------------------------------
/app/app/web/strategy/__init__.py:
--------------------------------------------------------------------------------
1 | from . import views
2 |
--------------------------------------------------------------------------------
/app/app/models/migrations/README:
--------------------------------------------------------------------------------
1 | Generic single-database configuration.
--------------------------------------------------------------------------------
/app/app/models/__init__.py:
--------------------------------------------------------------------------------
1 | from .user import User, StrategyModel, UserExchangeAuth
2 |
--------------------------------------------------------------------------------
/core/kryptos/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | from .logger import logger_group, setup_logging, add_logger
3 |
--------------------------------------------------------------------------------
/core/kryptos/worker/__init__.py:
--------------------------------------------------------------------------------
1 | from . import jobs, manager, worker
2 | from .jobs import run_strat
--------------------------------------------------------------------------------
/app/app/bot/dev-agent.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/produvia/kryptos/HEAD/app/app/bot/dev-agent.zip
--------------------------------------------------------------------------------
/app/app/static/kryptos-banner.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/produvia/kryptos/HEAD/app/app/static/kryptos-banner.jpg
--------------------------------------------------------------------------------
/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/errors.txt:
--------------------------------------------------------------------------------
1 | ---------------
2 | 2018-09-12 17:10:24.428105
3 |
4 |
--------------------------------------------------------------------------------
/app/Procfile:
--------------------------------------------------------------------------------
1 | app: gunicorn -b :8080 autoapp:app
2 | dev: flask run --host=0.0.0.0 --port=8080
3 | updater: python updater.py
4 |
--------------------------------------------------------------------------------
/app/app/static/images/kryptos-icon.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/produvia/kryptos/HEAD/app/app/static/images/kryptos-icon.jpg
--------------------------------------------------------------------------------
/app/app/static/images/WIP7BT6N_400x400.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/produvia/kryptos/HEAD/app/app/static/images/WIP7BT6N_400x400.jpg
--------------------------------------------------------------------------------
/app/app/static/images/kryptos-background.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/produvia/kryptos/HEAD/app/app/static/images/kryptos-background.jpg
--------------------------------------------------------------------------------
/app/autoapp.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """Create an application instance."""
3 |
4 | from app.app import create_app
5 |
6 | app = create_app()
7 |
--------------------------------------------------------------------------------
/core/kryptos/utils/__init__.py:
--------------------------------------------------------------------------------
1 | from google.cloud import storage
2 |
3 | storage_client = storage.Client()
4 |
5 | from . import auth, load, outputs, tasks, viz
6 |
--------------------------------------------------------------------------------
/app/app/templates/account/strategies.html:
--------------------------------------------------------------------------------
1 | {% extends 'base.html' %}
2 |
3 | {% block content %}
4 | {% include "account/strategy_list.html" %}
5 | {% endblock %}
6 |
--------------------------------------------------------------------------------
/app/app/templates/flask_user/emails/base_message.txt:
--------------------------------------------------------------------------------
1 | Dear User,
2 |
3 | {% block message %}
4 | {% endblock %}
5 |
6 | Sincerely,
7 | {{ app_name }} by Produvia Team
8 |
--------------------------------------------------------------------------------
/app/app/web/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """The web module: a standard flask blueprint creating a landing page for the assistant"""
3 | from . import account, strategy, public
4 |
--------------------------------------------------------------------------------
/core/Procfile:
--------------------------------------------------------------------------------
1 | worker: python -u kryptos/worker/manager.py
2 | persist_worker: rqworker -c kryptos.settings
3 | ingest: python ingester.py
4 | monitor: gunicorn monitor:app --bind 0.0.0.0:8080
5 |
--------------------------------------------------------------------------------
/core/kryptos/scripts/docker_shell.py:
--------------------------------------------------------------------------------
1 | import subprocess
2 |
3 | import click
4 |
5 | @click.command()
6 | def run():
7 | subprocess.call(["docker", "exec", "-i", "-t", "web", "/bin/bash",])
--------------------------------------------------------------------------------
/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/summary_plot.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/produvia/kryptos/HEAD/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/summary_plot.png
--------------------------------------------------------------------------------
/app/app/templates/flask_user/emails/base_message.html:
--------------------------------------------------------------------------------
1 | Dear {{ user.email }},
2 |
3 | {% block message %}
4 | {% endblock %}
5 |
6 | Sincerely,
7 | {{ app_name }} by Produvia Team
8 |
9 |
--------------------------------------------------------------------------------
/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/LIGHTGBM_0_analyze_gain_features.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/produvia/kryptos/HEAD/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/LIGHTGBM_0_analyze_gain_features.png
--------------------------------------------------------------------------------
/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/LIGHTGBM_0_analyze_shap_features.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/produvia/kryptos/HEAD/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/LIGHTGBM_0_analyze_shap_features.png
--------------------------------------------------------------------------------
/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/XGBOOST_0_analyze_cover_features.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/produvia/kryptos/HEAD/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/XGBOOST_0_analyze_cover_features.png
--------------------------------------------------------------------------------
/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/XGBOOST_0_analyze_gain_features.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/produvia/kryptos/HEAD/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/XGBOOST_0_analyze_gain_features.png
--------------------------------------------------------------------------------
/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/XGBOOST_0_analyze_shap_features.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/produvia/kryptos/HEAD/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/XGBOOST_0_analyze_shap_features.png
--------------------------------------------------------------------------------
/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/XGBOOST_50_analyze_gain_features.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/produvia/kryptos/HEAD/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/XGBOOST_50_analyze_gain_features.png
--------------------------------------------------------------------------------
/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/XGBOOST_50_analyze_shap_features.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/produvia/kryptos/HEAD/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/XGBOOST_50_analyze_shap_features.png
--------------------------------------------------------------------------------
/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/LIGHTGBM_0_analyze_split_features.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/produvia/kryptos/HEAD/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/LIGHTGBM_0_analyze_split_features.png
--------------------------------------------------------------------------------
/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/LIGHTGBM_100_analyze_gain_features.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/produvia/kryptos/HEAD/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/LIGHTGBM_100_analyze_gain_features.png
--------------------------------------------------------------------------------
/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/LIGHTGBM_100_analyze_shap_features.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/produvia/kryptos/HEAD/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/LIGHTGBM_100_analyze_shap_features.png
--------------------------------------------------------------------------------
/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/LIGHTGBM_100_analyze_split_features.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/produvia/kryptos/HEAD/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/LIGHTGBM_100_analyze_split_features.png
--------------------------------------------------------------------------------
/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/LIGHTGBM_150_analyze_gain_features.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/produvia/kryptos/HEAD/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/LIGHTGBM_150_analyze_gain_features.png
--------------------------------------------------------------------------------
/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/LIGHTGBM_150_analyze_shap_features.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/produvia/kryptos/HEAD/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/LIGHTGBM_150_analyze_shap_features.png
--------------------------------------------------------------------------------
/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/LIGHTGBM_150_analyze_split_features.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/produvia/kryptos/HEAD/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/LIGHTGBM_150_analyze_split_features.png
--------------------------------------------------------------------------------
/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/LIGHTGBM_50_analyze_gain_features.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/produvia/kryptos/HEAD/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/LIGHTGBM_50_analyze_gain_features.png
--------------------------------------------------------------------------------
/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/LIGHTGBM_50_analyze_shap_features.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/produvia/kryptos/HEAD/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/LIGHTGBM_50_analyze_shap_features.png
--------------------------------------------------------------------------------
/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/LIGHTGBM_50_analyze_split_features.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/produvia/kryptos/HEAD/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/LIGHTGBM_50_analyze_split_features.png
--------------------------------------------------------------------------------
/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/XGBOOST_0_analyze_weight_features.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/produvia/kryptos/HEAD/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/XGBOOST_0_analyze_weight_features.png
--------------------------------------------------------------------------------
/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/XGBOOST_100_analyze_cover_features.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/produvia/kryptos/HEAD/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/XGBOOST_100_analyze_cover_features.png
--------------------------------------------------------------------------------
/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/XGBOOST_100_analyze_gain_features.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/produvia/kryptos/HEAD/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/XGBOOST_100_analyze_gain_features.png
--------------------------------------------------------------------------------
/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/XGBOOST_100_analyze_shap_features.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/produvia/kryptos/HEAD/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/XGBOOST_100_analyze_shap_features.png
--------------------------------------------------------------------------------
/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/XGBOOST_100_analyze_weight_features.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/produvia/kryptos/HEAD/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/XGBOOST_100_analyze_weight_features.png
--------------------------------------------------------------------------------
/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/XGBOOST_150_analyze_cover_features.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/produvia/kryptos/HEAD/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/XGBOOST_150_analyze_cover_features.png
--------------------------------------------------------------------------------
/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/XGBOOST_150_analyze_gain_features.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/produvia/kryptos/HEAD/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/XGBOOST_150_analyze_gain_features.png
--------------------------------------------------------------------------------
/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/XGBOOST_150_analyze_shap_features.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/produvia/kryptos/HEAD/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/XGBOOST_150_analyze_shap_features.png
--------------------------------------------------------------------------------
/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/XGBOOST_150_analyze_weight_features.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/produvia/kryptos/HEAD/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/XGBOOST_150_analyze_weight_features.png
--------------------------------------------------------------------------------
/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/XGBOOST_50_analyze_cover_features.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/produvia/kryptos/HEAD/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/XGBOOST_50_analyze_cover_features.png
--------------------------------------------------------------------------------
/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/XGBOOST_50_analyze_weight_features.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/produvia/kryptos/HEAD/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/feature_exploration/XGBOOST_50_analyze_weight_features.png
--------------------------------------------------------------------------------
/app/app/templates/flask_user/_common_base.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 |
3 | {% block main %}
4 |
5 |
6 |
7 | {% block content %}{% endblock %}
8 |
9 |
10 |
11 | {% endblock %}
12 |
--------------------------------------------------------------------------------
/core/kryptos/scripts/strat.py:
--------------------------------------------------------------------------------
1 | import click
2 | from kryptos.scripts import build_strategy, stress_worker, kill_strat
3 |
4 |
5 | @click.group(name="strat")
6 | def cli():
7 | pass
8 |
9 |
10 | cli.add_command(build_strategy.run, "build")
11 | cli.add_command(stress_worker.run, "stress")
12 | cli.add_command(kill_strat.run, "kill")
13 |
--------------------------------------------------------------------------------
/core/kryptos/strategy/__init__.py:
--------------------------------------------------------------------------------
1 | import os
2 | import json
3 |
4 | CURRENT_DIR = os.path.dirname(__file__)
5 | DEFAULT_CONFIG_FILE = os.path.abspath(os.path.join(CURRENT_DIR, "./config.json"))
6 |
7 | with open(DEFAULT_CONFIG_FILE, "r") as f:
8 | DEFAULT_CONFIG = json.load(f)
9 |
10 |
11 | from kryptos.strategy.strategy import Strategy
12 |
--------------------------------------------------------------------------------
/examples/paper_trade_example.json:
--------------------------------------------------------------------------------
1 | {
2 | "trading": {
3 | "EXCHANGE": "bittrex",
4 | "ASSET": "btc_usdt",
5 | "DATA_FREQ": "minute",
6 | "HISTORY_FREQ": "30T",
7 | "CAPITAL_BASE": 5000,
8 | "QUOTE_CURRENCY": "usd",
9 | "BARS": 50,
10 | "START": "2017-10-12",
11 | "END": "2017-10-13"
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/app/app/extensions.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """Extensions module. Each extension is initialized in the app factory located in app.py."""
3 | from flask_cors import CORS
4 | from flask_sqlalchemy import SQLAlchemy
5 | from flask_migrate import Migrate
6 | from raven.contrib.flask import Sentry
7 |
8 |
9 | cors = CORS()
10 | db = SQLAlchemy()
11 | migrate = Migrate()
12 | sentry = Sentry()
--------------------------------------------------------------------------------
/app/.dockerignore:
--------------------------------------------------------------------------------
1 | .dockerignore
2 | Dockerfile
3 | db.sqlite3
4 | __pycache__
5 | *.pyc
6 | *.pyo
7 | *.pyd
8 | .Python
9 | env
10 | pip-log.txt
11 | pip-delete-this-directory.txt
12 | .tox
13 | .coverage
14 | .coverage.*
15 | .cache
16 | coverage.xml
17 | *,cover
18 | *.log
19 | .git
20 | *.egg-info
21 |
22 |
23 |
24 | # Editor directories and files
25 | .idea
26 | .vscode
27 | *.suo
28 | *.ntvs*
29 | *.njsproj
30 | *.sln
31 |
--------------------------------------------------------------------------------
/core/.dockerignore:
--------------------------------------------------------------------------------
1 | .dockerignore
2 | Dockerfile
3 | db.sqlite3
4 | __pycache__
5 | *.pyc
6 | *.pyo
7 | *.pyd
8 | .Python
9 | env
10 | pip-log.txt
11 | pip-delete-this-directory.txt
12 | .tox
13 | .coverage
14 | .coverage.*
15 | .cache
16 | coverage.xml
17 | *,cover
18 | *.log
19 | .git
20 | logs
21 | performance_results
22 | *.egg-info
23 | edas
24 |
25 |
26 | # Editor directories and files
27 | .idea
28 | .vscode
29 | *.suo
30 | *.ntvs*
31 | *.njsproj
32 | *.sln
33 |
--------------------------------------------------------------------------------
/ml/Pipfile:
--------------------------------------------------------------------------------
1 | [[source]]
2 | url = "https://pypi.python.org/simple"
3 | verify_ssl = true
4 | name = "pypi"
5 |
6 | [packages]
7 | pandas = "*"
8 | scipy = "*"
9 | ta-lib = "*"
10 | ta = "*"
11 | numpy = "*"
12 | rq = "*"
13 | logbook = "*"
14 | raven = "*"
15 | lightgbm = "*"
16 | matplotlib = "*"
17 | xgboost = "*"
18 | google-cloud-datastore = "*"
19 | hyperopt = "*"
20 | shap = "*"
21 | pandas-profiling = "*"
22 |
23 | [dev-packages]
24 |
25 | [requires]
26 | python_version = "3.6"
27 |
--------------------------------------------------------------------------------
/ml/ml/utils/__init__.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | from ml.settings import PERF_DIR
4 |
5 |
def merge_two_dicts(x, y):
    """Return a new dict with x's items, overridden by y's on key collisions.

    Neither input dict is modified.
    """
    merged = x.copy()
    merged.update(y)
    return merged
10 |
11 |
# TODO set up perf dir to be available to all services
def get_algo_dir(namespace):
    """Return the absolute results directory for *namespace*, creating it if needed.

    Args:
        namespace (str): Sub-directory name under PERF_DIR (one per algo/run).

    Returns:
        str: Absolute path of the (now existing) directory.
    """
    algo_dir = os.path.join(os.path.abspath(PERF_DIR), namespace)
    # exist_ok avoids the check-then-create race when multiple workers
    # request the same namespace concurrently (the original exists()/makedirs
    # pair could raise FileExistsError under that race).
    os.makedirs(algo_dir, exist_ok=True)
    return algo_dir
18 |
--------------------------------------------------------------------------------
/app/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.6
2 |
3 | # copy only the requirements to prevent rebuild for any changes
4 | # need to have in subdir of app
5 | COPY requirements.txt /app/requirements.txt
6 | RUN pip install -r /app/requirements.txt
7 |
8 |
9 | # Above lines represent the dependencies
10 | # below lines represent the actual app
11 | # Only the actual app should be rebuilt upon changes
12 | COPY . /app
13 |
14 | ## all app code needs to be in /app/app
15 | ## uwsgi needs to be in /app
16 |
17 | WORKDIR /app
18 | ENTRYPOINT ["honcho", "start", "app", "updater"]
19 |
--------------------------------------------------------------------------------
/app/app/templates/account/dashboard.html:
--------------------------------------------------------------------------------
1 | {% extends 'base.html' %}
2 |
3 | {% block content %}
4 |
5 | {%trans%}User Dashboard{%endtrans%}
6 |
7 |
8 |
9 |
10 | {% include "account/telegram_auth.html" %}
11 |
12 |
13 | {% if current_user.strategies %}
14 | {% include "account/strategy_list.html" %}
15 | {% endif %}
16 |
17 |
18 |
19 |
20 | {% endblock %}
21 |
--------------------------------------------------------------------------------
/ml/cloudbuild.yaml:
--------------------------------------------------------------------------------
1 | steps:
2 | - name: 'gcr.io/cloud-builders/docker'
3 | args: ['pull', 'gcr.io/$PROJECT_ID/kryptos-ml']
4 | - name: 'gcr.io/cloud-builders/docker'
5 | args: [
6 | 'build',
7 | '-t', 'gcr.io/$PROJECT_ID/kryptos-ml',
8 | '--cache-from', 'gcr.io/$PROJECT_ID/kryptos-ml',
9 | 'ml/'
10 | ]
11 |
12 | - name: "gcr.io/cloud-builders/gcloud"
13 | args: ["app", "deploy", 'ml/ml.yaml', '--image-url=gcr.io/$PROJECT_ID/kryptos-ml', '--stop-previous-version']
14 |
15 | images: ['gcr.io/$PROJECT_ID/kryptos-ml']
16 | timeout: '1200s'
17 |
--------------------------------------------------------------------------------
/app/cloudbuild.yaml:
--------------------------------------------------------------------------------
1 | steps:
2 | - name: 'gcr.io/cloud-builders/docker'
3 | args: ['pull', 'gcr.io/$PROJECT_ID/kryptos-app']
4 | - name: 'gcr.io/cloud-builders/docker'
5 | args: [
6 | 'build',
7 | '-t', 'gcr.io/$PROJECT_ID/kryptos-app',
8 | '--cache-from', 'gcr.io/$PROJECT_ID/kryptos-app',
9 | 'app/'
10 | ]
11 |
12 | - name: "gcr.io/cloud-builders/gcloud"
13 | args: ["app", "deploy", 'app/app.yaml', '--image-url=gcr.io/$PROJECT_ID/kryptos-app', '--stop-previous-version']
14 |
15 | images: ['gcr.io/$PROJECT_ID/kryptos-app']
16 | timeout: '1200s'
17 |
--------------------------------------------------------------------------------
/ml/ml/utils/input_data_report.py:
--------------------------------------------------------------------------------
1 | import pandas_profiling
2 | import os
3 |
4 | from ml.utils import get_algo_dir
5 |
def profile_report(df, idx, namespace, name, configuration):
    """Write a pandas-profiling HTML report for *df* every ``n_iterations`` steps.

    Nothing is written unless ``configuration['enabled']`` is truthy and *idx*
    is a multiple of ``configuration['n_iterations']``.

    Args:
        df (pandas.DataFrame): Data to profile.
        idx (int): Current iteration index.
        namespace (str): Run namespace; report goes under
            ``<namespace>/profiling_report``.
        name (str): Model name embedded in the report file name.
        configuration (dict): Needs keys ``enabled`` and ``n_iterations``.
    """
    if not configuration['enabled']:
        return
    if idx % configuration['n_iterations'] != 0:
        return
    report = pandas_profiling.ProfileReport(df)
    out_dir = get_algo_dir(os.path.join(namespace, 'profiling_report'))
    report_file = "profiling_report_model_{}_id_{}_with_{}_columns.html".format(
        name, idx, len(df.columns))
    report.to_file(outputfile=os.path.join(out_dir, report_file))
13 |
--------------------------------------------------------------------------------
/core/cloudbuild.yaml:
--------------------------------------------------------------------------------
1 | steps:
2 | - name: 'gcr.io/cloud-builders/docker'
3 | args: ['pull', 'gcr.io/$PROJECT_ID/kryptos-worker']
4 | - name: 'gcr.io/cloud-builders/docker'
5 | args: [
6 | 'build',
7 | '-t', 'gcr.io/$PROJECT_ID/kryptos-worker',
8 | '--cache-from', 'gcr.io/$PROJECT_ID/kryptos-worker',
9 | 'core/'
10 | ]
11 |
12 | - name: "gcr.io/cloud-builders/gcloud"
13 | args: ["app", "deploy", 'core/worker.yaml', '--image-url=gcr.io/$PROJECT_ID/kryptos-worker', '--stop-previous-version']
14 |
15 | images: ['gcr.io/$PROJECT_ID/kryptos-worker']
16 | timeout: '2400s'
17 |
--------------------------------------------------------------------------------
/app/app/models/migrations/script.py.mako:
--------------------------------------------------------------------------------
1 | """${message}
2 |
3 | Revision ID: ${up_revision}
4 | Revises: ${down_revision | comma,n}
5 | Create Date: ${create_date}
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 | ${imports if imports else ""}
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = ${repr(up_revision)}
14 | down_revision = ${repr(down_revision)}
15 | branch_labels = ${repr(branch_labels)}
16 | depends_on = ${repr(depends_on)}
17 |
18 |
19 | def upgrade():
20 | ${upgrades if upgrades else "pass"}
21 |
22 |
23 | def downgrade():
24 | ${downgrades if downgrades else "pass"}
25 |
--------------------------------------------------------------------------------
/core/kryptos/strategies/mean_reversion_simple.json:
--------------------------------------------------------------------------------
1 | {
2 | "trading": {
3 | "EXCHANGE": "bitfinex",
4 | "ASSET": "btc_usd",
5 | "DATA_FREQ": "daily",
6 | "HISTORY_FREQ": "1d",
7 | "CAPITAL_BASE": 5000,
8 | "QUOTE_CURRENCY": "usd",
9 | "START": "2017-10-10",
10 | "END": "2018-3-28"
11 | },
12 | "datasets": [],
13 | "indicators": [
14 | {
15 | "name": "RSI",
16 | "symbol": "btc_usd",
17 | "dataset": null,
18 | "label": "RSI",
19 | "params": {
20 | "timeperiod": 14
21 | }
22 | }
23 | ],
24 | "signals": {}
25 | }
26 |
--------------------------------------------------------------------------------
/examples/obv.json:
--------------------------------------------------------------------------------
1 | {
2 | "trading": {
3 | "EXCHANGE": "bitfinex",
4 | "ASSET": "btc_usd",
5 | "DATA_FREQ": "daily",
6 | "HISTORY_FREQ": "1d",
7 | "CAPITAL_BASE": 5000,
8 | "QUOTE_CURRENCY": "usd",
9 | "START": "2016-10-10",
10 | "END": "2018-3-28",
11 | "BARS": 50,
12 | "ORDER_SIZE": 0.5,
13 | "SLIPPAGE_ALLOWED": 0.05
14 | },
15 | "datasets": [],
16 | "indicators": [
17 | {
18 | "name": "OBV",
19 | "symbol": "btc_usd",
20 | "dataset": null,
21 | "label": "OBV",
22 | "params": {}
23 | }
24 | ],
25 | "signals": {}
26 | }
27 |
--------------------------------------------------------------------------------
/core/kryptos/strategy/config.json:
--------------------------------------------------------------------------------
1 | {
2 | "EXCHANGE": "binance",
3 | "ASSET": "btc_usdt",
4 | "DATA_FREQ": "minute",
5 | "HISTORY_FREQ": "1T",
6 |   "MINUTE_FREQ": "5", "MINUTE_FREQ_doc": "minute interval",
7 |   "MINUTE_TO_OPERATE": "1", "MINUTE_TO_OPERATE_doc": "minute to operate on range [0, MINUTE_FREQ]",
8 | "CAPITAL_BASE": 20000,
9 | "QUOTE_CURRENCY": "usdt",
10 | "START": "2018-8-10",
11 | "END": "2018-8-15",
12 |   "BARS": 2000, "BARS_doc": "Use values > 200 if you want apply ML techniques",
13 | "ORDER_SIZE": 0.5,
14 | "SLIPPAGE_ALLOWED": 0.01,
15 | "MAKER_COMMISSION": 0.0,
16 | "TAKER_COMMISSION": 0.0,
17 | "TAKE_PROFIT": 0.04,
18 | "STOP_LOSS": 0.02
19 |
20 | }
21 |
--------------------------------------------------------------------------------
/app/Pipfile:
--------------------------------------------------------------------------------
1 | [[source]]
2 | url = "https://pypi.python.org/simple"
3 | verify_ssl = true
4 | name = "pypi"
5 |
6 | [packages]
7 | flask = "*"
8 | rq = "*"
9 | rq-dashboard = "*"
10 | flask-cors = "*"
11 | flask-assistant = "*"
12 | "aniso8601" = "*"
13 | flask-user = "*"
14 | flask-migrate = "*"
15 | "psycopg2" = "*"
16 | python-telegram-bot = "*"
17 | gunicorn = "*"
18 | shortuuid = "*"
19 | honcho = "*"
20 | google-cloud-datastore = "*"
21 | logbook = "*"
22 | raven = {extras = ["flask"]}
23 | google-cloud-storage = "*"
24 | google-cloud-kms = "*"
25 | ccxt = "*"
26 | numpy = "*"
27 |
28 | [dev-packages]
29 | black = "==18.4a4"
30 |
31 | [requires]
32 | python_version = "3.6"
33 |
34 | [pipenv]
35 | allow_prereleases = true
36 |
--------------------------------------------------------------------------------
/app/app/templates/account/strategy_list.html:
--------------------------------------------------------------------------------
1 |
2 | Strategies
3 |
4 |
5 | {% for strat in current_user.strategies %}
6 |
7 |
8 |
9 |
{{strat.name}}
10 |
11 | Status {{strat.status}}
12 | Created: {{strat.created_at}}
13 |
View
14 |
15 |
16 |
17 | {% endfor %}
18 |
19 |
20 |
--------------------------------------------------------------------------------
/ml/test.py:
--------------------------------------------------------------------------------
"""File to test ml service"""
import pandas as pd
from worker import *

# read data
df = pd.read_csv('data/datas.csv', index_col="index", sep=',')
df_final = pd.DataFrame()

# prepare data
df = df.to_json()
idx = 0
# Bug fix: pd.tslib was private and is removed in pandas >= 0.24;
# pd.Timestamp is the public constructor with identical behavior.
current_datetime = pd.Timestamp('2016-03-03 00:00:00')
df_final_json = df_final.to_json()
data_freq = 'minute'
hyper_params = None
namespace = 'inventado'

# calculate: exercise both supported model backends
for name in ('LIGHTGBM', 'XGBOOST'):
    results = calculate(namespace, df, name, idx, current_datetime, df_final_json, data_freq, hyper_params)
print('final')
23 |
--------------------------------------------------------------------------------
/core/Pipfile:
--------------------------------------------------------------------------------
1 | [[source]]
2 | url = "https://pypi.python.org/simple"
3 | verify_ssl = true
4 | name = "pypi"
5 |
6 | [packages]
7 | numpy = "*"
8 | matplotlib = "*"
9 | ta-lib = "*"
10 | quandl = "*"
11 | click = "*"
12 | pytrends = "*"
13 | colorama = "*"
14 | rq = "*"
15 | rq-dashboard = "*"
16 | enigma-catalyst = "*"
17 | setuptools = "==38.5.1"
18 | "psycopg2" = "*"
19 | logbook = "*"
20 | "e1839a8" = {path = ".", editable = true}
21 | honcho = "*"
22 | google-cloud = "*"
23 | google-cloud-datastore = "*"
24 | gunicorn = "*"
25 | raven = "*"
26 | google-cloud-storage = "*"
27 | google-cloud-kms = "*"
28 | google-cloud-logging = "*"
29 |
30 | [dev-packages]
31 | black = "==18.4a4"
32 |
33 | [requires]
34 | python_version = "3.6"
35 |
36 | [pipenv]
37 | allow_prereleases = true
38 |
--------------------------------------------------------------------------------
/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/metrics/xgboost_confussion_matrix.txt:
--------------------------------------------------------------------------------
1 | 2018/09/12 17:10:24
2 | Date Start: 2018-7-1
3 | Date End: 2018-8-15
4 | Minute Frequency: 360
5 | Accuracy: 0.5409836065573771
6 | Coefficient Kappa: 0.08237822349570201
7 | Classification Report:
8 | precision recall f1-score support
9 |
10 | KEEP 0.50 0.55 0.52 84
11 | UP 0.58 0.54 0.56 99
12 |
13 | avg / total 0.54 0.54 0.54 183
14 | Confussion Matrix:
15 | [[46 38]
16 | [46 53]]
17 | Return Profit Percentage: 0.012891579574750844
18 | Sharpe Ratio: 1.170596616355189
19 | Sortino Ratio:
20 | Sharpe Ratio (Bitcoin Benchmark): -0.7944863044409247
21 | Sortino Ratio (Bitcoin Benchmark):
22 |
--------------------------------------------------------------------------------
/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/metrics/lightgbm_confussion_matrix.txt:
--------------------------------------------------------------------------------
1 | 2018/09/12 17:10:25
2 | Date Start: 2018-7-1
3 | Date End: 2018-8-15
4 | Minute Frequency: 360
5 | Accuracy: 0.5300546448087432
6 | Coefficient Kappa: 0.06388294075660239
7 | Classification Report:
8 | precision recall f1-score support
9 |
10 | KEEP 0.49 0.56 0.52 84
11 | UP 0.57 0.51 0.54 99
12 |
13 | avg / total 0.54 0.53 0.53 183
14 | Confussion Matrix:
15 | [[47 37]
16 | [49 50]]
17 | Return Profit Percentage: 0.012891579574750844
18 | Sharpe Ratio: 1.170596616355189
19 | Sortino Ratio:
20 | Sharpe Ratio (Bitcoin Benchmark): -0.7944863044409247
21 | Sortino Ratio (Bitcoin Benchmark):
22 |
--------------------------------------------------------------------------------
/app/app/models/migrations/versions/6c6917015a3c_.py:
--------------------------------------------------------------------------------
1 | """empty message
2 |
3 | Revision ID: 6c6917015a3c
4 | Revises: 7377a3591690
5 | Create Date: 2018-07-24 17:09:47.242789
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '6c6917015a3c'
14 | down_revision = '7377a3591690'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Add an explicit UNIQUE constraint on strategies.id (name auto-chosen by the DB).
    op.create_unique_constraint(None, 'strategies', ['id'])
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): Alembic generally cannot drop a constraint with name=None;
    # the DB-generated constraint name should be filled in — TODO confirm.
    op.drop_constraint(None, 'strategies', type_='unique')
    # ### end Alembic commands ###
29 |
--------------------------------------------------------------------------------
/app/app/models/migrations/versions/9d05939446cd_.py:
--------------------------------------------------------------------------------
1 | """empty message
2 |
3 | Revision ID: 9d05939446cd
4 | Revises: 1d39fb6ab24a
5 | Create Date: 2018-07-24 20:12:53.214761
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '9d05939446cd'
14 | down_revision = '1d39fb6ab24a'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Re-add telegram_auth_date as an Integer (revision 1d39fb6ab24a dropped
    # the earlier TIMESTAMP version) — presumably a unix epoch; TODO confirm.
    op.add_column('users', sa.Column('telegram_auth_date', sa.Integer(), nullable=True))
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('users', 'telegram_auth_date')
    # ### end Alembic commands ###
29 |
--------------------------------------------------------------------------------
/core/worker.yaml:
--------------------------------------------------------------------------------
1 | runtime: custom
2 | env: flex
3 | service: worker
4 | skip_files: [logs, performance_results]
5 | env_variables:
6 | # this doesn't need to be kept secret
7 | # but may need to be changed if different instances
8 | # should log to different sentry projects
9 | SENTRY_DSN: 'https://919694577d5b4907a54de058d3df359b:f2426c5c9e274fcfb1d84295107e5b60@sentry.io/1269706'
10 | REDIS_HOST: 10.0.0.3
11 | REDIS_PORT: 6379
12 | PROJECT_ID: kryptos-205115
13 | CONFIG_ENV: 'production'
14 | CLOUD_LOGGING: True
15 | network:
16 | name: default
17 |
18 | readiness_check:
19 | app_start_timeout_sec: 3600
20 |
21 | resources:
22 | cpu: 1
23 | memory_gb: 6
24 | disk_size_gb: 10 # min
25 |
26 | automatic_scaling:
27 | min_num_instances: 1
28 | max_num_instances: 3
29 | cool_down_period_sec: 240
30 | cpu_utilization:
31 | target_utilization: 0.6
32 |
33 |
--------------------------------------------------------------------------------
/app/app/models/migrations/versions/1d39fb6ab24a_.py:
--------------------------------------------------------------------------------
1 | """empty message
2 |
3 | Revision ID: 1d39fb6ab24a
4 | Revises: 5e29ad8a0e89
5 | Create Date: 2018-07-24 20:09:48.432413
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 | from sqlalchemy.dialects import postgresql
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '1d39fb6ab24a'
14 | down_revision = '5e29ad8a0e89'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Drop the TIMESTAMP-typed column; any stored values are lost on upgrade.
    op.drop_column('users', 'telegram_auth_date')
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('users', sa.Column('telegram_auth_date', postgresql.TIMESTAMP(), autoincrement=False, nullable=True))
    # ### end Alembic commands ###
29 |
--------------------------------------------------------------------------------
/core/kryptos/strategies/dual_moving_average.json:
--------------------------------------------------------------------------------
1 | {
2 | "trading": {
3 | "EXCHANGE": "bitfinex",
4 | "ASSET": "btc_usd",
5 | "DATA_FREQ": "daily",
6 | "HISTORY_FREQ": "1d",
7 | "CAPITAL_BASE": 5000,
8 | "QUOTE_CURRENCY": "usd",
9 | "START": "2017-08-10",
10 | "END": "2018-3-28",
11 | "data_frequency": "daily"
12 | },
13 | "datasets": [],
14 | "indicators": [
15 | {
16 | "name": "EMA",
17 | "symbol": "btc_usd",
18 | "dataset": null,
19 | "label": "SMA_SHORT",
20 | "params": {
21 | "timeperiod": 150
22 | }
23 | },
24 | {
25 | "name": "EMA",
26 | "symbol": "btc_usd",
27 | "dataset": null,
28 | "label": "SMA_LONG",
29 | "params": {
30 | "timeperiod": 200
31 | }
32 | }
33 | ],
34 | "signals": {}
35 | }
36 |
--------------------------------------------------------------------------------
/core/kryptos/scripts/trends.py:
--------------------------------------------------------------------------------
1 | import click
2 | from logbook import Logger
3 |
4 | from kryptos.settings import DEFAULT_CONFIG as CONFIG
5 | from kryptos.strategy import Strategy
6 |
7 |
8 | log = Logger("Blockchain Activity")
9 |
10 |
@click.command()
@click.argument("keywords", nargs=-1)
@click.option("--asset", "-a", is_flag=True, help="Include asset in keyword list")
def run(keywords, asset):
    """Runs strategy using Google Search Trends

    Example:
        trends 'btc' 'btc usd' 'btc price'
    """
    search_terms = list(keywords)
    if asset:
        # e.g. "btc_usd" -> "btc usd" so the trends API receives a phrase
        search_terms.append(CONFIG["ASSET"].replace("_", " "))

    strat = Strategy()
    strat.use_dataset("google", columns=search_terms)
    click.secho("Analysis Google Trends:\n{}".format(search_terms), fg="white")
    strat.run()
31 |
32 |
33 | if __name__ == "__main__":
34 | run()
35 |
--------------------------------------------------------------------------------
/app/app/models/migrations/versions/d6722da119b0_.py:
--------------------------------------------------------------------------------
1 | """empty message
2 |
3 | Revision ID: d6722da119b0
4 | Revises: 9d05939446cd
5 | Create Date: 2018-07-27 20:32:29.741736
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = 'd6722da119b0'
14 | down_revision = '9d05939446cd'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Attach backtest output and lifecycle state to strategies.
    op.add_column('strategies', sa.Column('results_json', sa.JSON(), nullable=True))
    op.add_column('strategies', sa.Column('status', sa.String(), nullable=True))
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('strategies', 'status')
    op.drop_column('strategies', 'results_json')
    # ### end Alembic commands ###
31 |
--------------------------------------------------------------------------------
/core/monitor.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from flask import Flask
3 | import redis
4 |
5 | from kryptos.settings import REDIS_HOST, REDIS_PORT
6 |
7 |
8 | app = Flask(__name__)
9 |
10 | redis_client = redis.StrictRedis(host=REDIS_HOST, port=REDIS_PORT)
11 |
12 |
@app.route('/')
def index():
    """Increment and report the Redis-backed visitor counter.

    Doubles as a cheap health check that the Redis connection works.

    Returns:
        tuple: (response body, 200)
    """
    # Bug fix: the original passed the unbound str.format method to the
    # logger instead of a formatted message; use lazy %-style logging args.
    # (logging.warn is also a deprecated alias of logging.warning.)
    logging.warning('Testing redis connection %s:%s', REDIS_HOST, REDIS_PORT)
    value = redis_client.incr('counter', 1)
    return 'Visitor number: {}'.format(value), 200
18 |
19 |
@app.errorhandler(500)
def server_error(e):
    """Log the full stack trace for an unhandled error and return a 500 page."""
    logging.exception('An error occurred during a request.')
    body = """
    An internal error occurred: {}
    See logs for full stacktrace.
    """.format(e)
    return body, 500
27 |
28 |
29 | if __name__ == '__main__':
30 | # This is used when running locally. Gunicorn is used to run the
31 | # application on Google App Engine. See entrypoint in app.yaml.
32 | app.run(host='127.0.0.1', port=8080, debug=True)
33 |
--------------------------------------------------------------------------------
/ml/ml.yaml:
--------------------------------------------------------------------------------
1 | runtime: custom
2 | env: flex
3 | service: ml
4 | runtime_config:
5 | python_version: 3
6 |
7 | env_variables:
8 |
9 | # this doesn't need to be kept secret
10 | # but may need to be changed if different instances
11 | # should log to different sentry projects
12 | SENTRY_DSN: 'https://919694577d5b4907a54de058d3df359b:f2426c5c9e274fcfb1d84295107e5b60@sentry.io/1269706'
13 | REDIS_HOST: 10.0.0.3
14 | REDIS_PORT: 6379
15 | CONFIG_ENV: 'production'
16 | PROJECT_ID: kryptos-205115
17 |
18 | network:
19 | name: default
20 | # This sample incurs costs to run on the App Engine flexible environment.
21 | # The settings below are to reduce costs during testing and are not appropriate
22 | # for production use. For more information, see:
23 | # https://cloud.google.com/appengine/docs/flexible/python/configuring-your-app-with-app-yaml
24 | manual_scaling:
25 | instances: 1
26 | # resources:
27 | # cpu: 1
28 | # memory_gb: 0.5
29 | # disk_size_gb: 10
30 |
--------------------------------------------------------------------------------
/app/app/models/migrations/alembic.ini:
--------------------------------------------------------------------------------
1 | # A generic, single database configuration.
2 |
3 | [alembic]
4 | # template used to generate migration files
5 | # file_template = %%(rev)s_%%(slug)s
6 |
7 | # set to 'true' to run the environment during
8 | # the 'revision' command, regardless of autogenerate
9 | # revision_environment = false
10 |
11 |
12 | # Logging configuration
13 | [loggers]
14 | keys = root,sqlalchemy,alembic
15 |
16 | [handlers]
17 | keys = console
18 |
19 | [formatters]
20 | keys = generic
21 |
22 | [logger_root]
23 | level = WARN
24 | handlers = console
25 | qualname =
26 |
27 | [logger_sqlalchemy]
28 | level = WARN
29 | handlers =
30 | qualname = sqlalchemy.engine
31 |
32 | [logger_alembic]
33 | level = INFO
34 | handlers =
35 | qualname = alembic
36 |
37 | [handler_console]
38 | class = StreamHandler
39 | args = (sys.stderr,)
40 | level = NOTSET
41 | formatter = generic
42 |
43 | [formatter_generic]
44 | format = %(levelname)-5.5s [%(name)s] %(message)s
45 | datefmt = %H:%M:%S
46 |
--------------------------------------------------------------------------------
/ml/results/Strat-b85b15d0-b674-11e8-bedc-0242ac120006/backtest_summary.csv:
--------------------------------------------------------------------------------
1 | ,Backtest
2 | start_date,2018-07-01
3 | end_date,2018-08-15
4 | backtest_minutes,2878.0
5 | backtest_days,45.99930555555556
6 | backtest_weeks,6.571329365079365
7 | number_of_trades,37
8 | average_trades_per_week_avg,5.630519784417035
9 | average_trade_amount_usd,93.47481981647005
10 | initial_capital,20000.0
11 | ending_capital,20257.831591495004
12 | net_profit,257.8315914950035
13 | net_profit_pct,1.2891579574750176
14 | average_daily_profit,5.6051192160631205
15 | average_daily_profit_pct,0.028025596080315605
16 | average_exposure,0.0
17 | average_exposure_pct,0.0
18 | net_risk_adjusted_return_pct,inf
19 | max_drawdown_pct_catalyst,-5.147263992823796
20 | max_daily_drawdown_pct,-0.2281287278932038
21 | max_weekly_drawdown_pct,-0.4349018255988
22 | sharpe_ratio_avg,2.8366213891911722
23 | std_rolling_10_day_pct_avg,0.00010417725848038056
24 | std_rolling_100_day_pct_avg,0.00034387531302030206
25 | number_of_simulations,66240
26 |
--------------------------------------------------------------------------------
/app/app/templates/navbar.html:
--------------------------------------------------------------------------------
1 |
20 |
--------------------------------------------------------------------------------
/core/kryptos/strategies/bbands_psar.json:
--------------------------------------------------------------------------------
1 | {
2 | "trading": {
3 | "EXCHANGE": "bitfinex",
4 | "ASSET": "btc_usd",
5 | "DATA_FREQ": "daily",
6 | "HISTORY_FREQ": "1d",
7 | "CAPITAL_BASE": 5000,
8 | "QUOTE_CURRENCY": "usd",
9 | "START": "2016-10-10",
10 | "END": "2018-3-28",
11 | "data_frequency": "daily"
12 | },
13 | "datasets": [],
14 | "indicators": [
15 | {
16 | "name": "SAR",
17 | "symbol": "btc_usd",
18 | "dataset": null,
19 | "label": "SAR",
20 | "params": {
21 | "acceleration": 0.02,
22 | "maximum": 0.2
23 | }
24 | },
25 | {
26 | "name": "BBANDS",
27 | "symbol": "btc_usd",
28 | "dataset": null,
29 | "label": "BBANDS",
30 | "params": {
31 | "timeperiod": 5,
32 | "nbdevup": 2,
33 | "nbdevdn": 2,
34 | "matype": 0
35 | }
36 | }
37 | ],
38 | "signals": {}
39 | }
40 |
--------------------------------------------------------------------------------
/app/app.yaml:
--------------------------------------------------------------------------------
1 | runtime: custom
2 | env: flex
3 | service: default
4 | runtime_config:
5 | python_version: 3
6 |
7 | env_variables:
8 | FLASK_DEBUG: 0
9 | FLASK_ENV: 'prod'
10 | FLASK_APP: autoapp:app
11 | TELEGRAM_BOT: KryptosAIBot
12 | REDIS_HOST: 10.0.0.3
13 | REDIS_PORT: 6379
14 | CONFIG_ENV: 'production'
15 | SENTRY_DSN: 'https://919694577d5b4907a54de058d3df359b:f2426c5c9e274fcfb1d84295107e5b60@sentry.io/1269706'
16 | PROJECT_ID: kryptos-205115
17 |
18 | beta_settings:
19 | cloud_sql_instances: kryptos-205115:us-west1:kryptos-db
20 |
21 | network:
22 | name: default
23 | # This sample incurs costs to run on the App Engine flexible environment.
24 | # The settings below are to reduce costs during testing and are not appropriate
25 | # for production use. For more information, see:
26 | # https://cloud.google.com/appengine/docs/flexible/python/configuring-your-app-with-app-yaml
27 | manual_scaling:
28 | instances: 1
29 | # resources:
30 | # cpu: 1
31 | # memory_gb: 0.5
32 | # disk_size_gb: 10
33 |
--------------------------------------------------------------------------------
/app/app/models/migrations/versions/b060ded0ae81_.py:
--------------------------------------------------------------------------------
1 | """empty message
2 |
3 | Revision ID: b060ded0ae81
4 | Revises: d6722da119b0
5 | Create Date: 2018-07-28 14:08:59.602941
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 | from sqlalchemy.dialects import postgresql
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = 'b060ded0ae81'
14 | down_revision = 'd6722da119b0'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Rename results_json -> result_json via add+drop; NOTE(review): no data
    # copy is performed, so existing results_json values are discarded —
    # an alter_column rename would preserve them. TODO confirm intent.
    op.add_column('strategies', sa.Column('result_json', sa.JSON(), nullable=True))
    op.drop_column('strategies', 'results_json')
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Reverse of the rename; likewise drops any data in result_json.
    op.add_column('strategies', sa.Column('results_json', postgresql.JSON(astext_type=sa.Text()), autoincrement=False, nullable=True))
    op.drop_column('strategies', 'result_json')
    # ### end Alembic commands ###
31 |
--------------------------------------------------------------------------------
/app/app/models/migrations/versions/f3458b927eba_.py:
--------------------------------------------------------------------------------
1 | """empty message
2 |
3 | Revision ID: f3458b927eba
4 | Revises: e3202524923f
5 | Create Date: 2018-07-29 03:25:24.358278
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = 'f3458b927eba'
14 | down_revision = 'e3202524923f'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Add a uuid column plus UNIQUE constraints on uuid and id
    # (constraint names auto-chosen by the DB).
    op.add_column('strategies', sa.Column('uuid', sa.String(), nullable=True))
    op.create_unique_constraint(None, 'strategies', ['uuid'])
    op.create_unique_constraint(None, 'strategies', ['id'])
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): dropping constraints with name=None generally fails in
    # Alembic — the two auto-generated names should be filled in. TODO confirm.
    op.drop_constraint(None, 'strategies', type_='unique')
    op.drop_constraint(None, 'strategies', type_='unique')
    op.drop_column('strategies', 'uuid')
    # ### end Alembic commands ###
33 |
--------------------------------------------------------------------------------
/examples/multiple_ta.json:
--------------------------------------------------------------------------------
1 | {
2 | "datasets": [],
3 | "indicators": [
4 | {
5 | "name": "MACDFIX",
6 | "symbol": "btc_usd",
7 | "dataset": null,
8 | "label": "MACDFIX",
9 | "params": {
10 | "signalperiod": 9
11 | }
12 | },
13 | {
14 | "name": "EMA",
15 | "symbol": "btc_usd",
16 | "dataset": null,
17 | "label": "EMA",
18 | "params": {
19 | "timeperiod": 30
20 | }
21 | },
22 | {
23 | "name": "SMA",
24 | "symbol": "btc_usd",
25 | "dataset": null,
26 | "label": "SMA",
27 | "params": {
28 | "timeperiod": 30
29 | }
30 | },
31 | {
32 | "name": "BBANDS",
33 | "symbol": "btc_usd",
34 | "dataset": null,
35 | "label": "BBANDS",
36 | "params": {
37 | "timeperiod": 5,
38 | "nbdevup": 2,
39 | "nbdevdn": 2,
40 | "matype": 0
41 | }
42 | }
43 | ],
44 | "signals": {}
45 | }
46 |
--------------------------------------------------------------------------------
/app/app/models/migrations/versions/4814369ad5cc_basic_auth_model.py:
--------------------------------------------------------------------------------
1 | """basic auth model
2 |
3 | Revision ID: 4814369ad5cc
4 | Revises: f3458b927eba
5 | Create Date: 2018-10-16 19:06:58.961599
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '4814369ad5cc'
14 | down_revision = 'f3458b927eba'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # One row per (user, exchange) credential record; user_id references
    # users.id and is nullable per the autogenerated schema.
    op.create_table('user_exchange_auth',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('exchange', sa.String(), nullable=False),
    sa.Column('user_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('id')
    )
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('user_exchange_auth')
    # ### end Alembic commands ###
36 |
--------------------------------------------------------------------------------
/core/kryptos/scripts/stream_logs.py:
--------------------------------------------------------------------------------
1 | import click
2 | import time
3 |
4 | import google.cloud.logging
5 | cloud_client = google.cloud.logging.Client()
6 |
@click.command()
@click.option('--strat-id', '-id', help='Strategy ID to monitor')
@click.option('--user-id', '-u')
@click.option('--mode', '-m')
@click.option('--logger', '-l')
def run(strat_id, user_id, mode, logger):
    """Tail Stackdriver log entries, filtered by logger name and/or
    jsonPayload fields (strat_id, user_id, mode), printing each
    entry's 'message' payload field."""

    # Default to the STRATEGY logger; --logger overrides the whole logName filter.
    FILTER = 'logName:STRATEGY'
    if logger:
        FILTER = f'logName:{logger}'
    if strat_id:
        FILTER += f' AND jsonPayload.strat_id={strat_id}'
    if user_id:
        FILTER += f' AND jsonPayload.user_id={user_id}'
    if mode:
        FILTER += f' AND jsonPayload.mode={mode}'

    iterator = cloud_client.list_entries(filter_=FILTER)
    pages = iterator.pages

    # Poll forever, printing each entry's 'message' payload field.
    # NOTE(review): once `pages` raises StopIteration, a generator keeps
    # raising it, so later entries may never be fetched unless list_entries()
    # is re-issued inside the loop — verify google-cloud-logging iterator
    # semantics. Also assumes every payload has a 'message' key (KeyError
    # otherwise) — TODO confirm.
    while True:
        try:
            page = next(pages)
            for entry in page:
                click.secho(entry.payload['message'])
        except StopIteration:
            click.secho('No logs, waiting for more')
            time.sleep(5)
35 |
--------------------------------------------------------------------------------
/ml/ml/feature_selection/wrapper.py:
--------------------------------------------------------------------------------
1 | from sklearn.feature_selection import RFE
2 | from sklearn.ensemble import RandomForestClassifier
3 |
def wrapper_feature_selection(X, y, percentage=0.8):
    """A predictive model is used to evaluate a combination of features and
    assign a score based on model accuracy.

    Args:
        X(pandas.DataFrame): Features columns.
        y(pandas.DataFrame): Target column.
        percentage(float): From 0 to 1, percentage of features to keep from total columns used.

    Returns:
        list: Name columns selected.

    Raises:
        ValueError: If percentage is outside [0, 1].
    """
    if percentage > 1.0 or percentage < 0.0:
        raise ValueError("'percentage' value is not valid [0, 1]")

    # Number of features to keep (the old "/1.0" divisor was a no-op).
    k_best = int(percentage * len(X.columns))

    classifier = RandomForestClassifier()
    rfe = RFE(classifier, n_features_to_select=k_best)
    rfe.fit(X, y)

    # rfe.ranking_ holds integer ranks (1 == selected), so the previous
    # round(x, 4) was a no-op; sort columns by rank and keep the best k.
    features_sorted_by_rank = sorted(zip(rfe.ranking_, X.columns))
    return [col for _, col in features_sorted_by_rank[:k_best]]
29 |
--------------------------------------------------------------------------------
/app/app/models/migrations/versions/2d71155a98ec_.py:
--------------------------------------------------------------------------------
1 | """empty message
2 |
3 | Revision ID: 2d71155a98ec
4 | Revises: c5ca0f546394
5 | Create Date: 2018-07-24 13:52:27.310679
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '2d71155a98ec'
14 | down_revision = 'c5ca0f546394'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
def upgrade():
    """Create the strategy table (string PK, owned by a user)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('strategy',
        sa.Column('id', sa.String(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id'),
        # NOTE(review): UniqueConstraint('id') duplicates the primary key.
        sa.UniqueConstraint('id')
    )
    # ### end Alembic commands ###


def downgrade():
    """Revert upgrade() by dropping the strategy table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('strategy')
    # ### end Alembic commands ###
37 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2019 Produvia Inc.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/app/app/web/public/views.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from flask import Blueprint, render_template
3 | from flask_user import current_user, current_app
4 | import redis
5 | import logging
6 |
7 | # Grouping 2 blueprints together
8 | blueprint = Blueprint("public", __name__, url_prefix="/")
9 |
10 |
@blueprint.route("/")
def home_page():
    """Render the public landing page for the current user."""
    return render_template(
        "public/landing.html",
        current_user=current_user,
    )
14 |
15 |
@blueprint.route("/testredis")
def index():
    """Smoke-test the Redis connection by incrementing a visit counter.

    Returns:
        str: Plain-text visitor count from the incremented Redis key.
    """
    REDIS_HOST, REDIS_PORT = current_app.config["REDIS_HOST"], current_app.config["REDIS_PORT"]

    redis_client = redis.StrictRedis(host=REDIS_HOST, port=REDIS_PORT)
    # Fixed: logger.warn() is a deprecated alias of warning(); also corrected
    # the "Conenction" typo in the log message.
    current_app.logger.warning(f"Testing Redis Connection: {REDIS_HOST}:{REDIS_PORT}")
    value = redis_client.incr("counter", 1)
    return "Visitor number: {}".format(value)
24 |
25 |
@blueprint.errorhandler(500)
def server_error(e):
    """Log the unexpected exception and return a plain-text 500 response."""
    # NOTE(review): Blueprint.errorhandler only catches errors raised from this
    # blueprint's own views; app-wide 500 handling would need app.errorhandler —
    # confirm the intended scope.
    logging.exception("An error occurred during a request.")
    return (
        """
    An internal error occurred: {}
    See logs for full stacktrace.
    """.format(e), 500)
34 |
--------------------------------------------------------------------------------
/app/app/models/migrations/versions/c5ca0f546394_.py:
--------------------------------------------------------------------------------
1 | """empty message
2 |
3 | Revision ID: c5ca0f546394
4 | Revises:
5 | Create Date: 2018-07-24 09:34:23.252807
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = 'c5ca0f546394'
14 | down_revision = None
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
def upgrade():
    """Create the users table: active flag, unique email, password hash."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('users',
        sa.Column('id', sa.Integer(), nullable=False),
        # server_default='1' keeps existing rows active when the column lands.
        sa.Column('is_active', sa.Boolean(), server_default='1', nullable=False),
        sa.Column('email', sa.String(length=255), nullable=False),
        sa.Column('email_confirmed_at', sa.DateTime(), nullable=True),
        sa.Column('password', sa.String(length=255), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('email')
    )
    # ### end Alembic commands ###


def downgrade():
    """Revert upgrade() by dropping the users table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('users')
    # ### end Alembic commands ###
37 |
--------------------------------------------------------------------------------
/app/updater.py:
--------------------------------------------------------------------------------
1 | import os
2 | import json
3 | import redis
4 | from rq import Connection
5 | from rq.worker import HerokuWorker as Worker
6 | import logbook
7 |
8 | from telegram import Bot
9 |
10 | from app.settings import get_from_datastore
11 |
12 | log = logbook.Logger("UPDATER")
13 |
CONFIG_ENV = os.getenv("CONFIG_ENV")

# Select the Telegram bot token for the current deploy environment.
if CONFIG_ENV == "dev":
    log.warn("Using dev telegram token")
    TELEGRAM_TOKEN = get_from_datastore("TELEGRAM_TOKEN", "dev")
else:
    log.warn("Using production telegram token")
    TELEGRAM_TOKEN = get_from_datastore("TELEGRAM_TOKEN", "production")


bot = Bot(TELEGRAM_TOKEN)

REDIS_HOST = os.getenv("REDIS_HOST", "10.0.0.3")
# BUG FIX: os.getenv returns a *string* when REDIS_PORT is set in the
# environment but the int default otherwise; normalize to int for redis.
REDIS_PORT = int(os.getenv("REDIS_PORT", 6379))

CONN = redis.Redis(host=REDIS_HOST, port=REDIS_PORT)
30 |
31 |
def send_notification(msg, telegram_id):
    """Send `msg` to the Telegram chat identified by `telegram_id`."""
    bot.send_message(text=msg, chat_id=telegram_id)
34 |
35 |
def start_worker():
    """Run an RQ worker consuming the 'updates' queue over the shared Redis connection.

    Blocks until the worker process is stopped.
    """
    with Connection(CONN):
        log.info("Starting update worker")
        worker = Worker("updates")
        worker.work()
41 |
42 |
# Start the worker only when executed as a script (not on import).
if __name__ == "__main__":
    start_worker()
45 |
--------------------------------------------------------------------------------
/ml/ml/feature_selection/filter.py:
--------------------------------------------------------------------------------
1 | from sklearn.feature_selection import SelectKBest
2 | from sklearn.feature_selection import f_regression# , mutual_info_regression
3 |
4 |
def filter_feature_selection(X, y, percentage=0.8):
    """Apply a statistical measure to assign a scoring to each feature, features
    are ranked by the score.
    Consider each feature independently / with regard to the dependent variable (class value).

    Args:
        X(pandas.DataFrame): Features columns.
        y(pandas.DataFrame): Target column.
        percentage(float): From 0 to 1, percentage of features to keep from total columns used.

    Returns:
        list: Name columns selected.

    Raises:
        ValueError: If percentage is outside [0, 1].
    """
    if percentage > 1.0 or percentage < 0.0:
        raise ValueError("'percentage' value is not valid [0, 1]")

    # Number of features to keep (the old "/1.0" divisor was a no-op).
    k_best = int(percentage * len(X.columns))

    test = SelectKBest(score_func=f_regression, k=k_best)
    fit = test.fit(X, y)

    # BUG FIX: get_support(indices=True) returns *positional* indices, but
    # the old X[selected_cols] selected columns by *label*, raising KeyError
    # for string-named columns. Map positions through X.columns instead.
    # (Also dropped the unused fit.transform(X) call.)
    selected_idx = fit.get_support(indices=True)
    return list(X.columns[selected_idx])
32 |
--------------------------------------------------------------------------------
/examples/sma_crossover.py:
--------------------------------------------------------------------------------
1 | from kryptos.strategy import Strategy
2 | from kryptos.strategy.signals import utils
3 | from kryptos.utils import viz
4 | import matplotlib.pyplot as plt
5 |
6 | config = './sma_crossover.json'
7 |
8 | strat = Strategy()
9 |
10 | strat.load_json_file(config)
11 |
12 |
13 | sma_fast = strat.indicator('SMA_FAST')
14 | sma_slow = strat.indicator('SMA_SLOW')
15 |
16 |
17 |
18 |
@strat.signal_sell(override=True)
def signal_sell(context, data):
    """Sell when the fast SMA crosses below the slow SMA."""
    return utils.cross_below(sma_fast.outputs.SMA_FAST, sma_slow.outputs.SMA_SLOW)
22 |
23 |
@strat.signal_buy(override=True)
def signal_buy(context, data):
    """Buy when the fast SMA crosses above the slow SMA."""
    return utils.cross_above(sma_fast.outputs.SMA_FAST, sma_slow.outputs.SMA_SLOW)
27 |
@strat.analyze(num_plots=1)
def extra_plot(context, results, pos):
    """Plot both SMAs and the price on one axis to visualize the crossovers."""
    viz.plot_column(results, 'SMA_FAST', pos, label='Fast', y_label='Crossover')
    viz.plot_column(results, 'SMA_SLOW', pos, label='Slow', y_label='Crossover')
    # Dashed price line so the SMA curves remain readable.
    viz.plot_column(results, 'price', pos, y_label='price', linestyle="--")
    plt.legend()
34 |
35 |
36 |
37 |
# Run directly: print the serialized strategy config, then execute the backtest.
if __name__ == '__main__':
    print('Strategy:\n{}'.format(strat.serialize()))
    strat.run()
41 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | ###Python###
2 | performance/results
3 |
4 | # Byte-compiled / optimized / DLL files
5 | __pycache__/
6 | *.py[cod]
7 |
8 | # C extensions
9 | *.so
10 |
11 | # Distribution / packaging
12 | .Python
13 | env/
14 | build/
15 | develop-eggs/
16 | dist/
17 | downloads/
18 | eggs/
19 | lib/
20 | lib64/
21 | parts/
22 | sdist/
23 | var/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 |
28 | # PyInstaller
29 | # Usually these files are written by a python script from a template
30 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
31 | *.manifest
32 | *.spec
33 |
34 | # Installer logs
35 | pip-log.txt
36 | pip-delete-this-directory.txt
37 |
38 | # Unit test / coverage reports
39 | htmlcov/
40 | .tox/
41 | .coverage
42 | .cache
43 | nosetests.xml
44 | coverage.xml
45 |
46 | # Translations
47 | *.mo
48 | *.pot
49 |
50 | # Django stuff:
51 | *.log
52 |
53 | # Sphinx documentation
54 | docs/_build/
55 |
56 | # PyBuilder
57 | target/
58 |
59 | # sublime
60 | *.sublime-project
61 | *.sublime-workspace
62 |
63 | # Dev env
64 | .env
65 | performance_results/
66 | logs/
67 | data/csv_data/
68 |
69 | # Mac
70 | .DS_Store
71 |
72 | # Notebooks
73 | .ipynb_checkpoints/
74 |
75 | spa-mat/
76 |
--------------------------------------------------------------------------------
/core/kryptos/strategy/signals/utils.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from logbook import Logger
3 |
4 | log = Logger("SIGNALS")
5 |
6 |
def cross_above(series, trigger):
    """Return True when `series` crossed above `trigger` on the latest bar.

    `trigger` may be a scalar int (treated as a constant line) or a series.
    Returns False when there is too little history to compare two bars.
    """
    if isinstance(trigger, int):
        # Promote a constant threshold to a short series for uniform indexing.
        trigger = [trigger] * 3
    try:
        crossed = series[-2] <= trigger[-2] and series[-1] > trigger[-1]
    except IndexError:
        log.warn("Not enough data to calculate cross above")
        return False
    return crossed
16 |
17 |
def cross_below(series, trigger):
    """Return True when `series` crossed below `trigger` on the latest bar.

    `trigger` may be a scalar int (treated as a constant line) or a series.
    Returns False when there is too little history to compare two bars.
    """
    if isinstance(trigger, int):
        trigger = [trigger] * 3
    try:
        # BUG FIX: the latest value was compared against trigger[-2], so a
        # cross below a *moving* trigger was mis-detected. Compare bar-for-bar,
        # mirroring cross_above.
        return series[-2] >= trigger[-2] and series[-1] < trigger[-1]
    except IndexError:
        # Also fixed the copy-pasted "cross above" wording in the log line.
        log.warn("Not enough data to calculate cross below")
        return False
27 |
28 |
def increasing(series, period=4):
    """True when the trailing `period` values of `series` strictly increase."""
    window = series[-period:]
    return np.all(np.diff(window) > 0)
32 |
33 |
def decreasing(series, period=4):
    """True when the trailing `period` values of `series` strictly decrease."""
    window = series[-period:]
    return np.all(np.diff(window) < 0)
37 |
38 |
def greater_than(series_1, series_2):
    """True when the latest value of series_1 exceeds that of series_2."""
    latest_1 = series_1[-1]
    latest_2 = series_2[-1]
    return latest_1 > latest_2
41 |
42 |
def less_than(series_1, series_2):
    """True when the latest value of series_1 is below that of series_2."""
    latest_1 = series_1[-1]
    latest_2 = series_2[-1]
    return latest_1 < latest_2
45 |
--------------------------------------------------------------------------------
/examples/macdfix.py:
--------------------------------------------------------------------------------
1 | from kryptos.strategy import Strategy
2 | from kryptos.strategy.indicators import technical
3 | # from kryptos.strategy.signals import utils
4 | # from kryptos.utils import viz
5 | # import matplotlib.pyplot as plt
6 | import logbook
7 |
8 | log = logbook.Logger('EXAMPLE')
9 | log.level = logbook.INFO
10 |
11 | strat = Strategy('MacdFix')
12 |
13 | macdfix_9 = technical.get_indicator('MACDFIX', label='MACDFIX_9')
14 |
15 | macdfix_18 = technical.get_indicator('MACDFIX', label='MACDFIX_18')
16 | macdfix_18.update_param('signalperiod', 18)
17 |
18 |
19 | strat.add_market_indicator(macdfix_9)
20 | strat.add_market_indicator(macdfix_18)
21 |
@strat.init
def init(context):
    """One-time setup: start the trading-step counter on the context."""
    log.info('Algo is being initialzed, setting up context')
    context.i = 0
26 |
27 |
@strat.handle_data
def handle_data(context, data):
    """Per-bar callback: count how many trading steps were processed."""
    log.debug('Processing new trading step')
    context.i += 1
32 |
33 |
@strat.analyze()
def analyze(context, results, pos):
    """Post-run summary: log final cash and the number of periods traded."""
    # results.cash is indexed per trading period; [-1] is the final value.
    ending_cash = results.cash[-1]
    log.info('Ending cash: ${}'.format(ending_cash))
    log.info('Completed for {} trading periods'.format(context.i))
39 |
# Execute the backtest only when run as a script (not on import).
if __name__ == '__main__':
    strat.run()
42 |
--------------------------------------------------------------------------------
/examples/rsi_test.json:
--------------------------------------------------------------------------------
1 | {
2 | "trading": {
3 | "EXCHANGE": "bitfinex",
4 | "ASSET": "btc_usd",
5 | "DATA_FREQ": "daily",
6 | "HISTORY_FREQ": "1d",
7 | "CAPITAL_BASE": 10000,
8 | "QUOTE_CURRENCY": "usd",
9 | "START": "2017-10-10",
10 | "END": "2018-3-28",
11 | "BARS": 50,
12 | "ORDER_SIZE": 0.5,
13 | "SLIPPAGE_ALLOWED": 0.05
14 | },
15 | "datasets": [],
16 | "indicators": [
17 | {
18 | "name": "RSI",
19 | "symbol": "btc_usd",
20 | "dataset": null,
21 | "label": "RSI",
22 | "params": {
23 | "timeperiod": 30,
24 | "oversold": 30,
25 | "overbought": 70
26 | }
27 | }
28 | ],
29 | "signals": {
30 | "buy": [
31 | {
32 | "func": "cross_below",
33 | "params": {
34 | "series": "RSI",
35 | "trigger": 50
36 | }
37 | }
38 | ],
39 | "sell": [
40 | {
41 | "func": "cross_above",
42 | "params": {
43 | "series": "RSI",
44 | "trigger": 50
45 | }
46 | }
47 |
48 | ]
49 | }
50 | }
51 |
--------------------------------------------------------------------------------
/core/kryptos/scripts/kill_strat.py:
--------------------------------------------------------------------------------
1 | import os
2 | import click
3 | import requests
4 |
5 |
6 | REMOTE_BASE_URL = "https://kryptos-205115.appspot.com"
7 | LOCAL_BASE_URL = "http://web:8080"
8 |
9 |
@click.command(help="Kill a running strategy")
@click.argument("strat_id", type=str)
@click.option("--hosted", "-h", is_flag=True, help="Kill on a GCP instance via the API")
def run(strat_id, hosted):
    """CLI entry point: delete the strategy via the web API."""
    click.secho(f"Killing strat {strat_id}", fg="yellow")

    resp = kill_from_api(strat_id, hosted=hosted)
    # Surface non-2xx responses as an exception so the CLI exits non-zero.
    resp.raise_for_status()
    return
19 |
20 |
def kill_from_api(strat_id, hosted=False):
    """POST a delete request for `strat_id` to the local or hosted API.

    Args:
        strat_id(str): Identifier of the strategy to kill.
        hosted(bool): When True, target the remote GCP deployment.

    Returns:
        requests.Response: The raw API response (not status-checked here).
    """
    click.secho("Killing strat via API", fg="cyan")
    if hosted:
        click.secho("Running remotely", fg="yellow")
        base_url = REMOTE_BASE_URL
    else:
        click.secho("Running locally", fg="yellow")
        base_url = LOCAL_BASE_URL

    # BUG FIX: os.path.join is platform-dependent (backslashes on Windows)
    # and is the wrong tool for URLs; build the endpoint explicitly.
    endpoint = f"{base_url}/api/strat/delete"

    data = {"strat_id": strat_id}
    click.secho(f"Killing strat {strat_id} at {endpoint}", fg="yellow")

    resp = requests.post(endpoint, json=data)
    click.echo(resp)
    return resp
40 |
--------------------------------------------------------------------------------
/ml/requirements.txt:
--------------------------------------------------------------------------------
1 | -i https://pypi.python.org/simple/
2 | backcall==0.1.0
3 | cachetools==3.1.1
4 | certifi==2019.9.11
5 | chardet==3.0.4
6 | click==7.0
7 | cycler==0.10.0
8 | decorator==4.4.0
9 | future==0.18.1
10 | google-api-core[grpc]==1.14.3
11 | google-auth==1.6.3
12 | google-cloud-core==0.28.1
13 | google-cloud-datastore==1.7.0
14 | googleapis-common-protos==1.6.0
15 | grpcio==1.24.1
16 | hyperopt==0.1.1
17 | idna==2.8
18 | ipython-genutils==0.2.0
19 | ipython==7.8.0
20 | jedi==0.15.1
21 | jinja2==2.10.3
22 | joblib==0.14.0
23 | kiwisolver==1.1.0
24 | lightgbm==2.1.2
25 | logbook==1.4.0
26 | markupsafe==1.1.1
27 | matplotlib==2.2.3
28 | networkx==2.4
29 | numpy==1.15.1
30 | pandas-profiling==1.4.1
31 | pandas==0.23.4
32 | parso==0.5.1
33 | pexpect==4.7.0 ; sys_platform != 'win32'
34 | pickleshare==0.7.5
35 | prompt-toolkit==2.0.10
36 | protobuf==3.10.0
37 | ptyprocess==0.6.0
38 | pyasn1-modules==0.2.7
39 | pyasn1==0.4.7
40 | pygments==2.4.2
41 | pymongo==3.9.0
42 | pyparsing==2.4.2
43 | python-dateutil==2.8.0
44 | pytz==2019.3
45 | raven==6.9.0
46 | redis==3.3.11
47 | requests==2.22.0
48 | rq==0.12.0
49 | rsa==4.0
50 | scikit-learn==0.21.3
51 | scipy==1.1.0
52 | shap==0.24.0
53 | six==1.12.0
54 | ta-lib==0.4.17
55 | ta==0.2.0
56 | tqdm==4.36.1
57 | traitlets==4.3.3
58 | urllib3==1.24.2
59 | wcwidth==0.1.7
60 | xgboost==0.80
61 |
--------------------------------------------------------------------------------
/examples/sma_crossover.json:
--------------------------------------------------------------------------------
1 | {
2 | "trading": {
3 | "EXCHANGE": "bitfinex",
4 | "ASSET": "btc_usd",
5 | "DATA_FREQ": "daily",
6 | "HISTORY_FREQ": "1d",
7 | "CAPITAL_BASE": 15000,
8 | "QUOTE_CURRENCY": "usd",
9 | "START": "2016-12-10",
10 | "END": "2018-05-12"
11 | },
12 | "datasets": [],
13 | "indicators": [
14 | {
15 | "name": "SMA",
16 | "symbol": "btc_usd",
17 | "dataset": null,
18 | "label": "SMA_FAST",
19 | "params": {
20 | "timeperiod": 10
21 | }
22 | },
23 | {
24 | "name": "SMA",
25 | "symbol": "btc_usd",
26 | "dataset": null,
27 | "label": "SMA_SLOW",
28 | "params": {
29 | "timeperiod": 50
30 | }
31 | }
32 | ],
33 | "signals": {
34 | "buy": [
35 | {
36 | "func": "cross_above",
37 | "params": {
38 | "series": "SMA_FAST",
39 | "trigger": "SMA_SLOW"
40 | }
41 | }
42 | ],
43 | "sell": [
44 | {
45 | "func": "cross_below",
46 | "params": {
47 | "series": "SMA_FAST",
48 | "trigger": "SMA_SLOW"
49 | }
50 |
51 | }
52 | ]
53 | }
54 | }
55 |
--------------------------------------------------------------------------------
/app/app/templates/_macros.html:
--------------------------------------------------------------------------------
1 | {% from "flask_user/_macros.html" import render_field, render_checkbox_field, render_submit_field, render_form %}
2 |
3 |
4 | {% macro render_form(form, action) -%}
5 |
17 |
18 | {%- endmacro %}
19 |
20 | {% macro input(name, value='', type='text', size=20) -%}
21 |
23 | {%- endmacro %}
24 |
25 |
26 | {% macro render_submit_as_btn(field, label=None, tabindex=None) -%}
27 | {% if not label %}{% set label=field.label.text %}{% endif %}
28 | {##}
29 |
32 | {%- endmacro %}
33 |
--------------------------------------------------------------------------------
/examples/mystrategy.json:
--------------------------------------------------------------------------------
1 | {
2 | "trading": {
3 | "EXCHANGE": "bitfinex",
4 | "ASSET": "btc_usd",
5 | "DATA_FREQ": "daily",
6 | "HISTORY_FREQ": "1d",
7 | "CAPITAL_BASE": 5000,
8 | "QUOTE_CURRENCY": "usd",
9 | "START": "2016-10-10",
10 | "END": "2018-3-28"
11 | },
12 | "datasets": [],
13 | "indicators": [
14 | {
15 | "name": "MACDFIX",
16 | "symbol": "btc_usd",
17 | "dataset": null,
18 | "label": "MACDFIX",
19 | "params": {
20 | "signalperiod": 4
21 | }
22 | },
23 | {
24 | "name": "SMA",
25 | "symbol": "btc_usd",
26 | "dataset": null,
27 | "label": "SMA",
28 | "params": {
29 | "timeperiod": 30
30 | }
31 | },
32 | {
33 | "name": "EMA",
34 | "symbol": "btc_usd",
35 | "dataset": null,
36 | "label": "EMA",
37 | "params": {
38 | "timeperiod": 30
39 | }
40 | },
41 | {
42 | "name": "BBANDS",
43 | "symbol": "btc_usd",
44 | "dataset": null,
45 | "label": "BBANDS",
46 | "params": {
47 | "timeperiod": 5,
48 | "nbdevup": 2,
49 | "nbdevdn": 2,
50 | "matype": 0
51 | }
52 | }
53 | ],
54 | "signals": {}
55 | }
56 |
--------------------------------------------------------------------------------
/app/app/models/migrations/versions/4d7d3d759e88_.py:
--------------------------------------------------------------------------------
1 | """empty message
2 |
3 | Revision ID: 29498b27f9c4
4 | Revises: b060ded0ae81
5 | Create Date: 2018-07-29 01:43:41.940509
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '29498b27f9c4'
14 | down_revision = 'b060ded0ae81'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
def upgrade():
    """Drop the legacy 'strategies' table (later recreated with a new schema)."""
    # NOTE(review): the revision id '29498b27f9c4' does not match this file's
    # name (4d7d3d759e88_) — confirm the migration chain is intact.
    # Drop the table outright; the earlier comment mentioned a CASCADE clause,
    # but op.drop_table here performs a plain drop.
    op.drop_table('strategies')
    # Re-create the foreign key constraint
    # op.create_foreign_key('fk_roles_user_user_id_user', 'roles_users', 'user', ['user_id'], ['id'], ondelete='CASCADE')

def downgrade():
    """Restore the dropped 'strategies' table with its original columns."""
    op.create_table('strategies',
        sa.Column('id', sa.String(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('trading_config', sa.JSON(), nullable=False),
        sa.Column('dataset_config', sa.JSON(), nullable=False),
        sa.Column('indicators_config', sa.JSON(), nullable=False),
        sa.Column('signals_config', sa.JSON(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('id')
    )
39 |
--------------------------------------------------------------------------------
/core/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.6 as base
2 |
3 | # install TA_LIB library and other dependencies
4 | RUN apt-get -y update \
5 | && apt-get -y install libfreetype6-dev libpng-dev libopenblas-dev liblapack-dev gfortran \
6 | && curl -L -O http://prdownloads.sourceforge.net/ta-lib/ta-lib-0.4.0-src.tar.gz \
7 | && tar -zxf ta-lib-0.4.0-src.tar.gz \
8 | && cd ta-lib/ \
9 | && ./configure --prefix=/usr \
10 | && make \
11 | && make install \
12 | && rm -rf ta-lib*
13 |
14 | FROM base as builder
15 |
16 | RUN mkdir /install
17 | WORKDIR /install
18 | # copy only the requirements to prevent rebuild for any changes
19 | COPY requirements.txt /requirements.txt
20 |
21 | # ensure numpy installed before ta-lib, matplotlib, etc
22 | RUN pip install 'numpy==1.14.3'
23 | RUN pip install -r /requirements.txt
24 |
25 |
26 | FROM builder
27 |
28 | COPY --from=builder /root/.cache /root/.cache
29 | COPY --from=builder /requirements.txt /core/requirements.txt
30 | RUN pip install -r /core/requirements.txt && rm -rf /root/.cache
31 |
32 |
33 | # Above lines represent the dependencies
34 | # below lines represent the actual app
35 | # Only the actual app should be rebuilt upon changes
36 | COPY . /core
37 |
38 | # Install kryptos package
39 | # COPY setup.py /core/setup.py
40 | # COPY README.md /core/README.md
41 |
42 | WORKDIR /core
43 | RUN pip install -e .
44 |
45 | EXPOSE 8080
46 | ENTRYPOINT ["honcho", "start", "--no-prefix"]
47 | CMD ["worker", "monitor"]
48 |
--------------------------------------------------------------------------------
/ml/ml/feature_selection/lgb.py:
--------------------------------------------------------------------------------
1 | import pandas as pd
2 | import itertools
3 |
4 | from ml.feature_selection.feature_selector import FeatureSelector
5 |
6 |
def lgb_embedded_feature_selection(X_train, y_train):
    """Perform feature selection using LightGBM embedded method:
    https://github.com/WillKoehrsen/feature-selector
    https://towardsdatascience.com/a-feature-selection-tool-for-machine-learning-in-python-b64dd23710f0
    https://github.com/WillKoehrsen/feature-selector/blob/master/Feature%20Selector%20Usage.ipynb

    Args:
        X_train(pandas.DataFrame): Features columns.
        y_train: Target column.

    Returns:
        list: Name columns selected (original column order preserved).
    """
    fs = FeatureSelector(data=X_train, labels=y_train)

    fs.identify_missing(missing_threshold=0.6)
    # fs.identify_single_unique() # NOTE: Pandas version 0.23.4 required
    fs.identify_collinear(correlation_threshold=0.995)  # 0.98
    fs.identify_zero_importance(task='regression', eval_metric='mse',
                                n_iterations=10, early_stopping=True)
    fs.identify_low_importance(cumulative_importance=0.99)

    # Union of every flagged column. A set gives O(1) membership tests and
    # replaces the previous convoluted zip_longest interleaving — the order
    # of the exclusion list never mattered, only membership is checked.
    excluded = set()
    for reason in ('missing', 'collinear', 'zero_importance', 'low_importance'):
        excluded.update(fs.ops[reason])

    return [c for c in X_train.columns if c not in excluded]
34 |
--------------------------------------------------------------------------------
/app/app/models/migrations/versions/5e29ad8a0e89_.py:
--------------------------------------------------------------------------------
1 | """empty message
2 |
3 | Revision ID: 5e29ad8a0e89
4 | Revises: 6c6917015a3c
5 | Create Date: 2018-07-24 19:55:21.518610
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '5e29ad8a0e89'
14 | down_revision = '6c6917015a3c'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
def upgrade():
    """Add Telegram account columns to users; unique-constrain id and username."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('users', sa.Column('telegram_auth_date', sa.DateTime(), nullable=True))
    op.add_column('users', sa.Column('telegram_id', sa.Integer(), nullable=True))
    op.add_column('users', sa.Column('telegram_photo', sa.String(), nullable=True))
    op.add_column('users', sa.Column('telegram_username', sa.String(length=255), nullable=True))
    # NOTE(review): passing None lets the backend auto-name these unique
    # constraints, which makes them hard to drop by name later — confirm.
    op.create_unique_constraint(None, 'users', ['telegram_username'])
    op.create_unique_constraint(None, 'users', ['telegram_id'])
    # ### end Alembic commands ###


def downgrade():
    """Revert upgrade(): drop the unique constraints and the Telegram columns."""
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): drop_constraint(None, ...) requires a concrete constraint
    # name on most backends and will likely fail — verify before relying on
    # this downgrade.
    op.drop_constraint(None, 'users', type_='unique')
    op.drop_constraint(None, 'users', type_='unique')
    op.drop_column('users', 'telegram_username')
    op.drop_column('users', 'telegram_photo')
    op.drop_column('users', 'telegram_id')
    op.drop_column('users', 'telegram_auth_date')
    # ### end Alembic commands ###
39 |
--------------------------------------------------------------------------------
/app/app/models/migrations/versions/e3202524923f_.py:
--------------------------------------------------------------------------------
1 | """empty message
2 |
3 | Revision ID: e3202524923f
4 | Revises: 29498b27f9c4
5 | Create Date: 2018-07-29 02:32:34.601493
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = 'e3202524923f'
14 | down_revision = '29498b27f9c4'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
def upgrade():
    """Recreate 'strategies' with an integer PK, config JSON columns, and run status."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('strategies',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('trading_config', sa.JSON(), nullable=False),
        sa.Column('dataset_config', sa.JSON(), nullable=False),
        sa.Column('indicators_config', sa.JSON(), nullable=False),
        sa.Column('signals_config', sa.JSON(), nullable=False),
        sa.Column('status', sa.String(), nullable=True),
        sa.Column('result_json', sa.JSON(), nullable=True),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id'),
        # NOTE(review): UniqueConstraint('id') duplicates the primary key.
        sa.UniqueConstraint('id')
    )
    # ### end Alembic commands ###


def downgrade():
    """Revert upgrade() by dropping the strategies table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('strategies')
    # ### end Alembic commands ###
43 |
--------------------------------------------------------------------------------
/app/app/templates/public/landing.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 |
3 |
4 |
5 | {% block content %}
6 |
7 |
8 |
9 |
Kryptos AI
10 |
A virtual investment assistant that manages your cryptocurrency portfolio
11 |
12 |
Join the Cryptocurrency Revolution Today
13 |
14 | {% if current_user.is_authenticated %}
15 |
Manage Exchanges
16 |
Build A Strategy
17 |
View Strategies
18 | {% else %}
19 |
Register
20 |
{%trans%}Sign in{%endtrans%}
21 |
22 | {% endif %}
23 |
24 |
25 |
26 |
27 |
28 | Start Chatting
29 |
30 |
31 |
32 |
33 |
34 |
35 | {% endblock %}
36 |
--------------------------------------------------------------------------------
/core/kryptos/data/clients/quandl_client.py:
--------------------------------------------------------------------------------
1 | import os
2 | import csv
3 | import quandl
4 |
5 | from kryptos.data import csv_data
6 |
7 | API_KEY = os.getenv("QUANDL_API_KEY")
8 | DATA_DIR = os.path.dirname(os.path.abspath(csv_data.__file__))
9 | QUANDL_DIR = os.path.join(DATA_DIR, "quandl")
10 |
11 | quandl.ApiConfig.api_key = API_KEY
12 |
13 |
def code_csv():
    # Path of the CSV listing BCHAIN dataset codes (one code per row).
    return os.path.join(QUANDL_DIR, "BCHAIN-datasets-codes.csv")
16 |
17 |
def data_csv():
    # Path of the cached, cleaned Quandl data CSV written by fetch_all().
    return os.path.join(QUANDL_DIR, "data.csv")
20 |
21 |
def codes_from_csv():
    """Return the Quandl dataset codes listed in the first CSV column."""
    with open(code_csv(), "r") as f:
        return [row[0] for row in csv.reader(f)]
28 |
29 |
def fetch_datasets(
    codes, start_date=None, end_date=None, collapse=None, transformation=None, rows=None
):
    """Retrieves datasets provided by list of codes (DATABASE/DATASET)

    https://docs.quandl.com/docs/parameters-2

    BUG FIX: the previous implementation ignored the `codes` argument
    (re-reading the codes CSV instead) and silently dropped every keyword
    parameter; all arguments are now forwarded to quandl.get().

    Returns pandas Dataframe
    """
    df = quandl.get(
        codes,
        start_date=start_date,
        end_date=end_date,
        collapse=collapse,
        transformation=transformation,
        rows=rows,
    )
    return df
43 |
44 |
def clean_dataframe(df):
    """Strip the Quandl naming artifacts from columns and tag every row as btc."""
    cleaned = df.rename(
        columns=lambda col: col.replace("BCHAIN/", "").replace(" - Value", "")
    )
    cleaned["Symbol"] = "btc"
    return cleaned
50 |
51 |
def fetch_all():
    """Download every BCHAIN dataset and overwrite the local data CSV."""
    all_codes = codes_from_csv()
    frame = clean_dataframe(fetch_datasets(all_codes))
    frame.to_csv(data_csv(), mode="w")
58 |
--------------------------------------------------------------------------------
/examples/api_example.json:
--------------------------------------------------------------------------------
1 | {
2 | "trading": {
3 | "EXCHANGE": "bitfinex",
4 | "ASSET": "btc_usd",
5 | "DATA_FREQ": "daily",
6 | "HISTORY_FREQ": "1d",
7 | "CAPITAL_BASE": 10000,
8 | "QUOTE_CURRENCY": "usd",
9 | "START": "2017-10-10",
10 | "END": "2018-3-28"
11 | },
12 | "datasets": [
13 | {
14 | "name": "quandl",
15 | "columns": [
16 | "MKTCP"
17 | ],
18 | "indicators": []
19 | },
20 | {
21 | "name": "google",
22 | "columns": [
23 | "bitcoin futures"
24 | ],
25 | "indicators": [
26 | {
27 | "name": "RELCHANGE",
28 | "symbol": "bitcoin futures",
29 | "dataset": "google",
30 | "label": "RELCHANGE",
31 | "params": {}
32 | }
33 | ]
34 | }
35 | ],
36 | "indicators": [
37 | {
38 | "name": "STOCH",
39 | "symbol": "btc_usd",
40 | "dataset": null,
41 | "label": "STOCH",
42 | "params": {
43 | "fastk_period": 5,
44 | "slowk_period": 3,
45 | "slowk_matype": "SMA",
46 | "slowd_matype": "EMA"
47 |
48 | }
49 | },
50 | {
51 | "name": "BBANDS",
52 | "symbol": "btc_usd",
53 | "dataset": null,
54 | "label": "BBANDS",
55 | "params": {
56 | "matype": "DEMA"
57 | }
58 | }
59 | ],
60 | "signals": {}
61 | }
62 |
--------------------------------------------------------------------------------
/ml/Dockerfile:
--------------------------------------------------------------------------------
# ML-worker image: compiles xgboost and TA-Lib from source in a base stage,
# installs Python deps in a builder stage, and copies the app code last so
# code-only changes do not invalidate the dependency layers.
FROM python:3.6 as base

# install xgboost library
RUN git clone --recursive https://github.com/dmlc/xgboost \
    && cd xgboost; make -j4

# install TA_LIB library and other dependencies
RUN apt-get -y update \
    && apt-get -y install libfreetype6-dev libpng-dev libopenblas-dev liblapack-dev gfortran \
    && curl -L -O http://prdownloads.sourceforge.net/ta-lib/ta-lib-0.4.0-src.tar.gz \
    && tar -zxf ta-lib-0.4.0-src.tar.gz \
    && cd ta-lib/ \
    && ./configure --prefix=/usr \
    && make \
    && make install \
    && rm -rf ta-lib*

FROM base as builder

RUN mkdir /install
WORKDIR /install
# copy only the requirements to prevent rebuild for any changes
COPY requirements.txt /requirements.txt

# ensure numpy installed before ta-lib, matplotlib, etc
RUN pip install 'numpy==1.14.3'
RUN pip install -r /requirements.txt


# matplotlib config (used by benchmark)
RUN mkdir -p /root/.config/matplotlib
RUN echo "backend : Agg" > /root/.config/matplotlib/matplotlibrc


# NOTE(review): this unnamed final stage extends `builder`, so it already
# contains /root/.cache and /requirements.txt — the two COPY --from=builder
# lines and the second `pip install -r` appear to redo work the stage already
# has. Confirm before simplifying.
FROM builder

COPY --from=builder /root/.cache /root/.cache
COPY --from=builder /requirements.txt /ml/requirements.txt
RUN pip install -r /ml/requirements.txt && rm -rf /root/.cache


# Above lines represent the dependencies
# below lines represent the actual app
# Only the actual app should be rebuilt upon changes
COPY . /ml

WORKDIR /ml
ENTRYPOINT python worker.py
49 |
--------------------------------------------------------------------------------
/app/app/api/views.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from flask import Blueprint, request, jsonify, current_app
3 |
4 | from app import task
5 |
6 |
7 | api = Blueprint("api", __name__, url_prefix="/api")
8 |
9 |
@api.route("/monitor", methods=["GET"])
def strat_status():
    """Return stored job data for the strat id given in the query string."""
    strat_id = request.args["strat_id"]
    queue_name = request.args.get("queue_name")

    current_app.logger.info(f"Fetching strat {strat_id} from {queue_name} queue")
    job_data = task.get_job_data(strat_id, queue_name)
    return jsonify(strat_info=job_data)
19 |
20 |
@api.route("/strat", methods=["POST"])
def run_strat():
    """Enqueue a strategy described by the posted JSON and return its job id."""
    payload = request.json
    strat_dict = payload.get("strat_json")
    queue_name = payload.get("queue_name")
    user_id = payload.get("user_id")

    # "paper" and "live" both run against a live feed; only "live"
    # places real (non-simulated) orders.
    live = queue_name in ("paper", "live")
    simulate_orders = queue_name != "live"

    current_app.logger.info(f"Enqueuing strat to {queue_name} queue")
    job_id, _ = task.queue_strat(
        strat_dict, user_id=user_id, live=live, simulate_orders=simulate_orders
    )
    current_app.logger.info(f"Strat running in job {job_id}")
    return jsonify(strat_id=job_id)
41 |
42 |
@api.route("/strat/delete", methods=["POST"])
def delete_strat():
    """Attempt to kill the running strat job named in the posted JSON."""
    strat_id = request.json.get("strat_id")
    if not task.kill_strat(strat_id):
        return "Could not shutdown", 409
    return "Shutdown initiated", 200
51 |
--------------------------------------------------------------------------------
/core/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | import os
3 | from setuptools import setup, find_packages
4 |
5 | __version__ = "0.2.3"
6 |
7 | #
8 | # See https://packaging.python.org/requirements/ and
9 | # https://caremad.io/posts/2013/07/setup-vs-requirement/ for more details.
10 | requires = ["enigma-catalyst", "matplotlib", "TA-Lib", "quandl", "click", "logbook"]
11 |
12 |
def package_files(directory):
    """Yield every file beneath *directory*, each prefixed with '..' so the
    paths resolve relative to the package dir (setuptools data-files idiom)."""
    for root, _, names in os.walk(directory):
        yield from (os.path.join("..", root, name) for name in names)
17 |
18 |
# Package identity; the long description is sourced from the adjacent README.
package_name = "kryptos"
base_dir = os.path.abspath(os.path.dirname(__file__))
# Get the long description from the README file
with open(os.path.join(base_dir, "README.md"), "rb") as f:
    long_description = f.read().decode("utf-8")

setup(
    name=package_name,
    version=__version__,
    author="Produvia",
    author_email="hello@produvia.com",
    url="https://produvia.com",
    description="AI-Driven Cryptocurrency Trading Platform",
    long_description=long_description,
    keywords="cryptocurrency AI algorithmic trading",
    # license='MIT',
    packages=find_packages(base_dir),
    install_requires=requires,
    # Console entry points: CLI scripts plus the RQ worker manager.
    entry_points="""
    [console_scripts]
    bchain=kryptos.scripts.bchain_activity:run
    trends=kryptos.scripts.trends:run
    strat=kryptos.scripts.strat:cli
    compare_all_ta=kryptos.scripts.run_all_ta:run
    workers=kryptos.worker.worker:manage_workers
    """,
    zip_safe=False,
)
47 |
--------------------------------------------------------------------------------
/examples/api_example.py:
--------------------------------------------------------------------------------
1 | from kryptos.strategy import Strategy
2 | from kryptos.strategy.indicators import technical
3 |
4 | import logbook
5 |
6 | log = logbook.Logger('EXAMPLE')
7 | log.level = logbook.INFO
8 |
9 |
10 | strat = Strategy('Simple Stragey', data_frequency='daily')
11 |
12 | bbands = technical.get_indicator('BBANDS')
13 | bbands.update_param('matype', 'EMA')
14 |
15 | stoch = technical.get_indicator('STOCH')
16 |
17 | strat.add_market_indicator(bbands)
18 | strat.add_market_indicator(stoch)
19 |
20 | strat.use_dataset('quandl', columns=['MKTCP'])
21 |
22 | strat.use_dataset('google', columns=['bitcoin futures'])
23 | strat.add_data_indicator('google', 'relchange', col='bitcoin futures')
24 |
25 | # trading config can be set via json or api
26 | # Note that minute data is not supported for external datasets
27 | # strat.trading_info['CAPITAL_BASE'] = 10000
28 | # strat.trading_info['DATA_FREQ'] = 'minute'
29 | # strat.trading_info['HISTORY_FREQ'] = '1m'
30 | # strat.trading_info['START'] = '2017-12-10'
31 | # strat.trading_info['END'] = '2017-12-11'
32 |
33 |
@strat.init
def init(context):
    """Set up per-run state; called once before trading starts."""
    # typo fix in log message: "initialzed" -> "initialized"
    log.info('Algo is being initialized, setting up context')
    context.i = 0
38 |
39 |
@strat.handle_data
def handle_data(context, data):
    """Count each trading step as it is processed."""
    log.debug('Processing new trading step')
    context.i = context.i + 1
44 |
45 |
@strat.analyze()
def analyze(context, results, pos):
    """Log final cash and the number of periods traded."""
    log.info('Ending cash: ${}'.format(results.cash[-1]))
    log.info('Completed for {} trading periods'.format(context.i))
51 |
52 |
53 | if __name__ == '__main__':
54 | log.info('Strategy Schema:\n{}'.format(strat.serialize()))
55 | strat.run()
56 |
--------------------------------------------------------------------------------
/core/kryptos/data/csv_data/quandl/BCHAIN-datasets-codes.csv:
--------------------------------------------------------------------------------
1 | BCHAIN/TOTBC,Total Bitcoins
2 | BCHAIN/MKTCP,Bitcoin Market Capitalization
3 | BCHAIN/TRFEE,Bitcoin Total Transaction Fees
4 | BCHAIN/TRFUS,Bitcoin Total Transaction Fees USD
5 | BCHAIN/NETDF,Bitcoin Network Deficit
6 | BCHAIN/NTRAN,Bitcoin Number of Transactions
7 | BCHAIN/NTRAT,Bitcoin Total Number of Transactions
8 | BCHAIN/NTREP,Bitcoin Number of Transactions Excluding Popular Addresses
9 | BCHAIN/NADDU,Bitcoin Number of Unique Bitcoin Addresses Used
10 | BCHAIN/NTRBL,Bitcoin Number of Transaction per Block
11 | BCHAIN/TOUTV,Bitcoin Total Output Volume
12 | BCHAIN/ETRAV,Bitcoin Estimated Transaction Volume
13 | BCHAIN/ETRVU,Bitcoin Estimated Transaction Volume USD
14 | BCHAIN/TRVOU,Bitcoin USD Exchange Trade Volume
15 | BCHAIN/TVTVR,Bitcoin Trade Volume vs Transaction Volume Ratio
16 | BCHAIN/MKPRU,Bitcoin Market Price USD
17 | BCHAIN/CPTRV,Bitcoin Cost % of Transaction Volume
18 | BCHAIN/CPTRA,Bitcoin Cost Per Transaction
19 | BCHAIN/HRATE,Bitcoin Hash Rate
20 | BCHAIN/MIREV,Bitcoin Miners Revenue
21 | BCHAIN/ATRCT,Bitcoin Median Transaction Confirmation Time
22 | BCHAIN/BCDDC,Bitcoin Days Destroyed Cumulative
23 | BCHAIN/BCDDE,Bitcoin Days Destroyed
24 | BCHAIN/BCDDW,Bitcoin Days Destroyed (Minimum Age 1 Week)
25 | BCHAIN/BCDDM,Bitcoin Days Destroyed (Minimum Age 1 Month)
26 | BCHAIN/BCDDY,Bitcoin Days Destroyed (Minimum Age 1 Year)
27 | BCHAIN/BLCHS,Bitcoin api.blockchain Size
28 | BCHAIN/AVBLS,Bitcoin Average Block Size
29 | BCHAIN/MWTRV,Bitcoin My Wallet Transaction Volume
30 | BCHAIN/MWNUS,Bitcoin My Wallet Number of Users
31 | BCHAIN/MWNTD,Bitcoin My Wallet Number of Transaction Per Day
32 | BCHAIN/MIOPM,Bitcoin Mining Operating Margin
33 | BCHAIN/DIFF,Bitcoin Difficulty
34 |
--------------------------------------------------------------------------------
/app/app/templates/account/telegram_auth.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | {% if current_user.telegram_id %}
6 |
7 |
8 |
9 |
Telegram
10 |
11 |
@{{current_user.telegram_username}}
12 |
13 |
Kryptos is able to send you notifications
14 |
Logout
15 |
16 |
17 |
18 |
19 | {% else %}
20 |
21 |
22 |
23 |
Authenticate With Telegram
24 |
25 |
26 |
27 |
Authorizing with Telegram will allow Kryptos to send you notifications about your strategies
28 |
29 |
30 |
31 |
32 | {% endif %}
33 |
34 |
35 |
36 |
37 |
--------------------------------------------------------------------------------
/core/kryptos/utils/tasks.py:
--------------------------------------------------------------------------------
1 | from rq import Connection, Queue
2 | import redis
3 | from kryptos.settings import REDIS_HOST, REDIS_PORT, DEFAULT_CONFIG
4 |
5 | CONN = redis.Redis(host=REDIS_HOST, port=REDIS_PORT)
6 |
7 |
def queue_notification(msg, telegram_id):
    """Enqueue a Telegram notification; no-op when the user has no telegram id."""
    if telegram_id is None:
        return
    with Connection(CONN):
        updates_queue = Queue("updates")
        updates_queue.enqueue("updater.send_notification", msg=msg, telegram_id=telegram_id)
14 |
15 |
def enqueue_ml_calculate(
    df_current, namespace, name, idx, current_datetime, hyper_params, df_final, **kw
):
    """Queue an ML 'calculate' job, serializing both dataframes to JSON first."""
    job_args = [
        namespace,
        df_current.to_json(),
        name,
        idx,
        current_datetime,
        df_final.to_json(),
        DEFAULT_CONFIG["DATA_FREQ"],
        hyper_params,
    ]
    with Connection(CONN):
        ml_queue = Queue("ml")
        return ml_queue.enqueue(
            "worker.calculate",
            args=job_args,
            kwargs=kw,
            # allow the job to run for a full strategy iteration
            timeout="{}m".format(DEFAULT_CONFIG["MINUTE_FREQ"]),
        )
38 |
39 |
def enqueue_ml_analyze(namespace, name, df_final, df_results, data_freq, extra_results):
    """Queue an ML 'analyze' job, serializing both dataframes to JSON first."""
    job_args = [
        namespace,
        name,
        df_final.to_json(),
        df_results.to_json(),
        data_freq,
        extra_results,
    ]
    with Connection(CONN):
        ml_queue = Queue("ml")
        return ml_queue.enqueue(
            "worker.analyze",
            args=job_args,
            # allow the job to run for a full strategy iteration
            timeout="{}m".format(DEFAULT_CONFIG["MINUTE_FREQ"]),
        )
50 |
--------------------------------------------------------------------------------
/core/kryptos/strategies/dynamic_rebalance.py:
--------------------------------------------------------------------------------
1 | from kryptos.strategy import Strategy
2 | from kryptos.strategy.indicators import technical
3 | from catalyst.api import order_target_percent, order, record, get_open_orders
4 |
5 | import logbook
6 |
7 | log = logbook.Logger("DYNAMIC_REBALANCE")
8 | log.level = logbook.INFO
9 |
10 |
11 | strat = Strategy("DYNAMIC_REBALANCE", data_frequency="daily")
12 |
13 |
@strat.init
def initialize(context):
    """Initialize the step counter and base-price placeholder for the run."""
    context.base_price = None
    context.i = 0
18 |
19 |
@strat.handle_data
def trade_logic(context, data):
    """Rebalance toward a 50% target allocation each bar and record metrics."""
    # Bug fix: `cancel_order` was referenced without being imported, so any
    # open order raised a NameError; import it explicitly here.
    from catalyst.api import cancel_order

    # Cancel any outstanding orders. The loop variable is renamed: the old
    # `for order in orders` shadowed the imported catalyst.api.order function.
    for open_order in get_open_orders(context.asset) or []:
        cancel_order(open_order)

    # Make trades to achieve/maintain a target investment ratio of 0.5
    order_target_percent(context.asset, 0.5)

    # Retrieve current asset price from pricing data
    price = data.current(context.asset, "price")

    # Compute portfolio cumulative return (percent of starting cash)
    Portfolio_cumulative_return = (
        context.portfolio.portfolio_value / context.portfolio.starting_cash - 1
    ) * 100

    # Save values for later inspection
    record(
        price=price,
        cash=context.portfolio.cash,
        leverage=context.account.leverage,
        Portfolio_cumulative_return=Portfolio_cumulative_return,
    )
45 |
46 |
@strat.analyze()
def analyze(context, results, pos):
    """Log the final cash balance and period count."""
    log.info("Ending cash: ${}".format(results.cash[-1]))
    log.info("Completed for {} trading periods".format(context.i))
52 |
53 |
54 | if __name__ == "__main__":
55 | log.info("Strategy Schema:\n{}".format(strat.serialize()))
56 | strat.run()
57 |
--------------------------------------------------------------------------------
/ml/ml/utils/feature_exploration.py:
--------------------------------------------------------------------------------
1 | import os
2 | import xgboost as xgb
3 | import lightgbm as lgb
4 | import shap
5 |
6 | from ml.utils import get_algo_dir
7 |
8 | import matplotlib
9 | matplotlib.use('agg')
10 | import matplotlib.pyplot as plt
11 |
12 |
def visualize_model(model, X, idx, configuration, namespace, name):
    """Plot SHAP and native feature-importance charts for a fitted model.

    Runs only when the feature-exploration config is enabled and the current
    iteration *idx* falls on the configured interval; each chart is written
    via save_fig(). Supported model names are 'XGBOOST' and 'LIGHTGBM'; any
    other name only gets the SHAP summary plot.
    """
    # Guard clause replaces the original deeply nested body (which also ended
    # in a dead `else: pass`).
    if not configuration['enabled'] or idx % configuration['n_iterations'] != 0:
        return

    explainer = shap.TreeExplainer(model)
    shap_values = explainer.shap_values(X)
    shap.summary_plot(shap_values, X, plot_type="bar", show=False)
    save_fig(namespace, name, idx, importance_type='shap')

    if name == 'XGBOOST':
        for importance_type in ['weight', 'cover', 'gain']:
            if importance_type == 'gain':
                # NOTE(review): gain plots pass the raw score dict rather than
                # the booster; assumes get_score(fmap='') keys match feature
                # names — confirm this is intentional.
                xgb.plot_importance(
                    model.get_score(fmap='', importance_type=importance_type),
                    importance_type=importance_type,
                    max_num_features=20,
                )
            else:
                xgb.plot_importance(model, importance_type=importance_type, max_num_features=20)
            save_fig(namespace, name, idx, importance_type=importance_type)

    elif name == 'LIGHTGBM':
        for importance_type in ['split', 'gain']:
            lgb.plot_importance(model, importance_type=importance_type, max_num_features=20)
            save_fig(namespace, name, idx, importance_type=importance_type)
37 |
38 |
def save_fig(namespace, name, idx, importance_type):
    """Save the current matplotlib figure under the algo's feature_exploration dir."""
    folder_path = get_algo_dir(os.path.join(namespace, 'feature_exploration'))
    file_name = "analyze_features_model_{}_idx_{}_importance_type_{}.png".format(
        name, idx, importance_type
    )
    f_path = os.path.join(folder_path, file_name)

    # XGBoost gain plots are built from a score dict; keep the figure's own dpi.
    if name == 'XGBOOST' and importance_type == 'gain':
        plt.savefig(f_path, dpi='figure')
    else:
        plt.savefig(f_path, bbox_inches="tight", dpi=300)
49 |
--------------------------------------------------------------------------------
/app/requirements.txt:
--------------------------------------------------------------------------------
1 | -i https://pypi.python.org/simple/
2 | aiodns==1.1.1 ; python_version >= '3.5.2'
3 | aiohttp==4.0.0a1 ; python_version >= '3.5.2'
4 | alembic==1.2.1
5 | aniso8601==4.0.1
6 | arrow==0.15.2
7 | async-timeout==3.0.1
8 | attrs==19.3.0
9 | bcrypt==3.1.7
10 | blinker==1.4
11 | cachetools==3.1.1
12 | ccxt==1.17.425
13 | certifi==2019.9.11
14 | cffi==1.13.1
15 | chardet==3.0.4
16 | click==7.0
17 | cryptography==2.8
18 | flask-assistant==0.2.99
19 | flask-cors==3.0.6
20 | flask-login==0.4.1
21 | flask-mail==0.9.1
22 | flask-migrate==2.3.0
23 | flask-sqlalchemy==2.4.1
24 | flask-user==1.0.1.5
25 | flask-wtf==0.14.2
26 | flask==1.0.2
27 | future==0.18.1
28 | google-api-core[grpc]==1.14.3
29 | google-auth==1.6.3
30 | google-cloud-core==0.28.1
31 | google-cloud-datastore==1.7.0
32 | google-cloud-kms==0.2.0
33 | google-cloud-storage==1.13.0
34 | google-resumable-media==0.4.1
35 | googleapis-common-protos[grpc]==1.6.0
36 | grpc-google-iam-v1==0.11.4
37 | grpcio==1.24.1
38 | gunicorn==19.9.0
39 | honcho==1.0.1
40 | idna-ssl==1.1.0 ; python_version < '3.7'
41 | idna==2.8
42 | itsdangerous==1.1.0
43 | jinja2==2.10.3
44 | logbook==1.4.1
45 | mako==1.1.0
46 | markupsafe==1.1.1
47 | multidict==4.5.2
48 | numpy==1.15.3
49 | passlib==1.7.1
50 | protobuf==3.10.0
51 | psycopg2==2.7.5
52 | pyasn1-modules==0.2.7
53 | pyasn1==0.4.7
54 | pycares==3.0.0
55 | pycparser==2.19
56 | python-dateutil==2.8.0
57 | python-editor==1.0.4
58 | python-telegram-bot==11.1.0
59 | pytz==2019.3
60 | raven[flask]==6.9.0
61 | redis==3.3.11
62 | requests==2.22.0
63 | rq-dashboard==0.3.12
64 | rq==0.12.0
65 | rsa==4.0
66 | ruamel.yaml.clib==0.2.0 ; platform_python_implementation == 'CPython' and python_version < '3.8'
67 | ruamel.yaml==0.16.5
68 | shortuuid==0.5.0
69 | six==1.12.0
70 | sqlalchemy==1.3.10
71 | typing-extensions==3.7.4
72 | urllib3==1.25.6
73 | werkzeug==0.15.3
74 | wtforms==2.2.1
75 | yarl==1.1.0 ; python_version >= '3.5.2'
76 |
--------------------------------------------------------------------------------
/app/app/templates/account/user_exchanges.html:
--------------------------------------------------------------------------------
1 | {% extends 'flask_user/_authorized_base.html' %}
2 |
3 | {% block content %}
4 | {% from "flask_user/_macros.html" import render_field, render_checkbox_field, render_submit_field %}
5 | {% from "_macros.html" import render_form, render_submit_as_btn %}
6 | {%trans%}Authorize Exchange Accounts{%endtrans%}
7 |
8 |
9 |
26 |
27 | {#
#}
28 |
29 |
30 |
31 |
32 |
33 |
34 |
Authenticated Exchanges
35 |
36 |
37 | {{ render_form(remove_form, url_for('account.remove_exchange_auth'))}}
38 |
39 |
40 |
41 |
42 |
43 |
44 | {#
#}
45 |
46 | {% endblock %}
47 |
--------------------------------------------------------------------------------
/ROADMAP.md:
--------------------------------------------------------------------------------
1 | # Cryptocurrency Trading Platform Roadmap
2 |
3 | ## About
4 |
5 | We are building an AI-Driven Cryptocurrency Trading Platform.
6 | Our mission is to empower Crypto Investors and Traders with the latest state-of-the-art trading algorithms.
7 |
8 | Our goal is to discover the most profitable trading Strategies by evaluating the performance of algorithms using back-testing and forward-testing. We plan to optimize trading strategies using data science, machine learning and AI.
9 |
10 | We envision AI-driven suggestions for trading Indicators, Signals and Strategies to Investors and Traders.
11 |
12 | ## Roadmap
13 | 1. Select most promising Catalyst algorithms
14 | 2. Compare Catalyst algorithms by analyzing strategy performance metrics
15 | 3. Create Indicator Modules allowing the use of Indicators, which process data and send the results to Signals
16 | 4. Create Signal Modules allowing the use of Signals, which are actions that are triggered in response to Indicators
17 | 5. Create Indicator Modules for:
18 | - Basic Indicators ("if-then" conditions)
19 | - Technical Analysis Indicators
20 | 6. Compare multiple strategies using back-testing and performance metrics
21 | 7. Utilize aggregated performance statistics to curate most successful strategies
22 | 8. Compare multiple strategies using forward-testing
23 | 9. Create a machine learning model to determine the most-profitable strategies based on back-testing, forward-testing, performance metrics, etc.
24 | 10. Create additional Indicator utilities for:
25 | - Sentiment Analysis Indicators
26 | - Machine Learning Based Indicators
27 | - Crowd Wisdom Based Indicators
28 | - Blockchain Monitoring Indicators
29 |    - Custom Indicators
30 | 11. Provide user interface for a "Personal Hedge Fund" which offers Investors and Traders updates, suggestions, comparisons, performance reports
31 |
--------------------------------------------------------------------------------
/app/app/models/migrations/versions/7377a3591690_.py:
--------------------------------------------------------------------------------
1 | """empty message
2 |
3 | Revision ID: 7377a3591690
4 | Revises: 2d71155a98ec
5 | Create Date: 2018-07-24 17:00:43.179690
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 | from sqlalchemy.dialects import postgresql
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '7377a3591690'
14 | down_revision = '2d71155a98ec'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
def upgrade():
    """Replace the legacy 'strategy' table with 'strategies'.

    The new table keys rows by a string id and stores strat configuration
    in dedicated JSON columns.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('strategies',
        sa.Column('id', sa.String(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('trading_config', sa.JSON(), nullable=False),
        sa.Column('dataset_config', sa.JSON(), nullable=False),
        sa.Column('indicators_config', sa.JSON(), nullable=False),
        sa.Column('signals_config', sa.JSON(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('id')
    )
    op.drop_table('strategy')
    # ### end Alembic commands ###
36 |
37 |
def downgrade():
    """Recreate the legacy 'strategy' table (no JSON config columns) and
    drop 'strategies'. Note: the JSON config data is lost on downgrade."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('strategy',
        sa.Column('id', sa.VARCHAR(), autoincrement=False, nullable=False),
        sa.Column('name', sa.VARCHAR(), autoincrement=False, nullable=False),
        sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
        sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], name='strategy_user_id_fkey'),
        sa.PrimaryKeyConstraint('id', name='strategy_pkey')
    )
    op.drop_table('strategies')
    # ### end Alembic commands ###
50 |
--------------------------------------------------------------------------------
/core/kryptos/strategies/buy_and_hodl.py:
--------------------------------------------------------------------------------
1 | from kryptos.strategy import Strategy
2 | from kryptos.strategy.indicators import technical
3 | from catalyst.api import order_target_value, record
4 |
5 | import logbook
6 |
7 | log = logbook.Logger("BUY_AND_HODL")
8 | log.level = logbook.INFO
9 |
10 |
11 | strat = Strategy("BUY_AND_HODL", data_frequency="daily")
12 |
13 |
@strat.init
def init(context):
    """Set up the HODL ratios and buying state before trading starts."""
    # typo fix in log message: "initialzed" -> "initialized"
    log.info("Algo is being initialized, setting up context")
    context.TARGET_HODL_RATIO = 0.8
    context.RESERVE_RATIO = 1.0 - context.TARGET_HODL_RATIO

    # Cash thresholds derived once from starting capital
    context.starting_cash = context.portfolio.starting_cash
    context.target_hodl_value = context.TARGET_HODL_RATIO * context.starting_cash
    context.reserve_value = context.RESERVE_RATIO * context.starting_cash

    context.is_buying = True
    context.i = 0
26 |
27 |
@strat.handle_data
def handle_data(context, data):
    """Track price/cash each bar and place the initial HODL order.

    Bug fix: the current price is now fetched *before* it is used. The old
    version only assigned context.price at the end of the bar, so the very
    first bar's order (context.i == 1) read an unset context.price and
    raised AttributeError.
    """
    context.price = data.current(context.asset, "price")

    context.i += 1
    if context.i == 1:
        order_target_value(context.asset, context.target_hodl_value, limit_price=context.price * 1.1)

    # Stop buying after passing the reserve threshold
    context.cash = context.portfolio.cash
    if context.cash <= context.reserve_value:
        context.is_buying = False
40 |
41 |
@strat.signal_buy
def signal_buy(context, data):
    """Buy while still in the accumulation phase and cash exceeds the price."""
    if not context.is_buying:
        return False
    return context.cash > context.price
45 |
46 |
@strat.buy_order
def buy(context):
    """Order up to the target HODL value, with a limit 10% above current price."""
    limit = context.price * 1.1
    order_target_value(context.asset, context.target_hodl_value, limit_price=limit)
50 |
51 |
@strat.analyze()
def analyze(context, results, pos):
    """Log the final cash balance and period count."""
    log.info("Ending cash: ${}".format(results.cash[-1]))
    log.info("Completed for {} trading periods".format(context.i))
57 |
58 |
59 | if __name__ == "__main__":
60 | log.info("Strategy Schema:\n{}".format(strat.serialize()))
61 | strat.run()
62 |
--------------------------------------------------------------------------------
/ml/ml/utils/metric.py:
--------------------------------------------------------------------------------
1 | import os
2 | import time
3 | from sklearn.metrics import confusion_matrix, classification_report, cohen_kappa_score, accuracy_score
4 |
5 | from ml.utils import get_algo_dir
6 |
def classification_metrics(namespace, file_name, y_true, y_pred, extra_results, y_pred_proba=False):
    """Append a classification report plus backtest stats to a metrics file.

    Args:
        namespace: algo namespace, resolved to a results directory.
        file_name: report file created/appended inside that directory.
        y_true / y_pred: equal-length label sequences (KEEP/UP/DOWN classes).
        extra_results: dict with backtest stats (start, end, minute_freq,
            return_profit_pct, sharpe/sortino ratios and their benchmarks).
        y_pred_proba: accepted for interface compatibility; currently unused.

    Raises:
        ValueError: if y_true and y_pred differ in length.
    """
    target_names = ['KEEP', 'UP', 'DOWN']
    algo_dir = get_algo_dir(namespace)
    f_path = os.path.join(algo_dir, file_name)

    if len(y_true) != len(y_pred):
        # message fix: "longitude" -> "lengths"
        raise ValueError('Prediction and solution lengths can not be different.')

    # Nothing to report on an empty run. The lengths are equal at this point,
    # so a single check suffices (the old second clause `len(y_pred)` was
    # missing its `> 0` and redundant anyway).
    if len(y_true) == 0:
        return

    with open(f_path, "a") as f:
        f.write(time.strftime("%Y/%m/%d %H:%M:%S") + '\n')
        f.write('Date Start: {}'.format(extra_results['start']) + '\n')
        f.write('Date End: {}'.format(extra_results['end']) + '\n')
        f.write('Minute Frequency: {}'.format(extra_results['minute_freq']) + '\n')
        f.write('Accuracy: {}'.format(accuracy_score(y_true, y_pred)) + '\n')
        f.write('Coefficient Kappa: {}'.format(cohen_kappa_score(y_true, y_pred)) + '\n')
        f.write('Classification Report:' + '\n')
        f.write(classification_report(y_true, y_pred, target_names=target_names))
        # "Confussion" spelling kept: downstream result files use this name
        f.write("Confussion Matrix:" + '\n')
        f.write(str(confusion_matrix(y_true, y_pred)))
        f.write('\n')
        f.write('Return Profit Percentage: {}'.format(extra_results['return_profit_pct']) + '\n')
        f.write('Sharpe Ratio: {}'.format(extra_results['sharpe_ratio']) + '\n')
        f.write('Sortino Ratio: {}'.format(extra_results['sortino_ratio']) + '\n')
        f.write('Sharpe Ratio (Bitcoin Benchmark): {}'.format(extra_results['sharpe_ratio_benchmark']) + '\n')
        f.write('Sortino Ratio (Bitcoin Benchmark): {}'.format(extra_results['sortino_ratio_benchmark']) + '\n')
        # the explicit f.close() was redundant inside the `with` block
36 |
--------------------------------------------------------------------------------
/core/kryptos/strategy/indicators/basic.py:
--------------------------------------------------------------------------------
1 | from catalyst.api import record
2 | from logbook import Logger
3 |
4 | from kryptos.utils import viz
5 | from kryptos.strategy.indicators import AbstractIndicator
6 | from kryptos import logger_group
7 |
8 | log = Logger("BasicIndicator")
9 | logger_group.add_logger(log)
10 |
11 |
def get_indicator(name, **kw):
    """Look up a basic-indicator class in this module by case-insensitive
    *name* and instantiate it with **kw.

    Raises:
        LookupError: when no indicator class of that name exists.
    """
    subclass = globals().get(name.upper())
    if subclass is not None:
        return subclass(**kw)

    # message fix: previously said "No dataset found", which was misleading
    # for an indicator lookup
    raise LookupError("No indicator found with name {}".format(name))
18 |
19 |
class RELCHANGE(AbstractIndicator):
    """Relative-change indicator over an external trend series.

    Compares each value against the rolling mean of the previous ``delta_t``
    periods; the buy/sell signals fire on the sign of the latest change.
    """

    def __init__(self, delta_t=4, **kw):
        # delta_t: rolling-window length used for both the mean and the shift
        super().__init__("RELCHANGE", delta_t=delta_t, **kw)
        self.delta_t = delta_t

    def calculate(self, trend_series):
        """Compute rolling-mean change columns from *trend_series* (a pandas
        Series) and store the result frame on ``self.outputs``."""
        self.data = trend_series

        df = trend_series.to_frame(name="val")
        df["mean"] = df["val"].rolling(self.delta_t).mean()

        # NOTE(review): `shift(freq=...)` expects a pandas frequency offset,
        # but an int delta_t is passed — confirm this shifts the index by the
        # intended number of periods rather than misbehaving.
        df["rel_change"] = df["val"] - df["mean"].shift(periods=1, freq=self.delta_t)
        df["rel_change_ratio"] = df["rel_change"] / df["mean"].shift(periods=1, freq=self.delta_t)

        # warm-up-window NaNs are treated as "no change"
        self.outputs = df.fillna(value=0)

    @property
    def default_params(self):
        # no tunable params beyond the constructor's delta_t
        return {}

    def record(self):
        """Record the latest change values into the catalyst results frame."""
        record(
            rel_change=self.outputs.rel_change[-1],
            rel_change_ratio=self.outputs.rel_change_ratio[-1],
        )

    def plot(self, results, pos, **kw):
        """Plot relative change and its ratio on twin axes with a merged legend."""
        ax = viz.plot_column(results, "rel_change", pos, label="Relative Change", color="r", **kw)
        ax2 = viz.plot_column(
            results,
            "rel_change_ratio",
            pos,
            label="Relative Change Ratio",
            color="g",
            twin=ax,
            **kw
        )

        viz.add_twin_legends([ax, ax2])

    @property
    def signals_sell(self):
        # Sell when the latest value sits below its recent rolling mean.
        try:
            return self.outputs.rel_change[-1] < 0.0

        except AttributeError as e:
            # outputs not yet calculated — treat as no signal
            self.log.exception(e)
            return False

    @property
    def signals_buy(self):
        # Buy when the latest value sits above its recent rolling mean.
        try:
            return self.outputs.rel_change[-1] > 0.0

        except AttributeError as e:
            # outputs not yet calculated — treat as no signal
            self.log.exception(e)
            return False
78 |
--------------------------------------------------------------------------------
/app/app/utils/form_utils.py:
--------------------------------------------------------------------------------
1 | from flask import current_app
2 | import datetime
3 | from app.utils import choices
4 |
5 |
def process_trading_form(form):
    """Translate a trading form into a strat trading-config dict.

    Keys whose form fields are absent (None) are dropped so the strat
    falls back to its defaults for them.
    """
    raw_config = {
        "EXCHANGE": form.exchange.data,
        "ASSET": form.asset.data,
        "CAPITAL_BASE": form.capital_base.data,
        "QUOTE_CURRENCY": form.quote_currency.data,
        "START": form.start.data,
        "END": form.end.data,
        # the following are not provided in the basic form
        "DATA_FREQ": form.data.get("data_freq"),
        "HISTORY_FREQ": form.data.get("history_freq"),
        "BARS": form.data.get("bar_period"),
        "ORDER_SIZE": form.data.get("order_size"),
        "SLIPPAGE_ALLOWED": form.data.get("slippage_allowed"),
    }
    return {key: val for key, val in raw_config.items() if val is not None}
24 |
25 |
def build_strat_dict_from_form(form):
    """Build a full strat config dict (name, trading, model/indicator) from
    a basic strategy form."""
    current_app.logger.info(form.data)

    trading_dict = process_trading_form(form)

    # Re-serialize start/end so the dict is JSON-safe
    trading_dict["START"] = datetime.datetime.strftime(
        form.start.data, "%Y-%m-%d %H:%M"
    )
    trading_dict["END"] = datetime.datetime.strftime(form.end.data, "%Y-%m-%d %H:%M")

    strat_dict = {"name": form.name.data, "trading": trading_dict}

    strat_tmpl = form.strat_template.data
    if strat_tmpl in choices.ML_MODELS:
        strat_dict["models"] = [{"name": strat_tmpl}]
    else:
        # Bug fix: the key was misspelled "inidicators", so indicator
        # templates were silently ignored downstream (the strat schema —
        # see examples/api_example.json — expects "indicators").
        strat_dict["indicators"] = [{"name": strat_tmpl}]

    # log-message fix: was "Built reading dict"
    current_app.logger.info("Built trading dict")
    current_app.logger.info(strat_dict)

    return strat_dict
48 |
49 |
def process_indicator_form(form):
    """Extract an indicator config dict from an indicator form."""
    return {
        "name": form.indicator_name.data,
        "symbol": form.symbol.data,
        "label": form.custom_label.data,
    }
57 |
58 |
def process_signal_form(form):
    """Map a signal form onto a signal config dict.

    Period-based signals (e.g. increasing/decreasing) carry a ``period``
    parameter; crossover signals carry a ``trigger`` series instead.
    """
    params = {"series": form.target_series.data}

    if form.period.data is not None:
        params["period"] = form.period.data
    else:
        params["trigger"] = form.trigger_series.data

    return {"func": form.func.data, "params": params}
71 |
--------------------------------------------------------------------------------
/core/kryptos/utils/load.py:
--------------------------------------------------------------------------------
1 | import os
2 | import pandas as pd
3 | from logbook import Logger
4 |
5 | from kryptos import strategies, logger_group
6 |
7 |
8 | STRATS = os.path.dirname(os.path.abspath(strategies.__file__))
9 |
10 | log = Logger("Load")
11 |
12 |
def import_with_3(module_name, path):
    """Load and execute a module from *path* via the Python 3 importlib API."""
    import importlib.util

    module_spec = importlib.util.spec_from_file_location(module_name, path)
    loaded = importlib.util.module_from_spec(module_spec)
    module_spec.loader.exec_module(loaded)
    return loaded
20 |
21 |
def import_with_2(module_name, path):
    """Load a module from *path* using the legacy ``imp`` API.

    ``imp`` was deprecated in Python 3.4 and removed in 3.12, so fall back
    to the importlib-based loader when it is unavailable; callers receive
    the loaded module either way.
    """
    try:
        import imp
    except ImportError:
        # imp no longer exists on modern interpreters.
        return import_with_3(module_name, path)

    return imp.load_source(module_name, path)
26 |
27 |
def get_strat(path):
    """Import a strategy module from a filename and return its ``strat`` object.

    Args:
        path (str): Filename (or path) of a module inside the STRATS directory.

    Returns:
        The ``Strategy`` instance bound to the module-level name ``strat``.

    Raises:
        AttributeError: If the module defines no ``strat`` object.
        Exception: Whatever the module import itself raised.
    """
    log.info("Attempting to import {}".format(path))
    filename = os.path.split(path)[1]
    module_name = os.path.splitext(filename)[0]

    try:
        strat_module = import_with_3(module_name, os.path.join(STRATS, filename))

    except ImportError:
        # Fall back to the legacy loader.
        strat_module = import_with_2(module_name, os.path.join(STRATS, filename))

    except Exception as e:
        log.warn("Could not import strat {} by name".format(module_name))
        log.error(e)
        raise e

    try:
        return getattr(strat_module, 'strat')

    except AttributeError as e:
        # Bug fix: the exception was not bound to `e` here, so the old code
        # raised a NameError instead of re-raising the AttributeError.
        # (The unreachable trailing `return strat_module` was also removed.)
        log.warn("No Strategy object found in {}. Note that the object must be named `strat`".format(module_name))
        log.error(e)
        raise e
56 |
57 |
def load_by_name(namespace):
    """Return the strategy whose NAMESPACE matches *namespace*.

    First tries to import ``<namespace>.py`` directly; failing that, scans
    every strategy module for a matching NAMESPACE attribute.

    Raises:
        FileNotFoundError: If no strategy declares the namespace.
    """
    strat = get_strat(namespace + ".py")

    if hasattr(strat, "NAMESPACE"):
        return strat

    log.info("Searching strat files for {} namespace".format(namespace))
    strat = None
    for s in load_strats():
        log.warning(getattr(s, "NAMESPACE", None))
        if s.NAMESPACE == namespace:
            strat = s

    if strat is None:
        raise FileNotFoundError("Could not import strategy with namespace: {}".format(namespace))

    # Bug fix: a strategy found via the NAMESPACE search was previously
    # never returned (the function fell off the end and returned None).
    return strat
74 |
75 |
def load_strats():
    """Collect every strategy object found in the strategies directory."""
    log.info("Grabbing strats from {}".format(STRATS))
    found = []
    for fname in os.listdir(STRATS):
        # Only plain strategy modules: skip dunder files such as __init__.py
        # and anything that is not a .py file.
        if "__" in fname or not fname.endswith(".py"):
            continue
        candidate = get_strat(fname)
        if hasattr(candidate, "NAMESPACE"):
            found.append(candidate)
    return found
85 |
--------------------------------------------------------------------------------
/deployment.md:
--------------------------------------------------------------------------------
1 | ## Deployment
2 |
### Initial deployment setup
4 | If this is the first time deploying, begin by pushing the images to GCR
5 |
6 | ```bash
7 | # worker
8 | cd /core
9 | gcloud builds submit --tag gcr.io/kryptos-205115/kryptos-worker --timeout 1200 .
10 |
11 | # then the app image
12 | cd /app
13 | gcloud builds submit --tag gcr.io/kryptos-205115/kryptos-app . --timeout 1200
14 |
15 | # then the ml image
16 | cd /ml
17 | gcloud builds submit --tag gcr.io/kryptos-205115/kryptos-ml . --timeout 1200
18 | ```
19 |
20 | Then deploy the app and ml services to Google App engine using the pushed images
21 |
22 | ```bash
23 | # we could drop the image_url, but this way is quicker
24 |
25 | # in app/
26 | gcloud app deploy app.yaml --image-url=gcr.io/kryptos-205115/kryptos-app
27 |
28 | # in /ml/
29 | gcloud app deploy ml.yaml --image-url=gcr.io/kryptos-205115/kryptos-ml
30 |
31 | # in /core
32 | gcloud app deploy worker.yaml --image-url=gcr.io/kryptos-205115/kryptos-worker
33 | ```
34 |
35 |
36 |
37 | ### Triggered deployments
38 | There are three build triggers in place to help automate the deployments
39 |
1. The first rebuilds and deploys the worker image if a pushed commit changes any files in the /core directory
2. The second rebuilds and deploys the ml service if changes are made to the /ml directory
3. The third rebuilds and deploys the app/default service if changes are made to the /app directory
43 |
44 | You can view the cloudbuild.yaml file in the /core and /app directories to see the steps
45 |
46 | These steps are
47 | - pulls the latest relevant image (which is why manual building needs to be done initially)
48 | - rebuilds the image by caching the latest version (this speeds up the builds)
- Tags the newly built image, making it the latest version
50 |
51 | In the case of changes to the app directory, the new image is also deployed from the cloud
52 |
53 | Always check to see if there were any errors or if the build was not triggered.
54 |
55 | ### Getting production info
56 | To view GAE instance logs
57 | ```bash
58 | $ gcloud app logs read -s
59 | ```
60 | To view worker statuses, run the following inside the *core/* dir
61 | ```bash
62 | $ rq info -c kryptos.settings
63 | ```
64 | or for the web dashboard
65 | ```bash
66 | $ rq-dashboard -c kryptos.settings
67 | ```
68 |
69 | To connect to the production database, install the google cloud local cloud-sql-proxy
70 | ```bash
71 | ./cloud_sql_proxy -instances=kryptos-205115:us-west1:kryptos-db=tcp:5432
72 | ```
73 |
--------------------------------------------------------------------------------
/core/kryptos/strategies/bear_market.py:
--------------------------------------------------------------------------------
1 | from kryptos.strategy import Strategy
2 | from kryptos.strategy.indicators import technical
3 | from catalyst.api import order_target_percent, record
4 |
5 | import logbook
6 |
# Strategy-wide logger; INFO level keeps per-bar debug noise out of normal runs.
log = logbook.Logger("BEAR_MARKET")
log.level = logbook.INFO


# Daily-frequency strategy registered under the BEAR_MARKET name.
strat = Strategy("BEAR_MARKET", data_frequency="daily")
12 |
13 |
@strat.init
def init(context):
    """One-time setup: reset the bar counter and the bear-market flag."""
    log.info("Algo is being initialzed, setting up context")
    context.IS_MARKET_BEAR = False
    context.i = 0
19 |
20 |
@strat.handle_data
def handle_data(context, data):
    """Per-bar bookkeeping: track the 60-day peak/bottom/last price and record stats."""
    log.debug("Processing new trading step")
    context.i += 1

    # Price history for roughly the last two months of daily bars.
    price_history = data.history(context.asset, fields="price", bar_count=60, frequency="1d")
    context.peak = price_history.max()
    context.bottom = price_history.min()
    # .iloc replaces the deprecated (and ambiguous) .ix accessor; both are
    # positional here, so behavior is unchanged.
    context.price = price_history.iloc[-1]

    # Cumulative return of the whole portfolio, in percent.
    Portfolio_cumulative_return = (
        context.portfolio.portfolio_value / context.portfolio.starting_cash - 1
    ) * 100

    record(
        peak=context.peak,
        bottom=context.bottom,
        cash=context.portfolio.cash,
        leverage=context.account.leverage,
        Portfolio_cumulative_return=Portfolio_cumulative_return,
    )
44 |
45 |
# Trading logic:
# NOTE(review): the original comment described a 20% drawdown trigger and a
# full sell / 90% buy, which does not match the code (the sell order trims to
# 30% and the buy order targets 75% — see sell()/buy() below). As actually
# implemented:
# - Enter "bear" (sell signal) when price falls more than 25% below the
#   60-day peak.
# - Exit "bear" (buy signal) when price rises more than 20% above the
#   60-day bottom.
@strat.signal_sell
def enter_bear(context, data):
    # True when price has dropped below 75% of the two-month peak.
    return context.price < 0.75 * context.peak
55 |
56 |
@strat.signal_buy
def exit_bear(context, data):
    """Buy signal: price has recovered at least 20% above the 60-day bottom."""
    recovery_threshold = 1.2 * context.bottom
    return context.price > recovery_threshold
60 |
61 |
@strat.sell_order
def sell(context):
    # Executed on the sell signal: trim the position to 30% of portfolio
    # value rather than exiting entirely.
    order_target_percent(context.asset, 0.3)
65 |
66 |
@strat.buy_order
def buy(context):
    # Executed on the buy signal: target 75% of portfolio value in the asset.
    order_target_percent(context.asset, 0.75)
70 |
71 |
@strat.analyze()
def analyze(context, results, pos):
    """Log final cash and the number of bars processed once the backtest ends."""
    final_cash = results.cash[-1]
    log.info("Ending cash: ${}".format(final_cash))
    log.info("Completed for {} trading periods".format(context.i))
77 |
78 |
if __name__ == "__main__":
    # Direct execution: print the serialized strategy schema, then backtest.
    log.info("Strategy Schema:\n{}".format(strat.serialize()))
    strat.run()
82 |
--------------------------------------------------------------------------------
/core/requirements.txt:
--------------------------------------------------------------------------------
1 | aiodns==1.1.1; python_version >= '3.5.2'
2 | aiohttp==3.4.4; python_version >= '3.5.2'
3 | alembic==0.9.7
4 | arrow==0.12.1
5 | async-timeout==3.0.1
6 | attrdict==2.0.0
7 | attrs==18.2.0
8 | bcolz==1.2.1
9 | boto3==1.5.27
10 | botocore==1.8.50
11 | bottleneck==1.2.1
12 | cachetools==2.1.0
13 | cchardet==2.1.1; python_version >= '3.5.2'
14 | ccxt==1.17.94
15 | certifi==2018.1.18
16 | chardet==3.0.4
17 | click==6.7
18 | colorama==0.4.0
19 | contextlib2==0.5.5
20 | cycler==0.10.0
21 | cyordereddict==1.0.0
22 | cython==0.27.3
23 | cytoolz==0.9.0.1
24 | decorator==4.3.0
25 | docutils==0.14
26 | empyrical==0.2.1
27 | enigma-catalyst==0.5.20
28 | eth-abi==1.2.2
29 | eth-account==0.2.3
30 | eth-hash==0.2.0
31 | eth-keyfile==0.5.1
32 | eth-keys==0.2.0b3
33 | eth-rlp==0.1.2
34 | eth-typing==1.3.0
35 | eth-utils==1.3.0b0
36 | flask==1.0.2
37 | google-api-core==1.6.0a1
38 | google-auth==1.5.1
39 | google-cloud-core==0.28.1
40 | google-cloud-datastore==1.7.1
41 | google-cloud-kms==0.2.0
42 | google-cloud-logging==1.8.0
43 | google-cloud-storage==1.13.0
44 | google-cloud==0.34.0
45 | google-resumable-media==0.3.1
46 | googleapis-common-protos==1.6.0b6
47 | grpc-google-iam-v1==0.11.4
48 | grpcio==1.16.0
49 | gunicorn==19.9.0
50 | hexbytes==0.1.0
51 | honcho==1.0.1
52 | idna-ssl==1.1.0; python_version < '3.7'
53 | idna==2.6
54 | inflection==0.3.1
55 | intervaltree==2.1.0
56 | itsdangerous==1.1.0
57 | jinja2==2.10
58 | jmespath==0.9.3
59 | kiwisolver==1.0.1
60 | logbook==0.12.5
61 | lru-dict==1.1.6
62 | lxml==4.2.5
63 | mako==1.0.7
64 | markupsafe==1.0
65 | matplotlib==3.0.1
66 | more-itertools==4.3.0
67 | multidict==4.4.2
68 | multipledispatch==0.4.9
69 | networkx==2.1
70 | numexpr==2.6.4
71 | numpy==1.14.0
72 | pandas-datareader==0.6.0
73 | pandas==0.19.2
74 | parsimonious==0.8.1
75 | patsy==0.5.0
76 | protobuf==3.6.1
77 | psycopg2==2.7.5
78 | pyasn1-modules==0.2.2
79 | pyasn1==0.4.4
80 | pycares==2.3.0
81 | pycryptodome==3.7.0
82 | pyparsing==2.2.2
83 | python-dateutil==2.7.3
84 | python-editor==1.0.3
85 | pytrends==4.4.0
86 | pytz==2016.4
87 | quandl==3.4.4
88 | raven==6.9.0
89 | redis==2.10.6
90 | redo==1.6
91 | requests-file==1.4.3
92 | requests-ftp==0.3.1
93 | requests-toolbelt==0.8.0
94 | requests==2.18.4
95 | rlp==1.0.3
96 | rq-dashboard==0.3.12
97 | rq==0.12.0
98 | rsa==4.0
99 | s3transfer==0.1.13
100 | scipy==1.0.0
101 | six==1.11.0
102 | sortedcontainers==1.5.9
103 | sqlalchemy==1.2.2
104 | statsmodels==0.8.0
105 | ta-lib==0.4.17
106 | tables==3.4.2
107 | toolz==0.9.0
108 | urllib3==1.22
109 | web3==4.4.1
110 | websockets==5.0.1
111 | werkzeug==0.14.1
112 | wrapt==1.10.11
113 | yarl==1.1.0; python_version >= '3.5.2'
114 |
--------------------------------------------------------------------------------
/core/kryptos/scripts/bchain_activity.py:
--------------------------------------------------------------------------------
1 | import click
2 |
3 | # import matplotlib.pyplot as plt
4 | from logbook import Logger
5 |
6 | # from kryptos.utils import viz, algo
7 | # from kryptos.datasets.quandl_data.manager import QuandleDataManager
8 |
9 | from kryptos.strategy import Strategy
10 |
11 |
12 | log = Logger("Blockchain Activity")
13 |
14 |
15 | # qdata = QuandleDataManager()
16 |
17 |
@click.command()
@click.option("--datasets", "-s", multiple=True)
def run(datasets):
    """Runs a strategy based on provided Blockchain dataset codes

    \b
    Example:
        bchain -s NTRAN -s CPTRA

    \b
    Available Dataset Codes:
      - TOTBC - Total Bitcoins
      - MKTCP - Bitcoin Market Capitalization
      - TRFEE - Bitcoin Total Transaction Fees
      - TRFUS - Bitcoin Total Transaction Fees USD
      - NETDF - Bitcoin Network Deficit
      - NTRAN - Bitcoin Number of Transactions
      - NTRAT - Bitcoin Total Number of Transactions
      - NTREP - Bitcoin Number of Transactions Excluding Popular Addresses
      - NADDU - Bitcoin Number of Unique Bitcoin Addresses Used
      - NTRBL - Bitcoin Number of Transaction per Block
      - TOUTV - Bitcoin Total Output Volume
      - ETRAV - Bitcoin Estimated Transaction Volume
      - ETRVU - Bitcoin Estimated Transaction Volume USD
      - TRVOU - Bitcoin USD Exchange Trade Volume
      - TVTVR - Bitcoin Trade Volume vs Transaction Volume Ratio
      - MKPRU - Bitcoin Market Price USD
      - CPTRV - Bitcoin Cost % of Transaction Volume
      - CPTRA - Bitcoin Cost Per Transaction
      - HRATE - Bitcoin Hash Rate
      - MIREV - Bitcoin Miners Revenue
      - ATRCT - Bitcoin Median Transaction Confirmation Time
      - BCDDC - Bitcoin Days Destroyed Cumulative
      - BCDDE - Bitcoin Days Destroyed
      - BCDDW - Bitcoin Days Destroyed (Minimum Age 1 Week)
      - BCDDM - Bitcoin Days Destroyed (Minimum Age 1 Month)
      - BCDDY - Bitcoin Days Destroyed (Minimum Age 1 Year)
      - BLCHS - Bitcoin api.blockchain Size
      - AVBLS - Bitcoin Average Block Size
      - MWTRV - Bitcoin My Wallet Transaction Volume
      - MWNUS - Bitcoin My Wallet Number of Users
      - MWNTD - Bitcoin My Wallet Number of Transaction Per Day
      - MIOPM - Bitcoin Mining Operating Margin
      - DIFF - Bitcoin Difficulty
    """

    click.secho("Executing using datasets:\n{}".format(datasets), fg="white")

    # Build a bare strategy and attach the requested Quandl columns as an
    # external dataset before running the backtest.
    strat = Strategy()

    strat.use_dataset("quandl", columns=list(datasets))
    strat.run()
70 |
71 |
72 | if __name__ == "__main__":
73 | run()
74 |
--------------------------------------------------------------------------------
/ml/ml/feature_selection/xgb.py:
--------------------------------------------------------------------------------
import itertools
import operator
import random
3 |
4 |
def xgb_embedded_feature_selection(model, importance_type='all', percentage=0.9):
    """Perform feature selection using XGBoost embedded method.

    Args:
        model(xgboost.core.Booster): XGBoost model trained.
        importance_type(string):
            'weight' - the number of times a feature is used to split the data across all trees.
            'gain' - the average gain of the feature when it is used in trees.
            'cover' - the average coverage of the feature when it is used in trees.
            'all' - merge of the three rankings above.
        percentage(float): From 0 to 1, percentage of features to filter from total columns used.

    Returns:
        list: Name columns selected.
    """

    # Validate inputs with a membership test / chained comparison instead of
    # the original long and/or chains.
    if importance_type not in ('weight', 'gain', 'cover', 'all'):
        raise ValueError("'importance_type' value is not valid ['weight', 'gain', 'cover', 'all']")
    if not 0.0 <= percentage <= 1.0:
        raise ValueError("'percentage' value is not valid [0, 1]")

    if importance_type in ('weight', 'all'):
        used_cols_weight = _get_colums_score(model, 'weight')
        selected_cols = _get_percentage_selected_cols(used_cols_weight, percentage)

    if importance_type in ('gain', 'all'):
        used_cols_gain = _get_colums_score(model, 'gain')
        selected_cols = _get_percentage_selected_cols(used_cols_gain, percentage)

    if importance_type in ('cover', 'all'):
        used_cols_cover = _get_colums_score(model, 'cover')
        selected_cols = _get_percentage_selected_cols(used_cols_cover, percentage)

    if importance_type == 'all':
        # Round-robin merge of the three rankings so each metric's top
        # features come first.  Bug fixes vs. the original:
        #  - zip() truncated to the shortest ranking, silently dropping tail
        #    features when the metrics ranked different feature counts;
        #    zip_longest keeps them all.
        #  - list(set(...)) destroyed the importance ordering, making the
        #    percentage cut select an arbitrary subset; dict.fromkeys
        #    dedupes while preserving order.
        interleaved = [
            col
            for trio in itertools.zip_longest(used_cols_weight, used_cols_gain, used_cols_cover)
            for col in trio
            if col is not None
        ]
        used_cols = list(dict.fromkeys(interleaved))
        selected_cols = _get_percentage_selected_cols(used_cols, percentage)

    return selected_cols
47 |
48 |
49 | def _get_colums_score(model, importance_type):
50 | """Get columns used by XGBoost model ordered by importance.
51 | """
52 | importance = model.get_score(fmap='', importance_type=importance_type)
53 | importance = sorted(importance.items(), key=operator.itemgetter(1))
54 | used_cols = [x[0] for x in reversed(importance)]
55 | return used_cols
56 |
57 |
58 | def _get_percentage_selected_cols(used_cols, percentage):
59 | """Get a percentage of best used columns.
60 | """
61 | num_columns = int(percentage*len(used_cols)/1.0)
62 | selected_cols = used_cols[0:num_columns]
63 | return selected_cols
64 |
--------------------------------------------------------------------------------
/core/kryptos/strategies/bbands_psar.py:
--------------------------------------------------------------------------------
1 | import talib as ta
2 | from kryptos.strategy import Strategy
3 | from kryptos.strategy.indicators import technical
4 | from catalyst.api import order_target_percent, order, record, get_open_orders
5 |
6 | import logbook
7 |
8 |
# Daily-frequency strategy combining Bollinger Bands (entry) and PSAR (exit).
strat = Strategy("BBANDS_PSAR", data_frequency="daily")

# Indicator/trading config comes from the bundled JSON file.
strat.load_json_file('bbands_psar.json')

log = logbook.Logger(strat.name)
14 |
@strat.init
def initialize(context):
    """Per-run setup: collect errors instead of aborting, and size orders."""
    # Swallow (and accumulate) per-bar errors rather than stopping the run.
    context.swallow_errors = True
    context.errors = []

    # NOTE: if context.BARS is configured, it should exceed SMA_SLOW's period
    # (e.g. 365); currently left to the JSON config.
    context.ORDER_SIZE = 0.5
    # SLIPPAGE_ALLOWED is likewise taken from the JSON config.
26 | # context.SLIPPAGE_ALLOWED =
27 |
28 |
@strat.handle_data
def trade_logic(context, data):
    """Per-bar bookkeeping: cache the current price and reset the daily trade flag."""
    log.info("handling bar {}".format(data.current_dt))

    context.price = data.current(context.asset, "price")

    # Bug fix: context.current_day is never set in initialize(), so the first
    # bar raised AttributeError. Treat a missing value as the start of a new
    # day instead.
    today = data.current_dt.floor("1D")
    if today != getattr(context, "current_day", None):
        context.traded_today = False
        context.current_day = today
39 |
40 |
@strat.buy_order
def buy(context):
    """Place a limit buy of ORDER_SIZE units, skipping when cash is short."""
    # NOTE(review): this lookup appears unused, but accessing positions may
    # lazily create the position entry — kept to preserve behavior. Verify.
    position = context.portfolio.positions[context.asset]

    required_cash = context.price * context.ORDER_SIZE
    if context.portfolio.cash < required_cash:
        log.warn(
            "Skipping signaled buy due to cash amount: {} < {}".format(
                context.portfolio.cash, (context.price * context.ORDER_SIZE)
            )
        )
        return

    # Limit price allows the configured slippage above the current price.
    order(
        asset=context.asset,
        amount=context.ORDER_SIZE,
        limit_price=context.price * (1 + context.SLIPPAGE_ALLOWED),
    )
    log.info(
        "Bought {amount} @ {price}".format(amount=context.ORDER_SIZE, price=context.price)
    )
61 |
@strat.sell_order
def sell(context):
    """Close out the position with a limit order and log the realized profit."""
    position = context.portfolio.positions[context.asset]

    if position == 0:
        log.info("Position Zero")
        return

    # Bug fix: `cost_basis` was an undefined name (NameError at runtime);
    # the intended value is the position's per-unit cost basis.
    profit = (context.price * position.amount) - (position.cost_basis * position.amount)
    # Limit price allows the configured slippage below the current price.
    order_target_percent(
        asset=context.asset,
        target=0,
        limit_price=context.price * (1 - context.SLIPPAGE_ALLOWED),
    )
    log.info(
        "Sold {amount} @ {price} Profit: {profit}".format(
            amount=position.amount, price=context.price, profit=profit
        )
    )
80 |
81 |
@strat.signal_buy(override=True)
def is_buy(context, analysis):
    """Buy when price breaks above the upper Bollinger band."""
    upperband = strat.indicator('BBANDS').outputs['upperband'][-1]
    log.info('{} {}'.format(upperband, context.price))
    if context.price > upperband:
        return True
87 |
88 |
@strat.signal_sell(override=True)
def isSell(context, analysis):
    """Sell when price falls below the parabolic SAR value."""
    psar_value = strat.indicator('SAR').outputs['SAR'][-1]
    if context.price < psar_value:
        log.info("Closing position due to PSAR")
        return True
94 |
--------------------------------------------------------------------------------
/app/app/bot/response.py:
--------------------------------------------------------------------------------
1 | from flask_assistant import ask
2 | from flask_assistant.response import _Response
3 |
4 |
class ask(_Response):
    # NOTE(review): this class shadows the `ask` imported from flask_assistant
    # above; code in this module gets this Telegram/Google-aware subclass.
    def __init__(self, speech, display_text=None):
        """Returns a response to the user and keeps the current session alive. Expects a response from the user.
        Arguments:
            speech {str} -- Text to be pronounced to the user / shown on the screen
        """
        super(ask, self).__init__(speech, display_text)
        # Keep the conversation open on Actions on Google.
        self._response["data"]["google"]["expect_user_response"] = True

        # Optional Telegram quick-reply message, appended at render time.
        self._quick_reply = None

    def reprompt(self, prompt):
        # Spoken again when the user gives no input (Google no-input prompt).
        self._response["data"]["google"]["no_input_prompts"] = [
            {"text_to_speech": prompt}
        ]

        return self

    def with_quick_reply(self, *options, text=None):
        # Default the quick-reply title to the spoken text.
        if text is None:
            text = self._speech
        # Dialogflow message type 2 == quick replies (Telegram platform).
        msg = {
            "type": 2,
            "platform": "telegram",
            "title": text,
            "parse_mode": "Markdown",
            "replies": options,
        }
        # Stored rather than appended immediately so render_response()
        # controls where the quick reply lands in the message list.
        self._quick_reply = msg
        return self
        # self._response["messages"].append(msg)
        # return self

    def render_response(self):
        # Append any pending quick reply just before the base class renders.
        if self._quick_reply:
            self._response["messages"].append(self._quick_reply)
        return super().render_response()
42 |
43 |
class inline_keyboard(ask):
    """Telegram inline-keyboard response: a message plus rows of buttons."""

    def __init__(self, msg, buttons=None):
        super().__init__(speech=msg)

        # Avoid a shared mutable default: each response gets its own rows.
        if buttons is None:
            buttons = []

        self._buttons = buttons

    def render_response(self):
        # Attach the keyboard payload before delegating to ask's renderer
        # (which may also append a pending quick reply).
        self._response["messages"].append(self._custom_msg)
        return super().render_response()

    @property
    def _custom_msg(self):
        # Dialogflow message type 4 == custom payload.
        return {"type": 4, "platform": "telegram", "payload": self._payload}

    @property
    def _payload(self):
        return {
            "telegram": {
                "text": self._speech,
                "parse_mode": "Markdown",
                "reply_markup": {"inline_keyboard": self._buttons},
            }
        }

    def add_button(self, callback_data, text):
        btn = {"text": text, "callback_data": callback_data}
        btn_row = [btn]  # inline keyboard accepts an array of button-row arrays
        self._buttons.append(btn_row)

    def add_link(self, url, text):
        btn = {"text": text, "url": url}
        btn_row = [btn]  # inline keyboard accepts an array of button-row arrays
        self._buttons.append(btn_row)

    def with_quick_reply(self, *options):
        # don't include text or self._speech, to prevent a duplicate message
        msg = {"type": 2, "platform": "telegram", "replies": options}
        self._quick_reply = msg
        return self
86 |
--------------------------------------------------------------------------------
/app/app/templates/strategy/signals.html:
--------------------------------------------------------------------------------
1 | {% extends 'base.html' %}
2 | {% from "flask_user/_macros.html" import render_field, render_submit_field %}
3 | {% from "_macros.html" import render_submit_as_btn %}
4 | {% block content %}
5 |
6 |
7 |
8 |
{%trans%}Build a Strategy{%endtrans%}
9 | Indicators
10 |
11 |
12 |
13 |
14 |
15 |
52 |
53 |
54 |
55 | {% endblock %}
56 | {% block extra_js %}
57 |
58 |
95 | {% endblock %}
96 |
--------------------------------------------------------------------------------
/examples/signals2.json:
--------------------------------------------------------------------------------
1 | {
2 | "trading": {
3 | "EXCHANGE": "bitfinex",
4 | "ASSET": "btc_usd",
5 | "DATA_FREQ": "daily",
6 | "HISTORY_FREQ": "1d",
7 | "CAPITAL_BASE": 5000,
8 | "QUOTE_CURRENCY": "usd",
9 | "START": "2017-10-10",
10 | "END": "2018-3-28",
11 | "BARS": 50,
12 | "ORDER_SIZE": 0.5,
13 | "SLIPPAGE_ALLOWED": 0.05
14 | },
15 | "datasets": [],
16 | "indicators": [
17 | {
18 | "name": "EMA",
19 | "symbol": "btc_usd",
20 | "dataset": null,
21 | "label": "EMA",
22 | "params": {
23 | "timeperiod": 30
24 | },
25 | "outputs": [
26 | "real"
27 | ]
28 | },
29 | {
30 | "name": "BBANDS",
31 | "symbol": "btc_usd",
32 | "dataset": null,
33 | "label": "BBANDS",
34 | "params": {
35 | "timeperiod": 5,
36 | "nbdevup": 2,
37 | "nbdevdn": 2,
38 | "matype": 0
39 | },
40 | "outputs": [
41 | "upperband",
42 | "middleband",
43 | "lowerband"
44 | ]
45 | },
46 | {
47 | "name": "SMA",
48 | "symbol": "btc_usd",
49 | "dataset": null,
50 | "label": "SMA_FAST",
51 | "params": {
52 | "timeperiod": 10
53 | }
54 | },
55 | {
56 | "name": "SMA",
57 | "symbol": "btc_usd",
58 | "dataset": null,
59 | "label": "SMA_SLOW",
60 | "params": {
61 | "timeperiod": 50
62 | }
63 | },
64 | {
65 | "name": "RSI",
66 | "symbol": "btc_usd",
67 | "dataset": null,
68 | "label": "RSI",
69 | "params": {
70 | "timeperiod": 14,
71 | "oversold": 30,
72 | "overbought": 70
73 | },
74 | "outputs": [
75 | "real"
76 | ]
77 | }
78 | ],
79 | "signals": {
80 | "buy": [
81 | {
82 | "func": "cross_above",
83 | "params": {
84 | "series": "SMA_FAST",
85 | "trigger": "SMA_SLOW"
86 | }
87 | },
88 | {
89 | "func": "increasing",
90 | "params": {
91 | "series": "EMA",
92 | "period": 5
93 | }
94 | },
95 | {
96 | "func": "increasing",
97 | "params": {
98 | "series": "EMA",
99 | "period": 2
100 | }
101 | }
102 | ],
103 | "sell": [
104 | {
105 | "func": "decreasing",
106 | "params": {
107 | "series": "EMA",
108 | "period": 2
109 | }
110 | },
111 | {
112 | "func": "cross_below",
113 | "params": {
114 | "series": "SMA_FAST",
115 | "trigger": "SMA_SLOW"
116 | }
117 |
118 | }
119 | ]
120 | }
121 | }
122 |
--------------------------------------------------------------------------------
/docker-compose.yaml:
--------------------------------------------------------------------------------
1 | version: '3'
2 | services:
3 | web:
4 | container_name: web
5 | image: kryptos-local-app
6 | build:
7 | context: ./app
8 | dockerfile: Dockerfile
9 | ports:
10 | - "8080:8080"
11 | volumes:
12 | - ./app:/app
13 | environment:
14 | - FLASK_DEBUG=1
15 | - FLASK_ENV=docker-dev
16 | # uses docker redis and db, but fetches other credentials from datastore
17 | - REDIS_HOST=redis
18 | - REDIS_PORT=6379
19 | - FLASK_APP=autoapp.py
20 | - SQLALCHEMY_DATABASE_URI=postgres://postgres:postgres@db:5432/kryptos
21 | - GOOGLE_APPLICATION_CREDENTIALS=/app/Kryptos-Staging-3deb8860d570.json
22 | - CONFIG_ENV=dev
23 | - PYTHONUNBUFFERED=1
24 | depends_on:
25 | - redis
26 | - db
27 | # # Infinite loop, to keep it alive, for debugging
28 | # command: bash -c "while true; do echo 'sleeping...' && sleep 10; done"\
29 | entrypoint: ['honcho', 'start', 'dev', '--no-prefix']
30 |
31 | updater:
32 | container_name: updater
33 | image: kryptos-local-app
34 | build:
35 | context: ./app
36 | dockerfile: Dockerfile
37 | ports:
38 | - "9181:9181" # for rq-dashboard
39 |
40 | volumes:
41 | - ./app:/app
42 | environment:
43 | - REDIS_HOST=redis
44 | - REDIS_PORT=6379
45 | - CONFIG_ENV=dev
46 | - GOOGLE_APPLICATION_CREDENTIALS=/app/Kryptos-Staging-3deb8860d570.json
47 | depends_on:
48 | - redis
49 | - db
50 | entrypoint: ['honcho', 'start', 'updater', '--no-prefix']
51 |
52 | worker:
53 | image: kryptos-local-worker
54 | container_name: worker
55 | build:
56 | context: ./core
57 | dockerfile: Dockerfile
58 | volumes:
59 | # - catalyst:/root/.catalyst
60 | - ./core:/core
61 | environment:
62 | - REDIS_HOST=redis
63 | - REDIS_PORT=6379
64 | - GOOGLE_APPLICATION_CREDENTIALS=Kryptos-Staging-3deb8860d570.json
65 | - PYTHONUNBUFFERED=1
66 | - CONFIG_ENV=dev
67 |
68 | depends_on:
69 | - redis
70 |
71 | ml:
72 | image: kryptos-local-ml
73 | container_name: ml
74 | build:
75 | context: ./ml
76 | dockerfile: Dockerfile
77 | volumes:
78 | - ./ml:/ml
79 | environment:
80 | - REDIS_HOST=redis
81 | - REDIS_PORT=6379
82 | - GOOGLE_APPLICATION_CREDENTIALS=Kryptos-Staging-3deb8860d570.json
83 |
84 | depends_on:
85 | - redis
86 |
87 |
88 | db:
89 | container_name: db
90 | image: postgres:10.4-alpine
91 | volumes:
92 | - postgres_data:/var/lib/postgresql/data
93 | environment:
94 | POSTGRES_DB: 'kryptos'
95 | POSTGRES_USER: postgres
96 | POSTGRES_PASSWORD: postgres
97 |
98 | # expose port 5433 instead of default
99 | # in case postgres already on host machine
100 | ports:
101 | - "5433:5432"
102 |
103 | redis:
104 | container_name: redis
105 | image: redis:4.0.5-alpine
106 | command: redis-server
107 |
108 |
109 | volumes:
110 | catalyst:
111 | postgres_data:
112 |
--------------------------------------------------------------------------------
/core/kryptos/scripts/run_all_ta.py:
--------------------------------------------------------------------------------
1 | import os
2 | import csv
3 | import talib as ta
4 | import click
5 | from multiprocessing import Pool
6 | import pandas as pd
7 |
8 | from kryptos.strategy import Strategy
9 | from kryptos.strategy.indicators import technical
10 | from kryptos.settings import PERF_DIR
11 | from kryptos.analysis.utils import quant_utils
12 |
13 |
14 | RESULT_FILE = os.path.join(PERF_DIR, 'all_ta.csv')
15 |
16 |
17 |
def run_indicator(indicator_name):
    """Backtest a single TA indicator and return the finished Strategy."""
    strategy = Strategy(indicator_name)
    strategy.add_market_indicator(indicator_name)
    click.secho('Running {}'.format(indicator_name), fg='cyan')
    strategy.run(viz=False)
    click.secho('{}: {}'.format(indicator_name, strategy.quant_results['net_profit_pct']), fg='cyan')
    return strategy
27 |
28 |
@click.command('run')
def run():
    """Backtest every TA-Lib indicator and append quant results to a CSV.

    Writes one row per indicator to RESULT_FILE and reports the most
    profitable indicator at the end.
    """
    all_ta = ta.get_functions()

    field_names = ['TA_INDICATOR', 'start_date', 'end_date', 'backtest_minutes', 'backtest_days',
                   'backtest_weeks', 'number_of_trades', 'average_trades_per_week_avg',
                   'average_trade_amount_usd', 'initial_capital', 'ending_capital',
                   'net_profit', 'net_profit_pct', 'average_daily_profit',
                   'average_daily_profit_pct', 'average_exposure', 'average_exposure_pct',
                   'net_risk_adjusted_return_pct', 'max_drawdown_pct_catalyst',
                   'max_daily_drawdown_pct', 'max_weekly_drawdown_pct', 'sharpe_ratio_avg',
                   'std_rolling_10_day_pct_avg', 'std_rolling_100_day_pct_avg',
                   'number_of_simulations']

    best_profit_pct = 0
    best_indicator = None

    # Bug fix: the header was previously re-written on every invocation even
    # though the file is opened in append mode, producing duplicate header
    # rows. Only write it when the file is first created.
    is_new_file = not os.path.exists(RESULT_FILE)
    if is_new_file:
        os.makedirs(PERF_DIR, exist_ok=True)

    with open(RESULT_FILE, 'a') as f:
        writer = csv.DictWriter(f, fieldnames=field_names)
        if is_new_file:
            writer.writeheader()
        for i in all_ta:
            strat = run_indicator(i)
            result_dict = strat.quant_results.to_dict()

            profit_pct = result_dict['net_profit_pct']['Backtest']
            if profit_pct > best_profit_pct:
                best_profit_pct, best_indicator = profit_pct, i

            # quant_results is a nested dict keyed by trading type.
            row = {'TA_INDICATOR': i}
            for k, v in result_dict.items():
                row[k] = v['Backtest']

            writer.writerow(row)

    click.secho('Best peforming indicator: {}'.format(best_indicator), fg='cyan')
    click.secho('Net Profit Percent: {}'.format(best_profit_pct), fg='cyan')
74 |
75 | #
76 | #
77 | # # Build the table
78 | # df_quant = pd.DataFrame()
79 | # for df in df_results:
80 | # df = quant_utils.build_row_table(df['results'], strat.trading_info, strat.name)
81 | # df_quant = df_quant.append(df, ignore_index=True)
82 |
83 |
84 |
85 |
86 |
87 | # pool = Pool(processes=4)
88 | # pool.map_async(run_indicator, ta.get_functions())
89 |
90 |
91 |
92 |
if __name__ == '__main__':
    # Bug fix: the entry point called run_all_ta(), which does not exist in
    # this module (NameError); the click command is named run().
    run()
95 |
--------------------------------------------------------------------------------
/app/app/models/migrations/env.py:
--------------------------------------------------------------------------------
1 | from __future__ import with_statement
2 | from alembic import context
3 | from sqlalchemy import engine_from_config, pool
4 | from logging.config import fileConfig
5 | import logging
6 |
7 | # this is the Alembic Config object, which provides
8 | # access to the values within the .ini file in use.
9 | config = context.config
10 |
11 | # Interpret the config file for Python logging.
12 | # This line sets up loggers basically.
13 | fileConfig(config.config_file_name)
14 | logger = logging.getLogger('alembic.env')
15 |
16 | # add your model's MetaData object here
17 | # for 'autogenerate' support
18 | # from myapp import mymodel
19 | # target_metadata = mymodel.Base.metadata
20 | from flask import current_app
21 | config.set_main_option('sqlalchemy.url',
22 | current_app.config.get('SQLALCHEMY_DATABASE_URI'))
23 | target_metadata = current_app.extensions['migrate'].db.metadata
24 |
25 | # other values from the config, defined by the needs of env.py,
26 | # can be acquired:
27 | # my_important_option = config.get_main_option("my_important_option")
28 | # ... etc.
29 |
30 |
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    Configures the Alembic context with just a database URL and no Engine,
    so no DBAPI needs to be installed; calls to context.execute() simply
    emit the SQL to the script output.
    """
    context.configure(url=config.get_main_option("sqlalchemy.url"))

    with context.begin_transaction():
        context.run_migrations()
48 |
49 |
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """

    # this callback is used to prevent an auto-migration from being generated
    # when there are no changes to the schema
    # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
    def process_revision_directives(context, revision, directives):
        if getattr(config.cmd_opts, 'autogenerate', False):
            script = directives[0]
            # Drop the generated script so no empty (no-op) revision file
            # is written to disk.
            if script.upgrade_ops.is_empty():
                directives[:] = []
                logger.info('No changes in schema detected.')

    # Build an Engine from the ini file's [alembic] section; NullPool since
    # a migration run only needs one short-lived connection.
    engine = engine_from_config(config.get_section(config.config_ini_section),
                                prefix='sqlalchemy.',
                                poolclass=pool.NullPool)

    connection = engine.connect()
    # Flask-Migrate forwards extra options (e.g. compare_type) via
    # configure_args; they are unpacked into the context here.
    context.configure(connection=connection,
                      target_metadata=target_metadata,
                      process_revision_directives=process_revision_directives,
                      **current_app.extensions['migrate'].configure_args)

    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        # Always release the connection, even when a migration fails.
        connection.close()
83 |
# Entry point: Alembic imports this module and we dispatch on run mode.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
88 |
--------------------------------------------------------------------------------
/core/kryptos/strategies/mean_reversion_simple.py:
--------------------------------------------------------------------------------
1 | # From catalyst examples
2 |
3 | # For this example, we're going to write a simple momentum script. When the
4 | # stock goes up quickly, we're going to buy; when it goes down quickly, we're
5 | # going to sell. Hopefully we'll ride the waves.
6 | import time
7 |
8 | import talib
9 | from kryptos.strategy import Strategy
10 | from kryptos.strategy.indicators import technical
11 | from catalyst.api import order_target_percent, order, record, get_open_orders, symbol
12 |
13 | import logbook
14 |
15 |
16 |
17 | strat = Strategy("MEAN_REVERSION", data_frequency="daily")
18 |
19 | strat.load_json_file('mean_reversion.json')
20 |
21 | log = logbook.Logger(strat.name)
22 |
23 |
24 | # To run an algorithm in Catalyst, you need two functions: initialize and
25 | # handle_data.
26 |
@strat.initialize
def initialize(context):
    """Set up per-run state and trading parameters before the first bar."""
    # Reference values established on the first bar of data.
    context.base_price = None
    context.current_day = None

    # RSI thresholds and candle aggregation size used by the signal logic.
    context.RSI_OVERSOLD = 55
    context.RSI_OVERBOUGHT = 65
    context.CANDLE_SIZE = "5T"

    # Wall-clock start, useful for measuring total run time.
    context.start_time = time.time()

    # context.set_commission(maker=0.1, taker=0.2)
    context.set_slippage(spread=0.0001)
46 |
@strat.handle_data
def trade_logic(context, data):
    """Per-bar bookkeeping: track price change relative to the first bar.

    Bug fix: the original body referenced ``price`` without ever defining
    it (NameError on the first bar); it is now fetched from the data bar.
    """
    # Current close for the traded asset.
    price = data.current(context.market, "price")

    # If base_price is not set, we use the current value. This is the
    # price at the first bar which we reference to calculate price_change.
    if context.base_price is None:
        context.base_price = price

    price_change = (price - context.base_price) / context.base_price
    cash = context.portfolio.cash

    # Save the collected frame data via record(); it becomes available to
    # the analyze() function for further analysis. The original comment
    # described this call but never made it.
    record(price=price, price_change=price_change, cash=cash)
60 |
61 |
@strat.signal_buy(override=True)
def signal_buy(context, analyze):
    """Buy when RSI is at/below the oversold threshold and nothing is held."""
    rsi_series = strat.indicator('RSI').outputs['RSI']
    holding = context.portfolio.positions[context.market].amount
    return holding == 0 and rsi_series[-1] <= context.RSI_OVERSOLD
67 |
@strat.signal_sell(override=True)
def signal_sell(context, analyze):
    """Sell when RSI is at/above the overbought threshold and a position is held."""
    rsi_series = strat.indicator('RSI').outputs['RSI']
    holding = context.portfolio.positions[context.market].amount
    return holding > 0 and rsi_series[-1] >= context.RSI_OVERBOUGHT
73 |
74 |
@strat.buy_order
def buy(context):
    """Submit a limit buy slightly above the last traded price.

    Bug fixes: the original ``def buy(context)`` was missing its colon
    (SyntaxError) and referenced an undefined ``price``.
    """
    # NOTE(review): assumes catalyst's Position exposes last_sale_price for
    # the current market -- confirm against the framework's order hooks.
    price = context.portfolio.positions[context.market].last_sale_price
    # Set a style for limit orders: slightly above market to improve fill odds.
    limit_price = price * 1.005
    order_target_percent(context.market, 1, limit_price=limit_price)
    context.traded_today = True
81 |
@strat.sell_order
def sell(context):
    """Submit a limit sell slightly below the last traded price.

    Bug fixes: the original ``def sell(context)`` was missing its colon
    (SyntaxError) and referenced undefined names (``data``, ``price``,
    ``rsi``); they are now resolved from the context and strategy state.
    """
    # NOTE(review): assumes Position.last_sale_price is available here --
    # confirm against the framework's order hooks.
    price = context.portfolio.positions[context.market].last_sale_price
    rsi = strat.indicator('RSI').outputs['RSI']
    log.info("selling - price: {}, rsi: {}".format(price, rsi[-1]))
    limit_price = price * 0.995
    order_target_percent(context.market, 0, limit_price=limit_price)
    context.traded_today = True
88 |
--------------------------------------------------------------------------------
/app/app/templates/base.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | {{ user_manager.USER_APP_NAME }}
8 |
9 |
10 |
11 |
12 |
13 |
14 |
26 |
27 |
28 |
29 |
33 |
34 | {# *** Allow sub-templates to insert extra html to the head section *** #}
35 | {% block extra_css %}{% endblock %}
36 |
37 |
38 |
39 |
40 | {% block body %}
41 | {% include "navbar.html" %}
42 | {% block menu %}
43 |
44 | {% endblock %}
45 |
46 |
47 |
48 | {# One-time system messages called Flash messages #}
49 | {% block flash_messages %}
50 | {%- with messages = get_flashed_messages(with_categories=true) -%}
51 | {% if messages %}
52 | {% for category, message in messages %}
53 | {% if category=='error' %}
54 | {% set category='danger' %}
55 | {% endif %}
56 |
{{ message|safe }}
57 | {% endfor %}
58 | {% endif %}
59 | {%- endwith %}
60 | {% endblock %}
61 |
62 | {% block main %}
63 | {% block content %}{% endblock %}
64 | {% endblock %}
65 |
66 |
67 |
68 |
69 |
73 | {% endblock %}
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 | {# *** Allow sub-templates to insert extra html to the bottom of the body *** #}
82 | {% block extra_js %}{% endblock %}
83 |
84 |
85 |
86 |
--------------------------------------------------------------------------------
/examples/signals.json:
--------------------------------------------------------------------------------
1 | {
2 | "trading": {
3 | "EXCHANGE": "bitfinex",
4 | "ASSET": "btc_usd",
5 | "DATA_FREQ": "daily",
6 | "HISTORY_FREQ": "1d",
7 | "CAPITAL_BASE": 5000,
8 | "QUOTE_CURRENCY": "usd",
9 | "START": "2017-10-10",
10 | "END": "2018-3-28",
11 | "BARS": 50,
12 | "ORDER_SIZE": 0.5,
13 | "SLIPPAGE_ALLOWED": 0.05
14 | },
15 | "datasets": [],
16 | "indicators": [
17 | {
18 | "name": "BBANDS",
19 | "symbol": "btc_usd",
20 | "dataset": null,
21 | "label": "MY_BBANDS",
22 | "params": {
23 | "timeperiod": 5,
24 | "nbdevup": 2,
25 | "nbdevdn": 2,
26 | "matype": 0
27 | },
28 | "outputs": [
29 | "upperband",
30 | "middleband",
31 | "lowerband"
32 | ]
33 | },
34 | {
35 | "name": "SMA",
36 | "symbol": "btc_usd",
37 | "dataset": null,
38 | "label": "SMA_FAST",
39 | "params": {
40 | "timeperiod": 10
41 | }
42 | },
43 | {
44 | "name": "SMA",
45 | "symbol": "btc_usd",
46 | "dataset": null,
47 | "label": "SMA_SLOW",
48 | "params": {
49 | "timeperiod": 50
50 | }
51 | },
52 | {
53 | "name": "SMA",
54 | "symbol": "btc_usd",
55 | "dataset": null,
56 | "label": "SMA",
57 | "params": {
58 | "timeperiod": 30
59 | },
60 | "outputs": [
61 | "real"
62 | ]
63 | },
64 | {
65 | "name": "EMA",
66 | "symbol": "btc_usd",
67 | "dataset": null,
68 | "label": "EMA",
69 | "params": {
70 | "timeperiod": 30
71 | },
72 | "outputs": [
73 | "real"
74 | ]
75 | },
76 | {
77 | "name": "MACDFIX",
78 | "symbol": "btc_usd",
79 | "dataset": null,
80 | "label": "MACDFIX",
81 | "params": {
82 | "signalperiod": 9
83 | },
84 | "outputs": [
85 | "macd",
86 | "macdsignal",
87 | "macdhist"
88 | ]
89 | }
90 | ],
91 | "signals": {
92 | "buy": [
93 | {
94 | "func": "cross_above",
95 | "params": {
96 | "series": "SMA_FAST",
97 | "trigger": "SMA_SLOW"
98 | }
99 | },
100 | {
101 | "func": "increasing",
102 | "params": {
103 | "series": "EMA",
104 | "period": 5
105 | }
106 | },
107 | {
108 | "func": "increasing",
109 | "params": {
110 | "series": "MY_BBANDS.middleband",
111 | "period": 2
112 | }
113 | }
114 | ],
115 | "sell": [
116 | {
117 | "func": "decreasing",
118 | "params": {
119 | "series": "EMA",
120 | "period": 2
121 | }
122 | },
123 | {
124 | "func": "cross_below",
125 | "params": {
126 | "series": "SMA_FAST",
127 | "trigger": "SMA_SLOW"
128 | }
129 |
130 | }
131 | ]
132 | }
133 | }
134 |
--------------------------------------------------------------------------------
/ml/README.md:
--------------------------------------------------------------------------------
1 | ### Running Machine Learning Strategies from the CLI
2 |
3 | To create a strategy using ML models:
4 | ```bash
5 | $ strat -ml xgboost
6 | $ strat -ml lightgbm
7 | $ strat -ml lightgbm -ml xgboost # You buy if both models get buy signal and vice-versa.
8 | ```
9 |
10 | By default, Machine Learning models use:
11 | * MIN_ROWS_TO_ML -> Minimum number of rows in the dataset to apply Machine Learning
12 |
13 | * CLASSIFICATION_TYPE -> Labeling type:
14 | 1. Regression
15 | 2. Binary Classification (DOWN / UP)
16 | 3. Multiclass Classification (DOWN / KEEP / UP)
17 |
18 | * STOP_LOSS -> Percentage to Stop-Loss
19 | * TAKE_PROFIT -> Percentage to Take-Profit
20 | * NORMALIZATION -> True to normalize the data; False to leave it as-is. Also, you can select the method to use ('max', 'diff' or 'std').
21 |
22 |
23 | #### Feature Engineering techniques
24 |
25 | Using dates features, tsfresh, fbprophet and technical analysis (ta-lib) libraries.
26 |
27 | You need to set the next setting variables:
28 |
29 | * FE_DATES -> True to add dates features; False don't add any feature.
30 | * FE_TSFRESH -> True to add tsfresh features; False don't add any feature.
31 | * FE_TA -> True to add ta features; False don't add any feature.
32 | * FE_FBPROPHET -> True to add fbprophet features; False don't add any feature.
33 | * FE_UTILS -> True to add utils features; False don't add any feature.
34 |
35 |
36 | #### Hyper parameters optimization
37 |
38 | Using Hyperopt library.
39 |
40 | You need to set the OPTIMIZE_PARAMS setting variable:
41 | * 'enabled' -> True to apply hyper model params optimization; False don't apply.
42 | * 'iterations' -> Number of iterations to optimize model params
43 | * 'n_evals' -> Number of evaluations to hyperopt
44 | * 'size' -> Test dataframe size to optimize model params
45 |
46 |
47 | #### Feature Selection techniques
48 |
49 | Using embedded, filter and wrapper methods: https://machinelearningmastery.com/an-introduction-to-feature-selection/
50 |
51 | You need to set the FEATURE SELECTION setting variable:
52 |
53 | * 'enabled' -> Apply feature selection
54 | * 'n_iterations' -> Number of iterations to perform feature selection
55 | * 'method' -> https://machinelearningmastery.com/an-introduction-to-feature-selection/ -> embedded | filter | wrapper
56 |
57 |
58 | #### Feature Exploration techniques
59 |
60 | We use plot_importance methods of XGBoost and LightGBM to explore in detail the feature importance in the models. Also, we use 'shap library' to get more information.
61 |
62 | You can set the VISUALIZE_MODEL setting variable:
63 |
64 | * 'enabled' -> Apply feature Exploration
65 | * 'n_iterations' -> Number of iterations to get detailed information.
66 |
67 |
68 | #### Extra datasets
69 |
70 | Also, you can add external datasets as features too (to work with daily frequency only):
71 |
72 | Google Trends
73 | ```bash
74 | $ strat -d google -c "bitcoin" -c "btc" -ml xgboost
75 | or
76 | $ strat -ml xgboost -d google -c "bitcoin" -c "btc"
77 | ```
78 |
79 | Quandl
80 | ```bash
81 | $ strat -d quandl -c 'MKTCP' -c 'NTRAN' -ml xgboost
82 | or
83 | $ strat -ml xgboost -d quandl -c 'MKTCP' -c 'NTRAN'
84 | ```
85 |
86 |
87 | #### Data pre visualization
88 |
89 | We generate profile reports from a pandas DataFrame using pandas-profiling tool.
90 |
91 | You can set the PROFILING_REPORT setting variable:
92 |
93 | * 'enabled' -> Apply profiling report generation
94 | * 'n_iterations' -> Number of iterations to visualize input data.
95 |
96 | #### Results
97 |
98 | TODO: talk about confusion matrix...
99 |
--------------------------------------------------------------------------------
/core/kryptos/worker/jobs.py:
--------------------------------------------------------------------------------
1 | import json
2 | import logbook
3 | import talib as ta
4 | from typing import List, Set, Tuple
5 | from talib import abstract as ab
6 | import ccxt
7 |
8 | from kryptos import logger_group
9 | from kryptos.strategy import Strategy
10 | from kryptos.settings import REDIS_HOST, REDIS_PORT
11 |
12 |
13 | log = logbook.Logger("WorkerJobs")
14 | logger_group.add_logger(log)
15 | log.warn(f"Using Redis connection {REDIS_HOST}:{REDIS_PORT}")
16 |
17 |
def run_strat(
    strat_json,
    strat_id,
    user_id=None,
    telegram_id=None,
    live=False,
    simulate_orders=True,
):
    """Deserialize a strategy, run it, and return its quant results as JSON.

    Returns None when the strategy produced no results.
    """
    log.info(f"Worker received job for strat {strat_id}")

    strat = Strategy.from_dict(json.loads(strat_json))
    strat.id = strat_id
    strat.telegram_id = telegram_id

    strat.run(
        viz=False,
        live=live,
        simulate_orders=simulate_orders,
        user_id=user_id,
        as_job=True,
    )

    results = strat.quant_results
    if results is None:
        log.warning("No results from strategy")
        return None

    return results.to_json()
45 |
46 |
47 | ## TA-LIB utils ##
def indicator_group_name_selectors() -> List[Tuple[str, str]]:
    """Return (value, label) select options for each TA-Lib group name.

    Fix: the original annotation ``[(str, str)]`` was a list literal, not a
    typing construct; the manual append loop is now a comprehension.
    """
    return [(k, k) for k in ta.get_function_groups()]
54 |
55 |
def all_indicator_selectors() -> List[Tuple[str, str]]:
    """Return select options for every TA-Lib indicator abbreviation.

    Fix: replaced the invalid ``[(str, str)]`` annotation and the manual
    append loop with proper typing and a comprehension.
    """
    return [(i, i) for i in ta.get_functions()]
62 |
63 |
def _get_indicator_params(indicator_abbrev):
    """Look up the default parameter mapping of a TA-Lib abstract function."""
    return getattr(ab, indicator_abbrev).parameters
67 |
68 |
def get_indicators_by_group(group: str) -> List[Tuple[str, str]]:
    """Return select options containing the abbreviations of a group's indicators.

    Fixes: the original iterated ``range(len(...))`` and resolved
    ``func.info["display_name"]`` on every pass without using it (a wasted
    attribute lookup per indicator); the annotation was a list literal.
    """
    return [(abbrev, abbrev) for abbrev in ta.get_function_groups()[group]]
79 |
80 |
def get_exchange_asset_pairs(exchange: str) -> List[str]:
    """Return the exchange's market pairs as lowercase 'base_quote' symbols.

    Fix: the original ``[str]`` annotation was a list literal, not a type.
    """
    log.debug(f"Fetching {exchange} markets with ccxt")
    markets = getattr(ccxt, exchange)().load_markets()
    # ccxt uses 'BASE/QUOTE'; normalize to the 'base_quote' form used here.
    return [pair.replace("/", "_").lower() for pair in markets]
90 |
91 |
def get_exchange_quote_currencies(exchange: str) -> Set[str]:
    """Return every quote currency traded on the given exchange."""
    pairs = get_exchange_asset_pairs(exchange)
    # Each symbol is 'base_quote'; collect the unique quote halves.
    return {quote for _, quote in (s.split("_") for s in pairs)}
99 |
100 |
def get_available_base_currencies(exchange: str, quote_currency: str) -> Set[str]:
    """Return all base currencies tradable against ``quote_currency``."""
    bases = set()
    for sym in get_exchange_asset_pairs(exchange):
        # Cheap substring pre-filter, then an exact match on the quote half.
        if quote_currency not in sym:
            continue
        base, quote = sym.split("_")
        if quote == quote_currency:
            bases.add(base)
    return bases
110 |
--------------------------------------------------------------------------------
/core/ingester.py:
--------------------------------------------------------------------------------
1 | import time
2 | from logbook import Logger
3 | import multiprocessing
4 | from catalyst.exchange.exchange_bundle import ExchangeBundle
5 | from rq import Connection, Worker
6 | import pandas as pd
7 | import redis
8 |
9 | from kryptos import logger_group
10 | from kryptos.settings import REDIS_HOST, REDIS_PORT
11 |
12 |
13 | CONN = redis.Redis(host=REDIS_HOST, port=REDIS_PORT)
14 |
15 |
16 | log = Logger("INGESTER")
17 | logger_group.add_logger(log)
18 | log.warn(f"Using Redis connection {REDIS_HOST}:{REDIS_PORT}")
19 |
20 |
def ingest_exchange(exchange, symbol=None, start=None, end=None):
    """Ingest daily and minute bundle data for an exchange.

    Args:
        exchange (str): exchange name understood by catalyst.
        symbol: optional symbol filter; None ingests all symbols.
        start, end: timeframe bounds, anything pd.to_datetime accepts.
    """
    exchange_bundle = ExchangeBundle(exchange)

    if symbol is None:
        log.warn(f"Queuing ingest {exchange} for all symbols")
    else:
        log.warn(f"Queuing ingest {exchange} for {symbol}")

    log.warn(f"Will ingest timeframe {start} - {end}")

    # Convert once; both frequencies use the same UTC bounds.
    start_ts = pd.to_datetime(start, utc=True)
    end_ts = pd.to_datetime(end, utc=True)

    # The daily and minute ingests were duplicated verbatim; loop instead.
    for freq in ("daily", "minute"):
        log.info(f"Ingesting {exchange} {freq} data")
        exchange_bundle.ingest(
            freq,
            start=start_ts,
            end=end_ts,
            include_symbols=symbol,
            show_progress=True,
            show_breakdown=True,
            show_report=True,
        )
        log.info(f"Done ingesting {freq} {exchange} data")

    log.info("Ingest completed")
54 |
55 |
def ingest_from_trade_config(config):
    """Ingest exchange bundle data for a given strategy time frame.

    Args:
        config (dict): trading config with EXCHANGE, ASSET, DATA_FREQ,
            START and END keys.

    Raises:
        ValueError: if no exchange is specified. The original only logged
        the error and then crashed on the KeyError below.
    """
    if config.get("EXCHANGE") is None:
        log.error("must specify an exchange name")
        raise ValueError("must specify an exchange name")

    exchange_bundle = ExchangeBundle(config["EXCHANGE"])

    log.notice(
        "Ingesting {} exchange bundle {} - {}...".format(
            config["EXCHANGE"], config["START"], config["END"]
        )
    )
    exchange_bundle.ingest(
        data_frequency=config["DATA_FREQ"],
        include_symbols=config["ASSET"],
        exclude_symbols=None,
        start=pd.to_datetime(config["START"], utc=True),
        end=pd.to_datetime(config["END"], utc=True),
        show_progress=True,
        show_breakdown=True,
        show_report=True,
        csv=None,
    )
80 |
81 |
82 | # def queue_ingest(exchange, symbol=None, start=None, end=None):
83 | # if symbol is None:
84 | # log.warn(f'Queuing ingest {exchange} for all symbols')
85 | # else:
86 | # log.warn(f'Queuing ingest {exchange} for {symbol}')
87 |
88 | # q = Queue('ingest', connection=CONN)
89 | # return q.enqueue(load.ingest_exchange, args=(exchange, symbol, start, end))
90 |
91 |
# Entry point: start one rq worker for the 'ingest' queue, then re-ingest
# a fixed set of exchanges every 12 hours in this process.
if __name__ == "__main__":

    with Connection(CONN):
        log.info("Starting ingest worker")
        # The rq worker runs in a child process so the loop below can proceed.
        multiprocessing.Process(target=Worker(["ingest"]).work).start()

        # allow worker to start up
        time.sleep(5)

        while True:
            for ex in ["bitfinex", "bittrex", "poloniex"]:
                ingest_exchange(ex)

            # re-ingest every 12 hours
            time.sleep(43200)
107 |
--------------------------------------------------------------------------------
/core/kryptos/logger.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | import logbook
4 | from logbook.more import ColorizedStderrHandler
5 | from logbook.handlers import StringFormatterHandlerMixin
6 | import google.cloud.logging
7 |
8 |
9 | from kryptos.settings import LOG_DIR, CLOUD_LOGGING
10 |
11 | logger_group = logbook.LoggerGroup()
12 | # logger_group.level = logbook.INFO
13 |
14 | logbook.set_datetime_format("utc")
15 |
16 | cloud_client = google.cloud.logging.Client()
17 |
18 | APP_LOG = os.path.join(LOG_DIR, "app.log")
19 | ERROR_LOG = os.path.join(LOG_DIR, "error.log")
20 |
21 |
def add_logger(logger, *handlers):
    """Attach optional handlers to ``logger`` and register it with the module's logger group."""
    logger.handlers.extend(handlers)
    logger_group.add_logger(logger)
25 |
26 |
class GoogleCloudHandler(logbook.Handler, StringFormatterHandlerMixin):
    """Logbook handler that forwards records to Google Cloud Logging."""

    def __init__(self, level=0, filter=None, bubble=False, format_string=None):

        logbook.Handler.__init__(self, level, filter, bubble)
        StringFormatterHandlerMixin.__init__(self, format_string)

    def log_to_cloud(self, record):
        """Send one record to a cloud logger named after the record's channel."""
        cloud_logger = cloud_client.logger(record.channel)
        # StratLogger logs extra info (strat id, mode, user) which we
        # forward as a structured payload rather than plain text.
        if record.channel == "STRATEGY":
            cloud_logger.log_struct(
                {
                    "strat_id": record.extra["strat_id"],
                    "mode": record.extra["mode"],
                    "message": self.format(record),
                    "user_id": record.extra["user_id"],
                },
                severity=record.level_name,
                labels={"channel": record.channel},
            )

        else:
            cloud_logger.log_text(self.format(record), severity=record.level_name)

    def emit(self, record):
        if CLOUD_LOGGING:
            try:
                self.log_to_cloud(record)
            except Exception:
                # Bug fix: the original built a Warning instance without
                # raising or reporting it, silently dropping the failure.
                # Surface it without letting logging errors crash the app.
                import warnings

                warnings.warn("Could not emit cloud log")
57 |
58 |
def setup_logging():
    """Configure logbook handlers (cloud, rotating files, stdout) and push them on the thread stack."""
    os.makedirs(LOG_DIR, exist_ok=True)

    format_string = "[{record.time:%H:%M:%S}] {record.level_name}: {record.channel}:{record.extra[strat_id]} {record.message}"

    # NullHandler first so records not matched below are swallowed
    # instead of falling through to logbook's default stderr handler.
    handlers = [logbook.NullHandler()]

    if CLOUD_LOGGING:
        cloud_handler = GoogleCloudHandler(level="DEBUG", bubble=True, format_string=format_string)
        handlers.append(cloud_handler)

    file_handler = logbook.RotatingFileHandler(
        APP_LOG, level="DEBUG", bubble=True, format_string=format_string
    )

    # Console gets INFO and above only.
    stream_handler = logbook.StreamHandler(sys.stdout, level="INFO", bubble=True)
    stream_handler.format_string = format_string

    # Errors additionally go to a separate file with an expanded format.
    error_file_handler = logbook.RotatingFileHandler(ERROR_LOG, level="ERROR", bubble=True)
    error_file_handler.format_string = """
----------------------------------------------------------------------------------
{record.time:%H:%M:%S} KRYPTOS:{record.channel}:{record.level_name}:

{record.message}

Module: {record.module}:{record.lineno}
Function: {record.func_name}

Channel: {record.channel}
Trade Date: {record.extra[strat_date]}

Exception: {record.formatted_exception}

----------------------------------------------------------------------------------
"""

    handlers.extend([file_handler, stream_handler, error_file_handler])

    setup = logbook.NestedSetup(handlers)

    # Install for the current thread; all loggers created afterwards inherit it.
    setup.push_thread()
100 |
--------------------------------------------------------------------------------
/core/kryptos/settings.py:
--------------------------------------------------------------------------------
1 | import os
2 | import json
3 | from google.cloud import datastore
4 |
5 |
def get_from_datastore(config_key, env):
    """Fetch one settings value from the Datastore 'Settings' entity for ``env``."""
    client = datastore.Client()
    print("Fetching {}".format(config_key))

    entity = client.get(client.key("Settings", env))
    return entity[config_key]
14 |
15 |
16 | CONFIG_ENV = os.getenv("CONFIG_ENV", 'production')
17 | PROJECT_ID = os.getenv("PROJECT_ID", "kryptos-205115")
18 | PLATFORM_DIR = os.path.abspath(os.path.dirname(__file__))
19 | BASE_DIR = os.path.dirname(PLATFORM_DIR)
20 | PERF_DIR = os.path.join(BASE_DIR, "performance_results")
21 | LOG_DIR = os.path.join(BASE_DIR, "logs")
22 |
REDIS_HOST = os.getenv("REDIS_HOST", "10.0.0.3")
# Bug fix: os.getenv returns a *string* when the variable is set, but the
# default was an int -- consumers saw an inconsistent type. Cast to int.
REDIS_PORT = int(os.getenv("REDIS_PORT", 6379))

SENTRY_DSN = os.getenv("SENTRY_DSN", None)

# Bug fix: any non-empty string (even "false") was truthy before; parse the
# common boolean spellings so CLOUD_LOGGING is a real bool.
CLOUD_LOGGING = str(os.getenv('CLOUD_LOGGING', False)).lower() in ('1', 'true', 'yes')
29 |
30 | REMOTE_BASE_URL = "https://kryptos-205115.appspot.com"
31 | LOCAL_BASE_URL = "http://web:8080"
32 |
33 | if CONFIG_ENV == 'dev':
34 | WEB_URL = LOCAL_BASE_URL
35 | else:
36 | WEB_URL = REMOTE_BASE_URL
37 |
38 |
39 | STRAT_DIR = os.path.join(PLATFORM_DIR, "strategy")
40 | DEFAULT_CONFIG_FILE = os.path.join(STRAT_DIR, "config.json")
41 |
42 |
43 | QUEUE_NAMES = ["paper", "live", "backtest", "ta"]
44 |
45 | with open(DEFAULT_CONFIG_FILE, "r") as f:
46 | DEFAULT_CONFIG = json.load(f)
47 |
48 |
49 | # Optionally set metrics here instead of with the metrics "-m" option
50 | METRICS = [
51 | # 'algo_volatility',
52 | "alpha",
53 | # 'benchmark_period_return',
54 | # 'benchmark_volatility',
55 | "beta",
56 | # 'gross_leverage',
57 | # 'long_exposure',
58 | # 'long_value',
59 | # 'longs_count',
60 | "max_drawdown",
61 | # 'max_leverage',
62 | # 'net_leverage',
63 | "pnl",
64 | "sharpe",
65 | # 'short_exposure',
66 | # 'short_value',
67 | # 'shorts_count',
68 | "sortino",
69 | ]
70 |
71 |
72 | # Technical Analysis Settings
class TAConfig(object):
    """Namespace of tuning constants for the bundled TA strategies.

    Constants are grouped by the strategy module that consumes them; a few
    (e.g. RSI thresholds) are shared across modules, as noted inline.
    """

    # global
    BARS = 365
    ORDER_SIZE = 0.01
    SLIPPAGE_ALLOWED = 0.05

    # bbands.py
    # MATYPE = ta.MA_Type.T3
    SAR_ACCEL = 0.02
    SAR_MAX = 0.2

    # macdfix.py
    MACD_SIGNAL = 9
    RSI_OVERSOLD = 55
    RSI_OVERBOUGHT = 65

    # mean_reversion.py
    # RSI_OVERSOLD = 55  # defined in macdfix section
    # RSI_OVERBOUGHT = 65  # defined in macdfix section
    CANDLE_SIZE = "5T"

    # rsi_profit_target.py
    MAX_HOLDINGS = 0.2
    # RSI_OVERSOLD = 30  # defined in macdfix section
    RSI_OVERSOLD_BBANDS = 45
    RSI_OVERBOUGHT_BBANDS = 55
    TARGET = 0.15
    STOP_LOSS = 0.1
    STOP = 0.03

    # rsi_ta.py
    RSI_PERIOD = 7
    RSI_OVER_BOUGHT = 70
    RSI_OVER_SOLD = 30
    RSI_AVG_PERIOD = 15

    # sma_crossover.py
    # sma_macd.py
    # NOTE(review): SMA_FAST/SMA_SLOW look like absolute price levels, not
    # periods -- confirm how sma_crossover.py consumes them.
    SMA_FAST = 5503.84
    SMA_SLOW = 4771.08
    MACD_FAST = 12
    MACD_SLOW = 26
    # MACD_SIGNAL = 9  # defined in macdfix

    # stoch_rsi.py
    TIMEPERIOD = 9
    FASTK_PERIOD = 5
    FASTD_PERIOD = 3
    FASTD_MATYPE = 0
    # STOCH_OVER_BOUGHT = 20  # defined in stochastics section
    # STOCH_OVER_SOLD = 80  # defined in stochastics section

    # stochastics.py
    STOCH_K = 14
    STOCH_D = 3

    STOCH_OVERBOUGHT = 80
    STOCH_OVERSOLD = 20
132 |
--------------------------------------------------------------------------------
/app/app/templates/strategy/trading.html:
--------------------------------------------------------------------------------
1 | {% extends 'flask_user/_authorized_base.html' %}
2 | {% from "flask_user/_macros.html" import render_field, render_submit_field %}
3 | {% from "_macros.html" import render_form %}
4 | {% block content %}
5 |
6 | {%trans%}Build a Strategy{%endtrans%}
7 |
8 | Basic Configuration
9 |
10 | {{ render_form(form) }}
11 |
12 |
13 | {% block extra_js %}
14 |
120 | {% endblock %}
121 |
122 |
123 |
124 | {% endblock %}
125 |
--------------------------------------------------------------------------------
/app/app/app.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """The flask app module, containing the app factory function."""
3 | import os
4 | from flask import Flask
5 | from flask.helpers import get_debug_flag
6 | import logging
7 |
8 | from flask_user import UserManager
9 | import rq_dashboard
10 |
11 | from app import api, bot, models, task
12 | from app.web import account, strategy, public
13 | from app.extensions import cors, db, migrate, sentry
14 | from app.settings import DockerDevConfig, ProdConfig
15 |
16 |
17 | logging.getLogger("flask_assistant").setLevel(logging.INFO)
18 |
19 |
def in_docker():
    """Return True when running inside a Docker container (cgroup heuristic)."""
    cgroup_path = "/proc/self/cgroup"
    if not os.path.exists(cgroup_path):
        return False
    with open(cgroup_path, "r") as procfile:
        for line in procfile:
            # Inside a container, cgroup paths contain a 'docker' segment.
            if "docker" in line.strip().split("/"):
                print("**Inside Docker container, will disable visualization**")
                return True
    return False
31 |
32 |
def get_config():
    """Select the config class: DockerDevConfig when Flask debug is on, else ProdConfig."""
    # if not in_docker():
    #     config = DevConfig
    return DockerDevConfig if get_debug_flag() else ProdConfig
44 |
45 |
def create_app(config_object=None):
    """An application factory, as explained here: http://flask.pocoo.org/docs/patterns/appfactories/.

    :param config_object: The configuration object to use; when None, one
        is selected via get_config().
    """
    if config_object is None:
        config_object = get_config()

    app = Flask(__name__.split(".")[0])

    # rq_dashboard defaults first so the chosen config can override them.
    app.config.from_object(rq_dashboard.default_settings)
    app.config.from_object(config_object)
    app.logger.warn("Using {}".format(config_object))

    register_extensions(app)
    register_blueprints(app)

    app.logger.warn("USING DB {}".format(app.config["SQLALCHEMY_DATABASE_URI"]))
    return app
63 |
64 |
def register_extensions(app):
    """Register Flask extensions.

    Flask-Assistant does not need to be initialized here if declared as a blueprint.
    Other extensions such as flask-sqlalchemy and flask-migrate are registered here.
    If the entire flask app consists of only the Assistant, uncomment the code below.
    """

    sentry.init_app(app)
    # Allow cross-origin requests from anywhere on every route.
    cors.init_app(app, resources={r"*": {"origins": "*"}})
    db.init_app(app)
    migrate.init_app(app, db, directory=app.config["MIGRATIONS_DIR"])

    # Setup Flask-User and specify the User data-model
    UserManager(app, db, models.User)

    # apply any/all pending migrations.
    # with app.app_context():
    #     from flask_migrate import upgrade as _upgrade
    #     _upgrade()

    return None
87 |
88 |
def register_blueprints(app):
    """Register Flask blueprints.

    When Flask-Assistant is used to create a blueprint within a standard flask app,
    it must be registered as such, rather than with init_app().

    If the entire flask app consists of only the Assistant, comment out the code below.
    """
    # web blueprints
    web_blueprints = (
        public.views.blueprint,
        account.views.blueprint,
        strategy.views.blueprint,
    )
    for bp in web_blueprints:
        app.register_blueprint(bp)

    # backend blueprints
    app.register_blueprint(api.views.api)
    app.register_blueprint(bot.assistant.blueprint)
    app.register_blueprint(rq_dashboard.blueprint, url_prefix="/rq")

    return None
108 |
--------------------------------------------------------------------------------
/app/app/templates/account/strategy_status.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 | {% extends "base.html" %}
9 | {% block content %}
10 |
11 |
[[stratInfo.meta.config.name]]
12 |
13 |
14 |
15 |
16 |
17 | Status: [[stratInfo.status]]
18 |
19 |
20 |
21 | Strategy ID: [[strat_id]]
22 | Name: [[stratInfo.meta.config.name]]
23 | Job started at: [[stratInfo.started_at || '']]
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 | Config
37 |
38 |
39 |
[[stratInfo.meta.config || '']]
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 | Logs
50 |
51 |
52 | Date:
[[stratInfo.meta.date || ""]]
53 | Output:
[[stratInfo.meta.output]]
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 | Result
64 |
65 |
66 |
[[stratInfo.result || '']]
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
120 |
121 |
122 | {% endblock %}
123 |
--------------------------------------------------------------------------------
/core/kryptos/utils/auth.py:
--------------------------------------------------------------------------------
1 | import os
2 | import json
3 | from typing import Dict
4 | from pathlib import Path
5 |
6 | import logbook
7 |
8 | from kryptos.settings import PROJECT_ID
9 | from kryptos.utils import storage_client
10 |
11 |
12 | from google.cloud import kms_v1
13 |
14 | key_client = kms_v1.KeyManagementServiceClient()
15 |
16 |
17 | log = logbook.Logger("ExchangeAuth")
18 |
19 |
def get_auth_alias_path(user_id: int, exchange_name: str) -> str:
    """Return the path of the catalyst auth-alias json file for a user/exchange.

    Ensures the exchange directory (~/.catalyst/data/exchanges/<exchange>)
    exists before returning the file path.

    Note: the annotation was corrected from ``str`` to ``int`` to match every
    other function in this module (f-string formatting accepts either).

    Args:
        user_id (int): strategy's user ID
        exchange_name (str): exchange name (case-insensitive)

    Returns:
        str: path of the user's auth<user_id>.json file inside the catalyst dir
    """
    home_dir = str(Path.home())
    # catalyst stores exchange data under lower-cased exchange names
    exchange_dir = os.path.join(home_dir, ".catalyst/data/exchanges/", exchange_name.lower())
    os.makedirs(exchange_dir, exist_ok=True)
    user_file = f"auth{user_id}.json"
    file_name = os.path.join(exchange_dir, user_file)
    return file_name
27 |
28 |
def decrypt_auth_key(user_id: int, exchange_name: str, ciphertext: bytes) -> Dict[str, str]:
    """Decrypt a user's exchange auth blob via Google Cloud KMS.

    Args:
        user_id (int): strategy's user ID
        exchange_name (str): exchange the credentials belong to
        ciphertext (bytes): KMS-encrypted auth json

    Returns:
        Dict[str, str]: decrypted auth credentials
    """
    log.debug("decrypting exchange auth")
    # One crypto key per (exchange, user) pair under the "exchange_auth" ring.
    key_name = f"{exchange_name}_{user_id}_key"
    key_path = key_client.crypto_key_path_path(PROJECT_ID, "global", "exchange_auth", key_name)

    response = key_client.decrypt(key_path, ciphertext)
    log.debug(f"successfully decrypted user {user_id} {exchange_name} auth")
    return json.loads(response.plaintext)
48 |
49 |
def get_encrypted_auth(user_id: int, exchange_name: str) -> bytes:
    """Download a user's encrypted exchange auth blob from Cloud Storage.

    Args:
        user_id (int): strategy's user ID
        exchange_name (str): exchange the credentials belong to

    Returns:
        bytes: ciphertext - encrypted auth json
    """
    log.debug("Fetching encrypted user exchange auth from storage")
    blob_name = f"auth_{exchange_name}_{user_id}_json"
    blob = storage_client.get_bucket("catalyst_auth").blob(blob_name)
    encrypted_text = blob.download_as_string()
    log.debug("obtained encrypted auth")
    return encrypted_text
67 |
68 |
def save_to_catalyst(user_id: int, exchange_name: str, auth_dict: Dict[str, str]) -> None:
    """Write decrypted auth credentials to the user's catalyst auth-alias file."""
    target = get_auth_alias_path(user_id, exchange_name)

    with open(target, "w") as f:
        log.debug(f"Writing auth_json_str to {target}")
        json.dump(auth_dict, f)
76 |
77 |
def get_user_auth_alias(user_id: int, exchange_name: str) -> Dict[str, str]:
    """Fetches user exchange auth data and returns the catalyst auth alias

    Downloads the encrypted credentials from storage, decrypts them via KMS,
    writes them to the catalyst auth file, then builds the alias mapping that
    tells catalyst which auth json file to load.

    Args:
        user_id (int): strategy's user ID
        exchange_name (str): name of exchange to authenticate

    Returns:
        Dict[str, str]: auth alias mapping exchange name to the auth file stem
            (e.g. {"binance": "auth42"}) for catalyst to use
    """
    encrypted = get_encrypted_auth(user_id, exchange_name)
    auth_dict = decrypt_auth_key(user_id, exchange_name, encrypted)
    save_to_catalyst(user_id, exchange_name, auth_dict)
    auth_alias = {exchange_name: f"auth{user_id}"}
    log.info("Fetched user auth and set up auth_alias")

    return auth_alias
98 |
99 |
def delete_alias_file(user_id: int, exchange_name: str) -> None:
    """Remove the user's catalyst auth-alias file for the given exchange."""
    log.debug(f"Deleting user {user_id}'s {exchange_name} auth alias file")
    os.remove(get_auth_alias_path(user_id, exchange_name))
104 |
105 |
106 |
--------------------------------------------------------------------------------
/core/kryptos/strategy/indicators/ml.py:
--------------------------------------------------------------------------------
1 | from catalyst.api import get_datetime, record
2 | import pandas as pd
3 | from rq import Queue
4 | import time
5 |
6 | from kryptos.utils import tasks
7 | from kryptos.strategy.indicators import AbstractIndicator
8 |
9 |
def get_indicator(name, **kw):
    """Return the ML indicator class matching *name*, or a generic MLIndicator."""
    subclass = globals().get(name.upper())
    if subclass is None:
        # No dedicated class defined for this name; fall back to the base type.
        return MLIndicator(name, **kw)
    return subclass(**kw)
16 |
17 |
class MLIndicator(AbstractIndicator):
    """Factory for creating an indicator using the machine learning models

    The constructor is passed the name of the indicator.
    The calculation is performed at each iteration and is recorded
    and plotted based on a ML model function's outputs.

    To signal trade opportunities, subclassed objects can implement
    the signals_buy and signals_sell methods.

    NOTE: this docstring was previously placed after super().__init__(),
    making it a no-op statement instead of the class docstring.
    """

    def __init__(self, name, **kw):
        super().__init__(name, **kw)
        self.hyper_params = None
        self.first_iteration = True
        self.current_date = None
        self.current_job_id = None
        # buy/sell are set as attributes rather than calculated properties for ML
        # because the results are returned from the worker processes
        # in which the MLIndicator instance is not available
        self._signals_buy = False
        self._signals_sell = False

    @property
    def signals_buy(self):
        return self._signals_buy

    @property
    def signals_sell(self):
        # BUGFIX: previously returned self._signals_buy, so sell signals
        # silently mirrored buy signals.
        return self._signals_sell

    def calculate(self, df, namespace, **kw):
        """Reset signals and enqueue an async ML calculation for this iteration."""
        self._signals_buy = False
        self._signals_sell = False
        self.idx += 1
        self.current_date = get_datetime()
        self.log.info(str(self.idx) + ' - ' + str(self.current_date) + ' - ' + str(df.iloc[-1].price))
        self.log.info(str(df.iloc[0].name) + ' - ' + str(df.iloc[-1].name))
        self.log.info(f'Queuing {self.name} ML calculation')
        job = tasks.enqueue_ml_calculate(df, namespace, self.name, self.idx, self.current_date, self.hyper_params, df_final=self.df_final, **kw)
        self.current_job_id = job.id

    def record(self):
        """Block until the queued ML job finishes, then record its results.

        Raises:
            LookupError: the job id is not found on the 'ml' queue.
            RuntimeError: the worker job failed.
        """
        q = Queue('ml', connection=tasks.CONN)
        job = q.fetch_job(self.current_job_id)

        if job is None:
            # BUGFIX: previously only logged, then crashed on job.is_finished
            # with an AttributeError; raise a clear error instead.
            self.log.error('Failed to find job calculation job')
            raise LookupError(f'ML job {self.current_job_id} not found on ml queue')

        self.log.info(f'Waiting for ML job: {self.current_job_id}')
        while not job.is_finished:
            # BUGFIX: detect failed jobs (previously this spun forever) and
            # sleep instead of busy-waiting with `pass`, which pinned a CPU.
            if job.is_failed:
                raise RuntimeError(f'ML job {self.current_job_id} failed')
            time.sleep(0.5)

        self.log.info('Job complete, recording results')
        self.result, df_results_json, df_final_json, self._signals_buy, self._signals_sell, self.hyper_params = job.result
        self.current_job_id = None
        df_results = pd.read_json(df_results_json)
        self.df_results = self.df_results.append(df_results)
        self.df_final = pd.read_json(df_final_json)
        payload = {self.name: self.result}
        record(**payload)

    def analyze(self, namespace, data_freq, extra_results):
        """Enqueue the post-run ML analysis task for this indicator."""
        job = tasks.enqueue_ml_analyze(namespace, self.name, self.df_final, self.df_results, data_freq, extra_results)
81 |
82 |
class XGBOOST(MLIndicator):
    """MLIndicator backed by an XGBoost model."""

    def __init__(self, **kw):
        # Populated later by the worker-side feature-selection step.
        self.feature_selected_columns = []
        self.num_boost_rounds = None
        super().__init__("XGBOOST", **kw)
89 |
90 |
class LIGHTGBM(MLIndicator):
    """MLIndicator backed by a LightGBM model."""

    def __init__(self, **kw):
        # Populated later by the worker-side feature-selection step.
        self.feature_selected_columns = []
        self.num_boost_rounds = None
        super().__init__("LIGHTGBM", **kw)
96 |
--------------------------------------------------------------------------------
/core/kryptos/strategy/indicators/__init__.py:
--------------------------------------------------------------------------------
1 | import pandas as pd
2 | import logbook
3 | from rq import get_current_job
4 |
5 | from kryptos import logger_group
6 |
7 |
# Mapping of TA-Lib moving-average type names to their numeric matype codes,
# used by AbstractIndicator._parse_params to translate string "matype" params.
MA_TYPE_MAP = {
    "SMA": 0,  # Simple Moving Average
    "EMA": 1,  # Exponential Moving Average
    "WMA": 2,  # Weighted Moving Average
    "DEMA": 3,  # Double Exponential Moving Average
    "TEMA": 4,  # Triple Exponential Moving Average
    "TRIMA": 5,  # Triangular Moving Average
    "KAMA": 6,  # Kaufman Adaptive Moving Average
    "MAMA": 7,  # MESA Adaptive Moving Average
    "T3": 8,  # Triple Generalized Double Exponential Moving Average
}
19 |
20 |
class IndicatorLogger(logbook.Logger):
    """Logger bound to one indicator; enriches every record with its state."""

    def __init__(self, indicator):
        self.indicator = indicator
        super().__init__(name="INDICATOR:{}".format(self.indicator.name))

    def process_record(self, record):
        logbook.Logger.process_record(self, record)
        # Attach the indicator's current state so log handlers can inspect it.
        record.extra["trade_date"] = self.indicator.current_date
        record.extra["ind_data"] = self.indicator.data
        record.extra["ind_outputs"] = self.indicator.outputs
        # When running inside an RQ worker, mirror the message into the job
        # meta so status endpoints can surface per-indicator progress.
        current = get_current_job()
        if current is not None:
            current.meta[self.indicator.name] = record.msg
            current.save_meta()
36 |
37 |
38 |
39 |
class AbstractIndicator(object):
    """Abstract class defining required methods utilized by Strategy objects."""

    def __init__(self, name, label=None, symbol=None, dataset=None, **kw):
        """Set up indicator identity, params, and per-run result state.

        Args:
            name (str): indicator name; normalized to upper case
            label (str): display label, defaults to the name
            symbol (str): asset symbol the indicator applies to, if any
            dataset (str): external dataset name, if any
            **kw: may contain "params", a dict of indicator function params
        """

        self.name = name.upper()
        self.label = label or self.name
        self.symbol = symbol
        self.dataset = dataset

        self.params = {}

        func_params = kw.get("params", {})
        self._parse_params(func_params)

        # Per-iteration state populated by subclasses in calculate()/record().
        self.data = None
        self.outputs = None
        self.current_date = None
        self.result = None
        self.df_results = pd.DataFrame(columns=['pred'])
        self.df_final = pd.DataFrame()
        self.results_pred = []
        self.results_real = []
        self.idx = -1

        self.log = IndicatorLogger(self)
        logger_group.add_logger(self.log)
        self.log.info("Attached {} indicator".format(self.name))

    @property
    def default_params(self):
        """Default function params; subclasses override as needed."""
        return {}

    @property
    def output_names(self):
        """Names of the indicator's output series; subclasses override."""
        return []

    def update_param(self, param, val):
        """Set a single indicator param without disturbing the others."""
        self._parse_params({param: val})

    def serialize(self):
        """Return a json-serializable description of the indicator."""
        d = {
            "name": self.name,
            "symbol": self.symbol,
            "dataset": self.dataset,
            "label": self.label,
            "params": self.params,
            "outputs": self.output_names
        }
        return d

    def _parse_params(self, func_params):
        """Merge func_params into self.params, filling defaults for missing keys.

        BUGFIX: defaults were previously applied with dict.update() on every
        call, so update_param() silently reset all *other* customized params
        back to their defaults. setdefault() only fills keys not already set.
        """
        for k, v in self.default_params.items():
            self.params.setdefault(k, v)
        for k, v in func_params.items():
            # Translate string moving-average types into TA-Lib matype codes.
            if "matype" in k and isinstance(v, str):
                v = MA_TYPE_MAP[v]
            self.params[k] = v

    def calculate(self, df):
        raise NotImplementedError

    def record(self):
        raise NotImplementedError

    def plot(self, results, pos, *kw):
        raise NotImplementedError

    @property
    def signals_buy(self):
        raise NotImplementedError

    @property
    def signals_sell(self):
        raise NotImplementedError

    def set_signal_threshold(self, *args, **kwargs):
        raise NotImplementedError
117 |
--------------------------------------------------------------------------------
/core/kryptos/strategies/buy_low_sell_high.py:
--------------------------------------------------------------------------------
1 | from kryptos.strategy import Strategy
2 | from kryptos.strategy.indicators import technical
3 | from catalyst.api import order_target_percent, order, record
4 |
5 | import logbook
6 |
log = logbook.Logger("BUY_LOW_SELL_HIGH")
log.level = logbook.INFO


# Strategy: accumulate when RSI is low, take profit once price clears cost basis.
strat = Strategy("BUY_LOW_SELL_HIGH", data_frequency="daily")

# 14-period RSI drives the buy-increment sizing in handle_data.
rsi = technical.get_indicator("RSI")
rsi.update_param("timeperiod", 14)

strat.add_market_indicator(rsi)
17 |
18 |
@strat.init
def init(context):
    """Set the strategy's tunables and per-run state on the catalyst context."""
    # Stop accumulating once we hold this many units.
    context.TARGET_POSITIONS = 30
    # Sell once price exceeds cost basis by this fraction (10%).
    context.PROFIT_TARGET = 0.1
    # Limit-price tolerance around the current price (2%).
    context.SLIPPAGE_ALLOWED = 0.02
    context.cost_basis = None
    context.buy_increment = None
    context.i = 0  # number of trading periods processed
27 |
28 |
@strat.handle_data
def handle_data(context, data):
    """Size the next buy increment from RSI and refresh position state."""
    context.i += 1

    context.price = data.current(context.asset, "price")
    context.position = context.portfolio.positions.get(context.asset)

    rsi = strat.indicator("RSI").outputs["RSI"][-1]

    # Buying more when RSI is low, this should lower our cost basis
    if rsi <= 30:
        increment = 1
    elif rsi <= 40:
        increment = 0.5
    elif rsi <= 70:
        increment = 0.2
    else:
        increment = 0.1
    context.buy_increment = increment

    if not context.position:
        return

    context.cost_basis = context.position.cost_basis

    log.info(
        "found {amount} positions with cost basis {cost_basis}".format(
            amount=context.position.amount, cost_basis=context.cost_basis
        )
    )

    if context.position.amount >= context.TARGET_POSITIONS:
        log.info("reached positions target: {}".format(context.position.amount))
        return
60 |
61 |
@strat.signal_buy(override=False)
def signal_buy(context, data):
    """Signal a buy when price dips below our average cost basis."""
    if not context.cost_basis:
        # No basis yet (no position) — defer to the strategy's default signal.
        return None
    return context.price < context.cost_basis
66 |
67 |
@strat.signal_sell(override=False)
def signal_sell(context, data):
    """Signal a sell only when holding units and price clears the profit target."""
    if not context.position:
        return False
    if context.cost_basis and context.price < context.cost_basis:
        # Still under water relative to cost basis; hold.
        return False

    target_price = context.cost_basis * (1 + context.PROFIT_TARGET)
    return context.position.amount > 0 and context.price > target_price
79 |
80 |
@strat.buy_order
def buy(context):
    """Place a limit buy sized by the RSI-derived increment.

    Skips the order when no increment was set this period or when there is
    not enough cash for the increment at the current price.
    """
    if context.buy_increment is None:
        rsi = strat.indicator("RSI")
        # BUGFIX: previously logged strat.rsi.outputs[-1]; `strat` has no
        # `rsi` attribute, so this branch raised AttributeError instead of
        # logging. Use the indicator fetched above.
        log.info("the rsi is too high to consider buying {}".format(rsi.outputs["RSI"][-1]))
        return

    if context.price * context.buy_increment > context.portfolio.cash:
        log.info("not enough base currency to consider buying")
        return

    log.info(
        "buying position cheaper than cost basis {} < {}".format(context.price, context.cost_basis)
    )
    order(
        asset=context.asset,
        amount=context.buy_increment,
        limit_price=context.price * (1 + context.SLIPPAGE_ALLOWED),
    )
100 |
101 |
@strat.sell_order
def sell(context):
    """Close the entire position with a limit order, logging realized profit."""
    # Realized profit = market value of holdings minus their total cost.
    profit = (context.price * context.position.amount) - (
        context.cost_basis * context.position.amount
    )
    log.info("closing position, taking profit: {}".format(profit))
    # target=0 liquidates the position; limit price allows 2% slippage down.
    order_target_percent(
        asset=context.asset, target=0, limit_price=context.price * (1 - context.SLIPPAGE_ALLOWED)
    )
111 |
112 |
@strat.analyze()
def analyze(context, results, pos):
    """Log the final cash balance and the number of trading periods run."""
    ending_cash = results.cash[-1]
    log.info("Ending cash: ${}".format(ending_cash))
    log.info("Completed for {} trading periods".format(context.i))
118 |
119 |
if __name__ == "__main__":
    # Running as a script: print the serialized schema, then execute the strategy.
    log.info("Strategy Schema:\n{}".format(strat.serialize()))
    strat.run()
123 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Kryptos
2 |
3 | ## About
4 |
5 | Kryptos AI is a virtual investment assistant that manages your cryptocurrency portfolio. To learn more, check out the [Kryptos Slide Deck](https://docs.google.com/presentation/d/1O3BQ6fS9SuokJud8TZ1XPXX5QbjefAEiXNR3cxJIJwE/view) and the [Kryptos White Paper](https://docs.google.com/document/d/1Um9yoosEj-oZdEF3yMK2pt5TI0O2aRYhgkC0XJf_BVo/view).
6 |
7 |
8 | ## Installation
9 |
10 | To get the entire project up and running locally:
11 |
12 |
13 | Clone the repo:
14 | ```bash
15 | $ git clone https://github.com/produvia/kryptos.git
16 | $ cd kryptos
17 | ```
18 |
19 | Build the docker images
20 | ```bash
21 | $ docker-compose build
22 | ```
23 |
24 | ## Running locally
25 |
26 | ```bash
27 | $ docker-compose up
28 | ```
29 |
30 | This will spin up a web, worker, ml, postgres, and redis container.
31 |
32 | The web app will be accessible at http://0.0.0.0:8080
33 |
34 | You can also view the RQ dashboard at http://0.0.0.0:8080/rq
35 |
36 | Hitting Ctrl-C will stop all the containers.
37 | To prevent this and run the containers in the background:
38 |
39 | ``` bash
40 | $ docker-compose up -d
41 | ```
42 |
43 | You can then selectively view the logs of any of the containers
44 |
45 | ``` bash
46 | $ docker-compose logs -f
47 | ```
48 |
49 |
50 | ## Local Development
51 |
52 | Once the containers are running, you can access the shell of any of the containers by using the `exec` command
53 |
54 |
55 | For instance, to run strategies from CLI:
56 | ```bash
57 | $ docker-compose exec worker bash
58 | ```
59 |
60 | This will provide a command prompt inside the worker container from which you can run the `strat` command
61 |
62 | For example, to work on the ML service:
63 | ```bash
64 | # start all containers w/o logging
65 | $ docker-compose up -d
66 |
67 | # enter the ml shell
68 | $ docker-compose exec ml bash
69 |
70 | # or enter the worker shell to run a strategy
71 | $ docker-compose exec worker bash
72 | ```
73 |
74 | Then to stream ML logs in a separate terminal
75 | ```bash
76 | docker-compose logs -f ml
77 | ```
78 |
79 | To stop all containers
80 |
81 | ``` bash
82 | $ docker-compose stop
83 | ```
84 |
85 | To stop a specific container
86 |
87 | ``` bash
88 | $ docker-compose stop <service>
89 | ```
90 |
91 |
92 |
93 |
94 |
95 | ## Contributing
96 |
97 | When contributing to the codebase, please follow the branching model described [here](https://nvie.com/posts/a-successful-git-branching-model/)
98 |
99 | Essentially, the two main branches are
100 |
101 | - `master`: the main branch containing the latest stable code released to production
102 | - `develop`: the "Work in Progress" branch where all new changes are merged into
103 |
104 | Then there are [feature branches](https://nvie.com/posts/a-successful-git-branching-model/#feature-branches). These are the branches where you will make most of your commits. They branch off of develop, and are merged back into develop when the feature is complete.
105 |
106 | ### Setting up the development environment
107 |
108 | Remember to get the latest changes
109 |
110 | ``` bash
111 | $ git checkout develop
112 | $ git pull
113 | ```
114 |
115 | Then create your new feature branch
116 |
117 | ``` bash
118 | $ git checkout -b feature/<feature-name>
119 | ```
120 |
121 | To push your latest changes to the repo
122 |
123 | ``` bash
124 | $ git push origin feature/<feature-name>
125 | ```
126 |
127 | When you are ready to merge your feature branch back into develop
128 |
129 | 1. Ensure you have pushed your latest changes to the origin `feature/<feature-name>` branch
130 | 2. Submit a pull request to the `develop` branch
131 |
132 |
133 |
134 | ## Project Components
135 |
136 | For more information, check out documentation for the different services:
137 |
138 | - [core](core/README.md) - for strategy related logic
139 | - [ml](ml/README.md) - for machine learning models
140 | - [web](web/README.md) - for the Telegram bot and web frontend
141 |
--------------------------------------------------------------------------------
/app/app/utils/build.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | from typing import List, Dict, Set
3 | import json
4 | from flask import request, current_app
5 | import ccxt
6 |
7 | from app.models import User
8 | from app import task
9 | from app.utils import choices
10 |
11 |
12 | # TODO possibly use telegram chat_id
def get_user() -> User:
    """Look up the User whose telegram id matches the incoming message payload."""
    telegram_id = get_message_payload()["id"]
    matched = User.query.filter_by(telegram_id=telegram_id).first()
    current_app.logger.debug(f"Got user {matched}")
    return matched
18 |
19 |
def get_first_name() -> str:
    """Return the sender's first name, or an empty string when absent."""
    name = get_message_payload().get("first_name")
    return "" if name is None else name
25 |
26 |
def get_message_payload() -> Dict:
    """Extract the Telegram "from" payload from the DialogFlow request.

    Falls back to a stub payload when no platform data is present (e.g. when
    testing directly from the DialogFlow console).

    Returns:
        Dict: sender info; contains at least "id" (and usually "first_name")
    """
    platform_data = request.json.get("originalRequest", {}).get("data", {})
    current_app.logger.info(platform_data)
    if not platform_data:
        # DialogFlow console test requests carry no platform data.
        return {"first_name": "DialogFlow", "id": 111}

    if platform_data.get("message"):
        return platform_data["message"]["from"]

    elif platform_data.get("callback_query"):
        return platform_data["callback_query"]["from"]

    # BUGFIX: previously fell through and returned None despite the Dict
    # annotation, which broke callers (e.g. get_user()'s ["id"] lookup).
    return {}
38 |
39 |
def build_strat_dict_from_context(context, mode):
    """Build a strategy config dict for the given mode from the dialog context."""
    strat = context.get("existing_strategy")
    exchange = context.get("exchange").title()
    base_currency = context.get("trade_currency").upper()
    quote_currency = context.get("quote_currency").upper()
    capital_base = context.get("capital_base")
    trade_pair = f"{base_currency}_{quote_currency}".lower()
    hours = int(context.get("hours"))

    start = datetime.datetime.utcnow()
    end = start + datetime.timedelta(hours=hours)

    # ML model names go under "models", plain indicators under "indicators".
    if strat in choices.ML_MODELS:
        strat_dict = {"trading": {}, "models": [{"name": strat}]}
    else:
        strat_dict = {"trading": {}, "indicators": [{"name": strat}]}

    trading = strat_dict["trading"]
    # NOTE(review): START and END use different datetime formats
    # ("%Y-%m-%d-%H-%M" vs "%Y-%m-%d %H:%M:%S") — confirm the downstream
    # config parser really expects this asymmetry.
    trading["START"] = datetime.datetime.strftime(start, "%Y-%m-%d-%H-%M")
    trading["END"] = datetime.datetime.strftime(end, "%Y-%m-%d %H:%M:%S")

    trading["EXCHANGE"] = exchange
    trading["ASSET"] = trade_pair
    trading["CAPITAL_BASE"] = float(capital_base)
    trading["QUOTE_CURRENCY"] = quote_currency.lower()

    strat_dict["name"] = f"{strat}-{mode.title()}"
    return strat_dict
66 |
67 |
def launch_backtest(config_context):
    """Queue a backtest built from the dialog context; return the backtest id."""
    strat_dict = build_strat_dict_from_context(config_context, "backtest")

    # Can't use today as the end date bc data bundles are updated daily,
    # so current market data won't be available for backtest until the
    # following day — use the past week up to yesterday instead.
    back_start = datetime.datetime.today() - datetime.timedelta(days=4)
    back_end = datetime.datetime.today() - datetime.timedelta(days=1)

    strat_dict["trading"]["START"] = back_start.strftime("%Y-%m-%d")
    strat_dict["trading"]["END"] = back_end.strftime("%Y-%m-%d")

    backtest_id, _ = task.queue_strat(
        json.dumps(strat_dict), user_id=None, live=False, simulate_orders=True
    )
    return backtest_id
84 |
85 |
def launch_paper(config_context):
    """Queue a live paper-trading strategy (simulated orders) for the user."""
    strat_dict = build_strat_dict_from_context(config_context, "paper")
    user = get_user()

    job_id, _ = task.queue_strat(
        json.dumps(strat_dict), user.id, live=True, simulate_orders=True
    )
    return job_id
94 |
95 |
def launch_live(config_context):
    """Queue a live strategy with real orders for the requesting user."""
    user = get_user()
    strat_dict = build_strat_dict_from_context(config_context, "live")

    trading = strat_dict["trading"]
    cap_base = trading["CAPITAL_BASE"]
    quote_curr = trading["QUOTE_CURRENCY"]
    current_app.logger.info(
        f"Queuing live strat for user {user.id}: {cap_base} {quote_curr}"
    )

    job_id, _ = task.queue_strat(
        json.dumps(strat_dict), user.id, live=True, simulate_orders=False
    )
    return job_id
108 |
--------------------------------------------------------------------------------