├── .devcontainer.json
├── .env
├── .gitignore
├── LICENSE
├── README.md
├── Storage
│   ├── airflow
│   │   └── dags
│   │       ├── __pycache__
│   │       │   ├── cred.cpython-37.pyc
│   │       │   ├── dag_dynamic.cpython-37.pyc
│   │       │   ├── dag_fx_data.cpython-37.pyc
│   │       │   ├── dag_fx_data_download copy.cpython-37.pyc
│   │       │   ├── dag_fx_data_download.cpython-37.pyc
│   │       │   ├── dag_helper_files.cpython-37.pyc
│   │       │   ├── dag_mlflowserve.cpython-37.pyc
│   │       │   ├── dag_strategy_dynamic.cpython-37.pyc
│   │       │   ├── example_dag - 2.cpython-37.pyc
│   │       │   ├── example_dag.cpython-36.pyc
│   │       │   ├── example_dag.cpython-37.pyc
│   │       │   ├── fx_data.cpython-37.pyc
│   │       │   ├── fx_oanda_daily.cpython-37.pyc
│   │       │   ├── fx_oanda_download.cpython-37.pyc
│   │       │   ├── fx_oanda_minute.cpython-37.pyc
│   │       │   ├── mlflow_test.cpython-37.pyc
│   │       │   ├── test_dag - Copy.cpython-37.pyc
│   │       │   └── test_dag.cpython-37.pyc
│   │       ├── dag_fx_data_download.py
│   │       ├── dag_strategy_dynamic.py
│   │       └── process_configuration.json
│   ├── minio
│   │   └── storage
│   │       ├── airflow-files
│   │       │   ├── interested_tickers.xlsx
│   │       │   └── strategy.csv
│   │       ├── mlflow-models
│   │       │   └── 2a71796bd39c429d89a1cf2006624240
│   │       │       └── artifacts
│   │       │           └── model
│   │       │               ├── MLmodel
│   │       │               ├── conda.yaml
│   │       │               └── model.pkl
│   │       └── model-support-files
│   │           ├── 1_ml_log.csv
│   │           ├── 2_ml_log.csv
│   │           └── processed_2_ml_log.csv
│   ├── notebooks
│   │   ├── .ipynb_checkpoints
│   │   │   ├── 10-19-2019-checkpoint.ipynb
│   │   │   ├── Example-checkpoint.ipynb
│   │   │   ├── Mlflow Serving Test - Only look at this-checkpoint.ipynb
│   │   │   └── PODA PATTI-checkpoint.ipynb
│   │   ├── .vscode
│   │   │   ├── launch.json
│   │   │   └── settings.json
│   │   ├── Example.ipynb
│   │   └── mlflow_project
│   │       ├── MLproject
│   │       ├── conda.yaml
│   │       ├── ml_log_processed.csv
│   │       └── train.py
│   ├── pgadmin
│   │   ├── pgadmin4.db
│   │   └── sessions
│   │       ├── 124afc37-5bde-4a12-a5fb-c25c49bc48c5
│   │       ├── 3e137754-12cf-4207-ac2d-e5523bb2e476
│   │       ├── 72632a26-21ef-4487-8f34-8201b729213c
│   │       ├── e4af4537-fc0b-4bb0-8ef0-430f8678aaef
│   │       └── f618e8d8-213d-4139-a7cf-cff78baec139
│   ├── postgress_db
│   │   └── scripts
│   │       └── 01_create_user.sh
│   ├── q_pack
│   │   ├── __init__.py
│   │   ├── __pycache__
│   │   │   └── __init__.cpython-37.pyc
│   │   ├── btoandav20
│   │   │   ├── __init__.py
│   │   │   ├── __pycache__
│   │   │   │   ├── __init__.cpython-36.pyc
│   │   │   │   └── __init__.cpython-37.pyc
│   │   │   ├── brokers
│   │   │   │   ├── __init__.py
│   │   │   │   ├── __pycache__
│   │   │   │   │   ├── __init__.cpython-36.pyc
│   │   │   │   │   ├── __init__.cpython-37.pyc
│   │   │   │   │   ├── oandav20broker.cpython-36.pyc
│   │   │   │   │   └── oandav20broker.cpython-37.pyc
│   │   │   │   └── oandav20broker.py
│   │   │   ├── feeds
│   │   │   │   ├── __init__.py
│   │   │   │   ├── __pycache__
│   │   │   │   │   ├── __init__.cpython-36.pyc
│   │   │   │   │   ├── __init__.cpython-37.pyc
│   │   │   │   │   ├── oandav20feed.cpython-36.pyc
│   │   │   │   │   └── oandav20feed.cpython-37.pyc
│   │   │   │   └── oandav20feed.py
│   │   │   ├── sizers
│   │   │   │   ├── __init__.py
│   │   │   │   ├── __pycache__
│   │   │   │   │   ├── __init__.cpython-36.pyc
│   │   │   │   │   ├── __init__.cpython-37.pyc
│   │   │   │   │   ├── oandav20sizer.cpython-36.pyc
│   │   │   │   │   └── oandav20sizer.cpython-37.pyc
│   │   │   │   └── oandav20sizer.py
│   │   │   └── stores
│   │   │       ├── __init__.py
│   │   │       ├── __pycache__
│   │   │       │   ├── __init__.cpython-36.pyc
│   │   │       │   ├── __init__.cpython-37.pyc
│   │   │       │   ├── oandav20store.cpython-36.pyc
│   │   │       │   └── oandav20store.cpython-37.pyc
│   │   │       └── oandav20store.py
│   │   ├── db_pack
│   │   │   ├── oanda
│   │   │   │   ├── __pycache__
│   │   │   │   │   ├── fx_oanda_daily.cpython-37.pyc
│   │   │   │   │   └── fx_oanda_minute.cpython-37.pyc
│   │   │   │   ├── fx_oanda_daily.py
│   │   │   │   └── fx_oanda_minute.py
│   │   │   └── schema
│   │   │       ├── risk_db_schema_builder.py
│   │   │       ├── secmaster_db_schema_builder.py
│   │   │       └── secmaster_db_symbol_loader.py
│   │   ├── ml_pack
│   │   │   └── preprocessing
│   │   │       ├── __pycache__
│   │   │       │   └── ml_preprocessing.cpython-37.pyc
│   │   │       └── ml_preprocessing.py
│   │   ├── q_analyzers
│   │   │   ├── __pycache__
│   │   │   │   ├── bt_analyzers.cpython-37.pyc
│   │   │   │   ├── bt_logger_analyzer.cpython-37.pyc
│   │   │   │   ├── bt_perform_analyzer.cpython-37.pyc
│   │   │   │   ├── bt_pos_perform_analyzer.cpython-37.pyc
│   │   │   │   ├── bt_strat_perform_analyzer.cpython-37.pyc
│   │   │   │   ├── bt_strategy_id_analyzer.cpython-37.pyc
│   │   │   │   └── bt_transaction_analyzer.cpython-37.pyc
│   │   │   ├── bt_logger_analyzer.py
│   │   │   ├── bt_pos_perform_analyzer.py
│   │   │   ├── bt_strat_perform_analyzer.py
│   │   │   ├── bt_strategy_id_analyzer.py
│   │   │   └── bt_transaction_analyzer.py
│   │   ├── q_credentials
│   │   │   ├── __pycache__
│   │   │   │   ├── db_cred.cpython-37.pyc
│   │   │   │   ├── db_risk_cred.cpython-37.pyc
│   │   │   │   ├── db_secmaster_cred.cpython-37.pyc
│   │   │   │   └── oanda_cred.cpython-37.pyc
│   │   │   ├── db_risk_cred.py
│   │   │   ├── db_secmaster_cred.py
│   │   │   └── oanda_cred.py
│   │   ├── q_datafeeds
│   │   │   ├── __pycache__
│   │   │   │   └── bt_datafeed_postgres.cpython-37.pyc
│   │   │   └── bt_datafeed_postgres.py
│   │   ├── q_run
│   │   │   └── run_BT.py
│   │   ├── q_strategies
│   │   │   ├── __init__.py
│   │   │   ├── __pycache__
│   │   │   │   ├── __init__.cpython-37.pyc
│   │   │   │   ├── simple_strategy.cpython-37.pyc
│   │   │   │   └── simple_strategy_2.cpython-37.pyc
│   │   │   ├── simple_strategy.py
│   │   │   └── simple_strategy_2.py
│   │   └── q_tools
│   │       ├── __pycache__
│   │       │   ├── args_parse_dict.cpython-37.pyc
│   │       │   ├── args_parse_other.cpython-37.pyc
│   │       │   └── write_to_db.cpython-37.pyc
│   │       ├── args_parse_other.py
│   │       └── write_to_db.py
│   └── superset
│       ├── superset.db
│       └── superset_config.py
├── docker-compose.yml
├── dockerfile_airflow
│   ├── Dockerfile
│   └── requirements.txt
├── dockerfile_jupyter_notebook
│   ├── Dockerfile
│   ├── README.md
│   └── requirements.txt
├── dockerfile_minio
│   └── Dockerfile
├── dockerfile_mlflowserver
│   ├── Dockerfile
│   └── requirements.txt
├── dockerfile_superset
│   └── Dockerfile
├── public
│   └── images
│       ├── architecture-cloud.png
│       ├── architecture.png
│       ├── backtrader.png
│       ├── components.png
│       ├── logo.PNG
│       ├── logo2.png
│       ├── logo_0.PNG
│       ├── old.png
│       └── superset2.PNG
└── starter_script.bat
/.devcontainer.json:
--------------------------------------------------------------------------------
1 | // See https://aka.ms/vscode-remote/devcontainer.json for format details or
2 | // https://aka.ms/vscode-dev-containers/definitions for sample configurations.
3 | {
4 | "dockerComposeFile": "docker-compose.yml",
5 | "service": "jupyter-image",
6 | "workspaceFolder": "/home/jovyan/work",
7 | "extensions": [
8 | "ms-python.python"
9 | ]
10 | }
--------------------------------------------------------------------------------
/.env:
--------------------------------------------------------------------------------
1 | WD=/c/wsl/Microservices-Based-Algorithmic-Trading-System/Storage
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | unwanted/
2 | Storage/mlflow
3 | Storage/minio/storage/.minio.sys
4 | Storage/superset/Additional
5 | Storage/minio/storage/mlflow-models/
6 | Storage/minio/storage/support-files/
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
2 |
3 | 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
4 |
5 | 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
6 |
7 | 3. Neither the name MBATS nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
8 |
9 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | # Microservices Based Algorithmic Trading System
6 |
7 |
8 |
9 |
10 | [License: BSD 3-Clause](https://opensource.org/licenses/BSD-3-Clause)
11 |
12 | ---
13 |
14 | MBATS is a Docker-based platform for developing, testing and deploying algorithmic trading strategies, with a focus on Machine Learning based algorithms.
15 |
16 | MBATS aims to make a Quant's life a lot easier by providing a modular, easy-to-set-up trading infrastructure based on open-source tools that can take a trading strategy from idea to production within a few minutes.
17 |
18 | Using MBATS, you can easily create trading strategies in Backtrader, manage Machine Learning models with MLflow, store and query market data in a Postgres database administered through pgAdmin, keep files and objects in Minio, and use Superset to visualize the performance of backtested and live strategies. Airflow ties the different components together and orchestrates jobs, along with many more features to help you get from idea to live trading faster and with less effort.
19 |
20 | [Linkedin Article about MBATS](https://www.linkedin.com/pulse/microservices-based-algorithmic-trading-system-saeed-rahman/?trackingId=dsWm4zZBRIe3%2FznJMBaBHQ%3D%3D)
21 |
22 | Updates:
23 | - 2024 - [Quant Infra Design](https://github.com/saeed349/quant_infra/wiki) and [Linkedin Article](https://www.linkedin.com/pulse/quant-platform-cash-equities-saeed-rahman-txw3c/)
24 | - 2021 - [MBATS V2.0](https://github.com/saeed349/Microservices-Based-Algorithmic-Trading-System-V-2.0) and [Cloud Version](https://github.com/saeed349/Quant-Trading-Cloud-Infrastructure) are now available.
25 | Also, [Linkedin Article on scaling MBATS to the cloud using Terraform](https://www.linkedin.com/post/edit/6619730514188267520/)
26 |
27 |
28 |
29 | 
30 |
31 | Table of Contents:
32 |
33 | - [Quickstart](#Quickstart)
34 | - [Getting Started](#getting-started)
35 | - [Backtrader](#Backtrader)
36 | - [MLflow](#MLflow)
37 | - [Airflow](#Apache-Airflow)
38 | - [Superset](#Apache-Superset)
39 | - [Minio](#Minio)
40 | - [PostgreSQL](#Postgres)
41 | - [Cloud](#Cloud)
42 | - [Current Features](#Current-Features)
43 | - [Planned Features](#Planned-Features)
44 | - [Contributing](#Contributing)
45 | - [License](#License)
46 | - [Authors](#Authors)
47 | - [Acknowledgments](#Acknowledgments)
48 |
49 | ## Quickstart
50 | Check out the video below to see the platform in action:
51 | [Watch the demo on YouTube](https://youtu.be/hLSGgW4-WC8)
52 |
53 |
54 | MBATS is based on Docker containers. Running your infrastructure is as easy as running one command from your terminal. You can run MBATS either on your local machine or in the cloud using docker-compose. Before running the installation command, make sure you have [Docker](https://www.docker.com/products/docker-desktop) installed on your machine.
55 |
56 |
57 | 1. Download/clone the GitHub repository (make sure your Docker machine has access to the location):
58 | ```git clone https://github.com/saeed349/Microservices-Based-Algorithmic-Trading-System.git```
59 | 2. Update the 'WD' variable in the .env file to the location of the cloned directory.
60 | 3. Run docker compose:
61 | ```docker-compose up -d --build```
62 | The first run will take some time as all the Docker base images need to be downloaded.
63 | Once it is running, you can access the following components in your browser:
64 | * Jupyter Notebook: http://localhost:8888
65 | * Airflow: http://localhost:8080
66 | * MLflow: http://localhost:5500
67 | * PgAdmin: http://localhost:1234
68 | * Superset: http://localhost:8088
69 | * Minio: http://localhost:9000
70 |
71 | 4. Run the script to set up the database schema:
72 | ```.\starter_script.bat```
73 | 5. All the infrastructure and business logic is in the *Storage* folder, and the necessary components are shared across containers:
74 | - [Trading Strategies](./Storage/q_pack/q_strategies)
75 | - [Analyzers](./Storage/q_pack/q_analyzers)
76 | - [Datafeed Connectors](./Storage/q_pack/q_datafeeds)
77 | - [Airflow DAGS](./Storage/airflow/dags)
78 | - [Supporting files for Airflow](./Storage/minio/storage/airflow-files)
79 | - [Minio Storage](./Storage/minio)
80 | - [DB Schema builder code](./Storage/q_pack/db_pack)
81 | - [Machine Learning input files](./Storage/minio/storage/model-support-files)
82 | - [MLflow artifacts](./Storage/minio/storage/mlflow-models)
83 |
84 | 6. You can choose which securities to download by listing them in [*interested_tickers.xlsx*](./Storage/minio/storage/airflow-files/):
85 | the *daily* tab lists the securities for which EOD data is to be downloaded, and the *minute* tab those to be downloaded at a 1-minute interval.
86 | 7. Turn on the [*fx_data_download*](./Storage/airflow/dags/dag_fx_data_download.py) DAG in Airflow (http://localhost:8080); this will download the daily and minute data for the securities you have set in *interested_tickers.xlsx*.
87 | 8. Go to Jupyter Notebook (http://localhost:8888) and use the notebook [*Example.ipynb*](./Storage/notebooks/Example.ipynb) to run through the example strategy implementation, where you can:
88 | - Run Backtrader trading strategies (backtest or live).
89 | - Preprocess the logs (market data and indicators for each run) to prepare them for the Machine Learning model.
90 | - Run Machine Learning models on the preprocessed data and track it to MLflow.
91 | - Serve the Machine Learning artifact (model) via MLflow.
92 | - Explore bonus features of MLflow (packaging and serving via REST API).
93 | 9. To check the backtest or live trading results, go to Superset: http://localhost:8088
94 | 10. You can schedule live trading strategies by editing [strategy.csv](./Storage/minio/storage/airflow-files), which is consumed by the dynamic DAG [dag_strategy_dynamic](./Storage/airflow/dags/dag_strategy_dynamic.py); a sample is shown below.
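Each row of *strategy.csv* schedules one strategy run; the sample that ships with the repo looks like this:

```
Strategy,Mode,Securities,Model ID,Account,Token,Strategy Parameters
simple_strategy_2,backtest,"EUR_USD,GBP_USD,NZD_USD,USD_JPY",0255c7673c164de6815d89a4228b846c,,,ml_serving=True
simple_strategy,live,"EUR_USD,GBP_USD",,,,
```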
95 |
96 |
97 |
98 | ## Architecture
99 |
100 | ![MBATS Architecture](./public/images/architecture.png)
101 |
102 | MBATS is a collection of 9 Docker containers working in concert to create an environment for developing and productionising trading strategies with ease. The main parts of MBATS are as follows.
103 |
104 | ### [Backtrader](https://www.backtrader.com/)
105 | Backtrader is a Python-based open-source event-driven trading strategy backtester with support for live trading. The reason I chose Backtrader over other open-source backtesters like [Zipline](https://github.com/quantopian/zipline) and [QuantConnect](https://github.com/QuantConnect/Lean) is its good documentation and community support.
106 | Here's a list of submodules I have written for this project that are derived from the Backtrader package.
107 | * [**Run**](./Storage/q_pack/q_run/run_BT.py) - Script that combines the strategy, analyzers and the datafeeds (see the example invocation after this list).
108 | * [**Strategy**](./Storage/q_pack/q_strategies/simple_strategy_2.py) - A simple daily trading strategy that initiates bracket orders based on the RSI and Stochastic indicators.
109 | * [**Logger Analyzer**](./Storage/q_pack/q_analyzers/bt_logger_analyzer.py) - Logs the price data and the indicators, which are then used for training the Machine Learning model.
110 | * [**Strategy Performance Analyzer**](./Storage/q_pack/q_analyzers/bt_strat_perform_analyzer.py) - Measures the performance of the strategy and saves it in the database, where it is later consumed by the BI tool (Superset).
111 | * [**Round trip trade Performance Analyzer**](./Storage/q_pack/q_analyzers/bt_pos_perform_analyzer.py) - Measures various performance metrics of round-trip trades and saves them in the database, where they are later consumed by the BI tool (Superset).
112 | * [**Transaction Analyzer**](./Storage/q_pack/q_analyzers/bt_transaction_analyzer.py) - Records the executed orders into the database.
113 | * [**Strategy ID Analyzer**](./Storage/q_pack/q_analyzers/bt_strategy_id_analyzer.py) - Keeps a record of the metadata of each backtest or live strategy run.
114 | * [**Oanda Broker Store**](https://github.com/ftomassetti/backtrader-oandav20) - Oanda broker integration for Backtrader live trading.
115 | * [**Postgres Data Feed**](./Storage/q_pack/q_datafeeds/bt_datafeed_postgres.py)
116 |
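For example, a backtest of the bundled strategy is kicked off with a single command from the Jupyter container (as in [Example.ipynb](./Storage/notebooks/Example.ipynb)):

```
python /home/jovyan/work/q_pack/q_run/run_BT.py \
    --strat_name=simple_strategy_2 \
    --mode=backtest \
    --tickers=EUR_USD,GBP_USD \
    --ml_log=True
```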
117 | ![Backtrader](./public/images/backtrader.png)
118 |
119 |
120 | ### [MLflow](https://MLflow.org/)
121 |
122 | Anyone who has worked in the data science field will have heard of [Spark](https://spark.apache.org/); the founders of Spark have brought a similarly disruptive tool to revolutionize the Machine Learning landscape, and that is MLflow. MLflow is an open-source platform to manage the ML lifecycle, including experimentation, reproducibility and deployment. It currently offers four components:
123 | * MLflow Tracking
124 | * MLflow Projects
125 | * MLflow Models
126 | * MLflow Registry
127 |
128 | There are a few other organizations that try to address this problem, but what separates MLflow from the likes of [Google-TFX](https://www.tensorflow.org/tfx), [Facebook-FBLearner Flow](https://engineering.fb.com/core-data/introducing-fblearner-flow-facebook-s-ai-backbone/) and [Uber-Michelangelo](https://eng.uber.com/michelangelo/) is that MLflow tries to address the concerns of the crowd rather than of a single organization; it is therefore universal and community-driven, to the extent that [AWS](https://aws.amazon.com/blogs/machine-learning/build-end-to-end-machine-learning-workflows-with-amazon-sagemaker-and-apache-airflow/) and [Azure](https://docs.microsoft.com/en-us/azure/machine-learning/how-to-use-MLflow) have provided integrations for MLflow.
129 |
130 | In this project all the ML models are tracked by the MLflow Tracker and the model artifacts are stored in Minio; the main reason for doing so is that later on Minio can be swapped for a cloud object store like S3. The ML models are then served using MLflow pyfunc. We also have the option to serve a model as a REST API using MLflow (code in the sample Jupyter notebook); a sketch of the workflow follows.
131 |
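The snippet below is a condensed sketch of that round trip, adapted from [Example.ipynb](./Storage/notebooks/Example.ipynb): log a model to the tracker, then load the artifact back from Minio with pyfunc. The toy training data and the `<run_id>` placeholder are illustrative only, and the container environment is assumed to already route S3 calls to Minio.

```python
import mlflow
import mlflow.sklearn
import mlflow.pyfunc
from sklearn.ensemble import RandomForestRegressor

# Track against the MLflow server running in the mlflow-image container
mlflow.tracking.set_tracking_uri('http://mlflow-image:5500')
mlflow.set_experiment('simple_trading_models')

# Toy stand-in for the preprocessed indicator data (RSI, STOCHASTIC -> fwd_returns)
model = RandomForestRegressor(n_estimators=300, max_depth=10)
model.fit([[80, 20], [30, 70]], [0.01, -0.01])

with mlflow.start_run():
    mlflow.log_param("n_estimators", 300)
    mlflow.sklearn.log_model(model, "model")  # artifact lands in the Minio mlflow-models bucket

# Serving: load the artifact straight from object storage and predict
loaded = mlflow.pyfunc.load_model(model_uri="s3://mlflow-models/<run_id>/artifacts/model")
print(loaded.predict([[80, 20]]))
```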
132 | ## [Apache Airflow](https://airflow.apache.org/)
133 | Apache Airflow is an open-source workflow management platform; basically cron on steroids, with a wide array of integrations with popular platforms and data stores.
134 | In this project we use Airflow mainly for scheduling two tasks: one [DAG](./Storage/airflow/dags/dag_fx_data_download.py) for downloading daily and minute data into the database, controlled by an Excel file, and another [Dynamic DAG](./Storage/airflow/dags/dag_strategy_dynamic.py) for scheduling live strategies, controlled by a CSV file.
135 |
136 | ## [Apache Superset](https://superset.apache.org/)
137 | From the creators of Apache Airflow, Apache Superset is a data visualization tool initially designed at Airbnb and later open-sourced for the community.
138 | Superset is an interactive data exploration tool that lets you slice, dice and visualize data. Why pay for Tableau or Power BI when you can use something that is open source? We use Superset to visualize backtesting and live trading performance.
139 |
140 | Username: guest
141 | Password: guest
142 |
143 | The dashboards and user details are stored in Storage/superset/superset.db.
144 | If you want to reset the credentials and start fresh, just delete this SQLite [superset.db](./Storage/superset/) and create a new one with
145 | ```touch superset.db```
146 | Then, once the container is up and running, execute
147 | ```docker exec -it superset superset-init```
148 |
149 | ![Superset Dashboard](./public/images/superset2.PNG)
150 | ## [Minio](https://min.io/)
151 | MinIO is pioneering high performance object storage. With READ/WRITE speeds of 55 GB/s and 35 GB/s on standard hardware, object storage can operate as the primary storage tier for a diverse set of workloads. Amazon's S3 API is the de facto standard in the object storage world and represents the most modern storage API in the market. MinIO adopted S3 compatibility early on and was the first to extend it to support S3 Select. Because of this S3 compatibility, using Minio gives us a head start on moving this object store to the cloud (AWS S3, Google Cloud Storage, Azure Storage) with minimal changes to the codebase, as the snippet below shows.
152 |
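This is, for instance, exactly how the dynamic DAG and the notebooks in this repository read files out of Minio (credentials as configured for the minio-image container); pointing `endpoint_url` and the keys at AWS S3 would be the only change needed to move to the cloud:

```python
import boto3
import pandas as pd

# S3-compatible client against the local Minio container
s3 = boto3.client('s3',
                  endpoint_url="http://minio-image:9000",
                  aws_access_key_id="minio-image",
                  aws_secret_access_key="minio-image-pass")

# Read the strategy schedule the same way dag_strategy_dynamic.py does
obj = s3.get_object(Bucket="airflow-files", Key="strategy.csv")
strategies = pd.read_csv(obj['Body'], sep=',')
```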
153 | ## [PostgreSQL](https://www.postgresql.org/)
154 | We have 2 databases in our PostgreSQL server: one is the Security Master database, which stores the daily and minute data for Forex symbols in 2 separate tables.
155 | The other database is used for storing the position information and the performance metrics; a minimal query sketch follows below.
156 | The databases can be managed through PgAdmin:
157 | Username: guest
158 | Password: guest
159 |
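A minimal sketch of querying the security master from Python; the connection values are placeholders (the real ones live in [q_credentials](./Storage/q_pack/q_credentials)), and `daily_data` is a hypothetical table name, so check the [schema builders](./Storage/q_pack/db_pack/schema) for the actual DDL:

```python
import pandas as pd
from sqlalchemy import create_engine

# Placeholders: the real host/user/password live in Storage/q_pack/q_credentials/db_secmaster_cred.py
engine = create_engine("postgresql://<user>:<password>@<postgres-host>:5432/<secmaster_db>")

# 'daily_data' is a hypothetical table name; see the schema builders for the real one
bars = pd.read_sql("SELECT * FROM daily_data LIMIT 10", engine)
print(bars.head())
```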
160 | ## Cloud
161 | Every technology used in this project has an analogous managed service offered in the cloud. And the best part of scaling a microservices-based architecture is that you can approach it in many ways to fit your needs. Whether you want to move one piece of functionality to the cloud, or offload the workload of a component to the cloud while keeping all the critical parts on premise, the migration is quite easy compared to a monolithic architecture. Moreover, if the cloud service uses the same underlying technology, the migration is effortless. A simple example is GCP Cloud Composer, which is built on top of Apache Airflow and Kubernetes; all the tasks/DAGs we use in this project can be used in Cloud Composer as well. In general I have found that GCP has a better strategy and technology in place for building a hybrid cloud infrastructure, so here's an architecture for transferring this project entirely onto the GCP platform.
162 | ![Cloud Architecture](./public/images/architecture-cloud.png)
163 |
164 | ## Current-Features
165 | * Infrastructure as Code – less than 5 minutes from scratch to a fully functional trading infrastructure.
166 | * Backtesting and live trading Forex using the Oanda broker API (can easily be modified to accommodate IB for equities).
167 | * Machine Learning model development and deployment using MLflow.
168 | * Multiple symbol strategy support.
169 | * Multiple strategy support.
170 | * Superset BI Dashboard for real-time monitoring of Live trading and backtesting performance results.
171 | * Easily extensible to support any kind of structured data.
172 | * Full code base in Python except for docker-compose setup.
173 |
174 |
175 | ## Planned-Features
176 |
177 | * Support for Equity Database (Backtrader supports [Interactive Brokers out of the box](https://www.backtrader.com/docu/live/ib/ib/))
178 | * Celery/Kubernetes cluster support for Airflow
179 | * More performance and trade analytics dashboards on Superset
180 | * Dynamic DAG for model retraining.
181 | * More Backtrader Examples involving -
182 | - Custom Indicators.
183 | - Alternative Data (Unstructured Data Pipeline)
184 | - [Reinforcement Learning](https://github.com/saeed349/Deep-Reinforcement-Learning-in-Trading).
185 | * Use [MLflow Model Registry](https://www.MLflow.org/docs/latest/model-registry.html).
186 | * Integrate [Alpaca API Store](https://alpaca.markets/)
187 | * Automatic Model Selection for Strategies based on ML performance metrics.
188 |
189 | ## Built With
190 | This project has been developed and tested on 2 Docker environments:
191 | * [WSL](https://docs.microsoft.com/en-us/windows/wsl/about)
192 | * [Docker Toolbox](https://docs.docker.com/toolbox/toolbox_install_windows/).
193 | * IDE - Visual Studio Code: Main reason being the [Container Debugger Feature](https://code.visualstudio.com/docs/remote/containers#_debugging-in-a-container)
194 |
195 |
196 | ## Contributing
197 |
198 | All code contributions must go through a pull request and be approved by a core developer before being merged. This is to ensure proper review of all the code.
199 |
200 | ## License
201 |
202 | This repository is available under the [BSD 3-Clause License](./LICENSE).
203 |
204 | ## Authors
205 |
206 | * **Saeed Rahman** - [LinkedIn](https://www.linkedin.com/in/saeed-349/)
207 |
208 |
209 | ## Acknowledgments
210 |
211 | * [Backtrader community](https://community.backtrader.com/)
212 | * [Backtest-rookies](https://backtest-rookies.com/category/backtrader/)
213 | * [Backtrader Oanda V20 Store](https://github.com/ftomassetti/backtrader-oandav20)
214 | * [Beyond Jupyter notebook - Udemy course](https://www.udemy.com/course/beyond-jupyter-notebooks/)
215 | * [Quantstart](https://www.quantstart.com/)
216 |
--------------------------------------------------------------------------------
/Storage/airflow/dags/__pycache__/cred.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/airflow/dags/__pycache__/cred.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/airflow/dags/__pycache__/dag_dynamic.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/airflow/dags/__pycache__/dag_dynamic.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/airflow/dags/__pycache__/dag_fx_data.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/airflow/dags/__pycache__/dag_fx_data.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/airflow/dags/__pycache__/dag_fx_data_download copy.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/airflow/dags/__pycache__/dag_fx_data_download copy.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/airflow/dags/__pycache__/dag_fx_data_download.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/airflow/dags/__pycache__/dag_fx_data_download.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/airflow/dags/__pycache__/dag_helper_files.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/airflow/dags/__pycache__/dag_helper_files.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/airflow/dags/__pycache__/dag_mlflowserve.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/airflow/dags/__pycache__/dag_mlflowserve.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/airflow/dags/__pycache__/dag_strategy_dynamic.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/airflow/dags/__pycache__/dag_strategy_dynamic.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/airflow/dags/__pycache__/example_dag - 2.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/airflow/dags/__pycache__/example_dag - 2.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/airflow/dags/__pycache__/example_dag.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/airflow/dags/__pycache__/example_dag.cpython-36.pyc
--------------------------------------------------------------------------------
/Storage/airflow/dags/__pycache__/example_dag.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/airflow/dags/__pycache__/example_dag.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/airflow/dags/__pycache__/fx_data.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/airflow/dags/__pycache__/fx_data.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/airflow/dags/__pycache__/fx_oanda_daily.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/airflow/dags/__pycache__/fx_oanda_daily.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/airflow/dags/__pycache__/fx_oanda_download.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/airflow/dags/__pycache__/fx_oanda_download.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/airflow/dags/__pycache__/fx_oanda_minute.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/airflow/dags/__pycache__/fx_oanda_minute.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/airflow/dags/__pycache__/mlflow_test.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/airflow/dags/__pycache__/mlflow_test.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/airflow/dags/__pycache__/test_dag - Copy.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/airflow/dags/__pycache__/test_dag - Copy.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/airflow/dags/__pycache__/test_dag.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/airflow/dags/__pycache__/test_dag.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/airflow/dags/dag_fx_data_download.py:
--------------------------------------------------------------------------------
1 | from airflow import DAG
2 | from airflow.operators.python_operator import PythonOperator
3 | from airflow.contrib.sensors.file_sensor import FileSensor
4 | from datetime import date, timedelta, datetime
5 |
6 | from db_pack.oanda import fx_oanda_daily
7 | from db_pack.oanda import fx_oanda_minute
8 |
9 | DAG_DEFAULT_ARGS={
10 |     'owner':'airflow',
11 |     'depends_on_past':False,
12 |     'retries':1,
13 |     'retry_delay':timedelta(minutes=1)
14 | }
15 |
16 | with DAG('fx_data_download', start_date=datetime(2019,1,1), schedule_interval='@daily', default_args=DAG_DEFAULT_ARGS, catchup=False) as dag:
17 |
18 |     # Daily (EOD) bars are pulled first, then the 1-minute bars
19 |     updating_db_daily = PythonOperator(task_id="updating_db_daily", python_callable=fx_oanda_daily.main)
20 |     updating_db_minute = PythonOperator(task_id="updating_db_minute", python_callable=fx_oanda_minute.main)
21 |
22 |     updating_db_daily >> updating_db_minute
23 |
--------------------------------------------------------------------------------
/Storage/airflow/dags/dag_strategy_dynamic.py:
--------------------------------------------------------------------------------
1 | #https://bigdata-etl.com/apache-airflow-create-dynamic-dag/
2 |
3 | from airflow.models import DAG
4 | from datetime import datetime, timedelta
5 | from airflow.operators.dummy_operator import DummyOperator
6 | from airflow.operators.bash_operator import BashOperator
7 | from datetime import date, timedelta, datetime
8 | import json
9 | import pandas as pd
10 | import boto3
11 |
12 |
13 | def create_dag(dag_id,
14 |                schedule,
15 |                default_args,
16 |                conf):
17 |     dag = DAG(dag_id, default_args=default_args, schedule_interval=schedule)
18 |     with dag:
19 |         init = BashOperator(
20 |             bash_command='echo START',
21 |             task_id='Init',
22 |             dag=dag
23 |         )
24 |         clear = BashOperator(
25 |             bash_command='echo STOPPING',
26 |             task_id='clear',
27 |             dag=dag
28 |         )
29 |         # One BashOperator per row of strategy.csv (passed in as conf)
30 |         for i, row in conf.iterrows():
31 |             command = {}
32 |             command['--strat_name'] = row['Strategy']
33 |             command['--mode'] = str(row['Mode'])
34 |             command['--tickers'] = row['Securities']
35 |             command['--broker_token'] = row['Token']
36 |             command['--broker_account'] = row['Account']
37 |             if row['Model ID'] != "" or row["Strategy Parameters"] != "":
38 |                 command['--strat_param'] = ("model_uri="+row['Model ID']+","+row["Strategy Parameters"]) if row["Strategy Parameters"] != "" else ("model_uri="+row['Model ID'])
39 |             final_command = 'python /usr/local/airflow/dags/q_pack/q_run/run_BT.py ' + ' '.join([(k+"="+v) for k, v in command.items() if v != ''])
40 |             tab = BashOperator(
41 |                 bash_command=final_command,
42 |                 task_id=(str(i)+"_"+row['Strategy']),
43 |                 dag=dag
44 |             )
45 |             init >> tab >> clear
46 |     return dag
47 |
48 | schedule = None  # "@daily"
49 | dag_id = "strategy_dynamic_DAG"
50 | # Read the strategy schedule from the Minio bucket via the S3 API
51 | s3 = boto3.client('s3', endpoint_url="http://minio-image:9000", aws_access_key_id="minio-image", aws_secret_access_key="minio-image-pass")
52 | Bucket = "airflow-files"
53 | Key = "strategy.csv"
54 | read_file = s3.get_object(Bucket=Bucket, Key=Key)
55 | df = pd.read_csv(read_file['Body'], sep=',')
56 | df.fillna('', inplace=True)
57 |
58 | args = {
59 |     'owner': 'airflow',
60 |     'depends_on_past': False,
61 |     'start_date': datetime(2019,1,1),
62 |     # 'start_date': datetime.now(),
63 |     'email_on_failure': False,
64 |     'email_on_retry': False,
65 |     'retries': 1,
66 |     'retry_delay': timedelta(minutes=1),
67 |     'concurrency': 1,
68 |     'max_active_runs': 1
69 | }
70 | globals()[dag_id] = create_dag(dag_id, schedule, args, df)
68 |
--------------------------------------------------------------------------------
/Storage/airflow/dags/process_configuration.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "Dynamic_DAG",
3 | "schedule": "@hourly",
4 | "tables": [
5 | "Customer",
6 | "Concact",
7 | "Address",
8 | "Product",
9 | "Test-Test"
10 | ]
11 | }
--------------------------------------------------------------------------------
/Storage/minio/storage/airflow-files/interested_tickers.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/minio/storage/airflow-files/interested_tickers.xlsx
--------------------------------------------------------------------------------
/Storage/minio/storage/airflow-files/strategy.csv:
--------------------------------------------------------------------------------
1 | Strategy,Mode,Securities,Model ID,Account,Token,Strategy Parameters
2 | simple_strategy_2,backtest,"EUR_USD,GBP_USD,NZD_USD,USD_JPY",0255c7673c164de6815d89a4228b846c,,,ml_serving=True
3 | simple_strategy,live,"EUR_USD,GBP_USD",,,,
4 |
--------------------------------------------------------------------------------
/Storage/minio/storage/mlflow-models/2a71796bd39c429d89a1cf2006624240/artifacts/model/MLmodel:
--------------------------------------------------------------------------------
1 | artifact_path: model
2 | flavors:
3 |   python_function:
4 |     data: model.pkl
5 |     env: conda.yaml
6 |     loader_module: mlflow.sklearn
7 |     python_version: 3.7.3
8 |   sklearn:
9 |     pickled_model: model.pkl
10 |     serialization_format: cloudpickle
11 |     sklearn_version: 0.21.3
12 | run_id: 2a71796bd39c429d89a1cf2006624240
13 | utc_time_created: '2020-01-07 07:08:48.638504'
14 |
--------------------------------------------------------------------------------
/Storage/minio/storage/mlflow-models/2a71796bd39c429d89a1cf2006624240/artifacts/model/conda.yaml:
--------------------------------------------------------------------------------
1 | channels:
2 | - defaults
3 | dependencies:
4 | - python=3.7.3
5 | - scikit-learn=0.21.3
6 | - pip:
7 |   - mlflow
8 |   - cloudpickle==1.2.2
9 | name: mlflow-env
10 |
--------------------------------------------------------------------------------
/Storage/minio/storage/mlflow-models/2a71796bd39c429d89a1cf2006624240/artifacts/model/model.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/minio/storage/mlflow-models/2a71796bd39c429d89a1cf2006624240/artifacts/model/model.pkl
--------------------------------------------------------------------------------
/Storage/notebooks/.ipynb_checkpoints/10-19-2019-checkpoint.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [],
3 | "metadata": {},
4 | "nbformat": 4,
5 | "nbformat_minor": 2
6 | }
7 |
--------------------------------------------------------------------------------
/Storage/notebooks/.ipynb_checkpoints/Example-checkpoint.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Running the strategy Backtests"
8 | ]
9 | },
10 | {
11 | "cell_type": "code",
12 | "execution_count": null,
13 | "metadata": {},
14 | "outputs": [],
15 | "source": [
16 | "! python /home/jovyan/work/q_pack/q_run/run_BT.py \\\n",
17 | "--strat_name=simple_strategy_2 \\\n",
18 | "--mode=backtest \\\n",
19 | "--tickers=EUR_USD,GBP_USD,USD_JPY,NZD_USD"
20 | ]
21 | },
22 | {
23 | "cell_type": "code",
24 | "execution_count": 1,
25 | "metadata": {},
26 | "outputs": [
27 | {
28 | "name": "stdout",
29 | "output_type": "stream",
30 | "text": [
31 | "Strategy run finished with Run ID: 1\n",
32 | "ML Log Saved in Minio Bucket: model-support-files as 1_ml_log.csv\n",
33 | "Profit ... or Loss: 32.62\n"
34 | ]
35 | }
36 | ],
37 | "source": [
38 | "! python /home/jovyan/work/q_pack/q_run/run_BT.py \\\n",
39 | "--strat_name=simple_strategy_2 \\\n",
40 | "--mode=backtest \\\n",
41 | "--tickers=EUR_USD,GBP_USD \\\n",
42 | "--ml_log=True"
43 | ]
44 | },
45 | {
46 | "cell_type": "code",
47 | "execution_count": 2,
48 | "metadata": {},
49 | "outputs": [],
50 | "source": [
51 | "import os\n",
52 | "import warnings\n",
53 | "import sys\n",
54 | "import pandas as pd\n",
55 | "import numpy as np\n",
56 | "from sklearn.metrics import mean_squared_error, mean_absolute_error, r2_score\n",
57 | "from sklearn.model_selection import train_test_split\n",
58 | "from sklearn.ensemble import RandomForestRegressor, GradientBoostingRegressor\n",
59 | "import mlflow\n",
60 | "import mlflow.sklearn\n",
61 | "import mlflow.pyfunc\n",
62 | "import io\n",
63 | "import boto3"
64 | ]
65 | },
66 | {
67 | "cell_type": "markdown",
68 | "metadata": {},
69 | "source": [
70 | "# Preprocessing the Log file\n",
71 | "The Log file generated from the Backtest is in a simple form than can be preprocessed into a format that can be fed into our ML Model"
72 | ]
73 | },
74 | {
75 | "cell_type": "code",
76 | "execution_count": 3,
77 | "metadata": {},
78 | "outputs": [
79 | {
80 | "data": {
81 | "text/html": [
82 | "\n",
83 | "\n",
96 | "
\n",
97 | " \n",
98 | " \n",
99 | " | \n",
100 | " security | \n",
101 | " datetime | \n",
102 | " close | \n",
103 | " RSI | \n",
104 | " STOCHASTIC | \n",
105 | " ATR | \n",
106 | "
\n",
107 | " \n",
108 | " \n",
109 | " \n",
110 | " 0 | \n",
111 | " EUR_USD | \n",
112 | " 2010-12-30 22:00:00 | \n",
113 | " 1.32855 | \n",
114 | " NaN | \n",
115 | " NaN | \n",
116 | " NaN | \n",
117 | "
\n",
118 | " \n",
119 | " 1 | \n",
120 | " EUR_USD | \n",
121 | " 2010-12-31 22:00:00 | \n",
122 | " 1.33815 | \n",
123 | " NaN | \n",
124 | " NaN | \n",
125 | " NaN | \n",
126 | "
\n",
127 | " \n",
128 | " 2 | \n",
129 | " EUR_USD | \n",
130 | " 2011-01-01 22:00:00 | \n",
131 | " 1.33280 | \n",
132 | " NaN | \n",
133 | " NaN | \n",
134 | " NaN | \n",
135 | "
\n",
136 | " \n",
137 | " 3 | \n",
138 | " EUR_USD | \n",
139 | " 2011-01-02 22:00:00 | \n",
140 | " 1.32512 | \n",
141 | " NaN | \n",
142 | " NaN | \n",
143 | " NaN | \n",
144 | "
\n",
145 | " \n",
146 | " 4 | \n",
147 | " EUR_USD | \n",
148 | " 2011-01-03 22:00:00 | \n",
149 | " 1.32919 | \n",
150 | " NaN | \n",
151 | " NaN | \n",
152 | " NaN | \n",
153 | "
\n",
154 | " \n",
155 | " 5 | \n",
156 | " EUR_USD | \n",
157 | " 2011-01-04 22:00:00 | \n",
158 | " 1.31258 | \n",
159 | " NaN | \n",
160 | " NaN | \n",
161 | " 0.007796 | \n",
162 | "
\n",
163 | " \n",
164 | " 6 | \n",
165 | " EUR_USD | \n",
166 | " 2011-01-05 22:00:00 | \n",
167 | " 1.29978 | \n",
168 | " NaN | \n",
169 | " NaN | \n",
170 | " 0.007135 | \n",
171 | "
\n",
172 | " \n",
173 | " 7 | \n",
174 | " EUR_USD | \n",
175 | " 2011-01-06 22:00:00 | \n",
176 | " 1.29057 | \n",
177 | " NaN | \n",
178 | " NaN | \n",
179 | " 0.006176 | \n",
180 | "
\n",
181 | " \n",
182 | " 8 | \n",
183 | " EUR_USD | \n",
184 | " 2011-01-08 22:00:00 | \n",
185 | " 1.28607 | \n",
186 | " NaN | \n",
187 | " NaN | \n",
188 | " 0.005193 | \n",
189 | "
\n",
190 | " \n",
191 | " 9 | \n",
192 | " EUR_USD | \n",
193 | " 2011-01-09 22:00:00 | \n",
194 | " 1.28742 | \n",
195 | " NaN | \n",
196 | " NaN | \n",
197 | " 0.006228 | \n",
198 | "
\n",
199 | " \n",
200 | "
\n",
201 | "
"
202 | ],
203 | "text/plain": [
204 | " security datetime close RSI STOCHASTIC ATR\n",
205 | "0 EUR_USD 2010-12-30 22:00:00 1.32855 NaN NaN NaN\n",
206 | "1 EUR_USD 2010-12-31 22:00:00 1.33815 NaN NaN NaN\n",
207 | "2 EUR_USD 2011-01-01 22:00:00 1.33280 NaN NaN NaN\n",
208 | "3 EUR_USD 2011-01-02 22:00:00 1.32512 NaN NaN NaN\n",
209 | "4 EUR_USD 2011-01-03 22:00:00 1.32919 NaN NaN NaN\n",
210 | "5 EUR_USD 2011-01-04 22:00:00 1.31258 NaN NaN 0.007796\n",
211 | "6 EUR_USD 2011-01-05 22:00:00 1.29978 NaN NaN 0.007135\n",
212 | "7 EUR_USD 2011-01-06 22:00:00 1.29057 NaN NaN 0.006176\n",
213 | "8 EUR_USD 2011-01-08 22:00:00 1.28607 NaN NaN 0.005193\n",
214 | "9 EUR_USD 2011-01-09 22:00:00 1.28742 NaN NaN 0.006228"
215 | ]
216 | },
217 | "execution_count": 3,
218 | "metadata": {},
219 | "output_type": "execute_result"
220 | }
221 | ],
222 | "source": [
223 | "s3 = boto3.client('s3',endpoint_url=\"http://minio-image:9000\",aws_access_key_id=\"minio-image\",aws_secret_access_key=\"minio-image-pass\")\n",
224 | "data = pd.read_csv(s3.get_object(Bucket=\"model-support-files\", Key='1_ml_log.csv')['Body'],sep=',')\n",
225 | "data.head(10)"
226 | ]
227 | },
228 | {
229 | "cell_type": "code",
230 | "execution_count": 4,
231 | "metadata": {},
232 | "outputs": [
233 | {
234 | "data": {
235 | "text/plain": [
236 | "'processed_1_ml_log.csv'"
237 | ]
238 | },
239 | "execution_count": 4,
240 | "metadata": {},
241 | "output_type": "execute_result"
242 | }
243 | ],
244 | "source": [
245 | "from ml_pack.preprocessing.ml_preprocessing import ml_preprocessing\n",
246 | "preprocessed_file=ml_preprocessing(input_file=\"1_ml_log.csv\",fwd_returns=5)\n",
247 | "preprocessed_file"
248 | ]
249 | },
250 | {
251 | "cell_type": "code",
252 | "execution_count": 5,
253 | "metadata": {},
254 | "outputs": [],
255 | "source": [
256 | "s3 = boto3.client('s3',endpoint_url=\"http://minio-image:9000\",aws_access_key_id=\"minio-image\",aws_secret_access_key=\"minio-image-pass\")\n",
257 | "data = pd.read_csv(s3.get_object(Bucket=\"model-support-files\", Key=preprocessed_file)['Body'],sep=',')"
258 | ]
259 | },
260 | {
261 | "cell_type": "markdown",
262 | "metadata": {},
263 | "source": [
264 | "Now the preprocesssed file is in a format that we can feed into the ML model\n",
265 | "The target variable is 5 day forward returns and the features are the technical indicator"
266 | ]
267 | },
268 | {
269 | "cell_type": "code",
270 | "execution_count": 6,
271 | "metadata": {},
272 | "outputs": [
273 | {
274 | "data": {
275 | "text/html": [
276 | "\n",
277 | "\n",
290 | "
\n",
291 | " \n",
292 | " \n",
293 | " | \n",
294 | " RSI | \n",
295 | " STOCHASTIC | \n",
296 | " fwd_returns | \n",
297 | "
\n",
298 | " \n",
299 | " \n",
300 | " \n",
301 | " 0 | \n",
302 | " 56.991619 | \n",
303 | " 78.882076 | \n",
304 | " -0.005146 | \n",
305 | "
\n",
306 | " \n",
307 | " 1 | \n",
308 | " 59.448606 | \n",
309 | " 77.279931 | \n",
310 | " 0.003864 | \n",
311 | "
\n",
312 | " \n",
313 | " 2 | \n",
314 | " 67.632318 | \n",
315 | " 67.385341 | \n",
316 | " 0.019120 | \n",
317 | "
\n",
318 | " \n",
319 | " 3 | \n",
320 | " 60.945457 | \n",
321 | " 79.322084 | \n",
322 | " 0.013553 | \n",
323 | "
\n",
324 | " \n",
325 | " 4 | \n",
326 | " 56.546196 | \n",
327 | " 72.484186 | \n",
328 | " -0.000088 | \n",
329 | "
\n",
330 | " \n",
331 | " 5 | \n",
332 | " 67.387740 | \n",
333 | " 72.292758 | \n",
334 | " 0.016796 | \n",
335 | "
\n",
336 | " \n",
337 | " 6 | \n",
338 | " 54.832174 | \n",
339 | " 60.957090 | \n",
340 | " -0.002328 | \n",
341 | "
\n",
342 | " \n",
343 | " 7 | \n",
344 | " 64.459591 | \n",
345 | " 63.829216 | \n",
346 | " 0.013410 | \n",
347 | "
\n",
348 | " \n",
349 | " 8 | \n",
350 | " 55.613371 | \n",
351 | " 49.955854 | \n",
352 | " 0.000744 | \n",
353 | "
\n",
354 | " \n",
355 | " 9 | \n",
356 | " 65.796292 | \n",
357 | " 58.935306 | \n",
358 | " 0.018234 | \n",
359 | "
\n",
360 | " \n",
361 | "
\n",
362 | "
"
363 | ],
364 | "text/plain": [
365 | " RSI STOCHASTIC fwd_returns\n",
366 | "0 56.991619 78.882076 -0.005146\n",
367 | "1 59.448606 77.279931 0.003864\n",
368 | "2 67.632318 67.385341 0.019120\n",
369 | "3 60.945457 79.322084 0.013553\n",
370 | "4 56.546196 72.484186 -0.000088\n",
371 | "5 67.387740 72.292758 0.016796\n",
372 | "6 54.832174 60.957090 -0.002328\n",
373 | "7 64.459591 63.829216 0.013410\n",
374 | "8 55.613371 49.955854 0.000744\n",
375 | "9 65.796292 58.935306 0.018234"
376 | ]
377 | },
378 | "execution_count": 6,
379 | "metadata": {},
380 | "output_type": "execute_result"
381 | }
382 | ],
383 | "source": [
384 | "data.head(10)"
385 | ]
386 | },
387 | {
388 | "cell_type": "markdown",
389 | "metadata": {},
390 | "source": [
391 | "# Train a ML-model using MLflow"
392 | ]
393 | },
394 | {
395 | "cell_type": "markdown",
396 | "metadata": {},
397 | "source": [
398 | "### Point to the MLflow tracking endpoint"
399 | ]
400 | },
401 | {
402 | "cell_type": "code",
403 | "execution_count": 7,
404 | "metadata": {},
405 | "outputs": [],
406 | "source": [
407 | "mlflow.tracking.set_tracking_uri('http://mlflow-image:5500')"
408 | ]
409 | },
410 | {
411 | "cell_type": "markdown",
412 | "metadata": {},
413 | "source": [
414 | "### Create a MLflow experiment"
415 | ]
416 | },
417 | {
418 | "cell_type": "code",
419 | "execution_count": 8,
420 | "metadata": {},
421 | "outputs": [
422 | {
423 | "data": {
424 | "text/plain": [
425 | "'1'"
426 | ]
427 | },
428 | "execution_count": 8,
429 | "metadata": {},
430 | "output_type": "execute_result"
431 | }
432 | ],
433 | "source": [
434 | "mlflow.create_experiment(name='simple_trading_models', artifact_location='s3://mlflow-models')"
435 | ]
436 | },
437 | {
438 | "cell_type": "code",
439 | "execution_count": 9,
440 | "metadata": {},
441 | "outputs": [],
442 | "source": [
443 | "mlflow.set_experiment('simple_trading_models')"
444 | ]
445 | },
446 | {
447 | "cell_type": "markdown",
448 | "metadata": {},
449 | "source": [
450 | "### Fetching the preprocessed data from Minio"
451 | ]
452 | },
453 | {
454 | "cell_type": "code",
455 | "execution_count": 10,
456 | "metadata": {},
457 | "outputs": [
458 | {
459 | "name": "stdout",
460 | "output_type": "stream",
461 | "text": [
462 | "RandomForest Model (n_estimators=300.000000, max_depth=10.000000):\n",
463 | " RMSE: 0.008593323344445921\n",
464 | " MAE: 0.006673740830905886\n",
465 | " R2: 0.3790275404533735\n"
466 | ]
467 | }
468 | ],
469 | "source": [
470 | "def eval_metrics(actual, pred):\n",
471 | " rmse = np.sqrt(mean_squared_error(actual, pred))\n",
472 | " mae = mean_absolute_error(actual, pred)\n",
473 | " r2 = r2_score(actual, pred)\n",
474 | " return rmse, mae, r2\n",
475 | "\n",
476 | "warnings.filterwarnings(\"ignore\")\n",
477 | "np.random.seed(40)\n",
478 | "# Split the data into training and test sets. (0.75, 0.25) split.\n",
479 | "train, test = train_test_split(data)\n",
480 | "\n",
481 | "# The predicted column is \"fwd_returns\"\n",
482 | "train_x = train.drop([\"fwd_returns\"], axis=1)\n",
483 | "test_x = test.drop([\"fwd_returns\"], axis=1)\n",
484 | "train_y = train[[\"fwd_returns\"]]\n",
485 | "test_y = test[[\"fwd_returns\"]]\n",
486 | "\n",
487 | "n_estimators=300\n",
488 | "max_depth=10\n",
489 | "\n",
490 | "lr = RandomForestRegressor(n_estimators=n_estimators,max_depth=max_depth)\n",
491 | "lr.fit(train_x, train_y)\n",
492 | "predicted_qualities = lr.predict(test_x)\n",
493 | "(rmse, mae, r2) = eval_metrics(test_y, predicted_qualities)\n",
494 | "\n",
495 | "print(\"RandomForest Model (n_estimators=%f, max_depth=%f):\" % (n_estimators, max_depth))\n",
496 | "print(\" RMSE: %s\" % rmse)\n",
497 | "print(\" MAE: %s\" % mae)\n",
498 | "print(\" R2: %s\" % r2)\n",
499 | "\n",
500 | "mlflow.log_param(\"n_estimators\", n_estimators)\n",
501 | "mlflow.log_param(\"max_depth\", max_depth)\n",
502 | "mlflow.log_metric(\"rmse\", rmse)\n",
503 | "mlflow.log_metric(\"r2\", r2)\n",
504 | "mlflow.log_metric(\"mae\", mae)\n",
505 | "mlflow.sklearn.log_model(lr, \"model\")"
506 | ]
507 | },
508 | {
509 | "cell_type": "markdown",
510 | "metadata": {},
511 | "source": [
512 | "## Model Serving"
513 | ]
514 | },
515 | {
516 | "cell_type": "code",
517 | "execution_count": 11,
518 | "metadata": {},
519 | "outputs": [],
520 | "source": [
521 | "import mlflow.pyfunc"
522 | ]
523 | },
524 | {
525 | "cell_type": "code",
526 | "execution_count": 12,
527 | "metadata": {},
528 | "outputs": [],
529 | "source": [
530 | "model_predict=mlflow.pyfunc.load_model(model_uri=\"s3://mlflow-models/adebcab9b2d949289e24bd0afb4b3846/artifacts/model\")"
531 | ]
532 | },
533 | {
534 | "cell_type": "code",
535 | "execution_count": 13,
536 | "metadata": {},
537 | "outputs": [
538 | {
539 | "data": {
540 | "text/plain": [
541 | "array([0.00771085])"
542 | ]
543 | },
544 | "execution_count": 13,
545 | "metadata": {},
546 | "output_type": "execute_result"
547 | }
548 | ],
549 | "source": [
550 | "model_predict.predict([[80,20]])"
551 | ]
552 | },
553 | {
554 | "cell_type": "markdown",
555 | "metadata": {},
556 | "source": [
557 | "## Running the strategy with the model\n",
558 | "Look at the simple_strategy_2.py Backtrader strategy file to see how the ML model is being served "
559 | ]
560 | },
561 | {
562 | "cell_type": "code",
563 | "execution_count": 15,
564 | "metadata": {},
565 | "outputs": [
566 | {
567 | "name": "stdout",
568 | "output_type": "stream",
569 | "text": [
570 | "s3://mlflow-models/adebcab9b2d949289e24bd0afb4b3846/artifacts/model\n",
571 | "Strategy run finished with Run ID: 3\n",
572 | "Profit ... or Loss: -1621.91\n"
573 | ]
574 | }
575 | ],
576 | "source": [
577 | "!python /home/jovyan/work/q_pack/q_run/run_BT.py \\\n",
578 | "--strat_name=simple_strategy_2 \\\n",
579 | "--strat_param=ml_serving=True,model_uri=adebcab9b2d949289e24bd0afb4b3846 \\\n",
580 | "--ml_log=False \\\n",
581 | "--mode=backtest \\\n",
582 | "--tickers=EUR_USD,GBP_USD"
583 | ]
584 | },
585 | {
586 | "cell_type": "markdown",
587 | "metadata": {},
588 | "source": [
589 | "## Packaging the model using MLflow (BONUS)\n",
590 | "For reproducibility and for sharing"
591 | ]
592 | },
593 | {
594 | "cell_type": "code",
595 | "execution_count": 55,
596 | "metadata": {},
597 | "outputs": [],
598 | "source": [
599 | "mlflow.projects.run(\"/home/jovyan/work/BT/mlflow_project\",parameters={'n_estimators':200,'max_depth':10})\n"
600 | ]
601 | },
602 | {
603 | "cell_type": "markdown",
604 | "metadata": {},
605 | "source": [
606 | "## Serving the model as a rest API using MLflow (BONUS)\n",
607 | "Serrve the model in the mlflow tracking container using"
608 | ]
609 | },
610 | {
611 | "cell_type": "markdown",
612 | "metadata": {},
613 | "source": [
614 | "docker exec ekholabs-mlflow /bin/sh -c \"mlflow models serve -m /ekholabs-mlflow/mlruns/0/a85ab97a393045afaea2b550a79686e8/artifacts/model --host=0.0.0.0 -p 2349\""
615 | ]
616 | },
617 | {
618 | "cell_type": "markdown",
619 | "metadata": {},
620 | "source": [
621 | "Then you can call in the program using curl"
622 | ]
623 | },
624 | {
625 | "cell_type": "code",
626 | "execution_count": null,
627 | "metadata": {},
628 | "outputs": [],
629 | "source": [
630 | "! curl -X POST -H \"Content-Type:application/json; format=pandas-split\" --data '{\"columns\":[\"alcohol\", \"chlorides\", \"citric acid\", \"density\", \"fixed acidity\", \"free sulfur dioxide\", \"pH\", \"residual sugar\", \"sulphates\", \"total sulfur dioxide\", \"volatile acidity\"],\"data\":[[12.8, 2, 10, 0.98, 1, 45, 2, 1.2, 44, 4, 0.66]]}' http://mlflow-image:2349/invocations"
631 | ]
632 | }
633 | ],
634 | "metadata": {
635 | "kernelspec": {
636 | "display_name": "Python 3",
637 | "language": "python",
638 | "name": "python3"
639 | },
640 | "language_info": {
641 | "codemirror_mode": {
642 | "name": "ipython",
643 | "version": 3
644 | },
645 | "file_extension": ".py",
646 | "mimetype": "text/x-python",
647 | "name": "python",
648 | "nbconvert_exporter": "python",
649 | "pygments_lexer": "ipython3",
650 | "version": "3.7.3"
651 | }
652 | },
653 | "nbformat": 4,
654 | "nbformat_minor": 2
655 | }
656 |
--------------------------------------------------------------------------------
/Storage/notebooks/.ipynb_checkpoints/Mlflow Serving Test - Only look at this-checkpoint.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "metadata": {
7 | "scrolled": true
8 | },
9 | "outputs": [],
10 | "source": [
11 | "import mlflow\n",
12 | "mlflow.tracking.set_tracking_uri('http://ekholabs-mlflow:5500')"
13 | ]
14 | },
15 | {
16 | "cell_type": "code",
17 | "execution_count": null,
18 | "metadata": {},
19 | "outputs": [],
20 | "source": [
21 | "! python tutorial.py .35 .8"
22 | ]
23 | },
24 | {
25 | "cell_type": "code",
26 | "execution_count": 18,
27 | "metadata": {},
28 | "outputs": [
29 | {
30 | "name": "stderr",
31 | "output_type": "stream",
32 | "text": [
33 | "2019/09/23 04:23:19 INFO mlflow.projects: === Creating conda environment mlflow-e999157b78f16dc8fc605058fa1daec52ebf1842 ===\n",
34 | "2019/09/23 04:24:14 INFO mlflow.projects: === Created directory /tmp/tmpn9umxwh5 for downloading remote URIs passed to arguments of type 'path' ===\n",
35 | "2019/09/23 04:24:14 INFO mlflow.projects: === Running command 'source activate mlflow-e999157b78f16dc8fc605058fa1daec52ebf1842 1>&2 && python train.py 0.3 0.8' in run with ID 'a85ab97a393045afaea2b550a79686e8' === \n",
36 | "2019/09/23 04:24:16 INFO mlflow.projects: === Run (ID 'a85ab97a393045afaea2b550a79686e8') succeeded ===\n"
37 | ]
38 | },
39 | {
40 | "data": {
41 | "text/plain": [
42 | ""
43 | ]
44 | },
45 | "execution_count": 18,
46 | "metadata": {},
47 | "output_type": "execute_result"
48 | }
49 | ],
50 | "source": [
51 | "mlflow.projects.run(\"/home/jovyan/work/sklearn_elasticnet_wine\",parameters={'alpha':0.3,'l1_ratio':0.8})"
52 | ]
53 | },
54 | {
55 | "cell_type": "markdown",
56 | "metadata": {},
57 | "source": [
58 | "### Then serve the model in the mlflow tracking container using \n",
59 | "docker exec ekholabs-mlflow /bin/sh -c \"mlflow models serve -m /ekholabs-mlflow/mlruns/0/a85ab97a393045afaea2b550a79686e8/artifacts/model --host=0.0.0.0 -p 2349\""
60 | ]
61 | },
62 | {
63 | "cell_type": "code",
64 | "execution_count": 1,
65 | "metadata": {},
66 | "outputs": [
67 | {
68 | "name": "stdout",
69 | "output_type": "stream",
70 | "text": [
71 | "[4.945446608985396]"
72 | ]
73 | }
74 | ],
75 | "source": [
76 | "! curl -X POST -H \"Content-Type:application/json; format=pandas-split\" --data '{\"columns\":[\"alcohol\", \"chlorides\", \"citric acid\", \"density\", \"fixed acidity\", \"free sulfur dioxide\", \"pH\", \"residual sugar\", \"sulphates\", \"total sulfur dioxide\", \"volatile acidity\"],\"data\":[[12.8, 2, 10, 0.98, 1, 45, 99, 1.2, 44, 75, 0.66]]}' http://ekholabs-mlflow:2349/invocations"
77 | ]
78 | },
79 | {
80 | "cell_type": "code",
81 | "execution_count": null,
82 | "metadata": {},
83 | "outputs": [],
84 | "source": []
85 | }
86 | ],
87 | "metadata": {
88 | "kernelspec": {
89 | "display_name": "Python 3",
90 | "language": "python",
91 | "name": "python3"
92 | },
93 | "language_info": {
94 | "codemirror_mode": {
95 | "name": "ipython",
96 | "version": 3
97 | },
98 | "file_extension": ".py",
99 | "mimetype": "text/x-python",
100 | "name": "python",
101 | "nbconvert_exporter": "python",
102 | "pygments_lexer": "ipython3",
103 | "version": "3.7.3"
104 | }
105 | },
106 | "nbformat": 4,
107 | "nbformat_minor": 2
108 | }
109 |
--------------------------------------------------------------------------------
/Storage/notebooks/.ipynb_checkpoints/PODA PATTI-checkpoint.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [],
3 | "metadata": {},
4 | "nbformat": 4,
5 | "nbformat_minor": 2
6 | }
7 |
--------------------------------------------------------------------------------
/Storage/notebooks/.vscode/launch.json:
--------------------------------------------------------------------------------
1 | {
2 | // Use IntelliSense to learn about possible attributes.
3 | // Hover to view descriptions of existing attributes.
4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
5 | "version": "0.2.0",
6 | "configurations": [
7 | {
8 | "name": "Python: Current File",
9 | "type": "python",
10 | "request": "launch",
11 | "program": "${file}",
12 | "console": "integratedTerminal"
13 | }
14 | ]
15 | }
--------------------------------------------------------------------------------
/Storage/notebooks/.vscode/settings.json:
--------------------------------------------------------------------------------
1 | {
2 | "python.pythonPath": "/opt/conda/bin/python"
3 | }
--------------------------------------------------------------------------------
/Storage/notebooks/Example.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Running the strategy Backtests"
8 | ]
9 | },
10 | {
11 | "cell_type": "code",
12 | "execution_count": 1,
13 | "metadata": {},
14 | "outputs": [
15 | {
16 | "name": "stdout",
17 | "output_type": "stream",
18 | "text": [
19 | "Strategy run finished with Run ID: 1\r\n",
20 | "Profit ... or Loss: 6804.89\r\n"
21 | ]
22 | }
23 | ],
24 | "source": [
25 | "! python /home/jovyan/work/q_pack/q_run/run_BT.py \\\n",
26 | "--strat_name=simple_strategy \\\n",
27 | "--mode=backtest \\\n",
28 | "--tickers=EUR_USD,GBP_USD,USD_JPY,NZD_USD"
29 | ]
30 | },
31 | {
32 | "cell_type": "code",
33 | "execution_count": 1,
34 | "metadata": {},
35 | "outputs": [
36 | {
37 | "name": "stdout",
38 | "output_type": "stream",
39 | "text": [
40 | "Strategy run finished with Run ID: 6\n",
41 | "ML Log Saved in Minio Bucket: model-support-files as 6_ml_log.csv\n",
42 | "Profit ... or Loss: 36.68\n"
43 | ]
44 | }
45 | ],
46 | "source": [
47 | "! python /home/jovyan/work/q_pack/q_run/run_BT.py \\\n",
48 | "--strat_name=simple_strategy_2 \\\n",
49 | "--mode=backtest \\\n",
50 | "--tickers=EUR_USD,GBP_USD,USD_JPY \\\n",
51 | "--ml_log=True"
52 | ]
53 | },
54 | {
55 | "cell_type": "code",
56 | "execution_count": 3,
57 | "metadata": {},
58 | "outputs": [],
59 | "source": [
60 | "import os\n",
61 | "import warnings\n",
62 | "import sys\n",
63 | "import pandas as pd\n",
64 | "import numpy as np\n",
65 | "from sklearn.metrics import mean_squared_error, mean_absolute_error, r2_score\n",
66 | "from sklearn.model_selection import train_test_split\n",
67 | "from sklearn.ensemble import RandomForestRegressor, GradientBoostingRegressor\n",
68 | "import mlflow\n",
69 | "import mlflow.sklearn\n",
70 | "import mlflow.pyfunc\n",
71 | "import io\n",
72 | "import boto3"
73 | ]
74 | },
75 | {
76 | "cell_type": "markdown",
77 | "metadata": {},
78 | "source": [
79 | "# Preprocessing the Log file\n",
80 | "The Log file generated from the Backtest is in a simple form than can be preprocessed into a format that can be fed into our ML Model"
81 | ]
82 | },
83 | {
84 | "cell_type": "code",
85 | "execution_count": 4,
86 | "metadata": {},
87 | "outputs": [
88 | {
89 | "data": {
90 | "text/html": [
91 | "\n",
92 | "\n",
105 | "
\n",
106 | " \n",
107 | " \n",
108 | " | \n",
109 | " security | \n",
110 | " datetime | \n",
111 | " close | \n",
112 | " RSI | \n",
113 | " STOCHASTIC | \n",
114 | " ATR | \n",
115 | "
\n",
116 | " \n",
117 | " \n",
118 | " \n",
119 | " 0 | \n",
120 | " EUR_USD | \n",
121 | " 2010-12-30 22:00:00 | \n",
122 | " 1.32855 | \n",
123 | " NaN | \n",
124 | " NaN | \n",
125 | " NaN | \n",
126 | "
\n",
127 | " \n",
128 | " 1 | \n",
129 | " EUR_USD | \n",
130 | " 2010-12-31 22:00:00 | \n",
131 | " 1.33815 | \n",
132 | " NaN | \n",
133 | " NaN | \n",
134 | " NaN | \n",
135 | "
\n",
136 | " \n",
137 | " 2 | \n",
138 | " EUR_USD | \n",
139 | " 2011-01-01 22:00:00 | \n",
140 | " 1.33280 | \n",
141 | " NaN | \n",
142 | " NaN | \n",
143 | " NaN | \n",
144 | "
\n",
145 | " \n",
146 | " 3 | \n",
147 | " EUR_USD | \n",
148 | " 2011-01-02 22:00:00 | \n",
149 | " 1.32512 | \n",
150 | " NaN | \n",
151 | " NaN | \n",
152 | " NaN | \n",
153 | "
\n",
154 | " \n",
155 | " 4 | \n",
156 | " EUR_USD | \n",
157 | " 2011-01-03 22:00:00 | \n",
158 | " 1.32919 | \n",
159 | " NaN | \n",
160 | " NaN | \n",
161 | " NaN | \n",
162 | "
\n",
163 | " \n",
164 | " 5 | \n",
165 | " EUR_USD | \n",
166 | " 2011-01-04 22:00:00 | \n",
167 | " 1.31258 | \n",
168 | " NaN | \n",
169 | " NaN | \n",
170 | " 0.007796 | \n",
171 | "
\n",
172 | " \n",
173 | " 6 | \n",
174 | " EUR_USD | \n",
175 | " 2011-01-05 22:00:00 | \n",
176 | " 1.29978 | \n",
177 | " NaN | \n",
178 | " NaN | \n",
179 | " 0.007135 | \n",
180 | "
\n",
181 | " \n",
182 | " 7 | \n",
183 | " EUR_USD | \n",
184 | " 2011-01-06 22:00:00 | \n",
185 | " 1.29057 | \n",
186 | " NaN | \n",
187 | " NaN | \n",
188 | " 0.006176 | \n",
189 | "
\n",
190 | " \n",
191 | " 8 | \n",
192 | " EUR_USD | \n",
193 | " 2011-01-08 22:00:00 | \n",
194 | " 1.28607 | \n",
195 | " NaN | \n",
196 | " NaN | \n",
197 | " 0.005193 | \n",
198 | "
\n",
199 | " \n",
200 | " 9 | \n",
201 | " EUR_USD | \n",
202 | " 2011-01-09 22:00:00 | \n",
203 | " 1.28742 | \n",
204 | " NaN | \n",
205 | " NaN | \n",
206 | " 0.006228 | \n",
207 | "
\n",
208 | " \n",
209 | "
\n",
210 | "
"
211 | ],
212 | "text/plain": [
213 | " security datetime close RSI STOCHASTIC ATR\n",
214 | "0 EUR_USD 2010-12-30 22:00:00 1.32855 NaN NaN NaN\n",
215 | "1 EUR_USD 2010-12-31 22:00:00 1.33815 NaN NaN NaN\n",
216 | "2 EUR_USD 2011-01-01 22:00:00 1.33280 NaN NaN NaN\n",
217 | "3 EUR_USD 2011-01-02 22:00:00 1.32512 NaN NaN NaN\n",
218 | "4 EUR_USD 2011-01-03 22:00:00 1.32919 NaN NaN NaN\n",
219 | "5 EUR_USD 2011-01-04 22:00:00 1.31258 NaN NaN 0.007796\n",
220 | "6 EUR_USD 2011-01-05 22:00:00 1.29978 NaN NaN 0.007135\n",
221 | "7 EUR_USD 2011-01-06 22:00:00 1.29057 NaN NaN 0.006176\n",
222 | "8 EUR_USD 2011-01-08 22:00:00 1.28607 NaN NaN 0.005193\n",
223 | "9 EUR_USD 2011-01-09 22:00:00 1.28742 NaN NaN 0.006228"
224 | ]
225 | },
226 | "execution_count": 4,
227 | "metadata": {},
228 | "output_type": "execute_result"
229 | }
230 | ],
231 | "source": [
232 | "s3 = boto3.client('s3',endpoint_url=\"http://minio-image:9000\",aws_access_key_id=\"minio-image\",aws_secret_access_key=\"minio-image-pass\")\n",
233 | "data = pd.read_csv(s3.get_object(Bucket=\"model-support-files\", Key='2_ml_log.csv')['Body'],sep=',')\n",
234 | "data.head(10)"
235 | ]
236 | },
237 | {
238 | "cell_type": "code",
239 | "execution_count": 5,
240 | "metadata": {},
241 | "outputs": [
242 | {
243 | "data": {
244 | "text/plain": [
245 | "'processed_2_ml_log.csv'"
246 | ]
247 | },
248 | "execution_count": 5,
249 | "metadata": {},
250 | "output_type": "execute_result"
251 | }
252 | ],
253 | "source": [
254 | "from ml_pack.preprocessing.ml_preprocessing import ml_preprocessing\n",
255 | "preprocessed_file=ml_preprocessing(input_file=\"2_ml_log.csv\",fwd_returns=5)\n",
256 | "preprocessed_file"
257 | ]
258 | },
259 | {
260 | "cell_type": "code",
261 | "execution_count": 6,
262 | "metadata": {},
263 | "outputs": [],
264 | "source": [
265 | "s3 = boto3.client('s3',endpoint_url=\"http://minio-image:9000\",aws_access_key_id=\"minio-image\",aws_secret_access_key=\"minio-image-pass\")\n",
266 | "data = pd.read_csv(s3.get_object(Bucket=\"model-support-files\", Key=preprocessed_file)['Body'],sep=',')"
267 | ]
268 | },
269 | {
270 | "cell_type": "markdown",
271 | "metadata": {},
272 | "source": [
273 | "Now the preprocesssed file is in a format that we can feed into the ML model\n",
274 | "The target variable is 5 day forward returns and the features are the technical indicator"
275 | ]
276 | },
277 | {
278 | "cell_type": "code",
279 | "execution_count": 7,
280 | "metadata": {},
281 | "outputs": [
282 | {
283 | "data": {
284 | "text/html": [
285 | "\n",
286 | "\n",
299 | "
\n",
300 | " \n",
301 | " \n",
302 | " | \n",
303 | " RSI | \n",
304 | " STOCHASTIC | \n",
305 | " fwd_returns | \n",
306 | "
\n",
307 | " \n",
308 | " \n",
309 | " \n",
310 | " 0 | \n",
311 | " 56.991619 | \n",
312 | " 78.882076 | \n",
313 | " -0.005146 | \n",
314 | "
\n",
315 | " \n",
316 | " 1 | \n",
317 | " 59.448606 | \n",
318 | " 77.279931 | \n",
319 | " 0.003864 | \n",
320 | "
\n",
321 | " \n",
322 | " 2 | \n",
323 | " 67.632318 | \n",
324 | " 67.385341 | \n",
325 | " 0.019120 | \n",
326 | "
\n",
327 | " \n",
328 | " 3 | \n",
329 | " 60.945457 | \n",
330 | " 79.322084 | \n",
331 | " 0.013553 | \n",
332 | "
\n",
333 | " \n",
334 | " 4 | \n",
335 | " 56.546196 | \n",
336 | " 72.484186 | \n",
337 | " -0.000088 | \n",
338 | "
\n",
339 | " \n",
340 | " 5 | \n",
341 | " 67.387740 | \n",
342 | " 72.292758 | \n",
343 | " 0.016796 | \n",
344 | "
\n",
345 | " \n",
346 | " 6 | \n",
347 | " 54.832174 | \n",
348 | " 60.957090 | \n",
349 | " -0.002328 | \n",
350 | "
\n",
351 | " \n",
352 | " 7 | \n",
353 | " 64.459591 | \n",
354 | " 63.829216 | \n",
355 | " 0.013410 | \n",
356 | "
\n",
357 | " \n",
358 | " 8 | \n",
359 | " 55.613371 | \n",
360 | " 49.955854 | \n",
361 | " 0.000744 | \n",
362 | "
\n",
363 | " \n",
364 | " 9 | \n",
365 | " 65.796292 | \n",
366 | " 58.935306 | \n",
367 | " 0.018234 | \n",
368 | "
\n",
369 | " \n",
370 | "
\n",
371 | "
"
372 | ],
373 | "text/plain": [
374 | " RSI STOCHASTIC fwd_returns\n",
375 | "0 56.991619 78.882076 -0.005146\n",
376 | "1 59.448606 77.279931 0.003864\n",
377 | "2 67.632318 67.385341 0.019120\n",
378 | "3 60.945457 79.322084 0.013553\n",
379 | "4 56.546196 72.484186 -0.000088\n",
380 | "5 67.387740 72.292758 0.016796\n",
381 | "6 54.832174 60.957090 -0.002328\n",
382 | "7 64.459591 63.829216 0.013410\n",
383 | "8 55.613371 49.955854 0.000744\n",
384 | "9 65.796292 58.935306 0.018234"
385 | ]
386 | },
387 | "execution_count": 7,
388 | "metadata": {},
389 | "output_type": "execute_result"
390 | }
391 | ],
392 | "source": [
393 | "data.head(10)"
394 | ]
395 | },
396 | {
397 | "cell_type": "markdown",
398 | "metadata": {},
399 | "source": [
400 | "# Train a ML-model using MLflow"
401 | ]
402 | },
403 | {
404 | "cell_type": "markdown",
405 | "metadata": {},
406 | "source": [
407 | "### Point to the MLflow tracking endpoint"
408 | ]
409 | },
410 | {
411 | "cell_type": "code",
412 | "execution_count": 8,
413 | "metadata": {},
414 | "outputs": [],
415 | "source": [
416 | "mlflow.tracking.set_tracking_uri('http://mlflow-image:5500')"
417 | ]
418 | },
419 | {
420 | "cell_type": "markdown",
421 | "metadata": {},
422 | "source": [
423 | "### Create a MLflow experiment"
424 | ]
425 | },
426 | {
427 | "cell_type": "code",
428 | "execution_count": 9,
429 | "metadata": {},
430 | "outputs": [
431 | {
432 | "data": {
433 | "text/plain": [
434 | "'1'"
435 | ]
436 | },
437 | "execution_count": 9,
438 | "metadata": {},
439 | "output_type": "execute_result"
440 | }
441 | ],
442 | "source": [
443 | "mlflow.create_experiment(name='simple_trading_models', artifact_location='s3://mlflow-models')"
444 | ]
445 | },
446 | {
447 | "cell_type": "code",
448 | "execution_count": 10,
449 | "metadata": {},
450 | "outputs": [],
451 | "source": [
452 | "mlflow.set_experiment('simple_trading_models')"
453 | ]
454 | },
455 | {
456 | "cell_type": "markdown",
457 | "metadata": {},
458 | "source": [
459 | "### Fetching the preprocessed data from Minio"
460 | ]
461 | },
462 | {
463 | "cell_type": "code",
464 | "execution_count": 11,
465 | "metadata": {},
466 | "outputs": [
467 | {
468 | "name": "stdout",
469 | "output_type": "stream",
470 | "text": [
471 | "RandomForest Model (n_estimators=300.000000, max_depth=10.000000):\n",
472 | " RMSE: 0.008593323344445921\n",
473 | " MAE: 0.006673740830905886\n",
474 | " R2: 0.3790275404533735\n"
475 | ]
476 | }
477 | ],
478 | "source": [
479 | "def eval_metrics(actual, pred):\n",
480 | " rmse = np.sqrt(mean_squared_error(actual, pred))\n",
481 | " mae = mean_absolute_error(actual, pred)\n",
482 | " r2 = r2_score(actual, pred)\n",
483 | " return rmse, mae, r2\n",
484 | "\n",
485 | "warnings.filterwarnings(\"ignore\")\n",
486 | "np.random.seed(40)\n",
487 | "# Split the data into training and test sets. (0.75, 0.25) split.\n",
488 | "train, test = train_test_split(data)\n",
489 | "\n",
490 | "# The predicted column is \"fwd_returns\"\n",
491 | "train_x = train.drop([\"fwd_returns\"], axis=1)\n",
492 | "test_x = test.drop([\"fwd_returns\"], axis=1)\n",
493 | "train_y = train[[\"fwd_returns\"]]\n",
494 | "test_y = test[[\"fwd_returns\"]]\n",
495 | "\n",
496 | "n_estimators=300\n",
497 | "max_depth=10\n",
498 | "\n",
499 | "lr = RandomForestRegressor(n_estimators=n_estimators,max_depth=max_depth)\n",
500 | "lr.fit(train_x, train_y)\n",
501 | "predicted_qualities = lr.predict(test_x)\n",
502 | "(rmse, mae, r2) = eval_metrics(test_y, predicted_qualities)\n",
503 | "\n",
504 | "print(\"RandomForest Model (n_estimators=%f, max_depth=%f):\" % (n_estimators, max_depth))\n",
505 | "print(\" RMSE: %s\" % rmse)\n",
506 | "print(\" MAE: %s\" % mae)\n",
507 | "print(\" R2: %s\" % r2)\n",
508 | "\n",
509 | "mlflow.log_param(\"n_estimators\", n_estimators)\n",
510 | "mlflow.log_param(\"max_depth\", max_depth)\n",
511 | "mlflow.log_metric(\"rmse\", rmse)\n",
512 | "mlflow.log_metric(\"r2\", r2)\n",
513 | "mlflow.log_metric(\"mae\", mae)\n",
514 | "mlflow.sklearn.log_model(lr, \"model\")"
515 | ]
516 | },
517 | {
518 | "cell_type": "markdown",
519 | "metadata": {},
520 | "source": [
521 | "## Model Serving"
522 | ]
523 | },
524 | {
525 | "cell_type": "code",
526 | "execution_count": 11,
527 | "metadata": {},
528 | "outputs": [],
529 | "source": [
530 | "import mlflow.pyfunc"
531 | ]
532 | },
533 | {
534 | "cell_type": "code",
535 | "execution_count": 12,
536 | "metadata": {},
537 | "outputs": [],
538 | "source": [
539 | "model_predict=mlflow.pyfunc.load_model(model_uri=\"s3://mlflow-models/adebcab9b2d949289e24bd0afb4b3846/artifacts/model\")"
540 | ]
541 | },
542 | {
543 | "cell_type": "code",
544 | "execution_count": 13,
545 | "metadata": {},
546 | "outputs": [
547 | {
548 | "data": {
549 | "text/plain": [
550 | "array([0.00771085])"
551 | ]
552 | },
553 | "execution_count": 13,
554 | "metadata": {},
555 | "output_type": "execute_result"
556 | }
557 | ],
558 | "source": [
559 | "model_predict.predict([[80,20]])"
560 | ]
561 | },
562 | {
563 | "cell_type": "markdown",
564 | "metadata": {},
565 | "source": [
566 | "## Running the strategy with the model\n",
567 | "Look at the simple_strategy_2.py Backtrader strategy file to see how the ML model is being served "
568 | ]
569 | },
570 | {
571 | "cell_type": "code",
572 | "execution_count": null,
573 | "metadata": {},
574 | "outputs": [
575 | {
576 | "name": "stdout",
577 | "output_type": "stream",
578 | "text": [
579 | "s3://mlflow-models/2a71796bd39c429d89a1cf2006624240/artifacts/model\r\n"
580 | ]
581 | }
582 | ],
583 | "source": [
584 | "!python /home/jovyan/work/q_pack/q_run/run_BT.py \\\n",
585 | "--strat_name=simple_strategy_2 \\\n",
586 | "--strat_param=ml_serving=True,model_uri=2a71796bd39c429d89a1cf2006624240 \\\n",
587 | "--ml_log=False \\\n",
588 | "--mode=backtest \\\n",
589 | "--tickers=EUR_USD,GBP_USD"
590 | ]
591 | },
592 | {
593 | "cell_type": "markdown",
594 | "metadata": {},
595 | "source": [
596 | "## Packaging the model using MLflow (BONUS)\n",
597 | "For reproducibility and for sharing"
598 | ]
599 | },
600 | {
601 | "cell_type": "code",
602 | "execution_count": 55,
603 | "metadata": {},
604 | "outputs": [],
605 | "source": [
606 | "mlflow.projects.run(\"/home/jovyan/work/BT/mlflow_project\",parameters={'n_estimators':200,'max_depth':10})\n"
607 | ]
608 | },
609 | {
610 | "cell_type": "markdown",
611 | "metadata": {},
612 | "source": [
613 | "## Serving the model as a rest API using MLflow (BONUS)\n",
614 | "Serrve the model in the mlflow tracking container using"
615 | ]
616 | },
617 | {
618 | "cell_type": "markdown",
619 | "metadata": {},
620 | "source": [
621 | "docker exec ekholabs-mlflow /bin/sh -c \"mlflow models serve -m /ekholabs-mlflow/mlruns/0/a85ab97a393045afaea2b550a79686e8/artifacts/model --host=0.0.0.0 -p 2349\""
622 | ]
623 | },
624 | {
625 | "cell_type": "markdown",
626 | "metadata": {},
627 | "source": [
628 | "Then you can call in the program using curl"
629 | ]
630 | },
631 | {
632 | "cell_type": "code",
633 | "execution_count": null,
634 | "metadata": {},
635 | "outputs": [],
636 | "source": [
637 | "! curl -X POST -H \"Content-Type:application/json; format=pandas-split\" --data '{\"columns\":[\"alcohol\", \"chlorides\", \"citric acid\", \"density\", \"fixed acidity\", \"free sulfur dioxide\", \"pH\", \"residual sugar\", \"sulphates\", \"total sulfur dioxide\", \"volatile acidity\"],\"data\":[[12.8, 2, 10, 0.98, 1, 45, 2, 1.2, 44, 4, 0.66]]}' http://mlflow-image:2349/invocations"
638 | ]
639 | }
640 | ],
641 | "metadata": {
642 | "kernelspec": {
643 | "display_name": "Python 3",
644 | "language": "python",
645 | "name": "python3"
646 | },
647 | "language_info": {
648 | "codemirror_mode": {
649 | "name": "ipython",
650 | "version": 3
651 | },
652 | "file_extension": ".py",
653 | "mimetype": "text/x-python",
654 | "name": "python",
655 | "nbconvert_exporter": "python",
656 | "pygments_lexer": "ipython3",
657 | "version": "3.7.3"
658 | }
659 | },
660 | "nbformat": 4,
661 | "nbformat_minor": 2
662 | }
663 |
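The curl cell above keeps the wine-quality column names from the MLflow tutorial, but the model trained in this notebook expects the RSI and STOCHASTIC features. A minimal sketch of the same call with those features, assuming the model is served at mlflow-image:2349 in the pandas-split format used above:

import requests

payload = {
    "columns": ["RSI", "STOCHASTIC"],  # features the notebook's model was trained on
    "data": [[80, 20]],                # same sample passed to mlflow.pyfunc above
}
resp = requests.post(
    "http://mlflow-image:2349/invocations",
    json=payload,
    headers={"Content-Type": "application/json; format=pandas-split"},
)
print(resp.json())  # expected: a list with one predicted 5-day forward return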
--------------------------------------------------------------------------------
/Storage/notebooks/mlflow_project/MLproject:
--------------------------------------------------------------------------------
1 | name: tutorial
2 |
3 | conda_env: conda.yaml
4 |
5 | entry_points:
6 | main:
7 | parameters:
8 | n_estimators: {type: int, default: 100}
9 | max_depth: {type: int, default: 10}
10 | command: "python train.py {n_estimators} {max_depth}"
11 |
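The two parameters declared above are substituted into the command string, so a run with the defaults executes python train.py 100 10. A minimal sketch of launching this project against the tracking server used in the notebooks (the path mirrors the one in Example.ipynb and may differ per mount):

import mlflow

mlflow.tracking.set_tracking_uri("http://mlflow-image:5500")  # tracking server from the notebooks
# Parameters are validated against the MLproject declaration, then substituted
# into "python train.py {n_estimators} {max_depth}" inside the conda env above.
mlflow.projects.run(
    "/home/jovyan/work/BT/mlflow_project",  # path as used in Example.ipynb; adjust to your mount
    parameters={"n_estimators": 200, "max_depth": 10},
)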
--------------------------------------------------------------------------------
/Storage/notebooks/mlflow_project/conda.yaml:
--------------------------------------------------------------------------------
1 | name: tutorial
2 | channels:
3 | - defaults
4 | dependencies:
5 | - python=3.7.3
6 | - scikit-learn=0.19.1
7 | - pip:
8 | - mlflow>=1.0
9 |
--------------------------------------------------------------------------------
/Storage/notebooks/mlflow_project/ml_log_processed.csv:
--------------------------------------------------------------------------------
1 | sma1,sma2,fwd_returns
2 | 1.135649,1.1381450000000002,-0.0004381621725833007
3 | 1.296327,1.2829235,-9.957412910943653e-05
4 | 109.1678,108.4436,-0.0029414190285069752
5 | 1.136307,1.1387795,0.002603824202414451
6 | 1.298984,1.2861355,0.0033398956665620094
7 | 109.149,108.62755,-0.0028673835125448965
8 | 1.1371200000000001,1.139223,-0.00013116474291696711
9 | 1.300852,1.288275,-0.004138067934554401
10 | 109.1157,108.68805,0.002101421224354416
11 | 1.137795,1.139359,-0.0008832917923828898
12 | 1.3028389999999999,1.2898535,-0.0010886481596479314
13 | 109.1113,108.7586,0.006511781910490511
14 | 1.1384459999999998,1.139255,-0.0020394944154615846
15 | 1.303536,1.290945,-0.00802793660539558
16 | 109.1747,108.8254,0.003161723047681697
17 | 1.1385459999999998,1.138877,-0.003534746647253284
18 | 1.3033709999999998,1.2920185,4.642202260773942e-05
19 | 109.1981,108.9047,-0.002004008016032066
20 | 1.138899,1.1380774999999999,-0.0031775931272444158
21 | 1.301796,1.292649,-0.005500754322850354
22 | 109.2166,108.99645,0.0004289886820008082
23 | 1.139107,1.1373925,-0.00030905895961930163
24 | 1.300446,1.293709,0.00522778192681117
25 | 109.2358,109.0716,0.0003923107100822776
26 | 1.137877,1.1364755,-0.004752146415574421
27 | 1.297523,1.2938395,-0.005935797424467593
28 | 109.2972,109.1611,0.0011491108071135514
29 | 1.136342,1.1358535,-0.0008520155492836023
30 | 1.295293,1.2946625,-0.0009575784941882048
31 | 109.4189,109.2715,0.0052106106981488765
32 | 1.134881,1.135265,0.00021318552470295238
33 | 1.293202,1.2947645,0.0009818742889204923
34 | 109.5809,109.37435,0.00074310363577057
35 | 1.133014,1.1346604999999998,-0.0009680112253779294
36 | 1.2899559999999999,1.29447,-0.005597421604957686
37 | 109.7769,109.46295,0.0002535542877839525
38 | 1.1310120000000001,1.134066,-0.0013334163014587785
39 | 1.2873670000000002,1.2941095,0.0009003155018671993
40 | 109.9298,109.52275,-0.0018378026037045858
41 | 1.129664,1.1337294999999998,0.004922425073213388
42 | 1.28599,1.2944145,0.008369313559852376
43 | 110.0324,109.57185,0.001859326107659598
44 | 1.128409,1.1334275,-0.0012400793650791941
45 | 1.285703,1.2946195,0.00034130222311867797
46 | 110.0998,109.63725,-5.43183052689189e-05
47 | 1.128051,1.1332985,0.004381141580048986
48 | 1.286571,1.294971,0.00900265194398342
49 | 110.199,109.69855,0.000887247179821582
50 | 1.128008,1.1334535,-0.0004061810154526446
51 | 1.288291,1.2950435,0.0010835901416352822
52 | 110.2951,109.75585,0.00014472827266809318
53 | 1.127958,1.1335324999999998,-0.00037101162503083707
54 | 1.288759,1.2946024999999999,-0.00445249646870971
55 | 110.3858,109.8108,-9.948628898048817e-05
56 | 1.12857,1.1332235000000002,0.0010957741998196546
57 | 1.2908190000000002,1.294171,0.006361617470158176
58 | 110.4662,109.8817,0.00020803748292741453
59 | 1.129445,1.1328935,0.0014741450841231352
60 | 1.29365,1.2944715,0.00496517481553016
61 | 110.4736,109.94625,-0.0014288298064748517
62 | 1.130467,1.132674,0.00150723206966763
63 | 1.2975340000000002,1.295368,0.008989226652027726
64 | 110.4661,110.02349999999998,-0.0006067631450255018
65 | 1.13157,1.132292,-0.00024642681123687726
66 | 1.302339,1.2961475,0.001526417603675334
67 | 110.4865,110.1317,0.0027819310407322106
68 | 1.132759,1.1318855,-0.0005634001197225569
69 | 1.30621,1.2967885,-0.0061793598816943796
70 | 110.5934,110.2616,0.005982179971444568
71 | 1.132955,1.1313095,-0.003875559313673671
72 | 1.30896,1.297475,-0.0003871878772234716
73 | 110.7114,110.3719,0.002838561316517607
74 | 1.133095,1.130752,-0.0017331022530331364
75 | 1.310975,1.298339,-0.005248048121031523
76 | 110.8383,110.46905,0.0007434611250447531
77 | 1.132699,1.130375,-0.00037202380952383596
78 | 1.312094,1.2993325,0.002023255991510098
79 | 110.9447,110.57185,-0.0009577258039973335
80 | 1.131259,1.1296335,-0.009658496818898632
81 | 1.312515,1.300403,-0.004244068209871887
82 | 111.0363,110.6657,-0.0011826261467889898
83 | 1.129951,1.1289545,0.0008052682437993397
84 | 1.312735,1.300747,-0.005976202318552337
85 | 111.056,110.7209,-0.006548024828675003
86 | 1.128883,1.1287265,0.0032542421370715946
87 | 1.311833,1.301326,-0.002286303731986683
88 | 111.08599999999998,110.7761,0.0011376564277587597
89 | 1.127882,1.1286635,0.0020852269689355385
90 | 1.31073,1.30219,0.003448887791554389
91 | 111.155,110.8143,0.0020923520923521455
92 | 1.127034,1.1287505,0.002881229324511958
93 | 1.309003,1.3032685000000002,0.0042674581327755146
94 | 111.2203,110.8432,-0.0009359925120600021
95 | 1.12638,1.128975,0.0014719443853301595
96 | 1.30854,1.3054395,0.011224341354730605
97 | 111.2689,110.8777,0.0012611703660998508
98 | 1.125846,1.1293024999999999,0.0004958297179082916
99 | 1.308853,1.3075315,-0.0003255725913305918
100 | 111.2749,110.93415,0.0021232950660381444
101 | 1.125846,1.1293024999999999,0.0
102 | 1.308853,1.3075315,0.0
103 | 111.2589,110.98515,0.0008618832148243882
104 | 1.125948,1.1294515,0.0017345439742295632
105 | 1.309031,1.3089955,-0.001408749394086417
106 | 111.2164,111.02735,-0.0016325798349481335
107 | 1.12639,1.1297424999999999,0.001272152234217172
108 | 1.310467,1.3107209999999998,0.004300471762510805
109 | 111.17069999999998,111.0577,-0.0012488993512911328
110 | 1.126894,1.1297965,0.00017646332209864113
111 | 1.3106909999999998,1.3113925,-0.007151865753362685
112 | 111.0759,111.0561,-0.005604584424113157
113 | 1.128558,1.1299085,0.0006175126590093338
114 | 1.31005,1.3112825,-0.01081648487061282
115 | 111.0306,111.0433,-0.00212600419772746
116 | 1.12944,1.1296955,-0.006100786401946445
117 | 1.31096,1.3118475,0.00592107286764465
118 | 110.9168,111.0014,-0.005067950426560053
119 | 1.12944,1.1296955,0.0
120 | 1.31096,1.3118475,0.0
121 | 110.79799999999999,110.97650000000002,0.0016584352390149881
122 | 1.1301139999999998,1.1294985,0.0013837638376383854
123 | 1.3129570000000002,1.312395,0.006039108963872275
124 | 110.66799999999999,110.94415,-0.001964993995851594
125 | 1.130296,1.129089,-0.0022853700882261574
126 | 1.314481,1.3126055,-0.00019756238412205196
127 | 110.5468,110.90785,0.0020782440660662704
128 | 1.129943,1.1284885,-0.0018733242182644716
129 | 1.315539,1.312271,0.0006764048701151104
130 | 110.4321,110.8535,0.002737956629311533
131 | 1.129138,1.127759,-0.002543963423854434
132 | 1.313816,1.311178,-0.009987316487806375
133 | 110.2862,110.77255,-0.001959414715567487
134 | 1.12824,1.127043,-0.00032995353897458113
135 | 1.3115620000000001,1.3102075,-0.00440346139683323
136 | 110.21,110.7132,0.004680924550767518
137 | 1.1270870000000002,1.1265174999999998,-0.000526315789473597
138 | 1.309816,1.3094235,0.0024811601349996604
139 | 110.1755,110.6731,0.002515017731779645
140 | 1.1255879999999998,1.125989,-0.0018029114342071884
141 | 1.307543,1.309005,0.0003074558032283292
142 | 110.2469,110.6614,0.003934520908910555
143 | 1.12424,1.125567,0.0015289836282514901
144 | 1.307287,1.308989,0.008221914860919055
145 | 110.3378,110.6842,-0.00035955056179781675
146 | 1.122873,1.1257155,0.0004553164895990669
147 | 1.3078459999999998,1.308948,-0.004626171785687028
148 | 110.4972,110.70700000000001,0.0011329916374427373
149 | 1.12224,1.12584,0.0003747958701063592
150 | 1.306906,1.308933,-0.005581801336875913
151 | 110.6609,110.72945,0.00202090967881019
152 | 1.121492,1.125803,0.00036573508291493084
153 | 1.305603,1.30928,0.003287802023499653
154 | 110.8185,110.74325000000002,-0.002482946549421494
155 | 1.1214110000000002,1.1258535,0.00364709659009832
156 | 1.304333,1.309407,5.372176729268041e-05
157 | 110.9229,110.73485,-0.0027317493979367446
158 | 1.121286,1.1256145,-0.0022655993176547984
159 | 1.303117,1.309328,0.0010974000061392264
160 | 110.9836,110.70785,-0.0012254460263111389
161 | 1.121649,1.1253935,0.0017987853746284088
162 | 1.303269,1.3085425000000002,0.00040628281883603456
163 | 111.0723,110.67925,0.0005773880408501686
164 | 1.122085,1.1251625,0.000320000000000098
165 | 1.3039969999999999,1.3077795,1.5325200760196722e-05
166 | 111.1773,110.69365,0.0061131748836875666
167 | 1.1230200000000001,1.1250535,0.003909859955925077
168 | 1.304607,1.3072115,0.0015631465219989593
169 | 111.2849,110.7302,0.0027243561020200424
170 | 1.123977,1.1247825,-0.0015932587452202052
171 | 1.3048950000000001,1.306219,-0.00215744778517335
172 | 111.3446,110.79575,-0.0003843060148359667
173 | 1.124755,1.1244975,-7.092450086887414e-05
174 | 1.303975,1.305631,-0.001058055018860915
175 | 111.4159,110.87685,0.0006794996736612102
176 | 1.124957,1.123915,-0.0046547505053728955
177 | 1.303164,1.305505,-0.003822242689385247
178 | 111.459,110.9781,-0.0013938153909381912
179 | 1.125144,1.123692,0.00024050666737918114
180 | 1.303128,1.305017,0.00035441321498996814
181 | 111.4926,111.07675,0.0011631340198805518
182 | 1.125144,1.123692,0.0
183 | 1.302769,1.304186,0.0008009981669465649
184 | 111.4926,111.07675,0.0
185 | 1.12536,1.123426,0.0006233858758570854
186 | 1.3022170000000002,1.303275,-0.001431407858890843
187 | 111.5502,111.18435,-0.0003306612330983283
188 | 1.124731,1.123071,-0.0038803844784620622
189 | 1.301046,1.3020815,-0.0036684238108450895
190 | 111.6175,111.2702,-0.0018505274450206777
191 | 1.123839,1.1225625,-0.004628140523926905
192 | 1.2994219999999999,1.3013455,-0.0030940594059406523
193 | 111.7013,111.34245,0.0002597332807896535
194 | 1.122523,1.1220860000000001,-0.0019927113440928457
195 | 1.2975759999999998,1.3007865,-0.001707014276846719
196 | 111.7478,111.41005,-0.002766784262459576
197 | 1.121102,1.1215935,-0.0006205928910633096
198 | 1.295632,1.3001195,0.0008238768848127798
199 | 111.73200000000001,111.45465,0.0004938360284809651
200 | 1.121102,1.1215935,0.0
201 | 1.295632,1.3001195,0.0
202 | 111.7021,111.4935,0.001462828014502815
203 | 1.119578,1.121299,0.0030328938487154122
204 | 1.294254,1.2995745,0.0022055511548078055
205 | 111.6723,111.50845,-0.0003763744387990098
206 | 1.118542,1.1212594999999999,0.0027635215159886872
207 | 1.293228,1.2986015,0.0016582719876017116
208 | 111.6038,111.50985,-0.002788012442962229
209 | 1.117627,1.121191,0.0010110952040085497
210 | 1.2937370000000001,1.2984505,0.008022341873994199
211 | 111.5321,111.49555,-0.0016900699401284358
212 | 1.117074,1.1210155000000002,-0.001457009287316846
213 | 1.294082,1.298605,-0.0009055955057906129
214 | 111.4778,111.4852,0.0027375056280953824
215 | 1.116138,1.120641,-0.003186823023901164
216 | 1.2940399999999999,1.2984045,-0.002173862946774685
217 | 111.3988,111.4745,-0.002550424771449622
218 | 1.115486,1.1204230000000002,0.0031790505953983583
219 | 1.295101,1.298659,0.007059276366435663
220 | 111.2617,111.4396,-0.007094625011254019
221 | 1.115228,1.1199795,-0.00037598023418183857
222 | 1.296219,1.2986325,-0.00320294762913087
223 | 111.1105,111.4059,-0.0010155782448632023
224 | 1.1156510000000002,1.119745,0.001459710207225129
225 | 1.297213,1.2983175,-0.004018466540897769
226 | 110.9637,111.35575,-0.002405373513660658
227 | 1.116203,1.1193629999999999,-0.0008316268588647313
228 | 1.298229,1.2979025,-0.0015245545682737172
229 | 110.7682,111.2501,-0.003930667394568044
230 | 1.11723,1.1191659999999999,0.003633564831388325
231 | 1.299376,1.297504,0.0018276319442300792
232 | 110.5566,111.12935,1.8269344952637212e-05
233 | 1.118,1.118789,0.0007133812487738478
234 | 1.2997379999999998,1.2969959999999998,-0.0038564269934492357
235 | 110.3039,110.9881,-0.004137931034482678
236 | 1.118252,1.1183969999999999,-0.0018712908342393142
237 | 1.299515,1.2963715,-0.002866812969431032
238 | 110.0949,110.84935,0.0011557300361393352
239 | 1.1181590000000001,1.117893,-0.0020712066564297915
240 | 1.2974780000000001,1.2956075,-0.0060213420540758955
241 | 109.90529999999998,110.71869999999998,5.497123172215623e-05
242 | 1.118113,1.1175935,-0.0010377527285739818
243 | 1.2951739999999998,1.2946280000000001,-0.0030016216553360175
244 | 109.7035,110.59065,0.0016673690382396078
245 | 1.118312,1.117225,-0.0009940535893393498
246 | 1.2924200000000001,1.29323,-0.005731981013301524
247 | 109.5461,110.47245,0.0014633789419771226
248 | 1.118312,1.117225,0.0
249 | 1.2924200000000001,1.29323,0.0
250 | 109.5253,110.3935,0.005278731643164836
251 | 1.118112,1.116799,-0.0004033956953197748
252 | 1.2887499999999998,1.2919254999999998,7.86497412419429e-06
253 | 109.4892,110.29985,-0.0024074713374638845
254 | 1.117866,1.116547,-0.0007891810453060089
255 | 1.2852059999999998,1.2907125000000002,-0.0023044192942028863
256 | 109.5009,110.2323,0.00193973171597972
257 | 1.117525,1.116588,0.0006103033566682292
258 | 1.281576,1.2893945,-0.004808677692465402
259 | 109.5778,110.17299999999999,0.0019996000799840097
260 | 1.116861,1.116532,-0.0037313432835818228
261 | 1.277954,1.2880915000000002,-0.0015050220208485676
262 | 109.5763,110.06645,-0.007093485241559616
263 | 1.116456,1.116843,0.005987107461826513
264 | 1.274517,1.2869465,0.0033477715899536964
265 | 109.6017,109.9528,-0.0016809793531882278
266 | 1.1161059999999998,1.117053,0.001208194241835825
267 | 1.271779,1.2857584999999998,0.0015655144058950299
268 | 109.6151,109.855,5.490684139242141e-05
269 | 1.115688,1.11697,-0.0024849828375288485
270 | 1.26925,1.2843825,-0.0012788733283861653
271 | 109.6207,109.763,-0.0006588459215608466
272 | 1.1151520000000001,1.1166555,-0.0031363693388534175
273 | 1.267103,1.2822905,-0.0031222334640191862
274 | 109.6021,109.6528,-0.000549400238073372
275 | 1.11465,1.1163815000000001,-0.000737118406385795
276 | 1.265034,1.280104,-0.0024342475637701977
277 | 109.6,109.57305,0.002977553825011414
278 | 1.114349,1.1163305000000001,0.0008096291898311847
279 | 1.263478,1.277949,-0.001748668627295058
280 | 109.4205,109.4729,-0.010924868691482015
281 | 1.114444,1.116278,0.0031549994606836496
282 | 1.262437,1.2755935,0.004108607373198625
283 | 109.228,109.3586,-0.003647983450161152
284 | 1.115289,1.1165775,0.0059317401861958
285 | 1.2620069999999999,1.2736065,0.0025216881036587147
286 | 109.0105,109.2557,-0.00034296095806596316
287 | 1.115999,1.116762,-0.0005968022090588887
288 | 1.262575,1.2720755,0.0030690375245208124
289 | 108.7679,109.17285,-0.00028744424972415583
290 | 1.116945,1.116903,-0.0016043066721331822
291 | 1.263211,1.2705825000000002,-0.0009620540642842901
292 | 108.62449999999998,109.1004,0.0019477628552349024
293 | 1.1177219999999999,1.117089,0.004427859808245227
294 | 1.263624,1.2690705,0.0015707632804484728
295 | 108.485,109.04335,-0.0013422694536501245
296 | 1.1177219999999999,1.117089,0.0
297 | 1.263624,1.2690705,0.0
298 | 108.3715,108.9933,0.002465679776791241
299 | 1.118751,1.1174285,0.00343957196437783
300 | 1.263482,1.2676305,-0.0028134826501904353
301 | 108.2819,108.9513,0.0015441944760370419
302 | 1.12017,1.1179290000000002,0.0009920195569568246
303 | 1.263677,1.2664635,0.0013830493472009042
304 | 108.2019,108.902,0.00033236700703520583
305 | 1.1217510000000002,1.1184515,-0.0016635254351268225
306 | 1.264376,1.2657395,0.0008602523933167738
307 | 108.0764,108.8382,-0.0011998154130133942
308 | 1.123275,1.1189625,-0.0012319964546864126
309 | 1.265192,1.265113,-0.0014982336613676983
310 | 108.0652,108.74285,-0.0004897431158750187
311 | 1.124048,1.1191985,-0.005865857338089864
312 | 1.265406,1.264442,-0.006491557815930249
313 | 108.0926,108.6603,-8.320467425382283e-05
314 | 1.124484,1.119464,0.00012497210444095685
315 | 1.264624,1.2635305,-0.0038154589679184614
316 | 108.1557,108.5831,0.0029586345901366773
317 | 1.1240350000000001,1.1196620000000002,-0.0019903783503958383
318 | 1.263262,1.2626344999999999,-0.00209058121349448
319 | 108.1803,108.4741,-0.003834878961632593
320 | 1.12371,1.1198545,0.0005097660442154872
321 | 1.2618779999999998,1.2622265,0.0029265484319775936
322 | 108.1681,108.3963,-0.001462123595713738
323 | 1.1239560000000002,1.1204505,0.003495034548103515
324 | 1.261516,1.2623635,0.0071754313231495725
325 | 108.1012,108.2931,-0.006413107947805363
326 | 1.1242729999999999,1.1209975,0.005050595026010463
327 | 1.261055,1.2623395,0.0007915901462858255
328 | 107.9913,108.1814,-0.0015296795134874808
329 | 1.1242729999999999,1.1209975,0.0
330 | 1.261973,1.2627275,0.008083652355491022
331 | 107.9913,108.1814,0.0
332 | 1.1250440000000002,1.1218975,0.007453625333463387
333 | 1.262348,1.2630125,-0.0028874068262062513
334 | 107.8848,108.08335,0.0018776623570735396
335 | 1.125473,1.1228215,-0.002023365472587968
336 | 1.2622520000000002,1.263314,-0.002848554476637122
337 | 107.7278,107.96485,-0.004373001146863786
338 | 1.126124,1.1239375,0.00029971262847983127
339 | 1.262254,1.2637230000000002,-0.0007260101010102549
340 | 107.6162,107.8463,0.0030342760816632897
341 | 1.126917,1.125096,2.6437308329407827e-05
342 | 1.263068,1.264237,-7.89714755030646e-05
343 | 107.5645,107.81485,0.005097848819838768
344 | 1.1284,1.126224,0.00025555389102827064
345 | 1.264398,1.2645110000000002,0.0002843197649624596
346 | 107.5052,107.7989,-0.0007895959126800189
347 | 1.1284,1.126224,0.0
348 | 1.264398,1.2645110000000002,0.0
349 | 107.4486,107.80215,0.0032259563984569084
350 | 1.129173,1.1268285,-0.006131726398139192
351 | 1.265657,1.2644594999999998,-0.002629210289450068
352 | 107.4528,107.81655,0.001779227518718951
353 | 1.130109,1.1270719999999999,-0.0005318583129453325
354 | 1.26607,1.263974,-0.0037998432564656515
355 | 107.4392,107.80365,-0.0031081181084882115
356 | 1.130925,1.1273175,-0.0005587484035760593
357 | 1.265314,1.263415,-0.002137618105386818
358 | 107.4716,107.7864,-0.0021527726226707733
359 | 1.131394,1.127675,0.00039045514646507407
360 | 1.264575,1.262815,0.0009317363743508444
361 | 107.5376,107.76444999999998,0.0015994643654217988
362 | 1.130639,1.127456,-0.005827929710022772
363 | 1.261939,1.2619559999999999,-0.00696162750916951
364 | 107.5911,107.73795,0.0007056114680432657
365 | 1.130639,1.127456,0.0
366 | 1.261939,1.2619559999999999,0.0
367 | 107.7601,107.74395,0.006364581013879711
368 | 1.129038,1.127041,-4.461258431764925e-05
369 | 1.259858,1.261103,0.0014982293652956447
370 | 107.8778,107.74700000000001,-0.0017424172582279907
371 | 1.1275309999999998,1.1265020000000001,-0.0012135164315478786
372 | 1.257536,1.259894,-0.004823961408308874
373 | 107.98100000000001,107.77275,0.003703327453569827
374 | 1.126075,1.1260995,0.0007593692768124161
375 | 1.2553459999999999,1.2588,0.00032154857795130276
376 | 108.0593,107.78225,-0.0030731859921606652
377 | 1.12505,1.1259834999999998,0.003874308159257156
378 | 1.253745,1.2584065,0.004652919526189958
379 | 108.0541,107.75135,-0.0045040056115479565
380 | 1.123924,1.126162,-0.0006402617959342383
381 | 1.2522950000000002,1.2583465,0.001495796571666208
382 | 108.0241,107.73845,-0.0005191915445947881
383 | 1.123924,1.126162,0.0
384 | 1.2522950000000002,1.2583465,0.0
385 | 108.0375,107.73835,0.0009090571778411771
386 | 1.123644,1.1264085,0.0013347333202826661
387 | 1.25108,1.2583685,-0.0007827225967221851
388 | 108.06390000000002,107.76775,-0.0009453022186799442
389 | 1.1229120000000001,1.1265105,-0.004549816940959017
390 | 1.249205,1.2576375,-0.009112272792671905
391 | 108.0753,107.80645,0.0002040816326531747
392 | 1.122221,1.126573,-0.00019639350116062992
393 | 1.247458,1.256386,-0.0011374086443056708
394 | 108.0912,107.84115,0.0011222199551110812
395 | 1.12154,1.126467,0.0004821514669905991
396 | 1.2460460000000002,1.2553105,0.0036503129416516344
397 | 107.9654,107.86275,-0.0067720926785432844
398 | 1.1215,1.1260695,-0.00014279084710688839
399 | 1.2459959999999999,1.2539675,0.003918665561608137
400 | 107.8589,107.86835,3.7309256426532045e-05
401 | 1.1215,1.1260695,0.0
402 | 1.246035,1.2529465,0.0022121765894007606
403 | 107.8589,107.86835,0.0
404 | 1.1214920000000002,1.1252650000000002,0.0002409939662251226
405 | 1.246194,1.251865,-0.003862763915546963
406 | 107.7609,107.87095,0.0045329055364871085
407 | 1.121017,1.124274,-0.005380901814158068
408 | 1.2459360000000002,1.250641,-0.0030267267194938885
409 | 107.7088,107.88405,0.00116060983082944
410 | 1.120267,1.123171,-0.0017046474071416018
411 | 1.245191,1.249468,0.0007408600418747913
412 | 107.716,107.88505,0.0009737820768451755
413 | 1.118827,1.1219385000000002,-0.0023007099847219825
414 | 1.244359,1.2483270000000002,0.000804686494142004
415 | 107.74,107.88205,0.0010376903976576646
416 | 1.117565,1.1207445,0.0009548345253751478
417 | 1.243017,1.2470485,-0.004888560126073416
418 | 107.806,107.92175,0.0047943430454260305
419 | 1.116163,1.1199035,8.999280057597758e-05
420 | 1.241175,1.24519,-0.01325102614653706
421 | 107.8683,107.9661,-0.0012803743482988628
422 | 1.115464,1.1191879999999998,0.0017187078196707617
423 | 1.238545,1.2430014999999999,-0.0076070223707049855
424 | 107.9315,108.0034,0.0002859171946911765
425 |
--------------------------------------------------------------------------------
/Storage/notebooks/mlflow_project/train.py:
--------------------------------------------------------------------------------
1 | # Adapted from the MLflow tutorial; the original example used the wine-quality data set from http://archive.ics.uci.edu/ml/datasets/Wine+Quality
2 | # P. Cortez, A. Cerdeira, F. Almeida, T. Matos and J. Reis.
3 | # Modeling wine preferences by data mining from physicochemical properties. In Decision Support Systems, Elsevier, 47(4):547-553, 2009.
4 |
5 | import os
6 | import warnings
7 | import sys
8 |
9 | import pandas as pd
10 | import numpy as np
11 | from sklearn.metrics import mean_squared_error, mean_absolute_error, r2_score
12 | from sklearn.model_selection import train_test_split
13 | from sklearn.ensemble import RandomForestRegressor
14 |
15 | import mlflow
16 | import mlflow.sklearn
17 |
18 |
19 |
20 | def eval_metrics(actual, pred):
21 | rmse = np.sqrt(mean_squared_error(actual, pred))
22 | mae = mean_absolute_error(actual, pred)
23 | r2 = r2_score(actual, pred)
24 | return rmse, mae, r2
25 |
26 |
27 |
28 | if __name__ == "__main__":
29 | warnings.filterwarnings("ignore")
30 | np.random.seed(40)
31 |
32 | # Read the preprocessed ML log csv that sits next to this script
33 | log_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "ml_log_processed.csv")
34 | data = pd.read_csv(log_path)
35 |
36 | mlflow.tracking.set_tracking_uri('http://mlflow-image:5500')
37 | # Split the data into training and test sets. (0.75, 0.25) split.
38 | train, test = train_test_split(data)
39 |
40 | # The predicted column is "fwd_returns"
41 | train_x = train.drop(["fwd_returns"], axis=1)
42 | test_x = test.drop(["fwd_returns"], axis=1)
43 | train_y = train[["fwd_returns"]]
44 | test_y = test[["fwd_returns"]]
45 |
46 | n_estimators = int(sys.argv[1]) if len(sys.argv) > 1 else 200  # sklearn expects integer hyperparameters
47 | max_depth = int(sys.argv[2]) if len(sys.argv) > 2 else 10
48 |
49 | model = RandomForestRegressor(n_estimators=n_estimators, max_depth=max_depth)
50 | model.fit(train_x, train_y)
51 | predictions = model.predict(test_x)
52 | (rmse, mae, r2) = eval_metrics(test_y, predictions)
53 |
54 | print("RandomForest Model (n_estimators=%f, max_depth=%f):" % (n_estimators, max_depth))
55 | print(" RMSE: %s" % rmse)
56 | print(" MAE: %s" % mae)
57 | print(" R2: %s" % r2)
58 |
59 | mlflow.log_param("n_estimators", n_estimators)
60 | mlflow.log_param("max_depth", max_depth)
61 | mlflow.log_metric("rmse", rmse)
62 | mlflow.log_metric("r2", r2)
63 | mlflow.log_metric("mae", mae)
64 | mlflow.sklearn.log_model(model, "model")
65 |
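mlflow.sklearn.log_model stores the fitted estimator under the run's "model" artifact path, so it can be loaded back through the pyfunc flavor for inference. A minimal sketch, with <run_id> standing in for the id printed by the tracking server:

import mlflow.pyfunc
import pandas as pd

mlflow.tracking.set_tracking_uri("http://mlflow-image:5500")
# "<run_id>" is a placeholder; look it up in the MLflow UI for the run of interest.
loaded = mlflow.pyfunc.load_model("runs:/<run_id>/model")
# train.py keeps sma1 and sma2 as features after dropping fwd_returns.
sample = pd.DataFrame([[1.1356, 1.1381]], columns=["sma1", "sma2"])
print(loaded.predict(sample))  # one predicted forward return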
--------------------------------------------------------------------------------
/Storage/pgadmin/pgadmin4.db:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/pgadmin/pgadmin4.db
--------------------------------------------------------------------------------
/Storage/pgadmin/sessions/124afc37-5bde-4a12-a5fb-c25c49bc48c5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/pgadmin/sessions/124afc37-5bde-4a12-a5fb-c25c49bc48c5
--------------------------------------------------------------------------------
/Storage/pgadmin/sessions/3e137754-12cf-4207-ac2d-e5523bb2e476:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/pgadmin/sessions/3e137754-12cf-4207-ac2d-e5523bb2e476
--------------------------------------------------------------------------------
/Storage/pgadmin/sessions/72632a26-21ef-4487-8f34-8201b729213c:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/pgadmin/sessions/72632a26-21ef-4487-8f34-8201b729213c
--------------------------------------------------------------------------------
/Storage/pgadmin/sessions/e4af4537-fc0b-4bb0-8ef0-430f8678aaef:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/pgadmin/sessions/e4af4537-fc0b-4bb0-8ef0-430f8678aaef
--------------------------------------------------------------------------------
/Storage/pgadmin/sessions/f618e8d8-213d-4139-a7cf-cff78baec139:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/pgadmin/sessions/f618e8d8-213d-4139-a7cf-cff78baec139
--------------------------------------------------------------------------------
/Storage/postgress_db/scripts/01_create_user.sh:
--------------------------------------------------------------------------------
1 | #! /bin/bash
2 | set -e
3 |
4 | POSTGRES="psql -U postgres"
5 |
6 | # create a shared role to read & write general datasets into postgres
7 | echo "Creating database role: shared"
8 | $POSTGRES <<-EOSQL
9 | CREATE USER shared WITH
10 | LOGIN
11 | NOSUPERUSER
12 | NOCREATEDB
13 | NOCREATEROLE
14 | NOINHERIT
15 | NOREPLICATION
16 | PASSWORD '$SHARED_PASSWORD';
17 | EOSQL
18 |
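The role created here is meant for application-level reads and writes. A minimal psycopg2 sketch for connecting as it (the host and database names are assumptions that depend on the compose service definitions):

import os
import psycopg2  # assumed available in the client container

conn = psycopg2.connect(
    host="postgres-image",                   # hypothetical service name
    dbname="postgres",
    user="shared",
    password=os.environ["SHARED_PASSWORD"],  # same variable the script reads
)
with conn, conn.cursor() as cur:
    cur.execute("SELECT current_user")
    print(cur.fetchone())  # ('shared',)
conn.close()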
--------------------------------------------------------------------------------
/Storage/q_pack/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/__init__.py
--------------------------------------------------------------------------------
/Storage/q_pack/__pycache__/__init__.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/__pycache__/__init__.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/btoandav20/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | from __future__ import (absolute_import, division, print_function,
3 | unicode_literals)
4 |
5 |
6 | from . import feeds as feeds
7 | from . import stores as stores
8 | from . import brokers as brokers
9 | from . import sizers as sizers
10 |
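The package re-exports its feeds, stores, brokers and sizers submodules so everything hangs off one import. A minimal wiring sketch following the usual backtrader store pattern (credentials and parameter names are assumptions; check oandav20store.py for the exact ones):

import backtrader as bt
import btoandav20

# token/account are hypothetical placeholders for real Oanda v20 credentials
store = btoandav20.stores.OandaV20Store(token="<token>", account="<account>", practice=True)
cerebro = bt.Cerebro()
cerebro.setbroker(store.getbroker())  # broker class registered via BrokerCls
data = store.getdata(dataname="EUR_USD", timeframe=bt.TimeFrame.Minutes)
cerebro.adddata(data)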
--------------------------------------------------------------------------------
/Storage/q_pack/btoandav20/__pycache__/__init__.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/btoandav20/__pycache__/__init__.cpython-36.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/btoandav20/__pycache__/__init__.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/btoandav20/__pycache__/__init__.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/btoandav20/brokers/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | from __future__ import (absolute_import, division, print_function,
3 | unicode_literals)
4 |
5 | try:
6 | from .oandav20broker import OandaV20Broker
7 | except ImportError:
8 | pass # the underlying Oanda v20 dependency may not be installed
--------------------------------------------------------------------------------
/Storage/q_pack/btoandav20/brokers/__pycache__/__init__.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/btoandav20/brokers/__pycache__/__init__.cpython-36.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/btoandav20/brokers/__pycache__/__init__.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/btoandav20/brokers/__pycache__/__init__.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/btoandav20/brokers/__pycache__/oandav20broker.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/btoandav20/brokers/__pycache__/oandav20broker.cpython-36.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/btoandav20/brokers/__pycache__/oandav20broker.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/btoandav20/brokers/__pycache__/oandav20broker.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/btoandav20/brokers/oandav20broker.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | from __future__ import (absolute_import, division, print_function,
3 | unicode_literals)
4 |
5 | import collections
6 | from copy import copy
7 | from datetime import date, datetime, timedelta
8 | import threading
9 |
10 | from backtrader.feed import DataBase
11 | from backtrader import (TimeFrame, num2date, date2num, BrokerBase,
12 | Order, BuyOrder, SellOrder, OrderBase, OrderData)
13 | from backtrader.utils.py3 import bytes, with_metaclass, MAXFLOAT
14 | from backtrader.metabase import MetaParams
15 | from backtrader.comminfo import CommInfoBase
16 | from backtrader.position import Position
17 | from backtrader.utils import AutoDict, AutoOrderedDict
19 |
20 | from ..stores import oandav20store
21 |
22 | class OandaV20CommInfo(CommInfoBase):
23 | def getvaluesize(self, size, price):
24 | # In real life the margin approaches the price
25 | return abs(size) * price
26 |
27 | def getoperationcost(self, size, price):
28 | '''Returns the needed amount of cash an operation would cost'''
29 | # Same reasoning as above
30 | return abs(size) * price
31 |
32 |
33 | class MetaOandaV20Broker(BrokerBase.__class__):
34 | def __init__(cls, name, bases, dct):
35 | '''Class has already been created ... register'''
36 | # Initialize the class
37 | super(MetaOandaV20Broker, cls).__init__(name, bases, dct)
38 | oandav20store.OandaV20Store.BrokerCls = cls
39 |
40 |
41 | class OandaV20Broker(with_metaclass(MetaOandaV20Broker, BrokerBase)):
42 | '''Broker implementation for Oanda v20.
43 |
44 | This class maps the orders/positions from Oanda to the
45 | internal API of ``backtrader``.
46 |
47 | Params:
48 |
49 | - ``use_positions`` (default:``True``): When connecting to the broker
50 | provider use the existing positions to kickstart the broker.
51 |
52 | Set to ``False`` during instantiation to disregard any existing
53 | position
54 | '''
55 | params = (
56 | ('use_positions', True),
57 | )
58 |
59 | def __init__(self, **kwargs):
60 | super(OandaV20Broker, self).__init__()
61 | self.o = oandav20store.OandaV20Store(**kwargs)
62 |
63 | self.orders = collections.OrderedDict() # orders by order id
64 | self.notifs = collections.deque() # holds orders which are notified
65 |
66 | self.opending = collections.defaultdict(list) # pending transmission
67 | self.brackets = dict() # confirmed brackets
68 |
69 | self.startingcash = self.cash = 0.0
70 | self.startingvalue = self.value = 0.0
71 | self.positions = collections.defaultdict(Position)
72 | self.addcommissioninfo(self, OandaV20CommInfo(mult=1.0, stocklike=False))
73 |
74 | def start(self):
75 | super(OandaV20Broker, self).start()
76 | self.addcommissioninfo(self, OandaV20CommInfo(mult=1.0, stocklike=False))
77 | self.o.start(broker=self)
78 | self.startingcash = self.cash = cash = self.o.get_cash()
79 | self.startingvalue = self.value = self.o.get_value()
80 |
81 | if self.p.use_positions:
82 | for p in self.o.get_positions():
83 | print('position for instrument:', p['instrument'])
84 | size = float(p['long']['units']) + float(p['short']['units'])
85 | price = float(p['long']['averagePrice']) if size > 0 else float(p['short']['averagePrice'])
86 | self.positions[p['instrument']] = Position(size, price)
87 |
88 | def data_started(self, data):
89 | pos = self.getposition(data)
90 |
91 | if pos.size == 0:
92 | return
93 |
94 | if pos.size < 0:
95 | order = SellOrder(data=data,
96 | size=pos.size, price=pos.price,
97 | exectype=Order.Market,
98 | simulated=True)
99 | else:
100 | order = BuyOrder(data=data,
101 | size=pos.size, price=pos.price,
102 | exectype=Order.Market,
103 | simulated=True)
104 |
105 | order.addcomminfo(self.getcommissioninfo(data))
106 | order.execute(0, pos.size, pos.price,
107 | 0, 0.0, 0.0,
108 | pos.size, 0.0, 0.0,
109 | 0.0, 0.0,
110 | pos.size, pos.price)
111 |
112 | order.completed()
113 | self.notify(order)
114 |
115 | def stop(self):
116 | super(OandaV20Broker, self).stop()
117 | self.o.stop()
118 |
119 | def getcash(self):
120 | # This call cannot block if no answer is available from oanda
121 | self.cash = cash = self.o.get_cash()
122 | return cash
123 |
124 | def getvalue(self, datas=None):
125 | self.value = self.o.get_value()
126 | return self.value
127 |
128 | def getposition(self, data, clone=True):
129 | # return self.o.getposition(data._dataname, clone=clone)
130 | pos = self.positions[data._dataname]
131 | if clone:
132 | pos = pos.clone()
133 |
134 | return pos
135 |
136 | def orderstatus(self, order):
137 | o = self.orders[order.ref]
138 | return o.status
139 |
140 | def _submit(self, oref):
141 | order = self.orders[oref]
142 | order.submit()
143 | self.notify(order)
144 |
145 | def _reject(self, oref):
146 | order = self.orders[oref]
147 | order.reject()
148 | self.notify(order)
149 |
150 | def _accept(self, oref):
151 | order = self.orders[oref]
152 | order.accept()
153 | self.notify(order)
154 |
155 | def _cancel(self, oref):
156 | order = self.orders[oref]
157 | order.cancel()
158 | self.notify(order)
159 |
160 | def _expire(self, oref):
161 | order = self.orders[oref]
162 | order.expire()
163 | self.notify(order)
164 |
165 | def _bracketize(self, order):
166 | pref = getattr(order.parent, 'ref', order.ref) # parent ref or self
167 | br = self.brackets.pop(pref, None) # to avoid recursion
168 | if br is None:
169 | return
170 |
171 | if len(br) == 3: # all 3 orders in place, parent was filled
172 | br = br[1:] # discard index 0, parent
173 | for o in br:
174 | o.activate() # simulate activate for children
175 | self.brackets[pref] = br # not done - reinsert children
176 |
177 | elif len(br) == 2: # one of the children is being filled
178 | oidx = br.index(order) # find index of the filled child (0 or 1)
179 | self._cancel(br[1 - oidx].ref) # cancel the remaining child
180 |
181 | def _fill_external(self, data, size, price):
182 | if size == 0:
183 | return
184 |
185 | pos = self.getposition(data, clone=False)
186 | pos.update(size, price)
187 |
188 | if size < 0:
189 | order = SellOrder(data=data,
190 | size=size, price=price,
191 | exectype=Order.Market,
192 | simulated=True)
193 | else:
194 | order = BuyOrder(data=data,
195 | size=size, price=price,
196 | exectype=Order.Market,
197 | simulated=True)
198 |
199 | order.addcomminfo(self.getcommissioninfo(data))
200 | order.execute(0, size, price,
201 | 0, 0.0, 0.0,
202 | size, 0.0, 0.0,
203 | 0.0, 0.0,
204 | size, price)
205 |
206 | order.completed()
207 | self.notify(order)
208 |
209 | def _fill(self, oref, size, price, reason, **kwargs):
210 | order = self.orders[oref]
211 | if not order.alive(): # can be a bracket
212 | pref = getattr(order.parent, 'ref', order.ref)
213 | if pref not in self.brackets:
214 | msg = ('Order fill received for {}, with price {} and size {} '
215 | 'but order is no longer alive and is not a bracket. '
216 | 'Unknown situation {}')
217 | msg = msg.format(order.ref, price, size, reason)
218 | self.o.put_notification(msg)
219 | return
220 |
221 | # [main, stopside, takeside], neg idx to array are -3, -2, -1
222 | if reason == 'STOP_LOSS_ORDER':
223 | order = self.brackets[pref][-2]
224 | elif reason == 'TAKE_PROFIT_ORDER':
225 | order = self.brackets[pref][-1]
226 | else:
227 | msg = ('Order fill received for {}, with price {} and size {} '
228 | 'but order is no longer alive and is a bracket. '
229 | 'Unknown situation {}')
230 | msg = msg.format(order.ref, price, size, reason)
231 | self.o.put_notification(msg)
232 | return
233 |
234 | data = order.data
235 | pos = self.getposition(data, clone=False)
236 | psize, pprice, opened, closed = pos.update(size, price)
237 | comminfo = self.getcommissioninfo(data)
238 |
239 | closedvalue = closedcomm = 0.0
240 | openedvalue = openedcomm = 0.0
241 | margin = pnl = 0.0
242 |
243 | order.execute(data.datetime[0], size, price,
244 | closed, closedvalue, closedcomm,
245 | opened, openedvalue, openedcomm,
246 | margin, pnl,
247 | psize, pprice)
248 |
249 | if order.executed.remsize:
250 | order.partial()
251 | self.notify(order)
252 | else:
253 | order.completed()
254 | self.notify(order)
255 | self._bracketize(order)
256 |
257 | def _transmit(self, order):
258 | oref = order.ref
259 | pref = getattr(order.parent, 'ref', oref) # parent ref or self
260 |
261 | if order.transmit:
262 | if oref != pref: # children order
263 | # Put parent in orders dict, but add stopside and takeside
264 | # to order creation. Return the takeside order, the last of the 3
265 | takeside = order # alias for clarity
266 | parent, stopside = self.opending.pop(pref)
267 | for o in parent, stopside, takeside:
268 | self.orders[o.ref] = o # write them down
269 |
270 | self.brackets[pref] = [parent, stopside, takeside]
271 | self.o.order_create(parent, stopside, takeside)
272 | return takeside # parent was already returned
273 |
274 | else: # Parent order, which is being transmitted
275 | self.orders[order.ref] = order
276 | return self.o.order_create(order)
277 |
278 | # Not transmitting
279 | self.opending[pref].append(order)
280 | return order
281 |
282 | def buy(self, owner, data,
283 | size, price=None, plimit=None,
284 | exectype=None, valid=None, tradeid=0, oco=None,
285 | trailamount=None, trailpercent=None,
286 | parent=None, transmit=True,
287 | **kwargs):
288 |
289 | order = BuyOrder(owner=owner, data=data,
290 | size=size, price=price, pricelimit=plimit,
291 | exectype=exectype, valid=valid, tradeid=tradeid,
292 | trailamount=trailamount, trailpercent=trailpercent,
293 | parent=parent, transmit=transmit)
294 |
295 | order.addinfo(**kwargs)
296 | order.addcomminfo(self.getcommissioninfo(data))
297 | return self._transmit(order)
298 |
299 | def sell(self, owner, data,
300 | size, price=None, plimit=None,
301 | exectype=None, valid=None, tradeid=0, oco=None,
302 | trailamount=None, trailpercent=None,
303 | parent=None, transmit=True,
304 | **kwargs):
305 |
306 | order = SellOrder(owner=owner, data=data,
307 | size=size, price=price, pricelimit=plimit,
308 | exectype=exectype, valid=valid, tradeid=tradeid,
309 | trailamount=trailamount, trailpercent=trailpercent,
310 | parent=parent, transmit=transmit)
311 |
312 | order.addinfo(**kwargs)
313 | order.addcomminfo(self.getcommissioninfo(data))
314 | return self._transmit(order)
315 |
316 | def cancel(self, order):
317 | o = self.orders[order.ref]
318 | if o.status == Order.Cancelled: # already cancelled on the broker side
319 | return
320 |
321 | return self.o.order_cancel(order)
322 |
323 | def notify(self, order):
324 | self.notifs.append(order.clone())
325 |
326 | def get_notification(self):
327 | if not self.notifs:
328 | return None
329 |
330 | return self.notifs.popleft()
331 |
332 | def next(self):
333 | self.notifs.append(None) # mark notification boundary
334 |
--------------------------------------------------------------------------------
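The `_transmit` logic above files a parent order plus its stop-loss and take-profit children under one parent ref and only sends all three to OANDA once the transmitting child arrives. A minimal sketch of driving that path from a backtrader strategy (the size and price offsets are hypothetical):

import backtrader as bt

class BracketDemo(bt.Strategy):
    def next(self):
        if not self.position:
            # buy_bracket issues three linked orders: the parent entry plus
            # stop-loss and take-profit children; OandaV20Broker._transmit()
            # collects the children under the parent ref and creates them together
            self.buy_bracket(size=1000,
                             price=self.data.close[0],
                             stopprice=self.data.close[0] * 0.99,
                             limitprice=self.data.close[0] * 1.01)

--------------------------------------------------------------------------------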
/Storage/q_pack/btoandav20/feeds/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | from __future__ import (absolute_import, division, print_function,
3 | unicode_literals)
4 |
5 | try:
6 | from .oandav20feed import OandaV20Data
7 | except ImportError:
8 | pass # optional dependency may not be installed
9 |
--------------------------------------------------------------------------------
/Storage/q_pack/btoandav20/feeds/__pycache__/__init__.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/btoandav20/feeds/__pycache__/__init__.cpython-36.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/btoandav20/feeds/__pycache__/__init__.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/btoandav20/feeds/__pycache__/__init__.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/btoandav20/feeds/__pycache__/oandav20feed.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/btoandav20/feeds/__pycache__/oandav20feed.cpython-36.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/btoandav20/feeds/__pycache__/oandav20feed.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/btoandav20/feeds/__pycache__/oandav20feed.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/btoandav20/sizers/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | from __future__ import (absolute_import, division, print_function,
3 | unicode_literals)
4 |
5 | try:
6 | from .oandav20sizer import *
7 | except ImportError:
8 | pass # optional dependency may not be installed
9 |
--------------------------------------------------------------------------------
/Storage/q_pack/btoandav20/sizers/__pycache__/__init__.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/btoandav20/sizers/__pycache__/__init__.cpython-36.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/btoandav20/sizers/__pycache__/__init__.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/btoandav20/sizers/__pycache__/__init__.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/btoandav20/sizers/__pycache__/oandav20sizer.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/btoandav20/sizers/__pycache__/oandav20sizer.cpython-36.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/btoandav20/sizers/__pycache__/oandav20sizer.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/btoandav20/sizers/__pycache__/oandav20sizer.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/btoandav20/sizers/oandav20sizer.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | from __future__ import (absolute_import, division, print_function,
3 | unicode_literals)
4 |
5 | import backtrader as bt
6 | from btoandav20.stores import oandav20store
7 |
8 | class OandaV20Sizer(bt.Sizer):
9 |
10 | params = (
11 | ('percents', 0), # percents of cash
12 | ('amount', 0), # fixed amount
13 | )
14 |
15 | def __init__(self, **kwargs):
16 | self.o = oandav20store.OandaV20Store(**kwargs)
17 |
18 | def _getsizing(self, comminfo, cash, data, isbuy):
19 | position = self.broker.getposition(data)
20 | if position:
21 | return position.size
22 |
23 | avail = 0
24 | name = data.contractdetails['name']
25 | price = self.o.get_pricing(name)
26 | if price is not None:
27 | if isbuy:
28 | avail = float(price['unitsAvailable']['default']['long'])
29 | else:
30 | avail = float(price['unitsAvailable']['default']['short'])
31 | if self.p.percents != 0:
32 | size = avail * (self.p.percents / 100)
33 | elif self.p.amount != 0:
34 | size = (avail / cash) * self.p.amount
35 | else:
36 | size = 0
37 | return int(size)
38 |
39 | class OandaV20Percent(OandaV20Sizer):
40 |
41 | params = (
42 | ('percents', 5),
43 | )
44 |
45 | class OandaV20Cash(OandaV20Sizer):
46 |
47 | params = (
48 | ('amount', 50),
49 | )
50 |
51 | class OandaV20Risk(OandaV20Sizer):
52 |
53 | params = (
54 | ('risk_amount', 0), # risk amount
55 | ('risk_percents', 0), # risk percents
56 | ('stoploss', 10), # stop loss in pips
57 | )
58 |
59 | def _getsizing(self, comminfo, cash, data, isbuy):
60 |
61 | position = self.broker.getposition(data)
62 | if position:
63 | return position.size
64 |
65 | name = data.contractdetails['name']
66 |
67 | sym_from = name[:3]
68 | sym_to = name[4:]
69 | sym_src = self.o.get_currency()
70 |
71 | cash_to_use = 0
72 | if self.p.risk_percents != 0:
73 | cash_to_use = cash * (self.p.risk_percents / 100)
74 | elif self.p.risk_amount != 0:
75 | cash_to_use = self.p.risk_amount
76 |
77 | if sym_src != sym_to:
78 | # convert cash to target currency
79 | price = self.o.get_pricing(sym_src + '_' + sym_to)
80 | if price is not None:
81 | cash_to_use = cash_to_use * float(price['closeoutAsk'])
82 |
83 | size = 0
84 | price_per_pip = cash_to_use / self.p.stoploss
85 | price = self.o.get_pricing(name)
86 | if price is not None:
87 | size = price_per_pip * (1 / 10 ** data.contractdetails['pipLocation'])
88 | if isbuy:
89 | size = min(size, float(price['unitsAvailable']['default']['long']))
90 | else:
91 | size = min(size, float(price['unitsAvailable']['default']['short']))
92 |
93 | return int(size)
94 |
95 | class OandaV20RiskPercent(OandaV20Risk):
96 |
97 | params = (
98 | ('risk_percents', 5),
99 | )
100 |
101 | class OandaV20RiskCash(OandaV20Risk):
102 |
103 | params = (
104 | ('risk_amount', 50),
105 | )
106 |
--------------------------------------------------------------------------------
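A minimal sketch of attaching one of these sizers to a Cerebro run; the parameter values are hypothetical, and any extra keyword arguments would reach the OandaV20Store constructor inside the sizer:

import backtrader as bt
from btoandav20.sizers.oandav20sizer import OandaV20Risk

cerebro = bt.Cerebro()
# risk 1% of available cash per trade against a 20-pip stop
cerebro.addsizer(OandaV20Risk, risk_percents=1, stoploss=20)

--------------------------------------------------------------------------------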
/Storage/q_pack/btoandav20/stores/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | from __future__ import (absolute_import, division, print_function,
3 | unicode_literals)
4 |
5 | try:
6 | from .oandav20store import OandaV20Store
7 | except ImportError:
8 | pass # optional dependency may not be installed
9 |
--------------------------------------------------------------------------------
/Storage/q_pack/btoandav20/stores/__pycache__/__init__.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/btoandav20/stores/__pycache__/__init__.cpython-36.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/btoandav20/stores/__pycache__/__init__.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/btoandav20/stores/__pycache__/__init__.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/btoandav20/stores/__pycache__/oandav20store.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/btoandav20/stores/__pycache__/oandav20store.cpython-36.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/btoandav20/stores/__pycache__/oandav20store.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/btoandav20/stores/__pycache__/oandav20store.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/db_pack/oanda/__pycache__/fx_oanda_daily.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/db_pack/oanda/__pycache__/fx_oanda_daily.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/db_pack/oanda/__pycache__/fx_oanda_minute.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/db_pack/oanda/__pycache__/fx_oanda_minute.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/db_pack/oanda/fx_oanda_daily.py:
--------------------------------------------------------------------------------
1 | """
2 | Created: October 06 2019 2018
3 |
4 | Aauthor: Saeed Rahman
5 |
6 | Use Case: Download Historical Data for Forex Majors and update the DB based on the interested_tickers.csv
7 |
8 | Successful Test Cases:
9 | - daily_data table is empty
10 | - only 1 ticker in the interested_tickers.csv (comma appended at the end of tuple)
11 | - No items in interested_tickers.csv
12 | - There are missing as well new (new to daily_data) items in the interested_ticker.csv
13 |
14 | Future work:
15 | - File Paths dynamic
16 | - Parameterize
17 | * Filelocation
18 | * DB Details and Credential
19 | * DB Table
20 | - Add Date Range in interested_tickers.csv
21 | """
22 |
23 | import datetime
24 | import psycopg2
25 | import pandas as pd
26 | import os
27 | import io
28 | import boto3
29 |
30 | from oandapyV20.contrib.factories import InstrumentsCandlesFactory
31 | import oandapyV20.endpoints.accounts as accounts
32 | import oandapyV20
33 | import q_credentials.db_secmaster_cred as db_secmaster_cred
34 | import q_credentials.oanda_cred as oanda_cred
35 | MASTER_LIST_FAILED_SYMBOLS = []
36 |
37 | def obtain_list_db_tickers(conn):
38 | """
39 | query our Postgres database table 'symbol' for a list of all tickers in our symbol table
40 | args:
41 | conn: a Postgres DB connection object
42 | returns:
43 | list of tuples
44 | """
45 | with conn:
46 | cur = conn.cursor()
47 | cur.execute("SELECT id, ticker FROM symbol")
48 | data = cur.fetchall()
49 | return [(d[0], d[1]) for d in data]
50 |
51 | def fetch_vendor_id(vendor_name, conn):
52 | """
53 | Retrieve our vendor id from our PostgreSQL DB, table data_vendor.
54 | args:
55 | vendor_name: name of our vendor, type string.
56 | conn: a Postgres DB connection object
57 | return:
58 | vendor id as integer
59 | """
60 | cur = conn.cursor()
61 | cur.execute("SELECT id FROM data_vendor WHERE name = %s", (vendor_name,))
62 | # will return a list of tuples
63 | vendor_id = cur.fetchall()
64 | # index to our first tuple and our first value
65 | vendor_id = vendor_id[0][0]
66 | return vendor_id
67 |
68 |
69 | def load_data(symbol, symbol_id, vendor_id, conn, start_date):
70 | """
71 | This will load stock data (date+OHLCV) and additional info to our daily_data table.
72 | args:
73 | symbol: stock ticker, type string.
74 | symbol_id: stock id referenced in symbol(id) column, type integer.
75 | vendor_id: data vendor id referenced in data_vendor(id) column, type integer.
76 | conn: a Postgres DB connection object
77 | return:
78 | None
79 | """
80 |
81 | client = oandapyV20.API(access_token=oanda_cred.token_practice)
82 | cur = conn.cursor()
83 | end_dt = datetime.datetime.now()
84 | if end_dt.isoweekday() in set((6, 7)): # to take the nearest weekday
85 | end_dt -= datetime.timedelta(days=end_dt.isoweekday() % 5)
86 |
87 | try:
88 | data = oanda_historical_data(instrument=symbol,start_date=start_date.strftime("%Y-%m-%dT%H:%M:%SZ"),end_date=end_dt.strftime("%Y-%m-%dT%H:%M:%SZ"),client=client)
89 | # failures are recorded and re-raised so the caller can skip this symbol
90 | except Exception:
91 | MASTER_LIST_FAILED_SYMBOLS.append(symbol)
92 | raise Exception('Failed to load {}'.format(symbol))
93 |
94 | if data.empty:
95 | print(symbol," already updated")
96 |
97 | else:
98 | # create new dataframe matching our table schema
99 | # and re-arrange our dataframe to match our database table
100 | columns_table_order = ['data_vendor_id', 'stock_id', 'created_date',
101 | 'last_updated_date', 'date_price', 'open_price',
102 | 'high_price', 'low_price', 'close_price', 'volume']
103 | newDF = pd.DataFrame()
104 | newDF['date_price'] = data.index
105 | data.reset_index(drop=True,inplace=True)
106 | newDF['open_price'] = data['open']
107 | newDF['high_price'] = data['high']
108 | newDF['low_price'] = data['low']
109 | newDF['close_price'] = data['close']
110 | newDF['volume'] = data['volume']
111 | newDF['stock_id'] = symbol_id
112 | newDF['data_vendor_id'] = vendor_id
113 | newDF['created_date'] = datetime.datetime.utcnow()
114 | newDF['last_updated_date'] = datetime.datetime.utcnow()
115 | newDF = newDF[columns_table_order]
116 |
117 |
118 | # ensure our data is sorted by date
119 | newDF = newDF.sort_values(by=['date_price'], ascending = True)
120 |
121 | print(newDF['stock_id'].unique())
122 | print(newDF['date_price'].min())
123 | print(newDF['date_price'].max())
124 | print("")
125 |
126 | # convert our dataframe to a list
127 | list_of_lists = newDF.values.tolist()
128 | # convert our list to a list of tuples
129 | tuples_mkt_data = [tuple(x) for x in list_of_lists]
130 |
131 | # WRITE DATA TO DB
132 | insert_query = """
133 | INSERT INTO daily_data (data_vendor_id, stock_id, created_date,
134 | last_updated_date, date_price, open_price, high_price, low_price, close_price, volume)
135 | VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
136 | """
137 | cur.executemany(insert_query, tuples_mkt_data)
138 | conn.commit()
139 | print('{} complete!'.format(symbol))
140 |
141 |
142 | def oanda_historical_data(instrument,start_date,end_date,granularity='D',client=None):
143 | params = {
144 | "from": start_date,
145 | "to": end_date,
146 | "granularity": granularity,
147 | "count": 2500,
148 | }
149 |
150 | df_full=pd.DataFrame()
151 | for r in InstrumentsCandlesFactory(instrument=instrument,params=params):
152 | client.request(r)
153 | dat = []
154 | api_data=r.response.get('candles')
155 | if(api_data):
156 | for oo in r.response.get('candles'):
157 | dat.append([oo['time'], oo['volume'], oo['mid']['o'], oo['mid']['h'], oo['mid']['l'], oo['mid']['c']])
158 |
159 | df = pd.DataFrame(dat)
160 | df.columns = ['time', 'volume', 'open', 'high', 'low', 'close']
161 | df = df.set_index('time')
162 | if df_full.empty:
163 | df_full=df
164 | else:
165 | df_full=df_full.append(df)
166 | df_full.index=pd.to_datetime(df_full.index)
167 | return df_full
168 |
169 | def main():
170 |
171 | initial_start_date = datetime.datetime(2010,12,30)
172 |
173 | db_host=db_secmaster_cred.dbHost
174 | db_user=db_secmaster_cred.dbUser
175 | db_password=db_secmaster_cred.dbPWD
176 | db_name=db_secmaster_cred.dbName
177 |
178 | # connect to our securities_master database
179 | conn = psycopg2.connect(host=db_host, database=db_name, user=db_user, password=db_password)
180 |
181 | vendor = 'Oanda'
182 | vendor_id = fetch_vendor_id(vendor, conn)
183 |
184 | s3 = boto3.client('s3',endpoint_url="http://minio-image:9000",aws_access_key_id="minio-image",aws_secret_access_key="minio-image-pass")
185 | Bucket="airflow-files"
186 | Key="interested_tickers.xlsx"
187 | read_file = s3.get_object(Bucket=Bucket, Key=Key)
188 | df_tickers = pd.read_excel(io.BytesIO(read_file['Body'].read()), sheet_name="daily")
189 |
190 |
191 | if df_tickers.empty:
192 | print("Empty Ticker List")
193 | else:
194 | # Get the last stored date for each interested ticker
195 | sql="""select a.last_date, b.id as stock_id, b.ticker from
196 | (select max(date_price) as last_date, stock_id
197 | from daily_data
198 | group by stock_id) a right join symbol b on a.stock_id = b.id
199 | where b.ticker in {}""".format(tuple(df_tickers['Tickers'])).replace(",)", ")")
200 | df_ticker_last_day=pd.read_sql(sql,con=conn)
201 |
202 | # Filling the empty dates returned from the DB with the initial start date
203 | df_ticker_last_day['last_date'].fillna(initial_start_date,inplace=True)
204 |
205 | # Add 1 day so that data is appended starting from the next date
206 | df_ticker_last_day['last_date']=df_ticker_last_day['last_date']+datetime.timedelta(days=1)
207 |
208 | startTime = datetime.datetime.now()
209 |
210 | print (datetime.datetime.now() - startTime)
211 |
212 | for i, stock in df_ticker_last_day.iterrows():
213 | # download stock data and dump into daily_data table in our Postgres DB
214 | last_date = stock['last_date']
215 | symbol_id = stock['stock_id']
216 | symbol = stock['ticker']
217 | try:
218 | load_data(symbol, symbol_id, vendor_id, conn, start_date=last_date)
219 | except Exception: # skip the failed symbol and continue with the rest
220 | continue
221 |
222 | # let's write our failed symbol list to a text file for reference
223 | with open('failed_symbols.txt', 'w') as file_to_write:
224 |
225 | for symbol in MASTER_LIST_FAILED_SYMBOLS:
226 | file_to_write.write("%s\n" % symbol)
227 |
228 | print(datetime.datetime.now() - startTime)
229 |
230 |
231 | if __name__ == "__main__":
232 | main()
--------------------------------------------------------------------------------
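For reference, `oanda_historical_data` can be exercised on its own, outside the DB update flow. A minimal sketch, assuming /Storage/q_pack is on PYTHONPATH and a practice-account token is available (the token below is a placeholder):

import oandapyV20
from db_pack.oanda.fx_oanda_daily import oanda_historical_data

client = oandapyV20.API(access_token="YOUR_PRACTICE_TOKEN")
df = oanda_historical_data(instrument="EUR_USD",
                           start_date="2020-01-01T00:00:00Z",
                           end_date="2020-02-01T00:00:00Z",
                           client=client)
print(df.tail())  # OHLCV frame indexed by candle timestamp

--------------------------------------------------------------------------------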
/Storage/q_pack/db_pack/oanda/fx_oanda_minute.py:
--------------------------------------------------------------------------------
1 | """
2 | Created: October 06 2019 2018
3 |
4 | Aauthor: Saeed Rahman
5 |
6 | Use Case: Download Historical Data for Forex Majors and update the DB based on the interested_tickers.csv
7 |
8 | Successful Test Cases:
9 | - daily_data table is empty
10 | - only 1 ticker in the interested_tickers.csv (comma appended at the end of tuple)
11 | - No items in interested_tickers.csv
12 | - There are missing as well new (new to daily_data) items in the interested_ticker.csv
13 |
14 | Future work:
15 | - File Paths dynamic
16 | - Parameterize
17 | * Filelocation
18 | * DB Details and Credential
19 | * DB Table
20 | - Add Date Range in interested_tickers.csv
21 | """
22 |
23 | import datetime
24 | import psycopg2
25 | import pandas as pd
26 | import os
27 | import boto3
28 | import io
29 |
30 | from oandapyV20.contrib.factories import InstrumentsCandlesFactory
31 | import oandapyV20.endpoints.accounts as accounts
32 | import oandapyV20
33 |
34 | import q_credentials.db_secmaster_cred as db_secmaster_cred
35 | import q_credentials.oanda_cred as oanda_cred
36 |
37 | MASTER_LIST_FAILED_SYMBOLS = []
38 |
39 | def obtain_list_db_tickers(conn):
40 | """
41 | query our Postgres database table 'symbol' for a list of all tickers in our symbol table
42 | args:
43 | conn: a Postgres DB connection object
44 | returns:
45 | list of tuples
46 | """
47 | with conn:
48 | cur = conn.cursor()
49 | cur.execute("SELECT id, ticker FROM symbol")
50 | data = cur.fetchall()
51 | return [(d[0], d[1]) for d in data]
52 |
53 | def fetch_vendor_id(vendor_name, conn):
54 | """
55 | Retrieve our vendor id from our PostgreSQL DB, table data_vendor.
56 | args:
57 | vendor_name: name of our vendor, type string.
58 | conn: a Postgres DB connection object
59 | return:
60 | vendor id as integer
61 | """
62 | cur = conn.cursor()
63 | cur.execute("SELECT id FROM data_vendor WHERE name = %s", (vendor_name,))
64 | # will return a list of tuples
65 | vendor_id = cur.fetchall()
66 | # index to our first tuple and our first value
67 | vendor_id = vendor_id[0][0]
68 | return vendor_id
69 |
70 |
71 | def load_data(symbol, symbol_id, vendor_id, conn, start_date):
72 | """
73 | This will load stock data (date+OHLCV) and additional info to our minute_data table.
74 | args:
75 | symbol: stock ticker, type string.
76 | symbol_id: stock id referenced in symbol(id) column, type integer.
77 | vendor_id: data vendor id referenced in data_vendor(id) column, type integer.
78 | conn: a Postgres DB connection object
79 | return:
80 | None
81 | """
82 | client = oandapyV20.API(access_token=oanda_cred.token_practice)
83 | cur = conn.cursor()
84 | end_dt = datetime.datetime.now()
85 | if end_dt.isoweekday() in set((6, 7)): # to take the nearest weekday
86 | end_dt -= datetime.timedelta(days=end_dt.isoweekday() % 5)
87 |
88 | try:
89 | data = oanda_historical_data(instrument=symbol,start_date=start_date.strftime("%Y-%m-%dT%H:%M:%SZ"),end_date=end_dt.strftime("%Y-%m-%dT%H:%M:%SZ"),client=client)
90 | # failures are recorded and re-raised so the caller can skip this symbol
91 | except Exception:
92 | MASTER_LIST_FAILED_SYMBOLS.append(symbol)
93 | raise Exception('Failed to load {}'.format(symbol))
94 |
95 | if data.empty:
96 | print(symbol," already updated")
97 |
98 | else:
99 | # create new dataframe matching our table schema
100 | # and re-arrange our dataframe to match our database table
101 | columns_table_order = ['data_vendor_id', 'stock_id', 'created_date',
102 | 'last_updated_date', 'date_price', 'open_price',
103 | 'high_price', 'low_price', 'close_price', 'volume']
104 | newDF = pd.DataFrame()
105 | newDF['date_price'] = data.index
106 | data.reset_index(drop=True,inplace=True)
107 | newDF['open_price'] = data['open']
108 | newDF['high_price'] = data['high']
109 | newDF['low_price'] = data['low']
110 | newDF['close_price'] = data['close']
111 | newDF['volume'] = data['volume']
112 | newDF['stock_id'] = symbol_id
113 | newDF['data_vendor_id'] = vendor_id
114 | newDF['created_date'] = datetime.datetime.utcnow()
115 | newDF['last_updated_date'] = datetime.datetime.utcnow()
116 | newDF = newDF[columns_table_order]
117 |
118 |
119 | # ensure our data is sorted by date
120 | newDF = newDF.sort_values(by=['date_price'], ascending = True)
121 |
122 | print(newDF['stock_id'].unique())
123 | print(newDF['date_price'].min())
124 | print(newDF['date_price'].max())
125 | print("")
126 |
127 | # convert our dataframe to a list
128 | list_of_lists = newDF.values.tolist()
129 | # convert our list to a list of tuples
130 | tuples_mkt_data = [tuple(x) for x in list_of_lists]
131 |
132 | # WRITE DATA TO DB
133 | insert_query = """
134 | INSERT INTO minute_data (data_vendor_id, stock_id, created_date,
135 | last_updated_date, date_price, open_price, high_price, low_price, close_price, volume)
136 | VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
137 | """
138 | cur.executemany(insert_query, tuples_mkt_data)
139 | conn.commit()
140 | print('{} complete!'.format(symbol))
141 |
142 |
143 | def oanda_historical_data(instrument,start_date,end_date,granularity='M1',client=None):
144 | params = {
145 | "from": start_date,
146 | "to": end_date,
147 | "granularity": granularity,
148 | "count": 2500,
149 | }
150 |
151 | df_full=pd.DataFrame()
152 | for r in InstrumentsCandlesFactory(instrument=instrument,params=params):
153 | client.request(r)
154 | dat = []
155 | api_data=r.response.get('candles')
156 | if(api_data):
157 | for oo in r.response.get('candles'):
158 | dat.append([oo['time'], oo['volume'], oo['mid']['o'], oo['mid']['h'], oo['mid']['l'], oo['mid']['c']])
159 |
160 | df = pd.DataFrame(dat)
161 | df.columns = ['time', 'volume', 'open', 'high', 'low', 'close']
162 | df = df.set_index('time')
163 | if df_full.empty:
164 | df_full=df
165 | else:
166 | df_full=df_full.append(df)
167 | df_full.index=pd.to_datetime(df_full.index)
168 | return df_full
169 |
170 | def main():
171 |
172 | initial_start_date = datetime.datetime(2019,12,30)
173 |
174 | db_host=db_secmaster_cred.dbHost
175 | db_user=db_secmaster_cred.dbUser
176 | db_password=db_secmaster_cred.dbPWD
177 | db_name=db_secmaster_cred.dbName
178 |
179 | # connect to our securities_master database
180 | conn = psycopg2.connect(host=db_host, database=db_name, user=db_user, password=db_password)
181 |
182 | vendor = 'Oanda'
183 | vendor_id = fetch_vendor_id(vendor, conn)
184 |
185 | s3 = boto3.client('s3',endpoint_url="http://minio-image:9000",aws_access_key_id="minio-image",aws_secret_access_key="minio-image-pass")
186 | Bucket="airflow-files"
187 | Key="interested_tickers.xlsx"
188 | read_file = s3.get_object(Bucket=Bucket, Key=Key)
189 | df_tickers = pd.read_excel(io.BytesIO(read_file['Body'].read()), sheet_name="minute")
190 |
191 | if df_tickers.empty:
192 | print("Empty Ticker List")
193 | else:
194 | # Get the last stored date for each interested ticker
195 | sql="""select a.last_date, b.id as stock_id, b.ticker from
196 | (select max(date_price) as last_date, stock_id
197 | from minute_data
198 | group by stock_id) a right join symbol b on a.stock_id = b.id
199 | where b.ticker in {}""".format(tuple(df_tickers['Tickers'])).replace(",)", ")")
200 | df_ticker_last_day=pd.read_sql(sql,con=conn)
201 |
202 | # Filling the empty dates returned from the DB with the initial start date
203 | df_ticker_last_day['last_date'].fillna(initial_start_date,inplace=True)
204 |
205 | # Add 1 minute so that data is appended starting from the next bar
206 | df_ticker_last_day['last_date']=df_ticker_last_day['last_date']+datetime.timedelta(minutes=1)
207 |
208 | startTime = datetime.datetime.now()
209 |
210 | print (datetime.datetime.now() - startTime)
211 |
212 | for i, stock in df_ticker_last_day.iterrows():
213 | # download stock data and dump into minute_data table in our Postgres DB
214 | last_date = stock['last_date']
215 | symbol_id = stock['stock_id']
216 | symbol = stock['ticker']
217 | try:
218 | load_data(symbol, symbol_id, vendor_id, conn, start_date=last_date)
219 | except Exception: # skip the failed symbol and continue with the rest
220 | continue
221 |
222 | # let's write our failed symbol list to a text file for reference
223 | with open('failed_symbols.txt', 'w') as file_to_write:
224 |
225 | for symbol in MASTER_LIST_FAILED_SYMBOLS:
226 | file_to_write.write("%s\n" % symbol)
227 |
228 | print(datetime.datetime.now() - startTime)
229 |
230 |
231 | if __name__ == "__main__":
232 | main()
--------------------------------------------------------------------------------
/Storage/q_pack/db_pack/schema/risk_db_schema_builder.py:
--------------------------------------------------------------------------------
1 | import psycopg2
2 | from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
3 | import os
4 | import q_credentials.db_risk_cred as db_risk_cred
5 |
6 |
7 | def create_db(db_credential_info):
8 | """
9 | create a new database if it does not exist in the PostgreSQL database
10 | will use method 'check_db_exists' before creating a new database
11 | args:
12 | db_credential_info: database credentials including host, user, password and db name, type array
13 | returns:
14 | NoneType
15 | """
16 | db_host, db_user, db_password, db_name = db_credential_info
17 |
18 | if check_db_exists(db_credential_info):
19 | pass
20 | else:
21 | print('Creating new database.')
22 | conn = psycopg2.connect(host=db_host, database='postgres', user=db_user, password=db_password)
23 | conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
24 | cur = conn.cursor()
25 | cur.execute("CREATE DATABASE %s ;" % db_name)
26 | cur.close()
27 |
28 |
29 | def check_db_exists(db_credential_info):
30 | """
31 | checks to see if a database already exists in the PostgreSQL database
32 | args:
33 | db_credential_info: database credentials including host, user, password and db name, type array
34 | returns:
35 | boolean value (True or False)
36 | """
37 | db_host, db_user, db_password, db_name = db_credential_info
38 | try:
39 | conn = psycopg2.connect(host=db_host, database=db_name, user=db_user, password=db_password)
40 | cur = conn.cursor()
41 | cur.close()
42 | print('Database exists.')
43 | return True
44 | except:
45 | print("Database does not exist.")
46 | return False
47 |
48 |
49 | def create_risk_tables(db_credential_info):
50 | """
51 | create table in designated PostgreSQL database
52 | will use method 'check_db_exists' before creating table
53 | args:
54 | db_credential_info: database credentials including host, user, password and db name, type array
55 | returns:
56 | NoneType
57 | """
58 | db_host, db_user, db_password, db_name = db_credential_info
59 | conn = None
60 |
61 | if check_db_exists(db_credential_info):
62 | commands = (
63 | """
64 | CREATE TABLE run_information (
65 | run_id SERIAL PRIMARY KEY,
66 | run_type TEXT NOT NULL,
67 | recorded_time TIMESTAMP NOT NULL,
68 | start_time TIMESTAMP NOT NULL,
69 | end_time TIMESTAMP NULL,
70 | strategy TEXT NOT NULL,
71 | tickers TEXT NOT NULL,
72 | indicators TEXT NULL,
73 | frequency TEXT NOT NULL,
74 | account TEXT NULL,
75 | log_file TEXT NULL
76 | )
77 | """,
78 | """
79 | CREATE TABLE position_performance (
80 | id SERIAL PRIMARY KEY,
81 | run_id INTEGER NOT NULL,
82 | recorded_time TIMESTAMP NOT NULL,
83 | strategy TEXT NOT NULL,
84 | ref INTEGER NULL,
85 | direction TEXT NOT NULL,
86 | ticker TEXT NOT NULL,
87 | datein TIMESTAMP NOT NULL,
88 | pricein NUMERIC NOT NULL,
89 | dateout TIMESTAMP NOT NULL,
90 | priceout NUMERIC NOT NULL,
91 | change_percentage NUMERIC NULL,
92 | pnl NUMERIC NOT NULL,
93 | pnl_percentage NUMERIC NULL,
94 | size NUMERIC NOT NULL,
95 | value NUMERIC NOT NULL,
96 | cumpnl NUMERIC NOT NULL,
97 | nbars INTEGER NOT NULL,
98 | pnl_per_bar NUMERIC NULL,
99 | mfe_percentage NUMERIC NULL,
100 | mae_percentage NUMERIC NULL,
101 | FOREIGN KEY (run_id) REFERENCES run_information(run_id)
102 | )
103 | """,
104 | """
105 | CREATE TABLE strategy_performance (
106 | id SERIAL PRIMARY KEY,
107 | run_id INTEGER NOT NULL,
108 | total_open NUMERIC NULL,
109 | total_closed NUMERIC NULL,
110 | total_won NUMERIC NULL,
111 | total_lost NUMERIC NULL,
112 | win_streak NUMERIC NULL,
113 | lose_streak NUMERIC NULL,
114 | pnl_net NUMERIC NULL,
115 | strike_rate NUMERIC NULL,
116 | sqn NUMERIC NULL,
117 | total_compound_return NUMERIC NULL,
118 | avg_return NUMERIC NULL,
119 | annual_norm_return NUMERIC NULL,
120 | max_draw_per NUMERIC NULL,
121 | max_draw_val NUMERIC NULL,
122 | max_draw_len NUMERIC NULL,
123 | FOREIGN KEY (run_id) REFERENCES run_information(run_id)
124 | )
125 | """,
126 | """
127 | CREATE TABLE positions (
128 | id SERIAL PRIMARY KEY,
129 | run_id INTEGER NOT NULL,
130 | recorded_time TIMESTAMP NOT NULL,
131 | strategy TEXT NOT NULL,
132 | transaction_date TIMESTAMP NOT NULL,
133 | size NUMERIC NULL,
134 | price NUMERIC NULL,
135 | sid INTEGER NOT NULL,
136 | ticker TEXT NOT NULL,
137 | value NUMERIC NULL,
138 | FOREIGN KEY (run_id) REFERENCES run_information(run_id)
139 | )
140 | """
141 | )
142 | try:
143 | for command in commands:
144 | print('Building tables.')
145 | conn = psycopg2.connect(host=db_host,database=db_name, user=db_user, password=db_password)
146 | cur = conn.cursor()
147 | cur.execute(command)
148 | # need to commit this change
149 | conn.commit()
150 | cur.close()
151 | except (Exception, psycopg2.DatabaseError) as error:
152 | print(error)
153 | cur.close()
154 | finally:
155 | if conn:
156 | conn.close()
157 | else:
158 | pass
159 |
160 | def main():
161 |
162 | # create our instance variables for host, username, password and database name
163 | db_host=db_risk_cred.dbHost
164 | db_user=db_risk_cred.dbUser
165 | db_password=db_risk_cred.dbPWD
166 | db_name=db_risk_cred.dbName
167 |
168 | # first lets create our database from postgres
169 | create_db([db_host, db_user, db_password, db_name])
170 |
171 | # second lets create our tables for our new database
172 | create_risk_tables([db_host, db_user, db_password, db_name])
173 |
174 |
175 | if __name__ == "__main__":
176 | main()
--------------------------------------------------------------------------------
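After running the builder, the four risk tables can be verified with a standard information_schema query. A minimal sketch reusing the same credential module:

import psycopg2
import q_credentials.db_risk_cred as db_risk_cred

# list the tables the builder just created in the risk database
conn = psycopg2.connect(host=db_risk_cred.dbHost, database=db_risk_cred.dbName,
                        user=db_risk_cred.dbUser, password=db_risk_cred.dbPWD)
cur = conn.cursor()
cur.execute("SELECT table_name FROM information_schema.tables WHERE table_schema = 'public'")
print(sorted(t[0] for t in cur.fetchall()))
cur.close()
conn.close()

--------------------------------------------------------------------------------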
/Storage/q_pack/db_pack/schema/secmaster_db_schema_builder.py:
--------------------------------------------------------------------------------
1 |
2 |
3 | import psycopg2
4 | from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
5 | import os
6 | import q_credentials.db_secmaster_cred as db_secmaster_cred
7 |
8 |
9 | def create_db(db_credential_info):
10 | """
11 | create a new database if it does not exist in the PostgreSQL database
12 | will use method 'check_db_exists' before creating a new database
13 | args:
14 | db_credential_info: database credentials including host, user, password and db name, type array
15 | returns:
16 | NoneType
17 | """
18 | db_host, db_user, db_password, db_name = db_credential_info
19 |
20 |
21 | if check_db_exists(db_credential_info):
22 | pass
23 | else:
24 | print('Creating new database.')
25 | # Here we are connecting to the existing DB to create a new DB
26 | conn = psycopg2.connect(host=db_host, database='postgres', user=db_user, password=db_password)
27 | conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
28 | cur = conn.cursor()
29 | cur.execute("CREATE DATABASE %s ;" % db_name)
30 | cur.close()
31 |
32 |
33 | def check_db_exists(db_credential_info):
34 | """
35 | checks to see if a database already exists in the PostgreSQL database
36 | args:
37 | db_credential_info: database credentials including host, user, password and db name, type array
38 | returns:
39 | boolean value (True or False)
40 | """
41 | db_host, db_user, db_password, db_name = db_credential_info
42 | try:
43 | conn = psycopg2.connect(host=db_host, database=db_name, user=db_user, password=db_password)
44 | cur = conn.cursor()
45 | cur.close()
46 | print('Database exists.')
47 | return True
48 | except:
49 | print("Database does not exist.")
50 | return False
51 |
52 |
53 | def create_mkt_tables(db_credential_info):
54 | """
55 | create table in designated PostgreSQL database
56 | will use method 'check_db_exists' before creating table
57 | args:
58 | db_credential_info: database credentials including host, user, password and db name, type array
59 | returns:
60 | NoneType
61 | """
62 | db_host, db_user, db_password, db_name = db_credential_info
63 | conn = None
64 |
65 | if check_db_exists(db_credential_info):
66 | commands = (
67 | """
68 | CREATE TABLE exchange (
69 | id SERIAL PRIMARY KEY,
70 | abbrev TEXT NOT NULL,
71 | name TEXT NOT NULL,
72 | currency VARCHAR(64) NULL,
73 | created_date TIMESTAMP NOT NULL,
74 | last_updated_date TIMESTAMP NOT NULL
75 | )
76 | """,
77 | """
78 | CREATE TABLE data_vendor (
79 | id SERIAL PRIMARY KEY,
80 | name TEXT UNIQUE NOT NULL,
81 | website_url VARCHAR(255) NULL,
82 | created_date TIMESTAMP NOT NULL,
83 | last_updated_date TIMESTAMP NOT NULL
84 | )
85 | """,
86 | """
87 | CREATE TABLE symbol (
88 | id SERIAL PRIMARY KEY,
89 | exchange_id integer NULL,
90 | ticker TEXT NOT NULL,
91 | instrument TEXT NOT NULL,
92 | name TEXT NOT NULL,
93 | sector TEXT NOT NULL,
94 | currency VARCHAR(64) NULL,
95 | created_date TIMESTAMP NOT NULL,
96 | last_updated_date TIMESTAMP NOT NULL,
97 | FOREIGN KEY (exchange_id) REFERENCES exchange(id)
98 | )
99 | """,
100 | """
101 | CREATE TABLE daily_data (
102 | id SERIAL PRIMARY KEY,
103 | data_vendor_id INTEGER NOT NULL,
104 | stock_id INTEGER NOT NULL,
105 | created_date TIMESTAMP NOT NULL,
106 | last_updated_date TIMESTAMP NOT NULL,
107 | date_price TIMESTAMP,
108 | open_price NUMERIC,
109 | high_price NUMERIC,
110 | low_price NUMERIC,
111 | close_price NUMERIC,
112 | volume BIGINT,
113 | FOREIGN KEY (data_vendor_id) REFERENCES data_vendor(id),
114 | FOREIGN KEY (stock_id) REFERENCES symbol(id)
115 | )
116 | """,
117 | """
118 | CREATE TABLE minute_data (
119 | id SERIAL PRIMARY KEY,
120 | data_vendor_id INTEGER NOT NULL,
121 | stock_id INTEGER NOT NULL,
122 | created_date TIMESTAMP NOT NULL,
123 | last_updated_date TIMESTAMP NOT NULL,
124 | date_price TIMESTAMP,
125 | open_price NUMERIC,
126 | high_price NUMERIC,
127 | low_price NUMERIC,
128 | close_price NUMERIC,
129 | volume BIGINT,
130 | FOREIGN KEY (data_vendor_id) REFERENCES data_vendor(id),
131 | FOREIGN KEY (stock_id) REFERENCES symbol(id)
132 | )
133 | """)
134 | try:
135 | for command in commands:
136 | print('Building tables.')
137 | conn = psycopg2.connect(host=db_host,database=db_name, user=db_user, password=db_password)
138 | cur = conn.cursor()
139 | cur.execute(command)
140 | # need to commit this change
141 | conn.commit()
142 | cur.close()
143 | except (Exception, psycopg2.DatabaseError) as error:
144 | print(error)
145 | cur.close()
146 | finally:
147 | if conn:
148 | conn.close()
149 | else:
150 | pass
151 |
152 |
153 |
154 |
155 |
156 | def main():
157 | db_host=db_secmaster_cred.dbHost
158 | db_user=db_secmaster_cred.dbUser
159 | db_password=db_secmaster_cred.dbPWD
160 | db_name=db_secmaster_cred.dbName
161 |
162 | # first lets create our database from postgres
163 | create_db([db_host, db_user, db_password, db_name])
164 |
165 | # second lets create our tables for our new database
166 | create_mkt_tables([db_host, db_user, db_password, db_name])
167 |
168 |
169 | if __name__ == "__main__":
170 | main()
--------------------------------------------------------------------------------
/Storage/q_pack/db_pack/schema/secmaster_db_symbol_loader.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | Created on 8/31/2019
4 |
5 | @author: Saeed Rahman
6 | """
7 | from __future__ import print_function
8 |
9 | import datetime
10 | import psycopg2
11 | import os
12 |
13 |
14 | import oandapyV20
15 | import oandapyV20.endpoints.trades as trades
16 | import oandapyV20.endpoints.accounts as accounts
17 | import oandapyV20.endpoints.instruments as instruments
18 | import configparser
19 |
20 | import pandas as pd
21 | import json
22 |
23 | import q_credentials.db_secmaster_cred as db_secmaster_cred
24 | import q_credentials.oanda_cred as oanda_cred
25 |
26 | def parse_wiki_forex():
27 | """
28 | Download current list of OANDA Instruments.
29 | return:
30 | list of tuples to add to PostgreSQL.
31 | """
32 | now = datetime.datetime.utcnow()
33 | accountID=oanda_cred.acc_id_practice
34 | token=oanda_cred.token_practice
35 | client = oandapyV20.API(access_token=token)
36 |
37 | r = accounts.AccountInstruments(accountID=accountID)
38 | rv = client.request(r)
39 |
40 | df=pd.read_json(json.dumps(rv, indent=2))
41 | df=pd.io.json.json_normalize(data=df['instruments'])
42 |
43 | symbols = []
44 | for i, symbol in df.iterrows():
45 | symbols.append(
46 | (symbol['name'],'Forex',
47 | symbol['displayName'],
48 | 'Forex', 'USD', now, now)
49 | )
50 | return symbols
51 |
52 | def insert_new_vendor(vendor, conn):
53 | """
54 | Create a new vendor in data_vendor table.
55 | args:
56 | vendor: name of our vendor, type string.
57 | conn: a Postgres DB connection object
58 | return:
59 | None
60 | """
61 | todays_date = datetime.datetime.utcnow()
62 | cur = conn.cursor()
63 | cur.execute(
64 | "INSERT INTO data_vendor(name, created_date, last_updated_date) VALUES (%s, %s, %s)",
65 | (vendor, todays_date, todays_date)
66 | )
67 | conn.commit()
68 |
69 |
70 | def insert_forex_symbols_postgres(symbols, conn):
71 | """
72 | Load the OANDA forex symbols into our PostgreSQL database.
73 | args:
74 | symbols: list of tuples holding our instrument info data, each tuple being
75 | (ticker, instrument, name, sector, currency, created_date, last_updated_date).
76 | conn: a Postgres DB connection object.
77 | returns:
78 | None
79 | """
80 |
81 |
82 |
83 | column_str = """
84 | ticker, instrument, name, sector, currency, created_date, last_updated_date
85 | """
86 | insert_str = ("%s, " * 7)[:-2]
87 | final_str = "INSERT INTO symbol (%s) VALUES (%s)" % (column_str, insert_str)
88 | with conn:
89 | cur = conn.cursor()
90 | cur.executemany(final_str, symbols)
91 |
92 |
93 | def load_db_info(f_name_path):
94 | """
95 | load text file holding our database credential info and the database name
96 | args:
97 | f_name_path: name of file preceded with "\\", type string
98 | returns:
99 | array of 4 values that should match text file info
100 | """
101 | cur_path = os.getcwd()
102 | # lets load our database credentials and info
103 | f = open(cur_path + f_name_path, 'r')
104 | lines = f.readlines()[1:]
105 | lines = lines[0].split(',')
106 | return lines
107 |
108 |
109 | def main():
110 | db_host=db_secmaster_cred.dbHost
111 | db_user=db_secmaster_cred.dbUser
112 | db_password=db_secmaster_cred.dbPWD
113 | db_name=db_secmaster_cred.dbName
114 |
115 | # Connect to our PostgreSQL database
116 | conn = psycopg2.connect(host=db_host, database=db_name, user=db_user, password=db_password)
117 |
118 | symbols = parse_wiki_forex()
119 | insert_forex_symbols_postgres(symbols, conn)
120 | print("%s symbols were successfully added." % len(symbols))
121 |
122 | vendor = 'Oanda'
123 | # insert new vendor to data_vendor table and fetch its id needed for stock data dump
124 | try:
125 | insert_new_vendor(vendor, conn)
126 | print("Adding new Vendor ",vendor)
127 | except Exception:
128 | print("Vendor already exists:", vendor)
129 |
130 | if __name__ == "__main__":
131 | main()
--------------------------------------------------------------------------------
/Storage/q_pack/ml_pack/preprocessing/__pycache__/ml_preprocessing.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/ml_pack/preprocessing/__pycache__/ml_preprocessing.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/ml_pack/preprocessing/ml_preprocessing.py:
--------------------------------------------------------------------------------
1 | import pandas as pd
2 | import boto3
3 | from io import StringIO
4 |
5 | def ml_preprocessing(input_file,bucket="model-support-files",fwd_returns=5):
6 | s3 = boto3.client('s3',endpoint_url="http://minio-image:9000",aws_access_key_id="minio-image",aws_secret_access_key="minio-image-pass")
7 | Bucket=bucket
8 | Key=input_file
9 | read_file = s3.get_object(Bucket=Bucket, Key=Key)
10 | df = pd.read_csv(read_file['Body'],sep=',',index_col=['datetime'],parse_dates=True)
11 | df=df.loc[:, df.columns != 'ATR'] # Removing the ATR indicator if it exists
12 | df['fwd_returns']=df.groupby("security")["close"].pct_change(fwd_returns) # window length comes from the fwd_returns parameter
13 | df.sort_values(by='datetime',inplace=True)
14 | df=df.reset_index().drop(columns=['datetime','security','close'])
15 | csv_buffer = StringIO()
16 | df.dropna(inplace=True)
17 | df.to_csv(csv_buffer,index=False)
18 | s3.put_object(Bucket=Bucket, Key=("processed_"+Key),Body=csv_buffer.getvalue())
19 | return ("processed_"+Key)
--------------------------------------------------------------------------------
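A minimal usage sketch, assuming the MinIO service from this stack is reachable and the input follows the "<run_id>_ml_log.csv" naming used by the logger analyzer:

from ml_pack.preprocessing.ml_preprocessing import ml_preprocessing

# writes the cleaned frame back to the bucket and returns its key
processed_key = ml_preprocessing("2_ml_log.csv", bucket="model-support-files",
                                 fwd_returns=5)
print(processed_key)  # "processed_2_ml_log.csv"

--------------------------------------------------------------------------------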
/Storage/q_pack/q_analyzers/__pycache__/bt_analyzers.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/q_analyzers/__pycache__/bt_analyzers.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/q_analyzers/__pycache__/bt_logger_analyzer.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/q_analyzers/__pycache__/bt_logger_analyzer.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/q_analyzers/__pycache__/bt_perform_analyzer.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/q_analyzers/__pycache__/bt_perform_analyzer.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/q_analyzers/__pycache__/bt_pos_perform_analyzer.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/q_analyzers/__pycache__/bt_pos_perform_analyzer.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/q_analyzers/__pycache__/bt_strat_perform_analyzer.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/q_analyzers/__pycache__/bt_strat_perform_analyzer.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/q_analyzers/__pycache__/bt_strategy_id_analyzer.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/q_analyzers/__pycache__/bt_strategy_id_analyzer.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/q_analyzers/__pycache__/bt_transaction_analyzer.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/q_analyzers/__pycache__/bt_transaction_analyzer.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/q_analyzers/bt_logger_analyzer.py:
--------------------------------------------------------------------------------
1 | import backtrader as bt
2 | import datetime
3 | import pandas as pd
4 | import os
5 |
6 | import boto3
7 | from io import StringIO
8 |
9 | class logger_analyzer(bt.Analyzer):
10 |
11 | def get_analysis(self):
12 | return None
13 |
14 | def stop(self):
15 | ml_list=[]
16 | data_size=len(self.data)
17 | num_of_sec=len(self.datas)
18 | if self.strategy.p.backtest:
19 | for i, d in enumerate(self.datas):
20 | ml_dict={}
21 | data_size=len(d)
22 | ml_dict["security"]=[d._name]*data_size
23 | ml_dict["datetime"]=[self.data.num2date(x) for x in d.datetime.get(size=data_size)]#self.data
24 | ml_dict["open"]=d.open.get(size=data_size)
25 | ml_dict["high"]=d.high.get(size=data_size)
26 | ml_dict["low"]=d.low.get(size=data_size)
27 | ml_dict["close"]=d.close.get(size=data_size)
28 | # ml_dict["close"]=d.get(size=data_size)
29 | num_of_indicators=int(len(self.strategy.getindicators())/len(self.strategy.datas))
30 | for j in range(num_of_indicators):
31 | ml_dict[self.strategy.getindicators()[j*num_of_sec+i].aliased]=self.strategy.getindicators()[j*num_of_sec+i].get(size=data_size) # tested with indicators more than, fewer than, and equal to the number of securities
32 | ml_list.append(pd.DataFrame(ml_dict))
33 | ml_df = pd.concat(ml_list,axis=0)
34 | s3 = boto3.client('s3',endpoint_url="http://minio-image:9000",aws_access_key_id="minio-image",aws_secret_access_key="minio-image-pass")
35 | Bucket="model-support-files"
36 | Key=str(self.strategy.db_run_id)+"_ml_log.csv"
37 | csv_buffer = StringIO()
38 | ml_df.to_csv(csv_buffer,index=False)
39 | s3.put_object(Bucket=Bucket, Key=Key,Body=csv_buffer.getvalue())
40 | print("ML Log Saved in Minio Bucket:",Bucket,"as",Key)
41 |
42 |
--------------------------------------------------------------------------------
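A minimal sketch of attaching this analyzer; note it reads strategy.p.backtest and strategy.db_run_id at stop(), so the strategy it is attached to must expose both attributes:

import backtrader as bt
from q_analyzers.bt_logger_analyzer import logger_analyzer

cerebro = bt.Cerebro()
# dumps one "<run_id>_ml_log.csv" per backtest into the MinIO bucket
cerebro.addanalyzer(logger_analyzer, _name="ml_logger")

--------------------------------------------------------------------------------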
/Storage/q_pack/q_analyzers/bt_pos_perform_analyzer.py:
--------------------------------------------------------------------------------
1 | ####
2 | # Important note: if a position is already open in the account when live trading starts, this analyzer will error out, because it records round-trip trades only
3 | ####
4 | import backtrader as bt
5 | import datetime
6 |
7 | import psycopg2
8 | import q_credentials.db_risk_cred as db_risk_cred
9 | import q_tools.write_to_db as write_to_db
10 |
11 | class pos_performance_analyzer(bt.Analyzer):
12 |
13 | def get_analysis(self):
14 |
15 | return self.trades
16 |
17 |
18 | def __init__(self):
19 |
20 | self.trades = []
21 | self.cumprofit = 0.0
22 | self.conn = psycopg2.connect(host=db_risk_cred.dbHost , database=db_risk_cred.dbName, user=db_risk_cred.dbUser, password=db_risk_cred.dbPWD)
23 |
24 | def notify_trade(self, trade):
25 |
26 | if trade.isclosed:
27 |
28 | brokervalue = self.strategy.broker.getvalue()
29 |
30 | dir = 'short'
31 | if trade.history[0].event.size > 0: dir = 'long'
32 |
33 | pricein = trade.history[len(trade.history)-1].status.price
34 | priceout = trade.history[len(trade.history)-1].event.price
35 | datein = bt.num2date(trade.history[0].status.dt)
36 | dateout = bt.num2date(trade.history[len(trade.history)-1].status.dt)
37 | if trade.data._timeframe >= bt.TimeFrame.Days:
38 | datein = datein.date()
39 | dateout = dateout.date()
40 |
41 | pcntchange = 100 * priceout / pricein - 100
42 | pnl = trade.history[len(trade.history)-1].status.pnlcomm
43 | pnlpcnt = 100 * pnl / brokervalue
44 | barlen = trade.history[len(trade.history)-1].status.barlen
45 | pbar = (pnl / barlen) if barlen else pnl # to avoid divide by 0 error
46 | self.cumprofit += pnl
47 |
48 | size = value = 0.0
49 | for record in trade.history:
50 | if abs(size) < abs(record.status.size):
51 | size = record.status.size
52 | value = record.status.value
53 |
54 | highest_in_trade = max(trade.data.high.get(ago=0, size=barlen+1))
55 | lowest_in_trade = min(trade.data.low.get(ago=0, size=barlen+1))
56 | hp = 100 * (highest_in_trade - pricein) / pricein
57 | lp = 100 * (lowest_in_trade - pricein) / pricein
58 | if dir == 'long':
59 | mfe = hp
60 | mae = lp
61 | if dir == 'short':
62 | mfe = -lp
63 | mae = -hp
64 |
65 | analyzer_result={'run_id':self.strategy.db_run_id,'strategy':self.strategy.alias,'recorded_time':datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"),'ref': trade.ref, 'ticker': trade.data._name, 'direction': dir,
66 | 'datein': datein, 'pricein': pricein, 'dateout': dateout, 'priceout': priceout,
67 | 'change_percentage': round(pcntchange, 2), 'pnl': pnl, 'pnl_percentage': round(pnlpcnt, 2),
68 | 'size': size, 'value': value, 'cumpnl': self.cumprofit,
69 | 'nbars': barlen, 'pnl_per_bar': round(pbar, 2),
70 | 'mfe_percentage': round(mfe, 2), 'mae_percentage': round(mae, 2)}
71 |
72 | write_to_db.write_to_db(conn=self.conn, data_dict=analyzer_result, table='position_performance')
73 | self.trades.append(analyzer_result)
74 |
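
For context, a minimal sketch of pulling these rows back out of the analyzer after a run. The names mirror run_BT.py, which registers this analyzer as 'pos_perf' and runs cerebro with tradehistory=True (required, since this analyzer reads trade.history):

    # cerebro is assumed to be configured as in run_BT.py
    results = cerebro.run(tradehistory=True)
    trades = results[0].analyzers.pos_perf.get_analysis()
    for row in trades:
        print(row['ticker'], row['direction'], row['pnl'],
              row['mfe_percentage'], row['mae_percentage'])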
--------------------------------------------------------------------------------
/Storage/q_pack/q_analyzers/bt_strat_perform_analyzer.py:
--------------------------------------------------------------------------------
1 | #####
2 | # Run, broken apart and modified from https://github.com/backtrader/backtrader/blob/master/backtrader/analyzers/transactions.py
3 | #####
4 |
5 | from __future__ import (absolute_import, division, print_function,
6 | unicode_literals)
7 |
8 |
9 | import collections
10 |
11 | import backtrader as bt
12 | import psycopg2
13 | import q_credentials.db_risk_cred as db_risk_cred
14 | import q_tools.write_to_db as write_to_db
15 | import datetime
16 |
17 | class strat_performance_analyzer(bt.Analyzer):
18 |
19 | def __init__(self):
20 | self.performance = {}
21 | self.conn = psycopg2.connect(host=db_risk_cred.dbHost , database=db_risk_cred.dbName, user=db_risk_cred.dbUser, password=db_risk_cred.dbPWD)
22 | self.analyzer_sharpe = bt.analyzers.SharpeRatio()
23 | self.analyzer_returns = bt.analyzers.Returns()
24 | self.analyzer_sqn = bt.analyzers.SQN()
25 | self.analyzer_drawdown = bt.analyzers.DrawDown()
26 | self.analyzer_tradeanalyzer = bt.analyzers.TradeAnalyzer()
27 |
28 | def stop(self):
29 | self.performance['run_id']=self.strategy.db_run_id
30 | self.performance['total_open']=self.analyzer_tradeanalyzer.get_analysis().total.open
31 | self.performance['total_closed'] = self.analyzer_tradeanalyzer.get_analysis().total.closed
32 | self.performance['total_won'] = self.analyzer_tradeanalyzer.get_analysis().won.total
33 | self.performance['total_lost'] = self.analyzer_tradeanalyzer.get_analysis().lost.total
34 | self.performance['win_streak'] = self.analyzer_tradeanalyzer.get_analysis().streak.won.longest
35 | self.performance['lose_streak'] = self.analyzer_tradeanalyzer.get_analysis().streak.lost.longest
36 | self.performance['pnl_net'] = round(self.analyzer_tradeanalyzer.get_analysis().pnl.net.total,2)
37 | self.performance['strike_rate'] = (self.performance['total_won'] / self.performance['total_closed']) * 100 if self.performance['total_closed'] else 0.0 # avoid divide by 0 when no trade closed
38 | self.performance['sqn']=self.analyzer_sqn.get_analysis()['sqn']
39 | self.performance['total_compound_return']=self.analyzer_returns.get_analysis()['rtot']
40 | self.performance['avg_return']=self.analyzer_returns.get_analysis()['ravg']
41 | self.performance['annual_norm_return']=self.analyzer_returns.get_analysis()['rnorm100']
42 | self.performance['max_draw_per']=self.analyzer_drawdown.get_analysis()['max']['drawdown']
43 | self.performance['max_draw_val']=self.analyzer_drawdown.get_analysis()['max']['moneydown']
44 | self.performance['max_draw_len']=self.analyzer_drawdown.get_analysis()['max']['len']
45 |
46 | write_to_db.write_to_db(conn=self.conn, data_dict=self.performance, table='strategy_performance')
47 |
48 |
49 | def get_analysis(self):
50 | return self.performance
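
A sketch of reading these rows back from Postgres for inspection, assuming the strategy_performance table mirrors the keys written above (credentials come from q_credentials.db_risk_cred):

    import pandas as pd
    from sqlalchemy import create_engine
    import q_credentials.db_risk_cred as db_risk_cred

    engine = create_engine('postgresql+psycopg2://' + db_risk_cred.dbUser + ':' + db_risk_cred.dbPWD
                           + '@' + db_risk_cred.dbHost + '/' + db_risk_cred.dbName)
    perf = pd.read_sql("select * from strategy_performance order by run_id desc", engine)
    print(perf[['run_id', 'pnl_net', 'strike_rate', 'sqn']].head())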
--------------------------------------------------------------------------------
/Storage/q_pack/q_analyzers/bt_strategy_id_analyzer.py:
--------------------------------------------------------------------------------
1 | #####
2 | # Run, broken apart and modified from https://github.com/backtrader/backtrader/blob/master/backtrader/analyzers/transactions.py
3 | #####
4 |
5 | from __future__ import (absolute_import, division, print_function,
6 | unicode_literals)
7 |
8 |
9 | import collections
10 |
11 | import backtrader as bt
12 | from backtrader import Order, Position
13 | import psycopg2
14 | import q_credentials.db_risk_cred as db_risk_cred
15 | import datetime
16 | import q_tools.write_to_db as write_to_db
17 |
18 | class strategy_id_analyzer(bt.Analyzer):
19 |
20 | def __init__(self):
21 | self.strat_info = {}
22 | self.conn = psycopg2.connect(host=db_risk_cred.dbHost , database=db_risk_cred.dbName, user=db_risk_cred.dbUser, password=db_risk_cred.dbPWD)
23 | self.current_time=datetime.datetime.now()
24 |
25 | def get_analysis(self):
26 | return self.strat_info
27 |
28 | def start(self):
29 | info_run_type = 'Backtest' if self.strategy.p.backtest else 'Live'
30 | info_tickers=','.join(self.strategy.getdatanames())
31 | info_indicators = ','.join([i.aliased for i in (self.strategy.getindicators())])
32 | info_timeframe = self.strategy.data0._timeframe # This is currently a number, have to change it later
33 | if self.strategy.p.backtest:
34 | info_start_date = bt.num2date(self.strategy.data0.fromdate) # would have to change for live due to the backfill.
35 | info_end_date = bt.num2date(self.strategy.data0.todate)
36 | else:
37 | info_start_date = self.current_time # would have to change for live due to the backfill.
38 | info_end_date = None
39 |
40 | # info_end_date = None # removed: this line unconditionally clobbered the backtest end date set above
41 | info_account = ""
42 | info_log_file = ""
43 | self.strat_info={'run_type':info_run_type,'recorded_time':self.current_time,'start_time':info_start_date,'end_time':info_end_date,
44 | 'strategy':self.strategy.alias,'tickers':info_tickers,'indicators':info_indicators,'frequency':info_timeframe,'account':info_account,'log_file':info_log_file}
45 |
46 | self.strategy.db_run_id=write_to_db.write_to_db(conn=self.conn, data_dict=self.strat_info, table='run_information',return_col='run_id')
47 |
48 |
--------------------------------------------------------------------------------
/Storage/q_pack/q_analyzers/bt_transaction_analyzer.py:
--------------------------------------------------------------------------------
1 | #####
2 | # Run, broken apart and modified from https://github.com/backtrader/backtrader/blob/master/backtrader/analyzers/transactions.py
3 | #####
4 |
5 | from __future__ import (absolute_import, division, print_function,
6 | unicode_literals)
7 |
8 |
9 | import collections
10 |
11 | import backtrader as bt
12 | from backtrader import Order, Position
13 | import psycopg2
14 | import q_credentials.db_risk_cred as db_risk_cred
15 | import datetime
16 | import q_tools.write_to_db as write_to_db
17 |
18 | class transactions_analyzer(bt.Analyzer):
19 | '''This analyzer reports the transactions that occurred with each and every data in
20 | the system
21 |
22 | It looks at the order execution bits to create a ``Position`` starting from
23 | 0 during each ``next`` cycle.
24 |
25 | The result is used during next to record the transactions
26 |
27 | Params:
28 |
29 | - headers (default: ``False``)
30 |
31 | Add an initial key to the dictionary holding the results with the names
32 | of the datas
33 |
34 | This analyzer was modeled to facilitate the integration with
35 | ``pyfolio`` and the header names are taken from the samples used for
36 | it::
37 |
38 | 'date', 'amount', 'price', 'sid', 'symbol', 'value'
39 |
40 | Methods:
41 |
42 | - get_analysis
43 |
44 | Returns the list of recorded transaction dicts (the same rows written
45 | to the ``positions`` table)
46 | '''
47 | params = (
48 | ('headers', False),
49 | ('_pfheaders', ('date', 'amount', 'price', 'sid', 'symbol', 'value')),
50 | )
51 |
52 | def __init__(self):
53 | self.trades = []
54 | self.conn = psycopg2.connect(host=db_risk_cred.dbHost , database=db_risk_cred.dbName, user=db_risk_cred.dbUser, password=db_risk_cred.dbPWD)
55 |
56 | def get_analysis(self):
57 | return self.trades
58 |
59 | def start(self):
60 | super(transactions_analyzer, self).start()
61 | if self.p.headers:
62 | self.rets[self.p._pfheaders[0]] = [list(self.p._pfheaders[1:])]
63 |
64 | self._positions = collections.defaultdict(Position)
65 | self._idnames = list(enumerate(self.strategy.getdatanames()))
66 |
67 | def notify_order(self, order):
68 | # An order could have several partial executions per cycle (unlikely
69 | # but possible) and therefore: collect each new execution notification
70 | # and let the work for next
71 |
72 | # We use a fresh Position object for each round to get summary of what
73 | # the execution bits have done in that round
74 | if order.status not in [Order.Partial, Order.Completed]:
75 | return # It's not an execution
76 |
77 | pos = self._positions[order.data._name]
78 | for exbit in order.executed.iterpending():
79 | if exbit is None:
80 | break # end of pending reached
81 |
82 | pos.update(exbit.size, exbit.price)
83 |
84 | def next(self):
85 | # super(Transactions, self).next() # let dtkey update
86 | entries = [] # leftover from the upstream Transactions analyzer; only used by the commented-out lines below
87 | for i, dname in self._idnames:
88 | pos = self._positions.get(dname, None)
89 | if pos is not None:
90 | size, price = pos.size, pos.price
91 | if size:
92 | # instead of datetime.now() you can use self.strategy.current_time
93 | analyzer_result={'run_id':self.strategy.db_run_id,'recorded_time':datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"),'strategy':self.strategy.alias,
94 | 'transaction_date':self.strategy.datetime.datetime(),'size':size, 'price':price, 'sid':i, 'ticker':dname, 'value':(-size * price)}
95 | write_to_db.write_to_db(conn=self.conn, data_dict=analyzer_result, table='positions')
96 | self.trades.append(analyzer_result)
97 | # if entries:
98 | # self.rets[self.strategy.datetime.datetime()] = entries
99 |
100 | self._positions.clear()
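
Since the docstring above references the pyfolio transaction headers, here is a sketch of reshaping get_analysis() into that frame. It assumes strat = results[0] from cerebro.run(), with the analyzer registered as 'position_list' as in run_BT.py:

    import pandas as pd

    txns = strat.analyzers.position_list.get_analysis()
    df = (pd.DataFrame(txns)
            .rename(columns={'transaction_date': 'date', 'size': 'amount', 'ticker': 'symbol'})
            .set_index('date'))
    df = df[['amount', 'price', 'sid', 'symbol', 'value']]  # pyfolio-style columns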
--------------------------------------------------------------------------------
/Storage/q_pack/q_credentials/__pycache__/db_cred.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/q_credentials/__pycache__/db_cred.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/q_credentials/__pycache__/db_risk_cred.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/q_credentials/__pycache__/db_risk_cred.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/q_credentials/__pycache__/db_secmaster_cred.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/q_credentials/__pycache__/db_secmaster_cred.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/q_credentials/__pycache__/oanda_cred.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/q_credentials/__pycache__/oanda_cred.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/q_credentials/db_risk_cred.py:
--------------------------------------------------------------------------------
1 | dbHost="postgres_secmaster"
2 | dbUser="postgres"
3 | dbPWD="posgres349"
4 | dbName="risk_db"
5 |
--------------------------------------------------------------------------------
/Storage/q_pack/q_credentials/db_secmaster_cred.py:
--------------------------------------------------------------------------------
1 | dbHost="postgres_secmaster"
2 | dbUser="postgres"
3 | dbPWD="posgres349"
4 | dbName="securities_master"
--------------------------------------------------------------------------------
/Storage/q_pack/q_credentials/oanda_cred.py:
--------------------------------------------------------------------------------
1 | acc_id_practice="101-001-5119662-001"
2 | token_practice="5a4146aa9dd7becee00fd86618d79671-62d870e9f17e7eff11853125b82a75bd"
3 |
--------------------------------------------------------------------------------
/Storage/q_pack/q_datafeeds/__pycache__/bt_datafeed_postgres.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/q_datafeeds/__pycache__/bt_datafeed_postgres.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/q_datafeeds/bt_datafeed_postgres.py:
--------------------------------------------------------------------------------
1 | # from __future__ import (absolute_import, division, print_function,
2 | # unicode_literals)
3 |
4 | import datetime
5 | from backtrader.feed import DataBase
6 | from backtrader import date2num
7 | from sqlalchemy import create_engine
8 |
9 |
10 | class PostgreSQL_Daily(DataBase):
11 | params = (
12 | ('dbHost', None),
13 | ('dbUser', None),
14 | ('dbPWD', None),
15 | ('dbName', None),
16 | ('ticker', 'EUR_USD'),
17 | ('fromdate', datetime.datetime.min),
18 | ('todate', datetime.datetime.max),
19 | ('name', ''),
20 | )
21 |
22 | def __init__(self):
23 | self.engine = create_engine('postgresql+psycopg2://'+self.p.dbUser+':'+ self.p.dbPWD +'@'+ self.p.dbHost +'/'+ self.p.dbName)
24 | # self.engine = psycopg2.connect(host=self.p.dbHost, database=self.p.dbName, user=self.p.dbUser, password=self.p.dbPWD)
25 |
26 | def start(self):
27 | self.conn = self.engine.connect()
28 | sql = "select a.date_price date, a.open_price as open, a.high_price high, a.low_price low, a.close_price close from daily_data a inner join symbol b on a.stock_id = b.id where b.ticker='"+ self.p.ticker + "' and a.date_price between '"+self.p.fromdate.strftime("%Y-%m-%d")+"' and '"+self.p.todate.strftime("%Y-%m-%d")+"' order by date ASC" # select order now matches the open/high/low/close assignment order in _load
29 | self.result = self.conn.execute(sql)
30 |
31 |
32 | def stop(self):
33 | #self.conn.close()
34 | self.engine.dispose()
35 |
36 | def _load(self):
37 | one_row = self.result.fetchone()
38 | if one_row is None:
39 | return False
40 | self.lines.datetime[0] = date2num(one_row[0])
41 | self.lines.open[0] = float(one_row[1])
42 | self.lines.high[0] = float(one_row[2])
43 | self.lines.low[0] = float(one_row[3])
44 | self.lines.close[0] = float(one_row[4])
45 | # self.lines.volume[0] = int(one_row[5])
46 | self.lines.openinterest[0] = -1
47 | return True
48 |
49 | class PostgreSQL_Minute(DataBase):
50 | params = (
51 | ('dbHost', None),
52 | ('dbUser', None),
53 | ('dbPWD', None),
54 | ('dbName', None),
55 | ('ticker', 'EUR_USD'),
56 | ('fromdate', datetime.datetime.min),
57 | ('todate', datetime.datetime.max),
58 | ('name', ''),
59 | )
60 |
61 | def __init__(self):
62 | self.engine = create_engine('postgresql+psycopg2://'+self.p.dbUser+':'+ self.p.dbPWD +'@'+ self.p.dbHost +'/'+ self.p.dbName)
63 | print(self.engine)
64 | # self.engine = psycopg2.connect(host=self.p.dbHost, database=self.p.dbName, user=self.p.dbUser, password=self.p.dbPWD)
65 |
66 | def start(self):
67 | self.conn = self.engine.connect()
68 | sql = "select a.date_price date, a.open_price as open, a.high_price high, a.low_price low, a.close_price close from minute_data a inner join symbol b on a.stock_id = b.id where b.ticker='"+ self.p.ticker + "' and a.date_price between '"+self.p.fromdate.strftime("%Y-%m-%d")+"' and '"+self.p.todate.strftime("%Y-%m-%d")+"' order by date ASC" # select order now matches the open/high/low/close assignment order in _load
69 | self.result = self.conn.execute(sql)
70 |
71 |
72 | def stop(self):
73 | #self.conn.close()
74 | self.engine.dispose()
75 |
76 | def _load(self):
77 | one_row = self.result.fetchone()
78 | if one_row is None:
79 | return False
80 | self.lines.datetime[0] = date2num(one_row[0])
81 | self.lines.open[0] = float(one_row[1])
82 | self.lines.high[0] = float(one_row[2])
83 | self.lines.low[0] = float(one_row[3])
84 | self.lines.close[0] = float(one_row[4])
85 | # self.lines.volume[0] = int(one_row[5])
86 | self.lines.openinterest[0] = -1
87 | return True
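
The queries above splice the ticker and dates straight into the SQL string. A bound-parameter equivalent (a sketch against the same daily_data/symbol schema, using the conn opened in start()) avoids quoting problems:

    from sqlalchemy import text

    sql = text("select a.date_price, a.open_price, a.high_price, a.low_price, a.close_price "
               "from daily_data a inner join symbol b on a.stock_id = b.id "
               "where b.ticker = :ticker and a.date_price between :fromdate and :todate "
               "order by a.date_price asc")
    result = conn.execute(sql, {"ticker": "EUR_USD",
                                "fromdate": "2010-01-01", "todate": "2019-07-30"})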
--------------------------------------------------------------------------------
/Storage/q_pack/q_run/run_BT.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import datetime
3 | import glob
4 | import os.path
5 | import backtrader as bt
6 |
7 |
8 | import btoandav20
9 | import pytz
10 |
11 | import q_datafeeds.bt_datafeed_postgres as bt_datafeed_postgres
12 | from q_strategies import *
13 | import q_credentials.oanda_cred as oanda_cred
14 | import q_credentials.db_secmaster_cred as db_cred
15 | import q_analyzers.bt_strat_perform_analyzer as bt_strat_performance_analyzer
16 | import q_analyzers.bt_pos_perform_analyzer as bt_pos_performance_analyzer
17 | import q_analyzers.bt_transaction_analyzer as bt_trans_analyzer
18 | import q_analyzers.bt_strategy_id_analyzer as bt_strategy_id_analyzer
19 | import q_analyzers.bt_logger_analyzer as bt_logger_analyzer
20 | import q_tools.args_parse_other as args_parse_other
21 |
22 | def run(args=None):
23 | args = parse_args(args)
24 |
25 | cerebro = bt.Cerebro()
26 |
27 | # Data feed kwargs
28 | dkwargs = eval('dict(' + args.dargs + ')') # parse k1=v1,k2=v2 kwargs; eval assumes trusted input
29 |
30 | ticker_list=args.tickers[0].split(',')
31 |
32 | dtfmt, tmfmt = '%Y-%m-%d', 'T%H:%M:%S'
33 | if args.fromdate:
34 | fmt = dtfmt + tmfmt * ('T' in args.fromdate)
35 | dkwargs['fromdate'] = datetime.datetime.strptime(args.fromdate, fmt)
36 |
37 | if args.todate:
38 | fmt = dtfmt + tmfmt * ('T' in args.todate)
39 | dkwargs['todate'] = datetime.datetime.strptime(args.todate, fmt)
40 |
41 | cerebro.addanalyzer(bt_trans_analyzer.transactions_analyzer,_name='position_list')
42 | cerebro.addanalyzer(bt_strategy_id_analyzer.strategy_id_analyzer,_name='strategy_id')
43 | cerebro.addanalyzer(bt_strat_performance_analyzer.strat_performance_analyzer,_name='strat_perf')
44 | cerebro.addanalyzer(bt_pos_performance_analyzer.pos_performance_analyzer,_name='pos_perf')
45 |
46 | if args.ml_log:
47 | cerebro.addanalyzer(bt_logger_analyzer.logger_analyzer,_name='ml_logger')
48 |
49 | if args.mode=='live':
50 | oandastore = btoandav20.stores.OandaV20Store(token=args.broker_token, account=args.broker_account, practice=True)
51 | for ticker in ticker_list:
52 | data = oandastore.getdata(dataname = ticker,timeframe = bt.TimeFrame.Minutes,compression=1,tz=pytz.timezone('US/Eastern'))
53 | cerebro.adddata(data)
54 | cerebro.broker = oandastore.getbroker()
55 | cerebro.addstrategy(globals()[args.strat_name].St, backtest=False)
56 |
57 | elif args.mode=='backtest':
58 |
59 | for ticker in ticker_list:
60 | data = bt_datafeed_postgres.PostgreSQL_Daily(dbHost=db_cred.dbHost,dbUser=db_cred.dbUser,dbPWD=db_cred.dbPWD,dbName=db_cred.dbName,ticker=ticker, name=ticker,**dkwargs)
61 | cerebro.adddata(data)
62 | cerebro.broker.setcash(args.cash)
63 | cerebro.addstrategy(globals()[args.strat_name].St, **args.strat_param)
64 |
65 |
66 |
67 | cerebro.addsizer(bt.sizers.FixedSize, stake=1000)
68 |
69 |
70 | results = cerebro.run(tradehistory=True)
71 |
72 | pnl = cerebro.broker.get_value() - args.cash
73 | print('Profit ... or Loss: {:.2f}'.format(pnl))
74 |
75 | strats = results
76 | if args.plot:
77 | cerebro.plot(style='candlestick',iplot=False,volume=False)
78 |
79 |
80 |
81 | def parse_args(pargs=None):
82 | parser = argparse.ArgumentParser(
83 | formatter_class=argparse.ArgumentDefaultsHelpFormatter,
84 | description=('Run a strategy in backtest mode against Postgres data or live against Oanda'),
85 | )
86 |
87 | parser.add_argument('--tickers', nargs='*' ,required=False,default=['EUR_USD,GBP_USD'], type=str,
88 | help='Comma-separated list of tickers, e.g. EUR_USD,GBP_USD')
89 |
90 | parser.add_argument('--dargs', default='',
91 | metavar='kwargs', help='kwargs in k1=v1,k2=v2 format')
92 |
93 | parser.add_argument('--fromdate', required=False, default='2010-1-1',
94 | help='Date[time] in YYYY-MM-DD[THH:MM:SS] format')
95 |
96 | parser.add_argument('--todate', required=False, default='2019-7-30',
97 | help='Date[time] in YYYY-MM-DD[THH:MM:SS] format')
98 |
99 | parser.add_argument('--cerebro', required=False, default='',
100 | metavar='kwargs', help='kwargs in k1=v1,k2=v2 format')
101 |
102 | parser.add_argument('--cash', default=10000, type=float,
103 | help='Starting cash for the backtest broker')
104 |
105 | parser.add_argument('--strat_name', required=False, default='simple_strategy_2',
106 | help='Name of the strategy module in q_strategies to run')
107 |
108 | parser.add_argument('--strat_param', required=False, default=dict(ml_serving=False),
109 | action=args_parse_other.StoreDictKeyPair, metavar='kwargs', help='kwargs in k1=v1,k2=v2 format')
110 |
111 | parser.add_argument('--ml_log', required=False, default=False, type=args_parse_other.str2bool, const=True, nargs='?',
112 | help='To save ML log or not')
113 |
114 | parser.add_argument('--mode', required=False, default='backtest',
115 | help='Live or Backtest')
116 |
117 | parser.add_argument('--broker_token', required=False, default=oanda_cred.token_practice,
118 | help='Oanda Broker Token id')
119 |
120 | parser.add_argument('--broker_account', required=False, default=oanda_cred.acc_id_practice,
121 | help='Oanda Broker Account id')
122 |
123 | parser.add_argument('--plot', required=False, default=False, type=args_parse_other.str2bool, const=True, nargs='?',
124 | help='Plot the results')
125 |
126 | return parser.parse_args(pargs)
127 |
128 |
129 | if __name__ == '__main__':
130 | run()
131 |
132 |
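
An illustrative programmatic invocation (argument values are examples, not the only valid ones). Note that values passed via --strat_param arrive as strings, so 'ml_serving=False' would become the truthy string 'False'; the boolean is best left at its default:

    run(args=['--mode', 'backtest',
              '--tickers', 'EUR_USD,GBP_USD',
              '--fromdate', '2018-01-01', '--todate', '2019-07-30',
              '--strat_name', 'simple_strategy_2',
              '--ml_log', 'true'])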
--------------------------------------------------------------------------------
/Storage/q_pack/q_strategies/__init__.py:
--------------------------------------------------------------------------------
1 | __all__ = ["simple_strategy_2", "simple_strategy"]
--------------------------------------------------------------------------------
/Storage/q_pack/q_strategies/__pycache__/__init__.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/q_strategies/__pycache__/__init__.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/q_strategies/__pycache__/simple_strategy.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/q_strategies/__pycache__/simple_strategy.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/q_strategies/__pycache__/simple_strategy_2.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/q_strategies/__pycache__/simple_strategy_2.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/q_strategies/simple_strategy.py:
--------------------------------------------------------------------------------
1 | ####
2 | # 1) Has the logging functionality embedded into the BT strategy file
3 | ####
4 |
5 |
6 |
7 | import backtrader as bt
8 | import backtrader.indicators as btind
9 | import datetime
10 | import psycopg2
11 | import pandas as pd
12 | import os
13 |
14 |
15 |
16 | class St(bt.Strategy):
17 | alias = 'Simple Strategy'
18 | params = dict(
19 | period=10,
20 | limdays=200,
21 | backtest=True,
22 | ml_serving=False,
23 | )
24 |
25 |
26 | def log(self, arg):
27 | if not self.p.backtest:
28 | print('{} {}'.format(self.datetime.datetime(), arg))
29 |
30 |
31 | def __init__(self):
32 | self.ml_log = []
33 | self.db_run_id = None
34 | self.sma = [bt.indicators.SimpleMovingAverage(d, period=self.p.period) for d in self.datas] # p.period defaults to 10, so behavior is unchanged
35 | self.sma2 = [bt.indicators.SimpleMovingAverage(d, period=20) for d in self.datas]
36 | self.sma3 = [bt.indicators.SimpleMovingAverage(d, period=50) for d in self.datas]
37 | for i in self.sma:
38 | i.aliased='MovingAverageSimple_0'
39 | for i in self.sma2:
40 | i.aliased='MovingAverageSimple_1'
41 | for i in self.sma3:
42 | i.aliased='MovingAverageSimple_2'
43 |
44 | self.order = None
45 | self.buyprice = None
46 | self.buycomm = None
47 | # if arg:
48 | if self.p.backtest:
49 | self.datastatus = 1
50 | else:
51 | self.datastatus = 0
52 |
53 |
54 | def notify_data(self, data, status, *args, **kwargs):
55 | print('*' * 5, 'DATA NOTIF:', data._getstatusname(status), *args)
56 | if status == data.LIVE:
57 | # self.counttostop = self.p.stopafter
58 | self.datastatus = 1
59 |
60 | def notify_order(self, order):
61 | if order.status in [order.Submitted, order.Accepted]:
62 | # Buy/Sell order submitted/accepted to/by broker - Nothing to do
63 | return
64 |
65 | # Check if an order has been completed
66 | # Attention: broker could reject order if not enough cash
67 | if order.status in [order.Completed]:
68 | if order.isbuy():
69 | self.log(
70 | 'BUY EXECUTED, Price: %.5f, Cost: %.2f, Comm %.2f' %
71 | (order.executed.price,
72 | order.executed.value,
73 | order.executed.comm))
74 |
75 | self.buyprice = order.executed.price
76 | self.buycomm = order.executed.comm
77 | else: # Sell
78 | self.log('SELL EXECUTED, Price: %.5f, Cost: %.2f, Comm %.2f' %
79 | (order.executed.price,
80 | order.executed.value,
81 | order.executed.comm))
82 |
83 | self.bar_executed = len(self)
84 |
85 | def notify_trade(self, trade):
86 | if not trade.isclosed:
87 | return
88 |
89 | self.log('OPERATION PROFIT, GROSS %.5f, NET %.2f' %
90 | (trade.pnl, trade.pnlcomm))
91 |
92 |
93 |
94 | def next(self):
95 | for i, d in enumerate(self.datas):
96 | dt, dn = self.datetime.datetime(), d._name
97 | pos = self.getposition(d).size
98 | if self.datastatus:
99 | if d.close[0] > self.sma[i] and pos<=0:
100 | self.order=self.close(data=d)
101 | self.order=self.buy(data=d)
102 | self.log('BUY CREATE {:.2f} at {}'.format(d.close[0],dn))
103 |
104 | elif d.close[0] < self.sma[i] and pos>=0:
105 | self.order=self.close(data=d)
106 | self.order=self.sell(data=d)
107 | self.log('SELL CREATE {:.2f} at {}'.format(d.close[0],dn))
108 |
109 | def stop(self):
110 | print("Strategy run finished with Run ID:",self.db_run_id)
--------------------------------------------------------------------------------
/Storage/q_pack/q_strategies/simple_strategy_2.py:
--------------------------------------------------------------------------------
1 | import backtrader as bt
2 | import backtrader.indicators as btind
3 | import datetime
4 | import psycopg2
5 | import pandas as pd
6 | import os
7 |
8 | import mlflow.pyfunc
9 |
10 | class St(bt.Strategy):
11 | alias = 'Simple Strategy'
12 | params = dict(
13 | period=10,
14 | limdays=200,
15 | backtest=True,
16 | ml_serving=False,
17 | model_uri="24cbdab283244fac8d54405d58b2bbf1"
18 | )
19 |
20 |
21 | def log(self, arg):
22 | if not self.p.backtest:
23 | print('{} {}'.format(self.datetime.datetime(), arg))
24 |
25 |
26 | def __init__(self):
27 | self.db_run_id = None
28 | self.rsi = [bt.indicators.RSI(d, period=30) for d in self.datas]
29 |
30 | self.stoc = [bt.indicators.Stochastic(d, period=20) for d in self.datas]
31 | self.atr = [bt.indicators.ATR(d, period=5) for d in self.datas]
32 | for i in self.rsi:
33 | i.aliased='RSI'
34 | for i in self.stoc:
35 | i.aliased='STOCHASTIC'
36 | for i in self.atr:
37 | i.aliased='ATR'
38 |
39 | self.order = None
40 | self.buyprice = None
41 | self.buycomm = None
42 | # if arg:
43 | if self.p.backtest:
44 | self.datastatus = 1
45 | else:
46 | self.datastatus = 0
47 |
48 | if self.p.ml_serving:
49 | print("s3://mlflow-models/"+self.p.model_uri+"/artifacts/model")
50 | self.model_predict=mlflow.pyfunc.load_model(model_uri=("s3://mlflow-models/"+self.p.model_uri+"/artifacts/model"))
51 |
52 |
53 | def notify_data(self, data, status, *args, **kwargs):
54 | print('*' * 5, 'DATA NOTIF:', data._getstatusname(status), *args)
55 | if status == data.LIVE:
56 | self.datastatus = 1
57 |
58 |
59 | def notify_order(self, order):
60 | if (order.status>1): # 0 and 1 are created and submitted
61 | self.log('Order Status: {}: Ref: {}, Size: {}, Price: {}' \
62 | .format(order.Status[order.status], order.ref, order.size,
63 | 'NA' if not order.price else round(order.price,5)))
64 |
65 | def notify_trade(self, trade):
66 | if not trade.isclosed:
67 | return
68 |
69 | self.log('OPERATION PROFIT, GROSS %.5f, NET %.2f' %
70 | (trade.pnl, trade.pnlcomm))
71 |
72 |
73 |
74 | def next(self):
75 | for i, d in enumerate(self.datas):
76 | dt, dn = self.datetime.datetime(), d._name
77 | pos = self.getposition(d).size
78 | order_valid = datetime.timedelta(self.p.limdays)
79 | if self.datastatus and pos==0:
80 | if self.p.ml_serving:
81 | pred=self.model_predict.predict([[self.rsi[i][0],self.stoc[i][0]]])[0]
82 | if pred>0:
83 | price_sl = d.close[0]-(self.atr[i] * 1) # use data i's ATR (self.atr[0] always used the first data's)
84 | price_tp = d.close[0]+(self.atr[i] * 2)
85 | self.order=self.buy_bracket(data=d,exectype=bt.Order.Market , stopprice=price_sl, limitprice=price_tp, valid=order_valid) #, valid=order_valid,price=None
86 | self.log('BUY CREATE {:.2f} at {}'.format(d.close[0],dn))
87 | elif pred<=0:
88 | price_sl = d.close[0]+(self.atr[i] * 1)
89 | price_tp = d.close[0]-(self.atr[i] * 2)
90 | self.order=self.sell_bracket(data=d,exectype=bt.Order.Market, stopprice=price_sl, limitprice=price_tp, valid=order_valid)
91 | self.log('SELL CREATE {:.2f} at {}'.format(d.close[0],dn))
92 |
93 | elif self.rsi[i] < 40:
94 | price_sl = d.close[0]-(self.atr[i] * 1)
95 | price_tp = d.close[0]+(self.atr[i] * 2)
96 | self.order=self.buy_bracket(data=d,exectype=bt.Order.Market , stopprice=price_sl, limitprice=price_tp, valid=order_valid) #, valid=order_valid,price=None
97 | self.log('BUY CREATE {:.2f} at {}'.format(d.close[0],dn))
98 |
99 | elif self.rsi[i] > 60:
100 | price_sl = d.close[0]+(self.atr[i] * 1)
101 | price_tp = d.close[0]-(self.atr[i] * 2)
102 | self.order=self.sell_bracket(data=d,exectype=bt.Order.Market, stopprice=price_sl, limitprice=price_tp, valid=order_valid)
103 | self.log('SELL CREATE {:.2f} at {}'.format(d.close[0],dn))
104 |
105 | def stop(self):
106 | print("Strategy run finished with Run ID:",self.db_run_id)
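
A sketch of sanity-checking the served model outside backtrader, assuming the MinIO/MLflow environment variables from docker-compose.yml. The run id is the one present under minio/storage/mlflow-models, and the [RSI, Stochastic] feature order matches the predict() call in next():

    import os
    import mlflow.pyfunc

    os.environ['MLFLOW_S3_ENDPOINT_URL'] = 'http://minio-image:9000'
    os.environ['AWS_ACCESS_KEY_ID'] = 'minio-image'
    os.environ['AWS_SECRET_ACCESS_KEY'] = 'minio-image-pass'

    model = mlflow.pyfunc.load_model('s3://mlflow-models/2a71796bd39c429d89a1cf2006624240/artifacts/model')
    print(model.predict([[45.0, 30.0]]))  # illustrative RSI/Stochastic values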
--------------------------------------------------------------------------------
/Storage/q_pack/q_tools/__pycache__/args_parse_dict.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/q_tools/__pycache__/args_parse_dict.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/q_tools/__pycache__/args_parse_other.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/q_tools/__pycache__/args_parse_other.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/q_tools/__pycache__/write_to_db.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/q_pack/q_tools/__pycache__/write_to_db.cpython-37.pyc
--------------------------------------------------------------------------------
/Storage/q_pack/q_tools/args_parse_other.py:
--------------------------------------------------------------------------------
1 | import argparse
2 |
3 | # Overcomes the problem of passing a dictionary (with string values) as a command-line argument
4 | # Sample usage
5 | # parser.add_argument('--strat_param', action=StoreDictKeyPair, metavar='kwargs', help='kwargs in k1=v1,k2=v2 format')
6 | class StoreDictKeyPair(argparse.Action):
7 | def __call__(self, parser, namespace, values, option_string=None):
8 | my_dict = {}
9 | for kv in values.split(","):
10 | k,v = kv.split("=")
11 | my_dict[k] = v
12 | setattr(namespace, self.dest, my_dict)
13 |
14 |
15 | # Parse boolean command-line arguments
16 | def str2bool(v):
17 | if isinstance(v, bool):
18 | return v
19 | if v.lower() in ('yes', 'true', 't', 'y', '1'):
20 | return True
21 | elif v.lower() in ('no', 'false', 'f', 'n', '0'):
22 | return False
23 | else:
24 | raise argparse.ArgumentTypeError('Boolean value expected.')
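
A quick illustration of both helpers; note that StoreDictKeyPair keeps every value as a string:

    parser = argparse.ArgumentParser()
    parser.add_argument('--strat_param', action=StoreDictKeyPair, metavar='kwargs')
    parser.add_argument('--plot', type=str2bool, nargs='?', const=True, default=False)
    args = parser.parse_args(['--strat_param', 'period=10,ml_serving=False', '--plot'])
    print(args.strat_param)  # {'period': '10', 'ml_serving': 'False'}  (values stay strings)
    print(args.plot)         # True (bare --plot uses const=True)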
--------------------------------------------------------------------------------
/Storage/q_pack/q_tools/write_to_db.py:
--------------------------------------------------------------------------------
1 | import psycopg2
2 |
3 | def write_to_db(conn, data_dict, table, return_col=""):
4 | cols= data_dict.keys()
5 | cols_val_list=['%('+i+')s' for i in cols]
6 | cols_val=", ".join(cols_val_list)
7 | cols=", ".join(cols)
8 |
9 | cur = conn.cursor()
10 | if return_col:
11 | sql="""INSERT INTO """+table+"""("""+cols+""") VALUES ("""+cols_val+""") RETURNING """+return_col
12 | else:
13 | sql="""INSERT INTO """+table+"""("""+cols+""") VALUES ("""+cols_val+""")"""
14 | cur.executemany(sql,[data_dict])
15 | cur.execute('SELECT LASTVAL()')
16 | db_run_id = cur.fetchall()[0][0] # LASTVAL() holds the id generated by the insert above; executemany cannot fetch RETURNING values directly
17 | conn.commit()
18 | if db_run_id:
19 | return db_run_id
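
Usage sketch mirroring the strategy-id analyzer's call (the column subset in data_dict here is illustrative; the real call passes the full run_information dict):

    import psycopg2
    import q_credentials.db_risk_cred as db_risk_cred
    import q_tools.write_to_db as write_to_db

    conn = psycopg2.connect(host=db_risk_cred.dbHost, database=db_risk_cred.dbName,
                            user=db_risk_cred.dbUser, password=db_risk_cred.dbPWD)
    run_id = write_to_db.write_to_db(conn=conn,
                                     data_dict={'run_type': 'Backtest', 'strategy': 'demo'},
                                     table='run_information', return_col='run_id')
    print(run_id)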
--------------------------------------------------------------------------------
/Storage/superset/superset.db:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/Storage/superset/superset.db
--------------------------------------------------------------------------------
/Storage/superset/superset_config.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | MAPBOX_API_KEY = os.getenv('MAPBOX_API_KEY', '')
4 | CACHE_CONFIG = {
5 | 'CACHE_TYPE': 'redis',
6 | 'CACHE_DEFAULT_TIMEOUT': 300,
7 | 'CACHE_KEY_PREFIX': 'superset_',
8 | 'CACHE_REDIS_HOST': 'redis',
9 | 'CACHE_REDIS_PORT': 6379,
10 | 'CACHE_REDIS_DB': 1,
11 | 'CACHE_REDIS_URL': 'redis://redis:6379/1'}
12 | SQLALCHEMY_DATABASE_URI = 'sqlite:////var/lib/superset/superset.db'
13 | SQLALCHEMY_TRACK_MODIFICATIONS = True
14 | SECRET_KEY = 'thisISaSECRET_1234'
15 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "3.7"
2 | services:
3 | minio-image:
4 | container_name: minio-image
5 | build:
6 | context: ./dockerfile_minio
7 | restart: always
8 | working_dir: "/minio-image/storage"
9 | volumes:
10 | - ${WD}/minio/storage:/minio-image/storage
11 | ports:
12 | - "9000:9000"
13 | environment:
14 | MINIO_ACCESS_KEY: minio-image
15 | MINIO_SECRET_KEY: minio-image-pass
16 | command: server /minio-image/storage
17 |
18 | mlflow-image:
19 | container_name: "mlflow-image"
20 | build:
21 | context: ./dockerfile_mlflowserver
22 | working_dir: "/mlflow-image"
23 | volumes:
24 | - ${WD}/mlflow:/mlflow-image
25 | environment:
26 | MLFLOW_S3_ENDPOINT_URL: http://minio-image:9000
27 | AWS_ACCESS_KEY_ID: minio-image
28 | AWS_SECRET_ACCESS_KEY: minio-image-pass
29 | ports:
30 | - "5500:5500"
31 | command: mlflow server --host 0.0.0.0 --port 5500 --backend-store-uri /mlflow-image/mlruns
32 |
33 |
34 | jupyter-image:
35 | container_name: "jupyter-image"
36 | build:
37 | context: ./dockerfile_jupyter_notebook
38 | volumes:
39 | - ${WD}/notebooks:/home/jovyan/work
40 | - ${WD}/q_pack:/home/jovyan/work/q_pack
41 | - ${WD}/mlflow:/mlflow-image
42 | environment:
43 | MLFLOW_S3_ENDPOINT_URL: http://minio-image:9000
44 | AWS_ACCESS_KEY_ID: minio-image
45 | AWS_SECRET_ACCESS_KEY: minio-image-pass
46 | MLFLOW_TRACKING_URI: http://mlflow-image:5500
47 | ports:
48 | - "8888:8888"
49 |
50 |
51 | redis:
52 | image: redis
53 | restart: always
54 | volumes:
55 | - redis:/data
56 |
57 | superset:
58 | container_name: "superset"
59 | build:
60 | context: ./dockerfile_superset
61 | restart: always
62 | depends_on:
63 | - redis
64 | environment:
65 | MAPBOX_API_KEY: ${MAPBOX_API_KEY}
66 | SUPERSET_HOME: /etc/superset
67 | ports:
68 | - "8088:8088"
69 | volumes:
70 | - ${WD}/superset/superset_config.py:/etc/superset/superset_config.py
71 | - ${WD}/superset/superset.db:/var/lib/superset/superset.db
72 |
73 | postgres_secmaster:
74 | image: postgres
75 | restart: always
76 | container_name: "my_postgres"
77 | ports:
78 | - 5431:5432 # postgres listens on 5432 inside the container
79 | environment:
80 | - SHARED_PASSWORD=password
81 | - POSTGRES_PASSWORD=posgres349
82 | volumes:
83 | - ${WD}/postgress_db/scripts/:/docker-entrypoint-initdb.d/
84 | - pg_data:/var/lib/postgresql/data
85 |
86 | pgadmin:
87 | image: dpage/pgadmin4
88 | container_name: "pgadmin"
89 | environment:
90 | PGADMIN_DEFAULT_EMAIL: "guest"
91 | PGADMIN_DEFAULT_PASSWORD: "guest"
92 | volumes:
93 | - ${WD}/pgadmin/:/var/lib/pgadmin
94 | ports:
95 | - 1234:80
96 | depends_on:
97 | - postgres_secmaster
98 |
99 | postgres:
100 | image: postgres
101 | container_name: "airflow_postgres"
102 | environment:
103 | - POSTGRES_USER=airflow
104 | - POSTGRES_PASSWORD=airflow
105 | - POSTGRES_DB=airflow
106 | volumes:
107 | - pg_data_airflow:/var/lib/postgresql/data
108 |
109 | airflow:
110 | image: airflow
111 | container_name: "my_airflow"
112 | build:
113 | context: ./dockerfile_airflow
114 | restart: always
115 | depends_on:
116 | - postgres
117 | environment:
118 | - LOAD_EX=n
119 | - EXECUTOR=Local
120 | - FERNET_KEY=46BKJoQYlPPOexq0OhDZnIlNepKFf87WFwLbfzqDDho=
121 | volumes:
122 | - ${WD}/airflow/dags:/usr/local/airflow/dags
123 | - ${WD}/q_pack:/usr/local/airflow/dags/q_pack
124 | ports:
125 | - 8080:8080
126 | command: webserver
127 |
128 | volumes:
129 | pg_data:
130 | external: false
131 | name: pg_data
132 | pg_data_airflow:
133 | external: false
134 | name: pg_data_airflow
135 | redis:
136 | external: false
137 | name: redis
138 |
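
Illustrative bring-up, assuming WD in .env points at the directory holding the minio/, mlflow/, q_pack/ and airflow/ paths referenced in the volume mounts:

    docker-compose up -d --build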
--------------------------------------------------------------------------------
/dockerfile_airflow/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM puckel/docker-airflow
2 |
3 | COPY requirements.txt /requirements.txt
4 |
5 | ENV PYTHONPATH "${PYTHONPATH}:/usr/local/airflow/dags/q_pack"
6 | # For the Airflow container, environment variables defined in docker-compose don't take effect (except the Airflow-specific ones), so they are set here
7 | ENV MLFLOW_S3_ENDPOINT_URL "http://minio-image:9000"
8 | ENV AWS_ACCESS_KEY_ID "minio-image"
9 | ENV AWS_SECRET_ACCESS_KEY "minio-image-pass"
10 | ENV MLFLOW_TRACKING_URI "http://mlflow-image:5500"
--------------------------------------------------------------------------------
/dockerfile_airflow/requirements.txt:
--------------------------------------------------------------------------------
1 | psycopg2-binary
2 | oandapyV20
3 | xlrd
4 | backtrader
5 | v20
6 | mlflow==1.4.0
7 | minio
8 | boto3
9 | scikit-learn
--------------------------------------------------------------------------------
/dockerfile_jupyter_notebook/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM jupyter/scipy-notebook
2 |
3 | USER root
4 |
5 | COPY requirements.txt /tmp/
6 | RUN pip install --requirement /tmp/requirements.txt && \
7 | fix-permissions $CONDA_DIR && \
8 | fix-permissions /home/$NB_USER && \
9 | apt update && \
10 | apt-get install curl -y
11 |
12 | ENV PYTHONPATH "${PYTHONPATH}:/home/jovyan/work/q_pack"
13 |
14 | CMD ["jupyter", "notebook", "--no-browser","--NotebookApp.token=''","--NotebookApp.password=''", "--allow-root"]
15 |
16 |
17 | # Another way to give root access without password in the compose
18 | # environment:
19 | # GRANT_SUDO: "yes"
20 | # user:
21 | # "root"
--------------------------------------------------------------------------------
/dockerfile_jupyter_notebook/README.md:
--------------------------------------------------------------------------------
1 | # Scipy jupyter notebook
2 | Login and tokenization disabled
3 |
4 |
5 | # Instructions to Run
6 | docker build -t jupyter_scipy .
7 | docker run -p 8888:8888 jupyter_scipy
--------------------------------------------------------------------------------
/dockerfile_jupyter_notebook/requirements.txt:
--------------------------------------------------------------------------------
1 | psycopg2-binary
2 | mlflow==1.4.0
3 | xgboost
4 | boto3
5 | oandapyV20
6 | backtrader
7 | v20
8 | pyfolio
9 | minio
--------------------------------------------------------------------------------
/dockerfile_minio/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM minio/minio
2 |
3 | RUN wget https://dl.min.io/client/mc/release/linux-amd64/mc && \
4 | chmod +x mc
5 |
--------------------------------------------------------------------------------
/dockerfile_mlflowserver/Dockerfile:
--------------------------------------------------------------------------------
1 | # FROM python:3.7
2 | FROM continuumio/miniconda
3 |
4 | MAINTAINER Wilder Rodrigues (wilder.rodrigues@ekholabs.ai)
5 |
6 | COPY requirements.txt /tmp/
7 | RUN pip install --requirement /tmp/requirements.txt
8 |
9 | ENV LC_ALL=C.UTF-8
10 | ENV LANG=C.UTF-8
11 |
--------------------------------------------------------------------------------
/dockerfile_mlflowserver/requirements.txt:
--------------------------------------------------------------------------------
1 | mlflow==1.4.0
2 | boto3
--------------------------------------------------------------------------------
/dockerfile_superset/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM amancevice/superset
2 |
3 | MAINTAINER Saeed Rahman
--------------------------------------------------------------------------------
/public/images/architecture-cloud.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/public/images/architecture-cloud.png
--------------------------------------------------------------------------------
/public/images/architecture.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/public/images/architecture.png
--------------------------------------------------------------------------------
/public/images/backtrader.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/public/images/backtrader.png
--------------------------------------------------------------------------------
/public/images/components.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/public/images/components.png
--------------------------------------------------------------------------------
/public/images/logo.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/public/images/logo.PNG
--------------------------------------------------------------------------------
/public/images/logo2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/public/images/logo2.png
--------------------------------------------------------------------------------
/public/images/logo_0.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/public/images/logo_0.PNG
--------------------------------------------------------------------------------
/public/images/old.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/public/images/old.png
--------------------------------------------------------------------------------
/public/images/superset2.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/saeed349/Microservices-Based-Algorithmic-Trading-System/f1d6b92ee5aa475969f5f14d8282b0097a2c6676/public/images/superset2.PNG
--------------------------------------------------------------------------------
/starter_script.bat:
--------------------------------------------------------------------------------
1 | echo "Running DB schema scripts inside the jupyter-image container"
2 | docker exec -it jupyter-image /bin/sh -c "python /home/jovyan/work/q_pack/db_pack/schema/secmaster_db_schema_builder.py"
3 | docker exec -it jupyter-image /bin/sh -c "python /home/jovyan/work/q_pack/db_pack/schema/secmaster_db_symbol_loader.py"
4 | docker exec -it jupyter-image /bin/sh -c "python /home/jovyan/work/q_pack/db_pack/schema/risk_db_schema_builder.py"
5 | echo "..."
--------------------------------------------------------------------------------