├── .dockerignore
├── .gitattributes
├── .github
│   ├── FUNDING.yml
│   └── workflows
│       ├── main.yml
│       └── pypi.yml
├── .gitignore
├── CHANGELOG.md
├── Dockerfile
├── INSTALL.md
├── LICENCE
├── MANIFEST.in
├── Makefile
├── PLANNED_FEATURES.md
├── README.md
├── batch_scripts
│   ├── linux
│   │   ├── cache_market_data.sh
│   │   ├── create_ssl_certificate.sh
│   │   ├── installation
│   │   │   ├── activate_python_environment.sh
│   │   │   ├── add_ip_to_firewall.sh
│   │   │   ├── amazon_linux
│   │   │   │   └── install_amazon_linux_extras.sh
│   │   │   ├── create_tcapy_working_folders.sh
│   │   │   ├── environment_linux_py37tca.yml
│   │   │   ├── export_conda_environment.sh
│   │   │   ├── get-docker.sh
│   │   │   ├── get_directory_above.sh
│   │   │   ├── google-chrome.repo
│   │   │   ├── increase_file_limits.sh
│   │   │   ├── influxdb.repo
│   │   │   ├── install_all_tcapy.sh
│   │   │   ├── install_anaconda.sh
│   │   │   ├── install_chrome.sh
│   │   │   ├── install_clickhouse.sh
│   │   │   ├── install_conda_from_env_yaml.sh
│   │   │   ├── install_dash_only.sh
│   │   │   ├── install_influxdb.sh
│   │   │   ├── install_jupyter_extensions.sh
│   │   │   ├── install_memcached.sh
│   │   │   ├── install_mongo.sh
│   │   │   ├── install_mysql.sh
│   │   │   ├── install_nginx.sh
│   │   │   ├── install_pdf.sh
│   │   │   ├── install_pip_python_packages.sh
│   │   │   ├── install_pycharm.sh
│   │   │   ├── install_python_tools_apache.sh
│   │   │   ├── install_rabbitmq.sh
│   │   │   ├── install_redis.sh
│   │   │   ├── install_sql_driver.sh
│   │   │   ├── install_tcapy_on_apache.sh
│   │   │   ├── install_tcapy_on_apache_gunicorn.sh
│   │   │   ├── install_tcapy_on_nginx_gunicorn.sh
│   │   │   ├── install_virtual_env.sh
│   │   │   ├── install_weasyprint.sh
│   │   │   ├── limits.conf
│   │   │   ├── mongodb-org-3.6.repo
│   │   │   ├── mongodb-org-4.2.repo
│   │   │   ├── mysql80.repo
│   │   │   ├── nginx.repo
│   │   │   ├── patch_celery_python_37.sh
│   │   │   └── set_tcapy_env_vars.sh
│   │   ├── kill_tcapy.sh
│   │   ├── orca.sh
│   │   ├── read_celery_log.sh
│   │   ├── read_logs_gnome_terminal.sh
│   │   ├── read_mongo_log.sh
│   │   ├── render_jupyter_notebooks.sh
│   │   ├── reset_docker_containers.sh
│   │   ├── restart_celery.sh
│   │   ├── restart_db.sh
│   │   ├── restart_influxdb.sh
│   │   ├── restart_tcapy.sh
│   │   ├── run_python_script.sh
│   │   ├── start_jupyter.sh
│   │   ├── start_pproxy.sh
│   │   └── update_chartpy_dev.sh
│   └── windows
│       ├── installation
│       │   ├── activate_python_environment.bat
│       │   ├── environment_windows_py37tca.yml
│       │   ├── export_conda_environment.bat
│       │   ├── install_conda_from_env_yaml.bat
│       │   ├── install_jupyter_extensions.bat
│       │   ├── install_pip_python_packages.bat
│       │   ├── install_virtual_env.bat
│       │   └── set_tcapy_env_vars.bat
│       ├── restart_wsl.bat
│       ├── start_excel.bat
│       └── start_jupyter.bat
├── binder
│   └── Dockerfile
├── cuemacro_logo.png
├── docker-compose.test.yml
├── docker-compose.yml
├── img
│   ├── anomalous_trades.PNG
│   ├── compare_slippage.PNG
│   ├── create_html_pdf.PNG
│   ├── distribution.PNG
│   ├── executions.PNG
│   ├── gui.PNG
│   ├── markout.PNG
│   ├── slippage_by_ticker.PNG
│   ├── std_create_html_pdf.png
│   ├── std_distribution.png
│   ├── std_executions.png
│   ├── std_gui.png
│   ├── std_slippage_by_ticker.png
│   ├── std_tca_request.png
│   ├── std_timeline_slippage.png
│   ├── tca_request.PNG
│   ├── tcapy_montage.png
│   └── timeline_slippage.PNG
├── requirements.txt
├── setup.py
├── source
│   └── .gitkeep
├── tcapy
│   ├── __init__.py
│   ├── analysis
│   │   ├── __init__.py
│   │   ├── algos
│   │   │   ├── __init__.py
│   │   │   ├── benchmark.py
│   │   │   ├── metric.py
│   │   │   ├── resultsform.py
│   │   │   └── resultssummary.py
│   │   ├── dataframeholder.py
│   │   ├── tcaengine.py
│   │   ├── tcamarkettradeloader.py
│   │   ├── tcamarkettradeloaderimpl.py
│   │   ├── tcarequest.py
│   │   ├── tcatickerloader.py
│   │   ├── tcatickerloaderimpl.py
│   │   └── tradeorderfilter.py
│   ├── api
│   │   ├── __init__.py
│   │   └── app_api.py
│   ├── client
│   │   └── __init__.py
│   ├── conf
│   │   ├── __init__.py
│   │   ├── celery_calls.py
│   │   ├── celeryconfig.py
│   │   ├── common-session
│   │   ├── common-session-noninteractive
│   │   ├── constants.py
│   │   ├── influxdb.conf
│   │   ├── limits.conf
│   │   ├── logging.conf
│   │   ├── mongo.conf
│   │   ├── my.cnf
│   │   ├── redis.conf
│   │   ├── tcapy.wsgi
│   │   ├── tcapy_apache.conf
│   │   ├── tcapy_apache_gunicorn.conf
│   │   ├── tcapy_apache_gunicorn_80.conf
│   │   ├── tcapy_nginx_gunicorn.conf
│   │   ├── tcapy_nginx_gunicorn_docker.conf
│   │   ├── tcapy_wsgi.py
│   │   ├── tcapyapi.wsgi
│   │   ├── tcapyapi_wsgi.py
│   │   ├── tcapyboard.wsgi
│   │   └── tcapyboard_wsgi.py
│   ├── data
│   │   ├── __init__.py
│   │   ├── accesscontrol.py
│   │   ├── databasepopulator.py
│   │   ├── databasesource.py
│   │   ├── datadumper.py
│   │   ├── datafactory.py
│   │   ├── datatestcreator.py
│   │   └── volatilecache.py
│   ├── excel
│   │   ├── __init__.py
│   │   ├── tcapy_xl.py
│   │   └── tcapy_xl.xlsm
│   ├── util
│   │   ├── __init__.py
│   │   ├── colors.py
│   │   ├── customexceptions.py
│   │   ├── deltaizeserialize.py
│   │   ├── fxconv.py
│   │   ├── loggermanager.py
│   │   ├── mediator.py
│   │   ├── singleton.py
│   │   ├── ssl_no_verify.py
│   │   ├── swim.py
│   │   ├── timeseries.py
│   │   └── utilfunc.py
│   └── vis
│       ├── __init__.py
│       ├── app.py
│       ├── app_board.py
│       ├── app_imports.py
│       ├── assets
│       │   ├── android-chrome-192x192.png
│       │   ├── android-chrome-256x256.png
│       │   ├── android-chrome-512x512.png
│       │   ├── apple-touch-icon.png
│       │   ├── browserconfig.xml
│       │   ├── favicon-16x16.png
│       │   ├── favicon-32x32.png
│       │   ├── favicon.ico
│       │   ├── head.htm
│       │   ├── logo.png
│       │   ├── logo_crop.png
│       │   ├── mstile-150x150.png
│       │   ├── safari-pinned-tab.svg
│       │   ├── site.webmanifest
│       │   └── tabs.css
│       ├── candlestick.py
│       ├── computationcaller.py
│       ├── computationresults.py
│       ├── displaylisteners.py
│       ├── favicon.ico
│       ├── layoutdash.py
│       ├── logo.png
│       ├── logo_cropped.png
│       ├── report
│       │   ├── __init__.py
│       │   ├── computationreport.py
│       │   ├── tcareport.py
│       │   └── templates
│       │       ├── clean_report.css
│       │       ├── clean_report.css.map
│       │       ├── clean_report.html
│       │       ├── clean_report.sass
│       │       ├── cuemacro_logo.png
│       │       └── img
│       │           ├── OpenSans-Bold.ttf
│       │           ├── OpenSans-BoldItalic.ttf
│       │           ├── OpenSans-ExtraBold.ttf
│       │           ├── OpenSans-ExtraBoldItalic.ttf
│       │           ├── OpenSans-Italic.ttf
│       │           ├── OpenSans-Light.ttf
│       │           ├── OpenSans-LightItalic.ttf
│       │           ├── OpenSans-Regular.ttf
│       │           ├── OpenSans-Semibold.ttf
│       │           └── OpenSans-SemiboldItalic.ttf
│       ├── tcacaller.py
│       ├── tcapy.css
│       └── tcaresults.py
├── tcapy_examples
│   ├── __init__.py
│   └── gen
│       ├── __init__.py
│       ├── benchmark_market_calculation_examples.py
│       ├── benchmark_trade_calculation_examples.py
│       ├── data_loading_examples.py
│       ├── date_calculation_examples.py
│       ├── filtering_examples.py
│       ├── metric_calculation_examples.py
│       ├── mongo_aws_examples.py
│       ├── non_db_tca_example.py
│       ├── parse_trade_csv_tca.py
│       ├── simple_tca_example.py
│       ├── test_tca_report.pdf
│       └── time_series_ops_examples.py
├── tcapy_notebooks
│   ├── a_10_minute_view_of_tcapy.ipynb
│   ├── additional_benchmarks_metrics_for_tcapy.ipynb
│   ├── compliance_tca_calculations.ipynb
│   ├── cuemacro_logo.png
│   ├── excel_xlwings_with_tcapy.ipynb
│   ├── introducing_tcapy.ipynb
│   ├── jupyter_notebooks_description.md
│   ├── market_microstructure_with_tcapy.ipynb
│   ├── populating_databases_for_tcapy.ipynb
│   └── real_life_tcapy_case_study.ipynb
├── tcapy_scripts
│   └── gen
│       ├── __init__.py
│       ├── cache_data_vendor_data.py
│       ├── convert_csv_to_hdf5_parquet.py
│       ├── convert_hdf5_to_csv.py
│       ├── convert_hdf5_to_parquet.py
│       ├── copy_parquet_to_arrow.py
│       ├── create_small_market_trade_dataset.py
│       ├── create_small_market_trade_dataset_eikon.py
│       ├── data_create_market_trade_test_population.py
│       ├── download_data_vendor_data.py
│       ├── dump_data_vendor_large_chunk_to_parquet_csv_hdf5.py
│       ├── dump_data_vendor_to_parquet_csv_hdf5.py
│       ├── dump_market_data_from_database_to_parquet.py
│       ├── dump_trade_data_from_database_to_csv.py
│       ├── edit_folder_hdf5_parquet.py
│       ├── plot_market_data_from_database.py
│       ├── plot_parquet_file.py
│       ├── query_available_trades.py
│       ├── rename_file_names.py
│       ├── resample_market_parquet_on_disk_to_parquet.py
│       ├── upload_market_parquet_csv_hdf5.py
│       ├── upload_trade_data_csv.py
│       └── volatile_cache_market_trade_data.py
├── tcapygen
│   ├── __init__.py
│   ├── constantsgen.py
│   ├── layoutboardgen.py
│   ├── layoutgen.py
│   ├── logo.png
│   ├── tcacallerboardgen.py
│   └── tcacallergen.py
├── tcapyuser
│   ├── CREATE_USER_DEFINED_TCAPY.txt
│   └── __init__.py
└── test
    ├── __init__.py
    ├── config.py
    ├── conftest.py
    ├── resources
    │   ├── mini_test_trade_df.csv
    │   ├── small_test_market_df.parquet
    │   ├── small_test_market_df_reverse.parquet
    │   ├── small_test_order_df.csv
    │   └── small_test_trade_df.csv
    ├── run_tests.sh
    └── test_tcapy
        ├── __init__.py
        ├── test_data_read_write.py
        ├── test_data_vendor_feed.py
        ├── test_overlapping_data_caching.py
        ├── test_results_agg.py
        ├── test_tca_functionality_gen.py
        ├── test_tca_multithreading.py
        ├── test_time_series.py
        └── test_trade_data_generation_gen.py
/.dockerignore:
--------------------------------------------------------------------------------
1 | # https://stackoverflow.com/questions/28097064/dockerignore-ignore-everything-except-a-file-and-the-dockerfile
2 |
3 | # Ignore Everything
4 | **
5 |
6 | !requirements.txt
7 |
8 | !test
9 | !tcapy
10 | !tcapy_notebooks
11 | !tcapy_scripts
12 | !tcapy_examples
13 | !tcapygen
14 | !tcapyuser
15 | !setup.py
16 | !README.md
17 |
18 | **/__pycache__
19 | **/*.pyc
20 | **/*cred*
21 | **/*.key*
22 | **/*.crt*
23 | **/*.sr*
24 | **/.cache
25 | **/.pytest_cache
26 | **/*.log
27 | **/server.*
28 | **/.idea
29 | **/.github
30 | **/.git
31 | *.env
32 |
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | *.py linguist-language=Python
2 | *.ipynb linguist-language=Python
3 | *.html linguist-language=Python
--------------------------------------------------------------------------------
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | # These are supported funding model platforms
2 |
3 | github: [cuemacro]
4 | patreon: # Replace with a single Patreon username
5 | open_collective: # Replace with a single Open Collective username
6 | ko_fi: # Replace with a single Ko-fi username
7 | tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
8 | community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
9 | liberapay: # Replace with a single Liberapay username
10 | issuehunt: # Replace with a single IssueHunt username
11 | otechie: # Replace with a single Otechie username
12 | custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
13 |
--------------------------------------------------------------------------------
/.github/workflows/main.yml:
--------------------------------------------------------------------------------
1 | # This is a basic workflow to help you get started with Actions
2 |
3 | name: CI
4 |
5 | # Controls when the action will run. Triggers the workflow on push or pull request
6 | # events but only for the master branch
7 | on:
8 | push:
9 | branches: [ master ]
10 | pull_request:
11 | branches: [ master ]
12 |
13 | # A workflow run is made up of one or more jobs that can run sequentially or in parallel
14 | jobs:
15 |   # This workflow contains a single job called "test"
16 | test:
17 | # The type of runner that the job will run on
18 | runs-on: ubuntu-latest
19 |
20 | # Steps represent a sequence of tasks that will be executed as part of the job
21 | steps:
22 | # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
23 | - uses: actions/checkout@v2
24 |       # Runs a single command using the runner's shell
25 | #- name: test
26 | - run: make test
27 | env:
28 | # Run part of the tests, avoiding the multithreading part which runs out of memory on GitHub
29 | RUN_PART: 1
30 |
31 | #- name: artifacts
32 | - uses: actions/upload-artifact@v2
33 | with:
34 | # declare the artifacts...
35 | name: artifacts
36 | path: artifacts # or path/to/artifact
37 |
38 |
39 |
40 |
--------------------------------------------------------------------------------
/.github/workflows/pypi.yml:
--------------------------------------------------------------------------------
1 | name: Release
2 |
3 | on:
4 | push:
5 | tags:
6 | - '[0-9]+.[0-9]+.[0-9]'
7 |
8 | jobs:
9 | build:
10 | runs-on: ubuntu-latest
11 |
12 | steps:
13 | - uses: actions/checkout@master
14 | - run: make test
15 |
16 | - name: Publish Python Package
17 | uses: mariamrf/py-package-publish-action@v1.0.0
18 | with:
19 | python_version: '3.7.0'
20 | env:
21 | TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }}
22 | TWINE_USERNAME: ${{ secrets.TWINE_USERNAME }}
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 |
5 | # C extensions
6 | *.so
7 |
8 | # Credentials files
9 | *cred.py
10 | *cred*.py
11 | *credpro.py
12 | *creduser.py
13 | *.env
14 |
15 | # orca executable
16 | orca-1.2.1-x86_64.AppImage
17 |
18 | # Distribution / packaging
19 | .idea/
20 | .pytest_cache
21 | .Python
22 | .ipynb_*
23 | env/
24 | build/
25 | develop-eggs/
26 | dist/
27 | downloads/
28 | eggs/
29 | external/
30 | .eggs/
31 | htmlcov/
32 | lib/
33 | lib64/
34 | log/
35 | parts/
36 | sdist/
37 | var/
38 | *.egg-info/
39 | .installed.cfg
40 | *.egg
41 | output_data
42 | output_files
43 | dash/
44 | dash-master/
45 | tests/tests_data/
46 | *.log
47 |
48 | # PyInstaller
49 | # Usually these files are written by a python script from a template
50 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
51 | *.manifest
52 | *.spec
53 |
54 | # Installer logs
55 | pip-log.txt
56 | pip-delete-this-directory.txt
57 |
58 | # Unit test / coverage reports
59 | htmlcov/
60 | .tox/
61 | .coverage
62 | .coverage.*
63 | .cache
64 | nosetests.xml
65 | coverage.xml
66 | *,cover
67 |
68 | # Translations
69 | *.mo
70 | *.pot
71 |
72 | # Django stuff:
73 | *.log
74 | *.log.*
75 |
76 | # Sphinx documentation
77 | docs/_build/
78 |
79 | # PyBuilder
80 | target/
81 |
82 | # Output Files
83 |
84 | **/.ipynb_checkpoints
85 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | # Set the base image to a public Python 3.7 image (Debian Stretch slim)
2 | FROM python:3.7.7-slim-stretch as builder
3 |
4 | # To build tests run
5 | # docker-compose -f docker-compose.test.yml build
6 |
7 | # File Author / Maintainer
8 | # MAINTAINER Thomas Schmelzer "thomas.schmelzer@gmail.com"
9 |
10 | COPY requirements.txt /tmp/tcapy/requirements.txt
11 |
12 | # Dependencies for pystore and weasyprint in buildDeps
13 | # If we don't want to use weasyprint, we can drop the following from buildDeps:
14 | # build-essential libcairo2 libpango-1.0-0 libpangocairo-1.0-0 libgdk-pixbuf2.0-0 libffi-dev shared-mime-info
15 | RUN buildDeps='gcc g++ libsnappy-dev unixodbc-dev build-essential libcairo2 libpango-1.0-0 libpangocairo-1.0-0 libgdk-pixbuf2.0-0 libffi-dev shared-mime-info' && \
16 | apt-get update && apt-get install -y $buildDeps --no-install-recommends && \
17 | pip install --no-cache-dir -r /tmp/tcapy/requirements.txt && \
18 | rm /tmp/tcapy/requirements.txt
19 | # && \
20 | #apt-get purge -y --auto-remove $buildDeps
21 |
22 | # Copy the tcapy packages and tests into the image
23 | COPY ./tcapy /tcapy/tcapy
24 | COPY ./tcapygen /tcapy/tcapygen
25 | COPY ./tcapyuser /tcapy/tcapyuser
26 | COPY ./test /tcapy/test
27 | COPY ./test /test
28 |
29 | # Make sure tcapy is on the PYTHONPATH
30 | ENV PYTHONPATH "${PYTHONPATH}:/tcapy"
31 |
32 | #### Here's the test-configuration
33 | FROM builder as test
34 |
35 | # We install some extra libraries purely for testing
36 | RUN pip install --no-cache-dir httpretty pytest pytest-cov pytest-html sphinx mongomock requests-mock
37 |
38 | WORKDIR /tcapy
39 |
40 | # For temp caching for the tests
41 | RUN mkdir -p /tmp/csv
42 | RUN mkdir -p /tmp/tcapy
43 |
44 | # CMD echo "${RUN_PART}" # debug leftover - overridden by the CMD below (only the last CMD takes effect)
45 |
46 | # Run the pytest
47 | # If RUN_PART is not defined, we're not running on GitHub CI, we're running tests locally
48 | # Otherwise if RUN_PART is defined, it's likely we're running on GitHub, so we avoid running multithreading tests which run
49 | # out of memory (machines have limited memory)
50 | CMD if [ "${RUN_PART}" = 1 ]; \
51 | then py.test --cov=tcapy --cov-report html:artifacts/html-coverage --cov-report term --html=artifacts/html-report/report.html --ignore-glob='*multithreading*.py'; \
52 | else py.test --cov=tcapy --cov-report html:artifacts/html-coverage --cov-report term \
53 | --html=artifacts/html-report/report.html; \
54 | fi
55 |
56 | # Run everything
57 | # CMD py.test --cov=tcapy --cov-report html:artifacts/html-coverage --cov-report term \
58 | # --html=artifacts/html-report/report.html
59 |
60 | # Example to run a specific test script
61 | # CMD py.test --cov=tcapy --cov-report html:artifacts/html-coverage --cov-report term \
62 | # --html=artifacts/html-report/report.html test/test_tcapy/test_tca_multithreading.py
63 |
64 | # Example to run an individual test function
65 | # CMD py.test --cov=tcapy --cov-report html:artifacts/html-coverage --cov-report term \
66 | # --html=artifacts/html-report/report.html test/test_tcapy/test_data_read_write.py::test_write_trade_data_sql
67 |
68 | # For debugging to keep container going
69 | # CMD tail -f /dev/null
70 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | recursive-include tcapy/ *.*
2 | include tcapy/conf/*
3 | recursive-include tcapy_examples/ *.*
4 | recursive-include tcapygen/ *.*
5 | recursive-include tcapyuser/ *.*
6 |
7 | global-exclude *.py[cod] __pycache__ *.so *.pdf *.env
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | #!make
2 | PROJECT_VERSION := $(shell python setup.py --version)
3 |
4 | SHELL := /bin/bash
5 | PACKAGE := tcapy
6 |
7 | .PHONY: help build jupyter test test_local doc tag
8 |
9 |
10 | .DEFAULT: help
11 |
12 | help:
13 | @echo "make build"
14 | @echo " Build the docker image."
15 | @echo "make test"
16 | @echo " Build the docker image for testing and run them."
17 | @echo "make doc"
18 | @echo " Construct the documentation."
19 | @echo "make tag"
20 | @echo " Make a tag on Github."
21 |
22 |
23 |
24 | build:
25 | docker-compose build tcapy
26 |
27 | jupyter:
28 | docker-compose build jupyter
29 |
30 | test:
31 | docker-compose -f docker-compose.test.yml run sut
32 |
33 | test_local:
34 | docker-compose -f docker-compose.local.test.yml run sut
35 |
36 | doc:
37 | docker-compose -f docker-compose.test.yml run sut sphinx-build /source artifacts/build
38 |
39 | tag:
40 | git tag -a ${PROJECT_VERSION} -m "new tag"
41 | git push --tags
42 |
43 |
--------------------------------------------------------------------------------
/PLANNED_FEATURES.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | # tcapy planned features
4 |
5 | Here are some of the features we'd like to add to tcapy. If you'd be interested in contributing
6 | or sponsoring the addition of new features, please contact saeed@cuemacro.com
7 |
8 | * Adding more metrics and benchmarks
9 | * Adding more asset classes
10 | * Cash equities, FX swaps etc.
11 | * Add wrappers to enable tcapy users to use external TCA providers with tcapy
12 | * This would make it easier to compare TCA output between providers and compare against internal computation
13 | * Add a feature to allow tcapy to consume streaming market data from a realtime datafeed and dump it to a database
14 | * For example using Redis Streams or Kafka
15 | * Adding ability to do more general computations (non-TCA) on market data in the same framework
16 | * Eg. calculate volatility based on market tick data
17 | * Adding more visualisations from Plotly
18 | * Adding more data providers for market tick data
19 | * Adding more database wrappers both for trade/order data and market data (eg. [PyStore](https://github.com/ranaroussi/pystore))
20 | * Making it easier to install and start tcapy
21 | * Creating a Docker container for tcapy
22 | * Improving the installation/starting scripts for tcapy
23 | * Adding more ways to interact with tcapy
24 | * Eg. RESTful API client and Excel wrapper
25 | * Making it easier to configure parameters which change often, such as tickers, and storing these in a flat
26 | database like SQLite
27 | * Add authentication for the web app
28 | * Investigating the use of Dask Dataframes for distributed computation and profiling the code more to make it faster
29 | * Adding full support for cloud services like AWS, Google Cloud and Azure to take advantage of serverless computing and
30 | easy setup on the various cloud services (eg. AWS Lambda, Cloud Functions & Azure Functions)
31 | * Adding more unit tests to cover more functionality
32 |
33 |
--------------------------------------------------------------------------------
/batch_scripts/linux/cache_market_data.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
4 | source $SCRIPT_FOLDER/installation/set_tcapy_env_vars.sh
5 |
6 | # set Python environment
7 | source $SCRIPT_FOLDER/installation/activate_python_environment.sh
8 |
9 | # run python scripts in tcapy
10 | python $TCAPY_CUEMACRO/tcapy_scripts/gen/volatile_cache_market_trade_data.py
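11 |
12 | # Hypothetical example (adjust the path to your tcapy installation): run nightly via cron
13 | # 0 1 * * * /home/tcapyuser/cuemacro/tcapy/batch_scripts/linux/cache_market_data.sh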
--------------------------------------------------------------------------------
/batch_scripts/linux/create_ssl_certificate.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # If we want to use a proxy server for https (eg. for NCFX) like pproxy, we need an SSL certificate
4 | # This script creates a self-signed SSL certificate
5 |
6 | openssl genrsa -des3 -out server.key 1024
7 | openssl req -new -key server.key -out server.csr
8 | cp server.key server.key.org
9 | openssl rsa -in server.key.org -out server.key
10 | openssl x509 -req -days 365 -in server.csr -signkey server.key -out server.crt
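11 |
12 | # Optional check: inspect the generated certificate and its validity dates
13 | # openssl x509 -in server.crt -noout -text -dates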
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/activate_python_environment.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # This will activate our Python environment which has been created for tcapy
4 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
5 |
6 | if [ -d "$SCRIPT_FOLDER/installation/" ]; then
7 | source $SCRIPT_FOLDER/installation/set_tcapy_env_vars.sh
8 | elif [ -d "$SCRIPT_FOLDER/" ]; then
9 | source $SCRIPT_FOLDER/set_tcapy_env_vars.sh
10 | fi
11 |
12 | echo 'Activating Python environment' $TCAPY_PYTHON_ENV '... and adding tcapy to PYTHONPATH' $TCAPY_CUEMACRO
13 | export PYTHONPATH=$TCAPY_CUEMACRO/:$PYTHONPATH
14 |
15 | if [ $TCAPY_PYTHON_ENV_TYPE == "conda" ]; then
16 | echo 'Python env type' $TCAPY_PYTHON_ENV_TYPE 'and' $CONDA_ACTIVATE
17 | source $CONDA_ACTIVATE
18 | source activate $TCAPY_PYTHON_ENV
19 | elif [ $TCAPY_PYTHON_ENV_TYPE == "virtualenv" ]; then
20 | echo 'Python env type ' $TCAPY_PYTHON_ENV_TYPE
21 | source $TCAPY_PYTHON_ENV/bin/activate
22 | fi
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/add_ip_to_firewall.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # In this case we assume our user is running on 192.168.82.85 (you will need to change the IPs as appropriate) and
4 | # we are opening up ports so they can access the
5 | # tcapy web app, and also so MongoDB can be accessed
6 |
7 | # We can remove the rules by editing file
8 | # /etc/firewalld/zones/public.xml
9 |
10 | # careful ONLY add the IPs of trader/compliance machines you want to have access
11 | # do not expose to the whole network!
12 |
13 | # Only browsers on these IPs will be able to access the applications (port 80)
14 | sudo firewall-cmd --permanent --zone=public --add-rich-rule='rule family="ipv4" source address="192.168.82.85/32" port protocol="tcp" port="80" accept'
15 |
16 | # for https
17 | sudo firewall-cmd --permanent --zone=public --add-rich-rule='rule family="ipv4" source address="192.168.82.85/32" port protocol="tcp" port="443" accept'
18 |
19 | # for MongoDB access (only add IP for systems where we are running tcapy) - server_port 27017
20 | sudo firewall-cmd --permanent --zone=public --add-rich-rule='rule family="ipv4" destination address="192.168.1.192/32" port protocol="tcp" port="27017" accept'
21 | sudo firewall-cmd --reload
22 |
23 | # we are assuming that Redis is running on the tcapy machine
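24 |
25 | # To remove a rule later without editing public.xml by hand, mirror the add command with
26 | # --remove-rich-rule, eg. for the port 80 rule above:
27 | # sudo firewall-cmd --permanent --zone=public --remove-rich-rule='rule family="ipv4" source address="192.168.82.85/32" port protocol="tcp" port="80" accept'
28 | # sudo firewall-cmd --reload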
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/amazon_linux/install_amazon_linux_extras.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Installs minimal amount of extra packages on Amazon Linux, so that we can run Docker and run git
3 | # All the major dependencies will be installed within Docker containers by tcapy in a sandboxed environment
4 |
5 | # Assumes the Amazon username is ec2-user (the default); change AMAZON_USER below if yours differs (eg. tcapyuser)
6 | # Whilst in practice you will likely use Amazon Linux on AWS/EC2, you can also install it for testing locally/on-premises
7 | # for example on VMware, by downloading the image from Amazon at
8 | # https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/amazon-linux-2-virtual-machine.html
9 | #
10 | # See https://medium.com/shehuawwal/download-and-run-amazon-linux-2-ami-locally-on-your-virtualbox-or-vmware-b554a98dcb1c
11 | # which shows how to login locally
12 |
13 | export AMAZON_USER=ec2-user
14 |
15 | # Install docker
16 | sudo amazon-linux-extras install docker
17 | sudo service docker start
18 | sudo usermod -a -G docker $AMAZON_USER
19 | sudo chkconfig docker on
20 |
21 | # Install docker-compose
22 | sudo curl -L https://github.com/docker/compose/releases/download/1.26.2/docker-compose-$(uname -s)-$(uname -m) -o /usr/local/bin/docker-compose
23 | sudo chmod +x /usr/local/bin/docker-compose
24 | docker-compose version
25 |
26 | # Install git
27 | sudo yum update
28 | sudo yum install --yes git
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/create_tcapy_working_folders.sh:
--------------------------------------------------------------------------------
1 | # Create folders for databases
2 | sudo mkdir -p /data/db_mongodb
3 | sudo chown -R mongodb:mongodb /data/db_mongodb
4 | sudo mkdir -p /data/db_mysql
5 | sudo chown -R mysql:mysql /data/db_mysql
6 | sudo mkdir -p /data/sqlite
7 | sudo mkdir -p /data/pystore
8 |
9 | # Create folders for CSV dumps of market data
10 | sudo mkdir -p /data/csv_dump
11 | sudo mkdir -p /data/csv_dump/dukascopy
12 | sudo mkdir -p /data/csv_dump/ncfx
13 | sudo mkdir -p /data/csv_dump/trade_order
14 | sudo mkdir -p /data/csv_dump/temp
15 | sudo mkdir -p /data/csv_dump/temp/large
16 | sudo mkdir -p /data/csv_output
17 | sudo chmod -R a+rw /data/csv_dump
18 | sudo chmod -R a+rw /data/csv_output
19 |
20 | # Temporary files
21 | sudo mkdir -p /tmp/csv
22 | sudo mkdir -p /tmp/tcapy
23 |
24 | # Create log folder
25 | sudo mkdir -p /home/$USER/cuemacro/tcapy/log
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/export_conda_environment.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Create an environment YAML file from the conda environment, that can later be used to recreate that
4 | # conda environment
5 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
6 | source $SCRIPT_FOLDER/set_tcapy_env_vars.sh
7 |
8 | # Set Python environment
9 | source $SCRIPT_FOLDER/activate_python_environment.sh
10 |
11 | conda update -n base conda --yes
12 | conda env export > $TCAPY_CUEMACRO/batch_scripts/linux/installation/environment_linux_py37tca.yml
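13 |
14 | # The exported YAML can later be used to recreate the environment (see install_conda_from_env_yaml.sh):
15 | # conda env create -f $TCAPY_CUEMACRO/batch_scripts/linux/installation/environment_linux_py37tca.yml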
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/get_directory_above.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
4 |
5 | echo $SCRIPT_FOLDER
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/google-chrome.repo:
--------------------------------------------------------------------------------
1 | [google-chrome]
2 | name=google-chrome
3 | baseurl=http://dl.google.com/linux/chrome/rpm/stable/$basearch
4 | enabled=1
5 | gpgcheck=1
6 | gpgkey=https://dl-ssl.google.com/linux/linux_signing_key.pub
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/increase_file_limits.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Increase file open limits for MongoDB (tested with Ubuntu)
4 | # With increased file open limits MongoDB tends to be more stable
5 | # We assume that the root user will run MongoDB
6 |
7 | # See https://docs.mongodb.com/manual/reference/ulimit/
8 | # See https://askubuntu.com/questions/162229/how-do-i-increase-the-open-files-limit-for-a-non-root-user
9 |
10 | # Note, will overwrite with tcapy prepared versions
11 | # * /etc/security/limits.conf (increases the nofile limit for root)
12 | # * /etc/pam.d/common-session (enforces that limits.conf is read, by adding pam_limits.so)
13 | # * /etc/pam.d/common-session-noninteractive (same, for non-interactive sessions)
14 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
15 | source $SCRIPT_FOLDER/set_tcapy_env_vars.sh
16 |
17 | sudo cp $TCAPY_CUEMACRO/tcapy/conf/limits.conf /etc/security/
18 | sudo cp $TCAPY_CUEMACRO/tcapy/conf/common-session /etc/pam.d/
19 | sudo cp $TCAPY_CUEMACRO/tcapy/conf/common-session-noninteractive /etc/pam.d/
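20 |
21 | # After logging out and back in, verify the new limit with:
22 | # ulimit -n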
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/influxdb.repo:
--------------------------------------------------------------------------------
1 | [influxdb]
2 | name = InfluxDB Repository - RHEL $releasever
3 | baseurl = https://repos.influxdata.com/rhel/$releasever/$basearch/stable
4 | enabled = 1
5 | gpgcheck = 1
6 | gpgkey = https://repos.influxdata.com/influxdb.key
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/install_anaconda.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # This will install Anaconda into /home/$USER/anaconda3 - note you might need to change the URL where you
4 | # are downloading from
5 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
6 | source $SCRIPT_FOLDER/set_tcapy_env_vars.sh
7 |
8 | # Get correct installer script and config file for OS (you may need to change this)
9 | URL="https://repo.continuum.io/archive/Anaconda3-2020.02-Linux-x86_64.sh"
10 | CONFIG=".bashrc"
11 |
12 | # Download, run, and clean up installer script
13 | wget -O ~/anaconda-installer.sh $URL
14 | bash ~/anaconda-installer.sh -b -p ~/anaconda3
15 | rm ~/anaconda-installer.sh
16 |
17 | # Enable Anaconda and add to path from within config file
18 | chmod -R 777 ~/anaconda3
19 | echo 'export PATH=~/anaconda3/bin:$PATH' >> ~/${CONFIG}
20 | source ~/${CONFIG}
21 |
22 | echo "Anaconda3 has been successfully installed to ~/anaconda3."
23 | echo "This version will override all other versions of Python on your system."
24 | echo "Your ~/${CONFIG} file has been modified to add Anaconda 3 to your PATH variable."
25 | echo "For more info about Anaconda, see http://docs.continuum.io/anaconda/index.html"
26 |
27 | # also install a Python 3.6 conda environment
28 | # ~/anaconda3/bin/conda create -n $TCAPY_PYTHON_ENV python=3.6
29 |
30 | # So conda can be used in bash shell
31 | ~/anaconda3/bin/conda init bash
32 |
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/install_chrome.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Install Google Chrome
4 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
5 | source $SCRIPT_FOLDER/set_tcapy_env_vars.sh
6 |
7 | # Linux installation
8 | if [ $DISTRO == "ubuntu" ]; then
9 | wget https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb
10 | sudo apt -y install ./google-chrome-stable_current_amd64.deb
11 |
12 | elif [ $DISTRO == "redhat" ]; then
13 | # Copy repo file (for Google Chrome - by default not in Red Hat distribution)
14 |     sudo cp $TCAPY_CUEMACRO/batch_scripts/linux/installation/google-chrome.repo /etc/yum.repos.d/google-chrome.repo
15 |
16 | # Now install Chrome
17 | sudo yum -y install google-chrome-stable
18 | fi
19 |
20 |
21 |
22 |
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/install_clickhouse.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # This script installs ClickHouse on your machine. This will require editing, depending on where you want to install.
4 |
5 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
6 | source $SCRIPT_FOLDER/set_tcapy_env_vars.sh
7 |
8 | if [ $DISTRO == "ubuntu" ]; then
9 | # Ubuntu installation
10 | sudo apt-get install apt-transport-https ca-certificates dirmngr
11 | sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv E0C56BD4
12 |
13 | echo "deb https://repo.clickhouse.tech/deb/stable/ main/" | sudo tee \
14 | /etc/apt/sources.list.d/clickhouse.list
15 | sudo apt-get update
16 |
17 | sudo apt-get install -y clickhouse-server clickhouse-client
18 |
19 | elif [ $DISTRO == "redhat" ]; then
20 | # Red Hat installation
21 | sudo yum install yum-utils
22 | sudo rpm --import https://repo.clickhouse.tech/CLICKHOUSE-KEY.GPG
23 | sudo yum-config-manager --add-repo https://repo.clickhouse.tech/rpm/clickhouse.repo
24 | sudo yum install clickhouse-server clickhouse-client
25 | fi
26 |
27 | # By default data folder is at
28 | # /var/lib/clickhouse/
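29 |
30 | # Optional check (not in the original script): start the server and run a trivial query
31 | # sudo service clickhouse-server start
32 | # clickhouse-client --query "SELECT 1"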
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/install_conda_from_env_yaml.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Create a conda environment (py37tca) from a pre-made YAML file, which is much faster than
4 | # running conda/pip for individual packages, as done in install_pip_python_packages.sh
5 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
6 | source $SCRIPT_FOLDER/set_tcapy_env_vars.sh
7 |
8 | conda env create -f $TCAPY_CUEMACRO/batch_scripts/linux/installation/environment_linux_py37tca.yml
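 9 |
10 | # Afterwards activate the environment (assuming the YAML file names it py37tca):
11 | # conda activate py37tca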
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/install_dash_only.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
4 | source $SCRIPT_FOLDER/set_tcapy_env_vars.sh
5 |
6 | # Set Python environment
7 | source $SCRIPT_FOLDER/activate_python_environment.sh
8 |
9 | pip install chartpy==0.1.8 dash-auth==1.3.2 cufflinks==0.17.3 plotly==4.9.0 \
10 | chart-studio==1.1.0 kaleido dash-bootstrap-components==0.10.3 \
11 | dash==1.12.0 dash-html-components==1.0.3 dash-core-components==1.10.0 dash-table==4.7.0 jupyter-dash==0.2.1
12 |
13 |
14 |
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/install_influxdb.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
4 | source $SCRIPT_FOLDER/set_tcapy_env_vars.sh
5 |
6 | # Installation of influxdb time series database (can be used instead of MongoDB/Arctic)
7 | # further details at https://docs.influxdata.com/influxdb/v1.7/introduction/installation/
8 | if [ $DISTRO == "ubuntu" ]; then
9 | wget -qO- https://repos.influxdata.com/influxdb.key | sudo apt-key add -
10 | source /etc/os-release
11 | echo "deb https://repos.influxdata.com/debian $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/influxdb.list
12 | sudo apt-get -y install influxdb
13 |
14 | elif [ $DISTRO == "redhat" ]; then
15 | # Update repo file
16 | sudo cp $SCRIPT_FOLDER/influxdb.repo /etc/yum.repos.d/influxdb.repo
17 |
18 | # Now install influxdb
19 | sudo yum install -y influxdb
20 | fi
21 |
22 |
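23 | # Optional check: start the service and list databases (InfluxDB 1.x CLI)
24 | # sudo service influxdb start
25 | # influx -execute 'SHOW DATABASES'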
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/install_jupyter_extensions.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Installs the Jupyter extensions used with tcapy notebooks
4 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
5 | source $SCRIPT_FOLDER/set_tcapy_env_vars.sh
6 |
7 | # Set Python environment
8 | source $SCRIPT_FOLDER/activate_python_environment.sh
9 |
10 | echo 'Installing Jupyter extensions...'
11 |
12 | # Jupyter extensions
13 | jupyter contrib nbextension install --user # to activate js on Jupyter
14 | jupyter nbextension enable execute_time/ExecuteTime
15 | jupyter-nbextension install rise --py --sys-prefix
16 | jupyter nbextension enable rise --py --sys-prefix
17 | jupyter nbextension enable toc2/main --sys-prefix
18 | jupyter nbextension install --sys-prefix --symlink --py jupyter_dash
19 | jupyter nbextension enable --py jupyter_dash
20 |
21 | # JupyterLab extensions optional
22 | # jupyter labextension install @jupyter-widgets/jupyterlab-manager@2.0.0 --no-build
23 | # jupyter labextension install plotlywidget@1.5.4 --no-build
24 | # jupyter labextension install jupyterlab-plotly@1.5.4 --no-build
25 | # jupyter labextension install bqplot
26 | # jupyter lab build
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/install_memcached.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Installs Memcached (in memory cache, like Redis) which is used by Celery as a results backend
4 |
5 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
6 | source $SCRIPT_FOLDER/set_tcapy_env_vars.sh
7 |
8 | if [ $DISTRO == "ubuntu" ]; then
9 | sudo apt-get install --yes memcached libmemcached-tools
10 |
11 | elif [ $DISTRO == "redhat" ]; then
12 | # Based on https://access.redhat.com/solutions/1160613
13 | # You need to enable the RHEL7 repo to download Memcached from
14 | # sudo subscription-manager repos --enable=rhel-7-server-rpms
15 | sudo yum install --yes memcached
16 | fi
17 |
18 | # By default it is started at 127.0.0.1:11211
19 | # If you want to change the port and other settings edit /etc/sysconfig/memcached (Red Hat) or /etc/memcached.conf (Ubuntu)
20 | # Note: it should NOT be accessible from outside
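21 |
22 | # Optional check (assumes netcat is installed) - prints server stats if Memcached is up:
23 | # echo stats | nc -q 1 127.0.0.1 11211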
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/install_mongo.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # This script installs MongoDB on your machine. This will require editing, depending on where you want to install
4 | # MongoDB, or if have already installed MongoDB.
5 |
6 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
7 | source $SCRIPT_FOLDER/set_tcapy_env_vars.sh
8 |
9 | if [ $DISTRO == "ubuntu" ]; then
10 | # Ubuntu installation
11 | sudo rm /etc/apt/sources.list.d/mongodb*.list
12 |
13 | sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 4B7C549A058F8B6B
14 |
15 | # For Mongo 3.6
16 | # echo "deb [ arch=amd64,arm64 ] http://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/3.6 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-3.6.list
17 |
18 | # For Mongo 4.2
19 | echo "deb [arch=amd64] http://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.2 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-4.2.list
20 |
21 | sudo apt-get update
22 | sudo apt-get install --yes mongodb-org mongodb-org-tools
23 |
24 | elif [ $DISTRO == "redhat" ]; then
25 | # Red Hat installation
26 | # Update repo file (assumes MongoDB 3.6) and then install
27 |
28 | # For Mongo 3.6
29 |     sudo cp $TCAPY_CUEMACRO/batch_scripts/linux/installation/mongodb-org-3.6.repo /etc/yum.repos.d/mongodb-org-3.6.repo
30 |
31 | # For Mongo 4.2
32 |     # sudo cp $TCAPY_CUEMACRO/batch_scripts/linux/installation/mongodb-org-4.2.repo /etc/yum.repos.d/mongodb-org-4.2.repo
33 |
34 | sudo yum install --yes mongodb-org
35 | fi
36 |
37 | # Create data folder and make MongoDB the owner
38 | sudo mkdir -p /data/db
39 | sudo chown -R mongodb:mongodb /data/db
40 | sudo chmod -R a+rw /data/db
41 |
42 | # Make sure to edit mongo.conf to your tcapy log folder location!
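43 |
44 | # Optional check: start mongod and ping it from the mongo shell
45 | # sudo service mongod start
46 | # mongo --eval 'db.runCommand({ ping: 1 })'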
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/install_mysql.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Install MySQL 8 (Ubuntu or Red Hat)
4 |
5 | # Based on instructions from
6 | # https://dev.mysql.com/doc/refman/8.0/en/linux-installation-yum-repo.html
7 |
8 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
9 | source $SCRIPT_FOLDER/set_tcapy_env_vars.sh
10 |
11 | if [ $DISTRO == "ubuntu" ]; then
12 | # mkdir -p /tmp
13 | # cd /tmp
14 | # sudo curl -OL https://dev.mysql.com/get/mysql-apt-config_0.8.13-1_all.deb
15 | # yes | sudo dpkg -i mysql-apt-config*
16 | # sudo rm -f mysql-apt-config*
17 | # sudo apt update
18 | # sudo apt install --yes mysql-server
19 | sudo apt-get install --yes mysql-server
20 | elif [ $DISTRO == "redhat" ]; then
21 | # update repo file (assumes MySQL8)
22 | sudo cp $SCRIPT_FOLDER/mysql80.repo /etc/yum.repos.d/mysql80.repo
23 |
24 | # sudo yum-config-manager --disable mysql57-community
25 | # sudo yum-config-manager --enable mysql80-community
26 |
27 | # sudo yum module disable mysql
28 |
29 | sudo yum install --yes mysql-community-server
30 | fi
31 |
32 | sudo service mysql start
33 |
34 | # Will set various parameters like passwords
35 | sudo mysql_secure_installation
36 |
37 |
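38 | # Optional check that the server is up and accepting logins:
39 | # mysql -u root -p -e "SELECT VERSION();"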
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/install_nginx.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Install nginx web server (for tcapy it is assumed to be the default web server, instead of Apache)
4 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
5 | source $SCRIPT_FOLDER/set_tcapy_env_vars.sh
6 |
7 | if [ $DISTRO == "ubuntu" ]; then
8 | sudo apt-get install --yes nginx
9 |
10 | elif [ $DISTRO == "redhat" ]; then
11 | # Update repo file
12 |     sudo cp $SCRIPT_FOLDER/nginx.repo /etc/yum.repos.d/nginx.repo
13 |
14 | # Now install nginx
15 |     sudo yum install --yes nginx-1.12.1
16 | fi
17 |
18 |
19 |
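20 | # Optional check: start nginx and confirm it serves the default page
21 | # sudo service nginx start
22 | # curl -I http://localhost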
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/install_pdf.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # orca is an additional plotly application for converting Plotly charts to binary PNG images (so can be inlined on webpages)
4 | # it is only necessary to install if you wish to generate PDF reports (HTML reports do not need this) or want
5 | # to allow Plotly to create PNG
6 | #
7 | # If you install with conda you don't need to do this, you can just do something like...
8 | # conda install -c plotly plotly-orca=1.2.1 --yes
9 |
10 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
11 | source $SCRIPT_FOLDER/set_tcapy_env_vars.sh
12 |
13 | # Need to install xvfb server and also wkhtmltopdf (orca will be run inside this too)
14 | if [ $DISTRO == "ubuntu" ]; then
15 | sudo apt-get update
16 | sudo apt-get install --yes xvfb libfontconfig wkhtmltopdf
17 |
18 | elif [ $DISTRO == "redhat" ]; then
19 |     # Xvfb isn't usually in the Red Hat repo, but can be downloaded manually from CentOS (a compatible Linux distribution)
20 | wget http://vault.centos.org/6.2/os/x86_64/Packages/xorg-x11-server-Xvfb-1.10.4-6.el6.x86_64.rpm
21 | sudo yum --yes localinstall xorg-x11-server-Xvfb-1.10.4-6.el6.x86_64.rpm
22 |
23 | # Remove the downloaded file
24 |     rm xorg-x11-server-Xvfb-1.10.4-6.el6.x86_64.rpm*
25 |
26 |     # wkhtmltopdf is a command line application for converting HTML to PDF, which is required by orca (and pdfkit)
27 | cd /tmp
28 |
29 |     rm -f wkhtmltox-0.12.4_linux-generic-amd64.tar.xz
30 |
31 | wget https://github.com/wkhtmltopdf/wkhtmltopdf/releases/download/0.12.4/wkhtmltox-0.12.4_linux-generic-amd64.tar.xz
32 | sudo tar -xvf wkhtmltox-0.12.4_linux-generic-amd64.tar.xz
33 | sudo cp wkhtmltox/bin/wkhtmltopdf /usr/bin/
34 | fi
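35 |
36 | # Optional smoke test for wkhtmltopdf (renders a web page to PDF):
37 | # wkhtmltopdf https://example.com /tmp/wkhtmltopdf_test.pdf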
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/install_pip_python_packages.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Installs all the Python packages needed for tcapy; it also installs some additional packages for Jupyter, so you
4 | # can interact with tcapy from Jupyter (strictly speaking, you could skip these if you don't wish to run Jupyter on
5 | # top of tcapy)
6 |
7 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
8 | source $SCRIPT_FOLDER/set_tcapy_env_vars.sh
9 |
10 | # Set Python environment
11 | source $SCRIPT_FOLDER/activate_python_environment.sh
12 |
13 | echo 'Installing Python packages...'
14 |
15 | if [ $TCAPY_PYTHON_ENV_TYPE == "virtualenv" ]; then
16 |
17 | # Install everything by pip
18 | pip install \
19 | setuptools-git==1.2 cython arctic==1.79.4 sqlalchemy==1.3.17 redis==3.3.7 \
20 | pandas==1.0.5 numpy scipy statsmodels==0.11.1 blosc==1.8.3 pyarrow==2.0.0 \
21 | pathos==0.2.1 multiprocess==0.70.9 fastparquet==0.5.0 \
22 | flask-restplus==0.13.0 gunicorn==19.9.0 \
23 | beautifulsoup4==4.8.0 pdfkit==0.6.1 psutil==5.6.6 \
24 | matplotlib==3.3.1 \
25 | boto3==1.5.11 \
26 | pyodbc==4.0.23 \
27 | pytest==5.4.3 pytest-cov==2.5.1 \
28 | mysql-connector-python==8.0.19 \
29 | IPython==7.14.0 chartpy==0.1.8 findatapy==0.1.19 dash-auth==1.3.2 cufflinks==0.17.3 plotly==4.14.3 kaleido \
30 |        dash==1.20.0 dash-html-components==1.1.3 dash-core-components==1.16.0 dash-table==4.11.3 jupyter-dash==0.4.0 chart_studio==1.1.0 dtale \
31 | dash-bootstrap-components==0.12.0 \
32 | qpython==2.0.0 influxdb==5.2.3 \
33 | Flask-Session==0.3.1 \
34 | celery==5.0.5 pytest-tap kombu python-memcached==1.59 numba==0.48.0 vispy==0.6.4 jinja2==2.11.2 \
35 | jupyterlab jupyter_contrib_nbextensions jupyter_nbextensions_configurator RISE bqplot WeasyPrint==51 \
36 | dask==2.14.0 distributed==2.14.0 cloudpickle==1.3.0 python-snappy==0.5.4 bokeh==2.0.1 msgpack==1.0.0 pystore==0.1.22 fsspec==0.3.3 eikon==1.1.2 vaex
37 |
38 | # Can't install orca with pip (has to be done manually or via conda)
39 | sudo apt-get install nodejs npm
40 |
41 | elif [ $TCAPY_PYTHON_ENV_TYPE == "conda" ] && [ $CONDA_FROM_YAML == 0 ]; then
42 |
43 | # Install conda forge packages
44 | conda install -c conda-forge \
45 | setuptools-git cython sqlalchemy redis-py python=3.7 \
46 | pandas=1.0.5 numpy scipy statsmodels python-blosc \
47 | pathos multiprocess fastparquet \
48 | flask-restplus gunicorn \
49 | beautifulsoup4 python-pdfkit psutil \
50 | matplotlib \
51 | boto3 \
52 | pyodbc \
53 | pytest pytest-cov \
54 | numba pyarrow=2.0.0 vispy jinja2 \
55 | jupyterlab jupyter_contrib_nbextensions jupyter_nbextensions_configurator nodejs rise bqplot \
56 | dask distributed cloudpickle python-snappy bokeh msgpack-python vaex --yes
57 |
58 |     # Install charting libraries,
59 |     # Flask-Session for recording session variables,
60 |     # and kombu etc. to allow Celery to use Redis
61 | pip install arctic==1.79.4 mysql-connector-python==8.0.19 chartpy==0.1.8 findatapy==0.1.19 dash-auth==1.3.2 cufflinks==0.17.3 plotly==4.14.3 kaleido \
62 | dash==1.20.0 dash-html-components==1.1.3 dash-core-components==1.16.0 dash-table==4.11.3 jupyter-dash==0.4.0 chart_studio==1.1.0 \
63 | dash-bootstrap-components==0.12.0 \
64 | dtale==1.8.1 \
65 | qpython==2.0.0 influxdb==5.2.3 \
66 | Flask-Session==0.3.1 \
67 | celery==5.0.5 pytest-tap kombu python-memcached==1.59 WeasyPrint==51 pystore==0.1.22 fsspec==0.3.3 eikon==1.1.2
68 | fi
69 |
70 | # Hack for vaex
71 | pip uninstall -y progressbar2
72 | pip install progressbar2
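73 |
74 | # Sanity check (optional): confirm the core packages import cleanly
75 | # python -c "import pandas, numpy, plotly, dash, celery; print('tcapy Python deps OK')"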
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/install_pycharm.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Installs Pycharm if you want to edit the Python code of tcapy
4 |
5 | cd /home/$USER/.
6 | sudo wget https://download-cf.jetbrains.com/python/pycharm-professional-2017.2.4.tar.gz
7 | tar -xvzf pycharm-professional-2017.2.4.tar.gz -C ~
8 |
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/install_python_tools_apache.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # This script will install a number of different applications
4 | # - Python development tools
5 | # - gcc compiler
6 | # - tofrodos useful utility for converting Windows to Linux text files
7 | # - openssl
8 | # - Apache web server
9 |
10 | # Update repo
11 | # Install EPEL repo access (required for downloading certain packages like python-pip)
12 | # Install python-pip (to install Python packages), python-devel (needed for Python libraries) and git (source control)
13 | # Install GCC (can be required to compile some Python packages & dependencies)
14 | # Install converter between Windows and Linux files (converts line endings)
15 | # Install openssl-lib (for accessing https proxy for some market data providers, like NCFX)
16 | # Install Apache web server
17 | # For Red Hat/Amazon Linux for an Apache module
18 | # Install snappy compression for PyStore
19 |
20 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
21 | source $SCRIPT_FOLDER/set_tcapy_env_vars.sh
22 |
23 | if [ $DISTRO == "ubuntu" ]; then
24 | sudo apt-get update
25 | sudo apt-get install --yes \
26 | python-setuptools python-dev python-pip git \
27 | gcc g++ wget make \
28 | tofrodos \
29 | apache2 apache2-utils ssl-cert openssl liblasso3 libapache2-mod-wsgi \
30 | libsnappy-dev
31 |
32 | elif [ $DISTRO == "redhat" ]; then
33 | # For Red Hat force it to enable subscription (once it has been paid) - otherwise can't access mod_ssl
34 | sudo subscription-manager attach --auto
35 | sudo subscription-manager repos --enable=rhel-7-server-rpms
36 |
37 | sudo yum update
38 | sudo yum -y install \
39 | https://dl.fedoraproject.org/pub/epel/epel-release-latest-$(rpm -E '%{rhel}').noarch.rpm \
40 | python-setuptools python-devel python-pip git \
41 | gcc gcc-c++ wget make \
42 | tofrodos \
43 | openssl-libs \
44 | httpd httpd-tools openssl mod_auth_mellon mod_wsgi \
45 | mod_ssl \
46 | snappy-devel
47 | fi
48 |
49 |
50 | # for pycurl (NOTE: you might need to change "libcrypto.so.1.0.1e" to whatever version is installed on your machine)
51 | # pycurl won't start if libcrypto.so.1.0.0 not present, so create a symbolic link
52 | # sudo ln -sf /usr/lib64/libcrypto.so.1.0.1e /usr/lib64/libcrypto.so.1.0.0
53 | # sudo ln -sf /usr/lib64/libssl.so.1.0.1e /usr/lib64/libssl.so.1.0.0
54 |
55 | # enable proxy modules to be able to run gunicorn through apache
56 | # sudo a2enmod proxy proxy_ajp proxy_http rewrite deflate headers proxy_balancer proxy_connect proxy_html
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/install_rabbitmq.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Installs the RabbitMQ messaging broker (needs Erlang as a prerequisite) - note this is optional
4 | # By default tcapy uses memcached as Celery results backend and Redis as a message broker, which are easier to manage
5 |
6 | sudo yum install --yes erlang
7 | sudo yum install --yes socat
8 | sudo rpm --import https://www.rabbitmq.com/rabbitmq-signing-key-public.asc
9 | sudo rpm -Uvh https://www.rabbitmq.com/releases/rabbitmq-server/v3.6.9/rabbitmq-server-3.6.9-1.el6.noarch.rpm
10 | sudo rabbitmq-plugins enable rabbitmq_management
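11 |
12 | # Optional check: confirm the broker is running
13 | # sudo rabbitmqctl status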
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/install_redis.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Installs Redis, which is used in many places in tcapy
4 | # - to cache market and trade data
5 | # - manage communication between tcapy app and Celery as a message broker
6 | # - it is recommended to install latest versions of Redis, which support features like UNLINK
7 | #
8 | # We can either install from Chris Lea's repo or compile from source (downloading from the Redis website); both give the
9 | # latest versions of Redis
10 | #
11 | # Generally, the standard Linux repos tend to have older versions of Redis, which will work, but have fewer features
12 | #
13 | # Need GCC to compile Redis - ie. wget make gcc, these will be installed by install_python_tools_apache.sh
14 |
15 | # Reference from http://www.nixtechnix.com/how-to-install-redis-4-0-on-centos-6-and-amazon-linux/
16 | # also see https://redis.io/topics/quickstart
17 |
18 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
19 | source $SCRIPT_FOLDER/set_tcapy_env_vars.sh
20 |
21 | sudo adduser --system --group --no-create-home redis
22 |
23 | if [ $DISTRO == "ubuntu" ] && [ $COMPILE_REDIS_FROM_SOURCE == 0 ]; then
24 | # Use Chris Lea's repo which has newer versions of Redis
25 | sudo add-apt-repository -y ppa:chris-lea/redis-server
26 | sudo apt-get update
27 | sudo apt -y install redis-server
28 | else
29 | # Create temporary folder for redis
30 | cd /home/$USER/
31 |
32 | # Remove any previous redis temporary file installations
33 | rm -rf /home/$USER/redis-stable
34 | rm -rf /home/$USER/redis-stable.tar.gz
35 |
36 | # Download latest stable version and unzip
37 | wget -c http://download.redis.io/redis-stable.tar.gz
38 | tar -xvzf redis-stable.tar.gz
39 |
40 | # Now build redis from the source code
41 | cd redis-stable
42 | make
43 | make test
44 | sudo make install
45 |
46 | sudo cp -f src/redis-server /usr/bin/
47 | sudo cp -f src/redis-cli /usr/bin/
48 |
49 | cd utils
50 |
51 | # Finally
52 | # sudo ./install_server.sh
53 | echo -n | sudo ./install_server.sh
54 | fi
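55 |
56 | # Optional check (not in the original script): the server should reply PONG if it is up
57 | # redis-cli ping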
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/install_sql_driver.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Install Microsoft SQL Server driver on Linux (only necessary if your trade/order data is stored on a Microsoft
4 | # SQL Server
5 |
6 | # See https://docs.microsoft.com/en-us/sql/connect/odbc/linux-mac/installing-the-microsoft-odbc-driver-for-sql-server?view=sql-server-ver15#ubuntu17
7 |
8 | # Run this script with sudo.. eg sudo ./install_sql_driver.sh
9 | # adapted from https://blogs.msdn.microsoft.com/sqlnativeclient/2017/06/30/servicing-update-for-odbc-driver-13-1-for-linux-and-macos-released/
10 |
11 | # Also see https://serverfault.com/questions/838166/installing-odbc-driver-13-for-mssql-server-in-amazon-linux-on-ec2-instance
12 | # for alternative way of installing on Amazon Linux
13 |
14 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
15 | source $SCRIPT_FOLDER/set_tcapy_env_vars.sh
16 |
17 | if [ $DISTRO == "ubuntu" ]; then
18 | # sudo su
19 | curl https://packages.microsoft.com/keys/microsoft.asc | apt-key add -
20 |
21 | # Download appropriate package for the OS version
22 | # Choose only ONE of the following, corresponding to your OS version
23 |
24 | # Ubuntu 16.04
25 | # curl https://packages.microsoft.com/config/ubuntu/16.04/prod.list > /etc/apt/sources.list.d/mssql-release.list
26 |
27 | # Ubuntu 18.04
28 | curl https://packages.microsoft.com/config/ubuntu/18.04/prod.list > /etc/apt/sources.list.d/mssql-release.list
29 |
30 | # Ubuntu 19.10
31 | # curl https://packages.microsoft.com/config/ubuntu/19.10/prod.list > /etc/apt/sources.list.d/mssql-release.list
32 |
33 | # exit
34 | sudo apt-get update
35 | sudo apt-get remove msodbcsql mssql-tools
36 | sudo ACCEPT_EULA=Y apt-get --yes install msodbcsql17 #=13.0.1.0-1
37 |
38 | # Optional: for bcp and sqlcmd
39 | sudo ACCEPT_EULA=Y apt-get --yes install mssql-tools #=14.0.2.0-1
40 | echo 'export PATH="$PATH:/opt/mssql-tools/bin"' >> ~/.bash_profile
41 | echo 'export PATH="$PATH:/opt/mssql-tools/bin"' >> ~/.bashrc
42 | source ~/.bashrc
43 |
44 | # Optional: for unixODBC development headers
45 | sudo apt-get --yes install unixodbc-dev
46 |
47 | elif [ $DISTRO == "redhat" ]; then
48 | # sudo su
49 |
50 | # Download appropriate package for the OS version
51 | # Choose only ONE of the following, corresponding to your OS version
52 |
53 | # RedHat Enterprise Server 6
54 | # curl https://packages.microsoft.com/config/rhel/6/prod.repo > /etc/yum.repos.d/mssql-release.repo
55 |
56 | # RedHat Enterprise Server 7
57 | curl https://packages.microsoft.com/config/rhel/7/prod.repo > /etc/yum.repos.d/mssql-release.repo
58 |
59 | # RedHat Enterprise Server 8 and Oracle Linux 8
60 | # curl https://packages.microsoft.com/config/rhel/8/prod.repo > /etc/yum.repos.d/mssql-release.repo
61 |
62 | sudo yum remove unixODBC-utf16 unixODBC-utf16-devel msodbcsql # To avoid conflicts
63 | sudo ACCEPT_EULA=Y yum install --yes msodbcsql-13.0.1.0-1
64 |
65 | # Optional: for bcp and sqlcmd
66 | # sudo ACCEPT_EULA=Y yum install --yes mssql-tools
67 | # echo 'export PATH="$PATH:/opt/mssql-tools/bin"' >> ~/.bash_profile
68 | # echo 'export PATH="$PATH:/opt/mssql-tools/bin"' >> ~/.bashrc
69 | source ~/.bashrc
70 |
71 | # Optional: for unixODBC development headers
72 | sudo yum install unixODBC-devel
73 |
74 | elif [ $DISTRO == "redhat-old" ]; then
75 | # Install MSSQL driver
76 | sudo curl https://packages.microsoft.com/config/rhel/7/prod.repo > /etc/yum.repos.d/mssql-release.repo # (for Redhat)
77 | # sudo curl https://packages.microsoft.com/config/rhel/7/prod.repo | sudo tee /etc/yum.repos.d/msprod.repo # (for Amazon Linux)
78 |
79 | sudo yum remove unixODBC-utf16 unixODBC-utf16-devel # To avoid conflicts
80 | # sudo ACCEPT_EULA=Y yum install msodbcsql-13.1.9.0-1 mssql-tools-14.0.6.0-1 unixODBC-devel
81 | sudo ACCEPT_EULA=Y yum install msodbcsql mssql-tools unixODBC-devel
82 | fi
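83 |
84 | # Sketch (illustrative; odbcinst ships with unixODBC): list the ODBC drivers now registered in /etc/odbcinst.ini
85 | odbcinst -q -d || echo "No ODBC drivers registered yet"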
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/install_tcapy_on_apache.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Installs tcapy app on Apache (using WSGI)
4 |
5 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
6 | source $SCRIPT_FOLDER/set_tcapy_env_vars.sh
7 |
8 | site_name="tcapy"
9 | site_name_conf="tcapy.conf"
10 | # site_name_apache_conf="tcapy_apache.conf"
11 | site_name_apache_conf="tcapy_apache.conf"
12 | site_folder="$TCAPY_CUEMACRO/tcapy/conf"
13 |
14 | sudo mkdir -p /etc/httpd/sites-available
15 | sudo mkdir -p /etc/httpd/sites-enabled
16 | sudo chmod a+x $site_folder/$site_name_apache_conf
17 | sudo cp $site_folder/$site_name_apache_conf /etc/httpd/sites-available/$site_name_conf
18 | sudo cp $site_folder/$site_name_apache_conf /etc/httpd/sites-enabled/$site_name_conf
19 | sudo cp $site_folder/$site_name_apache_conf /etc/httpd/conf.d/$site_name_conf
20 |
21 | # on Red Hat this file doesn't usually exist
22 | sudo rm -f /etc/httpd/sites-enabled/000-default.conf
23 |
24 | # need to link Python script to web server
25 | sudo mkdir /var/www/$site_name
26 | sudo chown $TCAPY_USER /var/www/$site_name
27 | sudo cp $site_folder/"$site_name.wsgi" /var/www/$site_name
28 | cd /var/www/$site_name
29 |
30 | # allows reading of files outside of Apache's folder
31 | sudo setenforce 0
32 |
33 | sudo chmod -R o+rx $site_folder
34 | sudo chmod a+xr /var/www/$site_name/"$site_name.wsgi"
35 | sudo chmod -R a+r /var/log/httpd
36 | # sudo ln /var/log/httpd/error_log $site_folder/log/error_log
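37 |
38 | # Sketch (illustrative): validate the Apache config and restart to pick up the new site
39 | sudo apachectl configtest && sudo apachectl restart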
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/install_tcapy_on_apache_gunicorn.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Installs tcapy app on Apache (using gunicorn to run the app)
4 |
5 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
6 | source $SCRIPT_FOLDER/set_tcapy_env_vars.sh
7 |
8 | # for running Flask/Dash app in Apache but redirected via Gunicorn (rather than using mod_wsgi, which tends to be slower)
9 | site_name="tcapy"
10 | site_name_conf="tcapy.conf"
11 | site_name_apache_conf="tcapy_apache_gunicorn.conf"
12 | site_folder="$TCAPY_CUEMACRO/tcapy/conf"
13 |
14 | sudo mkdir -p /etc/httpd/sites-available
15 | sudo mkdir -p /etc/httpd/sites-enabled
16 | sudo chmod a+x $site_folder/$site_name_apache_conf
17 | sudo cp $site_folder/$site_name_apache_conf /etc/httpd/sites-available/$site_name_conf
18 | sudo cp $site_folder/$site_name_apache_conf /etc/httpd/sites-enabled/$site_name_conf
19 | sudo cp $site_folder/$site_name_apache_conf /etc/httpd/conf.d/$site_name_conf
20 |
21 | # on Red Hat this file doesn't usually exist
22 | sudo rm -f /etc/httpd/sites-enabled/000-default.conf
23 |
24 | # allows reading of files outside of Apache's folder
25 | sudo setenforce 0
26 |
27 | sudo chmod -R o+rx $site_folder
28 | sudo chmod -R a+r /var/log/httpd
29 | # sudo ln /var/log/httpd/error_log $site_folder/log/error_log
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/install_tcapy_on_nginx_gunicorn.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Installs tcapy app on nginx web server, using gunicorn to run the Python app, generally much easier to use
4 | # than any of the Apache combinations
5 |
6 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
7 | source $SCRIPT_FOLDER/set_tcapy_env_vars.sh
8 |
9 | # for running Flask/Dash app in nginx but redirected via Gunicorn (rather than using mod_wsgi, which tends to be slower)
10 | site_name="tcapy"
11 | site_name_conf="tcapy.conf"
12 | site_name_nginx_conf="tcapy_nginx_gunicorn.conf"
13 | site_folder="$TCAPY_CUEMACRO/tcapy/conf"
14 |
15 | sudo mkdir -p /etc/nginx/sites-available
16 | sudo mkdir -p /etc/nginx/sites-enabled
17 | sudo chmod a+x $site_folder/$site_name_nginx_conf
18 | sudo cp $site_folder/$site_name_nginx_conf /etc/nginx/sites-available/$site_name_conf
19 | sudo cp $site_folder/$site_name_nginx_conf /etc/nginx/sites-enabled/$site_name_conf
20 | sudo cp $site_folder/$site_name_nginx_conf /etc/nginx/conf.d/$site_name_conf
21 |
22 | # on Red Hat these files don't usually exist
23 | sudo rm -f /etc/nginx/sites-enabled/000-default.conf
24 | sudo rm -f /etc/nginx/conf.d/000-default.conf
25 | sudo rm -f /etc/nginx/conf.d/default.conf
26 |
27 | # Allows reading of files outside of the nginx folder (just for Red Hat/CentOS)
28 | if [ $DISTRO == "redhat" ]; then
29 | sudo setenforce 0
30 | fi
31 |
32 | sudo chmod -R o+rx $site_folder
33 | # sudo chmod -R a+r /var/log/httpd
34 | # sudo ln /var/log/httpd/error_log $site_folder/log/error_log
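35 |
36 | # Sketch (illustrative): validate the nginx config and reload it to pick up the new site
37 | sudo nginx -t && sudo service nginx reload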
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/install_virtual_env.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # This file will create a virtual environment for Python3 which can be specifically used with tcapy, so it does not
4 | # impact other Python applications on the server
5 |
6 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
7 | source $SCRIPT_FOLDER/set_tcapy_env_vars.sh
8 |
9 | if [ $TCAPY_PYTHON_ENV_TYPE == "virtualenv" ]; then
10 | if [ $TCAPY_PYTHON_VERSION == 3 ]; then
11 | echo 'Creating Python3 virtualenv...'
12 | sudo pip3 install virtualenv
13 | virtualenv -p /usr/bin/python3 $TCAPY_PYTHON_ENV
14 | fi
15 |
16 | source $TCAPY_PYTHON_ENV_BIN/activate
17 | elif [ $TCAPY_PYTHON_ENV_TYPE == "conda" ]; then
18 | echo 'Creating Python3 conda...'
19 | source $CONDA_ACTIVATE
20 |
21 | # Can be quite slow to update conda (also latest versions can have issues!)
22 | conda update -n base conda --yes
23 | conda remove --name $TCAPY_PYTHON_ENV --all --yes
24 |
25 | if [ $CONDA_FROM_YAML == 1 ]; then
26 | source $TCAPY_CUEMACRO/batch_scripts/linux/installation/install_conda_from_env_yaml.sh
27 | source activate $TCAPY_PYTHON_ENV
28 | elif [ $CONDA_FROM_YAML == 0 ]; then
29 | # Sometimes might help to try an older version of conda - https://github.com/conda/conda/issues/9004
30 | # conda install conda=4.6.14
31 | conda create -n $TCAPY_PYTHON_ENV python=3.7 --yes
32 | fi
33 |
34 | source activate $TCAPY_PYTHON_ENV
35 | fi
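36 |
37 | # Sketch (illustrative): confirm which interpreter the activated environment will use
38 | which python && python --version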
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/install_weasyprint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # This installs required dependencies for WeasyPrint
4 |
5 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
6 | source $SCRIPT_FOLDER/set_tcapy_env_vars.sh
7 |
8 | if [ $DISTRO == "ubuntu" ]; then
9 | sudo apt-get update
10 | sudo apt-get install --yes \
11 | build-essential libcairo2 libpango-1.0-0 libpangocairo-1.0-0 libgdk-pixbuf2.0-0 libffi-dev shared-mime-info
12 |
13 | elif [ $DISTRO == "redhat" ]; then
14 | # For Red Hat force it to enable subscription (once it has been paid) - otherwise can't access mod_ssl
15 | sudo subscription-manager attach --auto
16 | sudo subscription-manager repos --enable=rhel-7-server-rpms
17 |
18 | sudo yum update
19 | sudo yum install --yes \
20 | redhat-rpm-config libffi-devel cairo pango gdk-pixbuf2
21 | fi
22 |
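23 | # Sketch (illustrative, commented out because the WeasyPrint Python package is installed separately
24 | # by install_pip_python_packages.sh): check it can load the system libraries above
25 | # python -c "import weasyprint; print(weasyprint.__version__)"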
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/limits.conf:
--------------------------------------------------------------------------------
1 | # /etc/security/limits.conf
2 | # COPY THIS FILE to /etc/security/limits.conf
3 | #
4 | #This file sets the resource limits for the users logged in via PAM.
5 | #It does not affect resource limits of the system services.
6 | #
7 | #Also note that configuration files in /etc/security/limits.d directory,
8 | #which are read in alphabetical order, override the settings in this
9 | #file in case the domain is the same or more specific.
10 | #That means for example that setting a limit for wildcard domain here
11 | #can be overriden with a wildcard setting in a config file in the
12 | #subdirectory, but a user specific setting here can be overriden only
13 | #with a user specific setting in the subdirectory.
14 | #
15 | #Each line describes a limit for a user in the form:
16 | #
17 | #<domain>        <type>  <item>  <value>
18 | #
19 | #Where:
20 | #<domain> can be:
21 | # - a user name
22 | # - a group name, with @group syntax
23 | # - the wildcard *, for default entry
24 | # - the wildcard %, can be also used with %group syntax,
25 | # for maxlogin limit
26 | #
27 | #<type> can have the two values:
28 | # - "soft" for enforcing the soft limits
29 | # - "hard" for enforcing hard limits
30 | #
31 | #<item> can be one of the following:
32 | # - core - limits the core file size (KB)
33 | # - data - max data size (KB)
34 | # - fsize - maximum filesize (KB)
35 | # - memlock - max locked-in-memory address space (KB)
36 | # - nofile - max number of open file descriptors
37 | # - rss - max resident set size (KB)
38 | # - stack - max stack size (KB)
39 | # - cpu - max CPU time (MIN)
40 | # - nproc - max number of processes
41 | # - as - address space limit (KB)
42 | # - maxlogins - max number of logins for this user
43 | # - maxsyslogins - max number of logins on the system
44 | # - priority - the priority to run user process with
45 | # - locks - max number of file locks the user can hold
46 | # - sigpending - max number of pending signals
47 | # - msgqueue - max memory used by POSIX message queues (bytes)
48 | # - nice - max nice priority allowed to raise to values: [-20, 19]
49 | # - rtprio - max realtime priority
50 | #
51 | #<domain>      <type>  <item>         <value>
52 | #
53 |
54 | #* soft core 0
55 | #* hard rss 10000
56 | #@student hard nproc 20
57 | #@faculty soft nproc 20
58 | #@faculty hard nproc 50
59 | #ftp hard nproc 0
60 | #@student - maxlogins 4
61 |
62 | redhat hard nofile 60000
63 | redhat soft nofile 60000
64 | root hard nofile 60000
65 | root soft nofile 60000
66 |
67 | # End of file
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/mongodb-org-3.6.repo:
--------------------------------------------------------------------------------
1 | [mongodb-org-3.6]
2 | name=MongoDB Repository
3 | baseurl=https://repo.mongodb.org/yum/redhat/$releasever/mongodb-org/3.6/x86_64/
4 | gpgcheck=1
5 | enabled=1
6 | gpgkey=https://www.mongodb.org/static/pgp/server-3.6.asc
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/mongodb-org-4.2.repo:
--------------------------------------------------------------------------------
1 | [mongodb-org-4.2]
2 | name=MongoDB Repository
3 | baseurl=https://repo.mongodb.org/yum/redhat/$releasever/mongodb-org/4.2/x86_64/
4 | gpgcheck=1
5 | enabled=1
6 | gpgkey=https://www.mongodb.org/static/pgp/server-4.2.asc
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/mysql80.repo:
--------------------------------------------------------------------------------
1 | # Enable to use MySQL 8.0
2 | [mysql80-community]
3 | name=MySQL 8.0 Community Server
4 | baseurl=http://repo.mysql.com/yum/mysql-8.0-community/el/6/$basearch/
5 | enabled=1
6 | gpgcheck=1
7 | gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-mysql
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/nginx.repo:
--------------------------------------------------------------------------------
1 | [nginx]
2 | name=nginx repo
3 | baseurl=http://nginx.org/packages/rhel/7/$basearch/
4 | gpgcheck=0
5 | enabled=1
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/patch_celery_python_37.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Celery 4.2 uses "async" for some script names, but in Python 3.7, "async" is a reserved keyword
4 | # This script manually patches the Celery code changing all instances of "async" to "asynchronous"
5 |
6 | # This should be fixed in Celery 5.0 (https://github.com/celery/celery/issues/4500) - in general it is recommended
7 | # to stick to Python 3.6 for Celery 4.2, till Celery 5.0 is released
8 | # Celery 4.3 supports 3.7
9 |
10 | # For older versions of Python, we do not need to run this script
11 |
12 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
13 | source $SCRIPT_FOLDER/set_tcapy_env_vars.sh
14 |
15 | TARGET=$TCAPY_PYTHON_ENV/site-packages/celery/backends
16 |
17 | cd $TARGET
18 |
19 | if [ -e async.py ]
20 | then
21 | mv async.py asynchronous.py
22 | sed -i 's/async/asynchronous/g' redis.py
23 | sed -i 's/async/asynchronous/g' rpc.py
24 | fi
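25 |
26 | # Sketch (illustrative): confirm the patch was applied
27 | [ -e asynchronous.py ] && echo "Celery patched: async.py renamed to asynchronous.py"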
--------------------------------------------------------------------------------
/batch_scripts/linux/installation/set_tcapy_env_vars.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # This script has various configuration flags for tcapy which need to be set
4 | # It is unlikely you'll need to change many of these (except possibly the folder where tcapy resides)
5 |
6 | echo "Setting environment variables for tcapy for current script (recommended to add these globally to /etc/environment)"
7 |
8 | ## Python environment settings #########################################################################################
9 |
10 | # Folder where tcapy is (note: if you change this, you will need to change it in tcapy/conf/mongo.conf too)
11 | export TCAPY_CUEMACRO=/home/$USER/cuemacro/tcapy
12 |
13 | # Is the Python environment either "conda" or "virtualenv"?
14 | export TCAPY_PYTHON_ENV_TYPE="conda"
15 | # export TCAPY_PYTHON_ENV=/home/$USER/py37tca/ # virtualenv folder or conda name
16 | export TCAPY_PYTHON_ENV=py37tca # virtualenv folder or conda name
17 | export TCAPY_PYTHON_ENV_BIN=/home/$USER/$TCAPY_PYTHON_ENV/bin/
18 | export TCAPY_PYTHON_VERSION=3 # Only Python 3 is now supported
19 |
20 | export CONDA_ACTIVATE=/home/$USER/anaconda3/bin/activate
21 | export TCAPY_USER=$USER # which user to run tcapy
22 |
23 | export TCAPY_CELERY_WORKERS=14
24 |
25 | # Add Anaconda to the path (you might need to change this)
26 | # export PATH=~/anaconda3/bin:$PATH
27 |
28 | ## Database settings ###################################################################################################
29 |
30 | export START_MYSQL=1
31 | export START_MONGODB=1
32 | export START_CLICKHOUSE=1
33 |
34 | ## Web server settings #################################################################################################
35 |
36 | # Can use gunicorn with either apache or nginx (mod_wsgi can only be used with apache and is deprecated)
37 | # recommended to use gunicorn with nginx
38 | export TCAPY_PYTHON_STARTER='gunicorn' # 'gunicorn' ('mod_wsgi' is deprecated)
39 | export TCAPY_WEB_SERVER="nginx" # apache or nginx
40 |
41 | # Start other web interfaces? (we always start the default tcapy web interface server)
42 | # but we can also add other interfaces if we want
43 | export START_TCAPY_API=1 # Start Gunicorn RESTful API
44 | export START_TCAPY_BOARD=1 # Start Gunicorn tcapyboard web front-end
45 |
46 | ## Installation parameters #############################################################################################
47 |
48 | export CONDA_FROM_YAML=1 # Install the py37tca conda environment from environment_linux_py37tca.yml
49 | export COMPILE_REDIS_FROM_SOURCE=0 # Compiling from source is slower; instead we use Chris Lea's repo version of Redis
50 |
51 | ## Get Linux distribution ##############################################################################################
52 |
53 | # Determine OS distribution
54 | case "`/usr/bin/lsb_release -si`" in
55 | Ubuntu) export DISTRO="ubuntu" ;;
56 | *) export DISTRO="redhat"
57 | esac
58 |
59 |
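60 | # Sketch (illustrative, commented out because this script is sourced repeatedly): to persist a variable
61 | # globally, append it to /etc/environment, e.g.
62 | # echo "TCAPY_CUEMACRO=$TCAPY_CUEMACRO" | sudo tee -a /etc/environment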
--------------------------------------------------------------------------------
/batch_scripts/linux/kill_tcapy.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # This will kill all the databases used by tcapy and all the tcapy processes
4 | # 1) Celery - for distributed computation
5 | # 2) Gunicorn - to serve Python web app for tcapy
6 | # 3) Apache - web server
7 | # 4) Nginx - web server
8 | # 5) MongoDB - for market tick data
9 | # 6) MySQL - for trade/order data
10 | # 7) Redis - key/value store for short term caching of market/trade/order data and as a message broker for Celery
11 | # 8) Memcached - results backend for Celery
12 |
13 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
14 | source $SCRIPT_FOLDER/installation/set_tcapy_env_vars.sh
15 |
16 | # Sometimes security settings can prevent MongoDB from running
17 | sudo setenforce 0
18 |
19 | # Kill web app
20 | sudo killall celery
21 | sudo killall gunicorn
22 | sudo killall httpd
23 | sudo killall apache2
24 | sudo service nginx stop
25 | sudo service httpd stop
26 |
27 | # Note you will have to edit this if you choose to use a database other than Arctic/MongoDB
28 | # sudo service mongod stop
29 | sudo rm -f /data/db/mongod.lock
30 | sudo rm -f /tmp/mongod-27017.sock
31 | sudo killall mongod
32 |
33 | # Stop MySQL
34 | sudo service mysql stop
35 |
36 | # Kill Redis
37 | sudo service redis-server stop
38 | sudo killall redis-server
39 | sudo redis-cli -p 6379 shutdown
40 |
41 | # Kill Memcached
42 | sudo service memcached stop
43 |
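44 | # Sketch (illustrative): check that nothing survived
45 | ps -e | grep -E 'celery|gunicorn|mongod|redis-server|memcached' || echo "All tcapy processes stopped"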
--------------------------------------------------------------------------------
/batch_scripts/linux/orca.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # script for starting plotly orca server, needs to start it via xvfb-run
4 |
5 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
6 | source $SCRIPT_FOLDER/installation/set_tcapy_env_vars.sh
7 |
8 | xvfb-run -a $TCAPY_CUEMACRO/tcapy/orca-1.2.1-x86_64.AppImage "$@"
--------------------------------------------------------------------------------
/batch_scripts/linux/read_celery_log.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
4 | source $SCRIPT_FOLDER/installation/set_tcapy_env_vars.sh
5 |
6 | # Celery log
7 | tail -f $TCAPY_CUEMACRO/log/celery.log ---disable-inotify
--------------------------------------------------------------------------------
/batch_scripts/linux/read_logs_gnome_terminal.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
4 | source $SCRIPT_FOLDER/installation/set_tcapy_env_vars.sh
5 |
6 | # This file will start Gnome Terminal windows on Red Hat to read all the various logs for tcapy
7 |
8 | gnome-terminal -x sh -c 'top'
9 |
10 | # Apache log (only for when using Apache/WSGI which is deprecated)
11 | # gnome-terminal -x sh -c 'sudo tail -f /var/log/httpd/error_log'
12 |
13 | # Gunicorn log
14 | gnome-terminal -x sh -c 'sudo tail -f $TCAPY_CUEMACRO/log/linux*.log ---disable-inotify'
15 |
16 | # Celery log
17 | gnome-terminal -x sh -c 'tail -f $TCAPY_CUEMACRO/log/celery.log ---disable-inotify'
18 |
19 | # MongoDB log
20 | gnome-terminal -x sh -c 'sudo tail -f $TCAPY_CUEMACRO/log/mongo.log ---disable-inotify'
21 |
--------------------------------------------------------------------------------
/batch_scripts/linux/read_mongo_log.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
4 | source $SCRIPT_FOLDER/installation/set_tcapy_env_vars.sh
5 |
6 | # MongoDB log
7 | tail -f $TCAPY_CUEMACRO/log/mongo.log ---disable-inotify
--------------------------------------------------------------------------------
/batch_scripts/linux/render_jupyter_notebooks.sh:
--------------------------------------------------------------------------------
1 | # Note that on some instances of Linux this may not fetch the required directory (it has been tested with RedHat)
2 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
3 | source $SCRIPT_FOLDER/installation/set_tcapy_env_vars.sh
4 |
5 | echo 'Batch folder' $SCRIPT_FOLDER
6 | echo 'Cuemacro TCAPY' $TCAPY_CUEMACRO
7 |
8 | # Set Python environment
9 | source $SCRIPT_FOLDER/installation/activate_python_environment.sh
10 |
11 | cd $TCAPY_CUEMACRO/tcapy_notebooks
12 | jupyter nbconvert --to html *.ipynb
--------------------------------------------------------------------------------
/batch_scripts/linux/reset_docker_containers.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # This will kill all Docker containers and then remove volumes for some of the containers used by tcapy
4 | # This will often be necessary when switching between the test and production docker-compose files
5 |
6 | docker-compose rm -f mysql
7 | docker-compose rm -f mongo
8 | docker-compose rm -f redis
9 | docker-compose rm -f memcached
10 | docker-compose rm -f mysql
11 | docker-compose rm -f celery
12 | docker-compose rm -f gunicorn_tcapy
13 | docker-compose rm -f gunicorn_tcapyboard nginx
14 |
15 | docker-compose rm -v -f mysql
16 | docker-compose rm -v -f mongo
17 | docker-compose rm -v -f redis
18 | docker-compose rm -v -f memcached
19 | docker-compose rm -v -f mysql
20 | docker-compose rm -v -f celery
21 | docker-compose rm -v -f gunicorn_tcapy
22 | docker-compose rm -v -f gunicorn_tcapyboard
23 | docker-compose rm -v -f nginx
24 |
25 | docker ps -q | xargs -r docker kill
--------------------------------------------------------------------------------
/batch_scripts/linux/restart_celery.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
4 | source $SCRIPT_FOLDER/installation/set_tcapy_env_vars.sh
5 |
6 | # kill all Celery processes
7 | sudo killall celery
8 |
9 | # make sure permissions set properly
10 | sudo setenforce 0
11 |
12 | echo 'Set Python paths...'
13 |
14 | export PYTHONPATH=$TCAPY_CUEMACRO/:$PYTHONPATH
15 |
16 | # Make sure permissions set properly
17 | sudo setenforce 0
18 |
19 | # Set Python environment
20 | source $SCRIPT_FOLDER/installation/activate_python_environment.sh
21 |
22 | echo 'Flush Celery cache...'
23 |
24 | # Purge every message from the "celery" queue (not strictly necessary if using Redis, as we flush it below)
25 | # celery purge -f
26 |
27 | # Flush redis of everything in cache (saved dataframes and message queue)
28 | echo 'Flushing Redis cache...'
29 |
30 | redis-cli flushall
31 |
32 | echo 'Current working folder'
33 | echo $PWD
34 |
35 | echo 'About to start celery...'
36 | celery -A tcapy.conf.celery_calls worker --purge --discard --loglevel=debug -Q celery --concurrency=$TCAPY_CELERY_WORKERS -f $TCAPY_CUEMACRO/log/celery.log &
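37 |
38 | # Sketch (illustrative, commented out so it does not race the background worker start-up): confirm the worker registered
39 | # sleep 5 && celery -A tcapy.conf.celery_calls status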
--------------------------------------------------------------------------------
/batch_scripts/linux/restart_db.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Will restart the databases/key-value stores used by tcapy
4 | # 1) MongoDB - for market tick data
5 | # 2) MySQL - for trade/order data
6 | # 3) Redis - key/value store for short term caching of market/trade/order data and as a message broker for Celery
7 | # 4) Memcached - results backend for Celery
8 | #
9 | # If you use different databases for market tick data and/or trade/order data, you will need to edit this script to
10 | restart those servers. Also if you want to use RabbitMQ as a message broker/results backend you'll need to edit this script too.
11 |
12 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
13 | source $SCRIPT_FOLDER/installation/set_tcapy_env_vars.sh
14 |
15 | # Sometimes security settings can prevent MongoDB from running
16 | sudo setenforce 0
17 |
18 | # Note you will have to edit this if you choose to use a database other than Arctic/MongoDB
19 | # sudo service mongod stop
20 | sudo rm -f /data/db/mongod.lock
21 | sudo rm -f /tmp/mongod-27017.sock
22 | sudo killall mongod
23 |
24 | # Stop MySQL
25 | sudo service mysql stop
26 |
27 | # Kill Redis
28 | sudo service redis-server stop
29 | sudo killall redis-server
30 | sudo redis-cli -p 6379 shutdown
31 |
32 | # Kill Memcached
33 | sudo service memcached stop
34 |
35 | # Wait for mongod to shutdown first
36 | sleep 10s
37 |
38 | # Make sure file opened limit is very large (otherwise Mongo will rollover!)
39 | # This should have been adjusted in limits.conf
40 | # sudo sh -c "ulimit -c 100000 && exec su $LOGNAME"
41 |
42 | # source $SCRIPT_FOLDER/activate_file_limits.sh
43 |
44 | # Now start up redis, mysql and mongod in the background
45 | # sudo service mongod start
46 | if [ $START_CLICKHOUSE == 1 ]; then
47 | # Starting Clickhouse columnar database
48 | sudo service clickhouse-server start
49 | echo "Started Clickhouse"
50 | fi
51 |
52 | if [ $START_MONGODB == 1 ]; then
53 | # Starting MongoDB as root
54 | sudo mongod --config $TCAPY_CUEMACRO/tcapy/conf/mongo.conf
55 | echo "Started MongoDB"
56 | fi
57 |
58 | # Start MySQL
59 | if [ $START_MYSQL == 1 ]; then
60 | sudo service mysql restart
61 | echo "Started MySQL"
62 | fi
63 |
64 | # Start Redis and flush cache
65 | sudo redis-server $TCAPY_CUEMACRO/tcapy/conf/redis.conf --daemonize yes
66 | sudo redis-cli flushall
67 |
68 | # Start Memcached and flush cache
69 | sudo service memcached start
70 | echo flush_all > /dev/tcp/localhost/11211
71 |
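72 | # Sketch (illustrative, not in the original script): quick health checks for the restarted services
73 | redis-cli ping || echo "Redis is not responding"
74 | pgrep mongod > /dev/null && echo "mongod is running" || echo "mongod is not running"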
--------------------------------------------------------------------------------
/batch_scripts/linux/restart_influxdb.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
4 | source $SCRIPT_FOLDER/installation/set_tcapy_env_vars.sh
5 |
6 | sudo killall influxd
7 | sudo influxd -config $TCAPY_CUEMACRO/tcapy/conf/influxdb.conf
--------------------------------------------------------------------------------
/batch_scripts/linux/run_python_script.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Demonstrates how to run a Python script in the Python environment setup for tcapy (usually Anaconda)
4 |
5 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
6 | source $SCRIPT_FOLDER/installation/set_tcapy_env_vars.sh
7 |
8 | # set Python environment
9 | source $SCRIPT_FOLDER/installation/activate_python_environment.sh
10 |
11 | # Python command (add you python command here)
12 | python $TCAPY_CUEMACRO/tcapy/tcapy_scripts/gen/dump_ncfx_to_csv.py
--------------------------------------------------------------------------------
/batch_scripts/linux/start_jupyter.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Starts a Jupyter notebook, so tcapy can be accessed through that
4 |
5 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
6 | source $SCRIPT_FOLDER/installation/set_tcapy_env_vars.sh
7 |
8 | source $SCRIPT_FOLDER/installation/activate_python_environment.sh
9 |
10 | # Listen on all interfaces (restrict --ip to 127.0.0.1 to only allow local access)
11 | jupyter notebook \
12 | --notebook-dir=$TCAPY_CUEMACRO/tcapy_notebooks --ip=* --port=9999
13 |
14 | # --ip=$(hostname -i)
15 |
16 | # Alternatively have a key to access
17 | # Create your own pem and key by following https://support.microfocus.com/kb/doc.php?id=7013103
18 | # jupyter notebook \
19 | # --certfile='mycert.pem' \
20 | # --keyfile='mykey.key' --ip=* --port=9999
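21 |
22 | # Sketch (illustrative): a self-signed pem/key pair can be generated with openssl, e.g.
23 | # openssl req -x509 -nodes -days 365 -newkey rsa:2048 -keyout mykey.key -out mycert.pem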
--------------------------------------------------------------------------------
/batch_scripts/linux/start_pproxy.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # starts a proxy (mainly useful for testing)
4 | # pproxy needs Python3
5 | # SSL is usually 443 - but we choose a higher port so it can run without sudo
6 |
7 | pproxy -l http+ssl://127.0.0.1:7000 -l http://127.0.0.1:8080 --ssl server.crt,server.key --pac /autopac
--------------------------------------------------------------------------------
/batch_scripts/linux/update_chartpy_dev.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # updates chartpy from dev instance of chartpy on disk
4 |
5 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
6 | source $SCRIPT_FOLDER/installation/set_tcapy_env_vars.sh
7 |
8 | cd $TCAPY_PYTHON_ENV/lib/python3.7/site-packages/chartpy
9 | rm -rf *
10 |
11 | cd $TCAPY_CUEMACRO/chartpy/
12 | python setup.py install
--------------------------------------------------------------------------------
/batch_scripts/windows/installation/activate_python_environment.bat:
--------------------------------------------------------------------------------
1 | REM This will activate our Python environment which has been created for tcapy
2 |
3 | set SCRIPT_FOLDER=%~dp0
4 | call %SCRIPT_FOLDER%\set_tcapy_env_vars
5 |
6 | echo Activating Python environment %TCAPY_PYTHON_ENV% ... and adding tcapy to PYTHONPATH %TCAPY_CUEMACRO%
7 | set PYTHONPATH=%TCAPY_CUEMACRO%;%PYTHONPATH%
8 |
9 | REM if it's a conda environment start that
10 | if %TCAPY_PYTHON_ENV_TYPE%==conda (
11 | call %CONDA_ACTIVATE%
12 | call activate %TCAPY_PYTHON_ENV%
13 | )
14 |
15 | REM otherwise start the virtualenv
16 | if %TCAPY_PYTHON_ENV_TYPE%==virtualenv (
17 | call %TCAPY_PYTHON_ENV%\Scripts\activate
18 | )
--------------------------------------------------------------------------------
/batch_scripts/windows/installation/export_conda_environment.bat:
--------------------------------------------------------------------------------
1 | REM Creates the environment YAML file for the conda environment py37tca
2 | set SCRIPT_FOLDER=%~dp0
3 | call %SCRIPT_FOLDER%\set_tcapy_env_vars
4 |
5 | REM Set Python environment
6 | call %SCRIPT_FOLDER%\activate_python_environment
7 |
8 | echo 'Export conda environment'
9 |
10 | call conda update -n base conda --yes
11 | call conda env export > %TCAPY_CUEMACRO%\batch_scripts\windows\installation\environment_windows_py37tca.yml
--------------------------------------------------------------------------------
/batch_scripts/windows/installation/install_conda_from_env_yaml.bat:
--------------------------------------------------------------------------------
1 | REM Makes a conda environment from a premade YAML file
2 | set SCRIPT_FOLDER=%~dp0
3 | call %SCRIPT_FOLDER%\set_tcapy_env_vars
4 |
5 | REM Set Python environment
6 | call %SCRIPT_FOLDER%\activate_python_environment
7 |
8 | echo 'Create a conda environment from YAML file...'
9 |
10 | call conda activate
11 | call conda remove --name %TCAPY_PYTHON_ENV% --all --yes
12 |
13 | call conda env create -f %TCAPY_CUEMACRO%\batch_scripts\windows\installation\environment_windows_py37tca.yml
14 | call conda activate %TCAPY_PYTHON_ENV%
--------------------------------------------------------------------------------
/batch_scripts/windows/installation/install_jupyter_extensions.bat:
--------------------------------------------------------------------------------
1 | REM Installs the Jupyter extensions which might be useful if you want to use Jupyter as a front end for tcapy
2 | set SCRIPT_FOLDER=%~dp0
3 | call %SCRIPT_FOLDER%\set_tcapy_env_vars
4 |
5 | REM Set Python environment
6 | call %SCRIPT_FOLDER%\activate_python_environment
7 |
8 | echo 'Installing Jupyter extensions...'
9 |
10 | REM Jupyter and Jupyterlab extensions
11 | call jupyter contrib nbextension install --user & REM activates js on Jupyter
12 | call jupyter nbextension enable execute_time/ExecuteTime
13 | call jupyter-nbextension install rise --py --sys-prefix
14 | call jupyter nbextension enable rise --py --sys-prefix
15 | call jupyter nbextension enable toc2/main --sys-prefix
16 | call jupyter nbextension install --sys-prefix --symlink --py jupyter_dash
17 | call jupyter nbextension enable --py jupyter_dash
18 |
19 | call jupyter labextension install @jupyter-widgets/jupyterlab-manager --no-build
20 | call jupyter labextension install plotlywidget --no-build
21 | call jupyter labextension install jupyterlab-plotly --no-build
22 | call jupyter labextension install bqplot
23 | call jupyter lab build
--------------------------------------------------------------------------------
/batch_scripts/windows/installation/install_pip_python_packages.bat:
--------------------------------------------------------------------------------
1 | REM Installs all the Python packages needed for tcapy
2 | set SCRIPT_FOLDER=%~dp0
3 | call %SCRIPT_FOLDER%\set_tcapy_env_vars
4 |
5 | REM Set Python environment
6 | call %SCRIPT_FOLDER%\activate_python_environment
7 |
8 | echo 'Installing Python packages...'
9 |
10 | if %TCAPY_PYTHON_ENV_TYPE%==virtualenv (
11 |
12 | REM Install everything by pip
13 | call pip install ^
14 | setuptools-git==1.2 cython arctic==1.79.4 sqlalchemy==1.3.17 redis==3.3.7 ^
15 | pandas==1.0.5 numpy scipy statsmodels==0.11.1 blosc==1.8.3 pyarrow ^
16 | pathos==0.2.1 multiprocess==0.70.9 fastparquet==0.5.0 ^
17 | flask-restplus==0.13.0 ^
18 | beautifulsoup4==4.8.0 psutil==5.6.6 ^
19 | matplotlib==3.3.1 ^
20 | boto3==1.5.11 ^
21 | pyodbc==4.0.23 ^
22 | pytest==5.4.3 pytest-cov==2.5.1 ^
23 | mysql-connector-python==8.0.19 ^
24 | chartpy==0.1.8 findatapy==0.1.19 IPython==7.14.0 dash-auth==1.4.0 ^
25 | cufflinks==0.17.3 plotly==4.14.3 kaleido ^
26 | dash==1.20.0 dash-html-components==1.1.3 dash-core-components==1.16.0 dash-table==4.11.3 jupyter-dash==0.4.0 chart_studio==1.1.0 dtale ^
27 | dash-bootstrap-components==0.12.0 ^
28 | qpython==2.0.0 influxdb==5.2.3 ^
29 | Flask-Session==0.3.1 ^
30 | celery==5.0.5 msgpack-python pytest-tap kombu==4.6.7 python-memcached==1.59 ^
31 | numba==0.48.0 vispy==0.6.4 pdfkit==0.6.1 jinja2==2.11.2 xlwings==0.23.0 ^
32 | jupyterlab jupyter_contrib_nbextensions jupyter_nbextensions_configurator RISE bqplot WeasyPrint==51 ^
33 | dask==2.14.0 distributed==2.14.0 cloudpickle==1.3.0 python-snappy==0.5.4 bokeh==2.0.1 msgpack==1.0.0 pystore==0.1.22 ^
34 | fsspec==0.3.3 eikon==1.1.2
35 | )
36 |
37 | if %TCAPY_PYTHON_ENV_TYPE%==conda (
38 |
39 | if %CONDA_FROM_YAML%==0 (
40 |
41 | REM Install conda forge packages (removed gunicorn and python-pdfkit)
42 | call conda install -c conda-forge ^
43 | setuptools-git cython sqlalchemy redis-py ^
44 | pandas=1.0.5 numpy scipy statsmodels python-blosc ^
45 | pathos multiprocess fastparquet ^
46 | beautifulsoup4 psutil ^
47 | matplotlib ^
48 | pyodbc ^
49 | pytest pytest-cov ^
50 | numba pyarrow vispy jinja2 xlwings=0.23.0 ^
51 | jupyterlab jupyter_contrib_nbextensions jupyter_nbextensions_configurator nodejs rise bqplot ^
52 | dask distributed cloudpickle python-snappy bokeh msgpack-python --yes
53 |
54 | REM Install the remaining packages via pip (charting libraries, Flask session handling, Celery with Redis etc.)
55 | call pip install arctic==1.79.4 mysql-connector-python==8.0.19 chartpy==0.1.8 findatapy==0.1.19 dash-auth==1.3.2 cufflinks==0.17.3 plotly==4.14.3 kaleido ^
56 | dash==1.20.0 dash-html-components==1.1.3 dash-core-components==1.16.0 dash-table==4.11.3 jupyter-dash==0.4.0 chart_studio==1.1.0 dtale ^
57 | dash-bootstrap-components==0.12.0 ^
58 | qpython==2.0.0 influxdb==5.2.3 ^
59 | Flask-Session==0.3.1 ^
60 | celery==5.0.5 pytest-tap kombu python-memcached==1.59 boto3==1.5.11 pdfkit==0.6.1 WeasyPrint==51 pystore==0.1.22 ^
61 | fsspec==0.3.3 eikon==1.1.2 pandas==1.0.5
62 | )
63 | )
--------------------------------------------------------------------------------
/batch_scripts/windows/installation/install_virtual_env.bat:
--------------------------------------------------------------------------------
1 | REM This file will create a virtual environment for Python3 which can be specifically used with tcapy, so it does not
2 | REM impact other Python applications on the server
3 |
4 | set SCRIPT_FOLDER=%~dp0
5 | call %SCRIPT_FOLDER%\set_tcapy_env_vars
6 |
7 | if %TCAPY_PYTHON_ENV_TYPE%==virtualenv (
8 | echo 'Creating Python3 virtualenv...'
9 | call pip install virtualenv
10 | call virtualenv -p python %TCAPY_PYTHON_ENV%
11 |
12 | call %TCAPY_PYTHON_ENV%\Scripts\activate
13 | )
14 |
15 | if %TCAPY_PYTHON_ENV_TYPE%==conda (
16 | echo 'Creating Python3 conda...'
17 | call %CONDA_ACTIVATE%
18 |
19 | REM can be quite slow to update conda (also latest versions can have issues!)
20 | call conda update -n base conda --yes
21 | call conda remove --name %TCAPY_PYTHON_ENV% --all --yes
22 |
23 | REM setup the conda environment (and all the libraries) directly from YAML so don't need
24 | REM to install all the libraries via conda/pip later (ie. don't need to run install_pip_python_packages.bat later)
25 | if %CONDA_FROM_YAML%==1 (
26 | call %TCAPY_CUEMACRO%\batch_scripts\windows\installation\install_conda_from_env_yaml.bat
27 | ) else if %CONDA_FROM_YAML%==0 (
28 | REM try an older version of conda - https://github.com/conda/conda/issues/9004
29 | call conda create -n %TCAPY_PYTHON_ENV% python=3.7 --yes
30 | )
31 |
32 | call activate %TCAPY_PYTHON_ENV%
33 | )
--------------------------------------------------------------------------------
/batch_scripts/windows/installation/set_tcapy_env_vars.bat:
--------------------------------------------------------------------------------
1 | REM This script has various configuration flags for tcapy which need to be set
2 | REM It is unlikely you will need to change many of these, except possibly the folder where tcapy resides
3 |
4 | echo "Setting environment variables for tcapy for current script"
5 |
6 | REM Python environment settings ########################################################################################
7 |
8 | REM Folder where tcapy is (note: if you change this, you will need to change it in tcapy/conf/mongo.conf too)
9 | set TCAPY_CUEMACRO=e:\cuemacro\tcapy
10 |
11 | REM Is the Python environment either "conda" or "virtualenv"?
12 | set TCAPY_PYTHON_ENV_TYPE=conda
13 |
14 | REM virtualenv folder or conda name
15 | REM set TCAPY_PYTHON_ENV=/home/$USER/py37tca/
16 |
17 | REM Path to the Anaconda activate script
18 | set CONDA_ACTIVATE=C:\Anaconda3\Scripts\activate.bat
19 | set TCAPY_PYTHON_ENV=py37tca
20 | set TCAPY_PYTHON_ENV_BIN=%TCAPY_PYTHON_ENV%\bin\
21 |
22 | REM Installation parameters - create conda environment from YAML (1 or 0)
23 | set CONDA_FROM_YAML=0
24 |
25 | REM Only Python 3 is now supported
26 | set TCAPY_PYTHON_VERSION=3
27 |
28 | REM for using Excel/xlwings front end for tcapy
29 | set EXCEL_PATH="C:\Program Files\Microsoft Office\root\Office16\EXCEL.EXE"
--------------------------------------------------------------------------------
/batch_scripts/windows/restart_wsl.bat:
--------------------------------------------------------------------------------
1 | REM Restarts Windows Subsystem for Linux (needs to be run as administrator)
2 | REM can be necessary to restart sometimes to kill processes
3 |
4 | call net stop LxssManager
5 | call net start LxssManager
--------------------------------------------------------------------------------
/batch_scripts/windows/start_excel.bat:
--------------------------------------------------------------------------------
1 | REM This file will start Python in the right environment and then start Excel
2 | set SCRIPT_FOLDER=%~dp0
3 | call %SCRIPT_FOLDER%\installation\set_tcapy_env_vars
4 |
5 | REM Set Python environment
6 | call %SCRIPT_FOLDER%\installation\activate_python_environment
7 |
8 | cd %SCRIPT_FOLDER%
9 |
10 | REM Open Excel with the tcapy_xl.xlsm spreadsheet
11 | call %EXCEL_PATH% ..\..\..\tcapy\excel\tcapy_xl.xlsm /x
12 |
--------------------------------------------------------------------------------
/batch_scripts/windows/start_jupyter.bat:
--------------------------------------------------------------------------------
1 | REM Starts a Jupyter notebook, so tcapy can be accessed through that
2 | set SCRIPT_FOLDER=%~dp0
3 | call %SCRIPT_FOLDER%\installation\set_tcapy_env_vars
4 |
5 | REM Set Python environment
6 | call %SCRIPT_FOLDER%\installation\activate_python_environment
7 |
8 | REM Only allow local access
9 | jupyter notebook ^
10 | --notebook-dir=%TCAPY_CUEMACRO%\tcapy_notebooks --ip=* --port=9999
11 |
12 | REM Alternatively have a key to access
13 | REM Create your own pem and key by following https://support.microfocus.com/kb/doc.php?id=7013103
14 | REM jupyter notebook ^
15 | REM --certfile='mycert.pem' ^
16 | REM --keyfile='mykey.key' --ip=* --port=9999
--------------------------------------------------------------------------------
/binder/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM tschm/jupyter:1.2.0 as builder
2 |
3 | # File Author / Maintainer
4 | # MAINTAINER Thomas Schmelzer "thomas.schmelzer@gmail.com"
5 | ENV HOME /home/${NB_USER}
6 | ENV APP_ENV docker
7 |
8 | COPY --chown=jovyan:users . /tmp/tcapy
9 |
10 | COPY . ${HOME}
11 | USER root
12 | RUN chown -R ${NB_UID} ${HOME}
13 |
14 | # Install snappy for pystore and ODBC
15 | USER root
16 |
17 | # Minimal dependencies
18 | #RUN apt-get update && \
19 | # apt-get install -y --no-install-recommends \
20 | # libsnappy-dev gcc g++ unixodbc-dev
21 |
22 | # Dependencies for pystore and weasyprint in buildDeps
23 | # If we don't want to use weasyprint we can drop:
24 | # build-essential libcairo2 libpango-1.0-0 libpangocairo-1.0-0 libgdk-pixbuf2.0-0 libffi-dev shared-mime-info
25 | RUN buildDeps='gcc g++ libsnappy-dev unixodbc-dev build-essential libcairo2 libpango-1.0-0 libpangocairo-1.0-0 libgdk-pixbuf2.0-0 libffi-dev shared-mime-info' && \
26 | apt-get update && apt-get install -y $buildDeps --no-install-recommends
27 |
28 | # Switch back to jovyan to avoid accidental container runs as root
29 | USER $NB_UID
30 |
31 | RUN pip install --no-cache-dir /tmp/tcapy && \
32 | rm -rf /tmp/tcapy
33 |
34 | RUN ln -s /home/${NB_USER}/tcapy /home/${NB_USER}/work/tcapy
35 | RUN ln -s /home/${NB_USER}/tcapyuser /home/${NB_USER}/work/tcapyuser
36 | RUN ln -s /home/${NB_USER}/tcapy_notebooks /home/${NB_USER}/work/tcapy_notebooks
37 | RUN ln -s /home/${NB_USER}/tcapy_scripts /home/${NB_USER}/work/tcapy_scripts
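38 |
39 | # Sketch (illustrative, the tag name is hypothetical): build and run this image standalone from the repo root
40 | # docker build -f binder/Dockerfile -t tcapy-binder . && docker run -p 8888:8888 tcapy-binder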
--------------------------------------------------------------------------------
/cuemacro_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/cuemacro_logo.png
--------------------------------------------------------------------------------
/docker-compose.test.yml:
--------------------------------------------------------------------------------
1 | version: '3.6'
2 | services:
3 | sut:
4 | build:
5 | context: .
6 | dockerfile: Dockerfile
7 | target: test
8 | volumes:
9 | - ./tcapy:/tcapy/tcapy:ro
10 | - ./tcapygen:/tcapy/tcapygen:ro
11 | - ./artifacts:/tcapy/artifacts
12 | - ./source:/source:ro
13 | env_file: .tcapy.env
14 | environment:
15 | - APP_ENV=docker
16 | depends_on:
17 | - celery
18 | - redis
19 | - memcached
20 | - mongo
21 | - mysql
22 |
23 | celery:
24 | build:
25 | context: .
26 | dockerfile: Dockerfile
27 | target: test
28 | working_dir: /tcapy
29 | command: "celery -A tcapy.conf.celery_calls worker --purge --discard --loglevel=debug -Q celery --concurrency=14 -f test_celery.log"
30 | env_file: .tcapy.env
31 | environment:
32 | - APP_ENV=docker
33 | - C_FORCE_ROOT=true
34 | depends_on:
35 | - redis
36 | - mongo
37 | - mysql
38 | - memcached
39 |
40 | redis:
41 | image: "redis:alpine"
42 | command: redis-server --port 6379 --save ""
43 | ports:
44 | - 6379:6379
45 |
46 | memcached:
47 | image: memcached
48 | ports:
49 | - 11211:11211
50 |
51 | mongo:
52 | image: mongo:latest
53 | env_file: .tcapy.env
54 | ports:
55 | - 27017:27017
56 |
57 | mysql:
58 | image: mysql
59 | command: --default-authentication-plugin=mysql_native_password
60 | env_file: .tcapy.env
61 | environment:
62 | - MYSQL_DATABASE=trade_database_test_harness
63 | ports:
64 | - 3306:3306
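65 |
66 | # Sketch (illustrative): run the test suite defined above, e.g.
67 | #   docker-compose -f docker-compose.test.yml up --build --abort-on-container-exit sut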
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3.6'
2 | services:
3 |
4 | nginx:
5 | image: nginx:latest
6 | container_name: nginx
7 | ports:
8 | - 9500:9500
9 | - 80:80
10 | volumes:
11 | - ./tcapy:/tcapy
12 | - ./tcapy/conf/tcapy_nginx_gunicorn_docker.conf:/etc/nginx/conf.d/default.conf
13 | - ./log:/var/log/nginx
14 | depends_on:
15 | - gunicorn_tcapy
16 | - gunicorn_tcapyboard
17 |
18 | jupyter:
19 | build:
20 | context: .
21 | dockerfile: binder/Dockerfile
22 | target: builder
23 | environment:
24 | - APP_ENV=docker
25 | env_file: .tcapy.env
26 | ports:
27 | - 8888:8888
28 | depends_on:
29 | - redis
30 | - celery
31 | volumes:
32 | - /tmp/csv:/tmp/csv
33 | - /tmp/tcapy:/tmp/tcapy
34 | - /data/csv_dump:/data/csv_dump
35 | - ./tcapy_notebooks:/home/jovyan/tcapy_notebooks
36 | - ./tcapy_scripts:/home/jovyan/tcapy_scripts
37 |
38 | gunicorn_tcapy:
39 | build:
40 | context: .
41 | dockerfile: Dockerfile
42 | target: builder
43 | command: "gunicorn --bind 0.0.0.0:8090 --workers 4 --threads 6 --preload --chdir /tcapy/tcapy/conf \
44 | --access-logfile /tcapy/log/gunicorn_tcapy_access.log \
45 | --error-logfile /tcapy/log/gunicorn_tcapy_error.log tcapy_wsgi:application & "
46 | volumes:
47 | - ./log:/tcapy/log
48 | - /tmp/csv:/tmp/csv
49 | - /tmp/tcapy:/tmp/tcapy
50 | - /data/csv_dump:/data/csv_dump
51 | env_file: .tcapy.env
52 | environment:
53 | - APP_ENV=docker
54 | - TCAPY_CUEMACRO=/tcapy
55 | depends_on:
56 | - redis
57 | - mongo
58 | - mysql
59 | - memcached
60 | - celery
61 | ports:
62 | - 8090:8090
63 |
64 | gunicorn_tcapyboard:
65 | build:
66 | context: .
67 | dockerfile: Dockerfile
68 | target: builder
69 | command: "gunicorn --bind 0.0.0.0:8092 --workers 4 --threads 6 --preload --chdir /tcapy/tcapy/conf \
70 | --access-logfile /tcapy/log/gunicorn_tcapy_access.log \
71 | --error-logfile /tcapy/log/gunicorn_tcapy_error.log tcapyboard_wsgi:application & "
72 | volumes:
73 | - ./log:/tcapy/log
74 | - /tmp/csv:/tmp/csv
75 | - /tmp/tcapy:/tmp/tcapy
76 | - /data/csv_dump:/data/csv_dump
77 | env_file: .tcapy.env
78 | environment:
79 | - APP_ENV=docker
80 | - TCAPY_CUEMACRO=/tcapy
81 | depends_on:
82 | - redis
83 | - mongo
84 | - mysql
85 | - memcached
86 | - celery
87 | ports:
88 | - 8092:8092
89 |
90 | celery:
91 | build:
92 | context: .
93 | dockerfile: Dockerfile
94 | target: builder
95 | working_dir: /tcapy
96 | command: "celery -A tcapy.conf.celery_calls worker --purge --discard --loglevel=debug -Q celery --concurrency=14 -f log/celery.log"
97 | volumes:
98 | - ./log:/tcapy/log
99 | - /tmp/csv:/tmp/csv
100 | - /tmp/tcapy:/tmp/tcapy
101 | - /data/csv_dump:/data/csv_dump
102 | env_file: .tcapy.env
103 | environment:
104 | - APP_ENV=docker
105 | - C_FORCE_ROOT=true
106 | depends_on:
107 | - redis
108 | - mongo
109 | - mysql
110 | - memcached
111 |
112 | redis:
113 | image: "redis:alpine"
114 | command: redis-server --port 6379 --save ""
115 | ports:
116 | - 6379:6379
117 |
118 | memcached:
119 | image: memcached
120 | ports:
121 | - 11211:11211
122 |
123 | mongo:
124 | image: mongo:latest
125 | env_file: .tcapy.env
126 | ports:
127 | - 27017:27017
128 | volumes:
129 | - /data/db_mongodb:/data/db
130 | - ./log:/var/log/mongodb
131 |
132 | mysql:
133 | image: mysql
134 | command: --default-authentication-plugin=mysql_native_password
135 | volumes:
136 | - /data/db_mysql:/var/lib/mysql
137 | env_file: .tcapy.env
138 | ports:
139 | - 3306:3306
140 |
141 | adminer:
142 | image: adminer
143 | ports:
144 | - 8080:8080
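145 |
146 | # Sketch (illustrative): bring up the whole stack in the background, e.g.
147 | #   docker-compose up -d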
--------------------------------------------------------------------------------
/img/anomalous_trades.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/img/anomalous_trades.PNG
--------------------------------------------------------------------------------
/img/compare_slippage.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/img/compare_slippage.PNG
--------------------------------------------------------------------------------
/img/create_html_pdf.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/img/create_html_pdf.PNG
--------------------------------------------------------------------------------
/img/distribution.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/img/distribution.PNG
--------------------------------------------------------------------------------
/img/executions.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/img/executions.PNG
--------------------------------------------------------------------------------
/img/gui.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/img/gui.PNG
--------------------------------------------------------------------------------
/img/markout.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/img/markout.PNG
--------------------------------------------------------------------------------
/img/slippage_by_ticker.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/img/slippage_by_ticker.PNG
--------------------------------------------------------------------------------
/img/std_create_html_pdf.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/img/std_create_html_pdf.png
--------------------------------------------------------------------------------
/img/std_distribution.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/img/std_distribution.png
--------------------------------------------------------------------------------
/img/std_executions.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/img/std_executions.png
--------------------------------------------------------------------------------
/img/std_gui.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/img/std_gui.png
--------------------------------------------------------------------------------
/img/std_slippage_by_ticker.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/img/std_slippage_by_ticker.png
--------------------------------------------------------------------------------
/img/std_tca_request.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/img/std_tca_request.png
--------------------------------------------------------------------------------
/img/std_timeline_slippage.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/img/std_timeline_slippage.png
--------------------------------------------------------------------------------
/img/tca_request.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/img/tca_request.PNG
--------------------------------------------------------------------------------
/img/tcapy_montage.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/img/tcapy_montage.png
--------------------------------------------------------------------------------
/img/timeline_slippage.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/img/timeline_slippage.PNG
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | setuptools-git==1.2
2 | cython
3 | arctic==1.79.4
4 | sqlalchemy
5 | redis
6 | pandas==1.0.5
7 | numpy
8 | scipy
9 | statsmodels
10 | blosc==1.8.3
11 | pyarrow==2.0.0
12 | pathos
13 | multiprocess
14 | fastparquet==0.4.1
15 | flask-restplus==0.13.0
16 | gunicorn
17 | beautifulsoup4
18 | pdfkit==0.6.1
19 | psutil
20 | matplotlib
21 | boto3
22 | pyodbc==4.0.23
23 | pytest
24 | pytest-cov
25 | mysql-connector-python==8.0.19
26 | IPython
27 | chartpy==0.1.8
28 | findatapy==0.1.19
29 | dash-auth==1.3.2
30 | cufflinks==0.17.3
31 | plotly==4.14.3
32 | kaleido
33 | eikon==1.1.2
34 | chart-studio==1.1.0
35 | dash==1.20.0
36 | dash-bootstrap-components==0.12.0
37 | dash-html-components==1.1.3
38 | dash-core-components==1.16.0
39 | dash-table==4.11.3
40 | jupyter-dash==0.4.0
41 | dtale
42 | qpython==2.0.0
43 | influxdb==5.2.3
44 | celery==5.0.5
45 | pytest-tap
46 | kombu
47 | python-memcached==1.59
48 | numba
49 | vispy
50 | jinja2
51 | jupyterlab
52 | jupyter_contrib_nbextensions
53 | jupyter_nbextensions_configurator
54 | RISE
55 | bqplot
56 | WeasyPrint==51
57 | dask
58 | distributed
59 | cloudpickle==1.3.0
60 | python-snappy==0.5.4
61 | bokeh
62 | msgpack==1.0.0
63 | pystore
64 | fsspec==0.3.3
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup, find_packages
2 |
3 | from tcapy import __version__ as version
4 |
5 | long_description = """tcapy is a Python library for doing transaction cost analysis (TCA), essentially finding the cost of your trading activity.
6 | Across the industry many financial firms and corporates trading within financial markets spend a lot of money on TCA, either
7 | by developing in house tools or using external services. It is estimated that the typical buy side equities trading desk
8 | spends around 225k USD a year on TCA (see MarketsMedia report at https://www.marketsmedia.com/tca-growth-fueled-regulators-investors/).
9 | Many sell side firms and larger buy side firms build and maintain their own TCA libraries, which is very expensive. The cost of TCA
10 | across the industry is likely to run into many hundreds of millions of dollars or possibly billions of dollars.
11 |
12 | Much of the complexity in TCA is due to the need to handle large tick datasets and do calculations on them and is largely a
13 | software engineering problem. This work needs to be repeated in every single implementation. By open sourcing the library
14 | we hope that the industry will no longer need to keep reinventing the wheel when it comes to TCA. At the same time,
15 | because all the code is visible to users, tcapy allows you to add your own customized metrics and benchmarks,
16 | which is where you are likely to have very particular IP in financial markets. You get the flexibility of a fully internal
17 | TCA solution for free.
18 |
19 | tcapy is one of the first open source libraries for TCA. You can run the library on your own hardware, so your trade/order
20 | data can be kept private. It has been in development since June 2017, originally for a large asset manager and was open
21 | sourced in March 2020.
22 |
23 | We've made tcapy to be vendor independent, hence for example it supports multiple database types for storing
24 | market tick data (including Arctic/MongoDB, KDB and InfluxDB) and for trade/order data (including MySQL, PostgreSQL and
25 | Microsoft SQL Server). As well as supporting Linux (tested on Ubuntu/Red Hat), it also works on Windows
26 | (Windows Subsystem for Linux is needed to make all features accessible).
27 |
28 | tcapy has also been written to distribute the computation and make a lot of use of caching. In the future, we are hoping to
29 | add features to make it easy to use serverless computing on the cloud. Since you can see all the code, it also
30 | makes the TCA totally transparent. If you are doing TCA for regulatory reasons, it makes sense that the process should
31 | be fully open, rather than a black box. Having an open source library also makes it easier to make changes and fit it to your
32 | use case.
33 | """
34 |
35 | with open('requirements.txt') as f:
36 | install_requires = f.read()
37 |
38 | setup(name='tcapy',
39 | version=version,
40 | description='Transaction cost analysis library',
41 | author='Saeed Amen',
42 | author_email='saeed@cuemacro.com',
43 | license='Apache 2.0',
44 | long_description=long_description,
45 | keywords=['pandas', 'TCA', 'transaction cost analysis'],
46 | url='https://github.com/cuemacro/tcapy',
47 | #packages=find_packages(),
48 | packages=find_packages(include=["tcapy*"]),
49 | include_package_data=True,
50 | install_requires=install_requires,
51 | zip_safe=False)
52 |
--------------------------------------------------------------------------------
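Illustrative aside (not part of the repository): setup.py above passes the raw text of requirements.txt straight to install_requires, which setuptools accepts as a newline-separated string. A slightly more explicit variant, shown only as a sketch, splits it into a list first:

    # Sketch of a common variant of the requirements parsing in setup.py:
    # one requirement per list entry rather than a single raw string
    # (setuptools accepts either form)
    with open('requirements.txt') as f:
        install_requires = [line.strip() for line in f if line.strip()]

    print(install_requires[:3])  # ['setuptools-git==1.2', 'cython', 'arctic==1.79.4']
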
/source/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/source/.gitkeep
--------------------------------------------------------------------------------
/tcapy/__init__.py:
--------------------------------------------------------------------------------
1 | __title__ = 'tcapy'
2 | __version__ = '0.1.2'
3 | __author__ = 'Saeed Amen, Cuemacro'
4 | __license__ = 'Apache 2'
5 | __copyright__ = 'Copyright 2021 by Cuemacro'
--------------------------------------------------------------------------------
/tcapy/analysis/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy/analysis/__init__.py
--------------------------------------------------------------------------------
/tcapy/analysis/algos/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy/analysis/algos/__init__.py
--------------------------------------------------------------------------------
/tcapy/api/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy/api/__init__.py
--------------------------------------------------------------------------------
/tcapy/client/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy/client/__init__.py
--------------------------------------------------------------------------------
/tcapy/conf/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy/conf/__init__.py
--------------------------------------------------------------------------------
/tcapy/conf/celeryconfig.py:
--------------------------------------------------------------------------------
1 | __author__ = 'saeedamen' # Saeed Amen / saeed@cuemacro.com
2 |
3 | #
4 | # Copyright 2017 Cuemacro Ltd. - http://www.cuemacro.com / @cuemacro
5 | #
6 | # See the License for the specific language governing permissions and limitations under the License.
7 | #
8 |
9 | """Has the configuration settings for celery. The main thing that needs to be changed is the broker URL settings (in
10 | the ConstantsGen file"
11 | """
12 |
13 | from tcapy.conf.constants import Constants
14 |
15 | constants = Constants()
16 |
17 | broker_url = constants.celery_broker_url
18 | result_backend = constants.celery_result_backend
19 |
20 | # from kombu import serialization
21 | # serialization.registry._decoders.pop("application/x-python-serialize")
22 |
23 | # the settings below should not need to be changed by most users
24 | # result_backend = "amqp"
25 | # result_backend = "redis://localhost:6379/2"
26 | event_serializer = 'pickle'
27 | accept_content = ['pickle'] #
28 | task_serializer = 'pickle'
29 | result_serializer = 'pickle'
30 | worker_hijack_root_logger = False
31 | task_store_errors_even_if_ignored = True
32 | worker_max_tasks_per_child = 50 # Restart each worker after 50 tasks, to curb memory leaks
33 | task_acks_late = True
34 | result_expires = 900 # Expire results after 900 seconds if they are not picked up, to free memory
35 | # task_always_eager = True # For debugging, to run Celery in the same process
36 | broker_transport_options = {'socket_timeout': 900}
37 | # broker_pool_limit = 0
38 |
--------------------------------------------------------------------------------
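Illustrative usage (not part of the repository): a Celery app picks up the module-level settings above via config_from_object; a minimal sketch, assuming the broker/result backend in Constants is running:

    # Minimal sketch: how a Celery app would load the settings above
    from celery import Celery

    app = Celery('tcapy')
    app.config_from_object('tcapy.conf.celeryconfig')

    print(app.conf.broker_url)                  # from constants.celery_broker_url
    print(app.conf.worker_max_tasks_per_child)  # 50, to curb memory leaks
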
/tcapy/conf/common-session:
--------------------------------------------------------------------------------
1 | #
2 | # /etc/pam.d/common-session - session-related modules common to all services
3 | #
4 | # This file is included from other service-specific PAM config files,
5 | # and should contain a list of modules that define tasks to be performed
6 | # at the start and end of sessions of *any* kind (both interactive and
7 | # non-interactive).
8 | #
9 | # As of pam 1.0.1-6, this file is managed by pam-auth-update by default.
10 | # To take advantage of this, it is recommended that you configure any
11 | # local modules either before or after the default block, and use
12 | # pam-auth-update to manage selection of other modules. See
13 | # pam-auth-update(8) for details.
14 |
15 | # here are the per-package modules (the "Primary" block)
16 | session [default=1] pam_permit.so
17 | # here's the fallback if no module succeeds
18 | session requisite pam_deny.so
19 | # prime the stack with a positive return value if there isn't one already;
20 | # this avoids us returning an error just because nothing sets a success code
21 | # since the modules above will each just jump around
22 | session required pam_permit.so
23 | # The pam_umask module will set the umask according to the system default in
24 | # /etc/login.defs and user settings, solving the problem of different
25 | # umask settings with different shells, display managers, remote sessions etc.
26 | # See "man pam_umask".
27 | session optional pam_umask.so
28 | # and here are more per-package modules (the "Additional" block)
29 | session required pam_unix.so
30 | session optional pam_systemd.so
31 | session required pam_limits.so
32 | # end of pam-auth-update config
33 |
--------------------------------------------------------------------------------
/tcapy/conf/common-session-noninteractive:
--------------------------------------------------------------------------------
1 | #
2 | # /etc/pam.d/common-session-noninteractive - session-related modules
3 | # common to all non-interactive services
4 | #
5 | # This file is included from other service-specific PAM config files,
6 | # and should contain a list of modules that define tasks to be performed
7 | # at the start and end of all non-interactive sessions.
8 | #
9 | # As of pam 1.0.1-6, this file is managed by pam-auth-update by default.
10 | # To take advantage of this, it is recommended that you configure any
11 | # local modules either before or after the default block, and use
12 | # pam-auth-update to manage selection of other modules. See
13 | # pam-auth-update(8) for details.
14 |
15 | # here are the per-package modules (the "Primary" block)
16 | session [default=1] pam_permit.so
17 | # here's the fallback if no module succeeds
18 | session requisite pam_deny.so
19 | # prime the stack with a positive return value if there isn't one already;
20 | # this avoids us returning an error just because nothing sets a success code
21 | # since the modules above will each just jump around
22 | session required pam_permit.so
23 | # The pam_umask module will set the umask according to the system default in
24 | # /etc/login.defs and user settings, solving the problem of different
25 | # umask settings with different shells, display managers, remote sessions etc.
26 | # See "man pam_umask".
27 | session optional pam_umask.so
28 | # and here are more per-package modules (the "Additional" block)
29 | session required pam_unix.so
30 | session required pam_limits.so
31 | # end of pam-auth-update config
32 |
--------------------------------------------------------------------------------
/tcapy/conf/limits.conf:
--------------------------------------------------------------------------------
1 | # /etc/security/limits.conf
2 | #
3 | #Each line describes a limit for a user in the form:
4 | #
5 | #<domain>        <type>  <item>  <value>
6 | #
7 | #Where:
8 | #<domain> can be:
9 | # - a user name
10 | # - a group name, with @group syntax
11 | # - the wildcard *, for default entry
12 | # - the wildcard %, can be also used with %group syntax,
13 | # for maxlogin limit
14 | # - NOTE: group and wildcard limits are not applied to root.
15 | # To apply a limit to the root user, <domain> must be
16 | # the literal username root.
17 | #
18 | #<type> can have the two values:
19 | # - "soft" for enforcing the soft limits
20 | # - "hard" for enforcing hard limits
21 | #
22 | #<item> can be one of the following:
23 | # - core - limits the core file size (KB)
24 | # - data - max data size (KB)
25 | # - fsize - maximum filesize (KB)
26 | # - memlock - max locked-in-memory address space (KB)
27 | # - nofile - max number of open files
28 | # - rss - max resident set size (KB)
29 | # - stack - max stack size (KB)
30 | # - cpu - max CPU time (MIN)
31 | # - nproc - max number of processes
32 | # - as - address space limit (KB)
33 | # - maxlogins - max number of logins for this user
34 | # - maxsyslogins - max number of logins on the system
35 | # - priority - the priority to run user process with
36 | # - locks - max number of file locks the user can hold
37 | # - sigpending - max number of pending signals
38 | # - msgqueue - max memory used by POSIX message queues (bytes)
39 | # - nice - max nice priority allowed to raise to values: [-20, 19]
40 | # - rtprio - max realtime priority
41 | # - chroot - change root to directory (Debian-specific)
42 | #
43 | #<domain>      <type>  <item>         <value>
44 | #
45 |
46 | #* soft core 0
47 | #root hard core 100000
48 | #* hard rss 10000
49 | #@student hard nproc 20
50 | #@faculty soft nproc 20
51 | #@faculty hard nproc 50
52 | #ftp hard nproc 0
53 | #ftp - chroot /ftp
54 | #@student - maxlogins 4
55 | root hard nofile 100000
56 | root soft nofile 100000
57 | * hard nofile 100000
58 | * soft nofile 100000
59 |
60 | # End of file
61 |
--------------------------------------------------------------------------------
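Illustrative check (not part of the repository): once pam_limits.so picks up the nofile settings above (new sessions only), the limit can be read from Python on Linux:

    # Verify the open-file limit that limits.conf raises to 100000
    import resource

    soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
    print(soft, hard)  # expect 100000 100000 for sessions started after the change
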
/tcapy/conf/logging.conf:
--------------------------------------------------------------------------------
1 | [loggers]
2 | keys=root
3 |
4 | [handlers]
5 | keys=consoleHandler, fileHandler
6 |
7 | [formatters]
8 | keys=simpleFormatter
9 |
10 | [logger_root]
11 | level=DEBUG
12 | handlers=consoleHandler, fileHandler
13 | propagate=1
14 | disabled=0
15 |
16 | [handler_consoleHandler]
17 | class=StreamHandler
18 | level=DEBUG
19 | formatter=simpleFormatter
20 | args=(sys.stdout,)
21 |
22 | [handler_fileHandler]
23 | class=FileHandler
24 | level=DEBUG
25 | formatter=simpleFormatter
26 | # This causes a new file to be created for each script
27 | # Change time.strftime("%Y%m%d%H%M%S") to time.strftime("%Y%m%d")
28 | # And only one log per day will be created. All messages will be appended to it.
29 | args=('tcapy.log', 'a')
30 |
31 | [formatter_simpleFormatter]
32 | format=%(asctime)s - %(name)s - %(levelname)s - %(message)s
33 | datefmt=
--------------------------------------------------------------------------------
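Illustrative usage (not part of the repository; assumes it is run from the repository root): the INI-style file above can be loaded directly with the standard library, although tcapy itself mostly configures logging via LoggerManager:

    import logging
    import logging.config

    logging.config.fileConfig('tcapy/conf/logging.conf')
    logging.getLogger(__name__).debug('goes to both the console and tcapy.log')
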
/tcapy/conf/mongo.conf:
--------------------------------------------------------------------------------
1 | dbpath = /data/db
2 | logpath = /home/ubuntu/cuemacro/tcapy/log/mongo.log
3 | # logpath = /tmp/mongo.log
4 | logappend = true
5 | bind_ip = 127.0.0.1
6 | port = 27017
7 |
8 | fork = true
9 | # rest = true
10 | verbose = true
--------------------------------------------------------------------------------
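Illustrative check (not part of the repository; pymongo is pulled in via the arctic dependency): verifying the mongod instance configured above is reachable:

    # Hypothetical connectivity check for the mongod configured above
    # (bind_ip 127.0.0.1, port 27017)
    from pymongo import MongoClient

    client = MongoClient('127.0.0.1', 27017, serverSelectionTimeoutMS=2000)
    print(client.server_info()['version'])  # raises ServerSelectionTimeoutError if down
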
/tcapy/conf/my.cnf:
--------------------------------------------------------------------------------
1 | [mysqld]
2 | datadir=/data/mysqldb
--------------------------------------------------------------------------------
/tcapy/conf/tcapy.wsgi:
--------------------------------------------------------------------------------
1 | # preferred to use apache/gunicorn (easier to support Python 3) instead of apache/wsgi (more for Python 2)
2 |
3 | # if we need to run apache/wsgi for Python 3, then needs some patching of mod_wsgi
4 | # as suggested in http://devmartin.com/blog/2015/02/how-to-deploy-a-python3-wsgi-application-with-apache2-and-debian/
5 |
6 | import os
7 | import sys
8 |
9 | try:
10 | tcapy_cuemacro = os.environ['TCAPY_CUEMACRO']
11 | python_home = os.environ['TCAPY_PYTHON_ENV']
12 | except:
13 | user_home = os.environ['USER']
14 |
15 | # if TCAPY_CUEMACRO not set globally (or the Python environment for TCAPY), we need to specify it here
16 | tcapy_cuemacro = '/home/' + user_home + '/cuemacro/'
17 | python_home = '/home/' + user_home + '/py36tca/'
18 |
19 | activate_this = python_home + '/bin/activate_this.py'
20 |
21 | execfile(activate_this, dict(__file__=activate_this))
22 |
23 | sys.path.insert(0, tcapy_cuemacro)
24 | # os.chdir(tcapy_cuemacro+ '/tcapypro/vis/')
25 |
26 | from tcapy.vis.app import server as application
27 | application.root_path = tcapy_cuemacro + '/tcapy/vis/'
--------------------------------------------------------------------------------
/tcapy/conf/tcapy_apache.conf:
--------------------------------------------------------------------------------
1 | NameVirtualHost *:80
2 |
3 | <VirtualHost *:80>
4 | ServerName tcapy
5 |
6 | WSGIDaemonProcess tcapy user=redhat processes=4 threads=6
7 |
8 | WSGIScriptAlias /tcapy /var/www/tcapy/tcapy.wsgi
9 |
10 | <Directory /var/www/tcapy>
11 | WSGIProcessGroup tcapy
12 | WSGIApplicationGroup %{GLOBAL}
13 | Require all granted
14 | </Directory>
15 |
16 | </VirtualHost>
--------------------------------------------------------------------------------
/tcapy/conf/tcapy_apache_gunicorn.conf:
--------------------------------------------------------------------------------
1 | LoadModule ssl_module modules/mod_ssl.so
2 |
3 | # for https
4 |
5 | Listen 443
6 |
7 | ServerName tcapy
8 | SSLEngine on
9 |
10 | # need to be changed to the paths to your certificates
11 | SSLCertificateFile "/home/redhat/cuemacro/tcapy/batch_scripts/server.crt"
12 | SSLCertificateKeyFile "/home/redhat/cuemacro/tcapy/batch_scripts/server.key"
13 |
14 | ServerAdmin webmaster@localhost
15 | DocumentRoot /var/www/html
16 |
17 | # Available loglevels: trace8, ..., trace1, debug, info, notice, warn,
18 | # error, crit, alert, emerg.
19 | # It is also possible to configure the loglevel for particular
20 | # modules, e.g.
21 | #LogLevel info ssl:warn
22 |
23 | # make sure Apache can write to these areas (eg. if in a user folder, may need to chown for root to be able to write there)
24 | # ErrorLog /home/redhat/cuemacro/tcapy/error.log
25 | # CustomLog /home/redhat/cuemacro/tcapy/access.log combined
26 |
27 | # For most configuration files from conf-available/, which are
28 | # enabled or disabled at a global level, it is possible to
29 | # include a line for only one particular virtual host. For example the
30 | # following line enables the CGI configuration for this host only
31 | # after it has been globally disabled with "a2disconf".
32 | #Include conf-available/serve-cgi-bin.conf
33 |
34 | Order deny,allow
35 | Allow from all
36 |
37 | ProxyPreserveHost On
38 | #
39 | <Location /tcapy/>
40 | ProxyPass "http://127.0.0.1:8080/"
41 | ProxyPassReverse "http://127.0.0.1:8080/"
42 | # http://unix:/tmp/tcapy.sock
43 | # ProxyPass "unix:/tmp/tcapy.sock|http://localhost/tcapy"
44 | # ProxyPassReverse "unix:/tmp/tcapy.sock|http://localhost/tcapy"
45 | </Location>
46 |
47 | <Location /tcapyapi/>
48 | ProxyPass "http://127.0.0.1:8081/" retry=0
49 | ProxyPassReverse "http://127.0.0.1:8081/"
50 | </Location>
51 |
52 | <Location /tcapyboard/>
53 | ProxyPass "http://127.0.0.1:8082/" retry=0
54 | ProxyPassReverse "http://127.0.0.1:8082/"
55 | </Location>
56 |
57 | # needs changing to your own address
58 | Redirect permanent / https://127.0.0.1:443/tcapy/
59 |
60 | </VirtualHost>
--------------------------------------------------------------------------------
/tcapy/conf/tcapy_apache_gunicorn_80.conf:
--------------------------------------------------------------------------------
1 | NameVirtualHost *:80
2 |
3 | # for http
4 |
5 | <VirtualHost *:80>
6 | # The ServerName directive sets the request scheme, hostname and port that
7 | # the server uses to identify itself. This is used when creating
8 | # redirection URLs. In the context of virtual hosts, the ServerName
9 | # specifies what hostname must appear in the request's Host: header to
10 | # match this virtual host. For the default virtual host (this file) this
11 | # value is not decisive as it is used as a last resort host regardless.
12 | # However, you must set it for any further virtual host explicitly.
13 | ServerName tcapy
14 |
15 | ServerAdmin webmaster@localhost
16 | DocumentRoot /var/www/html
17 |
18 | # Available loglevels: trace8, ..., trace1, debug, info, notice, warn,
19 | # error, crit, alert, emerg.
20 | # It is also possible to configure the loglevel for particular
21 | # modules, e.g.
22 | #LogLevel info ssl:warn
23 |
24 | # make sure Apache can write to these areas (eg. if in a user folder, may need to chown for root to be able to write there)
25 | # ErrorLog /home/redhat/cuemacro/tcapy/error.log
26 | # CustomLog /home/redhat/cuemacro/tcapy/access.log combined
27 |
28 | # For most configuration files from conf-available/, which are
29 | # enabled or disabled at a global level, it is possible to
30 | # include a line for only one particular virtual host. For example the
31 | # following line enables the CGI configuration for this host only
32 | # after it has been globally disabled with "a2disconf".
33 | #Include conf-available/serve-cgi-bin.conf
34 |
35 | Order deny,allow
36 | Allow from all
37 |
38 | ProxyPreserveHost On
39 | #
40 | <Location /tcapy/>
41 | ProxyPass "http://127.0.0.1:8080/"
42 | ProxyPassReverse "http://127.0.0.1:8080/"
43 | # http://unix:/tmp/tcapy.sock
44 | # ProxyPass "unix:/tmp/tcapy.sock|http://localhost/tcapy"
45 | # ProxyPassReverse "unix:/tmp/tcapy.sock|http://localhost/tcapy"
46 | </Location>
47 | </VirtualHost>
--------------------------------------------------------------------------------
/tcapy/conf/tcapy_nginx_gunicorn.conf:
--------------------------------------------------------------------------------
1 | server {
2 | listen 9500;
3 |
4 | #location / {
5 | # return 410; # Default root of site won't exist.
6 | #}
7 |
8 | location /tcapy/ {
9 | rewrite ^/tcapy(.*)$ $1 break;
10 | proxy_set_header Host localhost;
11 | proxy_set_header X-Real-IP $remote_addr;
12 | proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
13 | proxy_set_header X-Forwarded-Proto $scheme;
14 | proxy_pass http://localhost:8090/;
15 | }
16 |
17 | location /tcapyapi/ {
18 | rewrite ^/tcapyapi(.*)$ $1 break;
19 | proxy_set_header Host localhost;
20 | proxy_set_header X-Real-IP $remote_addr;
21 | proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
22 | proxy_set_header X-Forwarded-Proto $scheme;
23 | proxy_pass http://localhost:8091/;
24 | }
25 |
26 | location /tcapyboard/ {
27 | rewrite ^/tcapyboard(.*)$ $1 break;
28 | proxy_set_header Host localhost;
29 | proxy_set_header X-Real-IP $remote_addr;
30 | proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
31 | proxy_set_header X-Forwarded-Proto $scheme;
32 | proxy_pass http://localhost:8092/;
33 | }
34 | }
--------------------------------------------------------------------------------
/tcapy/conf/tcapy_nginx_gunicorn_docker.conf:
--------------------------------------------------------------------------------
1 | server {
2 | listen 9500;
3 |
4 | #location / {
5 | # return 410; # Default root of site won't exist.
6 | #}
7 |
8 | location /tcapy/ {
9 | rewrite ^/tcapy(.*)$ $1 break;
10 | proxy_set_header Host gunicorn_tcapy;
11 | proxy_set_header X-Real-IP $remote_addr;
12 | proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
13 | proxy_set_header X-Forwarded-Proto $scheme;
14 | proxy_pass http://gunicorn_tcapy:8090/;
15 | }
16 |
17 | location /tcapyboard/ {
18 | rewrite ^/tcapyboard(.*)$ $1 break;
19 | proxy_set_header Host gunicorn_tcapyboard;
20 | proxy_set_header X-Real-IP $remote_addr;
21 | proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
22 | proxy_set_header X-Forwarded-Proto $scheme;
23 | proxy_pass http://gunicorn_tcapyboard:8092/;
24 | }
25 | }
--------------------------------------------------------------------------------
/tcapy/conf/tcapy_wsgi.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 |
4 | tcapy_cuemacro_home = os.environ['TCAPY_CUEMACRO']
5 |
6 | sys.path.insert(0, tcapy_cuemacro_home)
7 | # os.chdir(user_home + '/cuemacro/tcapy/tcapypro/vis/')
8 |
9 | from tcapy.vis.app import server as application
10 | application.root_path = tcapy_cuemacro_home + '/tcapy/vis/'
--------------------------------------------------------------------------------
/tcapy/conf/tcapyapi.wsgi:
--------------------------------------------------------------------------------
1 | # preferred to use apache/gunicorn (easier to support Python 3) instead of apache/wsgi (more for Python 2)
2 |
3 | # if we need to run apache/wsgi for Python 3, then needs some patching of mod_wsgi
4 | # as suggested in http://devmartin.com/blog/2015/02/how-to-deploy-a-python3-wsgi-application-with-apache2-and-debian/
5 |
6 | import os
7 | import sys
8 |
9 | try:
10 | tcapy_cuemacro = os.environ['TCAPY_CUEMACRO']
11 | python_home = os.environ['TCAPY_PYTHON_ENV']
12 | except:
13 | user_home = os.environ['USER']
14 |
15 | # if TCAPY_CUEMACRO not set globally (or the Python environment for TCAPY), we need to specify it here
16 | tcapy_cuemacro = '/home/' + user_home + '/cuemacro/'
17 | python_home = '/home/' + user_home + '/py36tca/'
18 |
19 | activate_this = python_home + '/bin/activate_this.py'
20 |
21 | execfile(activate_this, dict(__file__=activate_this))
22 |
23 | sys.path.insert(0, tcapy_cuemacro)
24 | # os.chdir(tcapy_cuemacro+ '/tcapypro/vis/')
25 |
26 | from tcapypro.api.app_api import server as application
27 | application.root_path = tcapy_cuemacro + '/tcapypro/api/'
--------------------------------------------------------------------------------
/tcapy/conf/tcapyapi_wsgi.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 |
4 | tcapy_cuemacro_home = os.environ['TCAPY_CUEMACRO']
5 |
6 | sys.path.insert(0, tcapy_cuemacro_home)
7 | # os.chdir(user_home + '/cuemacro/tcapy/tcapypro/vis/')
8 |
9 | from tcapy.api.app_api import application
10 | application.root_path = tcapy_cuemacro_home + '/tcapy/api/'
--------------------------------------------------------------------------------
/tcapy/conf/tcapyboard.wsgi:
--------------------------------------------------------------------------------
1 | # Preferred to use apache/gunicorn (easier to support Python 3) instead of apache/wsgi (more for Python 2)
2 |
3 | # if we need to run apache/wsgi for Python 3, then needs some patching of mod_wsgi
4 | # as suggested in http://devmartin.com/blog/2015/02/how-to-deploy-a-python3-wsgi-application-with-apache2-and-debian/
5 |
6 | import os
7 | import sys
8 |
9 | try:
10 | tcapy_cuemacro = os.environ['TCAPY_CUEMACRO']
11 | python_home = os.environ['TCAPY_PYTHON_ENV']
12 | except:
13 | user_home = os.environ['USER']
14 |
15 | # if TCAPY_CUEMACRO not set globally (or the Python environment for TCAPY), we need to specify it here
16 | tcapy_cuemacro = '/home/' + user_home + '/cuemacro/'
17 | python_home = '/home/' + user_home + '/py36tca/'
18 |
19 | activate_this = python_home + '/bin/activate_this.py'
20 |
21 | execfile(activate_this, dict(__file__=activate_this))
22 |
23 | sys.path.insert(0, tcapy_cuemacro + '/tcapy/')
24 | # os.chdir(tcapy_cuemacro+ '/tcapy/tcapy/vis/')
25 |
26 | from tcapy.vis.app_board import server as application
27 | application.root_path = tcapy_cuemacro + '/tcapy/tcapy/vis/'
--------------------------------------------------------------------------------
/tcapy/conf/tcapyboard_wsgi.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 |
4 | tcapy_cuemacro_home = os.environ['TCAPY_CUEMACRO']
5 |
6 | sys.path.insert(0, tcapy_cuemacro_home)
7 | # os.chdir(user_home + '/cuemacro/tcapy/tcapypro/vis/')
8 |
9 | from tcapy.vis.app_board import server as application
10 | application.root_path = tcapy_cuemacro_home + '/tcapy/vis/'
--------------------------------------------------------------------------------
/tcapy/data/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy/data/__init__.py
--------------------------------------------------------------------------------
/tcapy/excel/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy/excel/__init__.py
--------------------------------------------------------------------------------
/tcapy/excel/tcapy_xl.xlsm:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy/excel/tcapy_xl.xlsm
--------------------------------------------------------------------------------
/tcapy/util/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy/util/__init__.py
--------------------------------------------------------------------------------
/tcapy/util/customexceptions.py:
--------------------------------------------------------------------------------
1 | class DateException(Exception):
2 | pass
3 |
4 | class ValidationException(Exception):
5 | pass
6 |
7 | class TradeMarketNonOverlapException(Exception):
8 | pass
9 |
10 | class DataMissingException(Exception):
11 | pass
12 |
13 | class ErrorWritingOverlapDataException(Exception):
14 | pass
--------------------------------------------------------------------------------
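Illustrative usage (not part of the repository; load_trades is a hypothetical helper): these exception types let callers distinguish tcapy-specific failure modes from generic errors:

    # Hypothetical sketch of consuming tcapy's custom exception types
    from tcapy.util.customexceptions import DataMissingException

    def load_trades(path):  # hypothetical helper, for illustration only
        raise DataMissingException('no trade data found at ' + path)

    try:
        load_trades('/tmp/missing_trades.parquet')
    except DataMissingException as e:
        print('handled tcapy-specific error:', e)
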
/tcapy/util/fxconv.py:
--------------------------------------------------------------------------------
1 | from __future__ import print_function, division
2 |
3 | __author__ = 'saeedamen' # Saeed Amen / saeed@cuemacro.com
4 |
5 | #
6 | # Copyright 2017 Cuemacro Ltd. - http://www.cuemacro.com / @cuemacro
7 | #
8 | # See the License for the specific language governing permissions and limitations under the License.
9 | #
10 |
11 | from tcapy.util.loggermanager import LoggerManager
12 | from tcapy.conf.constants import Constants
13 |
14 | class FXConv(object):
15 | """Various methods to manipulate FX crosses, applying correct conventions.
16 |
17 | """
18 |
19 | # TODO
20 | # g10 = ['EUR', 'GBP', 'AUD', 'NZD', 'USD', 'CAD', 'CHF', 'NOK', 'SEK', 'JPY']
21 | # order = ['XAU', 'XPT', 'XBT', 'XAG', 'EUR', 'GBP', 'AUD', 'NZD', 'USD', 'CAD', 'CHF', 'NOK', 'SEK', 'JPY']
22 |
23 | def __init__(self):
24 | self.logger = LoggerManager().getLogger(__name__)
25 | self.constants = Constants()
26 | return
27 |
28 | def g10_crosses(self):
29 |
30 | g10_crosses = []
31 |
32 | for i in range(0, len(self.constants.g10)):
33 | for j in range(0, len(self.constants.g10)):
34 | if i != j:
35 | g10_crosses.append(self.correct_notation(self.constants.g10[i] + self.constants.g10[j]))
36 |
37 | set_val = set(g10_crosses)
38 | g10_crosses = sorted(list(set_val))
39 |
40 | return g10_crosses
41 |
42 | def em_or_g10(self, currency):
43 |
44 | try:
45 | index = self.constants.g10.index(currency)
46 | except ValueError:
47 | index = -1
48 |
49 | if (index < 0):
50 | return 'em'
51 |
52 | return 'g10'
53 |
54 | def is_USD_base(self, cross):
55 | base = cross[0:3]
56 | terms = cross[3:6]
57 |
58 | if base == 'USD':
59 | return True
60 |
61 | return False
62 |
63 | def is_EM_cross(self, cross):
64 | base = cross[0:3]
65 | terms = cross[3:6]
66 |
67 | if self.em_or_g10(base) == 'em' or self.em_or_g10(terms) == 'em':
68 | return True
69 |
70 | return False
71 |
72 | def reverse_notation(self, cross):
73 | base = cross[0:3]
74 | terms = cross[3:6]
75 |
76 | return terms + base
77 |
78 | def correct_notation(self, cross):
79 |
80 | if isinstance(cross, list):
81 | corrected_pairs = []
82 |
83 | for c in cross:
84 | corrected_pairs.append(self.correct_notation(c))
85 |
86 | return corrected_pairs
87 |
88 | base = cross[0:3]
89 | terms = cross[3:6]
90 |
91 | try:
92 | base_index = self.constants.quotation_order.index(base)
93 | except ValueError:
94 | base_index = -1
95 |
96 | try:
97 | terms_index = self.constants.quotation_order.index(terms)
98 | except ValueError:
99 | terms_index = -1
100 |
101 | if (base_index < 0 and terms_index > 0):
102 | return terms + base
103 | if (base_index > 0 and terms_index < 0):
104 | return base + terms
105 | elif (base_index > terms_index):
106 | return terms + base
107 | elif (terms_index > base_index):
108 | return base + terms
109 |
110 | return cross
111 |
112 | def currency_pair_in_list(self, currency_pair, list_of_pairs):
113 | return currency_pair in self.correct_notation(list_of_pairs)
114 |
115 | def correct_unique_notation_list(self, list_of_pairs):
116 |
117 | return list(set(self.correct_notation(list_of_pairs)))
--------------------------------------------------------------------------------
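Illustrative usage (not part of the repository): results depend on constants.g10 and constants.quotation_order in your configuration, so the outputs below are only the expected values under the usual market conventions:

    from tcapy.util.fxconv import FXConv

    fx = FXConv()
    print(fx.correct_notation('JPYUSD'))   # 'USDJPY' expected under usual quotation order
    print(fx.reverse_notation('EURUSD'))   # 'USDEUR'
    print(fx.em_or_g10('TRY'))             # 'em' - TRY is not in the G10 list
    print(fx.is_EM_cross('USDTRY'))        # True
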
/tcapy/util/loggermanager.py:
--------------------------------------------------------------------------------
1 | from __future__ import print_function
2 |
3 | __author__ = 'saeedamen' # Saeed Amen / saeed@cuemacro.com
4 |
5 | #
6 | # Copyright 2017 Cuemacro Ltd. - http://www.cuemacro.com / @cuemacro
7 | #
8 | # See the License for the specific language governing permissions and limitations under the License.
9 | #
10 |
11 |
12 | import logging
13 | import logging.config
14 |
15 | import threading
16 |
17 | # import cloghandler
18 | # from cloghandler import ConcurrentRotatingFileHandler
19 |
20 | from tcapy.conf.constants import Constants
21 | from tcapy.util.singleton import Singleton
22 |
23 | constants = Constants()
24 |
25 | try:
26 | from celery.utils.log import get_task_logger
27 | from celery import current_task
28 | except:
29 | current_task = False
30 |
31 | class LoggerManager(object):
32 | """LoggerManager acts as a wrapper for logging the runtime operation of tcapy
33 |
34 | """
35 | __metaclass__ = Singleton
36 |
37 | _loggers = {}
38 | _loggers_lock = threading.Lock()
39 |
40 | def __init__(self, *args, **kwargs):
41 | pass
42 |
43 | @staticmethod
44 | def getLogger(name=None):
45 |
46 | if not current_task:
47 | # Directly called (ie. not by celery)
48 |
49 | if not name:
50 | try:
51 | logging.config.dictConfig(constants.logging_parameters)
52 | except:
53 | pass
54 |
55 | log = logging.getLogger()
56 | elif name not in LoggerManager._loggers.keys():
57 | try:
58 | # logging.config.fileConfig(Constants().logging_conf)
59 | logging.config.dictConfig(constants.logging_parameters)
60 | except:
61 | pass
62 |
63 | with LoggerManager._loggers_lock:
64 | LoggerManager._loggers[name] = logging.getLogger(str(name))
65 |
66 | log = LoggerManager._loggers[name]
67 |
68 | # Recalling dictConfig appears to disable the other loggers,
69 | # hence apply this hack!
70 | for name in LoggerManager._loggers.keys():
71 | with LoggerManager._loggers_lock:
72 | LoggerManager._loggers[name].disabled = False
73 |
74 | # log.debug("Called directly")
75 |
76 | elif current_task.request.id is None:
77 | log = get_task_logger(name)
78 | # log.debug("Called synchronously")
79 | else:
80 | log = get_task_logger(name)
81 | # log.debug("Dispatched now!")
82 |
83 | return log
84 |
--------------------------------------------------------------------------------
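Illustrative usage (not part of the repository): the same call path works whether or not the code is running inside a Celery task:

    from tcapy.util.loggermanager import LoggerManager

    logger = LoggerManager.getLogger(__name__)
    logger.info('logging configured from constants.logging_parameters')
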
/tcapy/util/singleton.py:
--------------------------------------------------------------------------------
1 | from __future__ import print_function, division
2 |
3 | __author__ = 'saeedamen' # Saeed Amen / saeed@cuemacro.com
4 |
5 |
6 | #
7 | # Copyright 2017 Cuemacro Ltd. - http://www.cuemacro.com / @cuemacro
8 | #
9 | # See the License for the specific language governing permissions and limitations under the License.
10 | #
11 |
12 | class Singleton(type):
13 | """Singleton type which is used by LoggerManager to ensure there is only one instance across the whole project.
14 |
15 | """
16 | _instances = {}
17 |
18 | def __call__(cls, *args, **kwargs):
19 | if cls not in cls._instances.keys():
20 | cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
21 | return cls._instances[cls]
22 |
--------------------------------------------------------------------------------
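Illustrative note (not part of the repository): under Python 3 the `__metaclass__` class attribute (as used in LoggerManager above) is ignored; a class opts into this metaclass with the metaclass keyword instead. A minimal sketch:

    from tcapy.util.singleton import Singleton

    class Config(metaclass=Singleton):  # Python 3 metaclass syntax
        def __init__(self):
            self.values = {}

    a = Config()
    b = Config()
    assert a is b  # both names refer to the single cached instance
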
/tcapy/util/ssl_no_verify.py:
--------------------------------------------------------------------------------
1 | """Disables SSL verification, on some networks can be an issue - you might need to import this file when running scripts
2 | which need to access https services
3 | """
4 |
5 | from __future__ import print_function, division
6 |
7 | __author__ = 'saeedamen' # Saeed Amen / saeed@cuemacro.com
8 |
9 | #
10 | # Copyright 2017 Cuemacro Ltd. - http://www.cuemacro.com / @cuemacro
11 | #
12 | # See the License for the specific language governing permissions and limitations under the License.
13 | #
14 |
15 |
16 | import ssl
17 |
18 | try:
19 | _create_unverified_https_context = ssl._create_unverified_context
20 | except AttributeError:
21 | # Legacy Python that doesn't verify HTTPS certificates by default
22 | pass
23 | else:
24 | # Handle target environment that doesn't support HTTPS verification
25 | ssl._create_default_https_context = _create_unverified_https_context
--------------------------------------------------------------------------------
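Illustrative usage (not part of the repository): the module is imported purely for its side effect, before any HTTPS call; note this disables certificate verification process-wide, so use it with caution:

    import tcapy.util.ssl_no_verify  # noqa: F401 - side-effect import

    from urllib.request import urlopen

    response = urlopen('https://www.cuemacro.com')
    print(response.getcode())  # 200 if reachable, even behind SSL-intercepting proxies
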
/tcapy/util/swim.py:
--------------------------------------------------------------------------------
1 | __author__ = 'saeedamen' # Saeed Amen / saeed@cuemacro.com
2 |
3 | #
4 | # Copyright 2017 Cuemacro Ltd. - http://www.cuemacro.com / @cuemacro
5 | #
6 | # See the License for the specific language governing permissions and limitations under the License.
7 | #
8 |
9 | from tcapy.conf.constants import Constants
10 |
11 | constants = Constants()
12 |
13 | class Swim(object):
14 | """Creating thread and process pools in a generic way. Allows users to specify the underlying thread or multiprocess library
15 | they wish to use. Note that Pool objects cannot be shared between processes.
16 |
17 | """
18 |
19 | def __init__(self, parallel_library=None):
20 | self._pool = None
21 |
22 | if parallel_library is None:
23 | parallel_library = constants.parallel_library
24 |
25 | self._parallel_library = parallel_library
26 |
27 | if parallel_library == 'multiprocess':
28 | try:
29 | import multiprocess;
30 | multiprocess.freeze_support()
31 | except:
32 | pass
33 | elif parallel_library == 'pathos':
34 | try:
35 | import pathos
36 | pathos.helpers.freeze_support()
37 | except:
38 | pass
39 |
40 | def create_pool(self, thread_no=constants.thread_no, force_new=True):
41 |
42 | if not (force_new) and self._pool is not None:
43 | return self._pool
44 |
45 | if self._parallel_library == "thread":
46 | from multiprocessing.dummy import Pool
47 | elif self._parallel_library == 'multiprocess':
48 | from multiprocess import Pool
49 | elif self._parallel_library == 'pathos':
50 | from pathos.pools import ProcessPool as Pool
51 |
52 | if thread_no == 0:
53 | self._pool = Pool()
54 | else:
55 | self._pool = Pool(thread_no)
56 |
57 | return self._pool
58 |
59 | def close_pool(self, pool, force_process_respawn=False):
60 | if (constants.parallel_library != 'pathos' or force_process_respawn) and pool is not None:
61 | pool.close()
62 | pool.join()
63 |
--------------------------------------------------------------------------------
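Illustrative usage (not part of the repository): a thread-backed pool created through Swim; the 'multiprocess'/'pathos' back ends follow the same calls:

    from tcapy.util.swim import Swim

    def square(x):
        return x * x

    swim = Swim(parallel_library='thread')  # in-process thread pool back end
    pool = swim.create_pool(thread_no=4)
    print(pool.map(square, range(8)))       # [0, 1, 4, 9, 16, 25, 36, 49]
    swim.close_pool(pool)
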
/tcapy/vis/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy/vis/__init__.py
--------------------------------------------------------------------------------
/tcapy/vis/app_imports.py:
--------------------------------------------------------------------------------
1 | """This provides the entry point for the GUI web application, which uses the Dash library on top lightweight server
2 | web application (eg. Flask). It queries TCAEngine, which returns appropriate TCA output (via TCACaller). Uses LayoutDash
3 | class to render the layout and SessionManager to keep track of each user session.
4 |
5 | """
6 | from __future__ import division, print_function
7 |
8 | __author__ = 'saeedamen' # Saeed Amen / saeed@cuemacro.com
9 |
10 | #
11 | # Copyright 2019 Cuemacro Ltd. - http://www.cuemacro.com / @cuemacro
12 | #
13 | # See the License for the specific language governing permissions and limitations under the License.
14 | #
15 | # This may not be distributed without the permission of Cuemacro.
16 | #
17 |
18 | import os
19 | import sys
20 |
21 | # for LINUX machines
22 | try:
23 | tcapy_cuemacro = os.environ['TCAPY_CUEMACRO']
24 | sys.path.insert(0, tcapy_cuemacro + '/tcapy/')
25 | except:
26 | print('Did not set path! Check that the TCAPY_CUEMACRO environment variable is set.')
27 |
28 | # it is recommended to set your Python environment before running app.py!
29 |
30 | # try:
31 | # user_home = os.environ['HOME']
32 | #
33 | # python_home = user_home + '/py27tca'
34 | # activate_this = python_home + '/bin/activate_this.py'
35 | #
36 | # execfile(activate_this, dict(__file__=activate_this))
37 | # except:
38 | # pass
39 |
40 | # set plotly to work privately/offline
41 | try:
42 | import plotly # JavaScript based plotting library with Python connector
43 |
44 | plotly.tools.set_config_file(plotly_domain='https://type-here.com',
45 | world_readable=False,
46 | sharing='private')
47 | except:
48 | pass
49 |
50 | ## web server components (used later)
51 | import flask
52 | from flask import Flask
53 |
54 | import dash
55 | import dash_bootstrap_components as dbc
56 | from dash.dependencies import Output, Input
57 |
58 | ## for getting paths and general file operations
59 | import os
60 | import sys
61 |
62 | # for caching data (in Redis)
63 | from tcapy.util.mediator import Mediator
64 |
65 | # utility stuff
66 | from tcapy.conf.constants import Constants
67 | from tcapy.util.loggermanager import LoggerManager
68 | from tcapy.util.utilfunc import UtilFunc
69 |
70 | # for caching data (in Redis)
71 |
72 | # creates the HTML layout of the web pages
73 | from chartpy.dashboard import CallbackManager, SessionManager
74 |
75 | constants = Constants()
76 | util_func = UtilFunc()
77 |
78 | # manage session information for every client
79 | session_manager = SessionManager()
80 |
81 | # manage creation of callback for Dash
82 | callback_manager = CallbackManager(constants)
83 |
84 | logger = LoggerManager.getLogger(__name__)
85 |
86 | # print constants for user information
87 | logger.info("Platform = " + constants.plat)
88 | logger.info("Env = " + constants.env)
89 | logger.info("Python = " + sys.executable)
90 | logger.info("Debug environment = " + str(constants.debug_start_flask_server_directly))
91 |
92 | logger.info("Database volatile cache/Redis server_host = " + str(constants.volatile_cache_host_redis))
93 | logger.info("Database arctic server_host = " + str(constants.arctic_host))
94 | logger.info("Database ms sql server server_host = " + str(constants.ms_sql_server_host))
95 |
96 | logger.info("Database trade/order data source = " + str(constants.default_trade_data_store))
97 | logger.info("Database market data source = " + str(constants.default_market_data_store))
--------------------------------------------------------------------------------
/tcapy/vis/assets/android-chrome-192x192.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy/vis/assets/android-chrome-192x192.png
--------------------------------------------------------------------------------
/tcapy/vis/assets/android-chrome-256x256.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy/vis/assets/android-chrome-256x256.png
--------------------------------------------------------------------------------
/tcapy/vis/assets/android-chrome-512x512.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy/vis/assets/android-chrome-512x512.png
--------------------------------------------------------------------------------
/tcapy/vis/assets/apple-touch-icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy/vis/assets/apple-touch-icon.png
--------------------------------------------------------------------------------
/tcapy/vis/assets/browserconfig.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <browserconfig>
3 |     <msapplication>
4 |       <tile>
5 |         <square150x150logo src="/mstile-150x150.png"/>
6 |         <TileColor>#da532c</TileColor>
7 |       </tile>
8 |     </msapplication>
9 | </browserconfig>
10 |
--------------------------------------------------------------------------------
/tcapy/vis/assets/favicon-16x16.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy/vis/assets/favicon-16x16.png
--------------------------------------------------------------------------------
/tcapy/vis/assets/favicon-32x32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy/vis/assets/favicon-32x32.png
--------------------------------------------------------------------------------
/tcapy/vis/assets/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy/vis/assets/favicon.ico
--------------------------------------------------------------------------------
/tcapy/vis/assets/head.htm:
--------------------------------------------------------------------------------
1 | <link rel="apple-touch-icon" sizes="180x180" href="/apple-touch-icon.png">
2 | <link rel="icon" type="image/png" sizes="32x32" href="/favicon-32x32.png">
3 | <link rel="icon" type="image/png" sizes="16x16" href="/favicon-16x16.png">
4 | <link rel="manifest" href="/site.webmanifest">
5 | <meta name="msapplication-TileColor" content="#da532c">
6 | <meta name="msapplication-config" content="/browserconfig.xml">
7 | <meta name="theme-color" content="#ffffff">
8 |
9 |
--------------------------------------------------------------------------------
/tcapy/vis/assets/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy/vis/assets/logo.png
--------------------------------------------------------------------------------
/tcapy/vis/assets/logo_crop.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy/vis/assets/logo_crop.png
--------------------------------------------------------------------------------
/tcapy/vis/assets/mstile-150x150.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy/vis/assets/mstile-150x150.png
--------------------------------------------------------------------------------
/tcapy/vis/assets/site.webmanifest:
--------------------------------------------------------------------------------
1 | {
2 | "name": "",
3 | "short_name": "",
4 | "icons": [
5 | {
6 | "src": "/android-chrome-192x192.png",
7 | "sizes": "192x192",
8 | "type": "image/png"
9 | },
10 | {
11 | "src": "/android-chrome-256x256.png",
12 | "sizes": "256x256",
13 | "type": "image/png"
14 | }
15 | ],
16 | "theme_color": "#ffffff",
17 | "background_color": "#ffffff",
18 | "display": "standalone"
19 | }
20 |
--------------------------------------------------------------------------------
/tcapy/vis/computationresults.py:
--------------------------------------------------------------------------------
1 | from __future__ import print_function
2 |
3 | __author__ = 'saeedamen' # Saeed Amen / saeed@cuemacro.com
4 |
5 | #
6 | # Copyright 2018 Cuemacro Ltd. - http://www.cuemacro.com / @cuemacro
7 | #
8 | # See the License for the specific language governing permissions and limitations under the License.
9 | #
10 |
11 | import abc
12 |
13 | from tcapy.util.utilfunc import UtilFunc
14 | from tcapy.vis.displaylisteners import PlotRender
15 |
16 | ABC = abc.ABCMeta('ABC', (object,), {'__slots__': ()})
17 |
18 | class ComputationResults(ABC):
19 | """Abstract class holds the results of a computation in a friendly format, splitting out the various dataset which can be used
20 | for charts. Also converts these datasets to Plotly Figure objects, ready to be plotted in HTML documents.
21 |
22 | """
23 |
24 | def __init__(self, dict_of_df, computation_request, text_preamble=''):
25 | self._plot_render = PlotRender()
26 | self._util_func = UtilFunc()
27 | self.text_preamble = text_preamble
28 |
29 | self.computation_request = computation_request
30 |
31 | self._rendered = False
32 |
33 | @abc.abstractmethod
34 | def render_computation_charts(self):
35 | """Takes the various dataframes computation results output, and then renders these as Plotly JSON charts (data and
36 | all their graphical properties), which are easy to plot later.
37 |
38 | Returns
39 | -------
40 |
41 | """
42 | pass
43 |
44 | ##### Other data (eg. text)
45 | @property
46 | def text_preamble(self):
47 | return self.__text_preamble
48 |
49 | @text_preamble.setter
50 | def text_preamble(self, text_preamble):
51 | self.__text_preamble = text_preamble
52 |
53 | @property
54 | def computation_request(self):
55 | return self.__computation_request
56 |
57 | @computation_request.setter
58 | def computation_request(self, computation_request):
59 | self.__computation_request = computation_request
--------------------------------------------------------------------------------
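Illustrative subclass sketch (hypothetical names, not part of the repository): concrete results classes implement render_computation_charts to turn the stored DataFrames into Plotly figures:

    from tcapy.vis.computationresults import ComputationResults

    class MyComputationResults(ComputationResults):  # hypothetical subclass
        def __init__(self, dict_of_df, computation_request):
            super(MyComputationResults, self).__init__(
                dict_of_df, computation_request, text_preamble='My computation')
            self._dict_of_df = dict_of_df  # base class does not store it for us

        def render_computation_charts(self):
            # would convert each DataFrame in self._dict_of_df into a Plotly
            # Figure via self._plot_render before plotting/reporting
            self._rendered = True
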
/tcapy/vis/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy/vis/favicon.ico
--------------------------------------------------------------------------------
/tcapy/vis/layoutdash.py:
--------------------------------------------------------------------------------
1 | from __future__ import division, print_function
2 |
3 | __author__ = 'saeedamen' # Saeed Amen / saeed@cuemacro.com
4 |
5 | #
6 | # Copyright 2017 Cuemacro Ltd. - http://www.cuemacro.com / @cuemacro
7 | #
8 | # See the License for the specific language governing permissions and limitations under the License.
9 | #
10 |
11 | import abc
12 |
13 | ## web server components
14 | import dash_core_components as dcc
15 | import dash_html_components as html
16 |
17 | import dash_table as dt
18 |
19 | ## time/date components
20 | import datetime
21 | from datetime import timedelta
22 |
23 | from collections import OrderedDict
24 |
25 | from tcapy.util.utilfunc import UtilFunc
26 |
27 | from tcapy.conf.constants import Constants
28 |
29 | from chartpy.dashboard.layoutcanvas import LayoutCanvas
30 |
31 | class LayoutDash(LayoutCanvas):
32 | """Abstract class for creating HTML pages via Dash/HTML components. Has generic methods for creating HTML/Dash
33 | components, including header bars, link bars, buttons and plots.
34 | """
35 |
36 | def __init__(self, app=None, constants=None, url_prefix=''):
37 | super(LayoutDash, self).__init__(app=app, constants=constants, url_prefix=url_prefix)
38 |
39 | self.id_flags = {}
40 | self.pages = {}
41 |
42 | self._util_func = UtilFunc()
43 | self._url_prefix = url_prefix
44 |
45 | def id_flag_parameters(self):
46 | return self.id_flags
47 |
48 | def calculate_button(self):
49 | pass
50 |
51 | def page_name(self):
52 | pass
53 |
54 | def attach_callbacks(self):
55 | pass
56 |
57 | def construct_layout(self):
58 | pass
59 |
--------------------------------------------------------------------------------
/tcapy/vis/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy/vis/logo.png
--------------------------------------------------------------------------------
/tcapy/vis/logo_cropped.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy/vis/logo_cropped.png
--------------------------------------------------------------------------------
/tcapy/vis/report/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy/vis/report/__init__.py
--------------------------------------------------------------------------------
/tcapy/vis/report/tcareport.py:
--------------------------------------------------------------------------------
1 | from __future__ import print_function
2 |
3 | __author__ = 'saeedamen' # Saeed Amen / saeed@cuemacro.com
4 |
5 | #
6 | # Copyright 2018 Cuemacro Ltd. - http://www.cuemacro.com / @cuemacro
7 | #
8 | # See the License for the specific language governing permissions and limitations under the License.
9 | #
10 |
11 | from tcapy.vis.report.computationreport import ComputationReport, JinjaRenderer
12 |
13 | class TCAReport(ComputationReport):
14 | """Creates an HTML or PDF report from TCA results, which can be written to disk or returned as a string/binary
15 | for display elsewhere (eg. to be output by a webserver). Uses Renderer objects to write to HTML/convert to PDF.
16 | It is preferred to use JinjaRenderer for TCAReport.
17 |
18 | """
19 |
20 | def __init__(self, tca_results, title='TCA Report / generated by tcapy', renderer=JinjaRenderer(), text_dict={}):
21 | super(TCAReport, self).__init__(tca_results, title=title, renderer=renderer)
22 |
23 | self._text_dict = text_dict
24 |
25 | def _layout_computation_results_to_html(self, embed_chart='offline_embed_js_div'):
26 |
27 | # Make sure to convert the dataframes into Plotly Figures first
28 | self._computation_results.render_computation_charts()
29 |
30 | tca_request = self._computation_request
31 |
32 | textpreamble_dict = {}
33 | charts_dict = {}
34 | text_dict = self._text_dict
35 | tables_dict = {}
36 |
37 | # Add some text at the beginning
38 | textpreamble_dict['Introduction'] = self._create_text_html(self._computation_results.text_preamble, add_hr=False)
39 |
40 | # Generate the HTML for all the sparse market charts
41 | charts_dict['Markets and trade/order charts'] = self._create_chart_html(self._computation_results.sparse_market_charts,
42 | embed_chart=embed_chart)
43 |
44 | # Generate the HTML for all the timeline charts
45 | charts_dict['Timeline charts'] = self._create_chart_html(self._computation_results.timeline_charts, embed_chart=embed_chart)
46 |
47 | # Generate the HTML for all the bar charts
48 | charts_dict['Bar charts'] = self._create_chart_html(self._computation_results.bar_charts, embed_chart=embed_chart)
49 |
50 | # Generate the HTML for all the probability distribution charts
51 | charts_dict['PDF charts'] \
52 | = self._create_chart_html(self._computation_results.dist_charts, embed_chart=embed_chart)
53 |
54 | # Generate the HTML for all the scatter charts
55 | charts_dict['Scatter charts'] = self._create_chart_html(self._computation_results.scatter_charts, embed_chart=embed_chart)
56 |
57 | # Generate the HTML for all the heatmap charts
58 | charts_dict['Heatmap charts'] = self._create_chart_html(self._computation_results.heatmap_charts, embed_chart=embed_chart)
59 |
60 | # Include the HTML for tables
61 | tables_dict['Tables'] = self._create_table_html(self._computation_results.styled_tables)
62 |
63 | # Create a summary of the parameters of the TCA request
64 | listpoints_dict = {}
65 |
66 | listpoints_dict['Ticker'] = self._create_text_html(self._util_func.pretty_str_list(tca_request.ticker), add_hr=False)
67 | listpoints_dict['Date'] = self._create_text_html(str(tca_request.start_date) + ' - ' + str(tca_request.finish_date), add_hr=False)
68 |
69 | if tca_request.metric_calcs is not None:
70 | if tca_request.metric_calcs != []:
71 | listpoints_dict['Metric'] = self._create_text_html(
72 | self._util_func.pretty_str_list([m.get_metric_name().replace('_', ' ') for m in tca_request.metric_calcs]), add_hr=False)
73 |
74 | return {'text' : text_dict, 'charts' : charts_dict, 'tables' : tables_dict, 'textpreamble' : textpreamble_dict, 'listpoints' : listpoints_dict}
75 |
--------------------------------------------------------------------------------
/tcapy/vis/report/templates/cuemacro_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy/vis/report/templates/cuemacro_logo.png
--------------------------------------------------------------------------------
/tcapy/vis/report/templates/img/OpenSans-Bold.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy/vis/report/templates/img/OpenSans-Bold.ttf
--------------------------------------------------------------------------------
/tcapy/vis/report/templates/img/OpenSans-BoldItalic.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy/vis/report/templates/img/OpenSans-BoldItalic.ttf
--------------------------------------------------------------------------------
/tcapy/vis/report/templates/img/OpenSans-ExtraBold.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy/vis/report/templates/img/OpenSans-ExtraBold.ttf
--------------------------------------------------------------------------------
/tcapy/vis/report/templates/img/OpenSans-ExtraBoldItalic.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy/vis/report/templates/img/OpenSans-ExtraBoldItalic.ttf
--------------------------------------------------------------------------------
/tcapy/vis/report/templates/img/OpenSans-Italic.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy/vis/report/templates/img/OpenSans-Italic.ttf
--------------------------------------------------------------------------------
/tcapy/vis/report/templates/img/OpenSans-Light.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy/vis/report/templates/img/OpenSans-Light.ttf
--------------------------------------------------------------------------------
/tcapy/vis/report/templates/img/OpenSans-LightItalic.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy/vis/report/templates/img/OpenSans-LightItalic.ttf
--------------------------------------------------------------------------------
/tcapy/vis/report/templates/img/OpenSans-Regular.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy/vis/report/templates/img/OpenSans-Regular.ttf
--------------------------------------------------------------------------------
/tcapy/vis/report/templates/img/OpenSans-Semibold.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy/vis/report/templates/img/OpenSans-Semibold.ttf
--------------------------------------------------------------------------------
/tcapy/vis/report/templates/img/OpenSans-SemiboldItalic.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy/vis/report/templates/img/OpenSans-SemiboldItalic.ttf
--------------------------------------------------------------------------------
/tcapy_examples/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy_examples/__init__.py
--------------------------------------------------------------------------------
/tcapy_examples/gen/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy_examples/gen/__init__.py
--------------------------------------------------------------------------------
/tcapy_examples/gen/date_calculation_examples.py:
--------------------------------------------------------------------------------
1 | from __future__ import division, print_function
2 |
3 | __author__ = 'saeedamen' # Saeed Amen / saeed@cuemacro.com
4 |
5 | #
6 | # Copyright 2017 Cuemacro Ltd. - http://www.cuemacro.com / @cuemacro
7 | #
8 | # See the License for the specific language governing permissions and limitations under the License.
9 | #
10 |
--------------------------------------------------------------------------------
/tcapy_examples/gen/filtering_examples.py:
--------------------------------------------------------------------------------
1 | from __future__ import division, print_function
2 |
3 | __author__ = 'saeedamen' # Saeed Amen / saeed@cuemacro.com
4 |
5 | #
6 | # Copyright 2017 Cuemacro Ltd. - http://www.cuemacro.com / @cuemacro
7 | #
8 | # See the License for the specific language governing permissions and limitations under the License.
9 | #
10 |
11 | import os
12 |
13 | from tcapy.util.loggermanager import LoggerManager
14 |
15 | from tcapy.analysis.tcaengine import TCAEngineImpl
16 | from tcapy.analysis.tcarequest import TCARequest
17 |
18 | from tcapy.analysis.algos.benchmark import BenchmarkMarketMid
19 |
20 | from tcapy.data.databasesource import *
21 |
22 | logger = LoggerManager.getLogger(__name__)
23 |
24 | # 'dukascopy' or 'ncfx'
25 | data_source = 'ncfx'
26 |
27 | # Change the market and trade data store as necessary
28 | market_data_store = 'arctic-' + data_source
29 |
30 | trade_data_source = 'ms_sql_server'
31 | ticker = 'EURUSD'
32 | tca_type = 'aggregated'
33 | start_date = '01 May 2017'
34 | finish_date = '12 May 2017'
35 | bid_mid_bp = 0.1
36 | ask_mid_bp = 0.1
37 |
38 | use_test_csv = True
39 |
40 | folder = constants.test_data_harness_folder
41 |
42 | def get_sample_data():
43 | logger.info("About to load data for " + ticker)
44 |
45 | if use_test_csv:
46 | tca_request = TCARequest(start_date=start_date, finish_date=finish_date, ticker=ticker,
47 | trade_data_store='csv',
48 | reporting_currency='EUR',
49 | market_data_store=os.path.join(folder, 'small_test_market_df.csv.gz'),
50 | trade_order_mapping={'trade_df': os.path.join(folder, 'small_test_trade_df.csv'),
51 | 'order_df': os.path.join(folder, 'small_test_order_df.csv')},
52 | tca_type=tca_type, benchmark_calcs=BenchmarkMarketMid())
53 | else:
54 | tca_request = TCARequest(start_date=start_date, finish_date=finish_date, ticker=ticker,
55 | trade_data_store=trade_data_source,
56 | reporting_currency='EUR',
57 | market_data_store=market_data_store,
58 | trade_order_mapping=['trade_df', 'order_df'],
59 | tca_type=tca_type, benchmark_calcs=BenchmarkMarketMid())
60 |
61 | tca_engine = TCAEngineImpl()
62 |
63 | trade_order_results_df_dict = tca_engine.calculate_tca(tca_request)
64 |
65 | return trade_order_results_df_dict[ticker + '_df'], trade_order_results_df_dict['trade_df'], \
66 | trade_order_results_df_dict['order_df']
67 |
68 |
69 | def example_filter_by_time_of_day_day_of_week():
70 |     """Example showing how to filter trade data by the time of day and day of the week
71 | """
72 |
73 | from tcapy.analysis.tradeorderfilter import TradeOrderFilterTimeOfDayWeekMonth
74 |
75 | # Get sample market/trade/order data
76 | market_df, trade_df, order_df = get_sample_data()
77 |
78 |     # Create a filter for trades between 7am-12pm, on Mondays in May
79 | trade_order_filter = TradeOrderFilterTimeOfDayWeekMonth(
80 | time_of_day={'start_time': '07:00', 'finish_time': '12:00'}, day_of_week='Mon', month_of_year='May')
81 |
82 | trade_df = trade_order_filter.filter_trade_order(trade_df)
83 |
84 | # Check the final time series has no values outside of 7am-12pm, on Mondays in May
85 | assert (trade_df.index.hour >= 7).all() and (trade_df.index.hour <= 12).all() \
86 | and (trade_df.index.dayofweek == 0).all() and (trade_df.index.month == 5).all()
87 |
88 | if __name__ == '__main__':
89 | import time
90 |
91 | start = time.time()
92 |
93 | example_filter_by_time_of_day_day_of_week()
94 |
95 | finish = time.time()
96 | print('Status: calculated ' + str(round(finish - start, 3)) + "s")
97 |
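As a quick illustration of the same API (the parameter values here are arbitrary), the filter class above can equally be applied to orders, eg. keeping only orders filled on Friday afternoons in May:

from tcapy.analysis.tradeorderfilter import TradeOrderFilterTimeOfDayWeekMonth

market_df, trade_df, order_df = get_sample_data()

# Keep only orders between 2pm-5pm on Fridays in May (illustrative values)
order_filter = TradeOrderFilterTimeOfDayWeekMonth(
    time_of_day={'start_time': '14:00', 'finish_time': '17:00'}, day_of_week='Fri', month_of_year='May')

order_df = order_filter.filter_trade_order(order_df)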
--------------------------------------------------------------------------------
/tcapy_examples/gen/metric_calculation_examples.py:
--------------------------------------------------------------------------------
1 | from __future__ import division, print_function
2 |
3 | __author__ = 'saeedamen' # Saeed Amen / saeed@cuemacro.com
4 |
5 | #
6 | # Copyright 2017 Cuemacro Ltd. - http://www.cuemacro.com / @cuemacro
7 | #
8 | # See the License for the specific language governing permissions and limitations under the License.
9 | #
10 |
11 | from tcapy.analysis.tcaengine import TCAEngineImpl
12 | from tcapy.analysis.tcarequest import TCARequest
13 |
14 | from tcapy.util.loggermanager import LoggerManager
15 |
16 | logger = LoggerManager.getLogger(__name__)
17 |
18 | # 'dukascopy' or 'ncfx'
19 | data_source = 'dukascopy'
20 |
21 | # Change the market and trade data store as necessary
22 | market_data_store = 'arctic-' + data_source
23 |
24 | ticker = 'EURUSD'
25 | tca_type = 'aggregated'
26 | bid_mid_bp = 0.1
27 | ask_mid_bp = 0.1
28 |
29 | use_multithreading = False
30 |
31 |
32 | def get_sample_data():
33 | from tcapy.analysis.algos.benchmark import BenchmarkMarketSpreadToMid
34 | logger.info("About to load data for " + ticker)
35 |
36 | tca_request = TCARequest(start_date='01 May 2017', finish_date='15 May 2017', ticker=ticker, trade_data_store='mysql',
37 | market_data_store=market_data_store,
38 | benchmark_calcs=[BenchmarkMarketSpreadToMid(bid_mid_bp=bid_mid_bp, ask_mid_bp=ask_mid_bp)],
39 | trade_order_mapping=['trade_df'], tca_type=tca_type, use_multithreading=use_multithreading)
40 |
41 | tca_engine = TCAEngineImpl()
42 |
43 | trade_order_results_df_dict = tca_engine.calculate_tca(tca_request)
44 | trade_df = trade_order_results_df_dict['trade_df']
45 |
46 | return trade_order_results_df_dict[ticker + '_df'], trade_df
47 |
48 | def example_calculate_market_impact():
49 | """Calculates the transient market impact for a trade
50 | """
51 | from tcapy.analysis.algos.metric import MetricTransientMarketImpact
52 |
53 | market_df, trade_df = get_sample_data()
54 |
55 | metric_market_impact = MetricTransientMarketImpact()
56 | metric_market_impact.calculate_metric(trade_df, market_df)
57 |
58 | print(trade_df)
59 |
60 | def example_calculate_slippage_with_bid_mid_spreads():
61 | """Calculate the slippage for trades given market data as a benchmark
62 | """
63 | from tcapy.analysis.algos.metric import MetricSlippage
64 |
65 | market_df, trade_df = get_sample_data()
66 |
67 | metric_slippage = MetricSlippage()
68 | trade_df, _ = metric_slippage.calculate_metric(trade_df, market_df)
69 |
70 | print(trade_df)
71 |
72 |
73 | if __name__ == '__main__':
74 | import time
75 |
76 | start = time.time()
77 |
78 | example_calculate_market_impact()
79 | example_calculate_slippage_with_bid_mid_spreads()
80 |
81 | finish = time.time()
82 | print('Status: calculated ' + str(round(finish - start, 3)) + "s")
83 |
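A possible follow-on sketch: summarizing the slippage metric across all trades. This assumes MetricSlippage adds a 'slippage' column to trade_df, which may differ in practice:

from tcapy.analysis.algos.metric import MetricSlippage

market_df, trade_df = get_sample_data()
trade_df, _ = MetricSlippage().calculate_metric(trade_df, market_df)

# Summary statistics of the computed metric (assumes the column is named 'slippage')
print(trade_df['slippage'].describe())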
--------------------------------------------------------------------------------
/tcapy_examples/gen/mongo_aws_examples.py:
--------------------------------------------------------------------------------
1 | """This shows how we can connect to an instance of MongoDB Atlas to read/write market tick data
2 |
3 | Note that you will need to get a MongoDB Atlas cloud account, and change the connection string below for it to work
4 | """
5 |
6 | __author__ = 'saeedamen' # Saeed Amen / saeed@cuemacro.com
7 |
8 | #
9 | # Copyright 2020 Cuemacro Ltd. - http://www.cuemacro.com / @cuemacro
10 | #
11 | # See the License for the specific language governing permissions and limitations under the License.
12 | #
13 |
14 | import datetime
15 | import time
16 |
17 | from tcapy.util.loggermanager import LoggerManager
18 | from tcapy.conf.constants import Constants
19 |
20 | from tcapy.data.datafactory import MarketRequest
21 |
22 | from tcapy.data.databasesource import DatabaseSourceArctic
23 |
24 | from tcapy.util.mediator import Mediator
25 | from tcapy.util.customexceptions import *
26 |
27 | from test.config import *
28 |
29 | logger = LoggerManager().getLogger(__name__)
30 |
31 | constants = Constants()
32 |
33 | logger.info('Make sure you have created the folders ' + constants.csv_folder + ' & ' + constants.temp_data_folder +
34 | ' otherwise tests will fail')
35 |
36 | Mediator.get_volatile_cache().clear_cache()
37 |
38 | ########################################################################################################################
39 | # YOU MAY NEED TO CHANGE THESE
40 |
41 | start_date = '26 Apr 2017'
42 | finish_date = '05 Jun 2017'
43 | ticker = 'EURUSD'
44 |
45 | # Market data parameters for tables/databases
46 | test_harness_arctic_market_data_table = 'market_data_table_test_harness'
47 | test_harness_arctic_market_data_store = 'arctic-testharness'
48 |
49 | csv_market_data_store = resource('small_test_market_df.parquet')
50 | csv_reverse_market_data_store = resource('small_test_market_df_reverse.parquet')
51 |
52 | # Note, you'll need to get your own connection string!
53 | # You can setup your own MongoDB instance on the cloud using MongoDB Atlas https://www.mongodb.com/cloud/atlas
54 | # It will give you the connection string to use
55 | arctic_connection_string = "mongodb+srv://:@cluster0.blah-blah.mongodb.net/?retryWrites=true&w=majority"
56 |
57 | def write_mongo_db_atlas_arctic():
58 | """Tests we can write market data to Arctic/MongoDB on Atlas (cloud)
59 | """
60 |
61 | market_loader = Mediator.get_tca_market_trade_loader(version=tcapy_version)
62 |
63 | ### Test we can read data from CSV and dump to Arctic (and when read back it matches CSV)
64 |     db_start_date = '01 Jan 2016'
65 | db_finish_date = pd.Timestamp(datetime.datetime.utcnow())
66 |
67 | database_source = DatabaseSourceArctic(postfix='testharness', arctic_lib_type='CHUNK_STORE', connection_string=arctic_connection_string)
68 |
69 | # Write CSV to Arctic
70 | database_source.convert_csv_to_table(csv_market_data_store, ticker,
71 | test_harness_arctic_market_data_table,
72 | if_exists_table='replace', if_exists_ticker='replace', market_trade_data='market',
73 | remove_duplicates=False)
74 |
75 | # Read back data from Arctic and compare with CSV
76 | market_request = MarketRequest(start_date=db_start_date, finish_date=db_finish_date, ticker=ticker,
77 | data_store=database_source, # test_harness_arctic_market_data_store,
78 | market_data_database_table=test_harness_arctic_market_data_table)
79 |
80 | market_df_load = market_loader.get_market_data(market_request=market_request)
81 |
82 | print(market_df_load)
83 |
84 | if __name__ == '__main__':
85 | start = time.time()
86 |
87 | write_mongo_db_atlas_arctic()
88 |
89 | finish = time.time()
90 | print('Status: calculated ' + str(round(finish - start, 3)) + "s")
91 |
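For reference, a filled-in Atlas connection string follows the standard mongodb+srv URI format; the username, password and cluster host below are placeholders you would replace with your own:

# Placeholders only - substitute your own Atlas credentials and cluster host
arctic_connection_string = "mongodb+srv://myuser:mypassword@cluster0.example.mongodb.net/?retryWrites=true&w=majority"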
--------------------------------------------------------------------------------
/tcapy_examples/gen/test_tca_report.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy_examples/gen/test_tca_report.pdf
--------------------------------------------------------------------------------
/tcapy_examples/gen/time_series_ops_examples.py:
--------------------------------------------------------------------------------
1 | from __future__ import division, print_function
2 |
3 | __author__ = 'saeedamen' # Saeed Amen / saeed@cuemacro.com
4 |
5 | #
6 | # Copyright 2017 Cuemacro Ltd. - http://www.cuemacro.com / @cuemacro
7 | #
8 | # See the License for the specific language governing permissions and limitations under the License.
9 | #
10 | # This may not be distributed without the permission of Cuemacro.
11 | #
12 |
13 | import os
14 |
15 | from tcapy.analysis.tcaengine import TCAEngineImpl
16 | from tcapy.analysis.tcarequest import TCARequest
17 | from tcapy.analysis.algos.benchmark import BenchmarkMarketMid
18 | from tcapy.util.timeseries import TimeSeriesOps
19 | from tcapy.util.loggermanager import LoggerManager
20 |
21 | from tcapy.conf.constants import Constants
22 |
23 | logger = LoggerManager.getLogger(__name__)
24 | time_series_ops = TimeSeriesOps()
25 | constants = Constants()
26 |
27 | # 'dukascopy' or 'ncfx'
28 | data_source = 'ncfx'
29 |
30 | # Change the market and trade data store as necessary
31 | market_data_store = 'arctic-' + data_source
32 | trade_data_source = 'ms_sql_server'
33 | ticker = 'EURUSD'
34 | tca_type = 'aggregated'
35 | start_date = '01 May 2017'
36 | finish_date = '12 May 2017'
37 | bid_mid_bp = 0.1
38 | ask_mid_bp = 0.1
39 |
40 | use_test_csv = True
41 |
42 | folder = constants.test_data_harness_folder
43 |
44 |
45 | def get_sample_data():
46 | logger.info("About to load data for " + ticker)
47 |
48 | if use_test_csv:
49 | tca_request = TCARequest(start_date=start_date, finish_date=finish_date, ticker=ticker,
50 | trade_data_store='csv',
51 | reporting_currency='EUR',
52 | market_data_store=os.path.join(folder, 'small_test_market_df.csv.gz'),
53 | trade_order_mapping={'trade_df': os.path.join(folder, 'small_test_trade_df.csv'),
54 | 'order_df': os.path.join(folder, 'small_test_order_df.csv')},
55 | tca_type=tca_type, benchmark_calcs=BenchmarkMarketMid())
56 | else:
57 | tca_request = TCARequest(start_date=start_date, finish_date=finish_date, ticker=ticker,
58 | trade_data_store=trade_data_source,
59 | reporting_currency='EUR',
60 |                                  market_data_store=market_data_store,
61 | trade_order_mapping=['trade_df'], tca_type=tca_type, benchmark_calcs=BenchmarkMarketMid())
62 |
63 | tca_engine = TCAEngineImpl()
64 |
65 | trade_order_results_df_dict = tca_engine.calculate_tca(tca_request)
66 |
67 | return trade_order_results_df_dict[ticker + '_df'], trade_order_results_df_dict['trade_df'], \
68 | trade_order_results_df_dict['order_df']
69 |
70 |
71 | def example_calculate_weighted_average():
72 |     """Example to create a weighted average of all (numerical) columns of trades (weighted by notional)
73 | """
74 | market_df, trade_df, order_df = get_sample_data()
75 |
76 | avg = time_series_ops.weighted_average_of_each_column(trade_df, weighting_col='notional')
77 |
78 | print(avg)
79 |
80 |
81 | if __name__ == '__main__':
82 | import time
83 |
84 | start = time.time()
85 |
86 | example_calculate_weighted_average()
87 |
88 | finish = time.time()
89 | print('Status: calculated ' + str(round(finish - start, 3)) + "s")
90 |
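A minimal sketch of what a notional-weighted average looks like by hand, for a toy DataFrame (the column names are illustrative), to make the calculation above concrete:

import pandas as pd

trade_df_toy = pd.DataFrame({'executed_price': [1.10, 1.20], 'notional': [1e6, 3e6]})

# Weighted average = sum(price * notional) / sum(notional) = (1.10 * 1 + 1.20 * 3) / 4 = 1.175
weighted_avg = (trade_df_toy['executed_price'] * trade_df_toy['notional']).sum() / trade_df_toy['notional'].sum()
print(weighted_avg)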
--------------------------------------------------------------------------------
/tcapy_notebooks/cuemacro_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy_notebooks/cuemacro_logo.png
--------------------------------------------------------------------------------
/tcapy_scripts/gen/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapy_scripts/gen/__init__.py
--------------------------------------------------------------------------------
/tcapy_scripts/gen/convert_csv_to_hdf5_parquet.py:
--------------------------------------------------------------------------------
1 | """Converts CSV files with DataFrames into Parquet (or HDF5) and dumps to disk
2 | """
3 |
4 | from __future__ import print_function, division
5 |
6 | __author__ = 'saeedamen' # Saeed Amen / saeed@cuemacro.com
7 |
8 | #
9 | # Copyright 2017 Cuemacro Ltd. - http://www.cuemacro.com / @cuemacro
10 | #
11 | # See the License for the specific language governing permissions and limitations under the License.
12 | #
13 |
14 | from tcapy.util.utilfunc import UtilFunc
15 | from tcapy.conf.constants import Constants
16 |
17 | import os
18 |
19 | constants = Constants()
20 |
21 | if __name__ == '__main__':
22 |
23 | folder = constants.test_data_harness_folder
24 |
25 | csv_market_data_files = ['small_test_market_df.csv.gz', 'small_test_market_df_reverse.csv.gz']
26 |
27 | # Can either dump to Parquet (default) or HDF (optional)
28 | # format = 'hdf5'; file_ext = 'h5'
29 | format = 'parquet'; file_ext = 'parquet'
30 |
31 | for csv_market_data in csv_market_data_files:
32 | csv_market_data = os.path.join(folder, csv_market_data)
33 |
34 | REVERSE_SORT = False
35 |
36 | from tcapy.data.databasesource import DatabaseSourceCSV
37 |
38 | # Read CSV and parse the main field
39 | df = DatabaseSourceCSV()._fetch_table(csv_market_data)
40 |
41 | if REVERSE_SORT:
42 | df = df.sort_index(ascending=False)
43 |
44 | h5_market_data = csv_market_data.replace('.csv.gz', '.' + file_ext)
45 | UtilFunc().write_dataframe_to_binary(df, h5_market_data, format=format)
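To sanity check the conversion, the written file can be read back with the same helper used elsewhere in the repo (the path here is illustrative):

from tcapy.util.utilfunc import UtilFunc

# Round-trip check: read the freshly written binary file back into a DataFrame
df_check = UtilFunc().read_dataframe_from_binary('small_test_market_df.parquet', format='parquet')
print(df_check.head())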
--------------------------------------------------------------------------------
/tcapy_scripts/gen/convert_hdf5_to_csv.py:
--------------------------------------------------------------------------------
1 | """Converts HDF5 files with DataFrames into CSV and dumps to disk
2 | """
3 |
4 | from __future__ import print_function, division
5 |
6 | __author__ = 'saeedamen' # Saeed Amen / saeed@cuemacro.com
7 |
8 | #
9 | # Copyright 2017 Cuemacro Ltd. - http://www.cuemacro.com / @cuemacro
10 | #
11 | # See the License for the specific language governing permissions and limitations under the License.
12 | #
13 |
14 | from tcapy.util.utilfunc import UtilFunc
15 |
16 | if __name__ == '__main__':
17 |
18 | REVERSE_SORT_CSV = False
19 |
20 |     # Convert from H5 to CSV file (change path as appropriate)
21 | h5_file = '/mnt/e/Remote/tcapy/tests_harness_data/test_market_EURUSD.h5'
22 |
23 | df = UtilFunc().read_dataframe_from_binary(h5_file)
24 |
25 | if REVERSE_SORT_CSV:
26 | df = df.sort_index(ascending=False)
27 |
28 | df.to_csv(h5_file.replace('.h5', '.csv'))
--------------------------------------------------------------------------------
/tcapy_scripts/gen/convert_hdf5_to_parquet.py:
--------------------------------------------------------------------------------
1 | """Converts HDF5 files with DataFrames into CSV and dumps to disk
2 | """
3 |
4 | from __future__ import print_function, division
5 |
6 | __author__ = 'saeedamen' # Saeed Amen / saeed@cuemacro.com
7 |
8 | #
9 | # Copyright 2017 Cuemacro Ltd. - http://www.cuemacro.com / @cuemacro
10 | #
11 | # See the License for the specific language governing permissions and limitations under the License.
12 | #
13 |
14 | from tcapy.util.utilfunc import UtilFunc
15 |
16 | import glob
17 |
18 | if __name__ == '__main__':
19 |
20 | REVERSE_SORT_CSV = False
21 |
22 | input_path = '/home/tcapyuser/cuemacro/tcapy/tests_harness_data'
23 | output_path = '/home/tcapyuser/cuemacro/tcapy/tests_harness_data'
24 |
25 |     # Convert from H5 to Parquet files (change input_path/output_path as appropriate)
26 | h5_file_list = [input_path + '/small_test_market_df.h5',
27 | input_path + '/small_test_market_df_reverse.h5']
28 |
29 | for h5_file in h5_file_list:
30 |
31 | if '*' in h5_file:
32 | h5_mini_list = glob.glob(h5_file)
33 | else:
34 | h5_mini_list = [h5_file]
35 |
36 | for next_h5_file in h5_mini_list:
37 | df = UtilFunc().read_dataframe_from_binary(next_h5_file, format='hdf5')
38 |
39 | if REVERSE_SORT_CSV:
40 | df = df.sort_index(ascending=False)
41 |
42 | UtilFunc().write_dataframe_to_binary(df, next_h5_file.replace(input_path, output_path)
43 | .replace('.h5', '.parquet'), format='parquet')
--------------------------------------------------------------------------------
/tcapy_scripts/gen/copy_parquet_to_arrow.py:
--------------------------------------------------------------------------------
1 | """Copies a folder of parquet files into another into arrow folder for use with vaex. Note you need to install vaex
2 | library in addition to use this.
3 | """
4 |
5 | from __future__ import print_function, division
6 |
7 | __author__ = 'saeedamen' # Saeed Amen / saeed@cuemacro.com
8 |
9 | #
10 | # Copyright 2021 Cuemacro Ltd. - http://www.cuemacro.com / @cuemacro
11 | #
12 | # See the License for the specific language governing permissions and limitations under the License.
13 | #
14 |
15 | if __name__ == '__main__':
16 | import time
17 | import vaex
18 | import pandas as pd
19 |
20 | import glob
21 | import os
22 |
23 | from findatapy.util.loggermanager import LoggerManager
24 |
25 | start = time.time()
26 |
27 | data_vendor = 'dukascopy' # 'ncfx' or 'dukascopy'
28 |
29 | source_folder = '/data/csv_dump/' + data_vendor + '/'
30 | destination_folder = '/data/csv_dump/' + data_vendor + '_arrow/'
31 |
32 | logger = LoggerManager().getLogger(__name__)
33 |
34 | parquet_list = glob.glob(source_folder + '/*.parquet')
35 |
36 | for p in parquet_list:
37 | df = pd.read_parquet(p)
38 |
39 | df = vaex.from_pandas(df, name='pandas', copy_index=True, index_name='Date')
40 |
41 | logger.info("Converting " + p + "...")
42 | filename = os.path.basename(p)
43 |
44 | df.export(destination_folder + "/" + filename.replace('parquet', 'arrow'))
45 |
46 | finish = time.time()
47 | print('Status: calculated ' + str(round(finish - start, 3)) + "s")
48 |
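Once exported, the Arrow file can be opened lazily with vaex (the path is illustrative), which is the point of converting in the first place:

import vaex

# vaex memory-maps the Arrow file rather than loading it all into RAM
df_arrow = vaex.open('/data/csv_dump/dukascopy_arrow/EURUSD.arrow')
print(df_arrow.head(5))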
--------------------------------------------------------------------------------
/tcapy_scripts/gen/download_data_vendor_data.py:
--------------------------------------------------------------------------------
1 | """Calls data vendor via API and displays market data output
2 | """
3 |
4 | from __future__ import print_function, division
5 |
6 | __author__ = 'saeedamen' # Saeed Amen / saeed@cuemacro.com
7 |
8 | #
9 | # Copyright 2017 Cuemacro Ltd. - http://www.cuemacro.com / @cuemacro
10 | #
11 | # See the License for the specific language governing permissions and limitations under the License.
12 | #
13 |
14 | if __name__ == '__main__':
15 |
16 | import time
17 |
18 |     start = time.time()
19 |
20 | data_vendor = 'ncfx'
21 |
22 | if data_vendor == 'dukascopy':
23 | from tcapy.data.databasesource import DatabaseSourceDukascopy as DatabaseSource
24 | elif data_vendor == 'ncfx':
25 | from tcapy.data.databasesource import DatabaseSourceNCFX as DatabaseSource
26 |
27 | database_source = DatabaseSource()
28 |
29 | ticker = 'EURUSD'
30 |
31 | # df1 = database_source.fetch_market_data(start_date='02 Apr 2016 20:00', finish_date='04 Apr 2016 02:00',
32 | # ticker=ticker)
33 |
34 | # Note: some data sources might only let you download a small chunk of tick data (try daily and hourly)
35 | df1 = database_source.fetch_market_data(start_date='05 Aug 2016 00:00', finish_date='05 Aug 2016 02:00', ticker=ticker)
36 | df2 = database_source.fetch_market_data(start_date='06 Aug 2019', finish_date='07 Aug 2019', ticker=ticker)
37 | df3 = database_source.fetch_market_data(start_date='07 Aug 2019', finish_date='08 Aug 2019', ticker=ticker)
38 |
39 | # Both points on a Saturday (should result in empty DataFrame)
40 | df4 = database_source.fetch_market_data(start_date='25 Apr 2020 00:00', finish_date='25 Apr 2020 02:00',
41 | ticker=ticker)
42 |
43 | # Start late on Friday and finish on Saturday (should have some data)
44 | df5 = database_source.fetch_market_data(start_date='24 Apr 2020 18:00', finish_date='25 Apr 2020 02:00',
45 | ticker=ticker)
46 |
47 | print(df1)
48 | print(df2)
49 | print(df3)
50 | print(df4) # Should be empty
51 | print(df5)
52 |
53 |     finish = time.time()
54 | print('Status: calculated ' + str(round(finish - start, 3)) + "s")
55 |
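A small sanity check on the weekend request could look like the following (whether an empty DataFrame or None is returned may depend on the DatabaseSource):

# df4 covers a Saturday-only window, so it should contain no ticks
assert df4 is None or df4.empty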
--------------------------------------------------------------------------------
/tcapy_scripts/gen/dump_data_vendor_large_chunk_to_parquet_csv_hdf5.py:
--------------------------------------------------------------------------------
1 | """Downloads new market data from data vendor for writing to CSV (using a larger chunk size for DatabasePopulator,
2 | which should be quicker for NCFX, so we can reuse the same TCP connection)
3 | """
4 |
5 | from __future__ import print_function, division
6 |
7 | __author__ = 'saeedamen' # Saeed Amen / saeed@cuemacro.com
8 |
9 | #
10 | # Copyright 2017 Cuemacro Ltd. - http://www.cuemacro.com / @cuemacro
11 | #
12 | # See the License for the specific language governing permissions and limitations under the License.
13 | #
14 |
15 | if __name__ == '__main__':
16 | import time
17 |
18 | start = time.time()
19 |
20 | from tcapy.conf.constants import Constants
21 |
22 | data_vendor = 'ncfx' # 'ncfx' or 'dukascopy'
23 | write_large_csv = False
24 | write_large_hdf5_parquet = True
25 | return_df = False # returns the dataframe (DO NOT DO this for large datasets, as it will cause you to run out of memory)
26 |     remove_duplicates = False # If False, keeps duplicate data points (the vast proportion of data points will be
27 |                               # duplicates, and keeping them makes the CSV files much bigger, which is ok for
28 |                               # archival purposes); however, when finally copying to the Arctic database, we
29 |                               # recommend removing duplicates, otherwise it quickly results in running out of memory
30 |
31 | csv_folder = '/data/csv_dump/' + data_vendor + '/'
32 | constants = Constants()
33 |
34 |     # Where should we dump the temporary FX data mini files and large H5/Parquet files?
35 |     # Sometimes we might want to specify just a small date range to download, and specific tickers
36 |
37 | # Usual default parameters
38 | start_date = None; finish_date = None
39 |
40 | large_chunk_int_min = 1440 # Uses a full day chunk size (DatabaseSource underneath can still manage this)
41 |
42 | # You may need to change these folders
43 | temp_data_folder = constants.temp_data_folder; temp_large_data_folder = constants.temp_large_data_folder
44 | temp_data_folder = '/data/csv_dump/temp/'
45 | temp_large_data_folder = '/data/csv_dump/temp/large/'
46 |
47 | start_date_csv = '01 Apr 2016'; finish_date_csv = '01 Feb 2021'; split_size = 'monthly' # 'daily' or 'monthly'
48 | # start_date_csv = '01 Jan 2005'; finish_date_csv = '01 Jan 2021';
49 |     start_date_csv = '01 Oct 2017'; finish_date_csv = '01 Feb 2021'
50 |
51 | if data_vendor == 'ncfx':
52 | from tcapy.data.databasepopulator import DatabasePopulatorNCFX as DatabasePopulator
53 |
54 | tickers = constants.ncfx_tickers
55 | elif data_vendor == 'dukascopy':
56 | from tcapy.data.databasepopulator import DatabasePopulatorDukascopy as DatabasePopulator
57 |
58 | tickers = constants.dukascopy_tickers
59 |
60 |     # Example of manually specifying tickers
61 | # tickers = {'EURUSD' : 'EURUSD', 'GBPUSD': 'GBPUSD', 'USDCAD': 'USDCAD', 'NZDUSD': 'NZDUSD', 'USDCHF' : 'USDCHF',
62 | # 'USDJPY' : 'USDJPY'}
63 |
64 | db_populator = DatabasePopulator(temp_data_folder=temp_data_folder, temp_large_data_folder=temp_large_data_folder,
65 | tickers=tickers)
66 |
67 | # Writes a CSV/Parquet to disk from data vendor (does not attempt to write anything to the database)
68 | # Will also dump temporary HDF5 files to disk (to avoid reloading them)
69 | msg, df_dict = db_populator.download_to_csv(start_date_csv, finish_date_csv, tickers, split_size=split_size,
70 | csv_folder=csv_folder, return_df=False, remove_duplicates=False, write_large_csv=write_large_csv,
71 | write_large_hdf5_parquet=write_large_hdf5_parquet, chunk_int_min=large_chunk_int_min)
72 |
73 | print(msg)
74 | print(df_dict)
75 |
76 | finish = time.time()
77 | print('Status: calculated ' + str(round(finish - start, 3)) + "s")
--------------------------------------------------------------------------------
/tcapy_scripts/gen/dump_data_vendor_to_parquet_csv_hdf5.py:
--------------------------------------------------------------------------------
1 | """Downloads new market data from data vendor for writing to CSV
2 | """
3 |
4 | from __future__ import print_function, division
5 |
6 | __author__ = 'saeedamen' # Saeed Amen / saeed@cuemacro.com
7 |
8 | #
9 | # Copyright 2017 Cuemacro Ltd. - http://www.cuemacro.com / @cuemacro
10 | #
11 | # See the License for the specific language governing permissions and limitations under the License.
12 | #
13 |
14 | if __name__ == '__main__':
15 | import time
16 |
17 | start = time.time()
18 |
19 | from tcapy.conf.constants import Constants
20 |
21 | data_vendor = 'dukascopy' # 'ncfx' or 'dukascopy'
22 | write_large_csv = False
23 | write_large_hdf5_parquet = True
24 | return_df = False # returns the dataframe (DO NOT DO this for large datasets, as it will cause you to run out of memory)
25 |     remove_duplicates = False # If False, keeps duplicate data points (the vast proportion of data points will be
26 |                               # duplicates, and keeping them makes the CSV files much bigger, which is ok for
27 |                               # archival purposes); however, when finally copying to the Arctic database, we
28 |                               # recommend removing duplicates, otherwise it quickly results in running out of memory
29 |
30 | csv_folder = '/data/csv_dump/' + data_vendor + '/'
31 | constants = Constants()
32 |
33 |     # Where should we dump the temporary FX data mini files and large H5/Parquet files?
34 |     # Sometimes we might want to specify just a small date range to download, and specific tickers
35 |
36 | # Usual default parameters
37 | start_date = None; finish_date = None
38 |
39 | # You may need to change these folders
40 | temp_data_folder = constants.temp_data_folder; temp_large_data_folder = constants.temp_large_data_folder
41 | temp_data_folder = '/data/csv_dump/temp/'
42 | temp_large_data_folder = '/data/csv_dump/temp/large/'
43 |
44 | start_date_csv = '01 Apr 2016'; finish_date_csv = '01 May 2020'; split_size = 'monthly' # 'daily' or 'monthly'
45 |     start_date_csv = '01 Jan 2005'; finish_date_csv = '01 Feb 2021'
46 |
47 | if data_vendor == 'ncfx':
48 | from tcapy.data.databasepopulator import DatabasePopulatorNCFX as DatabasePopulator
49 |
50 | tickers = constants.ncfx_tickers
51 | elif data_vendor == 'dukascopy':
52 | from tcapy.data.databasepopulator import DatabasePopulatorDukascopy as DatabasePopulator
53 |
54 | tickers = constants.dukascopy_tickers
55 |
56 |     # Example of manually specifying tickers
57 | # tickers = {'EURUSD' : 'EURUSD', 'GBPUSD': 'GBPUSD', 'USDCAD': 'USDCAD', 'NZDUSD': 'NZDUSD', 'USDCHF' : 'USDCHF',
58 | # 'USDJPY' : 'USDJPY'}
59 |
60 | db_populator = DatabasePopulator(temp_data_folder=temp_data_folder, temp_large_data_folder=temp_large_data_folder,
61 | tickers=tickers)
62 |
63 | # Writes a CSV/Parquet to disk from data vendor (does not attempt to write anything to the database)
64 | # Will also dump temporary HDF5 files to disk (to avoid reloading them)
65 | msg, df_dict = db_populator.download_to_csv(start_date_csv, finish_date_csv, tickers, split_size=split_size,
66 | csv_folder=csv_folder, return_df=False, remove_duplicates=False, write_large_csv=write_large_csv,
67 | write_large_hdf5_parquet=write_large_hdf5_parquet)
68 |
69 | print(msg)
70 | print(df_dict)
71 |
72 | finish = time.time()
73 | print('Status: calculated ' + str(round(finish - start, 3)) + "s")
74 |
--------------------------------------------------------------------------------
/tcapy_scripts/gen/dump_market_data_from_database_to_parquet.py:
--------------------------------------------------------------------------------
1 | """Script to fetch market data from database and dump to disk as a Parquet.
2 | This can be useful if we want to transfer the data to another computer later, or for backup purposes.
3 | """
4 |
5 | from __future__ import division, print_function
6 |
7 | __author__ = 'saeedamen' # Saeed Amen / saeed@cuemacro.com
8 |
9 | #
10 | # Copyright 2020 Cuemacro Ltd. - http://www.cuemacro.com / @cuemacro
11 | #
12 | # See the License for the specific language governing permissions and limitations under the License.
13 | #
14 |
15 | import os
16 | from tcapy.conf.constants import Constants
17 |
18 | constants = Constants()
19 |
20 | if __name__ == '__main__':
21 | # 'arctic' or 'pystore'
22 | database_dialect = 'arctic'
23 |
24 | # 'dukascopy' or 'ncfx'
25 | data_vendor = 'dukascopy'
26 |
27 | # Where to dump the CSV (& Parquet) files - make sure this exists
28 | folder = '/home/tcapyuser/debug_tick/'
29 |
30 |     # Warning: for high frequency data, file sizes might be very big, so you may need to reduce this date range!
31 | start_date = '01 Jan 2020'; finish_date = '01 Jun 2020'
32 |
33 | tickers = ['EURUSD', 'GBPUSD', 'AUDUSD', 'NZDUSD', 'USDCAD', 'USDCHF', 'EURNOK', 'EURSEK', 'USDJPY']
34 |
35 | if database_dialect == 'arctic':
36 | from tcapy.data.databasesource import DatabaseSourceArctic as DatabaseSource
37 | elif database_dialect == 'pystore':
38 | from tcapy.data.databasesource import DatabaseSourcePyStore as DatabaseSource
39 | elif database_dialect == 'influxdb':
40 | from tcapy.data.databasesource import DatabaseSourceInfluxDB as DatabaseSource
41 |
42 | database_source = DatabaseSource(postfix=data_vendor)
43 |
44 | file_format = 'parquet'
45 |
46 | for t in tickers:
47 | market_df = database_source.fetch_market_data(start_date=start_date, finish_date=finish_date, ticker=t)
48 |
49 |         # Only construct the filename (which embeds the first/last timestamps) if we actually got data back
50 |         if market_df is not None:
51 |             key = '_' + data_vendor + "_" + \
52 |                 (str(market_df.index[0]) + str(market_df.index[-1])).replace(":", '_').replace(" ", '_')
53 |             filename = os.path.join(folder, t + key) + '.' + file_format
54 |
55 |             print('Writing ' + t + ' to ' + filename)
56 |             print('No of items ' + str(len(market_df.index)))
57 |
58 |             if file_format == 'parquet':
59 |                 market_df.to_parquet(filename)
60 |             elif file_format == 'csv':
61 |                 market_df.to_csv(filename)
62 |
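The dumped files can later be reloaded directly with pandas (the filename below is a placeholder for whatever the script wrote):

import pandas as pd

# Reload a dumped Parquet file for inspection
market_df = pd.read_parquet('/home/tcapyuser/debug_tick/EURUSD_dump.parquet')
print(market_df.head())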
--------------------------------------------------------------------------------
/tcapy_scripts/gen/dump_trade_data_from_database_to_csv.py:
--------------------------------------------------------------------------------
1 | """Script to fetch trade/order data from database (here from a defined SQL database) and dump to disk as a CSV file.
2 | This can be useful if we want to transfer the data to another computer later, or for backup purposes.
3 | """
4 |
5 | from __future__ import division, print_function
6 |
7 | __author__ = 'saeedamen' # Saeed Amen / saeed@cuemacro.com
8 |
9 | #
10 | # Copyright 2020 Cuemacro Ltd. - http://www.cuemacro.com / @cuemacro
11 | #
12 | # See the License for the specific language governing permissions and limitations under the License.
13 | #
14 |
15 | from tcapy.conf.constants import Constants
16 |
17 | constants = Constants()
18 |
19 | if __name__ == '__main__':
20 | # 'ms_sql_server' or 'mysql'
21 | sql_dialect = 'ms_sql_server'
22 | trade_data_database_name = 'trade_database'
23 |
24 | trade_data_folder = ''
25 |
26 | if sql_dialect == 'ms_sql_server':
27 | from tcapy.data.databasesource import DatabaseSourceMSSQLServer as DatabaseSource
28 | elif sql_dialect == 'mysql':
29 | from tcapy.data.databasesource import DatabaseSourceMySQL as DatabaseSource
30 |
31 | # Where to dump the CSV (& Parquet) files
32 | csv_trade_order_mapping_dump = {'trade_df' : trade_data_folder + 'trade_df_dump.csv',
33 | 'order_df' : trade_data_folder + 'order_df_dump.csv'}
34 |
35 | # Get the actual table names in the database which may differ from "nicknames"
36 | trade_order_mapping = constants.trade_order_mapping[sql_dialect]
37 |
38 | database_source = DatabaseSource(trade_data_database_name=trade_data_database_name)
39 |
40 | # Go through each trade/order and then dump as CSV and Parquet files on disk
41 | for k in trade_order_mapping.keys():
42 | trade_order_df = database_source.fetch_trade_order_data(table_name=trade_order_mapping[k])
43 | trade_order_df.to_csv(csv_trade_order_mapping_dump[k])
44 | trade_order_df.to_parquet(csv_trade_order_mapping_dump[k].replace('csv', 'parquet'))
45 |
46 |
--------------------------------------------------------------------------------
/tcapy_scripts/gen/edit_folder_hdf5_parquet.py:
--------------------------------------------------------------------------------
1 | """Edits a folder of Parquet files to add a ticker column
2 | """
3 |
4 | from __future__ import print_function, division
5 |
6 | __author__ = 'saeedamen' # Saeed Amen / saeed@cuemacro.com
7 |
8 | #
9 | # Copyright 2020 Cuemacro
10 | #
11 | # See the License for the specific language governing permissions and limitations under the License.
12 | #
13 |
14 | import os
15 |
16 | from tcapy.util.loggermanager import LoggerManager
17 | from tcapy.util.utilfunc import UtilFunc
18 |
19 | add_vendor = 'dukascopy'
20 |
21 | path = parquet_path = '/home/tcapyuser/csv_dump/' + add_vendor + '/'
22 |
23 | filenames = os.listdir(path)
24 |
25 | util_func = UtilFunc()
26 | logger = LoggerManager.getLogger(__name__)
27 |
28 | for filename in filenames:
29 | format = filename.split('.')[-1]
30 |
31 | if format == 'gzip':
32 | format = 'parquet'
33 | elif format == 'h5':
34 | format = 'hdf5'
35 |
36 | logger.info('Reading to patch file ' + filename)
37 |
38 | df = util_func.read_dataframe_from_binary(os.path.join(path, filename), format=format)
39 |
40 | # Do your edits here, in this case overwriting the ticker column
41 | ticker = filename.split('_')[0]
42 | df['ticker'] = ticker
43 |
44 | util_func.write_dataframe_to_binary(df, os.path.join(path, filename), format=format)
--------------------------------------------------------------------------------
/tcapy_scripts/gen/plot_market_data_from_database.py:
--------------------------------------------------------------------------------
1 | """Script to fetch market data from database and plot in Plotly. It might often be the case you need to get market data
2 | for other purposes (eg. to plot in Excel etc.). Note, given it's high frequency data, we need to be careful when
3 | fetching large amounts to plot.
4 | """
5 |
6 | from __future__ import division, print_function
7 |
8 | __author__ = 'saeedamen' # Saeed Amen / saeed@cuemacro.com
9 |
10 | #
11 | # Copyright 2020 Cuemacro Ltd. - http://www.cuemacro.com / @cuemacro
12 | #
13 | # See the License for the specific language governing permissions and limitations under the License.
14 | #
15 |
16 | from chartpy import Chart, Style
17 |
18 | from tcapy.analysis.tcamarkettradeloaderimpl import TCAMarketTradeLoaderImpl
19 | from tcapy.analysis.tcarequest import MarketRequest
20 | from tcapy.vis.displaylisteners import PlotRender
21 | from tcapy.conf.constants import Constants
22 |
23 | constants = Constants()
24 |
25 | if __name__ == '__main__':
26 | # 'arctic' or 'pystore'
27 | database_dialect = 'arctic'
28 |
29 | # 'dukascopy' or 'ncfx'
30 | data_vendor = 'dukascopy'
31 |
32 | ticker = 'EURUSD'
33 |
34 |     # Warning: for high frequency data, file sizes might be very big, so you may need to reduce this date range!
35 | market_request = MarketRequest(start_date='01 Jan 2020', finish_date='01 Feb 2020', ticker=ticker,
36 | data_store=database_dialect + '-' + data_vendor)
37 |
38 | tca_market_trade_loader = TCAMarketTradeLoaderImpl()
39 |
40 | df = tca_market_trade_loader.get_market_data(market_request)
41 |
42 | # Grab a Plotly figure of the data
43 | fig = PlotRender().plot_timeline(df, title=ticker)
44 |
45 | # Generate HTML file of Plotly figure
46 | Chart(engine='plotly').plot(fig, style=Style(html_file_output='test.html'))
47 |
48 | print(df)
49 |
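For larger date ranges, one option (a sketch, reusing the helpers above) is to downsample before plotting, so the Plotly figure stays a manageable size:

# Downsample high frequency ticks to 1-minute bars before rendering
df_resample = df.resample('1min').last()

fig = PlotRender().plot_timeline(df_resample, title=ticker)
Chart(engine='plotly').plot(fig, style=Style(html_file_output='test_resampled.html'))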
--------------------------------------------------------------------------------
/tcapy_scripts/gen/plot_parquet_file.py:
--------------------------------------------------------------------------------
1 | """Plots Parquet files, such as those which have been downloaded from Dukascopy
2 | """
3 |
4 | from __future__ import print_function, division
5 |
6 | __author__ = 'saeedamen' # Saeed Amen / saeed@cuemacro.com
7 |
8 | #
9 | # Copyright 2020 Cuemacro
10 | #
11 | # See the License for the specific language governing permissions and limitations under the License.
12 | #
13 |
14 | import os
15 |
16 | from chartpy import Chart
17 | from tcapy.data.databasesource import DatabaseSourceCSVBinary
18 |
19 | parquet_path = '/data/csv_dump/dukascopy/'
20 |
21 | filename = ['EURUSD_dukascopy_2020-05-01_00_00_00.096000+00_002020-05-31_23_59_59.084000+00_00.parquet']
22 |
23 | for f in filename:
24 | final_path = os.path.join(parquet_path, f)
25 |
26 | database_source = DatabaseSourceCSVBinary(market_data_database_csv=final_path)
27 | df = database_source.fetch_market_data()
28 |
29 | print(df)
30 |
31 | df_resample = df.resample('1min').last()
32 |
33 | Chart().plot(df_resample)
34 |
--------------------------------------------------------------------------------
/tcapy_scripts/gen/query_available_trades.py:
--------------------------------------------------------------------------------
1 | """Queries what trades are available in the database
2 | """
3 |
4 | from __future__ import print_function, division
5 |
6 | __author__ = 'saeedamen' # Saeed Amen / saeed@cuemacro.com
7 |
8 | #
9 | # Copyright 2018 Cuemacro
10 | #
11 | # See the License for the specific language governing permissions and limitations under the License.
12 | #
13 |
14 | from tcapy.conf.constants import Constants
15 |
16 | constants = Constants()
17 |
18 | if __name__ == '__main__':
19 | ### Fetch all the trades from SQL Server (irrespective of ticker) and every event-type
20 | from tcapy.data.datafactory import DataFactory
21 | from tcapy.analysis.tcarequest import TradeRequest
22 | from tcapy.analysis.algos.resultssummary import ResultsSummary
23 |
24 | data_factory = DataFactory()
25 | results_summary = ResultsSummary()
26 |
27 | start_date = '01 Mar 2018'; finish_date = '01 Apr 2018'
28 |
29 | trade_order_type_list = ['trade_df']
30 | query_fields = ['ticker', 'broker_id']
31 |
32 | for t in trade_order_type_list:
33 | trade_request = TradeRequest(start_date=start_date, finish_date=finish_date, data_store='ms_sql_server',
34 | trade_order_type=t)
35 |
36 | trade_order_df = data_factory.fetch_table(trade_request)
37 | query_dict = results_summary.query_trade_order_population(trade_order_df, query_fields=query_fields)
38 |
39 | print(query_dict)
--------------------------------------------------------------------------------
/tcapy_scripts/gen/rename_file_names.py:
--------------------------------------------------------------------------------
1 | """Renames filenames in a folder.
2 | """
3 | from __future__ import print_function, division
4 |
5 | __author__ = 'saeedamen' # Saeed Amen / saeed@cuemacro.com
6 |
7 | #
8 | # Copyright 2017 Cuemacro Ltd. - http://www.cuemacro.com / @cuemacro
9 | #
10 | # See the License for the specific language governing permissions and limitations under the License.
11 | #
12 |
13 | if __name__ == '__main__':
14 |
15 | import os
16 |
17 | add_vendor = 'dukascopy'
18 |
19 | path = '/home/redhat/tcapy_tests_data/csv_dump'
20 |
21 | filenames = os.listdir(path)
22 |
23 | for filename in filenames:
24 | os.rename(path + "/" + filename, path + "/" + filename.replace("USDJPY", "USDJPY_" + add_vendor + "_"))
25 | # os.rename(path + "/" + filename, path + "/" + filename.replace("large_", ""))
--------------------------------------------------------------------------------
/tcapy_scripts/gen/upload_market_parquet_csv_hdf5.py:
--------------------------------------------------------------------------------
1 | """Can be used to populate the market and trade/order databases. Populate market database with market data in Parquet/H5/CSVs (Arctic).
2 | Users can modify the Parquet/H5/CSV paths, so they can dump their own trade/order data into the trade database.
3 |
4 | Uses DataDumper underneath to access the various databases (via DatabaseSource)
5 | """
6 |
7 | from __future__ import print_function, division
8 |
9 | __author__ = 'saeedamen' # Saeed Amen / saeed@cuemacro.com
10 |
11 | #
12 | # Copyright 2018 Cuemacro
13 | #
14 | # See the License for the specific language governing permissions and limitations under the License.
15 | #
16 |
17 | from tcapy.util.loggermanager import LoggerManager
18 |
19 | if __name__ == '__main__':
20 | logger = LoggerManager.getLogger(__name__)
21 |
22 | plot_back_data = False
23 | data_vendor = 'dukascopy' # 'dukascopy' or 'ncfx'
24 |
25 | # Either use 'arctic' or 'pystore' or 'influxdb' or 'kdb' to store market tick data
26 | market_data_store = 'arctic'
27 |
28 | # If left as None, will pick up from constants
29 | server_host = None
30 | server_port = None
31 |
32 | logger.info("About to upload data to " + market_data_store)
33 |
34 | ## YOU WILL NEED TO CHANGE THE BELOW LINES #########################################################################
35 |
36 | # dukascopy or ncfx style parameters for uploading a large number of Parquet files with market data
37 | # Note: use of wildcard * to specify multiple files
38 |
39 | ticker_mkt = ['EURUSD', 'GBPUSD', 'AUDUSD', 'NZDUSD', 'USDCAD', 'USDCHF',
40 | 'EURNOK', 'EURSEK', 'USDJPY',
41 | 'USDNOK', 'USDSEK', 'EURJPY',
42 | 'USDMXN', 'USDTRY', 'USDZAR', 'EURPLN']
43 |
44 | csv_folder = '/data/csv_dump/' + data_vendor + '/'
45 |
46 | if_exists_table = 'append' # 'replace' or 'append' to database table
47 | if_append_replace_ticker = 'replace' # 'replace' or 'append' to ticker
48 |
49 | file_extension = 'parquet' # 'parquet' (recommended) or 'csv' or 'h5' on disk
50 |
51 | # Files dumped by DatabasePopulator look like this
52 | ## 'AUDUSD_dukascopy_2016-01-03_22_00_01.868000+00_002016-01-31_23_59_57.193000+00_00.parquet'
53 |
54 | from tcapy.data.datadumper import DataDumper
55 |
56 | data_dumper = DataDumper()
57 | data_dumper.upload_market_data_flat_file(data_vendor=data_vendor, market_data_store=market_data_store,
58 | server_host=server_host, server_port=server_port,
59 | ticker_mkt=ticker_mkt,
60 | csv_folder=csv_folder,
61 | if_exists_table=if_exists_table,
62 | if_append_replace_ticker=if_append_replace_ticker,
63 | file_extension=file_extension,
64 | plot_back_data=plot_back_data)
65 |
66 |
67 |
68 |
69 |
--------------------------------------------------------------------------------
/tcapy_scripts/gen/upload_trade_data_csv.py:
--------------------------------------------------------------------------------
1 | """Script to copy trade/order CSVs from disk and dump them into a SQL database. Note, that by default, they will replace
2 | any existing tables (this can be changed to 'append')
3 |
4 | Uses DataDumper underneath to access the various databases (via DatabaseSource)
5 | """
6 |
7 | from __future__ import division, print_function
8 |
9 | __author__ = 'saeedamen' # Saeed Amen / saeed@cuemacro.com
10 |
11 | #
12 | # Copyright 2020 Cuemacro Ltd. - http://www.cuemacro.com / @cuemacro
13 | #
14 | # See the License for the specific language governing permissions and limitations under the License.
15 | #
16 |
17 | from tcapy.conf.constants import Constants
18 | import os
19 |
20 | constants = Constants()
21 |
22 | if __name__ == '__main__':
23 | # 'ms_sql_server' or 'mysql' or 'sqlite'
24 | sql_database_type = 'mysql'
25 | trade_data_database_name = 'trade_database'
26 | trade_order_path = '/data/csv_dump/trade_order/'
27 |
28 | # Where are the trade/order CSVs stored, and how are they mapped?
29 | # This assumes you have already generated these files!
30 |
31 | # eg. 'trade' is the SQL table name, rather than the nickname we use
32 | csv_sql_table_trade_order_mapping = {'trade': os.path.join(trade_order_path, 'trade_df_dump.csv'),
33 | 'order': os.path.join(trade_order_path, 'order_df_dump.csv')}
34 |
35 |     # If no server_host is specified, then the default one from constants will be used
36 | server_host = None
37 |
38 | # 'replace' or 'append' existing database table (replace will totally wipe it!)
39 | if_exists_trade_table = 'replace'
40 |
41 | from tcapy.data.datadumper import DataDumper
42 |
43 | data_dumper = DataDumper()
44 | data_dumper.upload_trade_data_flat_file(sql_database_type=sql_database_type, trade_data_database_name=trade_data_database_name,
45 | csv_sql_table_trade_order_mapping=csv_sql_table_trade_order_mapping,
46 | server_host=server_host, if_exists_trade_table=if_exists_trade_table)
47 |
48 |
--------------------------------------------------------------------------------
/tcapy_scripts/gen/volatile_cache_market_trade_data.py:
--------------------------------------------------------------------------------
1 | """Runs an aggregated TCA calculation for all currency pairs for the past _year. This will cache all the market and
2 | trade data on Redis. Hence subsequent calls should mostly be using Redis, when downloading full month (day/week) data
3 | (and calling the underlying trade/market databases only for smaller portions of data < 1 day, 1 week etc).
4 | """
5 |
6 | from __future__ import print_function, division
7 |
8 | __author__ = 'saeedamen' # Saeed Amen / saeed@cuemacro.com
9 |
10 | #
11 | # Copyright 2017 Cuemacro Ltd. - http://www.cuemacro.com / @cuemacro
12 | #
13 | # See the License for the specific language governing permissions and limitations under the License.
14 | #
15 |
16 | if __name__ == '__main__':
17 |
18 | # Need this for WINDOWS machines, to ensure multiprocessing stuff works properly
19 |     from tcapy.util.swim import Swim
20 |
21 | Swim()
22 |
23 | from tcapy.data.volatilecache import VolatileRedis
24 |
25 |     import datetime
26 | from datetime import timedelta
27 |
28 | # First delete the Redis cache
29 | volatile = VolatileRedis()
30 | volatile.clear_cache()
31 |
32 | from tcapy.analysis.tcaengine import TCARequest, TCAEngineImpl
33 |
34 | tca_engine = TCAEngineImpl()
35 |
36 |     # Do a massive TCA computation for all currency pairs for the past year
37 | # this will cache all the data in Redis, which can be used later
38 | finish_date = datetime.datetime.utcnow().date() - timedelta(days=1)
39 | start_date = finish_date - timedelta(days=252)
40 |
41 | tca_request = TCARequest(start_date=start_date, finish_date=finish_date, ticker='All')
42 | tca_engine.calculate_tca(tca_request)
43 |
--------------------------------------------------------------------------------
/tcapygen/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapygen/__init__.py
--------------------------------------------------------------------------------
/tcapygen/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapygen/logo.png
--------------------------------------------------------------------------------
/tcapyuser/CREATE_USER_DEFINED_TCAPY.txt:
--------------------------------------------------------------------------------
1 | You can create your own customized tcapy GUI/models by adding classes here, which act as the 'user' version
--------------------------------------------------------------------------------
/tcapyuser/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/tcapyuser/__init__.py
--------------------------------------------------------------------------------
/test/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/test/__init__.py
--------------------------------------------------------------------------------
/test/config.py:
--------------------------------------------------------------------------------
1 | import os
2 | import pandas as pd
3 |
4 | from collections import OrderedDict
5 |
6 | use_multithreading = False
7 |
8 | def resource(name):
9 | return os.path.join(os.path.dirname(__file__), "resources", name)
10 |
11 | def read_pd(name, **kwargs):
12 | return pd.read_csv(resource(name), **kwargs)
13 |
14 | tcapy_version = 'test_tcapy'
15 |
16 | #### Trade/order mapping to database tables/CSVs
17 |
18 | trade_order_list = ['trade_df', 'order_df']
19 |
20 | sql_trade_order_mapping = {
21 | 'ms_sql_server' : {'trade_df' : '[dbo].[test_trade]', # Name of table which has broker messages to client
22 | 'order_df' : '[dbo].[test_order]'}, # Name of table which has orders from client
23 | 'mysql': {'trade_df': 'trade_database_test_harness.trade', # Name of table which has broker messages to client
24 | 'order_df': 'trade_database_test_harness.order'}, # Name of table which has orders from client
25 | 'sqlite': {'trade_df': 'test_trade_table', # Name of table which has broker messages to client
26 | 'order_df': 'test_order_table'} # Name of table which has orders from client
27 | }
28 |
29 | csv_trade_order_mapping = {'trade_df' : resource('small_test_trade_df.csv'),
30 | 'order_df' : resource('small_test_order_df.csv')}
31 |
32 | #### Flat file market data (Parquet)
33 |
34 | csv_market_data_store = resource('small_test_market_df.parquet')
35 | csv_reverse_market_data_store = resource('small_test_market_df_reverse.parquet')
36 |
37 | #### Database tables and parameters
38 |
39 | # Market data parameters for tables/databases
40 | test_harness_arctic_market_data_table = 'market_data_table_test_harness'
41 | test_harness_arctic_market_data_store = 'arctic-testharness'
42 |
43 | test_harness_kdb_market_data_table = 'market_data_table_test_harness'
44 | test_harness_kdb_market_data_store = 'kdb-testharness'
45 |
46 | test_harness_influxdb_market_data_table = 'market_data_table_test_harness' # InfluxDB database
47 | test_harness_influxdb_market_data_store = 'influxdb-testharness' # InfluxDB measurement
48 |
49 | test_harness_questdb_market_data_table = 'market_data_table_test_harness' # QuestDB database
50 | test_harness_questdb_market_data_store = 'questdb-testharness' # QuestDB table
51 |
52 | test_harness_pystore_market_data_table = 'market_data_table_test_harness' # PyStore
53 | test_harness_pystore_market_data_store = 'pystore-testharness' # PyStore folder
54 |
55 | # CHUNK_STORE is the default format, so it should come last in this list, so later reads pick it up
56 | arctic_lib_type = ['TICK_STORE', 'VERSION_STORE', 'CHUNK_STORE']
57 |
58 | # Trade data parameters
59 | test_harness_ms_sql_server_trade_data_database = 'trade_database_test_harness'
60 | test_harness_ms_sql_server_trade_data_store = 'ms_sql_server'
61 | test_harness_mysql_trade_data_database = 'trade_database_test_harness'
62 | test_harness_mysql_trade_data_store = 'mysql'
63 | test_harness_sqlite_trade_data_database = resource('trade_database_test_harness.db')
64 | test_harness_sqlite_trade_data_store = 'sqlite'
65 |
66 | #### Tolerance for errors
67 |
68 | eps = 10 ** -5
69 |
70 | invalid_start_date = '01 Jan 1999'
71 | invalid_finish_date = '01 Feb 1999'
72 |
73 | # Set these to True to test against the CSVs in the test harness folder, rather than against tcapy's databases
74 | use_trade_test_csv = False
75 | use_market_test_csv = False
76 |
77 | if use_market_test_csv:
78 |     market_data_store = csv_market_data_store
79 |
80 | if use_trade_test_csv:
81 |     trade_data_store = 'csv'
82 |
83 |     trade_order_mapping = csv_trade_order_mapping
84 |     venue_filter = 'venue1'
85 | else:
86 |     trade_order_mapping = sql_trade_order_mapping
87 |
--------------------------------------------------------------------------------
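A minimal sketch of how a test module might consume the config above (the helper names come from test/config.py itself, but the CSV column layout and read_csv keyword choices below are assumptions):

import pandas as pd

from test.config import read_pd, sql_trade_order_mapping, csv_trade_order_mapping

# Look up the physical table names for a given trade data store, eg. 'mysql'
mysql_mapping = sql_trade_order_mapping['mysql']
print(mysql_mapping['trade_df'])   # trade_database_test_harness.trade

# Load the CSV fallbacks directly as DataFrames via the helpers above
trade_df = read_pd('small_test_trade_df.csv', index_col=0, parse_dates=True)
order_df = pd.read_csv(csv_trade_order_mapping['order_df'])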
/test/resources/small_test_market_df.parquet:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/test/resources/small_test_market_df.parquet
--------------------------------------------------------------------------------
/test/resources/small_test_market_df_reverse.parquet:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/test/resources/small_test_market_df_reverse.parquet
--------------------------------------------------------------------------------
/test/run_tests.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Resolve the folder containing this script, then go up one level to the project root
4 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$0")" )"
5 | export SCRIPT_FOLDER="$( dirname "$(readlink -f -- "$SCRIPT_FOLDER")" )"
6 | source "$SCRIPT_FOLDER/batch_scripts/linux/installation/set_tcapy_env_vars.sh"
7 | source "$SCRIPT_FOLDER/batch_scripts/linux/installation/activate_python_environment.sh"
8 |
9 | echo 'Batch folder' "$SCRIPT_FOLDER"
10 | echo 'Cuemacro TCAPY' "$TCAPY_CUEMACRO"
11 |
12 | # Run the full test suite with coverage over the tcapy package, teeing output to a log file
13 | pytest --cov-report term-missing --cov tcapy --verbose | tee pytest.log
14 |
15 | # To run the tests from a single file, point pytest at it, eg.
16 | # pytest -v /home/tcapyuser/cuemacro/tcapy/tests/test_tcapy/test_data_read_write.py
--------------------------------------------------------------------------------
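Where the shell wrapper is inconvenient (eg. on Windows), roughly the same run can be started from Python using pytest's programmatic entry point; a sketch, assuming pytest and the pytest-cov plugin are installed as the script above requires:

# Programmatic equivalent of run_tests.sh (sketch; requires pytest + pytest-cov)
import sys
import pytest

# Same flags as the shell script: coverage over the tcapy package,
# report lines missing coverage, verbose test names
exit_code = pytest.main(['--cov=tcapy', '--cov-report=term-missing', '--verbose'])

sys.exit(exit_code)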
/test/test_tcapy/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuemacro/tcapy/6a06f7322bc0de8ed54c40a0d5347e7d0aa63a53/test/test_tcapy/__init__.py
--------------------------------------------------------------------------------
/test/test_tcapy/test_overlapping_data_caching.py:
--------------------------------------------------------------------------------
1 | """Tests out the caching when we have requests with overlapping dates
2 | """
3 |
4 | from __future__ import division, print_function
5 |
6 | __author__ = 'saeedamen' # Saeed Amen / saeed@cuemacro.com
7 |
8 | #
9 | # Copyright 2017 Cuemacro Ltd. - http://www.cuemacro.com / @cuemacro
10 | #
11 | # See the License for the specific language governing permissions and limitations under the License.
12 | #
13 | import pandas as pd
14 | from datetime import timedelta
15 |
16 | from tcapy.analysis.tcaengine import TCAEngineImpl
17 | from tcapy.analysis.tcarequest import TCARequest
18 |
19 | from tcapy.analysis.algos.benchmark import *
20 | from tcapy.analysis.algos.resultsform import *
21 |
22 | from tcapy.conf.constants import Constants
23 | from tcapy.util.loggermanager import LoggerManager
24 |
25 | from test.config import *
26 |
27 | constants = Constants()
28 | logger = LoggerManager().getLogger(__name__)
29 |
30 | logger.info('Make sure you have created the folders ' + constants.csv_folder + ' & ' + constants.temp_data_folder +
31 |             ', otherwise tests will fail')
32 |
33 | ########################################################################################################################
34 | # YOU MAY NEED TO CHANGE TESTING PARAMETERS IF YOUR DATABASE DOESN'T COVER THESE DATES
35 | start_date = '20 May 2017'
36 | finish_date = '25 May 2017'
37 |
38 | trade_data_store = 'mysql'
39 | trade_data_database_name = 'trade_database_test_harness'
40 |
41 | trade_order_mapping = sql_trade_order_mapping[trade_data_store]
42 |
43 | market_data_store = 'arctic-testharness'
44 | market_data_database_table = 'market_data_table_test_harness'
45 |
46 | ticker = 'EURUSD'
47 | reporting_currency = 'USD'
48 | tca_type = 'aggregated'
49 | venue_filter = 'venue1'
50 |
51 | ########################################################################################################################
52 |
53 | def test_overlapping_full_detailed_tca_calculation():
54 |     """Tests that a detailed TCA calculation works with caching and overlapping dates, checking that the right tables are returned.
55 | """
56 |
57 | tca_request = TCARequest(start_date=start_date, finish_date=finish_date, ticker=ticker,
58 | tca_type='detailed',
59 | trade_data_store=trade_data_store,
60 | trade_data_database_name=trade_data_database_name,
61 | market_data_store=market_data_store,
62 | market_data_database_table=market_data_database_table,
63 | trade_order_mapping=trade_order_mapping, use_multithreading=use_multithreading)
64 |
65 | tca_engine = TCAEngineImpl(version=tcapy_version)
66 |
67 | # Extend sample
68 | tca_request.start_date = pd.Timestamp(start_date) - timedelta(days=10)
69 |
70 | dict_of_df = tca_engine.calculate_tca(tca_request=tca_request)
71 |
72 | sparse_market_trade_df = dict_of_df['sparse_market_trade_df']
73 |
74 | assert len(sparse_market_trade_df.index[sparse_market_trade_df.index < '01 Jun 2017']) > 0
75 |
--------------------------------------------------------------------------------
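The test above only asserts that a backwards-extended request still returns points before a cut-off date. A slightly stronger variant (a sketch only, reusing the module-level names defined in the test file, and assuming the test database also covers the earlier dates) would warm the cache with the original window first, then extend it, and check the stitched result reaches further back:

# Sketch: warm the cache with the original window, then extend it backwards
def test_overlapping_cache_extends_backwards():
    tca_engine = TCAEngineImpl(version=tcapy_version)

    tca_request = TCARequest(start_date=start_date, finish_date=finish_date, ticker=ticker,
                             tca_type='detailed',
                             trade_data_store=trade_data_store,
                             trade_data_database_name=trade_data_database_name,
                             market_data_store=market_data_store,
                             market_data_database_table=market_data_database_table,
                             trade_order_mapping=trade_order_mapping,
                             use_multithreading=use_multithreading)

    # First run populates the volatile cache for [start_date, finish_date]
    tca_engine.calculate_tca(tca_request=tca_request)

    # Second run overlaps the cached window, extended 10 days earlier
    tca_request.start_date = pd.Timestamp(start_date) - timedelta(days=10)
    dict_of_df = tca_engine.calculate_tca(tca_request=tca_request)

    sparse_market_trade_df = dict_of_df['sparse_market_trade_df']

    # The stitched output should now contain points before the original start date
    assert len(sparse_market_trade_df.index[
        sparse_market_trade_df.index < pd.Timestamp(start_date)]) > 0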
/test/test_tcapy/test_results_agg.py:
--------------------------------------------------------------------------------
1 | """Tests results aggregation methods, such as the histogram generator
2 | """
3 |
4 | __author__ = 'saeedamen' # Saeed Amen / saeed@cuemacro.com
5 |
6 | #
7 | # Copyright 2017 Cuemacro Ltd. - http://www.cuemacro.com / @cuemacro
8 | #
9 | # See the License for the specific language governing permissions and limitations under the License.
10 | #
11 |
12 | from tcapy.conf.constants import Constants
13 | from tcapy.util.timeseries import RandomiseTimeSeries
14 | from tcapy.util.loggermanager import LoggerManager
15 | from tcapy.util.utilfunc import UtilFunc
16 |
17 | from tcapy.analysis.algos.resultssummary import ResultsSummary
18 |
19 | from test.config import *
20 |
21 | logger = LoggerManager().getLogger(__name__)
22 |
23 | constants = Constants()
24 | util_func = UtilFunc()
25 |
26 | logger.info('Make sure you have created the folders ' + constants.csv_folder + ' & ' + constants.temp_data_folder +
27 |             ', otherwise tests will fail')
28 |
29 | eps = 10 ** -5
30 |
31 | def test_histogram_generator():
32 | """Test histogram generator for results - in particular for instances where we have only 1 point
33 | """
34 |
35 | results_summary = ResultsSummary()
36 |
37 | df = RandomiseTimeSeries().create_random_time_series(max_points=1000, freq='minute', start='01 Jan 2018', end="01 Jun 2018")
38 |
39 | df_hist, df_pdf = results_summary._create_histogram_distribution(df)
40 |
41 | assert df_pdf is not None
42 |
43 | df['Col'] = 'something'
44 |     df.loc[df.index[0], 'Col'] = 'something-else'
45 |
46 | df_hist_pdf = results_summary.field_distribution(df, aggregate_by_field='Col', metric_name='price')
47 |
48 | # Should only have output for 'something' because 'something-else' only has one point, so can't construct distribution from it
49 | assert len(df_hist_pdf.columns) == 3
50 |
51 | df = RandomiseTimeSeries().create_random_time_series(max_points=1, freq='minute', start='01 Jan 2018', end="01 Jun 2018")
52 |
53 | df_hist, df_pdf = results_summary._create_histogram_distribution(df)
54 |
55 | # Should have empty plots because only 1 point from these
56 | assert df_hist.empty and df_pdf.empty
57 |
58 | def test_field_bucketing():
59 | """Tests field bucketing by a label
60 | """
61 |
62 | results_summary = ResultsSummary()
63 |
64 | df = RandomiseTimeSeries().create_random_time_series(max_points=1000, freq='minute', start='01 Jan 2018', end="01 Jun 2018")
65 | df['Col'] = 'something'
66 |
67 | df_fields = results_summary.field_bucketing(df, metric_name='price', aggregation_metric='sum', aggregate_by_field='Col')
68 |
69 |     assert abs(df_fields.values[0] - df_fields['Col'].sum()) < eps
70 |
71 |     # Overwrite the first point with a different label
72 |     df.loc[df.index[0], 'Col'] = 'something-else'
73 |
74 | df_fields = results_summary.field_bucketing(df, metric_name='price', aggregation_metric='mean', aggregate_by_field='Col')
75 |
76 |     # Check the aggregated values match within tolerance
77 |     assert abs(df_fields['Col']['something-else'] - df['price'].iloc[0]) < eps
78 |     assert abs(df_fields['Col']['something'] - df['price'].iloc[1:].mean()) < eps
79 |
--------------------------------------------------------------------------------
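The hand-rolled eps tolerance checks above can equally be written with pytest's built-in approx helper, which makes the intent a little more explicit; a self-contained sketch:

import pytest

eps = 10 ** -5

def test_tolerance_styles_agree():
    x, y = 0.1 + 0.2, 0.3  # classic floating point example: x != y exactly

    # Hand-rolled absolute tolerance check, as in the tests above
    assert abs(x - y) < eps

    # The same check expressed with pytest's helper
    assert x == pytest.approx(y, abs=eps)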
/test/test_tcapy/test_trade_data_generation_gen.py:
--------------------------------------------------------------------------------
1 | """Tests out the code for generating randomised test trades/orders.
2 | """
3 |
4 | from __future__ import print_function
5 |
6 | __author__ = 'saeedamen' # Saeed Amen / saeed@cuemacro.com
7 |
8 | #
9 | # Copyright 2017 Cuemacro Ltd. - http://www.cuemacro.com / @cuemacro
10 | #
11 | # See the License for the specific language governing permissions and limitations under the License.
12 | #
13 |
14 | import os
15 |
16 | from tcapy.conf.constants import Constants
17 |
18 | from tcapy.data.datatestcreator import DataTestCreator
19 | from tcapy.data.databasesource import DatabaseSourceCSVBinary as DatabaseSourceCSV
20 | from tcapy.data.databasesource import DatabaseSourceArctic
21 | from tcapy.util.mediator import Mediator
22 | from tcapy.util.loggermanager import LoggerManager
23 |
24 | logger = LoggerManager().getLogger(__name__)
25 |
26 | constants = Constants()
27 |
28 | postfix = 'testharness'
29 | ticker = ['EURUSD']
30 | start_date = '01 May 2017'
31 | finish_date = '31 May 2017'
32 |
33 | use_market_data_test_csv = True
34 |
35 | from test.config import *
36 |
37 | logger.info('Make sure you have created the folders ' + constants.csv_folder + ' & ' + constants.temp_data_folder +
38 |             ', otherwise tests will fail')
39 |
40 | Mediator.get_volatile_cache().clear_cache()
41 |
42 | ########################################################################################################################
43 |
44 | # You can change the test_data_harness_folder to one on your own machine with real data
45 | folder = constants.test_data_harness_folder
46 |
47 | eps = 10 ** -5
48 |
49 | if use_market_data_test_csv:
50 |     # Only contains a limited amount of EURUSD and USDJPY data for Apr/Jun 2017
51 |     market_data_store = csv_market_data_store  # already a full resource path (see test.config)
52 |
53 | def test_randomized_trade_data_generation():
54 | """Tests randomized trade generation data (and writing to database)
55 | """
56 |
57 |
58 | data_test_creator = DataTestCreator(market_data_postfix=postfix, write_to_db=False,
59 | market_data_database_table=test_harness_arctic_market_data_table,
60 | trade_data_database_name=test_harness_mysql_trade_data_database)
61 |
62 | # Use database source as Arctic for market data (assume we are using market data as a source)
63 | if use_market_data_test_csv:
64 | data_test_creator._database_source_market = DatabaseSourceCSV(market_data_database_csv=market_data_store)
65 | data_test_creator._market_data_source = market_data_store
66 | else:
67 | data_test_creator._database_source_market = DatabaseSourceArctic(postfix=postfix)
68 |
69 | # Create randomised trade/order data
70 | trade_order = data_test_creator.create_test_trade_order(ticker, start_date=start_date, finish_date=finish_date)
71 |
72 |     # trade_order is a dictionary containing the 'trade_df' and 'order_df' DataFrames
73 |
74 | # Make sure the number of trades > number of orders
75 | assert (len(trade_order['trade_df'].index) > len(trade_order['order_df'].index))
76 |
77 |
78 |
--------------------------------------------------------------------------------
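To eyeball the randomised output rather than only asserting on row counts, the dictionary returned by create_test_trade_order can be dumped to CSV; a sketch (the dump_trade_order helper and the output file names are hypothetical, not part of tcapy):

import os

def dump_trade_order(trade_order, folder='.'):
    # trade_order maps eg. 'trade_df'/'order_df' to pandas DataFrames
    for key, df in trade_order.items():
        path = os.path.join(folder, key + '_generated.csv')  # hypothetical file name
        df.to_csv(path)
        print('Wrote ' + str(len(df.index)) + ' rows to ' + path)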