├── .flake8 ├── .github ├── ISSUE_TEMPLATE │ ├── bug-report.md │ ├── custom.md │ ├── feature_request.md │ └── new-tutorial-notebook.md ├── pull_request_template.md └── workflows │ └── py-cli.yml ├── .gitignore ├── .pre-commit-config.yaml ├── API.md ├── CONTRIBUTING.md ├── Dockerfile ├── LICENSE ├── README.md ├── ROADMAP.md ├── docs ├── assets │ ├── bbands.png │ ├── bitcoin_forecasts.png │ ├── bitcoin_prophet_backtest.png │ ├── daily_closing.png │ ├── daily_closing_sma30.png │ ├── emac.png │ ├── macd.png │ ├── rsi.png │ ├── sentiment.png │ ├── smac.png │ └── smac_sample.png └── docusaurus │ ├── docs │ ├── backtest.md │ ├── get_crypto_data.md │ ├── get_stock_data.md │ ├── getting_started.md │ └── walk_forward_data_split.md │ └── sidebars.js ├── examples ├── 2020-04-10-disclosures.ipynb ├── 2020-04-20-backtest_with_grid_search.ipynb ├── 2020-05-10-backtest_multi_strategy.ipynb ├── 2020-05-20-backtest_crypto.ipynb ├── 2020-06-10-network_analysis.ipynb ├── 2020-06-20-basic_portfolio.ipynb ├── 2020-10-26-user_defined_strategy.ipynb ├── 2021-12-07-backtest_crypto-hourly.ipynb ├── 2022-02-24-backtest_crypto_exit.ipynb ├── 2022-03-01-backtest_crypto_short.ipynb ├── backtest_disclosures.ipynb ├── backtest_news_sentiment.ipynb ├── chart.png ├── fastquant_demo.ipynb ├── feature_extraction_crypto_20200824.ipynb ├── jfc_rsi.ipynb ├── jfc_rsi.py ├── jfc_support_resistance.ipynb ├── jfc_support_resistance.py ├── notif_bot │ ├── notif_bot_POC.ipynb │ └── slack_notif.py ├── prediction_with_prophet.ipynb └── stock_data_cache.ipynb ├── lessons ├── 2020-01-26-lesson1-accessing-pse_data.ipynb ├── 2020-03-09-lesson2-backtest-your-trading-strategy.ipynb ├── fastquant_lesson1_accessing_pse_data.ipynb └── fastquant_lesson2_backtest_your_trading_strategy.ipynb ├── python ├── Get Twitter API Credentials.md ├── LICENSE ├── fastquant │ ├── __init__.py │ ├── backtest │ │ ├── __init__.py │ │ ├── backtest.py │ │ ├── backtest_indicators.py │ │ ├── data_prep.py │ │ └── post_backtest.py │ ├── 
config.py │ ├── data │ │ ├── JFC_1-1-2020_4-1-2020.csv │ │ ├── JFC_2010-01-01_2019-01-01_OHLCV.csv │ │ ├── JFC_20180101_20190110_DCV.csv │ │ ├── JFC_disclosures_1-1-2020_5-1-2020.csv │ │ ├── __init__.py │ │ ├── bluechips.txt │ │ ├── bt_sentiments_tests.pkl │ │ ├── crypto │ │ │ ├── __init__.py │ │ │ └── crypto.py │ │ ├── merged_stock_data.zip │ │ ├── network.jpg │ │ ├── senti_disclosures.pkl │ │ ├── stock_table.csv │ │ ├── stocklist.txt │ │ ├── stocks │ │ │ ├── __init__.py │ │ │ ├── phisix.py │ │ │ ├── pse.py │ │ │ ├── stocks.py │ │ │ └── yahoofinance.py │ │ └── web │ │ │ ├── __init__.py │ │ │ ├── businesstimes.py │ │ │ └── twitter.py │ ├── disclosures │ │ ├── __init__.py │ │ ├── base.py │ │ ├── investagrams.py │ │ ├── pse.py │ │ └── sentiment.py │ ├── indicators │ │ ├── __init__.py │ │ ├── backtrader_indicators.py │ │ ├── custom.py │ │ └── sentiment.py │ ├── network.py │ ├── notification.py │ ├── portfolio.py │ ├── strategies │ │ ├── __init__.py │ │ ├── base.py │ │ ├── bollinger_band.py │ │ ├── buy_and_hold.py │ │ ├── custom.py │ │ ├── ma_crossover.py │ │ ├── macd.py │ │ ├── mappings.py │ │ ├── rsi.py │ │ └── sentiment.py │ └── utils │ │ ├── __init__.py │ │ └── data_split.py ├── images │ ├── addtl_steps.PNG │ ├── app_name.PNG │ ├── create_app.PNG │ ├── details.PNG │ ├── generate_keys_tokens.PNG │ ├── keys_tokens_bar.PNG │ ├── landing_page.PNG │ └── website_url.PNG ├── pyproject.toml ├── requirements.txt ├── scripts │ ├── get_disclosures │ ├── make_batch_file.sh │ └── update_cache └── tests │ ├── __init__.py │ ├── test_disclosures.py │ ├── test_fastquant.py │ ├── test_network.py │ ├── test_portfolio.py │ ├── test_strategies.py │ └── test_twitter.py └── setup.py /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | ignore = E203, E266, E501, W503, F403, F401, C901 3 | max-line-length = 79 4 | max-complexity = 18 5 | select = B,C,E,F,W,T4,B9 6 | 
-------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug-report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Follow the outline when submitting issues 4 | title: "[BUG]" 5 | labels: bug 6 | assignees: '' 7 | 8 | --- 9 | 10 | 11 | 12 | #### Problem description 13 | 14 | 15 | #### Example 16 | 17 | ```python 18 | import fastquant 19 | # insert code here ... 20 | ``` 21 | 22 | #### Expected behavior 23 | 24 | 25 | #### Environment 26 | 27 | - platform (e.g. Linux, OSX, Windows): 28 | - fastquant version (e.g. 0.1.3.17) 29 | - installation method (e.g. pip, conda, source): 30 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/custom.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Custom issue template 3 | about: Describe this issue template's purpose here. 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | 11 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: "[FEATURE]" 5 | labels: enhancement 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | **Additional context** 20 | Add any other context or screenshots about the feature request here. 
21 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/new-tutorial-notebook.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: New tutorial notebook 3 | about: Template for contributing new tutorial notebooks to fastquant 4 | title: '' 5 | labels: documentation 6 | assignees: '' 7 | 8 | --- 9 | 10 | ### How to contribute a tutorial notebook to fastquant 11 | 12 | 1. Create a `New tutorial notebook` issue with the tutorial notebook outline details filled 13 | 2. If you haven't yet, read this [guide](https://fastpages.fast.ai/jupyter/2020/02/20/test.html) on how to format your notebook as a blog post 14 | 3. Add your tutorial notebook to the [examples](https://github.com/enzoampil/fastquant/tree/master/examples) directory with the naming convention `YYYY-MM-DD-*` (details [here](https://github.com/fastai/fastpages#automatically-convert-notebooks-to-blog-posts)). 15 | 4. Send a PR that refers to this issue 16 | 17 | ### Tutorial notebook outline 18 | 19 | **Tutorial title:** 20 | 21 | **Tutorial summary:** 22 | 23 | Please use this checklist as a rough outline of prerequisites when submitting a new tutorial notebook to fastquant! 24 | 25 | - [ ] Complete [front matter](https://github.com/fastai/fastpages#customizing-blog-posts-with-front-matter) (title, description, author, etc) 26 | - [ ] Each section has at least some commentary to guide the reader 27 | - [ ] All images, including graphs, and equations are displaying properly 28 | - [ ] Code is expected to work for someone with fastquant [dependencies](https://github.com/enzoampil/fastquant/blob/master/python/requirements.txt) installed; otherwise, indicate the installation on the notebook. 29 | - [ ] Each of the section headers have their first letter capitalized (e.g. 
*Define the search space*) 30 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | resolves # 2 | 3 | 8 | 9 | 10 | ### Description 11 | 12 | 13 | 14 | 15 | ### Checklist 16 | - [ ] I am making a pull request from a branch other than master 17 | - [ ] I have read the CONTRIBUTING.md 18 | - [ ] I have added/edited documentation in a relevant docs/docusaurus markdown file 19 | - [ ] (For new docs, check if not applicable) I have added the id of a new docs md to the docs/docusaurus/sidebars.js file -------------------------------------------------------------------------------- /.github/workflows/py-cli.yml: -------------------------------------------------------------------------------- 1 | name: Test python package 2 | 3 | on: [push, pull_request] 4 | 5 | jobs: 6 | build: 7 | 8 | # The last image version support python3.6, latest have removed it 9 | # Still need python3.6 for EPEL8 10 | runs-on: ubuntu-20.04 11 | strategy: 12 | matrix: 13 | python-version: [3.7, 3.8, 3.9] 14 | 15 | steps: 16 | - uses: actions/checkout@v2 17 | - name: Setup Python ${{ matrix.python-version }} 18 | uses: actions/setup-python@v2 19 | with: 20 | python-version: ${{ matrix.python-version }} 21 | - name: Install fastquant with dependencies 22 | run: | 23 | python -m pip install --upgrade pip 24 | pip install pytest 25 | pip install . 
26 | - name: Test with pytest 27 | run: | 28 | pytest python/tests 29 | 30 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 
89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | 129 | # Rproject cache folder 130 | .Rproj.user 131 | 132 | # R package cache 133 | fastquant.Rcheck/ 134 | fastquant_0.0.0.9000.tar.gz 135 | 136 | # VSCode Workspace files 137 | *.code-workspace 138 | .vscode 139 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/ambv/black 3 | rev: stable 4 | hooks: 5 | - id: black 6 | language_version: python3 7 | - repo: https://gitlab.com/pycqa/flake8 8 | rev: 3.7.9 9 | hooks: 10 | - id: flake8 11 | -------------------------------------------------------------------------------- /API.md: -------------------------------------------------------------------------------- 1 | # backtest 2 | Available parameters 3 | * `strategy` : str or an instance of
`fastquant.strategies.base.BaseStrategy` 4 | see list of accepted strategy keys below 5 | * `data` : pandas.DataFrame 6 | dataframe with at least close price indexed with time 7 | * `commission` : float 8 | commission per transaction [0, 1] (default 0.0075) 9 | * `init_cash` : float 10 | initial cash (currency implied from `data`) (default 100000) 11 | * `plot` : bool 12 | show plot backtrader (disabled if `strategy`=="multi") 13 | * `verbose` : int 14 | Verbose can take values: [0, 1, 2, 3], with increasing levels of verbosity (default=1). 15 | * `sort_by` : str 16 | sort result by given metric (default='rnorm') 17 | * `sentiments` : pandas.DataFrame 18 | df of sentiment [0, 1] indexed by time (applicable if `strategy`=='senti') 19 | * `strats` : dict 20 | dictionary of strategy parameters (applicable if `strategy`=='multi') 21 | * `return_history` : bool 22 | return history of transactions (i.e. buy and sell timestamps) (default=False) 23 | * `return_plot`: bool 24 | return the plot (if you want to save the plot) (default=False) 25 | * `channel` : str 26 | Channel to be used for notifications - e.g. "slack" (default=None) 27 | * `symbol` : str 28 | Symbol to be referenced in the channel notification if not None (default=None) 29 | * `allow_short` : bool 30 | Whether to allow short selling, with max set as `short_max` times the portfolio value (default=False) 31 | * `short_max` : float 32 | The maximum short position allowable as a ratio relative to the portfolio value at that time point (default=1.5) 33 | * `figsize` : tuple 34 | The size of the figure to be displayed at the end of the backtest (default=(30, 15)) 35 | * `data_class` : bt.feed.DataBase 36 | Custom backtrader database to be used as a parent class instead bt.feed. 
(default=None) 37 | * `data_kwargs` : dict 38 | Datafeed keyword arguments (empty dict by default) 39 | ## Strategies 40 | List of accepted strategy keys are 41 | | Strategy | Alias | Parameters | 42 | | --- | --- | --- | 43 | | Relative Strength Index (RSI) | rsi | `rsi_period`, `rsi_upper`, `rsi_lower` | 44 | | Simple moving average crossover (SMAC) | smac | `fast_period`, `slow_period` | 45 | | Exponential moving average crossover (EMAC) | emac | `fast_period`, `slow_period` | 46 | | Moving Average Convergence Divergence (MACD) | macd | `fast_period`, `slow_period`, `signal_period`, `sma_period`, `sma_dir_period` | 47 | | Bollinger Bands | bbands | `period`, `devfactor` | 48 | | Buy and Hold | buynhold | `N/A` | 49 | | Sentiment Strategy | sentiment | `keyword`, `page_nums`, `senti` | 50 | | Custom Prediction Strategy | custom | `upper_limit`, `lower_limit`, `custom_column` | 51 | | Custom Ternary Strategy | ternary | `buy_int`, `sell_int`, `custom_column` | 52 | ## Examples 53 | ### Return history 54 | ```python 55 | from fastquant import backtest 56 | res, hist = backtest(..., return_history=True) 57 | ``` 58 | ### Return plot 59 | ```python 60 | from fastquant import backtest 61 | res, plot = backtest(..., return_plot=True) 62 | 63 | # Save plot 64 | plot.savefig('example.png') 65 | ``` 66 | 67 | ### Return history and plot 68 | ```python 69 | from fastquant import backtest 70 | res, hist, plot = backtest(..., return_history=True, return_plot=True) 71 | ``` 72 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | --- 3 | 4 | 5 | ## Types of Contributions 6 | 7 | 8 | ### Report Bugs 9 | 10 | Report bugs at https://github.com/enzoampil/fastquant/issues. 11 | 12 | 13 | ### Fix Bugs 14 | 15 | Look through the GitHub issues for bugs.
Anything tagged with "bug" 16 | and "help wanted" is open to whoever wants to implement it. 17 | 18 | 19 | ### Implement Features 20 | 21 | Look through the GitHub issues for features. Anything tagged with "enhancement" 22 | and "help wanted" is open to whoever wants to implement it. Those that are 23 | tagged with "first-timers-only" is suitable for those getting started in open-source software. 24 | 25 | 26 | ### Write Documentation 27 | 28 | `fastquant` could always use more documentation, whether as part of the 29 | official `fastquant` docs, in docstrings, and such. 30 | 31 | 32 | ### Submit Feedback 33 | 34 | The best way to send feedback is to file an issue at https://github.com/enzoampil/fastquant/issues. 35 | 36 | If you are proposing a feature: 37 | 38 | * Explain in detail how it would work. 39 | * Keep the scope as narrow as possible, to make it easier to implement. 40 | * Remember that this is a volunteer-driven project, and that contributions 41 | are welcome :) 42 | 43 | ### Contribute a tutorial notebook to fastquant 44 | 45 | 1. Create a `New tutorial notebook` issue with the tutorial notebook outline details filled 46 | 2. If you haven't yet, read this [guide](https://fastpages.fast.ai/jupyter/2020/02/20/test.html) on how to format your notebook as a blog post 47 | 3. Add your tutorial notebook to the [examples](https://github.com/enzoampil/fastquant/tree/master/examples) directory with the naming convention `YYYY-MM-DD-*` (details [here](https://github.com/fastai/fastpages#automatically-convert-notebooks-to-blog-posts)). 48 | 4. Send a PR that refers to this issue 49 | 50 | #### Tutorial notebook outline 51 | 52 | **Tutorial title:** 53 | 54 | **Tutorial summary:** 55 | 56 | Please use this checklist as a rough outline of prerequisites when submitting a new tutorial notebook to fastquant! 
57 | 58 | - [ ] Complete [front matter](https://github.com/fastai/fastpages#customizing-blog-posts-with-front-matter) (title, description, author, etc) 59 | - [ ] Each section has at least some commentary to guide the reader 60 | - [ ] All images, including graphs, and equations are displaying properly 61 | - [ ] Code is expected to work for someone with fastquant [dependencies](https://github.com/enzoampil/fastquant/blob/master/python/requirements.txt) installed; otherwise, indicate the installation on the notebook. 62 | 63 | 64 | ## Get Started! 65 | 66 | 67 | Ready to contribute? Here's how to set up `fastquant` for local development. 68 | 69 | 1. Fork the `fastquant` repo on GitHub. 70 | 2. Clone your fork locally 71 | ```shell 72 | $ git clone git@github.com:your_name_here/fastquant.git 73 | ``` 74 | 75 | 3. Install your local copy into a virtualenv. Assuming you have virtualenvwrapper installed, this is how you set up your fork for local development 76 | ```shell 77 | $ git clone https://github.com/enzoampil/fastquant.git 78 | $ cd fastquant 79 | $ virtualenv env 80 | $ source env/bin/activate 81 | $ pip install -r python/requirements.txt 82 | ``` 83 | 84 | 4. Create a branch for local development 85 | ```shell 86 | $ git checkout -b name-of-your-bugfix-or-feature 87 | ``` 88 | Now you can make your changes locally. 89 | 90 | 5. When you're done making changes, check that your changes pass `flake8` and the tests. In addition, ensure that your code is formatted using `black` 91 | ```shell 92 | $ flake8 . 93 | $ black . 94 | $ pytest python/tests/test_fastquant.py 95 | ``` 96 | 97 | To get `flake8`, `black`, and `pytest`, just pip install them into your virtualenv. If you wish, 98 | you can add pre-commit hooks for both `flake8` and `black` to make all formatting easier. See [this blog post](https://ljvmiranda921.github.io/notebook/2018/06/21/precommits-using-black-and-flake8/) for details. 99 | 100 | 6. 
Commit your changes and push your branch to GitHub 101 | ```shell 102 | $ git add . 103 | $ git commit -m "Your detailed description of your changes." 104 | $ git push origin name-of-your-bugfix-or-feature 105 | ``` 106 | 107 | In brief, commit messages should follow these conventions: 108 | 109 | * Always contain a subject line which briefly describes the changes made. For example "Update CONTRIBUTING.md". 110 | * Subject lines should not exceed 50 characters. 111 | * The commit body should contain context about the change - how the code worked before, how it works now and why you decided to solve the issue in the way you did. 112 | 113 | More detail on commit guidelines can be found at https://chris.beams.io/posts/git-commit 114 | 115 | 7. Submit a pull request through the GitHub website. 116 | 117 | 118 | ## Pull Request Guidelines 119 | 120 | Before you submit a pull request, check that it meets these guidelines: 121 | 122 | 1. The pull request should include tests. 123 | 2. If the pull request adds functionality, the docs should be updated. Put 124 | your new functionality into a function with a docstring, and add the 125 | feature to the list in README.md. 126 | 3. The pull request should work for Python 3.7 and above. Check 127 | https://github.com/enzoampil/fastquant/pulls 128 | and make sure that the tests pass for all supported Python versions. 129 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # Set base image (host OS) 2 | FROM python:3.7 3 | 4 | # Set the working directory in the container 5 | WORKDIR /fastquant 6 | 7 | # Copy the dependencies file to the working directory 8 | COPY python/requirements.txt . 9 | 10 | # Install dependencies 11 | RUN pip install -r requirements.txt 12 | 13 | # Copy the content of the python directory to the working directory 14 | COPY python/ .
15 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Lorenzo Ampil 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /ROADMAP.md: -------------------------------------------------------------------------------- 1 | ## Goal: 2 | 3 | To promote data driven investments in the Philippines 4 | 5 | ## Objectives: 6 | 7 | 1. To make it easy to access Philippine Stock Exchange (PSE) data (2 lines of code) 8 | 2. To create reusable templates for backtesting trading strategies on Philippine stocks 9 | 10 | ## Current features: 11 | ### Easy access to stock related data (OHLCV) 12 | 1. Basic daily price features 13 | - Date 14 | - Open 15 | - High 16 | - Low 17 | - Close 18 | - Value 19 | 2. 
Company disclosures (WIP) 20 | 21 | ### Example notebooks for backtesting with different trading strategies 22 | 1. Relative Strength Index (RSI) 23 | 2. Min Max Support Resistance 24 | 25 | ## Future features 26 | 27 | ### Easy to use API for backtesting trading strategies 28 | 1. High level functions and classes for backtesting standard trading strategies w/ PH context set as default 29 | - RSI 30 | - Support resistance 31 | - Bollinger bands 32 | - MACD 33 | 2. Easy integration of custom trading strategies 34 | - Combinations of standard trading strategies 35 | - Application of trained machine learning and statistical models 36 | 37 | ### Processed text information with natural language processing (NLP) 38 | 1. Disclosure reports summarized in structured tabular form 39 | 2. Summary statistics from tweets 40 | 3. Company related tweets (WIP) -------------------------------------------------------------------------------- /docs/assets/bbands.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/enzoampil/fastquant/805c4440bf96ba04cfd43aaf4926e4b45f3c3f33/docs/assets/bbands.png -------------------------------------------------------------------------------- /docs/assets/bitcoin_forecasts.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/enzoampil/fastquant/805c4440bf96ba04cfd43aaf4926e4b45f3c3f33/docs/assets/bitcoin_forecasts.png -------------------------------------------------------------------------------- /docs/assets/bitcoin_prophet_backtest.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/enzoampil/fastquant/805c4440bf96ba04cfd43aaf4926e4b45f3c3f33/docs/assets/bitcoin_prophet_backtest.png -------------------------------------------------------------------------------- /docs/assets/daily_closing.png:
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/enzoampil/fastquant/805c4440bf96ba04cfd43aaf4926e4b45f3c3f33/docs/assets/daily_closing.png -------------------------------------------------------------------------------- /docs/assets/daily_closing_sma30.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/enzoampil/fastquant/805c4440bf96ba04cfd43aaf4926e4b45f3c3f33/docs/assets/daily_closing_sma30.png -------------------------------------------------------------------------------- /docs/assets/emac.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/enzoampil/fastquant/805c4440bf96ba04cfd43aaf4926e4b45f3c3f33/docs/assets/emac.png -------------------------------------------------------------------------------- /docs/assets/macd.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/enzoampil/fastquant/805c4440bf96ba04cfd43aaf4926e4b45f3c3f33/docs/assets/macd.png -------------------------------------------------------------------------------- /docs/assets/rsi.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/enzoampil/fastquant/805c4440bf96ba04cfd43aaf4926e4b45f3c3f33/docs/assets/rsi.png -------------------------------------------------------------------------------- /docs/assets/sentiment.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/enzoampil/fastquant/805c4440bf96ba04cfd43aaf4926e4b45f3c3f33/docs/assets/sentiment.png -------------------------------------------------------------------------------- /docs/assets/smac.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/enzoampil/fastquant/805c4440bf96ba04cfd43aaf4926e4b45f3c3f33/docs/assets/smac.png -------------------------------------------------------------------------------- /docs/assets/smac_sample.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/enzoampil/fastquant/805c4440bf96ba04cfd43aaf4926e4b45f3c3f33/docs/assets/smac_sample.png -------------------------------------------------------------------------------- /docs/docusaurus/docs/backtest.md: -------------------------------------------------------------------------------- 1 | --- 2 | id: backtest 3 | title: backtest 4 | --- 5 | 6 | ## Description 7 | 8 | Backtest financial data with a specified trading strategy 9 | 10 | ## Parameters 11 | 12 | **strategy** : str or an instance of `fastquant.strategies.base.BaseStrategy` 13 | see list of accepted strategy keys below 14 | 15 | **data** : pandas.DataFrame 16 | dataframe with at least close price indexed with time 17 | 18 | **commission** : float 19 | commission per transaction [0, 1] 20 | 21 | **init_cash** : float 22 | initial cash (currency implied from `data`) 23 | 24 | **plot** : bool 25 | show plot backtrader (disabled if `strategy`=="multi") 26 | 27 | **verbose** : int 28 | Verbose can take values: [0, 1, 2, 3], with increasing levels of verbosity (default=1). 29 | 30 | **sort_by** : str 31 | sort result by given metric (default='rnorm') 32 | 33 | **sentiments** : pandas.DataFrame 34 | df of sentiment [0, 1] indexed by time (applicable if `strategy`=='senti') 35 | 36 | **strats** : dict 37 | dictionary of strategy parameters (applicable if `strategy`=='multi') 38 | 39 | **return_history** : bool 40 | return history of transactions (i.e. buy and sell timestamps) (default=False) 41 | 42 | **channel** : str 43 | Channel to be used for notifications - e.g. 
"slack" (default=None) 44 | 45 | **symbol** : str 46 | Symbol to be referenced in the channel notification if not None (default=None) 47 | 48 | **allow_short** : bool 49 | Whether to allow short selling, with max set as `short_max` times the portfolio value (default=False) 50 | 51 | **short_max** : float 52 | The maximum short position allowable as a ratio relative to the portfolio value at that time point (default=1.5) 53 | 54 | **figsize** : tuple 55 | The size of the figure to be displayed at the end of the backtest (default=(30, 15)) 56 | 57 | **data_class** : bt.feed.DataBase 58 | Custom backtrader database to be used as a parent class instead of bt.feed.DataBase (default=None) 59 | 60 | **data_kwargs** : dict 61 | Datafeed keyword arguments (empty dict by default) 62 | 63 | ## Returns 64 | 65 | A plot containing the backtest results and a dictionary of the history and results of the backtest run. -------------------------------------------------------------------------------- /docs/docusaurus/docs/get_crypto_data.md: -------------------------------------------------------------------------------- 1 | --- 2 | id: get_crypto_data 3 | title: get_crypto_data 4 | --- 5 | 6 | Get crypto data in OHLCV format 7 | 8 | ## Parameters 9 | 10 | **ticker** : str 11 | List of ticker symbols here: https://coinmarketcap.com/exchanges/binance/ 12 | 13 | **start_date**, **end_date** : str 14 | date in YYYY-MM-DD format 15 | 16 | **time_resolution** : str 17 | resolutions: '1w', '1d' (default), '1h', '1m' 18 | 19 | **exchange** : str 20 | market exchanges: 'binance' (default), 'coinbasepro', 'bithumb', 'kraken', 'kucoin', 'bitstamp' 21 | 22 | ## Returns 23 | 24 | **pandas.DataFrame** 25 | Crypto data (in OHLCV format) for the specified ticker and date range -------------------------------------------------------------------------------- /docs/docusaurus/docs/get_stock_data.md: -------------------------------------------------------------------------------- 1 | --- 2 | id:
get_stock_data 3 | title: get_stock_data 4 | --- 5 | 6 | ## Parameters 7 | 8 | **symbol** : str 9 | Symbol of the stock in the PSE or Yahoo. 10 | You can refer to these links: 11 | PHISIX: https://www.pesobility.com/stock 12 | YAHOO: https://www.nasdaq.com/market-activity/stocks/screener?exchange=nasdaq 13 | 14 | **start_date** : str 15 | Starting date (YYYY-MM-DD) of the period that you want to get data on 16 | 17 | **end_date** : str 18 | Ending date (YYYY-MM-DD) of the period you want to get data on 19 | 20 | **source** : str 21 | First source to query from ("pse", "yahoo"). If the stock is not found in the first source, the query is run on the other source. 22 | 23 | **format** : str 24 | Format of the output data 25 | 26 | ## Returns 27 | 28 | **pandas.DataFrame** 29 | Stock data (in the specified `format`) for the specified company and date range -------------------------------------------------------------------------------- /docs/docusaurus/docs/getting_started.md: -------------------------------------------------------------------------------- 1 | --- 2 | id: getting_started 3 | title: Getting Started with fastquant 4 | --- 5 | 6 | ## Installation 7 | 8 | ### Python 9 | 10 | ``` 11 | pip install fastquant 12 | or 13 | python -m pip install fastquant 14 | ``` 15 | 16 | ## Get stock data 17 | All symbols from [Yahoo Finance](https://finance.yahoo.com/) and Philippine Stock Exchange ([PSE](https://www.pesobility.com/stock)) are accessible via `get_stock_data`. 18 | 19 | ### Python 20 | 21 | ``` 22 | from fastquant import get_stock_data 23 | df = get_stock_data("JFC", "2018-01-01", "2019-01-01") 24 | print(df.head()) 25 | 26 | # dt close 27 | # 2019-01-01 293.0 28 | # 2019-01-02 292.0 29 | # 2019-01-03 309.0 30 | # 2019-01-06 323.0 31 | # 2019-01-07 321.0 32 | ``` 33 | 34 | ## Get crypto data 35 | The data is pulled from Binance, and all the available tickers are found [here](https://coinmarketcap.com/exchanges/binance/). 
36 | 37 | ### Python 38 | 39 | ``` 40 | from fastquant import get_crypto_data 41 | crypto = get_crypto_data("BTC/USDT", "2018-12-01", "2019-12-31") 42 | crypto.head() 43 | 44 | # open high low close volume 45 | # dt 46 | # 2018-12-01 4041.27 4299.99 3963.01 4190.02 44840.073481 47 | # 2018-12-02 4190.98 4312.99 4103.04 4161.01 38912.154790 48 | # 2018-12-03 4160.55 4179.00 3827.00 3884.01 49094.369163 49 | # 2018-12-04 3884.76 4085.00 3781.00 3951.64 48489.551613 50 | # 2018-12-05 3950.98 3970.00 3745.00 3769.84 44004.799448 51 | ``` 52 | 53 | ## Backtest trading strategies 54 | 55 | ### Simple Moving Average Crossover (15 day MA vs 40 day MA) 56 | Daily Jollibee prices from 2018-01-01 to 2019-01-01 57 | ``` 58 | from fastquant import backtest 59 | backtest('smac', df, fast_period=15, slow_period=40) 60 | 61 | # Starting Portfolio Value: 100000.00 62 | # Final Portfolio Value: 102272.90 63 | ``` 64 | ![](./docs/assets/smac_sample.png) 65 | 66 | ## Optimize trading strategies with automated grid search 67 | 68 | fastquant allows you to automatically measure the performance of your trading strategy on multiple combinations of parameters. All you need to do is to input the values as iterators (like as a `list` or `range`). 
| Moving Average Convergence Divergence (MACD) | macd | `fast_period`, `slow_period`, `signal_period`, `sma_period`, `dir_period` |
114 | 115 | ### Simple moving average crossover (SMAC) Strategy 116 | ``` 117 | backtest('smac', df, fast_period=10, slow_period=30) 118 | 119 | # Starting Portfolio Value: 100000.00 120 | # Final Portfolio Value: 95902.74 121 | ``` 122 | ![](./docs/assets/smac.png) 123 | 124 | ### Exponential moving average crossover (EMAC) Strategy 125 | ``` 126 | backtest('emac', df, fast_period=10, slow_period=30) 127 | 128 | # Starting Portfolio Value: 100000.00 129 | # Final Portfolio Value: 90976.00 130 | ``` 131 | ![](./docs/assets/emac.png) 132 | 133 | ### Moving Average Convergence Divergence (MACD) Strategy 134 | ``` 135 | backtest('macd', df, fast_period=12, slow_period=26, signal_period=9, sma_period=30, dir_period=10) 136 | 137 | # Starting Portfolio Value: 100000.00 138 | # Final Portfolio Value: 96229.58 139 | ``` 140 | ![](./docs/assets/macd.png) 141 | 142 | ### Bollinger Bands Strategy 143 | ``` 144 | backtest('bbands', df, period=20, devfactor=2.0) 145 | 146 | # Starting Portfolio Value: 100000.00 147 | # Final Portfolio Value: 97060.30 148 | ``` 149 | ![](./docs/assets/bbands.png) 150 | 151 | ### News Sentiment Strategy 152 | Use Tesla (TSLA) stock from yahoo finance and news articles from [Business Times](https://www.businesstimes.com.sg/) 153 | ``` 154 | from fastquant import get_yahoo_data, get_bt_news_sentiment 155 | data = get_yahoo_data("TSLA", "2020-01-01", "2020-07-04") 156 | sentiments = get_bt_news_sentiment(keyword="tesla", page_nums=3) 157 | backtest("sentiment", data, sentiments=sentiments, senti=0.2) 158 | 159 | # Starting Portfolio Value: 100000.00 160 | # Final Portfolio Value: 313198.37 161 | # Note: Unfortunately, you can't recreate this scenario due to inconsistencies in the dates and sentiments that is scraped by get_bt_news_sentiment. In order to have a quickstart with News Sentiment Strategy you need to make the dates consistent with the sentiments that you are scraping. 
162 | 163 | from fastquant import get_yahoo_data, get_bt_news_sentiment 164 | from datetime import datetime, timedelta 165 | 166 | # we get the current date and delta time of 30 days 167 | current_date = datetime.now().strftime("%Y-%m-%d") 168 | delta_date = (datetime.now() - timedelta(30)).strftime("%Y-%m-%d") 169 | data = get_yahoo_data("TSLA", delta_date, current_date) 170 | sentiments = get_bt_news_sentiment(keyword="tesla", page_nums=3) 171 | backtest("sentiment", data, sentiments=sentiments, senti=0.2) 172 | ``` 173 | ![](./docs/assets/sentiment.png) 174 | 175 | ### Multi Strategy 176 | 177 | Multiple registered strategies can be utilized together in an OR fashion, where buy or sell signals are applied when at least one of the strategies trigger them. 178 | 179 | ``` 180 | df = get_stock_data("JFC", "2018-01-01", "2019-01-01") 181 | 182 | # Utilize single set of parameters 183 | strats = { 184 | "smac": {"fast_period": 35, "slow_period": 50}, 185 | "rsi": {"rsi_lower": 30, "rsi_upper": 70} 186 | } 187 | res = backtest("multi", df, strats=strats) 188 | res.shape 189 | # (1, 16) 190 | 191 | 192 | # Utilize auto grid search 193 | strats_opt = { 194 | "smac": {"fast_period": 35, "slow_period": [40, 50]}, 195 | "rsi": {"rsi_lower": [15, 30], "rsi_upper": 70} 196 | } 197 | 198 | res_opt = backtest("multi", df, strats=strats_opt) 199 | res_opt.shape 200 | # (4, 16) 201 | ``` 202 | 203 | ### Custom Strategy for Backtesting Machine Learning & Statistics Based Predictions 204 | 205 | This powerful strategy allows you to backtest your own trading strategies using any type of model w/ as few as 3 lines of code after the forecast! 206 | 207 | Predictions based on any model can be used as a custom indicator to be backtested using fastquant. You just need to add a `custom` column in the input dataframe, and set values for `upper_limit` and `lower_limit`. 
The strategy is structured similar to `RSIStrategy` where you can set an `upper_limit`, above which the asset is sold (considered "overbought"), and a `lower_limit`, below which the asset is bought (considered "underbought"). `upper_limit` is set to 95 by default, while `lower_limit` is set to 5 by default.
# SSH into the fastquant container (use the container id from `docker ps`)
docker exec -it <container_id> /bin/bash
When `train_size` and `n_splits` are specified, the size of the test indices will adjust.
When `train_size` and `test_size` are specified, the number of splits (`n_splits`) changes to utilize the entire dataset
There may be cases where you want some data from the training set to be added to the test data.
135 | In this case, you can set the size of the data which will be added to the test indices 136 | 137 | ``` 138 | >>> X = np.random.random(100) 139 | >>> for train_indices, test_indices in walk_forward_split(X, training_overlap_size=5): 140 | print("TRAIN:",len(train_indices), train_indices) 141 | print("TEST: ", len(test_indices) ,test_indices) 142 | print() 143 | ``` 144 | 145 | ``` 146 | TRAIN: 57 [ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 147 | 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 148 | 48 49 50 51 52 53 54 55 56] 149 | TEST: 20 [52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71] 150 | 151 | TRAIN: 57 [15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 152 | 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 153 | 63 64 65 66 67 68 69 70 71] 154 | TEST: 20 [67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86] 155 | 156 | TRAIN: 57 [30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 157 | 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 158 | 78 79 80 81 82 83 84 85 86] 159 | TEST: 18 [82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99] 160 | 161 | ``` 162 | -------------------------------------------------------------------------------- /docs/docusaurus/sidebars.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | someSidebar: { 3 | Docs: ['getting_started'], 4 | Reference: ['backtest', 'get_stock_data', 'get_crypto_data', 'walk_forward_data_split'], 5 | }, 6 | }; 7 | -------------------------------------------------------------------------------- /examples/chart.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/enzoampil/fastquant/805c4440bf96ba04cfd43aaf4926e4b45f3c3f33/examples/chart.png -------------------------------------------------------------------------------- 
class RSIStrategy(bt.Strategy):
    """RSI mean-reversion example strategy.

    Buys a ~10% position when the RSI_PERIOD RSI drops below RSI_LOWER
    (oversold) and sells when it rises above RSI_UPPER (overbought).
    Position sizes are derived from INIT_CASH and capped by the cash or
    holdings actually available, so the strategy never goes short.
    """

    def log(self, txt, dt=None):
        """Print `txt` prefixed with a date (defaults to the current bar's)."""
        dt = dt or self.datas[0].datetime.date(0)
        print("%s, %s" % (dt.isoformat(), txt))

    def __init__(self):
        # Keep references to the close line and the pending order.
        self.dataclose = self.datas[0].close
        self.order = None
        self.buyprice = None
        self.buycomm = None

        self.rsi = bt.indicators.RelativeStrengthIndex(period=RSI_PERIOD)

    def notify_order(self, order):
        """Log executions/rejections and clear the pending-order flag."""
        if order.status in [order.Submitted, order.Accepted]:
            # Order is still in flight; nothing to do yet.
            return

        if order.status in [order.Completed]:
            if order.isbuy():
                self.log(
                    "BUY EXECUTED, Price: %.2f, Cost: %.2f, Comm %.2f"
                    % (
                        order.executed.price,
                        order.executed.value,
                        order.executed.comm,
                    )
                )

                self.buyprice = order.executed.price
                self.buycomm = order.executed.comm
            else:  # Sell
                self.log(
                    "SELL EXECUTED, Price: %.2f, Cost: %.2f, Comm %.2f"
                    % (
                        order.executed.price,
                        order.executed.value,
                        order.executed.comm,
                    )
                )

            self.bar_executed = len(self)

        elif order.status in [order.Canceled, order.Margin, order.Rejected]:
            self.log("Order Canceled/Margin/Rejected")

        # Write down: no pending order
        self.order = None

    def notify_trade(self, trade):
        """Log gross/net PnL when a round-trip trade closes."""
        if not trade.isclosed:
            return

        self.log(
            "OPERATION PROFIT, GROSS %.2f, NET %.2f"
            % (trade.pnl, trade.pnlcomm)
        )

    def notify_cashvalue(self, cash, value):
        # Update cash and value every period
        self.log("Cash %s Value %s" % (cash, value))
        self.cash = cash
        self.value = value

    def next(self):
        """Emit at most one order per bar based on the RSI signal."""
        self.log("Close, %.2f" % self.dataclose[0])
        print("rsi:", self.rsi[0])
        if self.order:
            # An order is already pending; wait for it to complete first.
            return

        # Only buy if there is enough cash for at least one stock
        if self.cash >= self.dataclose[0]:
            if self.rsi[0] < RSI_LOWER:
                self.log("BUY CREATE, %.2f" % self.dataclose[0])
                # Take a 10% long position every time it's a buy signal
                # (or whatever is afforded by the current cash position).
                # "size" refers to the number of stocks to purchase.
                self.order = self.buy(
                    size=int(
                        min(
                            (INIT_CASH / self.dataclose[0]) * 0.1,
                            self.cash / self.dataclose[0],
                        )
                    )
                )

        # Only sell if you hold at least one unit of the stock
        if (self.value - self.cash) > 0:
            if self.rsi[0] > RSI_UPPER:
                self.log("SELL CREATE, %.2f" % self.dataclose[0])
                # Sell a 10% position, capped at the current holdings so
                # the strategy never ends up short. (The original sizing
                # ignored holdings and could sell more shares than owned,
                # contradicting its own "no short selling" comment.)
                self.order = self.sell(
                    size=int(
                        min(
                            (INIT_CASH / self.dataclose[0]) * 0.1,
                            (self.value - self.cash) / self.dataclose[0],
                        )
                    )
                )
class MinMaxSupportResistance(bt.Strategy):
    """Support/resistance breakout example strategy.

    Tracks the rolling BAND_PERIOD-bar minimum and maximum of the data.
    Buys when the close matches the rolling maximum (breakout above
    resistance) and sells when it matches the rolling minimum (breakdown
    below support). Position sizes are fractions of INIT_CASH, capped by
    the cash or holdings actually available, so it never goes short.
    """

    def log(self, txt, dt=None):
        """Print `txt` prefixed with a date (defaults to the current bar's)."""
        dt = dt or self.datas[0].datetime.date(0)
        print("%s, %s" % (dt.isoformat(), txt))

    def __init__(self):
        self.dataclose = self.datas[0].close
        self.order = None
        self.buyprice = None
        self.buycomm = None

        # Rolling lowest/highest value over the last BAND_PERIOD bars.
        self.minn = bt.indicators.MinN(self.datas[0], period=BAND_PERIOD)
        self.maxn = bt.indicators.MaxN(self.datas[0], period=BAND_PERIOD)

    def notify_order(self, order):
        """Log executions/rejections and clear the pending-order flag."""
        if order.status in [order.Submitted, order.Accepted]:
            return

        if order.status in [order.Completed]:
            if order.isbuy():
                self.log(
                    "BUY EXECUTED, Price: %.2f, Cost: %.2f, Comm %.2f"
                    % (
                        order.executed.price,
                        order.executed.value,
                        order.executed.comm,
                    )
                )

                self.buyprice = order.executed.price
                self.buycomm = order.executed.comm
            else:  # Sell
                self.log(
                    "SELL EXECUTED, Price: %.2f, Cost: %.2f, Comm %.2f"
                    % (
                        order.executed.price,
                        order.executed.value,
                        order.executed.comm,
                    )
                )

            self.bar_executed = len(self)

        elif order.status in [order.Canceled, order.Margin, order.Rejected]:
            self.log("Order Canceled/Margin/Rejected")

        # Write down: no pending order
        self.order = None

    def notify_cashvalue(self, cash, value):
        # Update cash and value every period
        self.log("Cash %s Value %s" % (cash, value))
        self.cash = cash
        self.value = value

    def notify_trade(self, trade):
        """Log gross/net PnL when a round-trip trade closes."""
        if not trade.isclosed:
            return

        self.log(
            "OPERATION PROFIT, GROSS %.2f, NET %.2f"
            % (trade.pnl, trade.pnlcomm)
        )

    def next(self):
        """Emit at most one order per bar based on the min/max bands."""
        self.log("Close, %.2f" % self.dataclose[0])
        print("Max price:", self.maxn[0])
        print("Min price:", self.minn[0])
        if self.order:
            # An order is already pending; wait for it to complete first.
            return

        # Only buy if there is enough cash for at least one stock
        if self.cash >= self.dataclose[0]:
            # BUY if the current close is the HIGHEST of the last N bars
            # (the original comment said "lowest", contradicting the code).
            # Index the indicator explicitly with [0] rather than comparing
            # against the indicator object itself.
            if self.dataclose[0] == self.maxn[0]:
                self.log("BUY CREATE, %.2f" % self.dataclose[0])
                # Take a 10% long position (or whatever is afforded by the
                # current cash position); "size" is the number of shares.
                self.order = self.buy(
                    size=int(
                        min(
                            (INIT_CASH / self.dataclose[0]) * 0.1,
                            self.cash / self.dataclose[0],
                        )
                    )
                )

        # Only sell if you hold at least one unit of the stock (no shorting)
        if (self.value - self.cash) > 0:
            # SELL if the current close is the LOWEST of the last N bars
            # (the original comment said "highest", contradicting the code).
            if self.dataclose[0] == self.minn[0]:
                self.log("SELL CREATE, %.2f" % self.dataclose[0])
                # Sell a 5% position, capped at the current holdings.
                self.order = self.sell(
                    size=int(
                        min(
                            (INIT_CASH / self.dataclose[0]) * 0.05,
                            (self.value - self.cash) / self.dataclose[0],
                        )
                    )
                )
and the list of twitter accounts you want to scrape tweets from. 4 | 5 | ### Sample usage: 6 | 7 | ``` 8 | 9 | twitter_sentiment = get_twitter_sentiment(stock_code, twitter_auth, start_date, twitter_accounts=None) 10 | 11 | ``` 12 | 13 | However, to do this, you need to get your own Twitter API credentials (twitter\_auth in the parameter) and authenticate it using the Tweepy library. 14 | 15 | Below is a guide on how to get your API credentials and how to authenticate them using fastquant. 16 | 17 | 18 | Before anything, **please make sure that you have a Twitter account and are logged in**. Once done, please follow the steps below: 19 | 20 | ### Steps: 21 | 22 | 1. Go to [developer.twitter.com/en/apps](https://developer.twitter.com/en/apps) 23 | 24 | ![Landing page](images/landing_page.PNG) 25 | 26 | 2. Click **Create an App** 27 | 28 | ![Create an app](images/create_app.PNG) 29 | 30 | 3. Please input your **App name** 31 | 32 | 4. Under **Application Description**, please write a short/brief description about your app 33 | 34 | ![App name](images/app_name.PNG) 35 | 36 | 5. For **Website URL**, feel free to write anything if you don't have any. (e.g. placeholder.com) 37 | 38 | ![Website url](images/website_url.PNG) 39 | 40 | 6. Feel free to skip the others, and go straight to **Tell us how this app will be used**. Write a short explanation of why you need to get Twitter API credentials. 41 | 42 | 7. Click **Create** 43 | 44 | ![Additional Steps](images/addtl_steps.PNG) 45 | 46 | 47 | 8. Go back to [developer.twitter.com/en/apps](https://developer.twitter.com/en/apps). Now, you must be seeing the app that you've created. Click on **Details**. 48 | 49 | ![Details](images/details.PNG) 50 | 51 | 9. Navigate to **Keys and Tokens** 52 | 53 | ![Navigation bar](images/keys_tokens_bar.PNG) 54 | 55 | 10. You should be seeing both your **API key** and **API secret key**. Save or secure both of these keys and remember where you saved them. 56 | 57 | 11. 
Now, for your **Access tokens** and **Access token secret**, click **Generate** (or if you have already and you want to get a new one, click **Regenerate**). Same as the API keys, save or secure both of these tokens and remember where you saved them. *Note*: Regenerating new tokens will invalidate your existing tokens. 58 | 59 | ![Generate keys and tokens](images/generate_keys_tokens.PNG) 60 | 61 | 62 | *** 63 | ### Authenticating your API credentials using Fastquant 64 | 65 | Fastquant has a module named tweepy_api() which authenticates your API credentials. 66 | 67 | 68 | #### Usage: 69 | ``` 70 | 71 | consumer_key = 'put_your_API_key_here' 72 | consumer_secret = 'put_your_API_secret_key_here' 73 | access_token = 'put_your_access_token_here' 74 | access_secret = 'put_your_access_token_secret_here' 75 | 76 | api = tweepy_api(consumer_key, consumer_secret, access_token, access_secret) 77 | 78 | ``` 79 | 80 | Now, that you have your api credentials authenticated, you can already try using the get_tweet_sentiment() module fastquant by passing the api object as twitter_auth in the parameter. -------------------------------------------------------------------------------- /python/LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Lorenzo Ampil 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. -------------------------------------------------------------------------------- /python/fastquant/__init__.py: -------------------------------------------------------------------------------- 1 | # from .fastquant import * 2 | from .disclosures import * 3 | from .strategies import * 4 | from .network import * 5 | from .portfolio import * 6 | from .config import * 7 | from .backtest import * 8 | from .data import * 9 | from .notification import * 10 | -------------------------------------------------------------------------------- /python/fastquant/backtest/__init__.py: -------------------------------------------------------------------------------- 1 | # Modules available for fastquant.backtest.* 2 | 3 | from fastquant.backtest.backtest import backtest 4 | from fastquant.backtest.backtest import STRATEGY_MAPPING 5 | -------------------------------------------------------------------------------- /python/fastquant/backtest/backtest.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # Import standard library 4 | from __future__ import ( 5 | absolute_import, 6 | division, 7 | print_function, 8 | unicode_literals, 9 | ) 10 | import warnings 11 | 12 | # Import modules 13 | import backtrader as bt 14 | import backtrader.feeds as btfeed 15 | import backtrader.analyzers as btanalyzers 16 | import pandas as pd 17 | import numpy as np 18 | from collections.abc import Iterable 19 | import 
strat_docs = "\nExisting strategies:\n\n" + "\n".join(
    [key + "\n" + value.__doc__ for key, value in STRATEGY_MAPPING.items()]
)


def docstring_parameter(*sub):
    """
    Decorator to ensure all the strategy docstrings are included in the `backtest` docstring.
    """

    def dec(obj):
        obj.__doc__ = obj.__doc__.format(*sub)
        return obj

    return dec


@docstring_parameter(strat_docs)
def backtest(
    strategy,
    data,  # Treated as csv path if str, and as a dataframe if pd.DataFrame
    commission=COMMISSION_PER_TRANSACTION,
    init_cash=INIT_CASH,
    plot=True,
    fractional=False,
    slippage=0.001,
    single_position=None,
    verbose=1,
    sort_by="rnorm",
    sentiments=None,
    strats=None,  # Only used when strategy = "multi"
    return_history=False,
    return_plot=False,
    channel="",
    symbol="",
    allow_short=False,
    short_max=1.5,
    figsize=(30, 15),
    multi_line_indicators=None,
    data_class=None,
    data_kwargs=None,
    plot_kwargs=None,
    fig=None,
    **kwargs,
):
    """Backtest financial data with a specified trading strategy

    Parameters
    ----------------
    strategy : str or a subclass of `fastquant.strategies.base.BaseStrategy`
        see list of accepted strategy keys below
    data : pandas.DataFrame
        dataframe with at least close price indexed with time
    commission : float
        commission per transaction [0, 1]
    init_cash : float
        initial cash (currency implied from `data`)
    plot : bool
        show plot backtrader (disabled if `strategy`=="multi")
    fractional : bool
        allow fractional position sizes (default=False)
    slippage : float
        slippage applied per transaction (default=0.001)
    single_position : bool or None
        restrict the strategy to a single open position (default=None)
    verbose : int
        Verbose can take values: [0, 1, 2, 3], with increasing levels of verbosity (default=1).
    sort_by : str
        sort result by given metric (default='rnorm')
    sentiments : pandas.DataFrame
        df of sentiment [0, 1] indexed by time (applicable if `strategy`=='senti')
    strats : dict
        dictionary of strategy parameters (applicable if `strategy`=='multi')
    return_history : bool
        return history of transactions (i.e. buy and sell timestamps) (default=False)
    return_plot : bool
        return the plot (if you want to save the plot) (default=False)
    channel : str
        Channel to be used for notifications - e.g. "slack" (default="")
    symbol : str
        Symbol to be referenced in the channel notification if not empty (default="")
    allow_short : bool
        Whether to allow short selling, with max set as `short_max` times the portfolio value (default=False)
    short_max : float
        The maximum short position allowable as a ratio relative to the portfolio value at that time point (default=1.5)
    figsize : tuple
        The size of the figure to be displayed at the end of the backtest (default=(30, 15))
    multi_line_indicators : list or None
        custom (indicator class, line names) pairs for indicators exposing several lines (default=None)
    data_class : bt.feed.DataBase
        Custom backtrader database to be used as a parent class instead bt.feed. (default=None)
    data_kwargs : dict
        Datafeed keyword arguments (empty dict by default)
    plot_kwargs : dict
        Arguments for the call to cerebro.plot() (empty dict by default)
    {0}
    """
    # Normalize the None sentinels (used in the signature to avoid the
    # shared-mutable-default pitfall); behavior is identical to before.
    sentiments = [] if sentiments is None else sentiments
    strats = {} if strats is None else strats
    data_kwargs = {} if data_kwargs is None else data_kwargs
    plot_kwargs = {} if plot_kwargs is None else plot_kwargs

    # Setting initial support for 1 cpu
    # Return the full strategy object to get all run information
    cerebro = bt.Cerebro(stdstats=False, maxcpus=1, optreturn=False)
    cerebro.addobserver(bt.observers.Broker)
    cerebro.addobserver(bt.observers.Trades)
    cerebro.addobserver(bt.observers.BuySell)

    # Convert all non iterables and strings into lists
    kwargs = {
        k: v if isinstance(v, Iterable) and not isinstance(v, str) else [v]
        for k, v in kwargs.items()
    }

    # Add logging parameters based on the `verbose` parameter
    logging_params = get_logging_params(verbose)
    kwargs.update(logging_params)

    # Add Strategy
    strat_names = []
    strat_name = None
    if strategy == "multi" and strats is not None:
        for strat, params in strats.items():
            cerebro.optstrategy(
                STRATEGY_MAPPING[strat],
                init_cash=[init_cash],
                commission=commission,
                channel=channel,
                symbol=symbol,
                allow_short=allow_short,
                fractional=fractional,
                slippage=slippage,
                single_position=single_position,
                short_max=short_max,
                **params,
            )
            strat_names.append(strat)
    else:

        # Allow instance of BaseStrategy or from the predefined mapping
        if not isinstance(strategy, str) and issubclass(strategy, bt.Strategy):
            strat_name = (
                strategy.__name__ if hasattr(strategy, "__name__") else str(strategy)
            )
        else:
            strat_name = strategy
            strategy = STRATEGY_MAPPING[strategy]

        cerebro.optstrategy(
            strategy,
            init_cash=[init_cash],
            commission=commission,
            channel=channel,
            symbol=symbol,
            fractional=fractional,
            slippage=slippage,
            single_position=single_position,
            allow_short=allow_short,
            short_max=short_max,
            **kwargs,
        )
        strat_names.append(strat_name)

    # Apply Total, Average, Compound and Annualized Returns calculated using a logarithmic approach
    cerebro.addanalyzer(btanalyzers.Returns, _name="returns")
    cerebro.addanalyzer(btanalyzers.SharpeRatio, _name="mysharpe")
    cerebro.addanalyzer(btanalyzers.DrawDown, _name="drawdown")
    cerebro.addanalyzer(btanalyzers.TimeDrawDown, _name="timedraw")
    cerebro.addanalyzer(
        btanalyzers.TradeAnalyzer, _name="tradeanalyzer"
    )  # trade analyzer

    cerebro.broker.setcommission(commission=commission)

    # Initalize and verify data
    pd_data, data, data_format_dict = initalize_data(
        data, strat_name, symbol, data_class, sentiments, data_kwargs
    )
    cerebro.adddata(pd_data)
    cerebro.broker.setcash(init_cash)
    # Allows us to set buy price based on next day closing
    # (technically impossible, but reasonable assuming you use all your money to buy market at the end of the next day)
    cerebro.broker.set_coc(True)
    if verbose > 0:
        print("Starting Portfolio Value: %.2f" % cerebro.broker.getvalue())

    # clock the start of the process
    tstart = time.time()
    stratruns = cerebro.run()

    # clock the end of the process
    tend = time.time()

    if verbose > 0:
        # print out the result
        print("Time used (seconds):", str(tend - tstart))

    # Get History, Optimal Parameters and Strategy Metrics
    sorted_combined_df, optim_params, history_dict = analyze_strategies(
        init_cash,
        stratruns,
        data,
        strat_names,
        strategy,
        strats,
        sort_by,
        return_history,
        verbose,
        multi_line_indicators,
        **kwargs,
    )

    # Plot

    if plot and strategy != "multi":
        # Plot only with the optimal parameters when multiple strategy runs are required
        if sorted_combined_df.shape[0] != 1:
            if verbose > 0:
                print("=============================================")
                print("Plotting backtest for optimal parameters ...")
            # BUGFIX: the inner call must request the plot back
            # (return_plot=True); otherwise it returns a lone dataframe and
            # the `_, fig` unpacking below fails. Whether `fig` is returned
            # to OUR caller is still governed by the outer `return_plot`.
            _, fig = backtest(
                strategy,
                data,
                plot=plot,
                verbose=0,
                sort_by=sort_by,
                return_plot=True,
                plot_kwargs=plot_kwargs,
                **optim_params,
            )
        else:
            fig = plot_results(cerebro, data_format_dict, figsize, **plot_kwargs)

    if return_history and return_plot:
        return sorted_combined_df, history_dict, fig
    elif return_history:
        return sorted_combined_df, history_dict
    elif return_plot:
        return sorted_combined_df, fig
    else:
        return sorted_combined_df
def get_logging_params(verbose):
    """
    Map the integer `verbose` level onto the logging flags consumed by strategies.

    0 - No logging
    1 - Strategy Level logs
    2 - Transaction Level logs
    3 - Periodic Logs
    """
    # Each flag switches on once `verbose` exceeds its threshold.
    thresholds = (
        ("strategy_logging", 0),
        ("transaction_logging", 1),
        ("periodic_logging", 2),
    )
    return {flag: verbose > minimum for flag, minimum in thresholds}
# Some indicators expose several "lines" instead of just one.
# Line names come from the `lines` attribute of the indicator source code:
# https://github.com/mementum/backtrader/tree/master/backtrader/indicators
MULTI_LINE_INDICATORS = [
    (AverageDirectionalMovementIndexRating, ("adx", "adxr")),
    (ADXR, ("adx", "adxr")),
    (BollingerBands, ("mid", "top", "bot")),
    (BBands, ("mid", "top", "bot")),
    (Envelope, ("top", "bot")),
    (MACD, ("macd", "signal")),
    (MACDHistogram, ("macd", "signal", "histo")),
    (MACDHisto, ("macd", "signal", "histo")),
    (
        Ichimoku,
        (
            "tenkan_sen",
            "kijun_sen",
            "senkou_span_a",
            "senkou_span_b",
            "chikou_span",
        ),
    ),
]


# Tokens kept when normalising an indicator plot label into a dict key.
indicator_regex = re.compile("[a-zA-Z0-9.]+")


def get_indicators_as_dict(strat_run, multi_line_indicators):
    """
    Collect the indicator value arrays attached to a finished strategy run.

    Multi-line indicators contribute one entry per line; single-line
    indicators contribute a single entry keyed by their (sanitised) label.
    """
    lookup = (
        MULTI_LINE_INDICATORS if multi_line_indicators is None else multi_line_indicators
    )

    indicators_dict = dict()
    for idx, indicator in enumerate(strat_run.getindicators()):
        # Fall back to a positional name when no plot label is available.
        label = (
            indicator.plotlabel()
            if hasattr(indicator, "plotlabel")
            else "indicator{}".format(idx)
        )

        # Indicators with multiple lines get one key per line.
        line_names = get_line_names(indicator, lookup)
        if len(line_names) > 1:
            for line_idx, line_name in enumerate(line_names):
                key = rename_indicator(label, line_name)
                indicators_dict[key] = indicator.lines[line_idx].array
        else:
            indicators_dict[rename_indicator(label)] = indicator.lines[0].array

    return indicators_dict
# Tokens kept when normalising an indicator plot label into a dict key
# (module-level so the compiled pattern is reused across calls).
indicator_regex = re.compile("[a-zA-Z0-9.]+")


def get_line_names(indicator, multi_line_ind):
    """
    Return the tuple of line names registered for `indicator`, or an empty
    tuple when it is a single-line indicator.

    `multi_line_ind` is an iterable of (indicator class, line-name tuple).
    """
    for indicator_class, line_names in multi_line_ind:
        # Exact class comparison is deliberate: isinstance would also match
        # subclasses (e.g. MACDHisto subclasses MACD) and return the wrong
        # line set. `is` replaces the non-idiomatic `type(...) == ...` (E721)
        # with identical semantics.
        if type(indicator) is indicator_class:
            return line_names
    return ()


def rename_indicator(name, line_name=None):
    """
    Normalise a plot label such as "MACD (12, 26, 9)" into "MACD_12_26_9",
    or "MACD_signal_12_26_9" when a `line_name` is given.
    """
    tokens = indicator_regex.findall(name)
    if line_name:
        # Insert the line name right after the indicator name token.
        tokens = [tokens[0], line_name] + (tokens[1:] if len(tokens) > 1 else [])
    return "_".join(tokens)
data["open"] = data.close.shift().values 38 | 39 | # If data has `dt` as the index and `dt` or `datetime` are not already columns, set `dt` as the first column 40 | # This means `backtest` supports the dataframe whether `dt` is the index or a column 41 | if len(set(["dt", "datetime"]).intersection(data.columns)) == 0: 42 | if data.index.name == "dt": 43 | data = data.reset_index() 44 | # If the index is a datetime index, set this as the datetime column 45 | elif isinstance(data.index, pd.DatetimeIndex): 46 | data.index.name = "dt" 47 | data = data.reset_index() 48 | 49 | # Rename "dt" column to "datetime" to match the formal alias 50 | data = data.rename(columns={"dt": "datetime"}) 51 | data["datetime"] = pd.to_datetime(data.datetime) 52 | 53 | numeric_cols = [col for col in data.columns if is_numeric_dtype(data[col])] 54 | params_tuple = tuple( 55 | [ 56 | (col, i) 57 | for i, col in enumerate(data.columns) 58 | if col in numeric_cols + ["datetime"] 59 | ] 60 | ) 61 | default_cols = [c for c, _ in DEFAULT_PANDAS] 62 | non_default_numeric_cols = tuple( 63 | [col for col, _ in params_tuple if col not in default_cols] 64 | ) 65 | 66 | # Use custom data class if input 67 | if data_class: 68 | 69 | class CustomData(data_class): 70 | """ 71 | Data feed that includes all the columns in the input dataframe 72 | """ 73 | 74 | # Need to make sure that the new lines don't overlap w/ the default lines already in PandasData 75 | lines = non_default_numeric_cols 76 | 77 | # automatically handle parameter with -1 78 | # add the parameter to the parameters inherited from the base class 79 | params = params_tuple + (("symbol", symbol),) 80 | 81 | else: 82 | 83 | class CustomData(bt.feeds.PandasData): 84 | """ 85 | Data feed that includes all the columns in the input dataframe 86 | """ 87 | 88 | # Need to make sure that the new lines don't overlap w/ the default lines already in PandasData 89 | lines = non_default_numeric_cols 90 | 91 | # automatically handle parameter with -1 92 | 
# add the parameter to the parameters inherited from the base class 93 | params = params_tuple + (("symbol", symbol),) 94 | 95 | data_format_dict = tuple_to_dict(params_tuple) 96 | 97 | pd_data = CustomData( 98 | dataname=data, symbol=symbol, **data_format_dict, **data_kwargs 99 | ) 100 | 101 | return pd_data, data, data_format_dict 102 | 103 | 104 | def include_sentiment_score(data, sentiments): 105 | 106 | # initialize series for sentiments 107 | senti_series = pd.Series(sentiments, name="sentiment_score", dtype=float) 108 | 109 | # join and reset the index for dt to become the first column 110 | data = data.merge(senti_series, left_index=True, right_index=True, how="left") 111 | data = data.reset_index() 112 | 113 | return data 114 | 115 | 116 | def tuple_to_dict(tup): 117 | di = dict(tup) 118 | return di 119 | -------------------------------------------------------------------------------- /python/fastquant/backtest/post_backtest.py: -------------------------------------------------------------------------------- 1 | from fastquant.strategies.buy_and_hold import BuyAndHoldStrategy 2 | import pandas as pd 3 | import numpy as np 4 | import matplotlib.pyplot as plt 5 | import backtrader as bt 6 | 7 | from fastquant.backtest.backtest_indicators import get_indicators_as_dict 8 | from fastquant.config import GLOBAL_PARAMS 9 | 10 | """ 11 | Post backtest functionalities 12 | - Retrieval of hisotry of orders, indicators and perodic logs 13 | - Analysis of each strategy 14 | - Plotting 15 | 16 | """ 17 | 18 | 19 | def analyze_strategies( 20 | init_cash, 21 | stratruns, 22 | data, 23 | strat_names, 24 | strategy, 25 | strats, 26 | sort_by, 27 | return_history, 28 | verbose, 29 | multi_line_indicators=None, 30 | **kwargs 31 | ): 32 | params = [] 33 | metrics = [] 34 | if verbose > 0: 35 | print("==================================================") 36 | print("Number of strat runs:", len(stratruns)) 37 | print("Number of strats per run:", len(stratruns[0])) 38 | 
print("Strat names:", strat_names) 39 | 40 | order_history_dfs = [] 41 | periodic_history_dfs = [] 42 | indicator_history_dfs = [] 43 | for strat_idx, stratrun in enumerate(stratruns): 44 | strats_params = {} 45 | 46 | if verbose > 0: 47 | print("**************************************************") 48 | 49 | for i, strat in enumerate(stratrun): 50 | # Get indicator history 51 | st_dtime = [ 52 | bt.utils.date.num2date(num) for num in strat.lines.datetime.plot() 53 | ] 54 | indicators_dict = get_indicators_as_dict(strat, multi_line_indicators) 55 | indicators_df = pd.DataFrame(indicators_dict) 56 | indicators_df.insert(0, "dt", st_dtime) 57 | 58 | strat_name = strat_names[i] 59 | p_raw = strat.p._getkwargs() 60 | p, selected_p = {}, {} 61 | for k, v in p_raw.items(): 62 | if k not in [ 63 | "strategy_logging", 64 | "periodic_logging", 65 | "transaction_logging", 66 | ]: 67 | # Make sure the parameters are mapped to the corresponding strategy 68 | if strategy == "multi": 69 | key = ( 70 | "{}.{}".format(strat_name, k) 71 | if k not in GLOBAL_PARAMS 72 | else k 73 | ) 74 | # make key with format: e.g. smac.slow_period40_fast_period10 75 | if k in strats[strat_name]: 76 | selected_p[k] = v 77 | pkeys = "_".join( 78 | ["{}{}".format(*i) for i in selected_p.items()] 79 | ) 80 | history_key = "{}.{}".format(strat_name, pkeys) 81 | else: 82 | key = k 83 | 84 | # make key with format: e.g. 
slow_period40_fast_period10 85 | if k in kwargs.keys(): 86 | selected_p[k] = v 87 | history_key = "_".join( 88 | ["{}{}".format(*i) for i in selected_p.items()] 89 | ) 90 | p[key] = v 91 | 92 | strats_params = {**strats_params, **p} 93 | 94 | if return_history: 95 | # columns are decided in log method of BaseStrategy class in base.py 96 | order_history_df = strat.order_history_df 97 | order_history_df["dt"] = pd.to_datetime(order_history_df.dt) 98 | # combine rows with identical index 99 | # history_df = order_history_df.set_index('dt').dropna(how='all') 100 | # history_dfs[history_key] = order_history_df.stack().unstack().astype(float) 101 | order_history_df.insert(0, "strat_name", history_key) 102 | order_history_df.insert(0, "strat_id", strat_idx) 103 | order_history_dfs.append(order_history_df) 104 | 105 | periodic_history_df = strat.periodic_history_df 106 | periodic_history_df["dt"] = pd.to_datetime(periodic_history_df.dt) 107 | periodic_history_df.insert(0, "strat_name", history_key) 108 | periodic_history_df.insert(0, "strat_id", strat_idx) 109 | periodic_history_df[ 110 | "return" 111 | ] = periodic_history_df.portfolio_value.pct_change() 112 | periodic_history_dfs.append(periodic_history_df) 113 | 114 | indicators_df.insert(0, "strat_name", history_key) 115 | indicators_df.insert(0, "strat_id", strat_idx) 116 | indicator_history_dfs.append(indicators_df) 117 | 118 | # We run metrics on the last strat since all the metrics will be the same for all strats 119 | returns = strat.analyzers.returns.get_analysis() 120 | sharpe = strat.analyzers.mysharpe.get_analysis() 121 | drawdown = strat.analyzers.drawdown.get_analysis() 122 | timedraw = strat.analyzers.timedraw.get_analysis() 123 | tradeanalyzer = strat.analyzers.tradeanalyzer.get_analysis() 124 | 125 | # Combine dicts for returns and sharpe 126 | m = { 127 | **returns, 128 | **drawdown, 129 | **timedraw, 130 | **sharpe, 131 | "pnl": strat.pnl, 132 | "final_value": strat.final_value, 133 | } 134 | 135 | if 
"total" in tradeanalyzer.keys(): 136 | total = tradeanalyzer["total"]["total"] 137 | else: 138 | total = np.nan 139 | 140 | if "won" in tradeanalyzer.keys(): 141 | win_rate = tradeanalyzer["won"]["total"] / tradeanalyzer["total"]["total"] 142 | won = tradeanalyzer["won"]["total"] 143 | won_avg = tradeanalyzer["won"]["pnl"]["average"] 144 | won_avg_prcnt = tradeanalyzer["won"]["pnl"]["average"] / init_cash * 100 145 | won_max = tradeanalyzer["won"]["pnl"]["max"] 146 | won_max_prcnt = tradeanalyzer["won"]["pnl"]["max"] / init_cash * 100 147 | else: 148 | win_rate = np.nan 149 | won = np.nan 150 | won_avg = np.nan 151 | won_avg_prcnt = np.nan 152 | won_max = np.nan 153 | won_max_prcnt = np.nan 154 | 155 | if "lost" in tradeanalyzer.keys(): 156 | lost = tradeanalyzer["lost"]["total"] 157 | lost_avg = tradeanalyzer["lost"]["pnl"]["average"] 158 | lost_avg_prcnt = tradeanalyzer["lost"]["pnl"]["average"] / init_cash * 100 159 | lost_max = tradeanalyzer["lost"]["pnl"]["max"] 160 | lost_max_prcnt = tradeanalyzer["lost"]["pnl"]["max"] / init_cash * 100 161 | else: 162 | lost = np.nan 163 | lost_avg = np.nan 164 | lost_avg_prcnt = np.nan 165 | lost_max = np.nan 166 | lost_max_prcnt = np.nan 167 | 168 | m2 = { 169 | "total": total, 170 | "win_rate": win_rate, 171 | "won": won, 172 | "lost": lost, 173 | "won_avg": won_avg, 174 | "won_avg_prcnt": won_avg_prcnt, 175 | "lost_avg": lost_avg, 176 | "lost_avg_prcnt": lost_avg_prcnt, 177 | "won_max": won_max, 178 | "won_max_prcnt": won_max_prcnt, 179 | "lost_max": lost_max, 180 | "lost_max_prcnt": lost_max_prcnt, 181 | } 182 | 183 | m = {**m, **m2} 184 | 185 | params.append(strats_params) 186 | metrics.append(m) 187 | if verbose > 0: 188 | print("--------------------------------------------------") 189 | print_dict(strats_params, "Strategy Parameters") 190 | print_dict(returns, "Returns") 191 | print_dict(sharpe, "Sharpe") 192 | print_dict(drawdown, "Drawdown") 193 | print_dict(timedraw, "Timedraw") 194 | 195 | params_df = 
def sort_metrics_params_and_strats(metrics_df, params_df, strat_ids, sort_by, verbose):
    """
    Sort the per-run metrics and parameters by `sort_by` (descending) and
    combine strat ids, parameters and metrics into one dataframe.

    Returns (sorted_combined_df, optim_params).
    """
    # Get indices based on `sort_by` metric
    optim_idxs = np.argsort(metrics_df[sort_by].values)[::-1]
    sorted_params_df = params_df.iloc[optim_idxs].reset_index(drop=True)
    sorted_metrics_df = metrics_df.iloc[optim_idxs].reset_index(drop=True)
    sorted_strat_ids = strat_ids.iloc[optim_idxs].reset_index(drop=True)
    sorted_combined_df = pd.concat(
        [sorted_strat_ids, sorted_params_df, sorted_metrics_df], axis=1
    )

    optim_params = get_optim_metrics_and_params(
        sorted_metrics_df, sorted_params_df, verbose
    )
    # drop extra columns #248
    # BUGFIX: the original tested set(["channel" "symbol"]) — the missing
    # comma triggered implicit string concatenation, producing the single
    # element "channelsymbol", so the intended drop never ran.
    if {"channel", "symbol"}.issubset(sorted_combined_df.columns):
        sorted_combined_df.drop(["channel", "symbol"], axis=1, inplace=True)

    return sorted_combined_df, optim_params
def get_optim_metrics_and_params(sorted_metrics_df, sorted_params_df, verbose):
    """
    Return the parameter dict of the top-ranked run, optionally printing
    both the optimal parameters and their metrics.
    """
    best_params = sorted_params_df.iloc[0].to_dict()
    best_metrics = sorted_metrics_df.iloc[0].to_dict()

    if verbose > 0:
        print_dict(best_params, "Optimal parameters:")
        print_dict(best_metrics, "Optimal metrics:")

    return best_params


def plot_results(cerebro, data_format_dict, figsize=(30, 15), **plot_kwargs):
    """
    Render the backtrader chart for a finished cerebro run and return the
    first matplotlib figure.
    """
    # Simple check whether we are running inside Google Colab, where the
    # interactive (iplot) backend does not work.
    try:
        from google.colab import drive  # noqa: F401

        iplot = False
    except Exception:
        iplot = True

    # Only request the volume subplot when a volume column was mapped.
    has_volume = data_format_dict.get("volume") is not None

    # Set matplotlib parameters
    plt.style.use("classic")  # ggplot is also fine
    plt.rcParams["figure.figsize"] = figsize

    figures = cerebro.plot(volume=has_volume, iplot=iplot, **plot_kwargs)
    return figures[0][0]


def print_dict(d, title="", format="inline"):
    """
    Pretty-print dict `d` with an optional `title`.

    format=None   -> "title {dict}" on one line
    format="inline" -> title and key:value pairs tab-separated on one line
    format="indent" -> title, then one indented "key:value" per line
    """
    if format is None:
        print(title, d)

    if format == "inline":
        parts = [title] + ["%s:%s" % (key, value) for key, value in d.items()]
        print("\t".join(parts))

    if format == "indent":
        if title != "":
            print(title)
        for key, value in d.items():
            print("\t%s:%s" % (key, value))
21 | ("volume", -1), 22 | ("openinterest", -1), 23 | ) 24 | 25 | DATA_FORMAT_MAPPING = { 26 | "cv": { 27 | "datetime": 0, 28 | "open": None, 29 | "high": None, 30 | "low": None, 31 | "close": 1, 32 | "volume": 2, 33 | "openinterest": None, 34 | }, 35 | "c": { 36 | "datetime": 0, 37 | "open": None, 38 | "high": None, 39 | "low": None, 40 | "close": 1, 41 | "volume": None, 42 | "openinterest": None, 43 | }, 44 | } 45 | GLOBAL_PARAMS = ["init_cash", "buy_prop", "sell_prop", "execution_type"] 46 | 47 | # Data Config 48 | 49 | DATA_PATH = resource_filename(__name__, "data") 50 | 51 | if not Path(DATA_PATH).exists(): 52 | os.makedirs(DATA_PATH) 53 | 54 | # CSV file containing all the listed PSE companies 55 | PSE_STOCK_TABLE_FILE = "stock_table.py" 56 | 57 | # Cache file for PSE prices in OHLC format 58 | PSE_CACHE_FILE = "merged_stock_data.zip" 59 | 60 | PSE_TWITTER_ACCOUNTS = [ 61 | "phstockexchange", 62 | "colfinancial", 63 | "firstmetrosec", 64 | "BPItrade", 65 | "Philstocks_", 66 | "itradeph", 67 | "UTradePH", 68 | "wealthsec", 69 | ] 70 | 71 | DATA_FORMAT_COLS = { 72 | "o": "open", 73 | "h": "high", 74 | "l": "low", 75 | "c": "close", 76 | "v": "volume", 77 | "i": "openinterest", 78 | } 79 | 80 | CALENDAR_FORMAT = "%Y-%m-%d" 81 | -------------------------------------------------------------------------------- /python/fastquant/data/JFC_1-1-2020_4-1-2020.csv: -------------------------------------------------------------------------------- 1 | dt,close,volume 2 | 2020-01-02,213.0,377920 3 | 2020-01-03,214.8,1396990 4 | 2020-01-06,213.0,444130 5 | 2020-01-07,217.0,777460 6 | 2020-01-08,210.2,414790 7 | 2020-01-09,207.0,463590 8 | 2020-01-10,206.6,304320 9 | 2020-01-14,212.0,447000 10 | 2020-01-15,206.0,308320 11 | 2020-01-16,201.0,703890 12 | 2020-01-17,213.6,481370 13 | 2020-01-20,212.8,310910 14 | 2020-01-21,209.4,225600 15 | 2020-01-22,209.0,367870 16 | 2020-01-23,211.6,278020 17 | 2020-01-24,217.0,596560 18 | 2020-01-27,213.2,219790 19 | 2020-01-28,205.0,795770 20 
| 2020-01-29,203.6,318090 21 | 2020-01-30,200.2,580180 22 | 2020-01-31,191.2,876750 23 | 2020-02-03,196.0,426520 24 | 2020-02-04,195.0,1281730 25 | 2020-02-05,190.6,1369170 26 | 2020-02-06,191.9,1475450 27 | 2020-02-07,192.5,283570 28 | 2020-02-10,190.0,514380 29 | 2020-02-11,192.0,811210 30 | 2020-02-12,192.0,507220 31 | 2020-02-13,195.0,275400 32 | 2020-02-14,185.2,727570 33 | 2020-02-17,188.8,281170 34 | 2020-02-18,185.2,500990 35 | 2020-02-19,184.0,1798180 36 | 2020-02-20,188.0,1087480 37 | 2020-02-21,189.0,1135990 38 | 2020-02-24,185.6,1239120 39 | 2020-02-26,175.1,2328310 40 | 2020-02-27,174.5,652780 41 | 2020-02-28,170.0,1743110 42 | 2020-03-02,167.1,990990 43 | 2020-03-03,168.0,1158580 44 | 2020-03-04,174.6,869400 45 | 2020-03-05,184.0,1144230 46 | 2020-03-06,176.5,637540 47 | 2020-03-09,163.0,1144100 48 | 2020-03-10,158.0,1251780 49 | 2020-03-11,166.3,916340 50 | 2020-03-12,153.0,1198200 51 | 2020-03-13,140.5,2203770 52 | 2020-03-16,121.0,2237330 53 | 2020-03-19,91.1,3310100 54 | 2020-03-20,100.0,1994100 55 | 2020-03-23,95.0,2580240 56 | 2020-03-24,95.0,1597120 57 | 2020-03-25,98.45,4027090 58 | 2020-03-26,102.2,3227240 59 | 2020-03-27,107.0,3592270 60 | 2020-03-30,103.0,801630 61 | 2020-03-31,106.4,1549020 62 | 2020-04-01,103.2,2526960 63 | -------------------------------------------------------------------------------- /python/fastquant/data/JFC_20180101_20190110_DCV.csv: -------------------------------------------------------------------------------- 1 | dt,close,volume,dividend 2 | 2018-01-03,255.4,745780,0 3 | 2018-01-04,255.0,617010,0 4 | 2018-01-05,255.0,946040,0 5 | 2018-01-08,256.0,840630,0 6 | 2018-01-09,255.8,978180,0 7 | 2018-01-10,255.4,447710,0 8 | 2018-01-11,255.0,681780,0 9 | 2018-01-12,255.0,1311930,0 10 | 2018-01-15,263.8,1256670,0 11 | 2018-01-16,277.6,1795160,0 12 | 2018-01-17,277.6,1680700,0 13 | 2018-01-18,279.0,1484360,0 14 | 2018-01-19,280.0,560060,0 15 | 2018-01-22,283.0,1001520,0 16 | 2018-01-23,282.2,1179360,0 17 | 
2018-01-24,282.0,714160,0 18 | 2018-01-25,293.0,491640,0 19 | 2018-01-26,292.2,375570,0 20 | 2018-01-29,285.4,542160,0 21 | 2018-01-30,284.2,1125860,0 22 | 2018-01-31,284.8,910980,0 23 | 2018-02-01,287.2,512760,0 24 | 2018-02-02,294.0,943050,0 25 | 2018-02-05,282.0,742600,0 26 | 2018-02-06,289.0,1021600,0 27 | 2018-02-07,293.2,582230,0 28 | 2018-02-08,292.8,871390,0 29 | 2018-02-09,292.0,413260,0 30 | 2018-02-12,283.0,741090,0 31 | 2018-02-13,277.0,763250,0 32 | 2018-02-14,278.0,1491080,0 33 | 2018-02-15,285.0,516060,0 34 | 2018-02-19,285.0,492990,0 35 | 2018-02-20,286.0,397190,0 36 | 2018-02-21,282.0,442680,0 37 | 2018-02-22,282.0,602290,0 38 | 2018-02-23,282.0,1269870,0 39 | 2018-02-26,292.0,839100,0 40 | 2018-02-27,295.0,815230,0 41 | 2018-02-28,298.4,1052820,0 42 | 2018-03-01,297.6,663930,0 43 | 2018-03-02,298.0,735080,0 44 | 2018-03-05,299.6,388840,0 45 | 2018-03-06,298.6,833460,0 46 | 2018-03-07,304.0,853340,0 47 | 2018-03-08,295.0,367970,0 48 | 2018-03-09,292.0,352590,0 49 | 2018-03-12,295.4,303520,0 50 | 2018-03-13,293.0,557650,0 51 | 2018-03-14,288.4,757340,0 52 | 2018-03-15,284.0,1140780,0 53 | 2018-03-16,305.4,2052470,0 54 | 2018-03-19,298.0,531280,0 55 | 2018-03-20,286.0,1028940,0 56 | 2018-03-21,285.0,953100,0 57 | 2018-03-22,286.8,573890,0 58 | 2018-03-23,288.0,864870,0 59 | 2018-03-26,295.0,404650,0 60 | 2018-03-27,294.4,356420,0 61 | 2018-03-28,299.0,759390,0 62 | 2018-04-02,300.0,701450,0 63 | 2018-04-03,296.4,1392970,0 64 | 2018-04-04,285.0,1021530,0 65 | 2018-04-05,280.0,497530,0 66 | 2018-04-06,278.0,1301960,0 67 | 2018-04-10,284.0,1189830,0 68 | 2018-04-11,289.6,1197810,0 69 | 2018-04-12,300.0,268920,0 70 | 2018-04-13,292.2,1000070,0 71 | 2018-04-16,291.6,478570,0 72 | 2018-04-17,288.6,1061840,0 73 | 2018-04-18,294.2,496950,0 74 | 2018-04-19,285.0,1653070,0 75 | 2018-04-20,291.0,662390,0 76 | 2018-04-23,294.0,302310,0 77 | 2018-04-24,285.0,891380,0 78 | 2018-04-25,281.6,1202880,0 79 | 2018-04-26,280.0,1333040,0 80 | 2018-04-27,282.0,275720,0 81 
| 2018-04-30,285.0,720250,0 82 | 2018-05-02,283.0,439510,0 83 | 2018-05-03,275.0,919090,0 84 | 2018-05-04,275.4,863750,0 85 | 2018-05-07,279.0,790120,0 86 | 2018-05-08,278.0,1470730,0 87 | 2018-05-09,283.0,646300,0 88 | 2018-05-10,279.8,949240,0 89 | 2018-05-11,285.0,366080,0 90 | 2018-05-15,283.0,1041560,0 91 | 2018-05-16,286.6,495600,0 92 | 2018-05-17,285.0,432100,0 93 | 2018-05-18,285.0,236440,0 94 | 2018-05-21,280.0,355840,0 95 | 2018-05-22,284.0,570700,0 96 | 2018-05-23,287.0,574100,0 97 | 2018-05-24,285.0,376390,0 98 | 2018-05-25,281.2,486670,0 99 | 2018-05-28,280.4,338540,0 100 | 2018-05-29,283.0,317450,0 101 | 2018-05-30,280.0,1104490,0 102 | 2018-05-31,274.0,2835720,0 103 | 2018-06-01,283.8,750260,0 104 | 2018-06-04,280.4,1839120,0 105 | 2018-06-05,280.2,696340,0 106 | 2018-06-06,280.8,664040,0 107 | 2018-06-07,282.6,435860,0 108 | 2018-06-08,284.0,235220,0 109 | 2018-06-11,285.0,487600,0 110 | 2018-06-13,279.0,1075600,0 111 | 2018-06-14,279.2,1393440,0 112 | 2018-06-18,277.2,609580,0 113 | 2018-06-19,284.0,757250,0 114 | 2018-06-20,279.0,726920,0 115 | 2018-06-21,270.0,1867510,0 116 | 2018-06-22,267.0,1497250,0 117 | 2018-06-25,268.4,757810,0 118 | 2018-06-26,270.6,370900,0 119 | 2018-06-27,271.0,529860,0 120 | 2018-06-28,259.6,1761980,0 121 | 2018-06-29,263.0,378480,0 122 | 2018-07-02,260.0,965510,0 123 | 2018-07-03,260.0,925680,0 124 | 2018-07-04,259.8,1220700,0 125 | 2018-07-05,254.0,635340,0 126 | 2018-07-06,251.0,583500,0 127 | 2018-07-09,245.0,736500,0 128 | 2018-07-10,246.0,535530,0 129 | 2018-07-11,247.2,760520,0 130 | 2018-07-12,249.8,1783980,0 131 | 2018-07-13,251.8,611160,0 132 | 2018-07-16,248.8,273670,0 133 | 2018-07-17,252.6,240860,0 134 | 2018-07-18,254.2,142770,0 135 | 2018-07-19,253.0,164630,0 136 | 2018-07-20,250.0,585790,0 137 | 2018-07-23,250.0,257100,0 138 | 2018-07-24,251.0,224900,0 139 | 2018-07-25,251.2,207290,0 140 | 2018-07-26,256.2,714360,0 141 | 2018-07-27,266.0,646770,0 142 | 2018-07-30,266.0,281580,0 143 | 
2018-07-31,270.0,997780,0 144 | 2018-08-01,279.4,789140,0 145 | 2018-08-02,276.0,510190,0 146 | 2018-08-03,277.0,442350,0 147 | 2018-08-06,280.0,355370,0 148 | 2018-08-07,270.0,641970,0 149 | 2018-08-08,276.8,316510,0 150 | 2018-08-09,269.0,448860,0 151 | 2018-08-10,267.0,492160,0 152 | 2018-08-13,270.0,242500,0 153 | 2018-08-14,273.4,394990,0 154 | 2018-08-15,274.0,334270,0 155 | 2018-08-16,266.8,635710,0 156 | 2018-08-17,271.2,985800,0 157 | 2018-08-20,273.0,403650,0 158 | 2018-08-22,286.0,1729220,0 159 | 2018-08-23,293.0,2015570,0 160 | 2018-08-24,288.0,582540,0 161 | 2018-08-28,290.2,836640,0 162 | 2018-08-29,287.4,687570,0 163 | 2018-08-30,286.0,545750,0 164 | 2018-08-31,288.0,1025030,0 165 | 2018-09-03,282.0,347880,0 166 | 2018-09-04,286.6,575500,0 167 | 2018-09-05,285.0,477030,0 168 | 2018-09-06,276.0,745500,0 169 | 2018-09-07,281.0,546950,0 170 | 2018-09-10,278.4,799990,0 171 | 2018-09-11,276.0,358670,0 172 | 2018-09-12,269.0,572730,0 173 | 2018-09-13,277.0,550430,0 174 | 2018-09-14,269.0,560990,0 175 | 2018-09-17,275.0,167450,0 176 | 2018-09-18,272.0,692650,0 177 | 2018-09-19,264.0,542930,0 178 | 2018-09-20,271.0,361700,0 179 | 2018-09-21,272.0,694330,0 180 | 2018-09-24,271.0,207870,0 181 | 2018-09-25,268.8,229530,0 182 | 2018-09-26,266.2,132640,0 183 | 2018-09-27,259.0,804170,0 184 | 2018-09-28,257.0,605490,0 185 | 2018-10-01,252.0,394210,0 186 | 2018-10-02,250.0,913210,0 187 | 2018-10-03,252.0,393580,0 188 | 2018-10-04,252.0,359360,0 189 | 2018-10-05,247.0,897790,0 190 | 2018-10-08,243.0,927610,0 191 | 2018-10-09,252.4,427280,0 192 | 2018-10-10,246.0,390150,0 193 | 2018-10-11,254.0,490960,0 194 | 2018-10-12,262.0,440520,0 195 | 2018-10-15,250.0,480350,0 196 | 2018-10-16,254.2,864680,0 197 | 2018-10-17,258.6,379720,0 198 | 2018-10-18,260.0,328170,0 199 | 2018-10-19,265.0,328340,0 200 | 2018-10-22,272.0,699990,0 201 | 2018-10-23,268.0,688370,0 202 | 2018-10-24,264.2,462050,0 203 | 2018-10-25,262.0,469150,0 204 | 2018-10-26,270.0,698210,0 205 | 
2018-10-29,270.0,366380,0 206 | 2018-10-30,264.0,823140,0 207 | 2018-10-31,276.0,1466050,0 208 | 2018-11-05,280.0,1688940,0 209 | 2018-11-06,278.6,646700,0 210 | 2018-11-07,280.0,569150,0 211 | 2018-11-08,280.0,815650,0 212 | 2018-11-09,278.0,333550,0 213 | 2018-11-12,274.0,337800,0 214 | 2018-11-13,272.8,899670,0 215 | 2018-11-14,279.0,677510,0 216 | 2018-11-15,280.0,371040,0 217 | 2018-11-16,282.0,514500,0 218 | 2018-11-19,282.0,562330,0 219 | 2018-11-20,279.0,388900,0 220 | 2018-11-21,282.0,437010,0 221 | 2018-11-22,285.0,589260,0 222 | 2018-11-23,283.0,372480,0 223 | 2018-11-26,280.0,316450,0 224 | 2018-11-27,283.0,712330,0 225 | 2018-11-28,280.0,654090,0 226 | 2018-11-29,278.4,2113500,0 227 | 2018-12-03,286.8,1030870,0 228 | 2018-12-04,289.6,1533830,0 229 | 2018-12-05,294.0,735640,0 230 | 2018-12-06,296.0,504510,0 231 | 2018-12-07,295.0,574500,0 232 | 2018-12-10,291.0,264940,0 233 | 2018-12-11,297.0,602960,0 234 | 2018-12-12,294.0,1701840,0 235 | 2018-12-13,294.6,484920,0 236 | 2018-12-14,298.0,1378350,0 237 | 2018-12-17,300.0,1255210,0 238 | 2018-12-18,301.2,1014270,0 239 | 2018-12-19,305.0,1796000,0 240 | 2018-12-20,303.0,659480,0 241 | 2018-12-21,302.4,715510,0 242 | 2018-12-26,292.0,1087620,0 243 | 2018-12-27,295.0,585760,0 244 | 2018-12-28,291.8,425440,0 245 | 2019-01-02,293.0,181410,0 246 | 2019-01-03,292.0,1665440,0 247 | 2019-01-04,309.0,1622480,0 248 | 2019-01-07,323.0,1004160,0 249 | 2019-01-08,321.0,623090,0 250 | 2019-01-09,319.0,1659810,0 251 | 2019-01-10,314.0,1064200,0 252 | -------------------------------------------------------------------------------- /python/fastquant/data/__init__.py: -------------------------------------------------------------------------------- 1 | # Modules available for fastquant.data.* 2 | 3 | from fastquant.data.crypto.crypto import get_crypto_data, CRYPTO_EXCHANGES 4 | 5 | from fastquant.data.stocks.pse import ( 6 | # Gets from yahoo finance 7 | get_yahoo_data, 8 | # Gets listed PSE companies 9 | get_stock_table, 10 
| # Combines get_phisix_data and get_pse_data_cache 11 | get_pse_data, 12 | # Gets data from PHISIX 13 | get_phisix_data, 14 | # Gets data from PSE Data Cache 15 | get_pse_data_cache, 16 | pse_data_to_csv, 17 | ) 18 | 19 | # Combines get_pse_data and yahoo_data 20 | from fastquant.data.stocks.stocks import get_stock_data 21 | 22 | # Businesstimes news 23 | from fastquant.data.web.businesstimes import get_bt_news_sentiment 24 | 25 | # Twitter 26 | from fastquant.data.web.twitter import tweepy_api 27 | from fastquant.data.web.twitter import get_twitter_sentiment 28 | -------------------------------------------------------------------------------- /python/fastquant/data/bluechips.txt: -------------------------------------------------------------------------------- 1 | DMC DMCI Holdings, Inc. 2 | SCC Semirara Mining and Power Corporation 3 | AGI Alliance Global Group, Inc. 4 | LTG LT Group, Inc. 5 | MPI Metro Pacific Investments Corporation 6 | MEG Megaworld Corporation 7 | FGEN First Gen Corporation 8 | GTCAP GT Capital Holdings, Inc. 9 | MBT Metropolitan Bank and Trust Company 10 | RLC Robinson Land Corporation 11 | PCOR Petron Corporation 12 | SECB Security Bank Corporation 13 | AP Aboitiz Power Corporation 14 | AC Ayala Corporation 15 | RRHI Robinsons Retail Holdings, Inc. 16 | AEV Aboitiz Equity Ventures, Inc. 17 | BPI Bank of the Philippine Islands 18 | TEL PLDT, Inc. 19 | MER Manila Electric Company 20 | BDO BDO Unibank, Inc. 21 | SMC San Miguel Corporation 22 | JFC Jollibee Foods Corporation 23 | GLO Globe Telecom, Inc. 24 | ALI Ayala Land, Inc. 25 | PGOLD Puregold Price Club, Inc. 26 | ICT Int'l Container Terminal Services, Inc. 27 | JGS JG Summit Holdings, Inc. 28 | SMPH SM Prime Holdings, Inc. 
# Only support top 6 listed on https://www.coingecko.com/en/exchanges for now
CRYPTO_EXCHANGES = [
    "binance",
    "coinbasepro",
    "bithumb",
    "kraken",
    "kucoin",
    "bitstamp",
]
# To add more, append the corresponding ccxt class names listed at
# https://github.com/ccxt/ccxt/tree/master/python/ccxt

# strptime formats: daily dates vs. intraday timestamps.
DATETIME_FORMAT = {"daily": "%Y-%m-%d", "intraday": "%Y-%m-%d %H:%M:%S"}


def unix_time_millis(date):
    """Convert a date string to a unix timestamp in milliseconds.

    Strings containing ":" are treated as intraday ("%Y-%m-%d %H:%M:%S"),
    anything else as a plain daily date ("%Y-%m-%d").

    NOTE(review): parsing is naive and `timestamp()` interprets it in the
    local timezone, so the result is machine-TZ dependent — confirm callers
    (ccxt expects UTC epochs) intend this.
    """
    if ":" in date:
        fmt = DATETIME_FORMAT["intraday"]
    else:
        fmt = DATETIME_FORMAT["daily"]
    parsed = datetime.strptime(date, fmt)
    return int(parsed.timestamp() * 1000)
time_resolution="1d", exchange="binance" 32 | ): 33 | """ 34 | Get crypto data in OHLCV format 35 | 36 | Parameters 37 | ---------- 38 | ticker : str 39 | List of ticker symbols here: https://coinmarketcap.com/exchanges/binance/ 40 | start_date, end_date : str 41 | date in YYYY-MM-DD format 42 | time_resolution : str 43 | resolutions: '1w', '1d' (default), '1h', '1m' 44 | exchange : str 45 | market exchanges: 'binance' (default), 'coinbasepro', 'bithumb', 'kraken', 'kucoin', 'bitstamp' 46 | """ 47 | dt_format = ( 48 | DATETIME_FORMAT["intraday"] 49 | if "m" in time_resolution or "h" in time_resolution 50 | else DATETIME_FORMAT["daily"] 51 | ) 52 | start_date_epoch = unix_time_millis(start_date) 53 | end_date_epoch = unix_time_millis(end_date) 54 | 55 | if exchange in CRYPTO_EXCHANGES: 56 | # Get the exchange we want to use from ccxt's exchange attributes 57 | ex = getattr(ccxt, exchange)({"verbose": False}) 58 | 59 | # We're going to get data in batches, so want to know what the last record in the previous batch was 60 | previous_request_end_date_epoch = start_date_epoch 61 | 62 | # The time we want data from will shift each batch as well 63 | request_start_date_epoch = start_date_epoch 64 | 65 | # Variable to store our dataframe as we fill it out 66 | ohlcv_df = None 67 | 68 | while previous_request_end_date_epoch < end_date_epoch: 69 | # Pull the data from the exchange 70 | ohlcv_lol = ex.fetch_ohlcv( 71 | ticker, time_resolution, since=request_start_date_epoch 72 | ) 73 | # Convert it to a dataframe 74 | current_request_df = pd.DataFrame( 75 | ohlcv_lol, 76 | columns=["dt", "open", "high", "low", "close", "volume"], 77 | ) 78 | 79 | if current_request_df.size == 0: 80 | # If we got no results (which happens sometimes, like on binance for ETH/BTC when requesting 2018-02-08) 81 | # then step forward to the next day 82 | request_start_date_epoch += ( 83 | int(timedelta(days=1).total_seconds()) * 1000 84 | ) 85 | # Make sure we're at the start of that day 86 | 
request_start_date_epoch = unix_time_millis( 87 | pd.to_datetime(request_start_date_epoch, unit="ms").strftime( 88 | dt_format 89 | ) 90 | ) 91 | previous_request_end_date_epoch = request_start_date_epoch - 1 92 | continue 93 | 94 | if ohlcv_df is None: 95 | # We don't have a dataframe yet, so start with this 96 | ohlcv_df = current_request_df 97 | else: 98 | # Trim any overlap with the new results 99 | ohlcv_df = ohlcv_df[ohlcv_df.dt < current_request_df.dt.min()] 100 | # Append the results to what we have so far 101 | # ohlcv_df = ohlcv_df.append(current_request_df) 102 | ohlcv_df = pd.concat([ohlcv_df, current_request_df], ignore_index=True) 103 | 104 | # Get the last entry timestamp after we've retrieved (or attempted to) additional records 105 | current_request_end_date_epoch = int(ohlcv_df.dt.max()) 106 | 107 | if current_request_end_date_epoch <= previous_request_end_date_epoch: 108 | # We haven't gained any additional records, so there's no point in further requests 109 | # Let's mark this for the data end date, mostly so both end_date and end_date_epoch will be 110 | # in sync in case someone in future uses them in code futher down and to ensure the loop bails 111 | end_date_epoch = current_request_end_date_epoch 112 | # Update the actual end date so that the stored value will reflect the actual end 113 | end_date = pd.to_datetime(end_date_epoch, unit="ms") 114 | # The loop would exit based on the end_date_epoch value, but we'll save that check occuring 115 | break 116 | else: 117 | # We've gained some more records, so let's place another request (unless we're past the end, but our loop will catch that without checking here) 118 | # The next request should start a millisecond after this one ended 119 | request_start_date_epoch = current_request_end_date_epoch + 1 120 | # This request's end date should now be set as current for the next loop 121 | previous_request_end_date_epoch = current_request_end_date_epoch 122 | 123 | if ohlcv_df is not None: 124 | # 
Convert the unix timestampe to datetime 125 | ohlcv_df["dt"] = pd.to_datetime(ohlcv_df["dt"], unit="ms") 126 | # Trim off any records which were returned beyond the end 127 | ohlcv_df = ohlcv_df[ohlcv_df.dt <= end_date] 128 | # Save input parameters into dataframe 129 | ohlcv_df.start_date = start_date 130 | ohlcv_df.end_date = end_date 131 | ohlcv_df.symbol = ticker 132 | ohlcv_df = ohlcv_df.set_index("dt") 133 | 134 | return ohlcv_df 135 | else: 136 | raise NotImplementedError( 137 | "The exchange " + exchange + " is not yet supported. Available exchanges: " 138 | ", ".join(CRYPTO_EXCHANGES) 139 | ) 140 | -------------------------------------------------------------------------------- /python/fastquant/data/merged_stock_data.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/enzoampil/fastquant/805c4440bf96ba04cfd43aaf4926e4b45f3c3f33/python/fastquant/data/merged_stock_data.zip -------------------------------------------------------------------------------- /python/fastquant/data/network.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/enzoampil/fastquant/805c4440bf96ba04cfd43aaf4926e4b45f3c3f33/python/fastquant/data/network.jpg -------------------------------------------------------------------------------- /python/fastquant/data/senti_disclosures.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/enzoampil/fastquant/805c4440bf96ba04cfd43aaf4926e4b45f3c3f33/python/fastquant/data/senti_disclosures.pkl -------------------------------------------------------------------------------- /python/fastquant/data/stocklist.txt: -------------------------------------------------------------------------------- 1 | 2GO 2GO Group, Inc. 2 | AAA Asia Amalgamated Holdings Corp. 3 | AB Atok-Big Wedge Company, Inc. 4 | ABA AbaCore Capital Holdings, Inc. 
5 | ABG Asiabest Group International, Inc. 6 | ABS ABS-CBN Corporation 7 | ABSP ABS-CBN Holdings Corporation "PDR" 8 | AC Ayala Corporation 9 | ACE Acesite (Phils.) Hotel Corp. 10 | ACEPH AC Energy Philippines, Inc. 11 | ACEX ACE Enexor, Inc. 12 | ACPA Ayala Corporation Preferred Class "A" 13 | ACR Alsons Consolidated Resources, Inc. 14 | AEV Aboitiz Equity Ventures, Inc. 15 | AGI Alliance Global Group, Inc. 16 | ALCO Arthaland Corporation 17 | ALCPB Arthaland Corp. Perpetual Pref "B" 18 | ALHI Anchor Land Holdings, Inc. 19 | ALI Ayala Land, Inc. 20 | ALLHC AyalaLand Logistics Holdings Corp. 21 | ANI AgriNurture, Inc. 22 | ANS A. Soriano Corporation 23 | AP Aboitiz Power Corporation 24 | APC APC Group, Inc. 25 | APL Apollo Global Capital, Inc. 26 | APO Anglo-Philippine Holdings Corporation 27 | APX Apex Mining Company, Inc. 28 | AR Abra Mining and Industrial Corp. 29 | ARA Araneta Properties, Inc. 30 | AT Atlas Cons. Mining and Dev't Corp. 31 | ATI Asian Terminals, Inc. 32 | ATN ATN Holdings, Inc. "A" 33 | ATNB ATN Holdings, Inc. "B" 34 | AUB Asia United Bank Corporation 35 | AXLM Axelum Resources Corp. 36 | BC Benguet Corporation "A" 37 | BCB Benguet Corporation "B" 38 | BCOR Berjaya Philippines, Inc. 39 | BCP Benguet Corporation Conv. Pref. "A" 40 | BDO BDO Unibank, Inc. 41 | BEL Belle Corporation 42 | BH BHI Holdings, Inc. 43 | BHI Boulevard Holdings, Inc. 44 | BKR Bright Kindle Res. and Investments, Inc. 45 | BLFI BDO Leasing and Finance, Inc. 46 | BLOOM Bloomberry Resorts Corporation 47 | BMM Bogo Medellin Milling Company 48 | BPI Bank of the Philippine Islands 49 | BRN A Brown Company, Inc. 50 | BSC Basic Energy Corporation 51 | C Chelsea Logistics and Infra Hdgs Corp. 52 | CA Concrete Aggregates Corp. "A" 53 | CAB Concrete Aggregates Corp. "B" 54 | CAT Central Azucarera De Tarlac 55 | CDC Cityland Development Corporation 56 | CEB Cebu Air, Inc. 57 | CEI Crown Equities, Inc. 58 | CEU Centro Escolar University 59 | CHI Cebu Holdings, Inc. 
60 | CHIB China Banking Corporation 61 | CHP Cemex Holdings Philippines, Inc. 62 | CIC Concepcion Industrial Corporation 63 | CIP Chemical Industries of the Phils. 64 | CLI Cebu Landmasters, Inc. 65 | CNPF Century Pacific Food, Inc. 66 | COAL Coal Asia Holdings, Inc. 67 | COL COL Financial Group, Inc. 68 | COSCO Cosco Capital, Inc. 69 | CPG Century Properties Group, Inc. 70 | CPM Century Peak Holdings Corporation 71 | CPV Cebu Property Venture and Dev't Corp."A" 72 | CPVB Cebu Property Venture and Dev't Corp."B" 73 | CROWN Crown Asia Chemicals Corporation 74 | CSB Citystate Savings Bank 75 | CYBR Cyber Bay Corporation 76 | DAVIN Da Vinci Capital Holdings, Inc. 77 | DD DoubleDragon Properties Corp. 78 | DDPR DoubleDragon Properties Corp. Pref. 79 | DELM Del Monte Pacific Limited 80 | DFNN DFNN, Inc. 81 | DITO DITO CME Holdings Corp. 82 | DIZ Dizon Copper Silver Mines, Inc. 83 | DMC DMCI Holdings, Inc. 84 | DMW D.M. Wenceslao and Associates, Inc. 85 | DNA Philab Holdings Corp. 86 | DNL D and L Industries, Inc. 87 | DWC Discovery World Corporation 88 | EAGLE Eagle Cement Corporation 89 | ECP Easycall Comm. Phils., Inc. 90 | EEI EEI Corporation 91 | EG IP E-Game Ventures Inc. 92 | ELI Empire East Land Holdings, Inc. 93 | EMP Emperador Inc. 94 | EURO Euro-Med Laboratories Phil., Inc. 95 | EVER Ever Gotesco Resources and Hold'gs, Inc. 96 | EW East West Banking Corporation 97 | FAF First Abacus Financial Holdings Corp. 98 | FB San Miguel Food and Beverage, Inc. 99 | FBP San Miguel Food and Beverage Pref 100 | FBP2 San Miguel Food and Beverage Perp Pref 2 101 | FDC Filinvest Development Corporation 102 | FERRO Ferronoux Holdings, Inc. 103 | FEU Far Eastern University, Inc. 104 | FFI Filipino Fund, Inc. 105 | FGEN First Gen Corporation 106 | FGENG First Gen Corporation - Series G Pref. 107 | FJP F and J Prince Holdings Corp. "A" 108 | FJPB F and J Prince Holdings Corp. "B" 109 | FLI Filinvest Land, Inc. 
110 | FMETF First Metro Philippine Equity ETF 111 | FNI Global Ferronickel Holdings, Inc. 112 | FOOD Alliance Select Foods Int'l, Inc. 113 | FPH First Phil. Holdings Corp. 114 | FPI Forum Pacific, Inc. 115 | FRUIT Fruitas Holdings, Inc. 116 | GEO GEOGRACE Resources Philippines, Inc. 117 | GERI Global-Estate Resorts, Inc. 118 | GLO Globe Telecom, Inc. 119 | GLOPA Globe Telecom, Inc - Preferred "A" 120 | GLOPP Globe Telecom, Inc. - Perpetual Pref. 121 | GMA7 GMA Network, Inc. 122 | GMAP GMA Holdings, Inc. "PDR" 123 | GPH Grand Plaza Hotel Corporation 124 | GREEN Greenergy Holdings, Inc. 125 | GSMI Ginebra San Miguel Inc. 126 | GTCAP GT Capital Holdings, Inc. 127 | GTPPA GT Capital Non-Voting Perpetual Pref "A" 128 | GTPPB GT Capital Non-Voting Perpetual Pref "B" 129 | HI House of Investments, Inc. 130 | HLCM Holcim Philippines, Inc. 131 | HOME AllHome Corp. 132 | HOUSE 8990 Holdings, Inc. 133 | HVN Golden Bria Holdings, Inc. 134 | I I-Remit, Inc. 135 | ICT Int'l Container Terminal Services, Inc. 136 | IDC Italpinas Development Corporation 137 | IMI Integrated Micro-Electronics, Inc. 138 | IMP Imperial Resources, Inc. 139 | ION Ionics, Inc. 140 | IPM IPM Holdings, Inc. 141 | IPO iPeople, Inc. 142 | IRC Philippine Infradev Holdings Inc. 143 | IS Island Information and Technology, Inc. 144 | JAS Jackstones, Inc. 145 | JFC Jollibee Foods Corporation 146 | JGS JG Summit Holdings, Inc. 147 | JOH Jolliville Holdings Corporation 148 | KEP Keppel Philippines Properties, Inc. 149 | KPH Keppel Philippines Holdings, Inc. "A" 150 | KPHB Keppel Philippines Holdings, Inc. "B" 151 | KPPI Kepwealth Property Phils., Inc. 152 | LAND City and Land Developers, Inc. 153 | LBC LBC Express Holdings, Inc. 154 | LC Lepanto Consolidated Mining Co. "A" 155 | LCB Lepanto Consolidated Mining Co. "B" 156 | LFM Liberty Flour Mills, Inc. 157 | LIHC Lodestar Investment Hldgs Corp. 
158 | LMG LMG Chemicals Corporation 159 | LOTO Pacific Online Systems Corporation 160 | LPZ Lopez Holdings Corporation 161 | LR Leisure and Resorts World Corporation 162 | LRP Leisure and Resorts World Corp.-Pref 163 | LRW Leisure and Resorts World Corp.-Warrants 164 | LSC Lorenzo Shipping Corporation 165 | LTG LT Group, Inc. 166 | MA Manila Mining Corporation "A" 167 | MAB Manila Mining Corporation "B" 168 | MAC Macroasia Corporation 169 | MACAY Macay Holdings, Inc. 170 | MAH Metro Alliance Hold'gs and Eqts. "A" 171 | MAHB Metro Alliance Hold'gs and Eqts. "B" 172 | MARC Marcventures Holdings, Inc. 173 | MAXS Max's Group, Inc. 174 | MB Manila Bulletin Publishing Corp. 175 | MBC Manila Broadcasting Company 176 | MBT Metropolitan Bank and Trust Company 177 | MED MEDCO Holdings, Inc. 178 | MEG Megaworld Corporation 179 | MER Manila Electric Company 180 | MFC Manulife Financial Corporation 181 | MFIN Makati Finance Corporation 182 | MG Millenium Global Holdings, Inc. 183 | MHC Mabuhay Holdings Corporation 184 | MJC Manila Jockey Club, Inc. 185 | MJIC MJC Investments Corporation 186 | MPI Metro Pacific Investments Corporation 187 | MRC MRC Allied, Inc. 188 | MRSGI Metro Retail Stores Group, Inc. 189 | MVC Mabuhay Vinyl Corporation 190 | MWC Manila Water Company, Inc. 191 | MWIDE Megawide Construction Corporation 192 | MWP Megawide Const. Corp. - Perpetual Pref. 193 | NI NiHAO Minerals Resources Int'l, Inc. 194 | NIKL Nickel Asia Corporation 195 | NOW Now Corporation 196 | NRCP National Reinsurance Corp. of the Phils. 197 | OM Omico Corporation 198 | OPM Oriental Pet. and Minerals Corp. "A" 199 | OPMB Oriental Pet. and Minerals Corp. "B" 200 | ORE Oriental Peninsula Resources Group, Inc. 201 | OV The Philodrill Corporation 202 | PA Pacifica Holdings, Inc. 203 | PAL PAL Holdings Inc. 204 | PAX Paxys, Inc. 
205 | PBB Philippine Business Bank 206 | PBC Philippine Bank of Communications 207 | PCOR Petron Corporation 208 | PERC Petroenergy Resources Corporation 209 | PGOLD Puregold Price Club, Inc. 210 | PHA Premiere Horizon Alliance Corporation 211 | PHES Phil. Estates Corporation 212 | PHN Phinma Corporation 213 | PHR PH Resorts Group Holdings, Inc. 214 | PIP Pepsi-Cola Products Philippines, Inc. 215 | PIZZA Shakey's Pizza Asia Ventures, Inc. 216 | PLC Premium Leisure Corp. 217 | PMPC Panasonic Manufacturing Phils. Corp. 218 | PNB Philippine National Bank 219 | PNX Phoenix Petroleum Philippines, Inc. 220 | PNX3A Phoenix Petroleum - Non-Voting Pref. 3A 221 | PNX3B Phoenix Petroleum - Non-Voting Pref. 3B 222 | PNXP Phoenix Petroleum - Perpetual Pref. 223 | PORT Globalport 900, Inc. 224 | PPC Pryce Corporation 225 | PRC Philippine Racing Club, Inc. 226 | PRF2A Petron Corp. - Perpetual Pref. Series 2A 227 | PRF2B Petron Corp. - Perpetual Pref. Series 2B 228 | PRF3B Petron Corp. - Perpetual Pref. Series 3B 229 | PRIM Prime Media Holdings, Inc. 230 | PRMX Primex Corporation 231 | PSB Philippine Savings Bank 232 | PSE The Philippine Stock Exchange, Inc 233 | PTC Philippine Trust Company 234 | PX Philex Mining Corporation 235 | PXP PXP Energy Corporation 236 | RCB Rizal Commercial Banking Corp. 237 | RCI Roxas and Company, Inc. 238 | REG Republic Glass Holdings Corp. 239 | RFM RFM Corporation 240 | RLC Robinson Land Corporation 241 | RLT Philippine Realty and Holdings Corp. 242 | ROCK Rockwell Land Corporation 243 | ROX Roxas Holdings, Inc. 244 | RRHI Robinsons Retail Holdings, Inc. 245 | SBS SBS Philippines Corporation 246 | SCC Semirara Mining and Power Corporation 247 | SECB Security Bank Corporation 248 | SEVN Philippine Seven Corporation 249 | SFI Swift Foods, Inc. 250 | SFIP Swift Foods, Inc. Conv. Pref. 251 | SGI Solid Group, Inc. 252 | SGP Synergy Grid and Dev't Phils., Inc. 253 | SHLPH Pilipinas Shell Petroleum Corporation 254 | SHNG Shang Properties, Inc. 
255 | SLF Sun Life Financial, Inc. 256 | SLI Sta. Lucia Land, Inc. 257 | SM SM Investments Corporation 258 | SMC San Miguel Corporation 259 | SMC2A San Miguel Corporation Series 2-A Pref. 260 | SMC2B San Miguel Corporation Series 2-B Pref. 261 | SMC2C San Miguel Corporation Series 2-C Pref. 262 | SMC2D San Miguel Corporation Series 2-D Pref. 263 | SMC2E San Miguel Corporation Series 2-E Pref. 264 | SMC2F San Miguel Corporation Series 2-F Pref. 265 | SMC2G San Miguel Corporation Series 2-G Pref. 266 | SMC2H San Miguel Corporation Series 2-H Pref. 267 | SMC2I San Miguel Corporation Series 2-I Pref. 268 | SMPH SM Prime Holdings, Inc. 269 | SOC SOCResources, Inc. 270 | SPC SPC Power Corporation 271 | SPM Seafront Resources Corporation 272 | SRDC Supercity Realty Development Corporation 273 | SSI SSI Group, Inc. 274 | SSP SFA Semicon Philippines Corporation 275 | STI STI Education Systems Holdings, Inc. 276 | STR Vistamalls, Inc. 277 | SUN Suntrust Home Developers, Inc. 278 | T TKC Metals Corporation 279 | TBGI Transpacific Broadband Group Int'l. Inc. 280 | TECH Cirtek Holdings Philippines Corporation 281 | TEL PLDT, Inc. 282 | TFC PTFC Redevelopment Corporation 283 | TFHI Top Frontier Investment Holdings, Inc. 284 | TUGS Harbor Star Shipping Services, Inc. 285 | UBP Union Bank of the Philippines 286 | UNI Unioil Resources and Holdings Co., Inc. 287 | UPM United Paragon Mining Corporation 288 | URC Universal Robina Corporation 289 | V Vantage Equities, Inc. 290 | VITA Vitarich Corporation 291 | VLL Vista Land and Lifescapes, Inc. 292 | VMC Victorias Milling Company, Inc. 293 | VUL Vulcan Industrial and Mining Corporation 294 | VVT Vivant Corporation 295 | WEB Philweb Corporation 296 | WIN Wellex Industries, Inc. 297 | WLCON Wilcon Depot, Inc. 298 | WPI Waterfront Philippines, Inc. 299 | X Xurpas Inc. 300 | ZHI Zeus Holdings, Inc. 
def process_phisix_date_dict(phisix_dict):
    """Flatten one phisix API response into a single flat record dict.

    Takes the raw JSON dict returned by the phisix endpoint, extracts the
    first entry of its "stock" list, and returns a dict with the keys:
    dt, name, currency, close, percent_change, volume, symbol.
    """
    stock = phisix_dict["stock"][0]
    price = stock["price"]
    # Normalize the "as_of" timestamp to a plain calendar-date string
    as_of_date = pd.to_datetime(phisix_dict["as_of"]).date()
    return {
        "dt": datetime.strftime(as_of_date, CALENDAR_FORMAT),
        "name": stock["name"],
        "currency": price["currency"],
        "close": price["amount"],
        "percent_change": stock["percent_change"],
        "volume": stock["volume"],
        "symbol": stock["symbol"],
    }
def get_phisix_data_by_date(symbol, date):
    """
    Requests data in json format from phisix API

    Tries the new API endpoint first, with fallback to the old API.
    (Refactored: the request-and-process logic was duplicated verbatim for
    each endpoint; it is now a single loop with identical behavior.)

    Parameters
    ----------
    symbol : str
        PSE stock symbol, e.g. "JFC"
    date : str
        date in YYYY-MM-DD format

    Returns
    -------
    dict or None
        Flat record dict (see `process_phisix_date_dict`), or None on a
        non-trading day / symbol not found.

    Raises
    ------
    requests.HTTPError
        If the fallback endpoint responds with a server error (500).
    """
    # New endpoint first, old endpoint as fallback -- same query shape.
    endpoints = [
        "http://1.phisix-api.appspot.com/stocks/",
        "http://phisix-api2.appspot.com/stocks/",
    ]
    res = None
    for endpoint in endpoints:
        url = endpoint + "{}.{}.json".format(symbol, date)
        res = requests.get(url)
        if res.ok:
            unprocessed_dict = res.json()
            return process_phisix_date_dict(unprocessed_dict)
    # As in the original, only the fallback response's status is inspected.
    if res.status_code == 500:
        # server error
        res.raise_for_status()
    # non-trading day
    return None
def get_phisix_data(symbol, start_date, end_date, save=False, max_straight_nones=10):
    """Returns pricing data for a PHISIX stock symbol.

    Parameters
    ----------
    symbol : str
        Symbol of the stock in the PSE. You can refer to this link: https://www.pesobility.com/stock.
    start_date : str
        Starting date (YYYY-MM-DD) of the period that you want to get data on
    end_date : str
        Ending date (YYYY-MM-DD) of the period you want to get data on
    save : bool
        Unused; kept for backward compatibility with existing callers.
    max_straight_nones : int
        Give up (return None) once this many consecutive dates yield no data.

    Returns
    -------
    pandas.DataFrame or None
        Stock data (in CV format: dt, close, volume) for the specified company
        and date range, or None if the symbol is not found.
    """
    date_range = (
        pd.period_range(start_date, end_date, freq="D").to_series().astype(str).values
    )

    # Never require more consecutive misses than there are dates to try
    max_straight_nones = min(max_straight_nones, len(date_range))
    pse_data_list = []
    straight_none_count = 0
    for date in tqdm(date_range):
        pse_data_1day = get_phisix_data_by_date(symbol, date)

        if pse_data_1day is None:
            # FIX: the old code duplicated this increment in both branches of
            # an iteration-count check; the consecutive-miss count alone
            # decides when to give up (count can only reach the threshold
            # once enough iterations have passed, so behavior is unchanged).
            straight_none_count += 1
            if straight_none_count >= max_straight_nones:
                print(
                    "{} not found in phisix after the first {} date iterations!".format(
                        symbol, straight_none_count
                    )
                )
                return None
            continue
        # Refresh straight none count when phisix returns
        straight_none_count = 0
        pse_data_list.append(pse_data_1day)

    pse_data_df = pd.DataFrame(pse_data_list)
    return pse_data_df[["dt", "close", "volume"]]
def get_stock_table(stock_table_fp=None):
    """
    Returns dataframe containing info about PSE listed stocks while also saving it

    Parameters
    ----------
    stock_table_fp : str or pathlib.Path, optional
        Where the downloaded table is written as CSV; defaults to
        DATA_PATH/PSE_STOCK_TABLE_FILE.

    Returns
    -------
    pandas.DataFrame
        Columns: Company Name, Stock Symbol, Sector, Subsector, Listing Date,
        company_id, security_id.
    """
    if stock_table_fp is None:
        stock_table_fp = Path(DATA_PATH, PSE_STOCK_TABLE_FILE)

    stock_table = pd.DataFrame(
        columns=[
            "Company Name",
            "Stock Symbol",
            "Sector",
            "Subsector",
            "Listing Date",
            "company_id",
            "security_id",
        ]
    )

    # POST payload for the PSE EDGE company-directory search endpoint;
    # only "pageNo" changes between iterations.
    data = {
        "pageNo": "1",
        "companyId": "",
        "keyword": "",
        "sortType": "",
        "dateSortType": "DESC",
        "cmpySortType": "ASC",
        "symbolSortType": "ASC",
        "sector": "ALL",
        "subsector": "ALL",
    }

    # NOTE(review): the directory page count (1..6) is hard-coded — confirm
    # the PSE listing still fits in 6 pages.
    for p in range(1, 7):
        print(str(p) + " out of " + str(7 - 1) + " pages", end="\r")
        data["pageNo"] = str(p)
        r = requests.post(
            url="https://edge.pse.com.ph/companyDirectory/search.ax", data=data
        )
        table = LH.fromstring(r.text)
        # Pair the HTML table rows with each row's <a onclick="..."> attribute;
        # [::2] keeps every other anchor (presumably two anchors per row —
        # TODO confirm against the live page markup).
        page_df = (
            pd.concat(
                [
                    pd.read_html(r.text)[0],
                    pd.DataFrame({"attr": table.xpath("//tr/td/a/@onclick")[::2]}),
                ],
                axis=1,
            )
            # onclick looks like fn('<company_id>','<security_id>'); the slices
            # below cut out the two quoted arguments.
            .assign(
                company_id=lambda x: x["attr"].apply(
                    lambda s: s[s.index("(") + 2 : s.index(",") - 1]
                )
            )
            .assign(
                security_id=lambda x: x["attr"].apply(
                    lambda s: s[s.index(",") + 2 : s.index(")") - 1]
                )
            )
            .drop(["attr"], axis=1)
        )

        # stock_table = stock_table.append(page_df)
        stock_table = pd.concat([stock_table, page_df], ignore_index=True)
    stock_table.to_csv(stock_table_fp, index=False)
    return stock_table
def get_pse_all_stocks():
    """
    Returns dataframe containing all PSE listed stock symbols

    Queries the phisix stocks endpoint; returns an empty DataFrame when the
    request fails (non-2xx response).

    Returns
    -------
    pandas.DataFrame
        A single ``symbol`` column, sorted alphabetically.
    """
    res = requests.get("http://phisix-api.appspot.com/stocks.json")
    # requests.Response is falsy for non-2xx status codes
    if not res:
        return pd.DataFrame()

    # FIX: indexing with ["symbol"] yields a Series, and the old
    # `df.columns = ["symbol"]` on a Series was a meaningless attribute
    # assignment; build the one-column DataFrame the docstring promises.
    symbols = json_normalize(res.json(), "stock")["symbol"].sort_values()
    return symbols.to_frame(name="symbol").reset_index(drop=True)
def get_pse_data_cache(symbol=None, cache_fp=None, update=False, verbose=False):
    """
    Loads cached historical data.

    Returns the whole cache when ``symbol`` is None, otherwise that symbol's
    frame — or None when the symbol (or the cache file itself) is missing.
    """
    if update:
        update_pse_data_cache()
    cache_fp = Path(DATA_PATH, PSE_CACHE_FILE) if cache_fp is None else cache_fp

    # Guard clause: nothing to load without a cache file
    if not cache_fp.exists():
        print("Cache does not exist! Try update=True")
        return None

    # Cache is a two-level (symbol, field) column header CSV
    cached = pd.read_csv(cache_fp, index_col=0, header=[0, 1])
    cached.index = pd.to_datetime(cached.index)
    if verbose:
        print("Loaded: ", cache_fp)
    if symbol is None:
        return cached
    return cached[symbol] if symbol in cached.columns else None
def update_pse_data_cache(start_date="2010-01-01", verbose=True, cache_fp=None):
    """
    Downloads DOHLC data of all PSE companies using get_pse_data_old
    and saves it under /data to be used as cache.

    NOTE: useful to add sector in column
    """
    if verbose:
        print("Updating cache...")
    today = datetime.now().date().strftime("%Y-%m-%d")

    listing = get_stock_table(stock_table_fp=None)

    frames = {}
    missing = []
    for symbol in tqdm(listing["Stock Symbol"].values):
        try:
            frames[symbol] = get_pse_data_old(
                symbol, start_date, today, verbose=False
            )
        except Exception as e:
            missing.append(symbol)
            print(e)
    if verbose:
        print("No data:\n", missing)

    # Concatenate per-symbol frames column-wise, aligned and sorted on date
    merged = pd.concat(frames, axis=1, sort=True)
    merged.columns.names = ["Symbol", None]
    merged.index.name = "dt"

    # save as csv
    if cache_fp is None:
        cache_fp = Path(DATA_PATH, PSE_CACHE_FILE)

    merged.to_csv(cache_fp, index=True)
    if verbose:
        print("Saved: ", cache_fp)
def get_pse_data(
    symbol,
    start_date,
    end_date,
    save=False,
    max_straight_nones=10,
    format="dohlc",
):
    """Returns pricing data for a PHISIX stock symbol with caching.

    Parameters
    ----------
    symbol : str
        Symbol of the stock in the PSE. You can refer to this link: https://www.pesobility.com/stock.
    start_date : str
        Starting date (YYYY-MM-DD) of the period that you want to get data on
    end_date : str
        Ending date (YYYY-MM-DD) of the period you want to get data on
    save : bool
        Write the resulting frame to a per-symbol CSV under DATA_PATH.
    max_straight_nones : int
        NOTE(review): currently ignored — the inner get_phisix_data calls
        hard-code max_straight_nones=10.
    format : str
        Data-format code; when it contains "v" (volume) data comes from the
        phisix API, otherwise from the local DOHLC cache.

    Returns
    -------
    pandas.DataFrame or None
        Stock data (in CV format) for the specified company and date range,
        indexed by ``dt`` with a zero-filled ``dividend`` column.
    """
    start = datestring_to_datetime(start_date)
    end = datestring_to_datetime(end_date)

    # Per-symbol CSV used both as a read cache ("v" path) and as the save target
    fp = Path(DATA_PATH, "{}_stock_{}_{}.csv".format(symbol, start_date, end_date))

    if "v" in format:
        # Volume requested: cache file if present, otherwise live phisix query
        if fp.exists():
            pse_data_df = pd.read_csv(fp)
        else:
            pse_data_df = get_phisix_data(
                symbol, start_date, end_date, save=False, max_straight_nones=10
            )
        if pse_data_df is None:
            return None
    else:
        cache = get_pse_data_cache(symbol=symbol)
        # Return None if the column is not in the cached df
        if cache is None:
            return None
        cache = cache.reset_index()
        # oldest_date = cache["dt"].iloc[0]
        newest_date = cache["dt"].iloc[-1]
        if newest_date <= end:
            # Cache is stale: fetch only the missing tail from phisix,
            # overwriting start_date with the newest cached date
            start_date = newest_date.strftime(CALENDAR_FORMAT)
            pse_data_df = get_phisix_data(
                symbol, start_date, end_date, save=False, max_straight_nones=10
            )
            # NOTE(review): if the phisix result is empty, the cache is NOT
            # merged in and pse_data_df stays empty (cached rows are lost for
            # this call); also get_phisix_data may return None, which would
            # raise on .empty — confirm whether these paths can occur here.
            if not pse_data_df.empty:
                pse_data_df = pd.concat([cache, pse_data_df], ignore_index=True)
        else:
            pse_data_df = cache.copy()

    # Clip to the requested window and drop duplicate dates (cache + fresh
    # rows can overlap on the boundary date)
    pse_data_df["dt"] = pd.to_datetime(pse_data_df.dt)
    idx = (start <= pse_data_df["dt"]) & (pse_data_df["dt"] <= end)
    pse_data_df = pse_data_df[idx].drop_duplicates("dt")
    if save:
        pse_data_df.to_csv(fp, index=False)
        print("Saved: ", fp)
    # add dividend column for dividends sake XD

    pse_data_df["dividend"] = 0
    return pse_data_df.set_index("dt")
def pse_data_to_csv(symbol, start_date, end_date, pse_dir=DATA_PATH):
    """Download PSE pricing data for ``symbol`` and save it as CSV.

    Parameters
    ----------
    symbol : str
        PSE stock symbol.
    start_date, end_date : str
        Date range (YYYY-MM-DD) passed through to get_pse_data.
    pse_dir : str or pathlib.Path
        Directory where the CSV file(s) are written (defaults to DATA_PATH).
    """
    pse = get_pse_data(symbol, start_date, end_date)
    # FIX: the file suffix was garbled ("OHLCV.csCRYPTO_EXCHANGESv" — a stray
    # constant name pasted into the string); restore the intended
    # "<symbol>_<start>_<end>_OHLCV.csv" name.
    fp = Path(
        pse_dir,
        "{}_{}_{}_OHLCV.csv".format(symbol, start_date, end_date),
    )
    if isinstance(pse, pd.DataFrame):
        pse.to_csv(fp)
    else:
        # Legacy path: some versions returned (price_df, performance_dict)
        # with per-key dividend ("D") and earnings ("E") frames.
        pse[0].to_csv(fp)
        performance_dict = pse[1]
        performance_dict["D"].to_csv(
            Path(pse_dir, "{}_{}_{}_D.csv".format(symbol, start_date, end_date))
        )
        performance_dict["E"].to_csv(
            Path(pse_dir, "{}_{}_{}_E.csv".format(symbol, start_date, end_date))
        )
def get_stock_data(
    symbol,
    start_date,
    end_date,
    source="yahoo",
    format="ohlcv",
    dividends=True,
):
    """Returns pricing data for a specified stock and source.

    Parameters
    ----------
    symbol : str
        Symbol of the stock in the PSE or Yahoo.
        You can refer to these links:
        PHISIX: https://www.pesobility.com/stock
        YAHOO: https://www.nasdaq.com/market-activity/stocks/screener?exchange=nasdaq
    start_date : str
        Starting date (YYYY-MM-DD) of the period that you want to get data on
    end_date : str
        Ending date (YYYY-MM-DD) of the period you want to get data on
    source : str
        First source to query from ("pse", "yahoo").
        If the stock is not found in the first source,
        the query is run on the other source.
    format : str
        Format of the output data; each character maps to a column via
        DATA_FORMAT_COLS. Note that the phisix fallback narrows this to "c"
        (close only).
    dividends : bool
        Forwarded to get_yahoo_data; include a dividend column when True.

    Returns
    -------
    pandas.DataFrame
        Stock data (in the specified `format`) for the specified company and date range
    """

    if source == "yahoo":
        # The query is run on 'yahoo', but if the symbol isn't found, the same query is run on 'phisix'.
        df = get_yahoo_data(symbol, start_date, end_date, dividends)
        # NOTE(review): "JFC" is special-cased to always use phisix even when
        # Yahoo returns data — looks like a workaround, confirm it is still
        # needed before removing.
        if df is None or symbol == "JFC":
            format = "c"
            df = get_pse_data(symbol, start_date, end_date, format=format)

    elif source == "phisix":
        # The query is run on 'phisix', but if the symbol isn't found, the same query is run on 'yahoo'.
        # NOTE(review): format stays "c" even when the yahoo fallback is used,
        # so only the close column is returned on that path.
        format = "c"
        df = get_pse_data(symbol, start_date, end_date, format=format)
        if df is None:
            df = get_yahoo_data(symbol, start_date, end_date, dividends)

    else:
        raise Exception("Source must be either 'phisix' or 'yahoo'")

    # Map each format character to its column name
    df_columns = [DATA_FORMAT_COLS[c] for c in format]
    missing_columns = [col for col in df_columns if col not in df.columns]

    # Fill missing columns with np.nan
    for missing_column in missing_columns:
        df[missing_column] = np.nan

    if len(missing_columns) > 0:
        print("Missing columns filled w/ NaN:", missing_columns)

    # Save input parameters into dataframe
    # NOTE(review): these are plain attribute assignments on the DataFrame;
    # pandas does not propagate them through copies/operations.
    df.start_date = start_date
    df.end_date = end_date
    df.symbol = symbol

    return df[df_columns]
def get_yahoo_data(symbol, start_date, end_date, dividends=True):
    """Fetch OHLCAV pricing data for a Yahoo Finance symbol.

    Parameters
    ----------
    symbol : str
        Symbol of the stock in the Yahoo. You can refer to this link:
        https://www.nasdaq.com/market-activity/stocks/screener?exchange=nasdaq.
    start_date : str
        Starting date (YYYY-MM-DD) of the period that you want to get data on
    end_date : str
        Ending date (YYYY-MM-DD) of the period you want to get data on
    dividends : bool
        Include dividends in the dataframe.

    Returns
    -------
    pandas.DataFrame
        Stock data (in OHLCAV format) plus a ``dividend`` column, indexed by
        datetime ``dt``.
    """
    column_map = {
        "Date": "dt",
        "Open": "open",
        "High": "high",
        "Low": "low",
        "Close": "close",
        "Adj Close": "adj_close",
        "Volume": "volume",
        "Dividends": "dividend",
    }
    ordered_columns = [
        "dt",
        "open",
        "high",
        "low",
        "close",
        "adj_close",
        "volume",
        "dividend",
    ]

    prices = yf.download(symbol, start=start_date, end=end_date).reset_index()

    if dividends:
        payouts = yf.Ticker(symbol).dividends
        if payouts.shape[0] > 0:
            # Align payout dates against the price rows
            prices = prices.join(payouts, how="left", on="Date")
        else:
            prices["dividend"] = 0
    else:
        prices["Dividends"] = 0

    prices = prices.rename(columns=column_map)[ordered_columns].drop_duplicates()
    prices["dividend"] = prices["dividend"].fillna(0)
    prices["dt"] = pd.to_datetime(prices.dt)
    return prices.set_index("dt")
def get_bt_news_sentiment(keyword, page_nums=None):
    """
    Scrape Business Times (https://www.businesstimes.com.sg) search results
    for a keyword (e.g. "facebook", "jollibee") and compute a daily average
    sentiment score.

    Parameters
    ----------
    keyword : str
        The keyword you wanted to search for in Business Times page.
    page_nums : int
        The number of iteration of pages you want to scrape.

    Returns
    ----------
    date_sentiments: dict
        The dictionary output of the scraped data in form of {date: sentiment score}

    TO DO: change page_nums to a start_date (and end_date maybe)
    """

    nltk.download("vader_lexicon", quiet=True)  # download vader lexicon

    if page_nums is None:
        page_nums = 1
        print("no page numbers indicated, setting this variable to 1")

    daily_scores = {}
    analyzer = SentimentIntensityAnalyzer()

    for page_num in tqdm(range(1, page_nums + 1)):
        search_url = "https://www.businesstimes.com.sg/search/{}?page={}".format(
            keyword.replace(" ", "%20"), str(page_num)
        )
        listing = BeautifulSoup(urlopen(search_url).read(), features="html.parser")
        for post in listing.findAll("div", {"class": "media-body"}):
            time.sleep(1)  # throttle per-article requests
            url = post.a["href"]
            date = post.time.text
            try:
                article_html = urlopen(url).read()
            except Exception:
                # Some result links fail as-is; retry with the trailing
                # two characters stripped
                url = url[:-2]
                article_html = urlopen(url).read()
            article = BeautifulSoup(article_html, features="lxml")
            passage = "".join(p.text for p in article.findAll("p"))
            score = analyzer.polarity_scores(passage)["compound"]
            daily_scores.setdefault(date, []).append(score)

    # Average the scores per day; keys are shifted one day forward
    date_sentiment = {}
    for day, scores in daily_scores.items():
        date_sentiment[
            datetime.strptime(day, "%d %b %Y").date() + timedelta(days=1)
        ] = round(sum(scores) / float(len(scores)), 3)

    return date_sentiment
def tweepy_api(consumer_key, consumer_secret, access_token, access_secret):
    """
    Returns authenticated tweepy.API object

    Sample methods:
        user_timeline: returns recent tweets from a specified twitter user
        - screen_name: username of account of interest
        - count: number of most recent tweets to return
    """
    handler = tweepy.OAuthHandler(consumer_key, consumer_secret)
    handler.set_access_token(access_token, access_secret)
    return tweepy.API(handler)
def get_twitter_sentiment(stock_code, twitter_auth, start_date, twitter_accounts=None):
    """
    This function scrapes twitter data based on stock code and twitter accounts specified

    Parameters
    ----------
    stock_code : str
        The stock code you wanted to scrape from Twitter
    twitter_auth : obj
        Authenticated tweepy.API object
    start_date : str
        Starting date (YYYY-MM-DD) of the period that you want to get twitter data on
    twitter_accounts : list
        List of twitter account names you want to scrape from

    Returns
    ----------
    date_sentiments: dict
        The dictionary output of the scraped data in form of {date: sentiment score}

    Raises
    ----------
    Exception
        When no twitter accounts are given, or when no matching tweet is found.
    """
    year, month, day = map(int, start_date.split(sep="-"))
    start_date = date(year, month, day)

    stock_code = stock_code.lstrip("$")

    # Empty list to store the user tweets
    usertweets = []

    sia = SentimentIntensityAnalyzer()

    if twitter_accounts is None or len(twitter_accounts) == 0:
        raise Exception("You don't have any twitter accounts specified.")
    else:
        for acc in twitter_accounts:
            print(f"Scraping ${stock_code} tweets from {acc}")
            cursor = tweepy.Cursor(
                twitter_auth.user_timeline,
                id=acc,
                count=200,
                tweet_mode="extended",
            )

            item_counter = 1
            for item in cursor.pages():
                # Keep tweets on/after start_date that mention the stock code
                new_tweets = [
                    tweet
                    for tweet in item
                    if datetime.strptime(
                        str(tweet.created_at), "%Y-%m-%d %H:%M:%S"
                    ).date()
                    >= start_date
                    and stock_code in tweet.full_text
                ]

                print(f"{len(new_tweets)} tweets scraped from {acc}")

                if len(new_tweets) > 0 and item_counter == 1:
                    usertweets.extend(new_tweets)
                    item_counter += 1
                elif (
                    len(new_tweets) > 0
                    and item_counter != 1
                    # FIX: the old code compared the bound method
                    # `created_at.date` (not its result) to start_date, which
                    # is always unequal, so pagination never stopped at the
                    # start-date boundary; call .date() to compare dates.
                    and usertweets[-1].created_at.date() != start_date
                ):
                    usertweets.extend(new_tweets)
                    item_counter += 1
                else:
                    break  # Break if 0 tweets

    tweet_df = pd.DataFrame([])

    if len(usertweets) > 0:
        tweet_created_at = [tweet.created_at for tweet in usertweets]
        tweet_text = [tweet.full_text for tweet in usertweets]

        tweet_df["tweet_created_at"] = tweet_created_at
        tweet_df["tweet_created_at"] = pd.to_datetime(
            tweet_df["tweet_created_at"]
        ).dt.date
        tweet_df["tweet"] = tweet_text
        # VADER compound score per tweet, then averaged per day
        tweet_df["sentiment_score"] = tweet_df["tweet"].apply(
            lambda tweet: sia.polarity_scores(tweet)["compound"]
        )

        tweet_avg_df = tweet_df.groupby("tweet_created_at", as_index=False).agg(
            {"sentiment_score": "mean"}
        )

        date_sentiment = dict(
            zip(
                tweet_avg_df["tweet_created_at"],
                tweet_avg_df["sentiment_score"],
            )
        )

        return date_sentiment

    else:
        raise Exception(
            f"No tweet found for {stock_code} starting from {start_date}."
        )
122 | ) 123 | -------------------------------------------------------------------------------- /python/fastquant/disclosures/__init__.py: -------------------------------------------------------------------------------- 1 | # Modules available for fastquant.disclosures.* 2 | 3 | from fastquant.disclosures.base import get_company_disclosures 4 | from fastquant.disclosures.pse import DisclosuresPSE 5 | from fastquant.disclosures.investagrams import DisclosuresInvestagrams 6 | from fastquant.disclosures.sentiment import get_disclosure_sentiment 7 | -------------------------------------------------------------------------------- /python/fastquant/disclosures/base.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | """ 4 | Created on Tue Apr 5, 2020 5 | 6 | @authors: enzoampil & jpdeleon 7 | """ 8 | 9 | from datetime import datetime 10 | import warnings 11 | from string import digits 12 | 13 | 14 | def _remove_amend(x): 15 | if len(x.split("]")) == 2: 16 | return x.split("]")[1] 17 | else: 18 | return x 19 | 20 | 21 | def format_date(date, informat="%Y-%m-%d", outformat="%%m-%d-%Y"): 22 | return datetime.strptime(date, informat).strftime(outformat) 23 | 24 | 25 | def date_to_epoch(date, format="%Y-%m-%d"): 26 | return int(datetime.strptime(date, format).timestamp()) 27 | 28 | 29 | def remove_digits(string): 30 | remove_digits = str.maketrans("", "", digits) 31 | res = string.translate(remove_digits) 32 | return res 33 | 34 | 35 | def get_company_disclosures(*args, **kwargs): 36 | errmsg = "This function is deprecated. Use `DisclosuresPSE` class instead." 
class DisclosuresInvestagrams:
    """
    Disclosures scraped from investagrams

    Attributes
    ---------
    disclosures_json : list
        raw "timescale marks" payload from the Investagrams API
    disclosures_dict : dict
        parsed disclosures keyed by label: "D" (dividends), "E" (earnings)
    earnings : pd.DataFrame
        parsed earnings disclosures
    dividends : pd.DataFrame
        parsed dividend disclosures
    """

    def __init__(self, symbol, from_date, to_date):
        """
        symbol : str
            phisix symbol
        from_date : str
            (%Y-%m-%d)
        to_date : str
            (%Y-%m-%d)
        """
        self.symbol = symbol
        self.from_date = from_date
        self.to_date = to_date
        # Fetch and parse eagerly on construction (performs a network request)
        self.disclosures_json = self.get_disclosures_json()
        self.disclosures_dict = self.get_disclosures_df()
        self.earnings = self.disclosures_dict["E"]
        self.dividends = self.disclosures_dict["D"]

    def get_disclosures_json(self):
        """Query the Investagrams timescale-marks endpoint; return parsed JSON.

        Raises AssertionError on an (effectively) empty response body.
        """
        # Browser-like headers; the API appears to reject bare requests —
        # TODO confirm which headers are actually required.
        headers = {
            "Accept": "application/json, text/javascript, */*; q=0.01",
            "Referer": "https://www.investagrams.com/Stock/PSE:JFC",
            "Origin": "https://www.investagrams.com",
            "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36",
            "Content-Type": "text/plain; charset=utf-8",
        }
        from_date_epoch = date_to_epoch(self.from_date)
        to_date_epoch = date_to_epoch(self.to_date)
        params = (
            ("symbol", "PSE:{}".format(self.symbol)),
            ("from", from_date_epoch),
            ("to", to_date_epoch),
            ("resolution", "D"),  # Setting D (daily) by default
        )

        response = requests.post(
            "https://webapi.investagrams.com/InvestaApi/TradingViewChart/timescale_marks",
            headers=headers,
            params=params,
        )
        if hasattr(response, "text"):
            assert len(response.text) > 10, "Empty response from investagrams.com"
        return response.json()

    def disclosures_json_to_df(self):
        """Split raw marks into per-label ("D"/"E") DataFrames.

        Each mark's "tooltip" is a list of "key:value" strings that is parsed
        into extra columns alongside id/time/color/label.
        """
        disclosure_dfs = {}
        for disc in ["D", "E"]:
            filtered_examples = [
                ex for ex in self.disclosures_json if ex["label"] == disc
            ]
            # Parse "key:value" tooltip entries into a column per key
            additional_feats_df = pd.DataFrame(
                [
                    dict(
                        [
                            tuple(item.split(":"))
                            for item in ex["tooltip"]
                            if ":" in item
                        ]
                    )
                    for ex in filtered_examples
                ]
            )
            main_df = pd.DataFrame(filtered_examples)[["id", "time", "color", "label"]]
            combined = pd.concat([main_df, additional_feats_df], axis=1)
            # "time" arrives as Unix seconds
            combined["time"] = pd.to_datetime(combined.time, unit="s")
            if "Total Revenue" in combined.columns.values:
                # Revenue strings look like e.g. "1.23B": split into unit
                # letter and numeric amount
                combined["Revenue Unit"] = combined["Total Revenue"].apply(
                    lambda x: remove_digits(x).replace(".", "")
                )
                combined["Total Revenue"] = (
                    combined["Total Revenue"]
                    .str.replace("B", "")
                    .str.replace("M", "")
                    .astype(float)
                )
                # Net income is followed by a parenthesis which corresponds to that quarter's YoY growth
                combined["NI Unit"] = combined["Net Income"].apply(
                    lambda x: remove_digits(x).replace(".", "")
                )
                combined["Net Income Amount"] = (
                    combined["Net Income"]
                    .str.replace("B", "")
                    .str.replace("M", "")
                    .apply(lambda x: x.split()[0])
                    .astype(float)
                )
                combined["Net Income YoY Growth (%)"] = combined["Net Income"].apply(
                    lambda x: str(x)
                    .replace("(", "")
                    .replace(")", "")
                    .replace("%", "")
                    .split()[1]
                )
            disclosure_dfs[disc] = combined
        return disclosure_dfs

    def get_disclosures_df(self):
        """Return the parsed disclosures dict, fetching the JSON if missing."""
        if self.disclosures_json is None:
            self.disclosures_json = self.get_disclosures_json()
        return self.disclosures_json_to_df()
def get_sentiments(passage):
    """
    Helper function to get the sentiment from a blob of words.

    Parameters
    ----------
    passage : str
        The blob of word from description of disclosures

    Returns
    ----------
    score: float
        Output of the nltk sentiment analyzer (compound score); 0 for
        non-string input (e.g. NaN descriptions).

    """
    nltk.download("vader_lexicon", quiet=True)
    sia = SentimentIntensityAnalyzer()
    try:
        return sia.polarity_scores(passage)["compound"]
    except AttributeError:
        # Non-string input (e.g. NaN description) — treat as neutral
        return 0


def get_disclosure_sentiment(stock_code, start_date, end_date=None, source="pse"):
    """
    This function scrapes pse/investagram disclosure using fastquant and calculate the
    sentiment on each disclosure.

    Parameters
    ----------
    stock_code : str
        The stock code.

    start_date: str
        start date to get the disclosure in form of "%Y-%m-%d"

    end_date: str
        end date in form of "%Y-%m-%d"

    source: str
        pse or investagrams, only supports pse as of now

    Returns
    ----------
    date_sentiments: dict
        The dictionary output of the data in form of {date: sentiment score},
        with keys formatted as "%d %b %Y"

    """
    # DisclosuresPSE expects "%m-%d-%Y" dates, so convert on the way in
    start_date = datetime.strptime(start_date, "%Y-%m-%d").date().strftime("%m-%d-%Y")
    if end_date is not None:
        end_date = datetime.strptime(end_date, "%Y-%m-%d").date().strftime("%m-%d-%Y")
    dpse = DisclosuresPSE(symbol=stock_code, start_date=start_date, end_date=end_date)

    df = dpse.get_combined_disclosures()[
        ["Announce Date and Time", "Background/Description of the Disclosure"]
    ]
    df.columns = ["date", "description"]
    # Score each disclosure's description, then reduce the timestamp to a
    # date-only "%d %b %Y" string (first 11 chars hold the date portion)
    df["sentiments"] = df["description"].apply(get_sentiments)
    df["date"] = pd.to_datetime(df["date"].apply(lambda x: str(x)[:11])).dt.strftime(
        "%d %b %Y"
    )
    date_sentiment = {}

    # NOTE(review): duplicate dates overwrite earlier scores — the last
    # disclosure of a day wins; confirm whether averaging was intended.
    for k, v in zip(df["date"], df["sentiments"]):
        date_sentiment[k] = v

    return date_sentiment
class CustomIndicator(bt.Indicator):
    """
    Custom Indicator

    Exposes an arbitrary data-feed column (named "custom" by default)
    as a backtrader indicator line so strategies can read it directly.
    """

    lines = ("custom",)

    params = (("custom_column", "custom"),)

    plotinfo = dict(
        plotymargin=0.15,
        plothlines=[0],
        plotyticks=[5, 95],
    )

    def __init__(self):
        super().__init__()
        self.custom_column = self.params.custom_column

    def _plotlabel(self):
        # Suppress the default parameter label on the plot.
        return

    def next(self):
        # Mirror the current value of the configured feed line into this
        # indicator's "custom" line, one bar at a time.
        feed_line = getattr(self.datas[0], self.custom_column)
        self.lines.custom[0] = feed_line[0]
requests 6 | import os 7 | from datetime import datetime, timedelta 8 | import smtplib 9 | from email.mime.multipart import MIMEMultipart 10 | from email.mime.text import MIMEText 11 | from fastquant.data.stocks.stocks import get_stock_data 12 | 13 | 14 | def periodic_fetch(file_dir, symbol, today, next_period_dummy=False): 15 | today_df = get_stock_data(symbol, today, today) 16 | 17 | # Retrieve saved historical data on disk and append new data 18 | # TODO: add checks if daily updates were broken 19 | df = pd.read_csv(file_dir, parse_dates=["dt"]).set_index("dt") 20 | # df = df.append(today_df) 21 | df = pd.concat([df, today_df], ignore_index=True) 22 | if next_period_dummy: 23 | tomorrow_dummy = today_df.iloc[-1:].copy() 24 | tomorrow_dummy.index = tomorrow_dummy.index + timedelta(days=1) 25 | # df = df.append(tomorrow_dummy) 26 | df = pd.concat([df, tomorrow_dummy], ignore_index=True) 27 | 28 | df.to_csv(file_dir) 29 | 30 | return df 31 | 32 | 33 | def slack_post(message, webhook_url): 34 | # See https://api.slack.com/tutorials/slack-apps-hello-world for more information about Slack apps 35 | 36 | requests.post( 37 | webhook_url, 38 | data=json.dumps({"text": message}), 39 | headers={"Content-Type": "application/json"}, 40 | ) 41 | 42 | 43 | def slack_notif(symbol, action, date=None): 44 | webhook_url = os.getenv("SLACK_URL") 45 | assert ( 46 | webhook_url 47 | ), "Please set your slack webhook url as an environment variable: SLACK_URL" 48 | # Set date to the current date (UTC + 0) if no date argument is passed 49 | date = date or datetime.utcnow().strftime("%Y-%m-%d") 50 | message = "Today is " + date + ": " + action + " " + symbol or "" 51 | slack_post(message, webhook_url) 52 | 53 | 54 | def email_notif( 55 | symbol, 56 | action, 57 | to_address, 58 | date=None, 59 | subject=None, 60 | message=None, 61 | host="smtp.gmail.com", 62 | port=587, 63 | ): 64 | """ 65 | Send email w/ credentials saved as environment variables for security 66 | 67 | If your 
def trigger_bot(symbol, action, date, channel=None, **kwargs):
    """
    Route a trading notification to the requested channel.

    Parameters
    ----------
    symbol : str or None
        Ticker symbol (omitted from the console message when falsy).
    action : str
        "buy", "sell", or anything else (treated as hold).
    date : str
        Date string included in the message.
    channel : str, optional
        "slack", "email", or None/other for a plain console print.
    **kwargs
        Extra keyword arguments forwarded to `email_notif`
        (e.g. `to_address`, `subject`).
    """
    if channel == "slack":
        slack_notif(symbol, action, date=date)
    elif channel == "email":
        email_notif(symbol, action, date=date, **kwargs)
    else:
        # The original printed the exact same message in three identical
        # buy/sell/hold branches; collapsed into one print.
        print(
            ">>> Notif bot: Today is",
            date,
            ":",
            action,
            symbol or "",
            "<<<",
        )
    return
TODAY = datetime.now().date().strftime("%Y-%m-%d")


class Portfolio:
    def __init__(
        self,
        stock_list,
        start_date="2015-01-01",
        end_date=TODAY,
        init_weights=None,
        verbose=False,
    ):
        """
        Allows constrained optimization of portfolio consisting of PSE stocks

        Attributes
        ----------
        stock_list : list
            list of stock symbols e.g. ["MBT","JFC","ALI"]
        start_date : str
            starting date of stock data (default="2015-01-01")
        end_date : str
            ending date of stock data (default is date today)
        init_weights : list
            list of initial weights for each stock in portfolio
            (random weights are drawn when None)
        verbose : bool
            if True, print intermediate return/risk values
        """
        self.stock_list = stock_list
        self.start_date = start_date
        self.end_date = end_date
        self.verbose = verbose
        self.data = self.get_data()
        # Daily percentage returns per stock
        self.returns = self.data.pct_change()
        if init_weights is None:
            self.random_weights = np.random.random(len(stock_list))
        else:
            assert len(init_weights) == len(stock_list)
            self.random_weights = init_weights
        self.optimum_weights = None
        self.optimum = self.optimize_portfolio()

    def get_data(self):
        """Fetch closing prices for each stock and merge into one dataframe."""
        dfs = []
        for i in self.stock_list:
            df = get_stock_data(i, self.start_date, self.end_date, format="c")
            df.columns = [i]
            dfs.append(df)
        data = pd.concat(dfs, axis=1)
        data.index.name = "DATE"
        return data

    def calculate_portfolio_returns(self, weights):
        """Annualized expected portfolio return (252 trading days/year)."""
        weights = np.asarray(weights)
        portfolio_returns = np.sum(self.returns.mean() * weights) * 252
        if self.verbose:
            print("Expected Portfolio Return:", portfolio_returns)
        return portfolio_returns

    def calculate_portfolio_risk(self, weights):
        """Annualized portfolio volatility: sqrt(w' * Cov * w)."""
        # Accept plain lists too; the original required a numpy array
        # because it used `weights.T` directly (lists have no `.T`).
        weights = np.asarray(weights)
        portfolio_risk = np.sqrt(
            np.dot(weights.T, np.dot(self.returns.cov() * 252, weights))
        )
        if self.verbose:
            print("Expected Risk:", portfolio_risk)
        return portfolio_risk

    def generate_portfolios(self, N=10000):
        """Sample N random weight vectors; return their returns and risks."""
        preturns = []
        pvariances = []
        for _ in range(N):
            weights = np.random.random(len(self.stock_list))
            weights /= np.sum(weights)  # normalize so weights sum to 1
            preturns.append(self.calculate_portfolio_returns(weights))
            pvariances.append(self.calculate_portfolio_risk(weights))
        return np.array(preturns), np.array(pvariances)

    def calculate_statistics(self, weights):
        """Return [expected return, risk, Sharpe ratio] for given weights."""
        portfolio_return = self.calculate_portfolio_returns(weights)
        portfolio_risk = self.calculate_portfolio_risk(weights)
        sharpe_ratio = portfolio_return / portfolio_risk
        return [portfolio_return, portfolio_risk, sharpe_ratio]

    def min_func_sharpe(self, weights):
        # we want to maximize sharpe ratio = minimize the negative of it
        return -self.calculate_statistics(weights)[2]

    def optimize_portfolio(self):
        """Maximize the Sharpe ratio (SLSQP, weights in [0, 1], sum to 1)."""
        constraints = {"type": "eq", "fun": lambda x: np.sum(x) - 1}
        bounds = tuple((0, 1) for _ in range(len(self.stock_list)))
        init_weights = (
            self.random_weights
            if self.optimum_weights is None
            else self.optimum_weights
        )
        optimum = optimization.minimize(
            fun=self.min_func_sharpe,
            x0=init_weights,
            method="SLSQP",
            bounds=bounds,
            constraints=constraints,
        )
        if optimum.success:
            self.optimum_weights = optimum["x"].round(3)
            self.print_optimal_portfolio()
        else:
            print("Optimization failed. Try different init_weights.")
        # BUG FIX: always return the scipy result object; the original
        # returned None on failure, leaving `self.optimum` inconsistent.
        return optimum

    def print_optimal_portfolio(self):
        """Print the optimal weights and their summary statistics."""
        print("Optimal weights:", self.optimum_weights)
        print(
            "Expected return, volatility and Sharpe ratio:",
            self.calculate_statistics(self.optimum_weights),
        )

    def plot_portfolio(self, optimal=True, **kwargs):
        """Scatter-plot random portfolios (risk vs return, colored by Sharpe).

        If `optimal` is True, mark the optimized portfolio with a red star.
        Extra kwargs are forwarded to `generate_portfolios` (e.g. N).
        """
        fig = plt.figure(figsize=(10, 6))
        preturns, pvariances = self.generate_portfolios(**kwargs)
        plt.scatter(pvariances, preturns, c=preturns / pvariances, marker="o")
        plt.grid(True)
        plt.xlabel("Expected Volatility")
        plt.ylabel("Expected Return")
        plt.colorbar(label="Sharpe Ratio")
        if optimal:
            stats = self.calculate_statistics(self.optimum_weights)
            plt.plot(stats[1], stats[0], "r*", markersize=20.0)
        return fig
class BBandsStrategy(BaseStrategy):
    """
    Bollinger Bands strategy.

    Buys when the close drops below the lower band ("oversold") and sells
    when the close rises above the upper band ("overbought").
    Reference: https://community.backtrader.com/topic/122/bband-strategy/2

    Parameters
    ----------
    period : int
        Lookback used to compute the moving average and standard deviation
    devfactor : int
        Number of standard deviations from the moving average used to
        derive the upper and lower bands

    TODO: Study this strategy closer based on the above reference. Current implementation is naive.
    """

    params = (
        ("period", 20),  # basis period for the moving average
        ("devfactor", 2.0),
    )

    def __init__(self):
        super().__init__()
        # Pull strategy-level parameters into attributes.
        self.period = self.params.period
        self.devfactor = self.params.devfactor

        if self.strategy_logging:
            print("===Strategy level arguments===")
            print("period :", self.period)
            print("devfactor :", self.devfactor)
        band_ind = bt.ind.BBands(period=self.period, devfactor=self.devfactor)
        self.mid = band_ind.mid
        self.top = band_ind.top
        self.bot = band_ind.bot

    def buy_signal(self):
        # Close under the lower band.
        return self.dataclose[0] < self.bot

    def sell_signal(self):
        # Close over the upper band.
        return self.dataclose[0] > self.top
class BuyAndHoldStrategy(BaseStrategy):
    """
    Buy and Hold Strategy

    Buys on the first bar without an open position and only sells within
    the last two bars of the feed, holding for the whole backtest.
    """

    def __init__(self):
        # BUG FIX: this method was named `_init_` (single underscores), so
        # Python never invoked it as the constructor — `super().__init__()`
        # was never called through it and the attributes below were never
        # initialized before the signal methods ran.
        # Initialize global variables
        super().__init__()
        # Strategy level variables
        self.buy_and_hold = None
        self.buy_and_hold_sell = None

    def buy_signal(self):
        # Buy once: only when there is no open position.
        if not self.position:
            self.buy_and_hold = True
        return self.buy_and_hold

    def sell_signal(self):
        # Sell when within the last two bars of the data feed.
        if (len(self) + 2) >= self.len_data:
            self.buy_and_hold_sell = True
        else:
            self.buy_and_hold_sell = False
        return self.buy_and_hold_sell
class CustomStrategy(BaseStrategy):
    """
    Implements a chosen dataframe column as a custom indicator (column name set as "custom" by default).

    The strategy is structured similar to RSIStrategy: the asset is sold
    when the indicator rises above `upper_limit` (considered "overbought",
    default 95) and bought when it falls below `lower_limit` (considered
    "oversold", default 5).

    Parameters
    ----------
    upper_limit : float
        The upper value of the custom indicator above which the asset is sold
    lower_limit : float
        The lower value of the custom indicator below which the asset is bought
    custom_column : str
        The name of the column in the dataframe that corresponds to the custom indicator

    """

    params = (
        ("upper_limit", 95),
        ("lower_limit", 5),
        ("custom_column", "custom"),
    )

    def __init__(self):
        # Initialize global variables
        super().__init__()
        # Strategy level variables
        self.upper_limit = self.params.upper_limit
        self.lower_limit = self.params.lower_limit
        self.custom_column = self.params.custom_column
        self.custom_indicator = CustomIndicator(
            self.data,
            custom_column=self.custom_column,
        )
        # Plotting lines are based on the upper and lower limits
        self.custom_indicator.plotinfo.plotyticks = [
            self.lower_limit,
            self.upper_limit,
        ]

        # CONSISTENCY FIX: every sibling strategy (e.g. TernaryStrategy,
        # RSIStrategy) gates these prints behind `strategy_logging`; this
        # one printed unconditionally.
        if self.strategy_logging:
            print("===Strategy level arguments===")
            print("Upper limit: ", self.upper_limit)
            print("Lower limit: ", self.lower_limit)

    # Buy when the custom indicator is below the lower limit, and sell when it's above the upper limit
    def buy_signal(self):
        return self.custom_indicator[0] < self.lower_limit

    def sell_signal(self):
        return self.custom_indicator[0] > self.upper_limit
class TernaryStrategy(BaseStrategy):
    """
    Treats a chosen dataframe column as an explicit ternary signal feed:
    buy when the value equals `buy_int` (1), sell when it equals
    `sell_int` (-1), and do nothing otherwise (0).

    Parameters
    ----------
    buy_int : int
        The value of the custom column that indicates a "buy" signal (default=1)
    sell_int : int
        The value of the custom column that indicates a "sell" signal (default=-1)
    custom_column : str
        The name of the column in the dataframe that corresponds to the custom ternary indicator

    """

    params = (
        ("buy_int", 1),
        ("sell_int", -1),
        ("custom_column", "custom"),
    )

    def __init__(self):
        super().__init__()
        # Pull strategy-level parameters into attributes.
        self.buy_int = self.params.buy_int
        self.sell_int = self.params.sell_int
        self.custom_column = self.params.custom_column
        self.custom_indicator = CustomIndicator(
            self.data,
            custom_column=self.custom_column,
        )
        # Tick marks on the plot line up with the sell/buy signal values.
        self.custom_indicator.plotinfo.plotyticks = [
            self.sell_int,
            self.buy_int,
        ]

        if self.strategy_logging:
            print("===Strategy level arguments===")
            print("Buy Int: ", self.buy_int)
            print("Sell Int: ", self.sell_int)

    def buy_signal(self):
        # Cast to int so float feeds (e.g. 1.0) still match exactly.
        return int(self.custom_indicator[0]) == self.buy_int

    def sell_signal(self):
        return int(self.custom_indicator[0]) == self.sell_int
class SMACStrategy(BaseStrategy):
    """
    Simple moving average crossover strategy

    Buys when the fast SMA crosses above the slow SMA, sells on the
    opposite cross.

    Parameters
    ----------
    fast_period : int
        The period used for the fast moving average line (should be smaller than `slow_period`)
    slow_period : int
        The period used for the slow moving average line (should be larger than `fast_period`)

    """

    params = (
        ("fast_period", 10),  # period for the fast moving average
        ("slow_period", 30),  # period for the slow moving average
    )

    def __init__(self):
        # Initialize global variables
        super().__init__()
        # Strategy level variables
        self.fast_period = self.params.fast_period
        self.slow_period = self.params.slow_period

        if self.strategy_logging:
            print("===Strategy level arguments===")
            print("fast_period :", self.fast_period)
            print("slow_period :", self.slow_period)
        sma_fast = bt.ind.SMA(period=self.fast_period)  # fast moving average
        sma_slow = bt.ind.SMA(period=self.slow_period)  # slow moving average
        self.crossover = bt.ind.CrossOver(sma_fast, sma_slow)  # crossover signal

    def buy_signal(self):
        # CrossOver is +1 on the bar the fast line crosses above the slow line.
        return self.crossover > 0

    def sell_signal(self):
        # CrossOver is -1 on the bar the fast line crosses below the slow line.
        return self.crossover < 0
class MACDStrategy(BaseStrategy):
    """
    Moving Average Convergence Divergence (MACD) strategy
    Simple implementation of backtrader MACD reference: https://www.backtrader.com/blog/posts/2016-07-30-macd-settings/macd-settings/

    Summary:
    Enter if the macd line crosses the signal line to the upside and a control Simple Moving Average has had a
    net negative direction in the last x periods (current SMA value below the value x periods ago).
    In the opposite situation, given a market position exists, a sell position is made.

    Parameters
    ----------
    fast_period : int
        The period used for the fast exponential moving average line (should be smaller than `slow_period`)
    slow_period : int
        The period used for the slow exponential moving average line (should be larger than `fast_period`)
    signal_period : int
        The period used for the signal line for MACD
    sma_period : int
        Period for the moving average (default: 30)
    dir_period : int
        Period for SMA direction calculation (default: 10)
    """

    params = (
        ("fast_period", 12),  # period for the fast moving average
        ("slow_period", 26),
        ("signal_period", 9),
        ("sma_period", 30),
        ("dir_period", 10),
    )

    def __init__(self):
        # Initialize global variables
        super().__init__()
        # Strategy level variables
        self.fast_period = self.params.fast_period
        self.slow_period = self.params.slow_period
        self.signal_period = self.params.signal_period
        self.sma_period = self.params.sma_period
        self.dir_period = self.params.dir_period

        if self.strategy_logging:
            print("===Strategy level arguments===")
            print("fast_period :", self.fast_period)
            print("slow_period :", self.slow_period)
            print("signal_period :", self.signal_period)
            print("sma_period :", self.sma_period)
            print("dir_period :", self.dir_period)
        macd_ind = bt.ind.MACD(
            period_me1=self.fast_period,
            period_me2=self.slow_period,
            period_signal=self.signal_period,
        )
        self.macd = macd_ind.macd
        self.signal = macd_ind.signal
        self.crossover = bt.ind.CrossOver(
            self.macd, self.signal
        )  # crossover buy signal

        # Control market trend
        # smadir is positive when the SMA rose over the last `dir_period`
        # bars and negative when it fell.
        self.sma = bt.indicators.SMA(period=self.sma_period)
        self.smadir = self.sma - self.sma(-self.dir_period)

    def buy_signal(self):
        # Buy if the macd line cross the signal line to the upside
        # and a control Simple Moving Average has had a net negative
        # direction in the last x periods

        return self.crossover > 0 and self.smadir < 0.0

    def sell_signal(self):
        # Sell on the opposite cross while the control SMA trends upward.
        return self.crossover < 0 and self.smadir > 0.0
class RSIStrategy(BaseStrategy):
    """
    Relative Strength Index (RSI) trading strategy.

    Buys when the RSI falls below `rsi_lower` (asset considered
    "oversold") and sells when it rises above `rsi_upper` (asset
    considered "overbought").

    Parameters
    ----------
    rsi_period : int
        Period used as basis in computing RSI
    rsi_upper : int
        The RSI upper limit, above which the asset is sold
    rsi_lower : int
        The RSI lower limit, below which the asset is bought
    """

    params = (("rsi_period", 14), ("rsi_upper", 70), ("rsi_lower", 30))

    def __init__(self):
        super().__init__()
        # Pull strategy-level parameters into attributes.
        self.rsi_period = self.params.rsi_period
        self.rsi_upper = self.params.rsi_upper
        self.rsi_lower = self.params.rsi_lower

        if self.strategy_logging:
            print("===Strategy level arguments===")
            print("rsi_period :", self.rsi_period)
            print("rsi_upper :", self.rsi_upper)
            print("rsi_lower :", self.rsi_lower)
        # The upper/lower bands here only affect plotting; the signals
        # below compare against the same limits explicitly.
        self.rsi = bt.indicators.RelativeStrengthIndex(
            period=self.rsi_period,
            upperband=self.rsi_upper,
            lowerband=self.rsi_lower,
        )

    def buy_signal(self):
        # RSI below the lower band -> oversold -> buy.
        return self.rsi_lower > self.rsi[0]

    def sell_signal(self):
        # RSI above the upper band -> overbought -> sell.
        return self.rsi_upper < self.rsi[0]
class SentimentDF(bt.feeds.PandasData):
    # Pandas data feed extended with a per-bar 'sentiment_score' line so the
    # Sentiment indicator/strategy can read sentiment values alongside OHLCV.

    # Add a 'sentiment_score' line to the inherited ones from the base class
    lines = ("sentiment_score",)

    # automatically handle parameter with -1 (auto-detect the column by name)
    # add the parameter to the parameters inherited from the base class
    params = (("sentiment_score", -1),)
18 | 19 | Usage can be: 20 | - Fixed input size, fixed test size 21 | >>> walk_forward_split(X, train_size=20, test_size=20) 22 | 23 | - Fized input size, fixed number of splits 24 | >>> for train_index, test_index in walk_forward_split(X, train_size=20, n_splits=3) 25 | 26 | - Training size proportion, fixed number of splits 27 | >>> walk_forward_split(X, train_size=0.5, n_splits=3) 28 | 29 | Parameters 30 | ----- 31 | X : array-like of shape (n_samples, n_features) 32 | train_size : float or int 33 | Minimum samples for the training data, if float is given, it will set the ratio between training size and testing size. 34 | When train_size is an integer, either n_splits or test_size is required 35 | test_size : int (default: None) 36 | Number of samples in test data. Ignored if train_size is float 37 | n_splits: int (default : 3) 38 | Number of splits to generate. Ignored if number of training data and testing data is specified 39 | mode : str {"sliding", "expanding"} (default : "sliding") 40 | Whether the training data size is fixed ("sliding") or the training data always starts at the begining ("expanding") 41 | training_overlap_size : int, (default:0) 42 | Number of items from the training indices to add to the test indices. This is unaffected by the `test_size` parameter. 43 | Note that increasing the data added from the training data to the test data increases data leakage. 
44 | 45 | Returns 46 | ------- 47 | train_ix, test_ix : tuple (generator) 48 | indices of the training data and the testing data 49 | 50 | Example 51 | 52 | >>> X = np.arange(100) 53 | >>> for train_index, test_index in walk_forward_split(X, train_size=20, test_size=20,mode='expanding'): 54 | print("\nTRAIN:",train_index.shape, train_index, "\nTEST: ", test_index.shape ,test_index) 55 | ``` 56 | TRAIN: (20,) [ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19] 57 | TEST: (20,) [20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39] 58 | 59 | TRAIN: (40,) [ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 60 | 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39] 61 | TEST: (20,) [40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59] 62 | 63 | TRAIN: (60,) [ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 64 | 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 65 | 48 49 50 51 52 53 54 55 56 57 58 59] 66 | TEST: (20,) [60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79] 67 | 68 | TRAIN: (80,) [ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 69 | 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 70 | 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 71 | 72 73 74 75 76 77 78 79] 72 | TEST: (20,) [80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99] 73 | 74 | """ 75 | n_samples = _num_samples(X) 76 | indices = np.arange(n_samples) 77 | 78 | if type(train_size) == float and n_splits is not None: 79 | 80 | # Determine number of folds based on train_size and n_split 81 | window_folds = ( 82 | max(train_size, 1 - train_size) // min(train_size, 1 - train_size) 83 | ) + 1 84 | test_folds = window_folds * (1 - train_size) 85 | train_folds = window_folds - test_folds 86 | total_folds = train_folds + test_folds * n_splits 87 | 88 | fold_size = n_samples / total_folds # size of the fold wrt n_samples 89 | window_size = math.ceil(fold_size * 
window_folds) 90 | 91 | train_size = int(train_size * window_size) 92 | test_size = window_size - train_size 93 | 94 | if test_size is None and type(train_size) != float: 95 | test_size = math.ceil((n_samples - train_size) / n_splits) 96 | 97 | if test_size < 1 or train_size < 1: 98 | raise ValueError("Not enough data. Adjust training size or n_split") 99 | 100 | for test_start in range(train_size, n_samples, test_size): 101 | if mode == "sliding": 102 | train_ix = indices[test_start - train_size : test_start] 103 | test_ix = indices[ 104 | test_start - training_overlap_size : test_start + test_size 105 | ] 106 | elif mode == "expanding": 107 | train_ix = indices[:test_start] 108 | test_ix = indices[ 109 | test_start - training_overlap_size : test_start + test_size 110 | ] 111 | 112 | yield train_ix, test_ix 113 | -------------------------------------------------------------------------------- /python/images/addtl_steps.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/enzoampil/fastquant/805c4440bf96ba04cfd43aaf4926e4b45f3c3f33/python/images/addtl_steps.PNG -------------------------------------------------------------------------------- /python/images/app_name.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/enzoampil/fastquant/805c4440bf96ba04cfd43aaf4926e4b45f3c3f33/python/images/app_name.PNG -------------------------------------------------------------------------------- /python/images/create_app.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/enzoampil/fastquant/805c4440bf96ba04cfd43aaf4926e4b45f3c3f33/python/images/create_app.PNG -------------------------------------------------------------------------------- /python/images/details.PNG: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/enzoampil/fastquant/805c4440bf96ba04cfd43aaf4926e4b45f3c3f33/python/images/details.PNG -------------------------------------------------------------------------------- /python/images/generate_keys_tokens.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/enzoampil/fastquant/805c4440bf96ba04cfd43aaf4926e4b45f3c3f33/python/images/generate_keys_tokens.PNG -------------------------------------------------------------------------------- /python/images/keys_tokens_bar.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/enzoampil/fastquant/805c4440bf96ba04cfd43aaf4926e4b45f3c3f33/python/images/keys_tokens_bar.PNG -------------------------------------------------------------------------------- /python/images/landing_page.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/enzoampil/fastquant/805c4440bf96ba04cfd43aaf4926e4b45f3c3f33/python/images/landing_page.PNG -------------------------------------------------------------------------------- /python/images/website_url.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/enzoampil/fastquant/805c4440bf96ba04cfd43aaf4926e4b45f3c3f33/python/images/website_url.PNG -------------------------------------------------------------------------------- /python/pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.black] 2 | line-length = 79 3 | include = '\.pyi?$' 4 | exclude = ''' 5 | /( 6 | \.git 7 | | \.hg 8 | | \.mypy_cache 9 | | \.tox 10 | | \.venv 11 | | _build 12 | | buck-out 13 | | build 14 | | dist 15 | )/ 16 | ''' 17 | -------------------------------------------------------------------------------- /python/requirements.txt: 
#!/usr/bin/env python3
"""
Created on Tue Apr 5, 2020
@author: jpdeleon

download parsed company disclosures issued between specified dates
"""
import os
import sys
import argparse

from pathlib import Path
import matplotlib.pyplot as pl

from fastquant import DisclosuresPSE
from fastquant import DATA_PATH

# Command-line interface: one positional company symbol plus optional
# date-range, output, and plotting flags.
parser = argparse.ArgumentParser(
    description="download company disclosures data"
)
parser.add_argument("symbol", type=str, help="company symbol")
parser.add_argument(
    "-t1",
    "--start_date",
    type=str,
    help="start date (default=1-1-2020)",
    default="1-1-2020",
)
parser.add_argument(
    "-t2",
    "--end_date",
    type=str,
    help="end date (default=TODAY)",
    default=None,
)
parser.add_argument(
    "-v", "--verbose", action="store_true", help="show details", default=False
)
parser.add_argument(
    "-c",
    "--clobber",
    action="store_true",
    help="clobber/overwrite",
    default=False,
)
parser.add_argument(
    "-p", "--plot", action="store_true", help="show plot", default=False
)
parser.add_argument(
    "-o", "--outdir", type=str, help="output directory", default=DATA_PATH
)
parser.add_argument(
    "-s",
    "--savefig",
    action="store_true",
    help="save disclosures figure",
    default=False,
)
parser.add_argument(
    "-d",
    "--disclosure_type",
    type=str,
    help="disclosure type (default=all)",
    default="all",
)

# print help if no arguments supplied
args = parser.parse_args(None if sys.argv[1:] else ["-h"])
if __name__ == "__main__":
    # Fetch (or reuse cached, unless --clobber) disclosures for the
    # requested symbol and date range.
    cd = DisclosuresPSE(
        args.symbol,
        start_date=args.start_date,
        end_date=args.end_date,
        disclosure_type=args.disclosure_type,
        verbose=args.verbose,
        clobber=args.clobber,
    )
    # Ensure the output directory exists before any figure is saved.
    if not Path(args.outdir).exists():
        os.makedirs(args.outdir)

    # The figure is always built; it is only displayed and/or written to
    # disk when the corresponding flags are set.
    fig = cd.plot_disclosures(disclosure_type=args.disclosure_type)
    if args.plot:
        pl.show()

    if args.savefig:
        fp = Path(args.outdir, "{}_disclosures.png".format(cd.symbol))
        fig.savefig(fp)
#!/usr/bin/env python
"""
Downloads stock data of all symbols in company_names.csv
"""
import argparse
from fastquant.data.stocks.pse import update_pse_data_cache

# CLI entry point (installed via setup.py "scripts"); refreshes the local
# PSE stock-data cache from --start_date up to the present.
parser = argparse.ArgumentParser(
    description="update pse stock data cache"
)
parser.add_argument("--start_date", type=str, help="date in YYYY-MM-DD (default=2010-01-01)", default="2010-01-01")
args = parser.parse_args()
# verbose=True so progress is visible when run from a terminal or cron job.
update_pse_data_cache(start_date=args.start_date, verbose=True)
import pandas as pd
from fastquant import (
    get_pse_data,
    get_yahoo_data,
    get_stock_data,
    get_crypto_data,
    pse_data_to_csv,
)

PHISIX_SYMBOL = "JFC"
CRYPTO_SYMBOL = "BTC/USDT"
MSFT_SYMBOL = "MSFT"
YAHOO_SYMBOL = "GOOGL"
DATE_START = "2018-01-01"
DATE_END = "2019-01-01"
MSFT_SYMBOL_START = "2020-10-01"
MSFT_SYMBOL_STOP = "2020-12-31"


def test_get_pse_data():
    """PSE data for a PH symbol loads as a DataFrame (close-only format)."""
    df = get_pse_data(PHISIX_SYMBOL, DATE_START, DATE_END, format="c")
    assert isinstance(df, pd.DataFrame)


# Unused functions which haven't been tested since newest yfinance update
# def test_get_yahoo_data():
#     stock_df = get_yahoo_data(YAHOO_SYMBOL, DATE_START, DATE_END)
#     assert isinstance(stock_df, pd.DataFrame)

# def test_get_yahoo_data_dividend():
#     stock_df = get_yahoo_data(
#         MSFT_SYMBOL, MSFT_SYMBOL_START, MSFT_SYMBOL_STOP, dividends=True
#     )
#     assert isinstance(stock_df, pd.DataFrame)


def test_get_stock_data():
    """get_stock_data returns a DataFrame for each supported source."""
    # Explicit phisix source with a PSE symbol
    df = get_stock_data(PHISIX_SYMBOL, DATE_START, DATE_END, source="phisix")
    assert isinstance(df, pd.DataFrame)

    # stock_df = get_stock_data(YAHOO_SYMBOL, DATE_START, DATE_END, source="yahoo")
    # assert isinstance(stock_df, pd.DataFrame)

    # Falls back to yahoo when the (default) phisix source fails on a non-PSE symbol
    df = get_stock_data(YAHOO_SYMBOL, DATE_START, DATE_END)
    assert isinstance(df, pd.DataFrame)


def test_get_crypto_data():
    """Crypto OHLCV data loads as a DataFrame across supported exchanges."""
    from fastquant import CRYPTO_EXCHANGES

    exchange_pairs = {
        "binance": "BTC/BUSD",
        "coinbasepro": "BTC/USD",
        "bithumb": "XRP/KRW",
        "kraken": "BTC/USD",
        "kucoin": "BTC/USDT",
        "bitstamp": "BTC/USD",
    }

    for exchange in CRYPTO_EXCHANGES:
        # Github actions for fastquant uses a US server, which doesn't have
        # access to binance; using binance elsewhere works without issues.
        if exchange == 'binance':
            continue
        df = get_crypto_data(
            exchange_pairs[exchange], DATE_START, DATE_END, exchange=exchange
        )
        assert isinstance(df, pd.DataFrame), exchange
import pandas as pd
import numpy as np
import pickle
from pathlib import Path
from datetime import datetime
from fastquant import (
    backtest,
    STRATEGY_MAPPING,
    DATA_PATH,
    get_yahoo_data,
    get_stock_data,
    get_bt_news_sentiment,
    get_disclosure_sentiment,
)

# Cached fixtures bundled with the package so the tests do not scrape the
# web on every run.
SENTI_PKL = Path(DATA_PATH, "bt_sentiments_tests.pkl")
DISCLOSURE_PKL = Path(DATA_PATH, "senti_disclosures.pkl")
SAMPLE_CSV = Path(DATA_PATH, "JFC_20180101_20190110_DCV.csv")
# Per-strategy parameter grids used by the multi-strategy test.
SAMPLE_STRAT_DICT = {
    "smac": {"fast_period": 35, "slow_period": [40, 50]},
    "rsi": {"rsi_lower": [15, 30], "rsi_upper": 70},
}


def test_backtest():
    """
    Ensures that the backtest function works on all the registered strategies, with their default parameter values
    """
    sample = pd.read_csv(SAMPLE_CSV, parse_dates=["dt"])
    # Simulate custom indicator
    sample["custom"] = np.random.random((sample.shape[0],)) * 100

    for strategy in STRATEGY_MAPPING.keys():
        # The sentiment strategy needs price data plus a sentiment series;
        # all other strategies run on the bundled OHLCV sample as-is.
        if strategy == "sentiment":
            data = get_yahoo_data("TSLA", "2020-01-01", "2020-07-04", dividends=True)
            # use cached data instead of scraping for tests purposes.
            # sentiments = get_bt_news_sentiment(keyword="tesla", page_nums=2)
            with open(SENTI_PKL, "rb") as handle:
                sentiments = pickle.load(handle)
            cerebro = backtest(
                strategy, data, sentiments=sentiments, senti=0.4, plot=False
            )
            errmsg = "Backtest encountered error for strategy '{}'!".format(strategy)
            assert cerebro is not None, errmsg

            # Second sentiment run: disclosure-based sentiment scores.
            data_disclosures = get_stock_data(
                "TSLA",
                "2020-01-01",
                "2020-09-30",
                dividends=True,  # source="phisix"
            )

            # sentiments_disclosures = get_disclosure_sentiment(
            #     stock_code="JFC",
            #     start_date="2020-07-01",
            #     end_date="2020-09-30",
            # )

            # Cached pickle stands in for the scraped disclosure sentiment.
            with open(DISCLOSURE_PKL, "rb") as handle_disclosures:
                sentiments_disclosures = pickle.load(handle_disclosures)

            cerebro_disclosures = backtest(
                strategy,
                data_disclosures,
                sentiments=sentiments_disclosures,
                senti=0.2,
                plot=False,
            )
            errmsg_disclosures = "Backtest encountered error for strategy '{}'!".format(
                strategy
            )
            assert cerebro_disclosures is not None, errmsg_disclosures

        else:
            cerebro = backtest(strategy, sample, plot=False)
            errmsg = "Backtest encountered error for strategy '{}'!".format(strategy)
            assert cerebro is not None, errmsg


def test_multi_backtest():
    """
    Test multi-strategy
    """
    sample = pd.read_csv(SAMPLE_CSV, parse_dates=["dt"])
    cerebro = backtest("multi", sample, strats=SAMPLE_STRAT_DICT, plot=False)
    assert cerebro is not None, "Backtest encountered error for strategy 'multi'!"


def test_grid_backtest():
    """
    Test grid search
    """
    sample = pd.read_csv(SAMPLE_CSV, parse_dates=["dt"])
    # Ranges for fast/slow periods trigger grid-search mode in backtest().
    cerebro = backtest(
        "smac",
        sample,
        fast_period=range(15, 30, 3),
        slow_period=range(40, 55, 3),
        plot=False,
    )
    assert cerebro is not None, "Backtest encountered error doing grid search on SMAC!"
import setuptools

# Read with an explicit encoding so the build does not depend on the
# platform default locale (e.g. cp1252 on Windows fails on non-ASCII
# characters in the README).
with open("README.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()
with open("python/requirements.txt", "r", encoding="utf-8") as fh:
    # Keep only real requirement specifiers: requirements.txt contains
    # comment lines (e.g. "# flammkuchen") and may contain blanks, which
    # are not valid install_requires entries.
    install_requires = [
        line.strip()
        for line in fh.read().splitlines()
        if line.strip() and not line.strip().startswith("#")
    ]

setuptools.setup(
    name="fastquant",
    version="0.1.8.1",
    author="Lorenzo Ampil",
    author_email="lorenzo.ampil@gmail.com",
    description="Bringing data driven investments to the mainstream",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/enzoampil/fastquant",
    # Package code lives under ./python; docs and tests are excluded.
    packages=setuptools.find_packages(where="./python", exclude=["docs", "tests"]),
    package_dir={"": "python"},
    package_data={"fastquant": ["data/*"]},
    include_package_data=True,
    # Standalone CLI entry points installed onto the user's PATH.
    scripts=["python/scripts/get_disclosures", "python/scripts/update_cache"],
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
        "Operating System :: OS Independent",
    ],
    install_requires=install_requires,
)