├── .github └── workflows │ ├── release.yml │ ├── test_release.yml │ └── tests.yml ├── .gitignore ├── Dockerfile ├── LICENSE ├── README.md ├── dev-readme.md ├── docker-compose.yml ├── pyproject.toml ├── release_log.txt ├── src └── tradinghours │ ├── __init__.py │ ├── client.py │ ├── config.py │ ├── console.py │ ├── currency.py │ ├── exceptions.py │ ├── market.py │ ├── models.py │ ├── store.py │ ├── util.py │ └── validate.py ├── test_and_release_readme.txt └── tests ├── __init__.py ├── conftest.py ├── test_access.py ├── test_generate_phases.py ├── test_market.py ├── test_models.py ├── test_readme.py ├── test_remote.py ├── test_schedules.py ├── test_timezone.py ├── test_util.py ├── test_validate.py └── utils.py /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | 8 | jobs: 9 | release: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - name: Check out code 13 | uses: actions/checkout@v2 14 | 15 | - name: Set up Python 16 | uses: actions/setup-python@v2 17 | with: 18 | python-version: '3.13' # Specify your Python version here 19 | 20 | - name: Version and Summary 21 | id: update_info 22 | run: | 23 | VERSION=$(grep '__version__' src/tradinghours/__init__.py | cut -d '"' -f 2) 24 | echo "version=${VERSION}" >> "$GITHUB_OUTPUT" 25 | 26 | line_number=$(grep -n '^## ' "release_log.txt" | head -2 | tail -1 | cut -d: -f1) 27 | SUMMARY="$(sed -n "2,$(( ${line_number} -1))p" release_log.txt)" 28 | echo "${SUMMARY}" 29 | 30 | echo "summary<<EOF" >> $GITHUB_OUTPUT 31 | echo "${SUMMARY}" >> $GITHUB_OUTPUT 32 | echo "EOF" >> $GITHUB_OUTPUT 33 | 34 | sed -i "s/## new_release/## ${VERSION} ($(date +%Y-%m-%d))/" release_log.txt 35 | 36 | - uses: stefanzweifel/git-auto-commit-action@v4 37 | with: 38 | file_pattern: release_log.txt 39 | commit_message: '[GH-Actions] v${{ steps.update_info.outputs.version }} -- updated release_log.txt' 40 | 41 | - name: Build and 
publish package using Flit 42 | run: | 43 | pip install flit 44 | 45 | export FLIT_USERNAME=__token__ 46 | export FLIT_PASSWORD=${{ secrets.PYPI_TOKEN }} 47 | 48 | flit publish 49 | 50 | - name: Create Release 51 | uses: softprops/action-gh-release@v1 52 | with: 53 | name: ${{ steps.update_info.outputs.version }} 54 | tag_name: ${{ steps.update_info.outputs.version }} 55 | body: | 56 | ${{ steps.update_info.outputs.summary }} 57 | 58 | -------------------------------------------------------------------------------- /.github/workflows/test_release.yml: -------------------------------------------------------------------------------- 1 | name: Testing Release 2 | 3 | on: 4 | pull_request: 5 | branches: 6 | - main 7 | 8 | jobs: 9 | push_to_test_pypi: 10 | runs-on: ubuntu-latest 11 | outputs: 12 | dev_version: ${{ steps.current_version.outputs.dev_version }} 13 | steps: 14 | - name: Check out code 15 | uses: actions/checkout@v2 16 | 17 | - name: Set up Python 18 | uses: actions/setup-python@v2 19 | with: 20 | python-version: '3.13' 21 | 22 | - name: get current version 23 | id: current_version 24 | run: | # checks if __init__.__version__ is incremented, and then creates a DEV_VERSION for test.pypi 25 | PYPI_VERSION=$(pip index versions tradinghours | cut -d'(' -f2 | cut -d')' -f1 | sed 1q) 26 | NEW_VERSION=$(grep '__version__' src/tradinghours/__init__.py | cut -d '"' -f 2) 27 | echo "comparing $NEW_VERSION with $PYPI_VERSION" 28 | if [[ $NEW_VERSION == $PYPI_VERSION ]]; then 29 | echo "Version needs to be incremented." 
30 | exit 1 31 | fi 32 | 33 | DEV_VERSION=${NEW_VERSION}rc${{github.run_number}}.dev${{github.run_attempt}} 34 | echo "dev_version=${DEV_VERSION}" >> "$GITHUB_OUTPUT" 35 | 36 | echo "setting version to $DEV_VERSION" 37 | sed -i "s/$NEW_VERSION/$DEV_VERSION/" src/tradinghours/__init__.py 38 | 39 | - name: Install dependencies 40 | run: pip install flit 41 | 42 | - name: release to test pypi 43 | run: | 44 | export FLIT_USERNAME=__token__ 45 | export FLIT_PASSWORD=${{ secrets.TEST_PYPI_TOKEN }} 46 | export FLIT_INDEX_URL=https://test.pypi.org/legacy/ 47 | flit publish 48 | 49 | test_from_test_pypi: 50 | needs: push_to_test_pypi 51 | runs-on: ubuntu-latest 52 | steps: 53 | - name: Set up Python 54 | uses: actions/setup-python@v2 55 | with: 56 | python-version: '3.13' # Ensure this matches your project's Python version 57 | 58 | - name: get from test pypi 59 | run: | 60 | DEV_VERSION=${{ needs.push_to_test_pypi.outputs.dev_version }} 61 | echo "$DEV_VERSION" 62 | while [ "$DEV_VERSION" != "$(pip index versions -i https://test.pypi.org/simple --pre tradinghours | cut -d'(' -f2 | cut -d')' -f1 | sed 1q)" ];\ 63 | do echo not found yet, sleeping 5s; sleep 5s; done 64 | pip install tradinghours==$DEV_VERSION --extra-index-url https://test.pypi.org/simple/ 65 | 66 | - name: run import tests 67 | run: | 68 | pip show tradinghours 69 | 70 | export TRADINGHOURS_TOKEN=${{ secrets.TEST_KEY }} 71 | 72 | tradinghours import 73 | tradinghours status --extended 74 | 75 | python -c "from tradinghours import Market, Currency; m = Market.get('XNYS'); m.list_holidays('2020-01-01', '2025-01-01');m.generate_phases('2024-01-01', '2024-01-04'); c = Currency.get('USD');c.list_holidays('2020-01-01', '2025-01-01');" 76 | 77 | - name: Check out code 78 | uses: actions/checkout@v2 79 | 80 | - name: run all tests 81 | run: | 82 | pip install pytest pytest-mock 83 | 84 | export TRADINGHOURS_TOKEN=${{ secrets.TEST_KEY }} 85 | 86 | tradinghours status --extended 87 | 88 | pytest tests 89 | 
-------------------------------------------------------------------------------- /.github/workflows/tests.yml: -------------------------------------------------------------------------------- 1 | name: Testing and Coverage 2 | 3 | on: 4 | pull_request: 5 | branches: 6 | - pre-release 7 | 8 | jobs: 9 | coverage: 10 | runs-on: ubuntu-latest 11 | 12 | steps: 13 | - name: Set up Python 14 | uses: actions/setup-python@v4 15 | with: 16 | python-version: "3.13" 17 | 18 | - name: Checkout code 19 | uses: actions/checkout@v4 20 | 21 | - name: Install dependencies 22 | run: pip install -e ".[dev]" 23 | 24 | - name: Install pytest-cov 25 | run: pip install pytest-cov 26 | 27 | - name: Set up test data 28 | run: | 29 | export TRADINGHOURS_TOKEN=${{ secrets.TEST_KEY }} 30 | tradinghours import 31 | 32 | - name: Run Tests with Coverage (SQLite) 33 | run: pytest --cov=src/ tests 34 | 35 | - name: Report Coverage 36 | run: | 37 | coverage report 38 | coverage xml 39 | 40 | # Run Tests for MySQL and PostgreSQL 41 | databases: 42 | runs-on: ubuntu-latest 43 | services: 44 | mysql: 45 | image: mysql:8.4 46 | env: 47 | MYSQL_ROOT_PASSWORD: root 48 | MYSQL_DATABASE: test_db 49 | ports: 50 | - 3306:3306 51 | options: >- 52 | --health-cmd="mysqladmin ping --silent" 53 | --health-interval=10s 54 | --health-timeout=5s 55 | --health-retries=5 56 | 57 | postgres: 58 | image: postgres:15.8 59 | env: 60 | POSTGRES_USER: postgres 61 | POSTGRES_PASSWORD: password 62 | POSTGRES_DB: test_db 63 | ports: 64 | - 5432:5432 65 | options: >- 66 | --health-cmd="pg_isready -U postgres" 67 | --health-interval=10s 68 | --health-timeout=5s 69 | --health-retries=5 70 | 71 | strategy: 72 | matrix: 73 | db: [mysql, postgres] 74 | 75 | steps: 76 | - name: Set up Python 77 | uses: actions/setup-python@v4 78 | with: 79 | python-version: "3.13" 80 | 81 | - name: Checkout code 82 | uses: actions/checkout@v4 83 | 84 | - name: Install dependencies 85 | run: | 86 | pip install pytest pytest-mock 87 | pip install -e 
".[${{ matrix.db }}]" 88 | 89 | - name: Wait for DB to be ready 90 | run: | 91 | if [ "${{ matrix.db }}" == "mysql" ]; then 92 | while ! mysqladmin ping -h "127.0.0.1" --silent; do 93 | sleep 1 94 | done 95 | elif [ "${{ matrix.db }}" == "postgres" ]; then 96 | while ! pg_isready -h "127.0.0.1" -U postgres; do 97 | sleep 1 98 | done 99 | fi 100 | 101 | - name: Set up test data 102 | run: | 103 | export TRADINGHOURS_TOKEN=${{ secrets.TEST_KEY }} 104 | if [ "${{ matrix.db }}" == "mysql" ]; then 105 | export TH_DB_URL=mysql+pymysql://root:root@127.0.0.1:3306/test_db 106 | elif [ "${{ matrix.db }}" == "postgres" ]; then 107 | export TH_DB_URL=postgresql://postgres:password@127.0.0.1:5432/test_db 108 | fi 109 | tradinghours import 110 | 111 | - name: Run Tests (MySQL or PostgreSQL) 112 | run: | 113 | if [ "${{ matrix.db }}" == "mysql" ]; then 114 | export TH_DB_URL=mysql+pymysql://root:root@127.0.0.1:3306/test_db 115 | elif [ "${{ matrix.db }}" == "postgres" ]; then 116 | export TH_DB_URL=postgresql://postgres:password@127.0.0.1:5432/test_db 117 | fi 118 | pytest tests 119 | 120 | os-versions-api-levels: 121 | runs-on: ${{ matrix.os }} 122 | strategy: 123 | fail-fast: false 124 | matrix: 125 | os: [ubuntu-latest, windows-latest, macos-latest] 126 | level: [full, no_currencies, only_holidays] 127 | 128 | steps: 129 | - name: Set up Python 130 | uses: actions/setup-python@v4 131 | with: 132 | python-version: 3.11 133 | 134 | - name: Checkout code 135 | uses: actions/checkout@v4 136 | 137 | - name: Install dependencies 138 | run: | 139 | pip install pytest pytest-mock 140 | pip install -e . 
141 | 142 | - name: Set up test data 143 | shell: bash 144 | run: | 145 | if [ "${{ matrix.level }}" = "full" ]; then 146 | TRADINGHOURS_TOKEN=${{ secrets.TEST_KEY }} 147 | elif [ "${{ matrix.level }}" = "no_currencies" ]; then 148 | TRADINGHOURS_TOKEN=${{ secrets.TEST_KEY_NO_CURRENCIES }} 149 | elif [ "${{ matrix.level }}" = "only_holidays" ]; then 150 | TRADINGHOURS_TOKEN=${{ secrets.TEST_KEY_HOLIDAYS_ONLY }} 151 | fi 152 | export TRADINGHOURS_TOKEN 153 | 154 | tradinghours import 155 | tradinghours status --extended 156 | 157 | - name: Run New Tests 158 | shell: bash 159 | run: | 160 | export API_KEY_LEVEL=${{ matrix.level }} 161 | pytest tests 162 | 163 | python-versions: 164 | runs-on: ubuntu-latest 165 | needs: coverage 166 | 167 | strategy: 168 | matrix: 169 | python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] 170 | 171 | steps: 172 | - name: Set up Python 173 | uses: actions/setup-python@v4 174 | with: 175 | python-version: ${{ matrix.python-version }} 176 | 177 | - name: Checkout code 178 | uses: actions/checkout@v4 179 | 180 | - name: Install dependencies 181 | run: pip install -e ".[dev]" 182 | 183 | - name: Set up test data 184 | run: | 185 | export TRADINGHOURS_TOKEN=${{ secrets.TEST_KEY }} 186 | tradinghours import 187 | 188 | - name: Run Tests 189 | run: pytest tests 190 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | schema.sql 2 | .vscode/ 3 | .idea/ 4 | data/ 5 | store_dir/ 6 | other/ 7 | .DS_Store 8 | debug.txt 9 | *.zip 10 | old_tests/local 11 | old_tests/fixtures/remote 12 | tests/local 13 | tests/fixtures/remote 14 | *.db 15 | tradinghours.ini 16 | 17 | # Byte-compiled / optimized / DLL files 18 | __pycache__/ 19 | *.py[cod] 20 | *$py.class 21 | 22 | # C extensions 23 | *.so 24 | 25 | # Distribution / packaging 26 | .Python 27 | build/ 28 | develop-eggs/ 29 | dist/ 30 | downloads/ 31 | eggs/ 32 | .eggs/ 33 | 
lib/ 34 | lib64/ 35 | parts/ 36 | sdist/ 37 | var/ 38 | wheels/ 39 | share/python-wheels/ 40 | *.egg-info/ 41 | .installed.cfg 42 | *.egg 43 | MANIFEST 44 | 45 | # PyInstaller 46 | # Usually these files are written by a python script from a template 47 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 48 | *.manifest 49 | *.spec 50 | 51 | # Installer logs 52 | pip-log.txt 53 | pip-delete-this-directory.txt 54 | 55 | # Unit test / coverage reports 56 | htmlcov/ 57 | .tox/ 58 | .nox/ 59 | .coverage 60 | .coverage.* 61 | .cache 62 | nosetests.xml 63 | coverage.xml 64 | *.cover 65 | *.py,cover 66 | .hypothesis/ 67 | .pytest_cache/ 68 | cover/ 69 | 70 | # Translations 71 | *.mo 72 | *.pot 73 | 74 | # Django stuff: 75 | *.log 76 | local_settings.py 77 | db.sqlite3 78 | db.sqlite3-journal 79 | 80 | # Flask stuff: 81 | instance/ 82 | .webassets-cache 83 | 84 | # Scrapy stuff: 85 | .scrapy 86 | 87 | # Sphinx documentation 88 | docs/_build/ 89 | 90 | # PyBuilder 91 | .pybuilder/ 92 | target/ 93 | 94 | # Jupyter Notebook 95 | .ipynb_checkpoints 96 | 97 | # IPython 98 | profile_default/ 99 | ipython_config.py 100 | 101 | # pyenv 102 | # For a library or package, you might want to ignore these files since the code is 103 | # intended to run in multiple environments; otherwise, check them in: 104 | # .python-version 105 | 106 | # pipenv 107 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 108 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 109 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 110 | # install all needed dependencies. 111 | #Pipfile.lock 112 | 113 | # poetry 114 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 115 | # This is especially recommended for binary packages to ensure reproducibility, and is more 116 | # commonly ignored for libraries. 
117 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 118 | #poetry.lock 119 | 120 | # pdm 121 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 122 | #pdm.lock 123 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 124 | # in version control. 125 | # https://pdm.fming.dev/#use-with-ide 126 | .pdm.toml 127 | 128 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 129 | __pypackages__/ 130 | 131 | # Celery stuff 132 | celerybeat-schedule 133 | celerybeat.pid 134 | 135 | # SageMath parsed files 136 | *.sage.py 137 | 138 | # Environments 139 | *.env 140 | .venv 141 | env/ 142 | venv/ 143 | ENV/ 144 | env.bak/ 145 | venv.bak/ 146 | 147 | # Spyder project settings 148 | .spyderproject 149 | .spyproject 150 | 151 | # Rope project settings 152 | .ropeproject 153 | 154 | # mkdocs documentation 155 | /site 156 | 157 | # mypy 158 | .mypy_cache/ 159 | .dmypy.json 160 | dmypy.json 161 | 162 | # Pyre type checker 163 | .pyre/ 164 | 165 | # pytype static type analyzer 166 | .pytype/ 167 | 168 | # Cython debug symbols 169 | cython_debug/ 170 | 171 | # PyCharm 172 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 173 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 174 | # and can be added to the global gitignore or merged into this file. For a more nuclear 175 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
176 | #.idea/ 177 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # Use an official Ubuntu base image 2 | FROM ubuntu:latest 3 | 4 | # Install Python and pip 5 | RUN apt-get update && \ 6 | apt-get install -y python3 python3-pip && \ 7 | apt-get clean && \ 8 | rm -rf /var/lib/apt/lists/* 9 | 10 | # Set the working directory inside the container to /app 11 | WORKDIR /app 12 | 13 | # Copy the current directory contents into the container at /app 14 | COPY . /app 15 | 16 | # Install development dependencies 17 | RUN pip3 install -e ".[dev]" 18 | 19 | CMD ["/bin/bash"] 20 | 21 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | SOURCE CODE LICENSE AGREEMENT 2 | 3 | Copyright (c) 2015-2024 TradingHours.com (https://tradinghours.com) 4 | 5 | You acknowledge that you have read this Source Code License Agreement (the "Agreement") and agree to all its terms and conditions. By accessing and using the Source Code and Product, you agree to be bound by this Agreement. If you do not agree to abide by the terms of this Agreement, you are not authorized to access or use the Source Code and Product. 6 | 7 | 1. DEFINITIONS 8 | 1.1 "Product" shall mean the data products offered by TradingHours.com. 9 | 1.2 "Source Code" means, as it pertains to the Product, those statements in a computer language which, when processed by a compiler, assembler or interpreter, become executable by a computer and includes, without limitation, all comments, notes, flow charts, decision tables, argument lists and other human readable references relating to the operation, purpose, compilation, assembly or interpretation thereof. 10 | 11 | 2. 
LICENSE GRANT 12 | TradingHours.com grants to Client, under all of TradingHours.com's intellectual property rights, a nonexclusive, nontransferable, irrevocable, perpetual, worldwide, royalty-free license, to (directly or indirectly) use or modify the Source Code to view the Product (the "Purpose"). Client must have an active subscription with TradingHours.com in order to use the Source Code to view the Product as Client is required to have an API key to download, view, and use the Product. Client shall not utilize the Source Code for any reason other than for the Purpose. 13 | 14 | 3. OWNERSHIP; COPYRIGHT 15 | 3.1 As between the parties, TradingHours.com retains all title, and (except as expressly granted in this Agreement) all rights and interest in and to the Source Code, the format of TradingHours.com's data format, and any and all copies and portions thereof. Client shall not publish their own data in a format compatible with the Source Code. 16 | 3.2 In the event Client provides TradingHours.com with any suggested code changes to the Source Code, TradingHours.com shall be the exclusive owner of all rights and interest to such changes. TradingHours.com shall be the owner of any derivative works based on the Source Code. 17 | 3.3 Client shall reproduce and include any and all copyright notices and proprietary rights legends, as such notices and legends appear in the Product or Source Code, on any copy of the Product or Source Code, or portion thereof. 18 | 3.4 In the event Client provides any code to TradingHours.com repository, Client agrees that TradingHours.com shall be the owner of such code and shall own all title, rights, and interest in such code. 19 | 20 | 4. WARRANTY DISCLAIMER; LIMITATION OF LIABILITY 21 | 4.1 TRADINGHOURS.COM MAKES NO WARRANTIES OR REPRESENTATIONS AS TO THE SOURCE CODE. 
TRADINGHOURS.COM DISCLAIMS ALL WARRANTIES, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NON-INTERRUPTION OF USE AND FREEDOM FROM BUGS. 22 | 4.2 CLIENT ACKNOWLEDGES AND AGREES THAT THE CONSIDERATION DOES NOT INCLUDE ASSUMPTION BY TRADINGHOURS.COM OF THE RISK OF CLIENT'S CONSEQUENTIAL OR INCIDENTAL DAMAGES WHICH MAY ARISE IN CONNECTION WITH CLIENT'S USE OF THE PRODUCT OR SOURCE CODE. ACCORDINGLY, CLIENT AGREES THAT TRADINGHOURS.COM SHALL NOT BE RESPONSIBLE TO CLIENT FOR ANY LOSS OF PROFIT OR INDIRECT, INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE LICENSING OR USE OF THE PRODUCT OR SOURCE CODE. 23 | 24 | 5. EXPORT CONTROL LAWS 25 | The Product and Source Code are subject to U.S. export control laws and may be subject to export or import regulations in other countries. Client agrees to strictly comply with all such laws and regulations. 26 | 27 | 6. INDEMNIFICATION 28 | Client shall defend, indemnify and hold harmless TradingHours.com and its officers, directors, employees, shareholders, customers, agents, successors and assigns from and against any and all loss, damage, settlement or expense (including legal expenses), as incurred, resulting from or arising out of any third-party claim arising from Client's use of the Source Code. 29 | 30 | 7. TERM AND TERMINATION 31 | 7.1 Termination by TradingHours.com. This Agreement is effective until terminated. TradingHours.com may terminate this Agreement only in the event Client's material breach of this Agreement. 32 | 7.2 Exclusive Remedy. Termination is not an exclusive remedy and all other remedies will be available whether or not the Agreement is terminated. The terminating party shall not be liable for any charges, damages, obligations or other costs incurred as a result of termination. 33 | 34 | 8. ASSIGNMENT 35 | 8.1 TradingHours.com Assignment. 
TradingHours.com may assign this Agreement to a party that succeeds to all or substantially all the business or assets of TradingHours.com. 36 | 8.2 Client Assignment. Client shall not assign any right hereunder without TradingHours.com's written consent. 37 | 38 | 9. GENERAL 39 | 9.1 Notice. Notices and all other communications contemplated by this Agreement shall be in writing and shall be deemed to have been duly given when personally delivered or when mailed by U.S. certified mail, return receipt requested and postage prepaid. 40 | 9.2 Waiver. No provision of this Agreement shall be modified, waived or discharged unless the modification, waiver or discharge is agreed to in writing and signed by the appropriate parties. No waiver by either party or any breach of, or compliance with, any condition or provision of this Agreement by the other party shall be considered a waiver of any other condition or provision or of the same condition or provision at another time. 41 | 9.3 Governing Law. Any claim arising under or relating to this Agreement, shall be governed by the internal substantive laws of the State of Pennsylvania, without regard to principles of conflict of laws. Each party hereby agrees to jurisdiction and venue in the courts of the State of Pennsylvania or federal courts located in Pennsylvania for all disputes and litigation arising under or relating to this Agreement. 42 | 9.4 Independent Parties. TradingHours.com and Client are independent parties. Nothing in this Agreement shall be construed to make the parties partners or joint ventures or to make either party liable for the obligations, acts, omissions or activities of the other party. 43 | 9.5 Severability. The invalidity or unenforceability of any provision of this Agreement shall not affect the validity or enforceability of any other provision hereof, which shall remain in full force and effect. 44 | 9.6 Arbitration. 
Any controversy or claim arising out of or relating to this Agreement, or the breach thereof, shall be settled by arbitration in accordance with the Commercial Arbitration Rules of the American Arbitration Association. The arbitration hearing shall take place in Monroe County, Pennsylvania, and judgment on the award rendered by the arbitrator may be entered in any court having jurisdiction thereof. Notwithstanding the foregoing, in the event irreparable injury can be shown, either party may obtain injunctive relief exclusively in any court having jurisdiction thereof. 45 | 9.7 Injunctive Relief. Client acknowledges that a violation of the rights granted to it under this Agreement cannot be readily remedied in monetary damages in an action at law, and, accordingly, TradingHours.com will be entitled to seek and obtain equitable remedies from any court of competent jurisdiction, including, without limited to, to temporary or permanent injunctive relief, costs and reasonable attorneys' fees. 46 | 9.8 Attorneys' Fees. In any arbitration or litigation to enforce the terms of this Agreement, the prevailing party shall be entitled to collect its costs and fees, including reasonable attorneys' fees. 47 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |
2 | TradingHours API Docs 3 |

TradingHours.com Python Library

4 | 5 | 6 |

7 | 8 | PyPI version 9 | 10 | Python versions 11 | GitHub Actions 12 |

13 |
14 | 15 | [TradingHours.com](https://www.tradinghours.com) licenses **Market Holidays and Trading Hours data** for over **1,000** exchanges and trading venues around the world. This library allows clients to easily integrate market holidays and trading hours data into existing applications. This package downloads all available data from TradingHours.com, allowing you to work with the data locally. 16 | 17 | ### About the Data 18 | We support over 1,000 exchanges and trading venues, including all major currencies. [See all supported markets.](https://www.tradinghours.com/coverage) 19 | 20 | Our comprehensive data covers: 21 | 22 | - Market holidays 23 | - Trading hours 24 | - Half-days / Irregular schedules 25 | - Non-settlement dates 26 | - Currency holidays 27 | - Detailed trading phases 28 | 29 | ### How is the data collected? 30 | Our global research team collects and verifies trading hours and market holidays using primary sources exclusively. Manual and automated checks ensure the highest degree of accuracy and reliability. 31 | 32 | Once data is collected, we continually monitor for changes to ensure the data is always up-to-date. Data updates occur daily. 33 | 34 | ### Getting Started 35 | 36 | To get started, you'll need an active subscription. [Learn more »](https://www.tradinghours.com/data) 37 | 38 | 1. Install the `tradinghours` package 39 | 40 | ```sh 41 | pip install tradinghours 42 | ``` 43 | 44 | 2. Set your API Key ([Click here to get your key](https://www.tradinghours.com/user/api-tokens)) 45 | 46 | ```sh 47 | export TRADINGHOURS_TOKEN= 48 | ``` 49 | 50 | You can also install with mysql or postgres dependencies, if you wish to use one of these. You can read more about this in [advanced configuration options](#optional-advanced-configuration). 
51 | ```sh 52 | pip install tradinghours[mysql] 53 | # or 54 | pip install tradinghours[postgres] 55 | ``` 56 | 57 | ### Alternatives 58 | 59 | Instead of using this Python Library, clients can also use the web-based [Trading Hours API](https://docs.tradinghours.com/). 60 | The web-based API is programming language agnostic. 61 | 62 | ### Contents 63 | - [Importing Data](#importing-data) 64 | - [Markets](#markets) 65 | - [View Available Markets](#view-available-markets) 66 | - [Get a Specific Market](#get-a-specific-market) 67 | - [Market Status](#market-status) 68 | - [Market Holidays](#market-holidays) 69 | - [Trading Hours](#trading-hours) 70 | - [Currencies](#currencies) 71 | - [List Currencies](#list-currencies) 72 | - [Currency Holidays](#currency-holidays) 73 | - [Advanced](#advanced) 74 | - [Optional Advanced Configuration](#optional-advanced-configuration) 75 | - [Database](#database) 76 | - [Time Zones](#time-zones) 77 | - [Model Configuration](#model-configuration) 78 | - [Change String Format](#change-string-format) 79 | 80 | --- 81 | ## Importing Data 82 | 83 | Run the following command to download and import official data. Ensure you have set the **TRADINGHOURS_TOKEN** environment variable. 84 | 85 | ```console 86 | $ tradinghours import 87 | Downloading..... (0.824s) 88 | Ingesting.......................... (12.066s) 89 | ``` 90 | 91 | You can check the current data status with the following subcommand: 92 | 93 | ```console 94 | $ tradinghours status --extended 95 | Collecting timestamps.... (0.213s) 96 | TradingHours Data Status: 97 | Remote Timestamp: Thu Oct 26 02:08:17 2023 98 | Local Timestamp: Thu Oct 26 03:12:40 2023 99 | 100 | Reading local data.... 
(0.426s) 101 | Extended Information: 102 | Currencies count: 30 103 | Markets count: 1012 104 | ``` 105 | 106 | ## Markets 107 | 108 | ### View Available Markets 109 | 110 | ```python 111 | from tradinghours import Market 112 | 113 | for market in Market.list_all()[:3]: 114 | print(market) 115 | 116 | >>> Market: AE.ADX Abu Dhabi Securities Exchange Asia/Dubai 117 | Market: AE.DFM Dubai Financial Market Asia/Dubai 118 | Market: AE.DGCX Dubai Gold & Commodities Exchange Asia/Dubai 119 | ``` 120 | 121 | You can also use an `*` to filter the list of markets based on their `fin_id`: 122 | 123 | ```python 124 | from tradinghours import Market 125 | 126 | for market in Market.list_all("US.*")[:3]: 127 | print(market) 128 | 129 | >>> Market: US.BTEC.ACTIVES.ASIA BrokerTec America/New_York 130 | Market: US.BTEC.ACTIVES.LDN BrokerTec America/New_York 131 | Market: US.BTEC.ACTIVES.US BrokerTec America/New_York 132 | ``` 133 | 134 | ### Get a Specific Market 135 | 136 | ```python 137 | from tradinghours import Market 138 | 139 | # Get by either FinID or MIC 140 | market = Market.get('US.NYSE') 141 | market = Market.get('XNYS') 142 | 143 | # Easily see what attributes an object has 144 | # (You can call this on any object) 145 | market.pprint() # same as pprint(market.to_dict()) 146 | >>> {'exchange_name': 'New York Stock Exchange', 147 | 'market_name': 'Canonical', 148 | 'security_group': None, 149 | 'timezone': 'America/New_York', 150 | 'weekend_definition': 'Sat-Sun', 151 | 'fin_id': 'US.NYSE', 152 | 'mic': 'XNYS', 153 | 'acronym': 'NYSE', 154 | 'asset_type': 'Securities', 155 | 'memo': 'Canonical', 156 | 'permanently_closed': None, 157 | 'replaced_by': None, 158 | 'country_code': 'US'} 159 | ``` 160 | 161 | If a market is marked "permanently closed," it may be replaced or superseded by another market. By default, the newer market will be returned automatically. You can retrieve the older market object for historical analysis by using the `follow=False` parameter. 
162 | 163 | ```python 164 | from tradinghours import Market 165 | 166 | # AR.BCBA is permanently closed and replaced by AR.BYMA 167 | market = Market.get('AR.BCBA') 168 | original = Market.get('AR.BCBA', follow=False) 169 | 170 | print(f'{market.fin_id} replaced by {market.replaced_by} on {market.permanently_closed}') 171 | print(f'{original.fin_id} replaced by {original.replaced_by} on {original.permanently_closed}') 172 | 173 | >>> AR.BYMA replaced by None on None 174 | AR.BCBA replaced by AR.BYMA on 2017-04-17 175 | ``` 176 | 177 | ### Market Status 178 | The `Market.status` method will return a `MarketStatus` representing the status of the market at a specific datetime. 179 | 180 | ```python 181 | from tradinghours import Market 182 | import datetime as dt 183 | 184 | market = Market.get("US.NYSE") 185 | status = market.status() 186 | # The default datetime is the current time. 187 | now = dt.datetime.now(dt.timezone.utc) 188 | print( 189 | status.status == market.status(now).status 190 | ) 191 | >>> True 192 | ``` 193 | To use a different datetime, create a timezone-aware datetime object. 
194 | 195 | ```python 196 | from tradinghours import Market 197 | from zoneinfo import ZoneInfo 198 | import datetime as dt 199 | 200 | christmas_noon = dt.datetime(2024,12,25,12,tzinfo=ZoneInfo("America/New_York")) 201 | status = Market.get("US.NYSE").status(christmas_noon) 202 | 203 | status.pprint() # same as pprint(status.to_dict()) 204 | >>> {'status': 'Closed', 205 | 'reason': 'Christmas', 206 | 'until': '2024-12-26 04:00:00-05:00', 207 | 'next_bell': '2024-12-26 09:30:00-05:00', 208 | 'phase': None, 209 | 'market': 'Market: US.NYSE New York Stock Exchange America/New_York'} 210 | ``` 211 | 212 | ### Market Holidays 213 | 214 | ```python 215 | from tradinghours import Market 216 | 217 | market = Market.get('US.NYSE') 218 | holidays = market.list_holidays("2024-01-01", "2024-12-31") 219 | for holiday in holidays[:3]: 220 | print(holiday) 221 | 222 | >>> MarketHoliday: US.NYSE 2024-01-01 New Year's Day 223 | MarketHoliday: US.NYSE 2024-01-15 Birthday of Martin Luther King, Jr 224 | MarketHoliday: US.NYSE 2024-02-19 Washington's Birthday 225 | ``` 226 | ### Trading Hours 227 | #### Phases 228 | To get opening and closing times for a particular date range, use the `Market.generate_phases` method. This will return a generator yielding `tradinghours.models.Phase` objects, representing specific datetimes based on the "general schedule" of a market, considering holidays and potential schedule changes. 229 | 230 | ```python 231 | from tradinghours import Market 232 | 233 | market = Market.get('XNYS') 234 | for phase in list(market.generate_phases("2023-09-01", "2023-09-30"))[:3]: 235 | print(phase) 236 | 237 | >>> Phase: 2023-09-01 04:00:00-04:00 - 2023-09-01 09:30:00-04:00 Pre-Trading Session 238 | Phase: 2023-09-01 06:30:00-04:00 - 2023-09-01 09:30:00-04:00 Pre-Open 239 | Phase: 2023-09-01 09:30:00-04:00 - 2023-09-01 09:30:00-04:00 Call Auction 240 | ``` 241 | #### Schedules 242 | To get the "general schedule" that phases are based on, use `Market.list_schedules()`. 
This will provide a list of `tradinghours.models.Schedule` objects, representing the schedule without consideration of holidays. The schedule will include 'Regular,' 'Partial,' and potentially other irregular schedules. Interpreting these schedule objects can be difficult. In most cases, you will want to use the `Market.generate_phases` method above. 243 | 244 | `US.NYSE` is one of the simplest examples for schedules: 245 | ```python 246 | from tradinghours import Market 247 | 248 | market = Market.get('XNYS') 249 | for schedule in market.list_schedules(): 250 | print(schedule) 251 | 252 | >>> Schedule: US.NYSE (Partial) 06:30:00 - 09:30:00 Mon-Fri Pre-Trading Session 253 | Schedule: US.NYSE (Partial) 09:30:00 - 13:00:00 Mon-Fri Primary Trading Session 254 | Schedule: US.NYSE (Partial) 13:00:00 - 13:30:00 Mon-Fri Post-Trading Session 255 | Schedule: US.NYSE (Regular) 04:00:00 - 09:30:00 Mon-Fri Pre-Trading Session 256 | Schedule: US.NYSE (Regular) 06:30:00 - 09:30:00 Mon-Fri Pre-Open 257 | Schedule: US.NYSE (Regular) 09:30:00 - 09:30:00 Mon-Fri Call Auction 258 | Schedule: US.NYSE (Regular) 09:30:00 - 16:00:00 Mon-Fri Primary Trading Session 259 | Schedule: US.NYSE (Regular) 15:50:00 - 16:00:00 Mon-Fri Pre-Close 260 | Schedule: US.NYSE (Regular) 16:00:00 - 20:00:00 Mon-Fri Post-Trading Session 261 | ``` 262 | 263 | `US.MGEX` is a more complex example, which has multiple irregular schedules and overnight trading sessions: 264 | 265 | ```python 266 | from tradinghours import Market 267 | 268 | market = Market.get('US.MGEX') 269 | for schedule in market.list_schedules()[-11:-5]: 270 | print(schedule) 271 | 272 | # US.MGEX has multiple irregular schedules and overnight trading sessions 273 | >>> Schedule: US.MGEX (Regular) 19:00:00 - 07:45:00 +1 Sun-Thu Primary Trading Session 274 | Schedule: US.MGEX (Thanksgiving2022) 08:00:00 - 08:30:00 Wed Pre-Open 275 | Schedule: US.MGEX (Thanksgiving2022) 08:30:00 - 12:15:00 Fri Primary Trading Session 276 | Schedule: US.MGEX 
(Thanksgiving2022) 08:30:00 - 13:30:00 Wed Primary Trading Session 277 | Schedule: US.MGEX (Thanksgiving2022) 14:30:00 - 16:00:00 Wed Post-Trading Session 278 | Schedule: US.MGEX (Thanksgiving2022) 16:45:00 - 08:30:00 +2 Wed Pre-Open 279 | ``` 280 | The string representation created by `print(schedule)` is using the format shown below. Other available fields are also listed. These fields are based on the data that is returned from the API's `download` endpoint described [here](https://docs.tradinghours.com/3.x/enterprise/download.html). 281 | ```python 282 | from tradinghours import Market 283 | schedule = Market.get('US.MGEX').list_schedules()[-6] 284 | 285 | print(schedule.get_string_format()) 286 | schedule.pprint() # same as pprint(schedule.to_dict()) 287 | 288 | >>> Schedule: {fin_id} ({schedule_group}) {start} - {end_with_offset} {days} {phase_type} 289 | {'fin_id': 'US.MGEX', # Fin ID of the market of this schedule 290 | 'schedule_group': 'Thanksgiving2022', # Used to group phases together. If there is no holiday then the “Regular” phase applies. 
291 | 'schedule_group_memo': None, # additional description for the schedule_group 292 | 'timezone': 'America/Chicago', # timezone of the market 293 | 'phase_type': 'Pre-Open', # normalized name for the phase 294 | 'phase_name': 'Pre-Open', # name for the phase as it is used by the market 295 | 'phase_memo': None, # additional description for the phase_name 296 | 'days': 'Wed', # days of the week that this schedule applies to 297 | 'start': '16:45:00', # start time of the phase 298 | 'end': '08:30:00', # end time of the phase 299 | 'offset_days': 2, # number of days that need to be added to the end time 300 | 'duration': 143100, # total length of this phase in seconds 301 | 'min_start': None, # earliest possible start when random start/stop times apply 302 | 'max_start': None, # latest possible start when random start/stop times apply 303 | 'min_end': None, # earliest possible end when random start/stop times apply 304 | 'max_end': None, # latest possible end when random start/stop times apply 305 | 'in_force_start_date': None, # date that this schedule starts being in effect 306 | 'in_force_end_date': None, # date that this schedule stops being in effect 307 | 'season_start': None, # the start of the season, if this is seasonal 308 | 'season_end': None, # the end of the season 309 | 'end_with_offset': '08:30:00 +2', # string representation of the end time with offset_days concatenated 310 | 'has_season': False} # Indicator whether this schedule only applies to a specific season 311 | ``` 312 | As mentioned earlier, it can be very error-prone to interpret these schedules yourself, so we recommend sticking to the `generate_phases` method as much as possible. 
313 | 314 | ## Currencies 315 | ### List Currencies 316 | 317 | ```python 318 | from tradinghours import Currency 319 | 320 | for currency in Currency.list_all()[:3]: 321 | print(currency) 322 | 323 | >>> Currency: AUD Australian Dollar 324 | Currency: BRL Brazilian Real 325 | Currency: CAD Canadian Dollar 326 | ``` 327 | 328 | ### Currency Holidays 329 | 330 | ```python 331 | from tradinghours import Currency 332 | 333 | currency = Currency.get('AUD') 334 | for holiday in currency.list_holidays("2023-06-01", "2023-12-31")[:3]: 335 | print(holiday) 336 | 337 | >>> CurrencyHoliday: AUD 2023-06-12 King's Birthday 338 | CurrencyHoliday: AUD 2023-10-02 Labor Day 339 | CurrencyHoliday: AUD 2023-12-25 Christmas Day 340 | ``` 341 | 342 | ## Advanced 343 | ### Optional Advanced Configuration 344 | 345 | Configuration can be changed by creating a `tradinghours.ini` file in the current directory. 346 | 347 | These are possible and optional values, for which explanations follow: 348 | 349 | ```ini 350 | [api] 351 | token = YOUR-TOKEN 352 | 353 | [data] 354 | db_url = postgresql://postgres:password@localhost:5432/your_database 355 | table_prefix = thstore_ 356 | remote_dir = path/to/empty/folder 357 | 358 | [control] 359 | check_tzdata = False 360 | ``` 361 | 362 | ### Database 363 | * `[data]` 364 | * `db_url` 365 | * A connection string to a database. Please read the [caveats](#caveats) before using this setting. 366 | * This allows you to download the data once and let your team members use the same database. 367 | * `table_prefix` 368 | * Every table created in the database will be prefixed with this. `'thstore_'` is the default. 369 | * This can be used to avoid conflicts with existing tables. 370 | * `remote_dir` 371 | * The folder in which to save the raw CSV files after downloading with `tradinghours import`. 372 | * The content of these CSV files will immediately be ingested into the database defined in `db_url` and then not used anymore. 
373 | * Unless you want to access the raw CSV files directly, there is no reason to change this. 374 | 375 | #### Caveats 376 | * This package has been tested with MySQL 8.4 and PostgreSQL 15.8 377 | * Dependencies: 378 | * Running `pip install tradinghours[mysql]` or `pip install tradinghours[postgres]` installs `pymysql` or `psycopg2-binary`, respectively. 379 | * You can install any other package (e.g. `mysqlclient`), as long as it allows `sqlalchemy` to communicate with the chosen database. 380 | * Data ingestion: 381 | * Tables used by this package (identified by the `table_prefix`) are dropped and recreated every time `tradinghours import` is run. 382 | * To avoid any complications with existing data, we recommend creating a separate database for the `tradinghours` data, and making this the only database the `db_url` user has access to. 383 | 384 | ##### Schema 385 | * The tables are named after the CSV files, with `_` instead of `-` and prefixed with the `table_prefix` setting. 386 | * To allow flexibility with updates to the raw data, where columns might be added in the future, tables are created dynamically, based on the content of the CSV files. 387 | * Columns of the tables are named after the columns of the CSV files, although in lower case and with underscores instead of spaces. 388 | 389 | ### Time Zones 390 | This package employs `zoneinfo` for timezone management, utilizing the IANA Time Zone Database, 391 | which is routinely updated. In certain environments, it's essential to update the `tzdata` package accordingly. 392 | `tradinghours` automatically checks your `tzdata` version against PyPI via HTTP request, issuing a warning 393 | if an update is needed. 
394 | 395 | To update `tzdata` run this command: `pip install tzdata --upgrade` 396 | 397 | To disable this verification and prevent the request, add this section to your tradinghours.ini file: 398 | ```ini 399 | [control] 400 | check_tzdata = False 401 | ``` 402 | 403 | ## Model Configuration 404 | ### Change String Format 405 | ```python 406 | from tradinghours import Currency 407 | 408 | Currency.set_string_format("{currency_code}: {financial_capital} - {financial_capital_timezone}") 409 | currency = Currency.get("EUR") 410 | print(currency) 411 | 412 | Currency.reset_string_format() 413 | print(currency) 414 | 415 | >>> EUR: Frankfurt - Europe/Berlin 416 | Currency: EUR Euro 417 | ``` 418 | -------------------------------------------------------------------------------- /dev-readme.md: -------------------------------------------------------------------------------- 1 | # Developer README 2 | 3 | ## Setting Up Local Environment 4 | 5 | ### Clone the Repository 6 | 7 | Starting from scratch, go into a test folder and follow these steps to set up the code locally. This will clone the repository, switch to the desired branch, and set up the Python environment: 8 | 9 | ```bash 10 | git clone https://github.com/tradinghours/tradinghours-python.git 11 | 12 | cd tradinghours-python 13 | 14 | python -m venv venv 15 | 16 | venv\Scripts\activate # on Windows 17 | # or 18 | source venv/bin/activate # on Unix or MacOS 19 | 20 | pip install -e . 21 | ``` 22 | 23 | You can now use the package from this directory. 
24 | 25 | ### Running Tests 26 | 27 | To run the tests, you need to install the development requirements (e.g., `pytest`): 28 | 29 | ```bash 30 | pip install -e .[dev] 31 | 32 | pytest 33 | ``` 34 | 35 | ### Running with a Database 36 | 37 | If you want to run it with a MySQL or Postgres database, create a `tradinghours.ini` file in the current directory with the connection string: 38 | 39 | ```ini 40 | [data] 41 | db_url = mysql+pymysql://test_user:test_password@localhost:3306/test_db 42 | ``` 43 | 44 | Then, install the appropriate dependencies: 45 | 46 | ```bash 47 | pip install -e .[mysql] # or pip install -e .[postgres] 48 | ``` 49 | 50 | If you don't have one of these databases available, you can run a Docker container defined in `docker-compose.yml`: 51 | 52 | ```bash 53 | docker compose up mysql -d # for MySQL 54 | # or 55 | docker compose up postgres -d # for PostgreSQL 56 | ``` 57 | 58 | When done, don't forget to shut it down: 59 | 60 | ```bash 61 | docker compose down 62 | ``` 63 | 64 | ## CI/CD Workflow 65 | 66 | ### Overview 67 | 68 | The CI/CD process uses GitHub Actions to automate testing and releasing the project. The workflow includes several steps described below: 69 | 70 | 1. **Create a Pull Request (PR) against `pre-release`:** 71 | - This will trigger the `tests.yml` workflow. 72 | - If all tests pass, merge with the `pre-release` branch. 73 | 74 | 2. **PR against `main`:** 75 | - Increment the version number in `src\tradinghours\__init__.py`. 76 | - Describe the changes in `release_log.txt` under the `## new_release` heading. (But keep the heading as it is) 77 | - Open a PR from the `pre-release` branch to `main`. 78 | - This will trigger the `test_release.yml` workflow. 79 | - If the tests pass, merge with the `main` branch. 80 | 81 | 3. **Automatic Release:** 82 | - Merging with `main` triggers the `release.yml` workflow. 83 | - This will push to PyPI and create a release tag on GitHub. 
84 | 85 | ## GitHub Actions Workflows 86 | 87 | ### `tests.yml` 88 | 89 | This workflow runs tests across different configurations including coverage, database integration tests, and various OS and Python versions. 90 | 91 | #### Trigger 92 | 93 | The workflow is triggered on pull requests to the `pre-release` branch. 94 | 95 | #### Sections 96 | 97 | 1. **Coverage** 98 | 2. **Database Tests** 99 | 3. **OS Versions and API Levels** 100 | 4. **Python Versions** 101 | 102 | ### `test_release.yml` 103 | 104 | This workflow ensures that the package can be pushed and pulled from the test PyPI repository before an actual release. 105 | 106 | #### Trigger 107 | 108 | The workflow is triggered on pull requests to the `main` branch. 109 | 110 | #### Steps 111 | 112 | 1. **Push to Test PyPI**: 113 | - Checks if the version has been incremented. 114 | - Releases the package to Test PyPI. 115 | 2. **Test from Test PyPI**: 116 | - Installs the package from Test PyPI. 117 | - Runs all tests to ensure the package works as expected. 118 | 119 | ### `release.yml` 120 | 121 | This workflow publishes the package to PyPI and creates a release on GitHub. 122 | 123 | #### Trigger 124 | 125 | The workflow is triggered on push events to the `main` branch. 126 | 127 | #### Steps 128 | 129 | 1. **Update Version and Summary**: 130 | - Sets the version and updates the release log. 131 | 2. **Build and Publish**: 132 | - Publishes the package to PyPI using Flit. 133 | 3. **Create GitHub Release**: 134 | - Creates a new GitHub release with the current version and summary. 
135 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.8' 2 | 3 | services: 4 | mysql5: 5 | image: mysql:5.7 6 | environment: 7 | MYSQL_ROOT_PASSWORD: root 8 | MYSQL_DATABASE: test_db 9 | MYSQL_USER: test_user 10 | MYSQL_PASSWORD: test_password 11 | ports: 12 | - "3306:3306" 13 | healthcheck: 14 | test: [ "CMD", "mysqladmin", "ping", "--silent" ] 15 | interval: 10s 16 | retries: 3 17 | timeout: 5s 18 | 19 | mysql: 20 | image: mysql:8.4 21 | environment: 22 | MYSQL_ROOT_PASSWORD: root 23 | MYSQL_DATABASE: test_db 24 | MYSQL_USER: test_user 25 | MYSQL_PASSWORD: test_password 26 | ports: 27 | - "3306:3306" 28 | healthcheck: 29 | test: ["CMD", "mysqladmin", "ping", "--silent"] 30 | interval: 10s 31 | retries: 3 32 | timeout: 5s 33 | 34 | postgres: 35 | image: postgres:15.8 36 | environment: 37 | POSTGRES_USER: postgres 38 | POSTGRES_PASSWORD: password 39 | POSTGRES_DB: test_db 40 | ports: 41 | - "5432:5432" 42 | healthcheck: 43 | test: ["CMD-SHELL", "pg_isready -U postgres"] 44 | interval: 10s 45 | retries: 5 46 | timeout: 5s 47 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["flit_core >=3.2,<4"] 3 | build-backend = "flit_core.buildapi" 4 | 5 | [project] 6 | name = "tradinghours" 7 | authors = [{ name="TradingHours", email="developer@tradinghours.com" }] 8 | dynamic = ["version", "description"] 9 | readme = "README.md" 10 | requires-python = ">=3.9" 11 | classifiers = [ 12 | "Programming Language :: Python :: 3", 13 | "Operating System :: OS Independent", 14 | "Development Status :: 1 - Planning", 15 | "Intended Audience :: Developers", 16 | "Intended Audience :: Financial and Insurance Industry", 17 | "Topic :: Office/Business :: Financial", 18 | "Topic 
:: Software Development :: Libraries", 19 | ] 20 | dependencies = ['tzdata', 'requests', 'sqlalchemy'] 21 | 22 | [project.urls] 23 | "Homepage" = "https://github.com/tradinghours/tradinghours-python" 24 | "Bug Tracker" = "https://github.com/tradinghours/tradinghours-python/issues" 25 | 26 | [project.optional-dependencies] 27 | dev = [ 28 | 'flit', 29 | 'isort', 30 | 'flake8', 31 | 'flake8-pyproject', 32 | 'black', 33 | 'coverage[toml]', 34 | 'pytest', 35 | 'pytest-mock', 36 | ] 37 | mysql = [ 38 | 'pymysql', 39 | 'cryptography' 40 | ] 41 | postgres = [ 42 | 'psycopg2-binary' 43 | ] 44 | 45 | [project.scripts] 46 | tradinghours = "tradinghours.console:main" 47 | 48 | [tool.isort] 49 | profile = "black" 50 | 51 | [tool.flake8] 52 | max-line-length = 88 53 | extend-ignore = 'E203' 54 | 55 | [tool.black] 56 | exclude = ''' 57 | /( 58 | | tests 59 | )/ 60 | ''' 61 | 62 | [tool.coverage.run] 63 | omit = [ 'tests/*' ] 64 | -------------------------------------------------------------------------------- /release_log.txt: -------------------------------------------------------------------------------- 1 | ## 0.4.2 (2025-01-28) 2 | Changes: 3 | * made the package compatible with python 3.13 by removing class properties 4 | 5 | ## 0.4.1 (2024-12-11) 6 | Changes: 7 | * fix for first_available_date conflicting with consideration of offset_days 8 | 9 | ## 0.4.0 (2024-10-03) 10 | This update completely overhauls the backend of the package: 11 | * Instead of using two storages (files and sql), it uses sqlite by default but can be used with a sql connection string to a remote database. 12 | * The interface is mostly the same, except that some functionality, outputs, and exceptions have been added or modified. 13 | * It is best to consult the README.md and dev-readme.md as if it's the first time you are working with this package. 
14 | 15 | 16 | ## 0.3.0 (2024-02-16) 17 | Breaking Changes: 18 | * renamed Market.generate_schedules to Market.generate_phases 19 | * moved Schedule.list_all to Market.list_schedules 20 | * environment variables now take precedence over the .ini file 21 | 22 | Other Changes: 23 | * updated test_release.yml for non-dev dependency test 24 | * updated release.yml to include summary from release_log.txt 25 | * added release_log.txt 26 | * added dev-readme.md, showing set up for dev environment and CI/CD 27 | 28 | 29 | ## 0.2.1 30 | Changes: 31 | * add missing dependency to pyproject.toml 32 | 33 | 34 | ## 0.2.0 35 | Breaking Changes: 36 | * imports changed, see readme examples. 37 | 38 | Other Changes: 39 | * replace pytz with zoneinfo 40 | * added automated check for tzdata version 41 | * complete migration to pytest 42 | * add tests for API access levels 43 | * added workflows for releasing 44 | 45 | -------------------------------------------------------------------------------- /src/tradinghours/__init__.py: -------------------------------------------------------------------------------- 1 | """TradingHours Library""" 2 | 3 | __version__ = "0.4.2" 4 | 5 | from .currency import Currency 6 | from .market import Market 7 | 8 | -------------------------------------------------------------------------------- /src/tradinghours/client.py: -------------------------------------------------------------------------------- 1 | import datetime, json, shutil, tempfile, zipfile, time, os 2 | from pathlib import Path 3 | from urllib.error import HTTPError 4 | from urllib.parse import urljoin 5 | from urllib.request import Request, urlopen 6 | 7 | from .config import main_config 8 | from .util import timed_action 9 | from .exceptions import ClientError, TokenError 10 | 11 | TOKEN = main_config.get("api", "token") 12 | BASE_URL = main_config.get("api", "base_url") 13 | ROOT = Path(main_config.get("data", "remote_dir")) 14 | ROOT.mkdir(parents=True, exist_ok=True) 15 | 16 | 17 | def 
def get_response(path):
    """Perform an authenticated GET against the TradingHours API.

    Args:
        path: URL path, joined onto the module-level ``BASE_URL``.

    Returns:
        The open ``http.client.HTTPResponse``; the caller is responsible
        for reading and closing it.

    Raises:
        TokenError: when the server answers 401 (missing/invalid token).
        ClientError: for any other HTTP error.
    """
    url = urljoin(BASE_URL, path)
    request = Request(url)
    request.add_header("Authorization", f"Bearer {TOKEN}")
    try:
        response = urlopen(request)
    except HTTPError as error:
        if error.code == 401:
            raise TokenError("Token is missing or invalid")
        raise ClientError("Error getting server response", inner=error)

    return response


def download_zip_file(path="download"):
    """Download the data zip from the API and extract it into ``ROOT``.

    Returns:
        True when the server returned the archive (HTTP 200) and it was
        extracted; False when the data is still being generated
        server-side (HTTP 202).

    Raises:
        ClientError: on any other response status.
    """
    # Close the response once we are done with it (previously leaked).
    with get_response(path) as response:
        if response.status == 200:
            with tempfile.NamedTemporaryFile() as temp_file:
                shutil.copyfileobj(response, temp_file)
                temp_file.flush()
                temp_file.seek(0)

                # clear out the directory to make sure no old csv files
                # are present if the access level is reduced
                # NOTE: loop variable renamed so it no longer shadows the
                # ``path`` parameter of this function.
                for entry in os.listdir(ROOT):
                    entry_path = ROOT / entry
                    if os.path.isdir(entry_path):
                        shutil.rmtree(entry_path)
                    else:
                        os.remove(entry_path)

                with zipfile.ZipFile(temp_file, "r") as zip_ref:
                    zip_ref.extractall(ROOT)
                return True
        elif response.status == 202:
            return False

    raise ClientError("Error getting server response")


def download_covered_markets():
    """Fetch the list of all covered markets and cache it as JSON in ``ROOT``."""
    with get_response("markets?group=all") as response:
        markets = json.load(response).get("data", [])
    with open(ROOT / "covered_markets.json", "w") as covered_markets:
        json.dump(markets, covered_markets)


def download_covered_currencies():
    """Fetch the list of all covered currencies and cache it as JSON in ``ROOT``."""
    with get_response("currencies") as response:
        currencies = json.load(response).get("data", [])
    with open(ROOT / "covered_currencies.json", "w") as covered_currencies:
        json.dump(currencies, covered_currencies)


def download():
    """
    Downloads zip file from tradinghours and unzips it into the
    folder set in main_config.data.remote_dir

    Polls the server while it is still generating the archive (HTTP 202),
    giving up after roughly two minutes.
    """
    # NOTE: the previous implementation wrapped this in
    # ``except TokenError: raise`` / ``except Exception as e: raise`` —
    # both were no-op re-raises and have been removed; behavior is unchanged.
    # TODO: think about cleaner error handling (e.g: not a zipfile)
    with timed_action("Downloading") as (change_message, start_time):
        waited = False
        while True:
            if download_zip_file():
                break
            if (time.time() - start_time) > 120:
                raise ClientError("Failed downloading data, please try again.")

            change_message("Generating (~ 1min)")
            # First poll waits 30s (the server usually needs ~1 min),
            # subsequent polls every 5s.
            time.sleep(5 if waited else 30)
            waited = True

        download_covered_markets()
        download_covered_currencies()


def get_remote_timestamp() -> datetime.datetime:
    """Return the server-side 'last updated' timestamp for the dataset."""
    with get_response("last-updated") as response:
        data = json.load(response)
    last_updated = data["last_updated"]
    return datetime.datetime.fromisoformat(last_updated)


# --- src/tradinghours/config.py -------------------------------------------
# Package configuration: built-in defaults, optionally overridden by a
# ``tradinghours.ini`` file in the current directory, with environment
# variables taking precedence over both.

import configparser
import os
from pathlib import Path

PROJECT_PATH = Path(__file__).parent
DEFAULT_STORE_DIR = PROJECT_PATH / "store_dir"
os.makedirs(DEFAULT_STORE_DIR, exist_ok=True)

# Define default settings in this dictionary
# (values are stringified by ConfigParser.read_dict)
default_settings = {
    "api": {
        "base_url": "https://api.tradinghours.com/v3/",
    },
    "data": {
        "remote_dir": DEFAULT_STORE_DIR / "remote",
        "db_url": f"sqlite:///{DEFAULT_STORE_DIR / 'tradinghours.db'}",
        "table_prefix": "thstore_"
    },
    "control": {
        "check_tzdata": True,
    }
}

# Read config file with defaults; a missing ini file is silently ignored.
main_config = configparser.ConfigParser()
main_config.read_dict(default_settings)
main_config.read("tradinghours.ini")

# Environment variables take precedence over the .ini file.
token = os.getenv("TRADINGHOURS_TOKEN", main_config.get("api", "token", fallback=""))
main_config.set("api", "token", token)
db_url = os.getenv("TH_DB_URL", main_config.get("data", "db_url", fallback=""))
main_config.set("data", "db_url", db_url)
# --- src/tradinghours/console.py -------------------------------------------
# Command line interface (``tradinghours status`` / ``tradinghours import``).

import argparse, warnings
import traceback
from textwrap import wrap

from . import __version__
from .store import Writer, db
from .client import (
    download as client_download,
    get_remote_timestamp as client_get_remote_timestamp,
    timed_action
)
from .currency import Currency
from .market import Market
from .exceptions import TradingHoursError, NoAccess

EXIT_CODE_EXPECTED_ERROR = 1
EXIT_CODE_UNKNOWN_ERROR = 2


def print_help(text):
    """Print *text* word-wrapped and indented, framed for console output."""
    lines = wrap(text, initial_indent=" ", subsequent_indent=" ")
    print("\n --")
    print("\n".join(lines))
    print()


def create_parser():
    """Build the argparse parser with the ``status`` and ``import`` subcommands."""
    parser = argparse.ArgumentParser(description="TradingHours API Client")

    # Create a subparser for the subcommands
    subparsers = parser.add_subparsers(dest="command", help="Available subcommands")
    subparsers.required = True

    # "status" subcommand
    status_parser = subparsers.add_parser("status", help="Get status")
    status_parser.add_argument(
        "--extended", action="store_true", help="Show more information"
    )

    # "import" subcommand
    import_parser = subparsers.add_parser("import", help="Import data")
    import_parser.add_argument("--force", action="store_true", help="Force the import")
    import_parser.add_argument(
        "--reset",
        action="store_true",
        help="Re-ingest data, without downloading. (Resets the database)",
    )

    return parser


def run_status(args):
    """Handle ``tradinghours status [--extended]``.

    Prints remote/local data timestamps; with ``--extended`` also prints
    coverage counts derived from the local database.
    """
    db.ready()
    with timed_action("Collecting timestamps"):
        remote_timestamp = client_get_remote_timestamp()
        local_timestamp = db.get_local_timestamp()
    print("TradingHours Data Status:")
    print(" Remote Timestamp: ", remote_timestamp.ctime())
    print(" Local Timestamp: ", local_timestamp and local_timestamp.ctime())
    print()
    if args.extended:
        if local_timestamp:
            with timed_action("Reading local data"):
                num_markets, num_currencies = db.get_num_covered()
                num_permanently_closed = db.get_num_permanently_closed()
                try:
                    # Currencies may be unavailable under the current plan.
                    num_all_currencies = len(list(Currency.list_all()))
                except NoAccess:
                    num_all_currencies = 0
                num_all_markets = len(list(Market.list_all()))
                num_all_markets -= num_permanently_closed

            print(f" Currencies count: {num_all_currencies:4} available out of {num_currencies} total")
            print(f" Markets count: {num_all_markets:4} available out of {num_markets} total")
            if num_permanently_closed:
                print()
                print("Notes:")
                # FIX: the contact address was missing from this message
                # ("contact us at ."); restored using the support address
                # referenced elsewhere in this module — confirm with the team.
                print(
                    f" {num_permanently_closed} permanently closed markets are available but excluded from the totals above.\n"
                    f" For access to additional markets, please contact us at support@tradinghours.com."
                )
        else:
            print("No local data to show extended information")


def run_import(args):
    """Handle ``tradinghours import [--force|--reset]``.

    Downloads (unless ``--reset``) and ingests data; warns when the target
    MySQL database could not store the full unicode set.
    """
    show_warning = False
    if args.reset:
        show_warning = not Writer().ingest_all()

    elif args.force or db.needs_download():
        client_download()
        show_warning = not Writer().ingest_all()
    else:
        print("Local data is up-to-date.")

    if show_warning:
        warnings.warn(
            "\n\nWarning:\nYou seem to be using a MySQL database that is not configured "
            "to handle the full unicode set. Unicode characters have been replaced with "
            "'?'. Consult the MySQL documentation for your version to enable this feature."
        )


def main():
    """Console entrypoint: dispatch subcommands and report errors gracefully."""
    try:
        # Main console entrypoint
        parser = create_parser()
        args = parser.parse_args()
        if args.command == "status":
            run_status(args)
        elif args.command == "import":
            run_import(args)

    # Handle generic errors gracefully
    except Exception as error:
        # TradingHours errors with help messages are simpler
        if isinstance(error, TradingHoursError) and error.help_message:
            print("ERROR:", error.detail)
            print_help(error.help_message)
            exit(EXIT_CODE_EXPECTED_ERROR)

        # Other errors will generate a traceback dump
        error_message = f"ERROR: {error}"
        print(error_message)

        try:
            # Try saving extra information to local file
            traceback_info = traceback.format_exc()
            version_message = f"\nVERSION: {__version__}"
            with open("debug.txt", "w") as debug_file:
                debug_file.write(error_message)
                debug_file.write(version_message)
                debug_file.write("\n\nTraceback:\n")
                debug_file.write(traceback_info)
            print_help(
                "Details about this error were saved to debug.txt file. You can "
                "submit it to the support team for further investigation by emailing "
                "support@tradinghours.com.",
            )
        except Exception as save_error:
            # FIX: renamed from ``error`` so it no longer shadows the
            # exception being handled by the outer block.
            print("Failed saving debug information.", save_error)
        finally:
            exit(EXIT_CODE_UNKNOWN_ERROR)


if __name__ == "__main__":
    main()


# --- src/tradinghours/currency.py ------------------------------------------

from typing import List, Union
import datetime as dt

from .validate import validate_range_args, validate_date_arg, validate_str_arg
from .models import BaseModel, CurrencyHoliday
from .store import db
from .exceptions import NotCovered, NoAccess


class Currency(BaseModel):
    """A currency covered by TradingHours, backed by the ``currencies`` table."""

    _table = "currencies"
    _original_string_format = "Currency: {currency_code} {currency_name}"

    def __init__(self, data):
        super().__init__(data)
        self.currency_code = self._data["currency_code"]
        self.currency_name = self._data["currency_name"]
        self.country_code = self._data["country_code"]
        self.central_bank = self._data["central_bank"]
        self.financial_capital = self._data["financial_capital"]
        self.financial_capital_timezone = self._data["financial_capital_timezone"]
        self.weekend_definition = self._data["weekend_definition"]

    def list_holidays(
        self, start: Union[str, dt.date], end: Union[str, dt.date]
    ) -> List["CurrencyHoliday"]:
        """Return this currency's holidays with ``start <= date <= end``."""
        start, end = validate_range_args(
            validate_date_arg("start", start),
            validate_date_arg("end", end),
        )
        table = CurrencyHoliday.table()
        result = db.query(table).filter(
            table.c.currency_code == self.currency_code,
            table.c.date >= start,
            table.c.date <= end
        )
        return [CurrencyHoliday(r) for r in result]

    @classmethod
    @db.check_access
    def list_all(cls) -> List["Currency"]:
        """Return every currency available under the current plan."""
        return [cls(r) for r in db.query(cls.table())]

    @classmethod
    def is_available(cls, code: str) -> bool:
        """True when the currency exists AND is accessible under the current plan."""
        try:
            cls.get(code)
            return True
        except (NoAccess, NotCovered):
            return False

    @classmethod
    @db.check_access
    def is_covered(cls, code: str) -> bool:
        """
        Returns True or False showing if tradinghours provides data for the Currency.
        This differs from is_available because is_covered does not mean that the user
        has access to it under their current plan.
        """
        table = db.table("covered_currencies")
        found = db.query(table).filter(
            table.c.currency_code == code
        ).one_or_none()
        return found is not None

    @classmethod
    @db.check_access
    def get(cls, code: str) -> "Currency":
        """Return the Currency for *code*; raise NoAccess/NotCovered otherwise."""
        validate_str_arg("code", code)
        result = db.query(cls.table()).filter(
            cls.table().c.currency_code == code
        ).one_or_none()
        if result:
            return cls(result)

        if cls.is_covered(code):
            raise NoAccess(
                f"\n\nThe currency '{code}' is supported but not available on your current plan."
                f"\nPlease learn more or contact sales at https://www.tradinghours.com/data"
            )
        # if no result found, raise NotCovered
        raise NotCovered(
            f"The currency '{code}' is currently not available."
        )


# --- src/tradinghours/exceptions.py ----------------------------------------

from functools import cached_property
from typing import Any, Optional


class TradingHoursError(Exception):
    """Baseclass for all errors from this library"""

    def __init__(self, message: str, inner: Optional[Exception] = None):
        super().__init__(message, inner)
        self._message = message
        self._inner = inner

    @property
    def message(self):
        return self._message

    @property
    def inner(self):
        return self._inner

    @cached_property
    def detail(self):
        # Cached: built once from message + inner exception.
        return self.build_detail()

    @cached_property
    def help_message(self):
        # None unless a subclass overrides build_help_message.
        return self.build_help_message()

    def build_detail(self, message: Optional[str] = None):
        message = message or self.message
        if self.inner:
            message = f"{message} ({self.inner})"
        return message

    def build_help_message(self):
        return None

    def __str__(self):
        return self.detail


class PrepareError(TradingHoursError):
    """Happens when a field from a model cannot be interpreted"""

    def __init__(self, field: "Field", value: Any, inner: Optional[Exception] = None):
        super().__init__("Error preparing field", inner=inner)
        self._field = field
        self._value = value

    @property
    def field(self):
        return self._field

    @property
    def value(self):
        return self._value

    def build_detail(self):
        message = f"Could not prepare field {self._field.field_name}"
        return super().build_detail(message)


class ClientError(TradingHoursError):
    """When an error occurs accessing remote HTTP server"""

    pass


class TokenError(ClientError):
    """When server access fails because of an invalid token"""

    def build_help_message(self):
        return (
            "A TradingHours token is required to perform this operation. "
            "You can access https://www.tradinghours.com/user/api-tokens to "
            "obtain one. In case you already have a token, remember to make "
            "it available by exporting the environment variable "
            "TRADINGHOURS_TOKEN and try again."
        )


class MissingDefinitionError(TradingHoursError):
    """When a season definition is not found"""

    pass


class MissingSqlAlchemyError(TradingHoursError):
    """When SQL Alchemy is not installed"""

    def build_help_message(self):
        return (
            "You need to install SQLAlchemy in order to use database "
            "ready store. You should be able to do that by running "
            "`pip install tradinghours[sql]` from the command line."
        )


class NoAccess(TradingHoursError):
    """
    Raised when a user attempts accessing a type of data
    that is not available under their current plan.
    """
    pass


class NotCovered(TradingHoursError):
    """
    Raised when a user attempts to access a specific data item
    that is not covered by Tradinghours but the type of data
    (e.g.: Currencies) are available under their current plan.
    """
    pass


class MICDoesNotExist(TradingHoursError):
    """
    Raised when a user tries to get a Market with a mic that can not
    be matched with a finid.
    """
    pass


class MissingTzdata(TradingHoursError):
    """When the installed tzdata package appears to be missing or outdated."""
    pass


class DBError(TradingHoursError):
    """
    Raised when the database could not be accessed
    """
    pass


class DateNotAvailable(TradingHoursError):
    """
    Raised when the dates passed to generate_phases are outside
    of the first_ and last_available dates.
    """
    pass
import calendar
import datetime as dt
from typing import Iterable, Generator, Union
from zoneinfo import ZoneInfo
from functools import cached_property

from .models import (
    BaseModel,
    Schedule,
    Phase,
    PhaseType,
    MarketHoliday,
    MicMapping,
    SeasonDefinition,
    MarketStatus
)
from .validate import (
    validate_range_args,
    validate_date_arg,
    validate_finid_arg,
    validate_str_arg,
    validate_mic_arg
)
from .store import db
from .util import weekdays_match
from .exceptions import NoAccess, NotCovered, MICDoesNotExist, DateNotAvailable

# Arbitrary max offset days for TradingHours data
MAX_OFFSET_DAYS = 2


class Market(BaseModel):
    """A trading venue, identified by a FinID (e.g. "US.NYSE") or a MIC."""

    _table = "markets"
    _original_string_format = "Market: {fin_id} {exchange_name} {timezone}"

    def __init__(self, data):
        super().__init__(data)
        # Explicit assignments mirror the columns of the markets table so
        # the attributes are visible to IDEs and static analysis.
        self.exchange_name = self._data["exchange_name"]
        self.market_name = self._data["market_name"]
        self.security_group = self._data["security_group"]
        self.timezone = self._data["timezone"]
        self.weekend_definition = self._data["weekend_definition"]
        self.fin_id = self._data["fin_id"]
        self.mic = self._data["mic"]
        self.acronym = self._data["acronym"]
        self.asset_type = self._data["asset_type"]
        self.memo = self._data["memo"]
        self.permanently_closed = self._data["permanently_closed"]
        self.replaced_by = self._data["replaced_by"]

    @cached_property
    def first_available_date(self):
        """
        The first available date is the 1st day of
        the month of the first holiday of the given market.
        """
        table = MarketHoliday.table()
        result = db.query(table).filter(
            table.c.fin_id == self.fin_id
        ).order_by(
            table.c.date
        ).first()
        return result.date.replace(day=1)

    @cached_property
    def last_available_date(self):
        """
        The last available date is the last day of the month
        of the last available holiday of the given market.
        """
        table = MarketHoliday.table()
        result = db.query(table).filter(
            table.c.fin_id == self.fin_id
        ).order_by(
            table.c.date.desc()
        ).first()
        date = result.date
        _, num_days_in_month = calendar.monthrange(date.year, date.month)
        return date.replace(day=num_days_in_month)

    def _in_range(self, *dates) -> None:
        """Raise DateNotAvailable if any date is outside the covered window."""
        if not all(
            self.first_available_date <= date <= self.last_available_date for date in dates
        ):
            raise DateNotAvailable("the requested data is outside of the available dates for this "
                                   "Market. You can use the properties `first_available_date` and "
                                   "`last_available_date` to stay within bounds.")

    @property
    def country_code(self):
        """Two-letter country code."""
        return self.fin_id.split(".")[0]

    def _pick_schedule_group(
        self,
        some_date: dt.date,
        holidays: dict[dt.date, "MarketHoliday"],
    ) -> tuple[str, bool]:
        """Return (schedule_group, fallback) for a date.

        On a holiday the holiday's schedule group applies; `fallback` is True
        when that group is an open one (see Schedule.is_group_open), meaning
        a previous weekday's schedule may be reused if none matches.
        """
        if found := holidays.get(some_date):
            schedule_group = found.schedule.lower()
            fallback = Schedule.is_group_open(schedule_group)
        else:
            schedule_group = "regular"
            fallback = False
        return schedule_group, fallback

    def _filter_schedule_group(
        self, schedule_group: str, schedules: Iterable[Schedule]
    ) -> Iterable[Schedule]:
        """Yield only schedules belonging to `schedule_group` (case-insensitive)."""
        for current in schedules:
            if current.schedule_group.lower() == schedule_group.lower():
                yield current

    def _filter_inforce(
        self, some_date: dt.date, schedules: Iterable[Schedule]
    ) -> Iterable[Schedule]:
        """Yield only schedules in force on `some_date`."""
        for current in schedules:
            if current.is_in_force(some_date, some_date):
                yield current

    def _filter_season(
        self, some_date: dt.date, schedules: Iterable[Schedule]
    ) -> Iterable[Schedule]:
        """Yield schedules whose season covers `some_date`."""
        for current in schedules:
            # If there is no season, it means there is no restriction in terms
            # of the season when this schedule is valid, and as such it is valid,
            # from a season-perspective for any date
            if not current.has_season:
                yield current
            else:
                start_date = SeasonDefinition.get(current.season_start, some_date.year).date
                end_date = SeasonDefinition.get(current.season_end, some_date.year).date

                # Season wraps around the end of the year (e.g. Nov-Mar)
                if end_date < start_date:
                    if some_date <= end_date or some_date >= start_date:
                        yield current

                if some_date >= start_date and some_date <= end_date:
                    yield current

    def _filter_weekdays(
        self, weekday: int, schedules: Iterable[Schedule]
    ) -> Iterable[Schedule]:
        """Yield only schedules applying on the given weekday (0=Monday)."""
        for current in schedules:
            if weekdays_match(current.days, weekday):
                yield current

    def _generate_phases(
        self, start: Union[str, dt.date], end: Union[str, dt.date],
        _for_status: bool = False
    ) -> Generator[Union[Phase, dict], None, None]:
        """Yield Phase objects for every session between `start` and `end`.

        When `_for_status` is True, range validation is skipped and the
        holidays dict is yielded first (consumed by `status()`).
        """
        start, end = validate_range_args(
            validate_date_arg("start", start),
            validate_date_arg("end", end),
        )
        if not _for_status:
            self._in_range(start, end)

        phase_types_dict = PhaseType.as_dict()

        # Get required global data. We start a few days early to catch
        # phases that began before `start` but spill into it (offset days).
        offset_start = max(start - dt.timedelta(days=MAX_OFFSET_DAYS), self.first_available_date)
        all_schedules = self.list_schedules()
        holidays = self.list_holidays(offset_start, end, as_dict=True)
        if _for_status:
            yield holidays

        # Iterate through all dates generating phases
        current_date = offset_start
        while current_date <= end:
            current_weekday = current_date.weekday()

            # Starts with all schedules
            schedules = all_schedules

            # Filter schedule group based on holiday if any
            schedule_group, fallback = self._pick_schedule_group(current_date, holidays)
            schedules = self._filter_schedule_group(schedule_group, schedules)

            # Filters what is in force or for expected season
            schedules = self._filter_inforce(current_date, schedules)
            schedules = self._filter_season(current_date, schedules)

            # Save for fallback and filter weekdays
            before_weekdays = list(schedules)
            found_schedules = list(self._filter_weekdays(current_weekday, before_weekdays))

            # Consider fallback if needed: walk backwards through weekdays
            # until a schedule matches or we come full circle.
            if not found_schedules and fallback:
                fallback_weekday = 6 if current_weekday == 0 else current_weekday - 1
                fallback_schedules = []
                while not fallback_schedules and fallback_weekday != current_weekday:
                    fallback_schedules = list(
                        filter(
                            lambda s: weekdays_match(s.days, fallback_weekday),
                            before_weekdays,
                        ),
                    )
                    fallback_weekday = (
                        6 if fallback_weekday == 0 else fallback_weekday - 1
                    )
                found_schedules = fallback_schedules

            # Sort based on start time and duration; primary sessions win ties
            found_schedules = sorted(
                found_schedules,
                key=lambda s: (s.start, s.duration, s.phase_type != "Primary Trading Session"),
            )

            # Generate phases for current date
            for current_schedule in found_schedules:
                start_date = current_date
                end_date = current_date + dt.timedelta(days=current_schedule.offset_days)

                # Filter out phases not finishing after start because we
                # began looking a few days ago to cover offset days
                if end_date >= start:
                    start_datetime = dt.datetime.combine(start_date, current_schedule.start)
                    end_datetime = dt.datetime.combine(end_date, current_schedule.end)
                    zoneinfo_obj = ZoneInfo(current_schedule.timezone)
                    start_datetime = start_datetime.replace(tzinfo=zoneinfo_obj)
                    end_datetime = end_datetime.replace(tzinfo=zoneinfo_obj)

                    phase_type = phase_types_dict[current_schedule.phase_type]
                    yield Phase(
                        dict(
                            phase_type=current_schedule.phase_type,
                            phase_name=current_schedule.phase_name,
                            phase_memo=current_schedule.phase_memo,
                            status=phase_type.status,
                            settlement=phase_type.settlement,
                            start=start_datetime,
                            end=end_datetime,
                        )
                    )

            # Next date, please
            current_date += dt.timedelta(days=1)

    @db.check_access
    def generate_phases(
        self, start: Union[str, dt.date], end: Union[str, dt.date]
    ) -> Generator[Phase, None, None]:
        """Public, access-checked wrapper around `_generate_phases`."""
        return self._generate_phases(start, end, _for_status=False)

    @classmethod
    def list_all(cls, sub_set="*") -> list["Market"]:
        """List markets whose FinID matches `sub_set` ('*' is a wildcard)."""
        validate_str_arg("sub_set", sub_set)
        sub_set = sub_set.upper().replace("*", "%")
        return [cls(r) for r in db.query(cls.table()).filter(
            cls.table().c.fin_id.like(sub_set)
        )]

    def _last_holiday(self):
        """Return the latest-dated holiday of this market.

        FIX: previously queried `self.table()` (the markets table, which has
        no `date` column and no per-date rows) instead of the holidays table.
        """
        table = MarketHoliday.table()
        result = db.query(table).filter(
            table.c.fin_id == self.fin_id
        ).order_by(
            table.c.date.desc()
        ).first()
        return MarketHoliday(result)

    def list_holidays(
        self, start: Union[str, dt.date], end: Union[str, dt.date], as_dict: bool = False
    ) -> Union[list["MarketHoliday"], dict[dt.date, "MarketHoliday"]]:
        """List holidays between `start` and `end`; optionally keyed by date."""
        start, end = validate_range_args(
            validate_date_arg("start", start),
            validate_date_arg("end", end),
        )
        table = MarketHoliday.table()
        result = db.query(table).filter(
            table.c.fin_id == self.fin_id,
            table.c.date >= start,
            table.c.date <= end
        )
        if as_dict:
            dateix = list(table.c.keys()).index("date")
            return {
                r[dateix]: MarketHoliday(r) for r in result
            }

        return [MarketHoliday(r) for r in result]

    @db.check_access
    def list_schedules(self) -> list["Schedule"]:
        """All schedule rows for this market, deterministically ordered."""
        schedules = db.query(Schedule.table()).filter(
            Schedule.table().c.fin_id == self.fin_id
        ).order_by(
            Schedule.table().c.schedule_group.asc(),
            Schedule.table().c.in_force_start_date.asc(),
            Schedule.table().c.season_start.asc(),
            Schedule.table().c.start.asc(),
            Schedule.table().c.end.asc()
        )
        return [Schedule(r) for r in schedules]

    @classmethod
    def is_available(cls, identifier: str) -> bool:
        """
        Return True or False to show if a mic or finid can be accessed
        under the current plan.
        """
        try:
            cls.get(identifier)
            return True
        except (NoAccess, NotCovered, MICDoesNotExist):
            return False

    @classmethod
    def is_covered(cls, finid: str) -> bool:
        """
        Returns True or False showing if tradinghours provides data for the Market.
        This differs from is_available because is_covered does not mean that the user
        has access to it under their current plan.
        """
        table = db.table("covered_markets")
        found = db.query(table).filter(
            table.c.fin_id == finid
        ).one_or_none()
        return found is not None

    @classmethod
    def _get_by_finid(cls, finid: str, following=None) -> Union[None, tuple]:
        """Fetch the raw market row, raising NoAccess/NotCovered if absent."""
        found = db.query(cls.table()).filter(
            cls.table().c.fin_id == finid
        ).one_or_none()
        if found is not None:
            return found

        # if not found, check if it is covered at all and raise appropriate Exception
        following = f" (replaced: '{following}')" if following else ""
        if cls.is_covered(finid):
            raise NoAccess(
                f"\n\nThe market '{finid}'{following} is supported but not available on your current plan."
                f"\nPlease learn more or contact sales at https://www.tradinghours.com/data"
            )
        raise NotCovered(
            f"The market '{finid}'{following} is currently not available."
        )

    @classmethod
    def get_by_finid(cls, finid: str, follow=True) -> Union[None, "Market"]:
        """Get a Market by FinID, following `replaced_by` chains when `follow`."""
        finid = validate_finid_arg(finid)
        found = cls._get_by_finid(finid)

        while found and (found_obj := cls(found)).replaced_by and follow:
            found = cls._get_by_finid(found_obj.replaced_by, following=finid)

        return found_obj

    @classmethod
    def get_by_mic(cls, mic: str, follow=True) -> "Market":
        """Get a Market by MIC via the mic->finid mapping table."""
        mic = validate_mic_arg(mic)
        mapping = db.query(MicMapping.table()).filter(
            MicMapping.table().c.mic == mic
        ).one_or_none()
        if mapping:
            return cls.get_by_finid(mapping.fin_id, follow=follow)
        raise MICDoesNotExist(f"The MIC {mic} could not be matched with a FinID")

    @classmethod
    def get(cls, identifier: str, follow=True) -> "Market":
        """Get a Market by FinID (contains '.') or MIC."""
        identifier = validate_str_arg("identifier", identifier)
        if "." in identifier:
            found = cls.get_by_finid(identifier, follow=follow)
        else:
            found = cls.get_by_mic(identifier, follow=follow)
        return found

    @db.check_access
    def status(self, datetime: Union[dt.datetime, None] = None) -> "MarketStatus":
        """
        Will return the status of the market.

        If `datetime` is None, it will be the current status, otherwise the
        status at the given `datetime`, which needs to be timezone aware.
        """
        if datetime is None:
            datetime = dt.datetime.now(dt.timezone.utc)
        elif type(datetime) is not dt.datetime or datetime.tzinfo is None:
            raise ValueError("You need to pass a timezone aware datetime.")

        date = datetime.date()
        self._in_range(date)
        # arbitrarily extending end so that there are definitely following phases
        end = min(date + dt.timedelta(days=5), self.last_available_date)

        current, nxt = [], []
        is_primary = False
        phase_generator = self._generate_phases(start=date, end=end, _for_status=True)
        # first item yielded in _for_status mode is the holidays dict
        holidays = next(phase_generator)
        for phase in phase_generator:
            if not is_primary and phase.start <= datetime < phase.end:
                # is_open means that it's a primary phase
                # and we just take that one
                if phase.is_open:
                    is_primary = True
                    current = phase
                else:
                    current.append(phase)
            elif datetime < phase.start:
                nxt.append(phase)

        # if there is no primary session we need to take the one starting first
        # if there are no sessions at all, we set current to None
        if not is_primary:
            if current:
                current = sorted(current, key=lambda p: p.start)[0]
            else:
                current = None

        # set until
        if current:
            # check if there are any overlapping phases
            overlapping = [phase for phase in nxt if phase.start < current.end]
            if overlapping:
                # take the first one that overlaps
                until = sorted(overlapping, key=lambda p: p.start)[0].start
            else:
                until = current.end
        else:
            until = sorted(nxt, key=lambda p: p.start)[0].start

        # set next_bell
        if is_primary:
            next_bell = current.end
        else:
            next_bell = None
            for phase in nxt:
                if phase.is_open:
                    next_bell = phase.start
                    break

        # set reason
        reason = ""
        holiday = holidays.get(date)
        if holiday:
            reason += holiday.holiday_name
        if current:
            reason += f" - {current.phase_type}"
        if holiday and holiday.schedule.lower() != "regular":
            reason += f" ({holiday.schedule})"

        reason = reason.strip(" -") if reason else None
        return MarketStatus({
            "status": current.status if current else "Closed",
            "reason": reason,
            "until": until,
            "next_bell": next_bell,
            "phase": current,
            "market": self
        })
from typing import Union
from pprint import pprint
from sqlalchemy import func
import datetime as dt

from .store import db
from .validate import validate_str_arg, validate_int_arg, validate_range_args, validate_date_arg
from .exceptions import MissingDefinitionError


class BaseModel:
    """
    Receives records from the database and sets the instance attributes.
    The attributes match the column names and some classes have additional
    properties.

    Besides accessing the data through attributes like `market.exchange_name`,
    you can also access `data` or `to_dict`. See their docstrings to see how
    they differ.
    """
    _table: Union[str, None] = None          # database table name; None for in-memory models
    _string_format: str = ""                 # user-overridable __str__ template
    _original_string_format: str = ""        # default __str__ template per subclass
    _fields: list = []  # columns in database
    _extra_fields: list = []  # properties of python class
    _access_levels: set = set()

    @classmethod
    def table(cls) -> "Table":
        # Resolve the SQLAlchemy Table object for this model's table name.
        return db.table(cls._table)

    @classmethod
    def fields(cls):
        # All user-visible field names: database columns plus python properties.
        return cls._fields + cls._extra_fields

    def __init__(self, data: Union[dict, tuple]):
        # Rows may arrive as plain tuples from SQLAlchemy Core; map them to
        # a dict using the table's column order.
        if not isinstance(data, dict):
            data = {
                col_name: value for col_name, value in zip(
                    self.table().c.keys(), data
                )
            }

        self._data = {}
        _fields = []
        for key, value in data.items():
            if key != "id":
                if key == "observed":
                    # deal with the fact that MySQL doesn't have a boolean
                    # field and the value is going to be 0 or 1 because we
                    # are using sqlalchemy's Core API
                    value = bool(value)
                setattr(self, key, value)
                self._data[key] = value
                _fields.append(key)

        # Lazily discover and cache the field lists on the CLASS the first
        # time an instance is built: _fields from this row's columns,
        # _extra_fields from properties declared on the subclass.
        if not self.__class__._fields:
            exclude = set(dir(BaseModel))
            _extra_fields = []  # properties
            for att in dir(self):
                if (att[0] != "_" and
                    att not in exclude
                    and isinstance(getattr(self.__class__, att, None), property)
                ):
                    _extra_fields.append(att)

            self.__class__._fields = _fields
            self.__class__._extra_fields = _extra_fields

    @property
    def data(self) -> dict:
        """
        Returns a dictionary with the values exactly as they were in the
        database, excluding properties like .is_open. Keys are exact matches to the
        column names of the matching table.
        """
        return {f: getattr(self, f) for f in self._fields}

    def to_dict(self) -> dict:
        """
        Returns a dictionary with the values as they are displayed to the user, including
        properties like .is_open, which means that there are keys present that don't exist
        in the matching table.
        """
        return {f: getattr(self, f) for f in self.fields()}

    def pprint(self) -> None:
        # Pretty-print all fields, stringifying non-numeric values so dates
        # and times render readably.
        dct = {}
        for f in self.fields():
            val = getattr(self, f)
            if not isinstance(val, int) and not isinstance(val, float) and val is not None:
                val = str(val)
            dct[f] = val
        pprint(dct, sort_dicts=False)

    def __repr__(self) -> str:
        class_name = self.__class__.__name__
        return f"{class_name}({self._data!r})"

    @classmethod
    def get_string_format(cls):
        # User-set format wins; fall back to the subclass default.
        return cls._string_format or cls._original_string_format

    @classmethod
    def set_string_format(cls, string_format: str, prefix_class: bool = False):
        # Override the __str__ template; optionally prefix the class name.
        if prefix_class:
            string_format = f"{cls.__name__}: " + string_format
        cls._string_format = string_format

    @classmethod
    def reset_string_format(cls):
        cls._string_format = cls._original_string_format

    def __str__(self):
        return self.get_string_format().format(**self.to_dict())


class MarketHoliday(BaseModel):
    # Row of the holidays table for a specific market (fin_id + date).
    _table = "holidays"
    _original_string_format = "MarketHoliday: {fin_id} {date} {holiday_name}"

    def __init__(self, data):
        super().__init__(data)
        self.fin_id = self._data["fin_id"]
        self.date = self._data["date"]
        self.holiday_name = self._data["holiday_name"]
        self.schedule = self._data["schedule"]
        self.settlement = self._data["settlement"]
        self.observed = self._data["observed"]
        self.memo = self._data["memo"]
        self.status = self._data["status"]

    @property
    def has_settlement(self):
        # settlement column holds 'Yes'/'No' strings in the database.
        return self.settlement == 'Yes'

    @property
    def is_open(self):
        return self.status == 'Open'


class MicMapping(BaseModel):
    # Maps a MIC code to the FinID used internally.
    _table = "mic_mapping"
    _original_string_format = "MicMapping: {mic} {fin_id}"

    def __init__(self, data):
        super().__init__(data)
        self.mic = self._data["mic"]
        self.fin_id = self._data["fin_id"]
class CurrencyHoliday(BaseModel):
    """Holiday entry for a settlement currency."""

    _table = "currency_holidays"
    _original_string_format = "CurrencyHoliday: {currency_code} {date} {holiday_name}"

    def __init__(self, data):
        super().__init__(data)
        # Bind each column of the currency_holidays table as an attribute.
        for column in (
            "currency_code", "date", "holiday_name",
            "settlement", "observed", "memo",
        ):
            setattr(self, column, self._data[column])


class PhaseType(BaseModel):
    """Lookup row describing the meaning of a schedule phase type."""

    _table = "phases"

    def __init__(self, data):
        super().__init__(data)
        for column in ("name", "status", "settlement", "closing_price"):
            setattr(self, column, self._data[column])

    @classmethod
    def as_dict(cls) -> dict[str, "PhaseType"]:
        """Map phase-type name -> PhaseType for every row in the table."""
        mapping = {}
        for row in db.query(cls.table()):
            phase_type = cls(row)
            mapping[phase_type.name] = phase_type
        return mapping

    @property
    def has_settlement(self):
        # Database stores 'Yes'/'No' strings.
        return self.settlement == 'Yes'

    @property
    def is_open(self):
        return self.status == 'Open'


class Schedule(BaseModel):
    """One schedule row: a recurring session window for a market."""

    _table = "schedules"
    _original_string_format = "Schedule: {fin_id} ({schedule_group}) {start} - {end_with_offset} {days} {phase_type}"

    def __init__(self, data):
        super().__init__(data)
        # Bind every schedules-table column as an attribute.
        for column in (
            "fin_id", "schedule_group", "schedule_group_memo", "timezone",
            "phase_type", "phase_name", "phase_memo", "days", "start", "end",
            "offset_days", "duration", "min_start", "max_start", "min_end",
            "max_end", "in_force_start_date", "in_force_end_date",
            "season_start", "season_end",
        ):
            setattr(self, column, self._data[column])

    @property
    def end_with_offset(self):
        """End time, suffixed with ``+N`` when the phase ends N days later."""
        suffix = f" +{self.offset_days}" if self.offset_days else " "
        return str(self.end) + suffix

    @classmethod
    def is_group_open(cls, group):
        """Only the 'regular' schedule group counts as an open group."""
        return group.lower() == "regular"

    @property
    def has_season(self) -> bool:
        """True when both season boundaries are present and non-blank."""
        start_label = (self.season_start or "").strip()
        end_label = (self.season_end or "").strip()
        return bool(start_label) and bool(end_label)

    def is_in_force(self, start: dt.date, end: dt.date) -> bool:
        """Whether this schedule's in-force window intersects [start, end]."""
        first = self.in_force_start_date
        last = self.in_force_end_date
        if not first and not last:
            # No window at all: always in force.
            return True
        if first is None:
            return last >= start
        if last is None:
            return first <= end
        return first <= end and last >= start


class SeasonDefinition(BaseModel):
    """Concrete date of a named season (e.g. DST switch) for a year."""

    _table = "season_definitions"
    _original_string_format = "SeasonDefinition: {date} {season}"

    def __init__(self, data):
        super().__init__(data)
        for column in ("season", "year", "date"):
            setattr(self, column, self._data[column])

    @classmethod
    def get(cls, season: str, year: int) -> "SeasonDefinition":
        """Look up a season by (case-insensitive) name and year."""
        season = validate_str_arg("season", season)
        year = validate_int_arg("year", year)

        table = cls.table()
        row = db.query(table).filter(
            func.lower(table.c["season"]) == season.lower(),
            table.c["year"] == year
        ).one_or_none()

        if not row:
            raise MissingDefinitionError(f"missing definition {season} - {year}")
        return cls(row)


class Phase(BaseModel):
    """A concrete, timezone-aware session occurrence (not stored in a table)."""

    _table = None
    _original_string_format = "Phase: {start} - {end} {phase_type}"

    def __init__(self, data):
        super().__init__(data)
        for column in (
            "phase_type", "phase_name", "phase_memo",
            "status", "settlement", "start", "end",
        ):
            setattr(self, column, self._data[column])
        # Remember the tz of the (aware) start datetime for display.
        self._timezone = str(self.start.tzinfo)

    @property
    def timezone(self):
        return self._timezone

    @property
    def has_settlement(self):
        return self.settlement == 'Yes'

    @property
    def is_open(self):
        return self.status == 'Open'


class MarketStatus(BaseModel):
    """Snapshot of a market's state at a point in time (not table-backed)."""

    _table = None
    _original_string_format = "MarketStatus: {status}"

    def __init__(self, data):
        super().__init__(data)
        for column in ("status", "reason", "until", "next_bell", "phase", "market"):
            setattr(self, column, self._data[column])
# NOTE(review): chunk starts mid-file; the leading "from " of this import and the
# earlier imports (datetime as dt; os/csv/codecs/json; pathlib.Path; sqlalchemy
# names create_engine, MetaData, Table, Column, Integer, Text, Date, Time,
# Boolean, DateTime, String, func, sessionmaker) are above this view.
contextlib import contextmanager
from typing import Union
import functools
from enum import Enum

from .config import main_config
from .client import get_remote_timestamp as client_get_remote_timestamp
from .util import tprefix, tname, clean_name, timed_action
from .exceptions import DBError, NoAccess

class AccessLevel(Enum):
    # Plan tiers of the TradingHours data subscription. The value is the string
    # stored in the admin table; a missing/empty admin table maps to no_access.
    full = "full"
    no_currencies = "no_currencies"
    only_holidays = "only_holidays"
    no_access = None

class DB:
    """Singleton facade over the SQLAlchemy engine, metadata and session."""
    _instance = None
    # column name -> (SQLAlchemy column type, converter applied to raw csv strings)
    _types = {
        "date": (Date, dt.date.fromisoformat),
        "observed": (Boolean, lambda v: v == "OBS"),
        "start": (Time, dt.time.fromisoformat),
        "end": (Time, dt.time.fromisoformat),
        "offset_days": (Integer, int),
        "duration": (Integer, int),
        "min_start": (Time, dt.time.fromisoformat),
        "max_start": (Time, dt.time.fromisoformat),
        "min_end": (Time, dt.time.fromisoformat),
        "max_end": (Time, dt.time.fromisoformat),
        "in_force_start_date": (Date, dt.date.fromisoformat),
        "in_force_end_date": (Date, dt.date.fromisoformat),
        "year": (Integer, int),
        # Everything else is Text
    }
    _default_type = (Text, str)
    # public API method -> set of access levels allowed to call it (check_access)
    _access = {
        "Currency.list_all" : {AccessLevel.full},
        "Currency.get": {AccessLevel.full},
        "Currency.is_covered": {AccessLevel.full},
        "Market.list_schedules": {AccessLevel.full, AccessLevel.no_currencies},
        "Market.generate_phases": {AccessLevel.full, AccessLevel.no_currencies},
        "Market.status": {AccessLevel.full, AccessLevel.no_currencies}
    }
    # access level -> table names that the plan does NOT include (see .table())
    _no_model_access = {
        AccessLevel.full: set(),
        AccessLevel.no_currencies: {"currencies", "currency_holidays"},
        AccessLevel.only_holidays: {"currencies", "currency_holidays", "phases", "schedules", "season_definitions"}
    }

    @classmethod
    def set_no_unicode(cls):
        """
        MySQL databases may not be able to handle the full unicode set by default. So if a
        mysql db is used and the ingestion fails, it is attempted again with the following
        conversion, which replaces unicode characters with '?'.
        """
        cls._default_type = (
            Text,
            lambda s: str(s).encode("ascii", "replace").decode("ascii")
        )

    @classmethod
    def get_type(cls, col_name):
        # SQLAlchemy column type for col_name (Text unless listed in _types).
        return cls._types.get(col_name, cls._default_type)[0]

    @classmethod
    def clean(cls, col_name: str, value: Union[bool, str, None]) -> Union[bool, str, None]:
        """
        Used to map values from the csv files to what they should be in the database
        For observed columns 'OBS' is True, anything else is False
        For other columns, empty strings should be converted to None
        """
        converter = cls._types.get(col_name, cls._default_type)[1]
        if col_name == "observed":
            # run the converter even for empty strings so "" becomes False, not None
            return converter(value)

        return converter(value) if value else None

    def __new__(cls):
        # Classic singleton: build engine/metadata/session factory once, then
        # always hand back the same instance.
        if cls._instance is None:
            cls._instance = self = super().__new__(cls)
            self.db_url = main_config.get("data", "db_url")
            try:
                self.engine = create_engine(self.db_url)
            except ModuleNotFoundError as e:
                raise ModuleNotFoundError(
                    "You seem to be missing the required dependencies to interact with your chosen database. "
                    "Please run `pip install tradinghours[mysql]` or `pip install tradinghours[postgres]` if "
                    "you are trying to access mysql or postgres, respectively. Consult the docs for more information."
                ) from e

            self.metadata = MetaData()
            try:
                self.update_metadata()
            except Exception:
                # remember the failure; .ready() reports it to the user later
                self._failed_to_access = True

            self.Session = sessionmaker(bind=self.engine)
            self._access_level = None

        return cls._instance

    def table(self, table_name: str) -> Table:
        """Return the reflected Table, translating a bare KeyError into a
        user-facing DBError / NoAccess where the cause can be determined."""
        try:
            return self.metadata.tables[tname(table_name)]
        except KeyError:
            # using self._access_level instead of property to avoid an infinite recursion
            # when running on a new database without an access_level. If ._access_level is None,
            # it would check if table_name is in an empty set, which would make it raise a KeyError,
            # which is handled properly in .access_level property.
            if self._access_level == AccessLevel.no_access:
                raise DBError(f"{table_name} could not be found. Are you sure you ran `tradinghours import`?")

            if table_name in self._no_model_access.get(self._access_level, set()):
                raise NoAccess(
                    f"\nIf you are sure you ran `tradinghours import`, {table_name} is not available on your current plan."
                    f"\nPlease learn more or contact sales at https://www.tradinghours.com/data"
                )
            raise

    def ready(self) -> None:
        # Raise DBError if the engine was never reachable or no data was ingested.
        if getattr(self, "_failed_to_access", True):
            raise DBError("Could not access database")

        if tname("admin") not in self.metadata.tables:
            raise DBError("Database not prepared. Did you run `tradinghours import`?")

    def reset_session(self):
        # Roll back and dispose of the cached session, then start a fresh one.
        if hasattr(self, "_session"):
            self._session.rollback()
            self._session.close()

        self._session = self.Session()

    @contextmanager
    def session(self):
        # Lazily create, then keep reusing, a single cached session. The
        # context manager deliberately does not close it (see reset_session).
        if hasattr(self, "_session"):
            s = self._session
        else:
            s = self._session = self.Session()
        yield s

    def execute(self, *query):
        # Execute and commit in one step.
        with self.session() as s:
            result = s.execute(*query)
            s.commit()
            return result

    def query(self, *query):
        with self.session() as s:
            return s.query(*query)

    def get_local_timestamp(self):
        # admin table is not present when `tradinghours import`
        # is run for the first time on a given database
        if tname("admin") not in self.metadata.tables:
            return

        table = self.table("admin")
        with self.session() as s:
            result = s.query(
                table.c["data_timestamp"]).order_by(
                table.c["id"].desc()
            ).limit(1).scalar()
            if result:
                return result.replace(tzinfo=dt.timezone.utc)

    @property
    def access_level(self) -> AccessLevel:
        # Cached: read the most recent access_level row from the admin table once.
        if self._access_level is None:
            try:
                table = self.table("admin")
            except KeyError:
                # This should only be the case when ingesting into a completely new
                # database, that doesn't have an admin table yet.
                level = None
            else:
                level = self.query(table.c.access_level).order_by(
                    table.c.id.desc()
                ).limit(1).scalar()
            self._access_level = AccessLevel(level)

        return self._access_level

    @classmethod
    def check_access(cls, method):
        """
        Used as a decorator of Currency and Market methods,
        to check whether the user has access to the data requested.
        """
        # NOTE(review): `not_has_access` is computed at decoration (import) time,
        # so a change of access level after import is not seen by wrapped methods.
        method_name = method.__qualname__
        not_has_access = db.access_level not in cls._access[method_name]

        @functools.wraps(method)
        def new_method(*args, **kwargs):
            if not_has_access:
                raise NoAccess(f"\n\n{method_name} is supported but not available on your current plan."
                               f"\nPlease learn more or contact sales at https://www.tradinghours.com/data")
            return method(*args, **kwargs)

        return new_method

    def needs_download(self):
        # True when there is no local data yet or the remote data is newer.
        if local := self.get_local_timestamp():
            remote_timestamp = client_get_remote_timestamp()
            return remote_timestamp > local
        return True

    def update_metadata(self):
        # Re-reflect the current database schema into self.metadata.
        self.metadata.clear()
        self.metadata.reflect(bind=self.engine)
        self._failed_to_access = False

    def get_num_covered(self) -> tuple[int, int]:
        # (number of covered markets, number of covered currencies)
        table = db.table("covered_markets")
        num_markets = self.query(func.count()).select_from(table).scalar()
        table = db.table("covered_currencies")
        num_currencies = self.query(func.count()).select_from(table).scalar()
        return num_markets, num_currencies

    def get_num_permanently_closed(self) -> int:
        table = db.table("markets")
        num = self.query(func.count()).filter(
            table.c.permanently_closed.isnot(None)
        ).scalar()
        return num

########################################################
# Singleton db instance used across the entire project #
########################################################
db = DB()


# noinspection PyMethodMayBeStatic
class Writer:
    """Rebuilds the thstore_* tables from the downloaded csv/json files."""

    def __init__(self):
        self.remote = Path(main_config.get("data", "remote_dir"))

    def prepare_ingestion(self):
        """Preserves the last 9 records from the thstore_admin table,
        drops the table, recreates it, and re-inserts the 9 records."""
        table_name = tname("admin")
        last_9_records = []
        if table_name not in db.metadata.tables:
            return last_9_records

        table = db.metadata.tables[table_name]
        columns_to_select = [col for col in table.c.values() if col.name != 'id']
        result = db.execute(
            table.select()
            .with_only_columns(*columns_to_select)
            .order_by(table.c['id'].desc())
            .limit(9)
        )

        # Fetch all results
        last_9_records = result.fetchall()
        # oldest-first dicts keyed by column name, without the id column
        last_9_records = [
            {col.name: value for col, value in zip(columns_to_select, row)}
            for row in last_9_records[::-1]
        ]
        table.drop(db.engine)
        db.update_metadata()

        return last_9_records

    def drop_th_tables(self):
        """Drops all tables from the database that start with 'thstore_'."""
        # Iterate over all tables in the metadata
        for table_name in db.metadata.tables:
            if table_name.startswith(tprefix):
                table = db.metadata.tables[table_name]
                table.drop(db.engine)

        # Clear the metadata cache after dropping tables
        db.update_metadata()
        # print(f"Dropped all tables starting with {tprefix}.")

    def create_table_from_csv(self, file_path, table_name):
        """Creates a SQL table dynamically from a CSV file."""

        with codecs.open(file_path, mode='r', encoding='utf-8-sig') as file:
            reader = csv.reader(file)
            # Get the columns (first row of the CSV)
            columns = next(reader)
            columns = [clean_name(col_name) for col_name in columns]

            # Define the SQL table dynamically with all columns as Strings
            table = Table(
                table_name,
                db.metadata,
                Column('id', Integer, primary_key=True),
                *(Column(col_name, DB.get_type(col_name)) for col_name in columns)
            )
            batch = []
            for i, row in enumerate(reader):
                values = {col_name: DB.clean(col_name, value) for col_name, value in zip(columns, row)}
                batch.append(values)

            table.create(db.engine)
            # NOTE(review): if the csv has no data rows, `batch` is empty —
            # confirm executemany with an empty list is a no-op on all backends.
            db.execute(table.insert(), batch)

    def create_table_from_json(self, file_path, table_name):
        """
        This method takes a filepath to a json file that should hold a list of dictionaries.
        It is probably redundant, but it makes sure that the table created is flexible in regard to
        the content of the dictionaries by following these steps:

        # find all keys that exist
        # filter out keys that don't exist in every dictionary
        # clean these keys using clean_name
        # create a table with the cleaned keys
        # insert a batch of fields that exist in every dictionary
        """
        with open(file_path, "r") as data:
            data = json.load(data)

        # count in how many records each key occurs
        keys = {}
        len_data = 0
        for dct in data:
            len_data += 1
            for k in dct:
                keys[k] = keys.setdefault(k, 0) + 1

        # keep only keys present in every record; (raw key, cleaned sql name)
        columns = [(k, clean_name(k)) for k, n in keys.items() if n == len_data]
        table = Table(
            table_name,
            db.metadata,
            Column('id', Integer, primary_key=True),
            *(Column(col_name, DB.get_type(col_name)) for k, col_name in columns)
        )
        batch = []
        for dct in data:
            batch.append({clean_k: DB.clean(clean_k, dct.get(k, "")) for k, clean_k in columns})

        table.create(db.engine)
        db.execute(table.insert(), batch)

    def create_admin(self, access_level, last_9_records):
        # Create the admin bookkeeping table and record this ingestion,
        # re-inserting the preserved history rows first.
        version_file = self.remote / "VERSION.txt"
        timestamp_format = "Generated at %a, %d %b %Y %H:%M:%S %z"
        content = version_file.read_text()
        line = content.splitlines()[0]
        data_timestamp = dt.datetime.strptime(line, timestamp_format)

        table = Table(
            tname("admin"),
            db.metadata,
            Column('id', Integer, primary_key=True),
            Column('data_timestamp', DateTime, nullable=False),
            Column('access_level', String(255), nullable=False),
            Column('download_timestamp', DateTime, nullable=False),
        )
        table.create(db.engine)
        if last_9_records:
            db.execute(table.insert(), last_9_records)

        db.execute(
            table.insert().values(
                data_timestamp=data_timestamp,
                access_level=access_level.value,
                # stored naive, but the value is UTC
                download_timestamp=dt.datetime.now(dt.timezone.utc).replace(tzinfo=None)
            )
        )
        db.update_metadata()

    def _ingest_all(self, change_message):
        """Iterates over CSV files in the remote directory and ingests them."""
        db.reset_session()
        last_9_admin_records = self.prepare_ingestion()
        self.drop_th_tables()

        csv_dir = self.remote / "csv"
        # Iterate over all CSV files in the directory
        downloaded_csvs = os.listdir(csv_dir)

        for csv_file in downloaded_csvs:
            if csv_file.endswith('.csv'):
                file_path = csv_dir / csv_file
                table_name = os.path.splitext(csv_file)[0]
                table_name = tname(clean_name(table_name))
                change_message(f" {table_name}")
                self.create_table_from_csv(file_path, table_name)

        for json_file in ("covered_markets", "covered_currencies"):
            table_name = tname(json_file)
            change_message(f" {table_name}")
            self.create_table_from_json(
                self.remote / f"{json_file}.json",
                table_name
            )

        db.update_metadata()

        # Which csv files were delivered tells us the plan's access level.
        if "schedules.csv" not in downloaded_csvs:
            access_level = AccessLevel.only_holidays
        elif "currencies.csv" not in downloaded_csvs:
            access_level = AccessLevel.no_currencies
        else:
            access_level = AccessLevel.full

        self.create_admin(access_level, last_9_admin_records)

    def ingest_all(self) -> bool:
        # Returns True when ingestion succeeded on the first try, False when
        # the MySQL ascii fallback (set_no_unicode) had to be used.
        with timed_action("Ingesting") as (change_message, start_time):
            try:
                self._ingest_all(change_message)
                return True
            except Exception as e:
                if db.engine.dialect.name != "mysql" or "Incorrect string value" not in str(e):
                    raise

        # Deal with the problem that MySQL may not be able to
        # handle the full
        # unicode set and then try again (comment continued from previous chunk)
        print("\nHandling unicode problem, warning will follow")
        db.set_no_unicode()
        with timed_action("Ingesting") as (change_message, start_time):
            self._ingest_all(change_message)
        return False


"""
full = all

only_holidays = no schedules

no_currencies = schedules but no currencies
"""

# ===================== src/tradinghours/util.py =====================
import re, time
from contextlib import contextmanager
from threading import Thread, Event

from zoneinfo import TZPATH
import importlib.metadata as metadata
import requests, warnings

from .exceptions import MissingTzdata
from .config import main_config

# prefix applied to every database table name (see tname below)
tprefix = main_config.get("data", "table_prefix")


@contextmanager
def timed_action(message: str):
    """Print `message` followed by a dot every second until the with-block
    exits, then print the elapsed time. Yields (change_message, start) so the
    caller can swap in a new progress message mid-action."""
    start = time.time()
    print(f"{message}...", end="", flush=True)

    done = False
    change_message_event = Event()
    # single-element lists so the nested closures can mutate the contents
    current_message = [message]
    last_message = [message]

    def print_dots():
        # background printer: new message on change, one dot per second otherwise
        last_check = time.time()
        while not done:
            if change_message_event.is_set() and current_message != last_message:
                # Move to the next line and print the new message
                print(f"\n{current_message[0]}...", end="", flush=True)
                last_message[0] = current_message[0]
                change_message_event.clear()

            if time.time() - last_check > 1:
                print(".", end="", flush=True)
                last_check = time.time()
            time.sleep(0.05)

    thread = Thread(target=print_dots)
    thread.daemon = True
    thread.start()

    # Function to change the message from within the main block
    def change_message(new_message):
        current_message[0] = new_message
        change_message_event.set()

    yield change_message, start

    # NOTE(review): no try/finally around the yield — if the with-body raises,
    # the daemon thread is never stopped/joined and the timing line is skipped.
    elapsed = time.time() - start
    done = True
    thread.join()
    print(f" ({elapsed:.3f}s)", flush=True)


def tname(table_name):
    # Fully qualified table name: configured prefix + logical name.
    return f"{tprefix}{table_name}"

def clean_name(name):
    # Normalize csv/json column names to lowercase sql-safe identifiers.
    name = name.lower().replace('"', '').replace("finid", "fin_id")
    return re.sub(r'[^a-zA-Z0-9_]', '_', name)


# "Mon" -> 0 ... "Sun" -> 6
WEEKDAYS = {
    d: i for i, d in enumerate(["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"])
}

def weekdays_match(weekday_set, weekday):
    """True if `weekday` (0=Mon..6=Sun) falls inside `weekday_set`, a string
    like "Mon-Fri" or "Sat,Sun"; ranges may wrap past Sunday (e.g. "Sat-Mon")."""
    for period_str in weekday_set.split(","):
        if "-" in period_str:
            day_range = [WEEKDAYS[x] for x in period_str.split("-")]
            if weekday in day_range:
                # matches one of the two endpoints
                return True

            start_day, end_day = day_range
            day = start_day
            # walk forward (wrapping Sun -> Mon) until the end of the range
            while day != end_day:
                if day == 6:
                    day = 0
                else:
                    day += 1
                if weekday == day:
                    return True

        elif weekday == WEEKDAYS[period_str]:
            return True

    return False


def _get_latest_tzdata_version():
    # Latest tzdata release on PyPI, or None on network failure / non-200.
    try:
        response = requests.get(f"https://pypi.org/pypi/tzdata/json")
    except requests.exceptions.RequestException:
        return None

    if response.status_code == 200:
        return response.json()["info"]["version"]


def check_if_tzdata_required_and_up_to_date():
    """
    required installed # check for version
    required notinstalled # raise error
    notrequired installed # doesn't matter
    notrequired notinstalled # doesn't matter
    [don't check]

    if required (no tzpath)
        get version
        if not version:
            raise Error # required notinstalled
        else:
            check version/give warning # required installed
    else (tzpath):

    """
    if not main_config.getboolean("control", "check_tzdata"):
        return False

    # an empty TZPATH means the environment ships no timezone database,
    # so the tzdata package is required
    required = len(TZPATH) == 0
    if required:
        try:
            installed_version = metadata.version('tzdata')
        except metadata.PackageNotFoundError:
            raise MissingTzdata("\nYour environment does not provide timezone data and\n"
                                "you don't have tzdata installed, please run:\n"
                                "   pip install tzdata") from None

        latest_version = _get_latest_tzdata_version()
        if latest_version is None:
            warnings.warn("Failed to get latest version of tzdata. "
                          "Check your internet connection or set "
                          "check_tzdata = False under [control] in tradinghours.ini")
            return None

        # NOTE(review): lexicographic string comparison of versions — e.g.
        # "2024.10" < "2024.2" — consider comparing parsed version tuples.
        if installed_version < latest_version:
            warnings.warn(f"\nThe installed version of tzdata is {installed_version}\n"
                          f"The latest version of tzdata is {latest_version}\n"
                          f"Please run: pip install tzdata --upgrade")
            return None

    return True

# ===================== src/tradinghours/validate.py =====================
import datetime as dt
from typing import Any, Optional, Tuple, TypeVar

T = TypeVar("T")


def validate_date_arg(name: str, value: Any) -> dt.date:
    # Accept a dt.date or an ISO date string; the exact-type check (type is,
    # not isinstance) deliberately rejects datetime, a subclass of date.
    if value is None:
        raise ValueError(f"Missing {name}")
    if isinstance(value, str):
        value = dt.date.fromisoformat(value)
    if type(value) is not dt.date:
        raise TypeError(f"Invalid {name} type")
    return value


def validate_range_args(start: T, end: T) -> Tuple[T, T]:
    # Ensure start <= end; both values are returned unchanged.
    if end < start:
        raise ValueError("Invalid date range")
    return start, end


def validate_str_arg(name: str, value: Any, strip=True) -> str:
    if value is None:
        raise ValueError(f"Missing {name}")
    if not isinstance(value, str):
        raise TypeError(f"Invalid {name} type")
    if strip:
        value = value.strip()
    return value


def validate_int_arg(name: str, value: Any, default: Optional[int] = None) -> int:
    # None falls back to `default` when one is given, otherwise it is an error.
    if value is None:
        if default is None:
            raise ValueError(f"Missing {name}")
        else:
            value = default
    if not isinstance(value, int):
        raise TypeError(f"Invalid {name} type")
    return value


def validate_finid_arg(value: Any) -> str:
    # NOTE(review): a non-None, non-str value falls through every branch and
    # returns None implicitly — probably should raise TypeError like
    # validate_mic_arg does.
    if value is None:
        raise ValueError(f"Missing FinID")
    if isinstance(value, str):
        segments = value.split(".")
        if len(segments) < 2:
            raise ValueError("Invalid FinID string")
        return value.upper()

def validate_mic_arg(value: Any) -> str:
    # MICs are four alphanumeric characters (per ISO 10383 — confirm).
    if value is None:
        raise ValueError(f"Missing MIC")
    if not isinstance(value, str):
        raise TypeError(f"MIC needs to be a str")
    if not value.isalnum() or len(value) != 4:
        raise ValueError(f"Invalid MIC string")
    return value.upper()

# ===================== test_and_release_readme.txt (prose, truncated at chunk boundary) =====================
This readme covers the Github Actions workflows that manage the automated test and release process. These workflows I will be referring to are located in the .github/workflows folder.

There are 3 steps involved to go from a development branch, which can have any name to the main branch, which automatically results in the release of the package.

1. Pull Request against the `pre-release` branch:
Immediately when this PR is created, so before it is merged, the tests.yml workflow runs. This ensures that all tests pass before merging code into the `pre-release` branch.

2. Pull Request against the `main` branch:
Immediately when this PR is created, so before it is merged, the test_release.yml workflow runs. This ensures that the code can be built and pushed to pypi successfully and that the code that is pushed still passes all tests after it is downloaded.

3. Merge into the `main` branch:
When the merge is completed, the release.yml workflow runs.
Before PR against `main`:
* __init__.__version__ must be incremented
* release_log.txt needs to be updated


## tests.yml
This workflow covers three jobs:

1. Run the tests with coverage tracking
2. Run the tests with all combinations of different operating systems, and API key access levels
3. Run the tests with different python versions


## test_release.yml
This workflow covers two jobs:

1. Push the code from pre-release to test.pypi.org
2. Install the code from test.pypi.org and run the tests with that code.


## release.yml
This workflow covers one job:

1. It makes the release to pypi.org

# ===================== tests/__init__.py (empty file) =====================

# ===================== tests/conftest.py =====================
import os
import pytest
from contextlib import contextmanager
from tradinghours.store import db


@contextmanager
def _select_and_delete(result, table):
    """Yield the row just inserted (looked up by primary key), then delete it
    on exit. NOTE(review): no try/finally — the delete is skipped if the
    consuming test raises."""
    # Retrieve the inserted record
    select_stmt = table.select().where(table.c.id == result.inserted_primary_key[0])
    record = db.execute(select_stmt).fetchone()

    yield record

    delete_stmt = table.delete().where(table.c.id == result.inserted_primary_key[0])
    db.execute(delete_stmt)


@pytest.fixture
def covered_market():
    # Temporary covered_markets row ('XX.TEST'), removed again after the test.
    table = db.table("covered_markets")
    result = db.execute(table.insert().values(fin_id='XX.TEST'))
    with _select_and_delete(result, table) as record:
        yield record


@pytest.fixture
def covered_currency():
    # Temporary covered_currencies row ('XXX'), removed again after the test.
    table = db.table("covered_currencies")
    result = db.execute(table.insert().values(currency_code='XXX'))

    with _select_and_delete(result, table) as record:
        yield record

# ===================== tests/test_access.py =====================
import pytest
from tradinghours import store as st
from tradinghours import exceptions as ex
from tradinghours import Currency, Market


# test that the access_level in db is set correctly
def test_access_level():
    # Infer the expected level from which tables the ingestion created,
    # mirroring Writer._ingest_all's csv-based detection.
    tables = st.db.metadata.tables

    if st.tname("schedules") not in tables:
        should_be = st.AccessLevel.only_holidays
    elif st.tname("currencies") not in tables:
        should_be = st.AccessLevel.no_currencies
    else:
        should_be = st.AccessLevel.full

    assert st.db.access_level == should_be


def test_raises_no_access():
    """
    The level doesn't need to be changed in this test.
    Github Actions will run the test suite with data loaded using
    API keys of different access levels.
    """
    # These should never raise NoAccess
    Market.list_all()
    nyse = Market.get("US.NYSE")
    nyse.list_holidays("2024-01-01", "2025-01-01")
    if st.db.access_level == st.AccessLevel.full:
        nyse.list_schedules()
        list(nyse.generate_phases("2024-09-12", "2024-09-13"))
        return

    # these should raise NoAccess for any level != full
    with pytest.raises(ex.NoAccess):
        Currency.get("EUR")
    with pytest.raises(ex.NoAccess):
        Currency.list_all()

    if st.db.access_level == st.AccessLevel.no_currencies:
        # these should work with full and no_currencies
        nyse.list_schedules()
        list(nyse.generate_phases("2024-09-12", "2024-09-13"))

    elif st.db.access_level == st.AccessLevel.only_holidays:
        with pytest.raises(ex.NoAccess):
            nyse.list_schedules()
        with pytest.raises(ex.NoAccess):
            list(nyse.generate_phases("2024-09-12", "2024-09-13"))


def test_raise_not_covered(covered_market, covered_currency):
    """
    NotCovered should be raised when
    Market.get(fin_id)
    not in csvs and not in covered

    (it should raise NoAccess when
    Market.get(fin_id) not in csvs but in covered)

    I need to add a test value

    Currency.get(code) should raise NotCovered when it is not found
    """
    with pytest.raises(ex.NotCovered):
        Market.get("XX.NOTCOVERED")
    assert Market.is_available("XX.NOTCOVERED") is False
    assert Market.is_covered("XX.NOTCOVERED") is False

    # covered (via the fixture row) but not in the downloaded data -> NoAccess
    with pytest.raises(ex.NoAccess):
        Market.get(covered_market.fin_id)
    assert Market.is_available(covered_market.fin_id) is False
    assert Market.is_covered(covered_market.fin_id) is True

    # should raise nothing
    Market.get("US.NYSE")
    assert Market.is_available("US.NYSE") is True
    assert Market.is_covered("US.NYSE") is True

    if st.db.access_level == st.AccessLevel.full:
        with pytest.raises(ex.NotCovered):
            Currency.get("NOTCOVERED")
        assert Currency.is_available("NOTCOVERED") is False
        assert Currency.is_covered("NOTCOVERED") is False

        # should raise NoAccess with the covered_currency,
        # which is not available under the current plan
        with pytest.raises(ex.NoAccess):
            Currency.get(covered_currency.currency_code)
        assert Currency.is_available(covered_currency.currency_code) is False
        assert Currency.is_covered(covered_currency.currency_code) is True

        # should raise nothing
        Currency.get("EUR")
        assert Currency.is_available("EUR") is True
        assert Currency.is_covered("EUR") is True

    else:
        # without the full plan, every Currency accessor raises NoAccess
        with pytest.raises(ex.NoAccess):
            Currency.get("NOTCOVERED")

        with pytest.raises(ex.NoAccess):
            assert Currency.is_covered(covered_currency.currency_code) is True
        assert Currency.is_available("NOTCOVERED") is False

        with pytest.raises(ex.NoAccess):
            Currency.get("EUR")
        assert Currency.is_available("EUR") is False

        with pytest.raises(ex.NoAccess):
            Currency.is_covered("EUR")

# ===================== tests/test_generate_phases.py (continues past this chunk) =====================
import os
import pytest
from tradinghours.market import Market
from tradinghours.exceptions import NoAccess
from tradinghours.store import db, AccessLevel

from .utils import fromiso

# Expected to fail (with NoAccess) when the loaded data has no schedules.
@pytest.mark.xfail(
    db.access_level == AccessLevel.only_holidays,
    reason="No access",
    strict=True,
    raises=NoAccess
)
@pytest.mark.parametrize("fin_id, start, end, expected", [
    ("US.NYSE", "2023-11-15", "2023-11-15",
     [{'phase_type': 'Pre-Trading Session',
       'phase_name': 'Pre-Trading Session',
       'phase_memo': None,
       'status': 'Closed',
       'settlement': 'No',
'start': fromiso("2023-11-15 04:00:00", "America/New_York"), 23 | 'end': fromiso("2023-11-15 09:30:00", "America/New_York"), 24 | 'has_settlement': False, 25 | 'is_open': False, 26 | 'timezone': 'America/New_York'}, 27 | {'phase_type': 'Pre-Open', 28 | 'phase_name': 'Pre-Opening Session', 29 | 'phase_memo': None, 30 | 'status': 'Closed', 31 | 'settlement': 'No', 32 | 'start': fromiso("2023-11-15 06:30:00", "America/New_York"), 33 | 'end': fromiso("2023-11-15 09:30:00", "America/New_York"), 34 | 'has_settlement': False, 35 | 'is_open': False, 36 | 'timezone': 'America/New_York'}, 37 | {'phase_type': 'Call Auction', 38 | 'phase_name': 'Core Open Auction', 39 | 'phase_memo': None, 40 | 'status': 'Closed', 41 | 'settlement': 'No', 42 | 'start': fromiso("2023-11-15 09:30:00", "America/New_York"), 43 | 'end': fromiso("2023-11-15 09:30:00", "America/New_York"), 44 | 'has_settlement': False, 45 | 'is_open': False, 46 | 'timezone': 'America/New_York'}, 47 | {'phase_type': 'Primary Trading Session', 48 | 'phase_name': 'Core Trading Session', 49 | 'phase_memo': None, 50 | 'status': 'Open', 51 | 'settlement': 'Yes', 52 | 'start': fromiso("2023-11-15 09:30:00", "America/New_York"), 53 | 'end': fromiso("2023-11-15 16:00:00", "America/New_York"), 54 | 'has_settlement': True, 55 | 'is_open': True, 56 | 'timezone': 'America/New_York'}, 57 | {'phase_type': 'Pre-Close', 58 | 'phase_name': 'Closing Imbalance Period', 59 | 'phase_memo': None, 60 | 'status': 'Closed', 61 | 'settlement': 'No', 62 | 'start': fromiso("2023-11-15 15:50:00", "America/New_York"), 63 | 'end': fromiso("2023-11-15 16:00:00", "America/New_York"), 64 | 'has_settlement': False, 65 | 'is_open': False, 66 | 'timezone': 'America/New_York'}, 67 | {'phase_type': 'Post-Trading Session', 68 | 'phase_name': 'Extended Hours', 69 | 'phase_memo': None, 70 | 'status': 'Closed', 71 | 'settlement': 'No', 72 | 'start': fromiso("2023-11-15 16:00:00", "America/New_York"), 73 | 'end': fromiso("2023-11-15 20:00:00", 
"America/New_York"), 74 | 'has_settlement': False, 75 | 'is_open': False, 76 | 'timezone': 'America/New_York'}] 77 | ), 78 | 79 | ("US.NYSE", "2023-11-11", "2023-11-11", 80 | [] 81 | ), 82 | 83 | ("US.NYSE", "2023-11-24", "2023-11-24", 84 | [{'phase_type': 'Pre-Trading Session', 85 | 'phase_name': 'Pre-Opening Session', 86 | 'phase_memo': None, 87 | 'status': 'Closed', 88 | 'settlement': 'No', 89 | 'start': fromiso("2023-11-24 06:30:00", "America/New_York"), 90 | 'end': fromiso("2023-11-24 09:30:00", "America/New_York"), 91 | 'has_settlement': False, 92 | 'is_open': False, 93 | 'timezone': 'America/New_York'}, 94 | {'phase_type': 'Primary Trading Session', 95 | 'phase_name': 'Core Trading Session', 96 | 'phase_memo': None, 97 | 'status': 'Open', 98 | 'settlement': 'Yes', 99 | 'start': fromiso("2023-11-24 09:30:00", "America/New_York"), 100 | 'end': fromiso("2023-11-24 13:00:00", "America/New_York"), 101 | 'has_settlement': True, 102 | 'is_open': True, 103 | 'timezone': 'America/New_York'}, 104 | {'phase_type': 'Post-Trading Session', 105 | 'phase_name': 'Crossing Session', 106 | 'phase_memo': None, 107 | 'status': 'Closed', 108 | 'settlement': 'No', 109 | 'start': fromiso("2023-11-24 13:00:00", "America/New_York"), 110 | 'end': fromiso("2023-11-24 13:30:00", "America/New_York"), 111 | 'has_settlement': False, 112 | 'is_open': False, 113 | 'timezone': 'America/New_York'}] 114 | ), 115 | 116 | ("US.CME.EQUITY.USINDEX1", "2023-11-13", "2023-11-13", 117 | [{'phase_type': 'Primary Trading Session', 118 | 'phase_name': None, 119 | 'phase_memo': None, 120 | 'status': 'Open', 121 | 'settlement': 'Yes', 122 | 'start': fromiso("2023-11-12 17:00:00", "America/Chicago"), 123 | 'end': fromiso("2023-11-13 16:00:00", "America/Chicago"), 124 | 'has_settlement': True, 125 | 'is_open': True, 126 | 'timezone': 'America/Chicago'}, 127 | {'phase_type': 'Settlement Window', 128 | 'phase_name': None, 129 | 'phase_memo': None, 130 | 'status': 'Closed', 131 | 'settlement': 'No', 132 | 
'start': fromiso("2023-11-13 14:59:30", "America/Chicago"), 133 | 'end': fromiso("2023-11-13 15:00:00", "America/Chicago"), 134 | 'has_settlement': False, 135 | 'is_open': False, 136 | 'timezone': 'America/Chicago'}, 137 | {'phase_type': 'Pre-Open', 138 | 'phase_name': None, 139 | 'phase_memo': None, 140 | 'status': 'Closed', 141 | 'settlement': 'No', 142 | 'start': fromiso("2023-11-13 16:45:00", "America/Chicago"), 143 | 'end': fromiso("2023-11-13 17:00:00", "America/Chicago"), 144 | 'has_settlement': False, 145 | 'is_open': False, 146 | 'timezone': 'America/Chicago'}, 147 | {'phase_type': 'Primary Trading Session', 148 | 'phase_name': None, 149 | 'phase_memo': None, 150 | 'status': 'Open', 151 | 'settlement': 'Yes', 152 | 'start': fromiso("2023-11-13 17:00:00", "America/Chicago"), 153 | 'end': fromiso("2023-11-14 16:00:00", "America/Chicago"), 154 | 'has_settlement': True, 155 | 'is_open': True, 156 | 'timezone': 'America/Chicago'}] 157 | ), 158 | 159 | ("US.CME.EQUITY.USINDEX1", "2023-11-23", "2023-11-23", 160 | [{'phase_type': 'Primary Trading Session, No Closing Price', 161 | 'phase_name': None, 162 | 'phase_memo': None, 163 | 'status': 'Open', 164 | 'settlement': 'No', 165 | 'start': fromiso("2023-11-22 17:00:00", "America/Chicago"), 166 | 'end': fromiso("2023-11-23 12:00:00", "America/Chicago"), 167 | 'has_settlement': False, 168 | 'is_open': True, 169 | 'timezone': 'America/Chicago'}, 170 | {'phase_type': 'Pre-Open', 171 | 'phase_name': None, 172 | 'phase_memo': None, 173 | 'status': 'Closed', 174 | 'settlement': 'No', 175 | 'start': fromiso("2023-11-23 12:00:00", "America/Chicago"), 176 | 'end': fromiso("2023-11-23 17:00:00", "America/Chicago"), 177 | 'has_settlement': False, 178 | 'is_open': False, 179 | 'timezone': 'America/Chicago'}, 180 | {'phase_type': 'Primary Trading Session', 181 | 'phase_name': None, 182 | 'phase_memo': None, 183 | 'status': 'Open', 184 | 'settlement': 'Yes', 185 | 'start': fromiso("2023-11-23 17:00:00", "America/Chicago"), 186 | 
'end': fromiso("2023-11-24 12:15:00", "America/Chicago"), 187 | 'has_settlement': True, 188 | 'is_open': True, 189 | 'timezone': 'America/Chicago'}] 190 | ), 191 | 192 | ("US.CME.EQUITY.USINDEX1", "2023-12-25", "2023-12-25", 193 | [{'phase_type': 'Pre-Open', 194 | 'phase_name': None, 195 | 'phase_memo': None, 196 | 'status': 'Closed', 197 | 'settlement': 'No', 198 | 'start': fromiso("2023-12-25 16:00:00", "America/Chicago"), 199 | 'end': fromiso("2023-12-25 17:00:00", "America/Chicago"), 200 | 'has_settlement': False, 201 | 'is_open': False, 202 | 'timezone': 'America/Chicago'}, 203 | {'phase_type': 'Primary Trading Session', 204 | 'phase_name': None, 205 | 'phase_memo': None, 206 | 'status': 'Open', 207 | 'settlement': 'Yes', 208 | 'start': fromiso("2023-12-25 17:00:00", "America/Chicago"), 209 | 'end': fromiso("2023-12-26 16:00:00", "America/Chicago"), 210 | 'has_settlement': True, 211 | 'is_open': True, 212 | 'timezone': 'America/Chicago'}] 213 | ), 214 | 215 | ("CN.CIBM", "2020-01-19", "2020-01-19", 216 | [{'phase_type': 'Primary Trading Session', 217 | 'phase_name': 'First Session', 218 | 'phase_memo': None, 219 | 'status': 'Open', 220 | 'settlement': 'Yes', 221 | 'start': fromiso("2020-01-19 09:00:00", "Asia/Shanghai"), 222 | 'end': fromiso("2020-01-19 12:00:00", "Asia/Shanghai"), 223 | 'has_settlement': True, 224 | 'is_open': True, 225 | 'timezone': 'Asia/Shanghai'}, 226 | {'phase_type': 'Intermission', 227 | 'phase_name': 'Intermission', 228 | 'phase_memo': None, 229 | 'status': 'Closed', 230 | 'settlement': 'No', 231 | 'start': fromiso("2020-01-19 12:00:00", "Asia/Shanghai"), 232 | 'end': fromiso("2020-01-19 13:30:00", "Asia/Shanghai"), 233 | 'has_settlement': False, 234 | 'is_open': False, 235 | 'timezone': 'Asia/Shanghai'}, 236 | {'phase_type': 'Primary Trading Session', 237 | 'phase_name': 'Second Session', 238 | 'phase_memo': None, 239 | 'status': 'Open', 240 | 'settlement': 'Yes', 241 | 'start': fromiso("2020-01-19 13:30:00", "Asia/Shanghai"), 242 | 
'end': fromiso("2020-01-19 20:00:00", "Asia/Shanghai"), 243 | 'has_settlement': True, 244 | 'is_open': True, 245 | 'timezone': 'Asia/Shanghai'}] 246 | ), 247 | 248 | ("US.CBOE.VIX", "2024-10-15", "2024-10-15", 249 | [{'phase_type': 'Primary Trading Session', 250 | 'phase_name': 'Extended Trading Hours', 251 | 'phase_memo': None, 252 | 'status': 'Open', 253 | 'settlement': 'Yes', 254 | 'start': fromiso("2024-10-14 17:00:00", "America/Chicago"), 255 | 'end': fromiso("2024-10-15 08:30:00", "America/Chicago"), 256 | 'has_settlement': True, 257 | 'is_open': True, 258 | 'timezone': 'America/Chicago'}, 259 | {'phase_type': 'Trading-at-Last', 260 | 'phase_name': 'Trade at Settlement', 261 | 'phase_memo': None, 262 | 'status': 'Closed', 263 | 'settlement': 'No', 264 | 'start': fromiso("2024-10-14 17:00:00", "America/Chicago"), 265 | 'end': fromiso("2024-10-15 08:30:00", "America/Chicago"), 266 | 'has_settlement': False, 267 | 'is_open': False, 268 | 'timezone': 'America/Chicago'}, 269 | {'phase_type': 'Primary Trading Session', 270 | 'phase_name': 'Regular Trading Hours', 271 | 'phase_memo': 'Market Order Acceptance Time', 272 | 'status': 'Open', 273 | 'settlement': 'Yes', 274 | 'start': fromiso("2024-10-15 08:30:00", "America/Chicago"), 275 | 'end': fromiso("2024-10-15 15:00:00", "America/Chicago"), 276 | 'has_settlement': True, 277 | 'is_open': True, 278 | 'timezone': 'America/Chicago'}, 279 | {'phase_type': 'Trading-at-Last', 280 | 'phase_name': 'Trade at Settlement', 281 | 'phase_memo': None, 282 | 'status': 'Closed', 283 | 'settlement': 'No', 284 | 'start': fromiso("2024-10-15 08:30:00", "America/Chicago"), 285 | 'end': fromiso("2024-10-15 15:00:00", "America/Chicago"), 286 | 'has_settlement': False, 287 | 'is_open': False, 288 | 'timezone': 'America/Chicago'}, 289 | {'phase_type': 'Settlement Window', 290 | 'phase_name': 'Daily Settlement Price', 291 | 'phase_memo': None, 292 | 'status': 'Closed', 293 | 'settlement': 'No', 294 | 'start': fromiso("2024-10-15 15:00:00", 
"America/Chicago"), 295 | 'end': fromiso("2024-10-15 15:00:00", "America/Chicago"), 296 | 'has_settlement': False, 297 | 'is_open': False, 298 | 'timezone': 'America/Chicago'}, 299 | {'phase_type': 'Post-Trading Session', 300 | 'phase_name': 'Extended Trading Hours', 301 | 'phase_memo': 'Post Settlement', 302 | 'status': 'Closed', 303 | 'settlement': 'No', 304 | 'start': fromiso("2024-10-15 15:00:00", "America/Chicago"), 305 | 'end': fromiso("2024-10-15 16:00:00", "America/Chicago"), 306 | 'has_settlement': False, 307 | 'is_open': False, 308 | 'timezone': 'America/Chicago'}, 309 | {'phase_type': 'Pre-Open', 310 | 'phase_name': 'Pre-Market Session', 311 | 'phase_memo': None, 312 | 'status': 'Closed', 313 | 'settlement': 'No', 314 | 'start': fromiso("2024-10-15 16:45:00", "America/Chicago"), 315 | 'end': fromiso("2024-10-15 17:00:00", "America/Chicago"), 316 | 'has_settlement': False, 317 | 'is_open': False, 318 | 'timezone': 'America/Chicago'}, 319 | {'phase_type': 'Primary Trading Session', 320 | 'phase_name': 'Extended Trading Hours', 321 | 'phase_memo': None, 322 | 'status': 'Open', 323 | 'settlement': 'Yes', 324 | 'start': fromiso("2024-10-15 17:00:00", "America/Chicago"), 325 | 'end': fromiso("2024-10-16 08:30:00", "America/Chicago"), 326 | 'has_settlement': True, 327 | 'is_open': True, 328 | 'timezone': 'America/Chicago'}, 329 | {'phase_type': 'Trading-at-Last', 330 | 'phase_name': 'Trade at Settlement', 331 | 'phase_memo': None, 332 | 'status': 'Closed', 333 | 'settlement': 'No', 334 | 'start': fromiso("2024-10-15 17:00:00", "America/Chicago"), 335 | 'end': fromiso("2024-10-16 08:30:00", "America/Chicago"), 336 | 'has_settlement': False, 337 | 'is_open': False, 338 | 'timezone': 'America/Chicago'}] 339 | ), 340 | 341 | ("US.CBOE.VIX", "2024-10-16", "2024-10-16", 342 | [{'phase_type': 'Primary Trading Session', 343 | 'phase_name': 'Extended Trading Hours', 344 | 'phase_memo': None, 345 | 'status': 'Open', 346 | 'settlement': 'Yes', 347 | 'start': 
fromiso("2024-10-15 17:00:00", "America/Chicago"), 348 | 'end': fromiso("2024-10-16 08:30:00", "America/Chicago"), 349 | 'has_settlement': True, 350 | 'is_open': True, 351 | 'timezone': 'America/Chicago'}, 352 | {'phase_type': 'Trading-at-Last', 353 | 'phase_name': 'Trade at Settlement', 354 | 'phase_memo': None, 355 | 'status': 'Closed', 356 | 'settlement': 'No', 357 | 'start': fromiso("2024-10-15 17:00:00", "America/Chicago"), 358 | 'end': fromiso("2024-10-16 08:30:00", "America/Chicago"), 359 | 'has_settlement': False, 360 | 'is_open': False, 361 | 'timezone': 'America/Chicago'}, 362 | {'phase_type': 'Primary Trading Session', 363 | 'phase_name': 'Regular Trading Hours', 364 | 'phase_memo': 'Market Order Acceptance Time', 365 | 'status': 'Open', 366 | 'settlement': 'Yes', 367 | 'start': fromiso("2024-10-16 08:30:00", "America/Chicago"), 368 | 'end': fromiso("2024-10-16 15:00:00", "America/Chicago"), 369 | 'has_settlement': True, 370 | 'is_open': True, 371 | 'timezone': 'America/Chicago'}, 372 | {'phase_type': 'Trading-at-Last', 373 | 'phase_name': 'Trade at Settlement', 374 | 'phase_memo': None, 375 | 'status': 'Closed', 376 | 'settlement': 'No', 377 | 'start': fromiso("2024-10-16 08:30:00", "America/Chicago"), 378 | 'end': fromiso("2024-10-16 15:00:00", "America/Chicago"), 379 | 'has_settlement': False, 380 | 'is_open': False, 381 | 'timezone': 'America/Chicago'}, 382 | {'phase_type': 'Settlement Window', 383 | 'phase_name': 'Daily Settlement Price', 384 | 'phase_memo': None, 385 | 'status': 'Closed', 386 | 'settlement': 'No', 387 | 'start': fromiso("2024-10-16 15:00:00", "America/Chicago"), 388 | 'end': fromiso("2024-10-16 15:00:00", "America/Chicago"), 389 | 'has_settlement': False, 390 | 'is_open': False, 391 | 'timezone': 'America/Chicago'}, 392 | {'phase_type': 'Post-Trading Session', 393 | 'phase_name': 'Extended Trading Hours', 394 | 'phase_memo': 'Post Settlement', 395 | 'status': 'Closed', 396 | 'settlement': 'No', 397 | 'start': fromiso("2024-10-16 
15:00:00", "America/Chicago"), 398 | 'end': fromiso("2024-10-16 16:00:00", "America/Chicago"), 399 | 'has_settlement': False, 400 | 'is_open': False, 401 | 'timezone': 'America/Chicago'}, 402 | {'phase_type': 'Pre-Open', 403 | 'phase_name': 'Pre-Market Session', 404 | 'phase_memo': None, 405 | 'status': 'Closed', 406 | 'settlement': 'No', 407 | 'start': fromiso('2024-10-16 16:45:00', 'America/Chicago'), 408 | 'end': fromiso('2024-10-16 17:00:00', 'America/Chicago'), 409 | 'has_settlement': False, 410 | 'is_open': False, 411 | 'timezone': 'America/Chicago'}, 412 | {'phase_type': 'Primary Trading Session', 413 | 'phase_name': 'Extended Trading Hours', 414 | 'phase_memo': None, 415 | 'status': 'Open', 416 | 'settlement': 'Yes', 417 | 'start': fromiso('2024-10-16 17:00:00', 'America/Chicago'), 418 | 'end': fromiso('2024-10-17 08:30:00', 'America/Chicago'), 419 | 'has_settlement': True, 420 | 'is_open': True, 421 | 'timezone': 'America/Chicago'}, 422 | {'phase_type': 'Trading-at-Last', 423 | 'phase_name': 'Trade at Settlement', 424 | 'phase_memo': None, 425 | 'status': 'Closed', 426 | 'settlement': 'No', 427 | 'start': fromiso('2024-10-16 17:00:00', 'America/Chicago'), 428 | 'end': fromiso('2024-10-17 08:30:00', 'America/Chicago'), 429 | 'has_settlement': False, 430 | 'is_open': False, 431 | 'timezone': 'America/Chicago'}] 432 | ), 433 | 434 | ("US.CME.AGRI.DAIRY1", "2022-01-16", "2022-01-16", 435 | [{'phase_type': 'Pre-Open', 436 | 'phase_name': None, 437 | 'phase_memo': None, 438 | 'status': 'Closed', 439 | 'settlement': 'No', 440 | 'start': fromiso("2022-01-16 16:00:00", "America/Chicago"), 441 | 'end': fromiso("2022-01-17 17:00:00", "America/Chicago"), 442 | 'has_settlement': False, 443 | 'is_open': False, 444 | 'timezone': 'America/Chicago'}] 445 | ), 446 | 447 | ("US.CME.AGRI.DAIRY1", "2022-01-17", "2022-01-17", 448 | [{'phase_type': 'Pre-Open', 449 | 'phase_name': None, 450 | 'phase_memo': None, 451 | 'status': 'Closed', 452 | 'settlement': 'No', 453 | 'start': 
fromiso("2022-01-16 16:00:00", "America/Chicago"), 454 | 'end': fromiso("2022-01-17 17:00:00", "America/Chicago"), 455 | 'has_settlement': False, 456 | 'is_open': False, 457 | 'timezone': 'America/Chicago'}, 458 | {'phase_type': 'Primary Trading Session', 459 | 'phase_name': None, 460 | 'phase_memo': None, 461 | 'status': 'Open', 462 | 'settlement': 'Yes', 463 | 'start': fromiso("2022-01-17 17:00:00", "America/Chicago"), 464 | 'end': fromiso("2022-01-18 16:00:00", "America/Chicago"), 465 | 'has_settlement': True, 466 | 'is_open': True, 467 | 'timezone': 'America/Chicago'}] 468 | ), 469 | 470 | ("US.CME.AGRI.DAIRY1", "2022-01-09", "2022-01-09", 471 | [{'phase_type': 'Pre-Open', 472 | 'phase_name': None, 473 | 'phase_memo': None, 474 | 'status': 'Closed', 475 | 'settlement': 'No', 476 | 'start': fromiso("2022-01-09 16:00:00", "America/Chicago"), 477 | 'end': fromiso("2022-01-09 17:00:00", "America/Chicago"), 478 | 'has_settlement': False, 479 | 'is_open': False, 480 | 'timezone': 'America/Chicago'}, 481 | {'phase_type': 'Primary Trading Session', 482 | 'phase_name': None, 483 | 'phase_memo': None, 484 | 'status': 'Open', 485 | 'settlement': 'Yes', 486 | 'start': fromiso("2022-01-09 17:00:00", "America/Chicago"), 487 | 'end': fromiso("2022-01-10 16:00:00", "America/Chicago"), 488 | 'has_settlement': True, 489 | 'is_open': True, 490 | 'timezone': 'America/Chicago'}, 491 | {'phase_type': 'Other', 492 | 'phase_name': 'CME Clearport', 493 | 'phase_memo': None, 494 | 'status': 'Closed', 495 | 'settlement': 'No', 496 | 'start': fromiso("2022-01-09 17:00:00", "America/Chicago"), 497 | 'end': fromiso("2022-01-10 17:45:00", "America/Chicago"), 498 | 'has_settlement': False, 499 | 'is_open': False, 500 | 'timezone': 'America/Chicago'}] 501 | ), 502 | 503 | ("US.CME.AGRI.DAIRY1", "2022-01-10", "2022-01-10", 504 | [{'phase_type': 'Primary Trading Session', 505 | 'phase_name': None, 506 | 'phase_memo': None, 507 | 'status': 'Open', 508 | 'settlement': 'Yes', 509 | 'start': 
fromiso("2022-01-09 17:00:00", "America/Chicago"), 510 | 'end': fromiso("2022-01-10 16:00:00", "America/Chicago"), 511 | 'has_settlement': True, 512 | 'is_open': True, 513 | 'timezone': 'America/Chicago'}, 514 | {'phase_type': 'Other', 515 | 'phase_name': 'CME Clearport', 516 | 'phase_memo': None, 517 | 'status': 'Closed', 518 | 'settlement': 'No', 519 | 'start': fromiso("2022-01-09 17:00:00", "America/Chicago"), 520 | 'end': fromiso("2022-01-10 17:45:00", "America/Chicago"), 521 | 'has_settlement': False, 522 | 'is_open': False, 523 | 'timezone': 'America/Chicago'}, 524 | {'phase_type': 'Settlement Window', 525 | 'phase_name': None, 526 | 'phase_memo': None, 527 | 'status': 'Closed', 528 | 'settlement': 'No', 529 | 'start': fromiso("2022-01-10 13:09:30", "America/Chicago"), 530 | 'end': fromiso("2022-01-10 13:10:00", "America/Chicago"), 531 | 'has_settlement': False, 532 | 'is_open': False, 533 | 'timezone': 'America/Chicago'}, 534 | {'phase_type': 'Pre-Open', 535 | 'phase_name': None, 536 | 'phase_memo': None, 537 | 'status': 'Closed', 538 | 'settlement': 'No', 539 | 'start': fromiso("2022-01-10 16:45:00", "America/Chicago"), 540 | 'end': fromiso("2022-01-10 17:00:00", "America/Chicago"), 541 | 'has_settlement': False, 542 | 'is_open': False, 543 | 'timezone': 'America/Chicago'}, 544 | {'phase_type': 'Primary Trading Session', 545 | 'phase_name': None, 546 | 'phase_memo': None, 547 | 'status': 'Open', 548 | 'settlement': 'Yes', 549 | 'start': fromiso("2022-01-10 17:00:00", "America/Chicago"), 550 | 'end': fromiso("2022-01-11 16:00:00", "America/Chicago"), 551 | 'has_settlement': True, 552 | 'is_open': True, 553 | 'timezone': 'America/Chicago'}, 554 | {'phase_type': 'Other', 555 | 'phase_name': 'CME Clearport', 556 | 'phase_memo': None, 557 | 'status': 'Closed', 558 | 'settlement': 'No', 559 | 'start': fromiso("2022-01-10 17:00:00", "America/Chicago"), 560 | 'end': fromiso("2022-01-11 17:45:00", "America/Chicago"), 561 | 'has_settlement': False, 562 | 'is_open': 
False, 563 | 'timezone': 'America/Chicago'}, 564 | {'phase_type': 'Other', 565 | 'phase_name': 'CME Clearport with no reporting', 566 | 'phase_memo': None, 567 | 'status': 'Closed', 568 | 'settlement': 'No', 569 | 'start': fromiso("2022-01-10 17:45:00", "America/Chicago"), 570 | 'end': fromiso("2022-01-10 18:00:00", "America/Chicago"), 571 | 'has_settlement': False, 572 | 'is_open': False, 573 | 'timezone': 'America/Chicago'}] 574 | ), 575 | 576 | ("US.BTEC.ACTIVES.US", "2023-03-09", "2023-03-09", 577 | [{'phase_type': 'Primary Trading Session', 578 | 'phase_name': None, 579 | 'phase_memo': 'Winter Season', 580 | 'status': 'Open', 581 | 'settlement': 'Yes', 582 | 'start': fromiso("2023-03-08 18:30:00", "America/New_York"), 583 | 'end': fromiso("2023-03-09 17:30:00", "America/New_York"), 584 | 'has_settlement': True, 585 | 'is_open': True, 586 | 'timezone': 'America/New_York'}, 587 | {'phase_type': 'Primary Trading Session', 588 | 'phase_name': None, 589 | 'phase_memo': 'Winter Season', 590 | 'status': 'Open', 591 | 'settlement': 'Yes', 592 | 'start': fromiso("2023-03-09 18:30:00", "America/New_York"), 593 | 'end': fromiso("2023-03-10 17:30:00", "America/New_York"), 594 | 'has_settlement': True, 595 | 'is_open': True, 596 | 'timezone': 'America/New_York'}] 597 | ), 598 | 599 | ("US.BTEC.ACTIVES.US", "2023-11-12", "2023-11-12", 600 | [{'phase_type': 'Primary Trading Session', 601 | 'phase_name': None, 602 | 'phase_memo': 'Winter Season', 603 | 'status': 'Open', 604 | 'settlement': 'Yes', 605 | 'start': fromiso("2023-11-12 18:30:00", "America/New_York"), 606 | 'end': fromiso("2023-11-13 17:30:00", "America/New_York"), 607 | 'has_settlement': True, 608 | 'is_open': True, 609 | 'timezone': 'America/New_York'}] 610 | ), 611 | 612 | ("CN.SGE", "2023-01-02", "2023-01-02", 613 | [] 614 | ), 615 | 616 | ("CN.SGE", "2023-01-01", "2023-01-03", 617 | [{'phase_type': 'Primary Trading Session', 618 | 'phase_name': 'Day Trading Session', 619 | 'phase_memo': None, 620 | 'status': 
'Open', 621 | 'settlement': 'Yes', 622 | 'start': fromiso("2023-01-03 09:00:00", "Asia/Shanghai"), 623 | 'end': fromiso("2023-01-03 15:30:00", "Asia/Shanghai"), 624 | 'has_settlement': True, 625 | 'is_open': True, 626 | 'timezone': 'Asia/Shanghai'}, 627 | {'phase_type': 'Other', 628 | 'phase_name': 'Delivery Tender Submission', 629 | 'phase_memo': None, 630 | 'status': 'Closed', 631 | 'settlement': 'No', 632 | 'start': fromiso("2023-01-03 15:00:00", "Asia/Shanghai"), 633 | 'end': fromiso("2023-01-03 15:30:00", "Asia/Shanghai"), 634 | 'has_settlement': False, 635 | 'is_open': False, 636 | 'timezone': 'Asia/Shanghai'}, 637 | {'phase_type': 'Pre-Close', 638 | 'phase_name': 'Pre Market Close', 639 | 'phase_memo': None, 640 | 'status': 'Closed', 641 | 'settlement': 'No', 642 | 'start': fromiso("2023-01-03 15:30:00", "Asia/Shanghai"), 643 | 'end': fromiso("2023-01-03 15:45:00", "Asia/Shanghai"), 644 | 'has_settlement': False, 645 | 'is_open': False, 646 | 'timezone': 'Asia/Shanghai'}, 647 | {'phase_type': 'Other', 648 | 'phase_name': 'Equalizer Tender Submission', 649 | 'phase_memo': None, 650 | 'status': 'Closed', 651 | 'settlement': 'No', 652 | 'start': fromiso("2023-01-03 15:31:00", "Asia/Shanghai"), 653 | 'end': fromiso("2023-01-03 15:40:00", "Asia/Shanghai"), 654 | 'has_settlement': False, 655 | 'is_open': False, 656 | 'timezone': 'Asia/Shanghai'}, 657 | {'phase_type': 'Other', 658 | 'phase_name': 'Deliver Tender Matching', 659 | 'phase_memo': None, 660 | 'status': 'Closed', 661 | 'settlement': 'No', 662 | 'start': fromiso("2023-01-03 15:40:00", "Asia/Shanghai"), 663 | 'end': fromiso("2023-01-03 15:40:00", "Asia/Shanghai"), 664 | 'has_settlement': False, 665 | 'is_open': False, 666 | 'timezone': 'Asia/Shanghai'}, 667 | {'phase_type': 'Pre-Open', 668 | 'phase_name': 'Market Opening', 669 | 'phase_memo': None, 670 | 'status': 'Closed', 671 | 'settlement': 'No', 672 | 'start': fromiso("2023-01-03 19:45:00", "Asia/Shanghai"), 673 | 'end': fromiso("2023-01-03 20:00:00", 
"Asia/Shanghai"), 674 | 'has_settlement': False, 675 | 'is_open': False, 676 | 'timezone': 'Asia/Shanghai'}, 677 | {'phase_type': 'Other', 678 | 'phase_name': 'Order Matching', 679 | 'phase_memo': None, 680 | 'status': 'Closed', 681 | 'settlement': 'No', 682 | 'start': fromiso("2023-01-03 19:50:00", "Asia/Shanghai"), 683 | 'end': fromiso("2023-01-03 19:59:00", "Asia/Shanghai"), 684 | 'has_settlement': False, 685 | 'is_open': False, 686 | 'timezone': 'Asia/Shanghai'}, 687 | {'phase_type': 'Primary Trading Session', 688 | 'phase_name': 'Night Trading Session', 689 | 'phase_memo': None, 690 | 'status': 'Open', 691 | 'settlement': 'Yes', 692 | 'start': fromiso("2023-01-03 20:00:00", "Asia/Shanghai"), 693 | 'end': fromiso("2023-01-04 02:30:00", "Asia/Shanghai"), 694 | 'has_settlement': True, 695 | 'is_open': True, 696 | 'timezone': 'Asia/Shanghai'}] 697 | ), 698 | 699 | ("CN.SGE", "2024-09-27", "2024-10-04", 700 | [{'phase_type': 'Primary Trading Session', 701 | 'phase_name': 'Night Trading Session', 702 | 'phase_memo': None, 703 | 'status': 'Open', 704 | 'settlement': 'Yes', 705 | 'start': fromiso("2024-09-26 20:00:00", "Asia/Shanghai"), 706 | 'end': fromiso("2024-09-27 02:30:00", "Asia/Shanghai"), 707 | 'has_settlement': True, 708 | 'is_open': True, 709 | 'timezone': 'Asia/Shanghai'}, 710 | {'phase_type': 'Primary Trading Session', 711 | 'phase_name': 'Day Trading Session', 712 | 'phase_memo': None, 713 | 'status': 'Open', 714 | 'settlement': 'Yes', 715 | 'start': fromiso("2024-09-27 09:00:00", "Asia/Shanghai"), 716 | 'end': fromiso("2024-09-27 15:30:00", "Asia/Shanghai"), 717 | 'has_settlement': True, 718 | 'is_open': True, 719 | 'timezone': 'Asia/Shanghai'}, 720 | {'phase_type': 'Other', 721 | 'phase_name': 'Delivery Tender Submission', 722 | 'phase_memo': None, 723 | 'status': 'Closed', 724 | 'settlement': 'No', 725 | 'start': fromiso("2024-09-27 15:00:00", "Asia/Shanghai"), 726 | 'end': fromiso("2024-09-27 15:30:00", "Asia/Shanghai"), 727 | 'has_settlement': 
False, 728 | 'is_open': False, 729 | 'timezone': 'Asia/Shanghai'}, 730 | {'phase_type': 'Pre-Close', 731 | 'phase_name': 'Pre Market Close', 732 | 'phase_memo': None, 733 | 'status': 'Closed', 734 | 'settlement': 'No', 735 | 'start': fromiso("2024-09-27 15:30:00", "Asia/Shanghai"), 736 | 'end': fromiso("2024-09-27 15:45:00", "Asia/Shanghai"), 737 | 'has_settlement': False, 738 | 'is_open': False, 739 | 'timezone': 'Asia/Shanghai'}, 740 | {'phase_type': 'Other', 741 | 'phase_name': 'Equalizer Tender Submission', 742 | 'phase_memo': None, 743 | 'status': 'Closed', 744 | 'settlement': 'No', 745 | 'start': fromiso("2024-09-27 15:31:00", "Asia/Shanghai"), 746 | 'end': fromiso("2024-09-27 15:40:00", "Asia/Shanghai"), 747 | 'has_settlement': False, 748 | 'is_open': False, 749 | 'timezone': 'Asia/Shanghai'}, 750 | {'phase_type': 'Other', 751 | 'phase_name': 'Deliver Tender Matching', 752 | 'phase_memo': None, 753 | 'status': 'Closed', 754 | 'settlement': 'No', 755 | 'start': fromiso("2024-09-27 15:40:00", "Asia/Shanghai"), 756 | 'end': fromiso("2024-09-27 15:40:00", "Asia/Shanghai"), 757 | 'has_settlement': False, 758 | 'is_open': False, 759 | 'timezone': 'Asia/Shanghai'}, 760 | {'phase_type': 'Pre-Open', 761 | 'phase_name': 'Market Opening', 762 | 'phase_memo': None, 763 | 'status': 'Closed', 764 | 'settlement': 'No', 765 | 'start': fromiso("2024-09-27 19:45:00", "Asia/Shanghai"), 766 | 'end': fromiso("2024-09-27 20:00:00", "Asia/Shanghai"), 767 | 'has_settlement': False, 768 | 'is_open': False, 769 | 'timezone': 'Asia/Shanghai'}, 770 | {'phase_type': 'Other', 771 | 'phase_name': 'Order Matching', 772 | 'phase_memo': None, 773 | 'status': 'Closed', 774 | 'settlement': 'No', 775 | 'start': fromiso("2024-09-27 19:50:00", "Asia/Shanghai"), 776 | 'end': fromiso("2024-09-27 19:59:00", "Asia/Shanghai"), 777 | 'has_settlement': False, 778 | 'is_open': False, 779 | 'timezone': 'Asia/Shanghai'}, 780 | {'phase_type': 'Primary Trading Session', 781 | 'phase_name': 'Night Trading 
Session', 782 | 'phase_memo': None, 783 | 'status': 'Open', 784 | 'settlement': 'Yes', 785 | 'start': fromiso("2024-09-27 20:00:00", "Asia/Shanghai"), 786 | 'end': fromiso("2024-09-28 02:30:00", "Asia/Shanghai"), 787 | 'has_settlement': True, 788 | 'is_open': True, 789 | 'timezone': 'Asia/Shanghai'}, 790 | {'phase_type': 'Primary Trading Session', 791 | 'phase_name': 'Day Trading Session', 792 | 'phase_memo': None, 793 | 'status': 'Open', 794 | 'settlement': 'Yes', 795 | 'start': fromiso("2024-09-30 09:00:00", "Asia/Shanghai"), 796 | 'end': fromiso("2024-09-30 15:30:00", "Asia/Shanghai"), 797 | 'has_settlement': True, 798 | 'is_open': True, 799 | 'timezone': 'Asia/Shanghai'}, 800 | {'phase_type': 'Other', 801 | 'phase_name': 'Delivery Tender Submission', 802 | 'phase_memo': None, 803 | 'status': 'Closed', 804 | 'settlement': 'No', 805 | 'start': fromiso("2024-09-30 15:00:00", "Asia/Shanghai"), 806 | 'end': fromiso("2024-09-30 15:30:00", "Asia/Shanghai"), 807 | 'has_settlement': False, 808 | 'is_open': False, 809 | 'timezone': 'Asia/Shanghai'}, 810 | {'phase_type': 'Pre-Close', 811 | 'phase_name': 'Pre Market Close', 812 | 'phase_memo': None, 813 | 'status': 'Closed', 814 | 'settlement': 'No', 815 | 'start': fromiso("2024-09-30 15:30:00", "Asia/Shanghai"), 816 | 'end': fromiso("2024-09-30 15:45:00", "Asia/Shanghai"), 817 | 'has_settlement': False, 818 | 'is_open': False, 819 | 'timezone': 'Asia/Shanghai'}, 820 | {'phase_type': 'Other', 821 | 'phase_name': 'Equalizer Tender Submission', 822 | 'phase_memo': None, 823 | 'status': 'Closed', 824 | 'settlement': 'No', 825 | 'start': fromiso("2024-09-30 15:31:00", "Asia/Shanghai"), 826 | 'end': fromiso("2024-09-30 15:40:00", "Asia/Shanghai"), 827 | 'has_settlement': False, 828 | 'is_open': False, 829 | 'timezone': 'Asia/Shanghai'}, 830 | {'phase_type': 'Other', 831 | 'phase_name': 'Deliver Tender Matching', 832 | 'phase_memo': None, 833 | 'status': 'Closed', 834 | 'settlement': 'No', 835 | 'start': fromiso("2024-09-30 
15:40:00", "Asia/Shanghai"), 836 | 'end': fromiso("2024-09-30 15:40:00", "Asia/Shanghai"), 837 | 'has_settlement': False, 838 | 'is_open': False, 839 | 'timezone': 'Asia/Shanghai'}] 840 | ), 841 | 842 | ("AR.BCBA", "2017-05-17", "2017-05-19", 843 | [{'phase_type': 'Pre-Trading Session', 844 | 'phase_name': 'Opening Session', 845 | 'phase_memo': None, 846 | 'status': 'Closed', 847 | 'settlement': 'No', 848 | 'start': fromiso("2017-05-17 10:30:00", "America/Argentina/Buenos_Aires"), 849 | 'end': fromiso("2017-05-17 11:00:00", "America/Argentina/Buenos_Aires"), 850 | 'has_settlement': False, 851 | 'is_open': False, 852 | 'timezone': 'America/Argentina/Buenos_Aires'}, 853 | {'phase_type': 'Primary Trading Session', 854 | 'phase_name': 'Trading Session', 855 | 'phase_memo': None, 856 | 'status': 'Open', 857 | 'settlement': 'Yes', 858 | 'start': fromiso("2017-05-17 11:00:00", "America/Argentina/Buenos_Aires"), 859 | 'end': fromiso("2017-05-17 17:00:00", "America/Argentina/Buenos_Aires"), 860 | 'has_settlement': True, 861 | 'is_open': True, 862 | 'timezone': 'America/Argentina/Buenos_Aires'}, 863 | {'phase_type': 'Post-Trading Session', 864 | 'phase_name': 'Extended Hours', 865 | 'phase_memo': None, 866 | 'status': 'Closed', 867 | 'settlement': 'No', 868 | 'start': fromiso("2017-05-17 17:05:00", "America/Argentina/Buenos_Aires"), 869 | 'end': fromiso("2017-05-17 17:15:00", "America/Argentina/Buenos_Aires"), 870 | 'has_settlement': False, 871 | 'is_open': False, 872 | 'timezone': 'America/Argentina/Buenos_Aires'}, 873 | {'phase_type': 'Pre-Trading Session', 874 | 'phase_name': 'Opening Session', 875 | 'phase_memo': None, 876 | 'status': 'Closed', 877 | 'settlement': 'No', 878 | 'start': fromiso("2017-05-18 10:30:00", "America/Argentina/Buenos_Aires"), 879 | 'end': fromiso("2017-05-18 11:00:00", "America/Argentina/Buenos_Aires"), 880 | 'has_settlement': False, 881 | 'is_open': False, 882 | 'timezone': 'America/Argentina/Buenos_Aires'}, 883 | {'phase_type': 'Primary Trading 
Session', 884 | 'phase_name': 'Trading Session', 885 | 'phase_memo': None, 886 | 'status': 'Open', 887 | 'settlement': 'Yes', 888 | 'start': fromiso("2017-05-18 11:00:00", "America/Argentina/Buenos_Aires"), 889 | 'end': fromiso("2017-05-18 17:00:00", "America/Argentina/Buenos_Aires"), 890 | 'has_settlement': True, 891 | 'is_open': True, 892 | 'timezone': 'America/Argentina/Buenos_Aires'}, 893 | {'phase_type': 'Post-Trading Session', 894 | 'phase_name': 'Extended Hours', 895 | 'phase_memo': None, 896 | 'status': 'Closed', 897 | 'settlement': 'No', 898 | 'start': fromiso("2017-05-18 17:05:00", "America/Argentina/Buenos_Aires"), 899 | 'end': fromiso("2017-05-18 17:15:00", "America/Argentina/Buenos_Aires"), 900 | 'has_settlement': False, 901 | 'is_open': False, 902 | 'timezone': 'America/Argentina/Buenos_Aires'}, 903 | {'phase_type': 'Pre-Trading Session', 904 | 'phase_name': 'Opening Session', 905 | 'phase_memo': None, 906 | 'status': 'Closed', 907 | 'settlement': 'No', 908 | 'start': fromiso("2017-05-19 10:30:00", "America/Argentina/Buenos_Aires"), 909 | 'end': fromiso("2017-05-19 11:00:00", "America/Argentina/Buenos_Aires"), 910 | 'has_settlement': False, 911 | 'is_open': False, 912 | 'timezone': 'America/Argentina/Buenos_Aires'}, 913 | {'phase_type': 'Primary Trading Session', 914 | 'phase_name': 'Trading Session', 915 | 'phase_memo': None, 916 | 'status': 'Open', 917 | 'settlement': 'Yes', 918 | 'start': fromiso("2017-05-19 11:00:00", "America/Argentina/Buenos_Aires"), 919 | 'end': fromiso("2017-05-19 17:00:00", "America/Argentina/Buenos_Aires"), 920 | 'has_settlement': True, 921 | 'is_open': True, 922 | 'timezone': 'America/Argentina/Buenos_Aires'}, 923 | {'phase_type': 'Post-Trading Session', 924 | 'phase_name': 'Extended Hours', 925 | 'phase_memo': None, 926 | 'status': 'Closed', 927 | 'settlement': 'No', 928 | 'start': fromiso("2017-05-19 17:05:00", "America/Argentina/Buenos_Aires"), 929 | 'end': fromiso("2017-05-19 17:15:00", 
def test_generate_phases(fin_id, start, end, expected):
    """Phases generated for *fin_id* between *start* and *end* must match
    the expected fixture records exactly, and every record (calculated and
    expected alike) must be internally consistent: the timezone agrees with
    the market and both datetime endpoints, and the boolean flags agree
    with their string labels.
    """
    market = Market.get(fin_id)
    phases = [phase.to_dict() for phase in market.generate_phases(start, end)]

    assert len(phases) == len(expected)
    for got, want in zip(phases, expected):
        assert got["start"] == want["start"]
        assert got["end"] == want["end"]

        # Timezone must be the market's, and must match both endpoints' tzinfo.
        assert got["timezone"] == market.timezone
        assert got["timezone"] == str(got["start"].tzinfo) == str(got["end"].tzinfo)

        # Boolean flags must agree with their string labels — checked for the
        # calculated record and for the expected fixture (guards against a
        # typo in the test data itself).
        for record in (got, want):
            assert ((record["has_settlement"] is False and record["settlement"] == 'No') or
                    (record["has_settlement"] is True and record["settlement"] == 'Yes'))
            assert ((record["is_open"] is False and record["status"] == 'Closed') or
                    (record["is_open"] is True and record["status"] == 'Open'))

        # Full-record comparison catches any field the spot checks above missed.
        assert str(got) == str(want)
Schedule (overnight) 971 | # ("US.CME.EQUITY.USINDEX1", "2023-11-23", "2023-11-23", 972 | # # Test there are not schedules coming from the proceeding day when there is a holiday, but normally there would be an overnight schedule 973 | # ("US.CME.EQUITY.USINDEX1", "2023-12-25", "2023-12-25", 974 | # # Test there are correct schedules on a working Weekend (If Saturday is set 975 | # # as Regular in the holidays table, but the regular schedule is normally 976 | # # M-F, ignore the day of week.) 977 | # ("CN.CIBM", "2020-01-19", "2020-01-19", 978 | # # Test the correct schedule for the day of the week is returned for schedule 979 | # # with different hours on different days of the week 980 | # # -- THURSDAY 981 | # ("US.CBOE.VIX", "2020-10-15", "2020-10-15", 982 | # # -- FRIDAY 983 | # ("US.CBOE.VIX", "2020-10-16", "2020-10-16", 984 | # # Test there are correct schedules on irregular day when the irregular schedule 985 | # # does have a schedule for the current day of the week 986 | # # -- SUNDAY 987 | # ("US.CME.AGRI.DAIRY1", "2022-01-16", "2022-01-16", 988 | # # -- MONDAY 989 | # ("US.CME.AGRI.DAIRY1", "2022-01-17", "2022-01-17", 990 | # # -- REGULAR SUNDAY 991 | # ("US.CME.AGRI.DAIRY1", "2022-01-09", "2022-01-09", 992 | # # -- REGULAR MONDAY 993 | # ("US.CME.AGRI.DAIRY1", "2022-01-10", "2022-01-10", 994 | # # Test Seasonality cases 995 | # # -- SEASON 996 | # ("US.BTEC.ACTIVES.US", "2023-03-09", "2023-03-09", 997 | # # -- OVERNIGHT 998 | # ("US.BTEC.ACTIVES.US", "2023-11-12", "2023-11-12", 999 | # # Testing holiday with offset 1000 | # # -- SINGLE 1001 | # ("CN.SGE", "2023-01-02", "2023-01-02", 1002 | # # -- MULTI 1003 | # ("CN.SGE", "2023-01-01", "2023-01-03", 1004 | # # Partial followed by fully closed holiday, including overnight session 1005 | # ("CN.SGE", "2024-09-27", "2024-10-04", 1006 | # # replaced market transition (2017-04-16) 1007 | # # TODO: Should following a replaced market get rid off the replaced data? 
import pytest, calendar
from tradinghours import Market
from tradinghours.models import MarketHoliday
from tradinghours.exceptions import DateNotAvailable, NoAccess
import tradinghours.store as st

from .utils import fromiso

# A permanently closed market is "followed" to its successor by default;
# passing follow=False returns the closed market itself.
@pytest.mark.parametrize("method, args, expected", [
    (Market.get, ("XBUE",), "AR.BYMA"),
    (Market.get, ("XBUE", False), "AR.BCBA"),
    (Market.get, ("AR.BCBA",), "AR.BYMA"),
    (Market.get, ("AR.BCBA", False), "AR.BCBA"),
    (Market.get_by_mic, ("XBUE",), "AR.BYMA"),
    (Market.get_by_mic, ("XBUE", False), "AR.BCBA"),
    (Market.get_by_finid, ("AR.BCBA",), "AR.BYMA"),
    (Market.get_by_finid, ("AR.BCBA", False), "AR.BCBA"),
])
def test_market_follow(method, args, expected):
    """Each lookup method resolves a replaced market per its follow flag."""
    resolved = method(*args)
    assert str(resolved.fin_id) == expected

# MIC and FinID lookups must be case-insensitive.
@pytest.mark.parametrize("method, identifier, expected", [
    (Market.get_by_finid, "AR.BYMA", "AR.BYMA"),
    (Market.get_by_finid, "ar.byma", "AR.BYMA"),
    (Market.get_by_finid, "aR.ByMa", "AR.BYMA"),
    (Market.get_by_mic, "XBUE", "AR.BYMA"),
    (Market.get_by_mic, "xbue", "AR.BYMA"),
    (Market.get_by_mic, "xBuE", "AR.BYMA"),
])
def test_market_case_insensitivity(method, identifier, expected):
    """Identifier case never affects which market is returned."""
    assert str(method(identifier).fin_id) == expected


def test_market_list_all():
    """list_all() returns every market row; a prefix pattern filters by FinID."""
    markets = Market.list_all()
    assert len(markets) == len(list(st.db.query(Market.table())))

    us_markets = Market.list_all("US*")
    assert all(m.fin_id.startswith("US") for m in us_markets)
found) 47 | 48 | @pytest.mark.parametrize("fin_id", [ 49 | "US.NYSE", "US.CME.EQUITY.USINDEX1", "US.CBOE.VIX" 50 | ]) 51 | def test_market_available_dates(fin_id): 52 | market = Market.get(fin_id) 53 | table = MarketHoliday.table() 54 | 55 | first_should_be = st.db.query(table).filter( 56 | table.c.fin_id == fin_id 57 | ).order_by( 58 | table.c.date 59 | ).first().date.replace(day=1) 60 | 61 | last_should_be = st.db.query(table).filter( 62 | table.c.fin_id == fin_id 63 | ).order_by( 64 | table.c.date.desc() 65 | ).first().date 66 | _, num_days_in_month = calendar.monthrange(last_should_be.year, last_should_be.month) 67 | last_should_be = last_should_be.replace(day=num_days_in_month) 68 | 69 | assert market.first_available_date == first_should_be 70 | assert market.last_available_date == last_should_be 71 | 72 | if st.db.access_level != st.AccessLevel.only_holidays: 73 | with pytest.raises(DateNotAvailable): 74 | list(market.generate_phases("1900-01-01", "2020-01-01")) 75 | with pytest.raises(DateNotAvailable): 76 | list(market.generate_phases("2020-01-01", "2099-01-01")) 77 | with pytest.raises(DateNotAvailable): 78 | list(market.generate_phases("1900-01-01", "2099-01-01")) 79 | 80 | with pytest.raises(DateNotAvailable): 81 | market.status(fromiso("1900-01-01", "America/New_York")) 82 | with pytest.raises(DateNotAvailable): 83 | market.status(fromiso("2099-01-01", "America/New_York")) 84 | 85 | 86 | @pytest.mark.xfail( 87 | st.db.access_level == st.AccessLevel.only_holidays, 88 | reason="No access", 89 | strict=True, 90 | raises=NoAccess 91 | ) 92 | @pytest.mark.parametrize("fin_id, datetime, expected", [ 93 | ("US.NYSE", fromiso("2023-11-15 12:00", "America/New_York"), 94 | { 95 | "status": "Open", 96 | "reason": "Primary Trading Session", 97 | "until": fromiso("2023-11-15 15:50", "America/New_York"), 98 | "next_bell": fromiso("2023-11-15 16:00", "America/New_York"), 99 | # "timezone": "America/New_York", 100 | }), 101 | ("US.NYSE", fromiso("2023-11-15 18:00", 
@pytest.mark.xfail(
    st.db.access_level == st.AccessLevel.only_holidays,
    reason="No access",
    strict=True,
    raises=NoAccess
)
@pytest.mark.parametrize("fin_id, datetime, expected", [
    ("US.NYSE", fromiso("2023-11-15 12:00", "America/New_York"),
     {"status": "Open",
      "reason": "Primary Trading Session",
      "until": fromiso("2023-11-15 15:50", "America/New_York"),
      "next_bell": fromiso("2023-11-15 16:00", "America/New_York")}),
    ("US.NYSE", fromiso("2023-11-15 18:00", "America/New_York"),
     {"status": "Closed",
      "reason": "Post-Trading Session",
      "until": fromiso("2023-11-15 20:00", "America/New_York"),
      "next_bell": fromiso("2023-11-16 09:30", "America/New_York")}),
    # Saturday: fully closed with no phase in effect.
    ("US.NYSE", fromiso("2023-11-11 18:00", "America/New_York"),
     {"status": "Closed",
      "reason": None,
      "until": fromiso("2023-11-13 04:00", "America/New_York"),
      "next_bell": fromiso("2023-11-13 09:30", "America/New_York")}),
    # Partial trading day.
    ("US.NYSE", fromiso("2023-11-24 10:00", "America/New_York"),
     {"status": "Open",
      "reason": "Thanksgiving Day - Primary Trading Session (Partial)",
      "until": fromiso("2023-11-24 13:00", "America/New_York"),
      "next_bell": fromiso("2023-11-24 13:00", "America/New_York")}),
    # Full-day holiday.
    ("US.NYSE", fromiso("2024-12-25 10:00", "America/New_York"),
     {"status": "Closed",
      "reason": "Christmas",
      "until": fromiso("2024-12-26 04:00", "America/New_York"),
      "next_bell": fromiso("2024-12-26 09:30", "America/New_York")}),
    # Boundary checks right at / just before the closing bell.
    ("US.NYSE", fromiso("2023-11-15 16:00", "America/New_York"),
     {"status": "Closed",
      "reason": "Post-Trading Session",
      "until": fromiso("2023-11-15 20:00", "America/New_York"),
      "next_bell": fromiso("2023-11-16 09:30", "America/New_York")}),
    ("US.NYSE", fromiso("2023-11-15 15:59", "America/New_York"),
     {"status": "Open",
      "reason": "Primary Trading Session",
      "until": fromiso("2023-11-15 16:00", "America/New_York"),
      "next_bell": fromiso("2023-11-15 16:00", "America/New_York")}),
    ("US.MGEX", fromiso("2024-09-30 07:45", "America/Chicago"),
     {"status": "Closed",
      "reason": None,
      "until": fromiso("2024-09-30 08:00", "America/Chicago"),
      "next_bell": fromiso("2024-09-30 08:30", "America/Chicago")}),
    ("US.MGEX", fromiso("2024-09-30 08:00", "America/Chicago"),
     {"status": "Closed",
      "reason": "Pre-Open",
      "until": fromiso("2024-09-30 08:30", "America/Chicago"),
      "next_bell": fromiso("2024-09-30 08:30", "America/Chicago")}),
    ("US.MGEX", fromiso("2024-09-30 15:00", "America/Chicago"),
     {"status": "Closed",
      "reason": "Post-Trading Session",
      "until": fromiso("2024-09-30 16:00", "America/Chicago"),
      "next_bell": fromiso("2024-09-30 19:00", "America/Chicago")}),
    # Overnight session spanning midnight.
    ("US.MGEX", fromiso("2024-10-04 00:00", "America/Chicago"),
     {"status": "Open",
      "reason": "Primary Trading Session",
      "until": fromiso("2024-10-04 07:45", "America/Chicago"),
      "next_bell": fromiso("2024-10-04 07:45", "America/Chicago")}),
])
def test_market_status(fin_id, datetime, expected):
    """Market.status() reports the expected fields at a given moment."""
    observed = Market.get(fin_id).status(datetime=datetime).to_dict()
    # Compare only the keys each expectation cares about.
    assert {key: observed.get(key) for key in expected} == expected
# NOTE: a commented-out test_model_fields (verifying that model field names are
# the snake_case of the published column names) previously lived here; restore
# from VCS history if the column-mapping check is ever needed again.


def test_market_instance_fields():
    """Spot-check scalar attributes of a Market looked up by FinID."""
    nyse = Market.get("US.NYSE")
    assert nyse.fin_id == "US.NYSE"
    assert nyse.mic == "XNYS"
    assert nyse.weekend_definition == "Sat-Sun"


def test_market_holiday_instance_fields():
    """Holiday rows expose raw strings plus derived booleans."""
    nyse = Market.get("XNYS")
    holidays = nyse.list_holidays("2007-11-20", "2007-11-23")
    assert len(holidays) == 2

    first, second = holidays

    # Thanksgiving itself: fully closed, no settlement, not observed.
    assert first.holiday_name == "Thanksgiving Day"
    assert first.settlement == 'No'
    assert first.has_settlement is False
    assert first.status == 'Closed'
    assert first.is_open is False
    assert first.observed is False

    # The observed entry: open with settlement.
    assert second.holiday_name == "Thanksgiving Day"
    assert second.settlement == 'Yes'
    assert second.has_settlement is True
    assert second.status == 'Open'
    assert second.is_open is True
    assert second.observed is True


@pytest.mark.xfail(
    st.db.access_level != st.AccessLevel.full,
    reason="No access", strict=True, raises=NoAccess
)
def test_currency_instance_fields():
    """Currency lookups require full access."""
    assert Currency.get("AUD").weekend_definition == "Sat-Sun"


def test_phase_type_instance_fields():
    """PhaseType.as_dict() exposes the 13 known phase types with consistent
    status/settlement flags; holiday-only access raises NoAccess instead."""
    if st.db.access_level == st.AccessLevel.only_holidays:
        with pytest.raises(NoAccess):
            PhaseType.as_dict()
        return

    phase_types = PhaseType.as_dict()
    assert len(phase_types) == 13

    # Build the expectations: one open-with-settlement session, two open
    # sessions without settlement, and ten closed phases.
    expected = {
        'Primary Trading Session':
            ('Primary Trading Session', 'Open', 'Yes', True, True),
    }
    for name in ('Primary Trading Session, No Settlement',
                 'Primary Trading Session, No Closing Price'):
        expected[name] = (name, 'Open', 'No', False, True)
    for name in ('Intermission', 'Pre-Trading Session', 'Post-Trading Session',
                 'Trading-at-Last', 'Pre-Open', 'Pre-Close',
                 'Order Collection Period', 'Call Auction',
                 'Settlement Window', 'Other'):
        expected[name] = (name, 'Closed', 'No', False, False)

    for name, phase in phase_types.items():
        assert expected[name] == (str(phase.name),
                                  str(phase.status),
                                  str(phase.settlement),
                                  phase.has_settlement,
                                  phase.is_open)
def test_string_format():
    """str() of each model follows its class-level string format."""
    # TODO: go over the error messages that are supposed to be shown
    market = Market.get('US.NYSE')
    assert str(market) == 'Market: US.NYSE New York Stock Exchange America/New_York'

    market_holiday = market.list_holidays("2007-11-20", "2007-11-23")[0]
    assert str(market_holiday) == 'MarketHoliday: US.NYSE 2007-11-22 Thanksgiving Day'

    # Currencies are only available at full access level.
    if st.db.access_level != st.AccessLevel.full:
        with pytest.raises(NoAccess):
            Currency.get('AUD')
        # assert str(exception.value) == "You didn't run `tradinghours import` or you dont have access to currencies."
    else:
        currency = Currency.get('AUD')
        assert str(currency) == 'Currency: AUD Australian Dollar'

        currency_holiday = currency.list_holidays("2020-01-27", "2020-01-27")[0]
        assert str(currency_holiday) == 'CurrencyHoliday: AUD 2020-01-27 Australia Day'

    # Schedules, phases, and seasons need more than holiday-only access.
    if st.db.access_level == st.AccessLevel.only_holidays:
        with pytest.raises(NoAccess):
            Market.get("US.NYSE").list_schedules()
        # assert str(exception.value) == r"You didn't run `tradinghours import` or you dont have access to schedules/us-nyse."

        with pytest.raises(NoAccess):
            list(market.generate_phases("2024-02-06", "2024-02-06"))
        # assert str(exception.value) == r"You didn't run `tradinghours import` or you dont have access to phases."

        with pytest.raises(NoAccess):
            SeasonDefinition.get("First day of March", 2022)
        # assert str(exception.value) == r"You didn't run `tradinghours import` or you dont have access to season-definitions."
    else:
        schedule = Market.get("US.NYSE").list_schedules()
        assert str(schedule[0]) == "Schedule: US.NYSE (Partial) 06:30:00 - 09:30:00 Mon-Fri Pre-Trading Session"

        schedule = Market.get("US.MGEX").list_schedules()
        assert str(schedule[-1]) == "Schedule: US.MGEX (Thanksgiving2023) 16:45:00 - 08:30:00 +2 Wed Pre-Open"

        concrete_phase = list(market.generate_phases("2024-02-06", "2024-02-06"))[0]
        assert str(concrete_phase) == 'Phase: 2024-02-06 04:00:00-05:00 - 2024-02-06 09:30:00-05:00 Pre-Trading Session'

        season = SeasonDefinition.get("First day of March", 2022)
        assert str(season) == 'SeasonDefinition: 2022-03-01 First day of March'


def test_set_string_format():
    """set_string_format changes str() output; reset restores the default."""
    market = Market.get('ZA.JSE.SAFEX')

    Market.set_string_format("{acronym} - {asset_type}")
    assert str(market) == "JSE - Derivatives"

    # The class name can be prefixed automatically.
    Market.set_string_format("{acronym} - {asset_type}", prefix_class=True)
    assert str(market) == "Market: JSE - Derivatives"

    Market.reset_string_format()
    assert str(market) == 'Market: ZA.JSE.EQUITIES.DRV Johannesburg Stock Exchange Africa/Johannesburg'

    holiday = Market.get("XNYS").list_holidays("2022-01-17", "2022-01-17")[0]
    assert str(holiday) == "MarketHoliday: US.NYSE 2022-01-17 Birthday of Martin Luther King, Jr"

    holiday.set_string_format("{holiday_name} on {date} is open: {is_open} and has settlement: {has_settlement}")
    assert str(holiday) == "Birthday of Martin Luther King, Jr on 2022-01-17 is open: False and has settlement: False"

    MarketHoliday.reset_string_format()


def test_market_raw_data():
    """The .data mapping exposes the raw ingested holiday row."""
    holiday = Market.get("XNYS").list_holidays("2007-11-20", "2007-11-23")[0]
    assert holiday.data["settlement"] == "No"
    assert holiday.data["status"] == "Closed"
    assert holiday.data["observed"] is False


@pytest.mark.xfail(
    st.db.access_level != st.AccessLevel.full,
    reason="No access", strict=True, raises=NoAccess
)
def test_currency_raw_data():
    """Raw currency-holiday rows are available at full access only."""
    holiday = Currency.get('AUD').list_holidays("2020-01-27", "2020-01-27")[0]
    assert holiday.data["settlement"] == "No"
    assert holiday.data["observed"] is True
def test_follow_market():
    """A permanently closed market follows to its replacement by default."""
    # AR.BCBA is permanently closed and replaced by AR.BYMA
    market = Market.get('AR.BCBA')
    original = Market.get('AR.BCBA', follow=False)

    assert market.fin_id == "AR.BYMA"
    assert original.fin_id == "AR.BCBA"


def test_market_list_holidays():
    """Holiday rows render through MarketHoliday's string format."""
    holidays = Market.get('US.NYSE').list_holidays("2024-01-01", "2024-12-31")

    for obj in holidays[:3]:
        assert str(obj) == MarketHoliday.get_string_format().format(**obj.to_dict())


@pytest.mark.xfail(
    st.db.access_level == st.AccessLevel.only_holidays,
    reason="No access", strict=True, raises=NoAccess
)
def test_generate_phases():
    """Generated phases render through Phase's string format."""
    market = Market.get('XNYS')
    schedules = market.generate_phases("2023-09-01", "2023-09-30")

    for obj in schedules:
        assert str(obj) == Phase.get_string_format().format(**obj.to_dict())


@pytest.mark.xfail(
    st.db.access_level != st.AccessLevel.full,
    reason="No access", strict=True, raises=NoAccess
)
def test_currencies_list_all():
    """Currencies render through Currency's string format (full access only)."""
    for obj in Currency.list_all():
        assert str(obj) == Currency.get_string_format().format(**obj.to_dict())


@pytest.mark.xfail(
    st.db.access_level != st.AccessLevel.full,
    reason="No access", strict=True, raises=NoAccess
)
def test_currency_list_holidays():
    """Currency holidays render through CurrencyHoliday's string format."""
    currency = Currency.get('AUD')
    for obj in currency.list_holidays("2023-06-01", "2023-12-31"):
        assert str(obj) == CurrencyHoliday.get_string_format().format(**obj.to_dict())


def strip(line, sub):
    """Return *line* truncated just before the first occurrence of *sub*,
    or unchanged if *sub* does not occur."""
    try:
        return line[:line.index(sub)]
    except ValueError:
        return line


@pytest.mark.xfail(
    st.db.access_level != st.AccessLevel.full,
    reason="No access", strict=True, raises=NoAccess
)
def test_code_blocks():
    """Execute every ```python code block found in README.md.

    Each block is split into code lines and expected-output lines (lines
    following a '>>> ' prompt); only the code part is exec'd.  Comparing the
    captured output against the documented output is currently disabled.
    """
    with open(Path("README.md"), "r", encoding="utf-8") as readme:
        readme = readme.readlines()

    code_blocks = []
    in_block = False  # False=Not in block, True=in code part of block, None=in output part of block
    block = ""
    out = ""
    for line in readme:
        if line.startswith("```python"):
            in_block = True
            block = ""
            out = ""
        elif in_block in (True, None):
            if line.startswith("```"):
                code_blocks.append((block, out))
                in_block = False
                continue
            if line.startswith(">>> ") or in_block is None:
                in_block = None
                out += line[4:]
                continue

            # readlines() keeps the trailing "\n"; appending another one
            # (as the previous version did) double-spaced the code.
            block += line

    for code_block, output in code_blocks:
        original_stdout = sys.stdout
        sys.stdout = io.StringIO()
        # try/finally guarantees sys.stdout is restored even when a README
        # snippet raises; previously a failing exec() left stdout replaced
        # for the rest of the test session.
        try:
            exec(code_block)
            captured_out = sys.stdout.getvalue()
        finally:
            sys.stdout.close()
            sys.stdout = original_stdout
        # assert captured_out == output


if __name__ == '__main__':
    nprint = lambda *s: print("\n", *s)

    print("Markets:")
    test_market_list_all()
    nprint("Markets:")
    test_get_by_finid_or_mic()
    nprint("Market.fin_ids:")
    test_follow_market()
    nprint("MarketHolidays:")
    test_market_list_holidays()
    nprint("Schedules:")
    test_generate_phases()
    nprint("Currency:")
    test_currencies_list_all()
    nprint("CurrencyHolidays:")
    test_currency_list_holidays()
@pytest.fixture
def mocked_client():
    """Point the client module at test credentials and hand it back."""
    client.TOKEN = "test_token"
    client.BASE_URL = "http://example.com"
    return client

@pytest.fixture
def patch_response(mocker):
    """Factory fixture: patch get_response to return a 200 with given body."""
    def _patch(content):
        payload = content.encode("utf-8")

        class FakeResponse:
            status = 200

            def read(self):
                return payload

        return mocker.patch("tradinghours.client.get_response",
                            return_value=FakeResponse())
    return _patch

@pytest.fixture
def patch_response_error(mocker):
    """Factory fixture: patch get_response to raise the given exception."""
    def _patch(exception):
        return mocker.patch("tradinghours.client.get_response",
                            side_effect=exception)
    return _patch

@pytest.fixture
def patch_response_file(mocker):
    """Factory fixture: patch get_response with the raw bytes of a file."""
    def _patch(file_path):
        with open(file_path, "rb") as file:
            content = file.read()
        return mocker.patch("tradinghours.client.get_response",
                            return_value=[content, b""])
    return _patch

def test_urlopen_successful(client_urlopen):
    """get_response passes through whatever urlopen yields."""
    client, mock_urlopen = client_urlopen
    mock_urlopen.return_value = "foobar"
    assert client.get_response("/test") == "foobar"

def test_urlopen_token_error(client_urlopen):
    """HTTP 401 is surfaced as TokenError."""
    client, mock_urlopen = client_urlopen
    mock_urlopen.side_effect = HTTPError("url", 401, "Unauthorized", None, None)

    with pytest.raises(TokenError):
        with client.get_response("/test"):
            pass

def test_urlopen_client_error(client_urlopen):
    """Any other HTTP error is surfaced as ClientError."""
    client, mock_urlopen = client_urlopen
    mock_urlopen.side_effect = HTTPError("url", 500, "Server Error", None, None)

    with pytest.raises(ClientError):
        with client.get_response("/test"):
            pass


def test_download_temporary_token_error(mocked_client, patch_response_error):
    """download_zip_file propagates TokenError from get_response."""
    patch_response_error(TokenError("Token is missing or invalid"))
    with pytest.raises(TokenError):
        client.download_zip_file("/test")

def test_download_temporary_client_error(mocked_client, patch_response_error):
    """download_zip_file propagates ClientError from get_response."""
    patch_response_error(ClientError("Error getting server response"))
    with pytest.raises(ClientError):
        client.download_zip_file("/test")

def test_remote_timestamp(patch_response):
    """get_remote_timestamp parses the last_updated ISO string."""
    timestamp = "2023-10-27T12:00:00"
    patch_response(f'{{"last_updated":"{timestamp}"}}')

    assert client.get_remote_timestamp() == datetime.datetime.fromisoformat(timestamp)

# NOTE: a commented-out test_no_duplicates (guarding against CSV rows being
# appended twice during ingestion) previously lived here; restore from VCS
# history if that ingest bug ever resurfaces.
'Partial', 'in_force_start_date': None, 'season_start': None, 14 | 'start': datetime.time(6, 30), 'end': datetime.time(9, 30), 'fin_id': 'US.NYSE', 15 | 'schedule_group_memo': None, 'timezone': 'America/New_York', 'phase_type': 'Pre-Trading Session', 16 | 'phase_name': 'Pre-Opening Session', 'phase_memo': None, 'days': 'Mon-Fri', 17 | 'offset_days': 0, 'duration': 10800, 'min_start': None, 'max_start': None, 'min_end': None, 18 | 'max_end': None, 'in_force_end_date': None, 'season_end': None, 'end_with_offset': '09:30:00 ', 19 | 'has_season': False}, 20 | {'schedule_group': 'Partial', 'in_force_start_date': None, 'season_start': None, 21 | 'start': datetime.time(9, 30), 'end': datetime.time(13, 0), 22 | 'fin_id': 'US.NYSE', 'schedule_group_memo': None, 'timezone': 'America/New_York', 23 | 'phase_type': 'Primary Trading Session', 'phase_name': 'Core Trading Session', 24 | 'phase_memo': None, 'days': 'Mon-Fri', 'offset_days': 0, 'duration': 12600, 25 | 'min_start': None, 'max_start': None, 'min_end': None, 'max_end': None, 26 | 'in_force_end_date': None, 'season_end': None, 'end_with_offset': '13:00:00 ', 27 | 'has_season': False}, 28 | {'schedule_group': 'Partial', 'in_force_start_date': None, 'season_start': None, 29 | 'start': datetime.time(13, 0), 'end': datetime.time(13, 30), 'fin_id': 'US.NYSE', 30 | 'schedule_group_memo': None, 'timezone': 'America/New_York', 'phase_type': 'Post-Trading Session', 31 | 'phase_name': 'Crossing Session', 'phase_memo': None, 'days': 'Mon-Fri', 32 | 'offset_days': 0, 'duration': 1800, 'min_start': None, 'max_start': None, 'min_end': None, 33 | 'max_end': None, 'in_force_end_date': None, 'season_end': None, 'end_with_offset': '13:30:00 ', 34 | 'has_season': False}, 35 | {'schedule_group': 'Regular', 'in_force_start_date': None, 'season_start': None, 36 | 'start': datetime.time(4, 0), 'end': datetime.time(9, 30), 'fin_id': 'US.NYSE', 37 | 'schedule_group_memo': None, 'timezone': 'America/New_York', 38 | 'phase_type': 'Pre-Trading 
Session', 'phase_name': 'Pre-Trading Session', 'phase_memo': None, 39 | 'days': 'Mon-Fri', 'offset_days': 0, 'duration': 19800, 'min_start': None, 40 | 'max_start': None, 'min_end': None, 'max_end': None, 'in_force_end_date': None, 41 | 'season_end': None, 'end_with_offset': '09:30:00 ', 'has_season': False}, 42 | {'schedule_group': 'Regular', 'in_force_start_date': None, 'season_start': None, 43 | 'start': datetime.time(6, 30), 'end': datetime.time(9, 30), 'fin_id': 'US.NYSE', 44 | 'schedule_group_memo': None, 'timezone': 'America/New_York', 'phase_type': 'Pre-Open', 45 | 'phase_name': 'Pre-Opening Session', 'phase_memo': None, 'days': 'Mon-Fri', 46 | 'offset_days': 0, 'duration': 10800, 'min_start': None, 'max_start': None, 'min_end': None, 47 | 'max_end': None, 'in_force_end_date': None, 'season_end': None, 'end_with_offset': '09:30:00 ', 48 | 'has_season': False}, 49 | {'schedule_group': 'Regular', 'in_force_start_date': None, 'season_start': None, 50 | 'start': datetime.time(9, 30), 'end': datetime.time(9, 30), 'fin_id': 'US.NYSE', 51 | 'schedule_group_memo': None, 'timezone': 'America/New_York', 'phase_type': 'Call Auction', 'phase_name': 'Core Open Auction', 'phase_memo': None, 52 | 'days': 'Mon-Fri', 'offset_days': 0, 'duration': 0, 'min_start': None, 53 | 'max_start': None, 'min_end': None, 'max_end': None, 'in_force_end_date': None, 'season_end': None, 54 | 'end_with_offset': '09:30:00 ', 'has_season': False}, 55 | {'schedule_group': 'Regular', 'in_force_start_date': None, 'season_start': None, 56 | 'start': datetime.time(9, 30), 'end': datetime.time(16, 0), 'fin_id': 'US.NYSE', 'schedule_group_memo': None, 'timezone': 'America/New_York', 57 | 'phase_type': 'Primary Trading Session', 'phase_name': 'Core Trading Session', 'phase_memo': None, 58 | 'days': 'Mon-Fri', 'offset_days': 0, 'duration': 23400, 'min_start': None, 'max_start': None, 59 | 'min_end': None, 'max_end': None, 'in_force_end_date': None, 'season_end': None, 60 | 'end_with_offset': '16:00:00 
', 'has_season': False}, 61 | {'schedule_group': 'Regular', 'in_force_start_date': None, 'season_start': None, 62 | 'start': datetime.time(15, 50), 'end': datetime.time(16, 0), 'fin_id': 'US.NYSE', 63 | 'schedule_group_memo': None, 'timezone': 'America/New_York', 'phase_type': 'Pre-Close', 64 | 'phase_name': 'Closing Imbalance Period', 'phase_memo': None, 'days': 'Mon-Fri', 'offset_days': 0, 65 | 'duration': 600, 'min_start': None, 'max_start': None, 'min_end': None, 'max_end': None, 66 | 'in_force_end_date': None, 'season_end': None, 'end_with_offset': '16:00:00 ', 'has_season': False}, 67 | {'schedule_group': 'Regular', 'in_force_start_date': None, 'season_start': None, 68 | 'start': datetime.time(16, 0), 'end': datetime.time(20, 0), 'fin_id': 'US.NYSE', 69 | 'schedule_group_memo': None, 'timezone': 'America/New_York', 'phase_type': 'Post-Trading Session', 70 | 'phase_name': 'Extended Hours', 'phase_memo': None, 'days': 'Mon-Fri', 'offset_days': 0, 71 | 'duration': 14400, 'min_start': None, 'max_start': None, 'min_end': None, 'max_end': None, 72 | 'in_force_end_date': None, 'season_end': None, 'end_with_offset': '20:00:00 ', 73 | 'has_season': False} 74 | ] 75 | ), 76 | ("AE.ADX", [ 77 | {'schedule_group': 'Regular', 'in_force_start_date': None, 'season_start': None, 78 | 'start': datetime.time(9, 30), 'end': datetime.time(10, 0), 'fin_id': 'AE.ADX', 79 | 'schedule_group_memo': None, 'timezone': 'Asia/Dubai', 'phase_type': 'Pre-Open', 80 | 'phase_name': 'Pre-open Session', 'phase_memo': None, 'days': 'Sun-Thu', 81 | 'offset_days': 0, 'duration': 1800, 'min_start': None, 'max_start': None, 'min_end': None, 82 | 'max_end': None, 'in_force_end_date': datetime.date(2021, 3, 3), 'season_end': None, 83 | 'end_with_offset': '10:00:00 ', 'has_season': False}, 84 | {'schedule_group': 'Regular', 'in_force_start_date': None, 'season_start': None, 85 | 'start': datetime.time(10, 0), 'end': datetime.time(13, 50), 'fin_id': 'AE.ADX', 86 | 'schedule_group_memo': None, 
'timezone': 'Asia/Dubai', 'phase_type': 'Primary Trading Session', 87 | 'phase_name': 'Continuous Trading Session', 'phase_memo': None, 'days': 'Sun-Thu', 88 | 'offset_days': 0, 'duration': 13800, 'min_start': None, 'max_start': None, 'min_end': None, 89 | 'max_end': None, 'in_force_end_date': datetime.date(2021, 3, 3), 'season_end': None, 90 | 'end_with_offset': '13:50:00 ', 'has_season': False}, 91 | {'schedule_group': 'Regular', 'in_force_start_date': None, 'season_start': None, 92 | 'start': datetime.time(13, 50), 'end': datetime.time(13, 55), 'fin_id': 'AE.ADX', 93 | 'schedule_group_memo': None, 'timezone': 'Asia/Dubai', 'phase_type': 'Pre-Close', 94 | 'phase_name': 'Pre-Closing Session', 'phase_memo': None, 'days': 'Sun-Thu', 95 | 'offset_days': 0, 'duration': 300, 'min_start': None, 'max_start': None, 'min_end': None, 96 | 'max_end': None, 'in_force_end_date': datetime.date(2021, 3, 3), 'season_end': None, 97 | 'end_with_offset': '13:55:00 ', 'has_season': False}, 98 | {'schedule_group': 'Regular', 'in_force_start_date': datetime.date(2021, 3, 4), 'season_start': None, 99 | 'start': datetime.time(9, 30), 'end': datetime.time(10, 0), 'fin_id': 'AE.ADX', 100 | 'schedule_group_memo': None, 'timezone': 'Asia/Dubai', 'phase_type': 'Pre-Open', 101 | 'phase_name': 'Pre-open Session', 'phase_memo': None, 'days': 'Sun-Thu', 102 | 'offset_days': 0, 'duration': 1800, 'min_start': None, 'max_start': None, 'min_end': None, 103 | 'max_end': None, 'in_force_end_date': datetime.date(2021, 10, 2), 'season_end': None, 104 | 'end_with_offset': '10:00:00 ', 'has_season': False}, 105 | {'schedule_group': 'Regular', 'in_force_start_date': datetime.date(2021, 3, 4), 'season_start': None, 106 | 'start': datetime.time(10, 0), 'end': datetime.time(13, 50), 'fin_id': 'AE.ADX', 107 | 'schedule_group_memo': None, 'timezone': 'Asia/Dubai', 'phase_type': 'Primary Trading Session', 108 | 'phase_name': 'Continuous Trading Session', 'phase_memo': None, 'days': 'Sun-Thu', 109 | 'offset_days': 
0, 'duration': 13800, 'min_start': None, 'max_start': None, 'min_end': None, 110 | 'max_end': None, 'in_force_end_date': datetime.date(2021, 10, 2), 'season_end': None, 111 | 'end_with_offset': '13:50:00 ', 'has_season': False}, 112 | {'schedule_group': 'Regular', 'in_force_start_date': datetime.date(2021, 3, 4), 'season_start': None, 113 | 'start': datetime.time(13, 50), 'end': datetime.time(13, 55), 'fin_id': 'AE.ADX', 114 | 'schedule_group_memo': None, 'timezone': 'Asia/Dubai', 'phase_type': 'Pre-Close', 115 | 'phase_name': 'Pre-Closing Session', 'phase_memo': None, 'days': 'Sun-Thu', 116 | 'offset_days': 0, 'duration': 300, 'min_start': None, 'max_start': None, 'min_end': None, 117 | 'max_end': None, 'in_force_end_date': datetime.date(2021, 10, 2), 'season_end': None, 118 | 'end_with_offset': '13:55:00 ', 'has_season': False}, 119 | {'schedule_group': 'Regular', 'in_force_start_date': datetime.date(2021, 3, 4), 'season_start': None, 120 | 'start': datetime.time(13, 55), 'end': datetime.time(14, 0), 'fin_id': 'AE.ADX', 121 | 'schedule_group_memo': None, 'timezone': 'Asia/Dubai', 'phase_type': 'Trading-at-Last', 122 | 'phase_name': 'Trade At Last Session', 'phase_memo': None, 'days': 'Sun-Thu', 123 | 'offset_days': 0, 'duration': 300, 'min_start': None, 'max_start': None, 'min_end': None, 124 | 'max_end': None, 'in_force_end_date': datetime.date(2021, 10, 2), 'season_end': None, 125 | 'end_with_offset': '14:00:00 ', 'has_season': False}, 126 | {'schedule_group': 'Regular', 'in_force_start_date': datetime.date(2021, 10, 3), 'season_start': None, 127 | 'start': datetime.time(9, 30), 'end': datetime.time(9, 59, 59), 'fin_id': 'AE.ADX', 128 | 'schedule_group_memo': None, 'timezone': 'Asia/Dubai', 'phase_type': 'Pre-Open', 129 | 'phase_name': 'Pre-open Session', 'phase_memo': None, 'days': 'Sun-Thu', 130 | 'offset_days': 0, 'duration': 1799, 'min_start': None, 'max_start': None, 'min_end': None, 131 | 'max_end': None, 'in_force_end_date': datetime.date(2022, 1, 2), 
'season_end': None, 132 | 'end_with_offset': '09:59:59 ', 'has_season': False}, 133 | {'schedule_group': 'Regular', 'in_force_start_date': datetime.date(2021, 10, 3), 'season_start': None, 134 | 'start': datetime.time(10, 0), 'end': datetime.time(14, 44, 59), 'fin_id': 'AE.ADX', 135 | 'schedule_group_memo': None, 'timezone': 'Asia/Dubai', 'phase_type': 'Primary Trading Session', 136 | 'phase_name': 'Continuous Trading Session', 'phase_memo': None, 'days': 'Sun-Thu', 137 | 'offset_days': 0, 'duration': 17099, 'min_start': None, 'max_start': None, 'min_end': None, 138 | 'max_end': None, 'in_force_end_date': datetime.date(2022, 1, 2), 'season_end': None, 139 | 'end_with_offset': '14:44:59 ', 'has_season': False}, 140 | {'schedule_group': 'Regular', 'in_force_start_date': datetime.date(2021, 10, 3), 'season_start': None, 141 | 'start': datetime.time(14, 45), 'end': datetime.time(14, 54, 59), 'fin_id': 'AE.ADX', 142 | 'schedule_group_memo': None, 'timezone': 'Asia/Dubai', 'phase_type': 'Pre-Close', 143 | 'phase_name': 'Pre-Closing Session', 'phase_memo': None, 'days': 'Sun-Thu', 144 | 'offset_days': 0, 'duration': 599, 'min_start': None, 'max_start': None, 'min_end': None, 145 | 'max_end': None, 'in_force_end_date': datetime.date(2022, 1, 2), 'season_end': None, 146 | 'end_with_offset': '14:54:59 ', 'has_season': False}, 147 | {'schedule_group': 'Regular', 'in_force_start_date': datetime.date(2021, 10, 3), 'season_start': None, 148 | 'start': datetime.time(14, 55), 'end': datetime.time(15, 0), 'fin_id': 'AE.ADX', 149 | 'schedule_group_memo': None, 'timezone': 'Asia/Dubai', 'phase_type': 'Trading-at-Last', 150 | 'phase_name': 'Trade At Last Session', 'phase_memo': None, 'days': 'Sun-Thu', 151 | 'offset_days': 0, 'duration': 300, 'min_start': None, 'max_start': None, 'min_end': None, 152 | 'max_end': None, 'in_force_end_date': datetime.date(2022, 1, 2), 'season_end': None, 153 | 'end_with_offset': '15:00:00 ', 'has_season': False}, 154 | {'schedule_group': 'Regular', 
'in_force_start_date': datetime.date(2022, 1, 3), 'season_start': None, 155 | 'start': datetime.time(9, 30), 'end': datetime.time(9, 59, 59), 'fin_id': 'AE.ADX', 156 | 'schedule_group_memo': None, 'timezone': 'Asia/Dubai', 'phase_type': 'Pre-Open', 157 | 'phase_name': 'Pre-open Session', 'phase_memo': None, 'days': 'Mon-Fri', 158 | 'offset_days': 0, 'duration': 1799, 'min_start': None, 'max_start': None, 'min_end': None, 159 | 'max_end': None, 'in_force_end_date': None, 'season_end': None, 160 | 'end_with_offset': '09:59:59 ', 'has_season': False}, 161 | {'schedule_group': 'Regular', 'in_force_start_date': datetime.date(2022, 1, 3), 'season_start': None, 162 | 'start': datetime.time(10, 0), 'end': datetime.time(14, 44, 59), 'fin_id': 'AE.ADX', 163 | 'schedule_group_memo': None, 'timezone': 'Asia/Dubai', 'phase_type': 'Primary Trading Session', 164 | 'phase_name': 'Continuous Trading Session', 'phase_memo': None, 'days': 'Mon-Fri', 165 | 'offset_days': 0, 'duration': 17099, 'min_start': None, 'max_start': None, 'min_end': None, 166 | 'max_end': None, 'in_force_end_date': None, 'season_end': None, 167 | 'end_with_offset': '14:44:59 ', 'has_season': False}, 168 | {'schedule_group': 'Regular', 'in_force_start_date': datetime.date(2022, 1, 3), 'season_start': None, 169 | 'start': datetime.time(14, 45), 'end': datetime.time(14, 54, 59), 'fin_id': 'AE.ADX', 170 | 'schedule_group_memo': None, 'timezone': 'Asia/Dubai', 'phase_type': 'Pre-Close', 171 | 'phase_name': 'Pre-Closing Session', 'phase_memo': None, 'days': 'Mon-Fri', 172 | 'offset_days': 0, 'duration': 599, 'min_start': None, 'max_start': None, 'min_end': None, 173 | 'max_end': None, 'in_force_end_date': None, 'season_end': None, 174 | 'end_with_offset': '14:54:59 ', 'has_season': False}, 175 | {'schedule_group': 'Regular', 'in_force_start_date': datetime.date(2022, 1, 3), 'season_start': None, 176 | 'start': datetime.time(14, 55), 'end': datetime.time(15, 0), 'fin_id': 'AE.ADX', 177 | 'schedule_group_memo': None, 
'timezone': 'Asia/Dubai', 'phase_type': 'Trading-at-Last', 178 | 'phase_name': 'Trade At Last Session', 'phase_memo': None, 'days': 'Mon-Fri', 179 | 'offset_days': 0, 'duration': 300, 'min_start': None, 'max_start': None, 'min_end': None, 180 | 'max_end': None, 'in_force_end_date': None, 'season_end': None, 181 | 'end_with_offset': '15:00:00 ', 'has_season': False} 182 | ] 183 | ) 184 | ]) 185 | def test_list_schedules(fin_id, expected): 186 | scheds = Market.get(fin_id).list_schedules() 187 | scheds = [s.to_dict() for s in scheds] 188 | 189 | assert len(scheds) == len(expected) 190 | 191 | 192 | 193 | matched = [] 194 | for sched in scheds: 195 | equals = 0 196 | for i, expect in enumerate(expected): 197 | if i in matched: continue 198 | 199 | matches = False 200 | if sorted(sched.keys()) == sorted(expect.keys()): 201 | for k in sched: 202 | if sched[k] is None and expect[k] is None: 203 | continue 204 | try: 205 | if sched[k].lower() != expect[k].lower(): 206 | break 207 | except: 208 | if sched[k] != expect[k]: 209 | break 210 | 211 | else: 212 | matches = True 213 | 214 | if matches: 215 | matched.append(i) 216 | equals += 1 217 | 218 | 219 | assert equals == 1 220 | 221 | 222 | # # apply custom sorting of dictionaries 223 | # # for easier check if the sorting in .list_schedules is correct 224 | # assert sorted(sched.keys()) == sorted(expect.keys()) 225 | # for k in sched: 226 | # assert sched[k] == expect[k], f"{k} failed, expected:\n{pformat(expect)}" 227 | # 228 | -------------------------------------------------------------------------------- /tests/test_timezone.py: -------------------------------------------------------------------------------- 1 | import pytest, os 2 | from zoneinfo import ZoneInfo 3 | from datetime import datetime, timedelta 4 | 5 | from tradinghours import Currency, Market 6 | from tradinghours.exceptions import NoAccess 7 | import tradinghours.store as st 8 | 9 | 10 | def _convert(naive_dt, timezone_str): 11 | return 
naive_dt.replace(tzinfo=ZoneInfo(timezone_str)) 12 | 13 | # 14 | # @pytest.mark.parametrize("naive_dt, timezone_str, expected_dt", [ 15 | # # Test DST start transition (US/Eastern, 2nd Sunday in March) 16 | # (datetime(2022, 3, 13, 2), "US/Eastern", _convert(datetime(2022, 3, 13, 3), "US/Eastern")), 17 | # # Test DST end transition (US/Eastern, 1st Sunday in November) 18 | # (datetime(2022, 11, 6, 1), "US/Eastern", _convert(datetime(2022, 11, 6, 1), "US/Eastern")), 19 | # ]) 20 | # def test_dst_transitions(naive_dt, timezone_str, expected_dt): 21 | # converted = _convert(naive_dt, timezone_str) 22 | # print(converted, expected_dt, converted == expected_dt) 23 | # assert converted == expected_dt, str(converted) + "==" + str(expected_dt) 24 | 25 | @pytest.mark.parametrize("dt, days, expected_dt", [ 26 | # Adding days over a leap day (non-leap year) 27 | (_convert(datetime(2021, 2, 28), "UTC"), 1, _convert(datetime(2021, 3, 1), "UTC")), 28 | # Adding days over a leap day (leap year) 29 | (_convert(datetime(2024, 2, 28), "UTC"), 1, _convert(datetime(2024, 2, 29), "UTC")), 30 | ]) 31 | def test_leap_years(dt, days, expected_dt): 32 | assert dt + timedelta(days=days) == expected_dt 33 | 34 | # 35 | # @pytest.mark.parametrize("dt, hours, timezone_str, expected_dt", [ 36 | # # Subtracting hours across DST start (spring forward) 37 | # (convert_to_timezone(datetime(2022, 3, 13, 10), "US/Eastern"), -8, "US/Eastern", convert_to_timezone(datetime(2022, 3, 13, 1), "US/Eastern")), 38 | # # Adding hours across DST end (fall back) 39 | # (convert_to_timezone(datetime(2022, 11, 6, 1), "US/Eastern"), 2, "US/Eastern", convert_to_timezone(datetime(2022, 11, 6, 1), "US/Eastern")), 40 | # ]) 41 | # def test_datetime_arithmetic_across_dst(dt, hours, timezone_str, expected_dt): 42 | # assert dt + timedelta(hours=hours) == expected_dt 43 | 44 | @pytest.mark.parametrize("dt, new_timezone_str, expected_dt", [ 45 | # Convert time from UTC to Asia/Tokyo 46 | (_convert(datetime(2022, 1, 1, 0), 
"UTC"), "Asia/Tokyo", _convert(datetime(2022, 1, 1, 9), "Asia/Tokyo")), 47 | # Convert time from Asia/Tokyo to Europe/London 48 | (_convert(datetime(2022, 1, 1, 0), "Asia/Tokyo"), "Europe/London", _convert(datetime(2021, 12, 31, 15), "Europe/London")), 49 | ]) 50 | def test_timezone_conversion(dt, new_timezone_str, expected_dt): 51 | assert dt.astimezone(ZoneInfo(new_timezone_str)) == expected_dt 52 | 53 | 54 | @pytest.mark.xfail( 55 | st.db.access_level != st.AccessLevel.full, 56 | reason="No access", strict=True, raises=NoAccess 57 | ) 58 | @pytest.mark.parametrize("currency, timezone", [ 59 | ("BRL", "America/Sao_Paulo"), 60 | ("CAD", "America/Toronto"), 61 | ("DKK", "Europe/Copenhagen"), 62 | ("MYR", "Asia/Kuala_Lumpur") 63 | ]) 64 | def test_currency_timezone(currency, timezone): 65 | currency = Currency.get(currency) 66 | assert currency.financial_capital_timezone == timezone 67 | assert ZoneInfo(currency.financial_capital_timezone) == ZoneInfo(timezone) 68 | 69 | 70 | 71 | @pytest.mark.parametrize("market, timezone", [ 72 | ("AE.ADX", "Asia/Dubai"), 73 | ("AU.ASX.NIGHT.DERIVATIVES.SPI", "Australia/Sydney"), 74 | ("BE.EURONEXT.DERIVATIVES.LIS.FUT", "Europe/Brussels"), 75 | ("BW.BSE", "Africa/Gaborone") 76 | ]) 77 | def test_market_timezone(market, timezone): 78 | market = Market.get(market) 79 | assert market.timezone == timezone 80 | assert ZoneInfo(market.timezone) == ZoneInfo(timezone) 81 | 82 | 83 | 84 | -------------------------------------------------------------------------------- /tests/test_util.py: -------------------------------------------------------------------------------- 1 | import zoneinfo 2 | 3 | import pytest, json 4 | from unittest.mock import MagicMock 5 | 6 | import requests.exceptions 7 | from requests.models import Response 8 | 9 | from tradinghours.util import (_get_latest_tzdata_version, 10 | check_if_tzdata_required_and_up_to_date) 11 | 12 | from tradinghours.exceptions import MissingTzdata 13 | import importlib.metadata as 
metadata 14 | 15 | @pytest.fixture 16 | def mock_requests_get(mocker): 17 | mock_response = MagicMock(spec=Response) 18 | mocker.patch("tradinghours.util.requests.get", return_value=mock_response) 19 | return mock_response 20 | 21 | def test_latest_version_success(mock_requests_get): 22 | mock_requests_get.status_code = 200 23 | mock_requests_get.json.return_value = {"info": {"version": "2021.1"}} 24 | assert _get_latest_tzdata_version() == "2021.1" 25 | 26 | def test_latest_version_failure(mock_requests_get): 27 | mock_requests_get.status_code = 404 28 | assert _get_latest_tzdata_version() is None 29 | 30 | mock_requests_get.side_effect = requests.exceptions.ConnectionError 31 | assert _get_latest_tzdata_version() is None 32 | 33 | mock_requests_get.side_effect = requests.exceptions.Timeout 34 | assert _get_latest_tzdata_version() is None 35 | 36 | 37 | def test_check_tzdata_disbaled(mocker): 38 | mocker.patch("tradinghours.util.main_config.getboolean", return_value=False) 39 | assert check_if_tzdata_required_and_up_to_date() is False 40 | 41 | def test_check_tzdata_not_required(mocker): 42 | mocker.patch("tradinghours.util.TZPATH", new=('/usr/share/zoneinfo', 43 | '/usr/lib/zoneinfo', 44 | '/usr/share/lib/zoneinfo', 45 | '/etc/zoneinfo')) 46 | mocker.patch("tradinghours.util.metadata.version", side_effect=metadata.PackageNotFoundError) 47 | assert check_if_tzdata_required_and_up_to_date() is True 48 | 49 | def test_check_tzdata_required_and_missing(mocker): 50 | mocker.patch("tradinghours.util.main_config.getboolean", return_value=True) 51 | mocker.patch("tradinghours.util.TZPATH", new=tuple()) 52 | mocker.patch("tradinghours.util.metadata.version", side_effect=metadata.PackageNotFoundError) 53 | with pytest.raises(MissingTzdata): 54 | check_if_tzdata_required_and_up_to_date() 55 | 56 | def test_check_tzdata_required_and_outdated(mocker, mock_requests_get): 57 | mocker.patch("tradinghours.util.main_config.getboolean", return_value=True) 58 | 
mocker.patch("tradinghours.util.TZPATH", new=tuple()) 59 | mocker.patch("tradinghours.util.metadata.version", return_value="2020.1") 60 | mock_requests_get.status_code = 200 61 | mock_requests_get.json.return_value = {"info": {"version": "2021.1"}} 62 | with pytest.warns(UserWarning, match="The installed version of tzdata is 2020.1"): 63 | assert check_if_tzdata_required_and_up_to_date() is None 64 | 65 | def test_check_tzdata_required_and_up_to_date(mocker, mock_requests_get): 66 | mocker.patch("tradinghours.util.main_config.getboolean", return_value=True) 67 | mocker.patch("tradinghours.util.TZPATH", new=tuple()) 68 | mocker.patch("tradinghours.util.metadata.version", return_value="2021.1") 69 | mock_requests_get.status_code = 200 70 | mock_requests_get.json.return_value = {"info": {"version": "2021.1"}} 71 | assert check_if_tzdata_required_and_up_to_date() is True 72 | 73 | 74 | def test_check_tzdata_required_and_fail(mocker, mock_requests_get): 75 | mocker.patch("tradinghours.util.main_config.getboolean", return_value=True) 76 | mocker.patch("tradinghours.util.TZPATH", new=tuple()) 77 | mocker.patch("tradinghours.util.metadata.version", return_value="2021.1") 78 | mock_requests_get.status_code = 500 79 | mock_requests_get.return_value = None 80 | with pytest.warns(UserWarning, match="Failed to get latest version of tzdata."): 81 | assert check_if_tzdata_required_and_up_to_date() is None 82 | 83 | 84 | -------------------------------------------------------------------------------- /tests/test_validate.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import datetime as dt 3 | import tradinghours.validate as val 4 | 5 | def test_validate_date_arg(): 6 | with pytest.raises(ValueError): 7 | val.validate_date_arg("test", None) 8 | 9 | with pytest.raises(TypeError): 10 | val.validate_date_arg("test", dt.datetime.now()) 11 | 12 | with pytest.raises(ValueError): 13 | val.validate_date_arg("test", "2024-01-01 
00:00:00") 14 | 15 | with pytest.raises(ValueError): 16 | val.validate_date_arg("test", "sdgssdg") 17 | 18 | date = dt.date.fromisoformat("2024-01-01") 19 | assert date == val.validate_date_arg("test", date) 20 | assert date == val.validate_date_arg("test", "2024-01-01") 21 | 22 | 23 | def test_validate_range_args(): 24 | with pytest.raises(ValueError): 25 | val.validate_range_args(2, 1) 26 | 27 | assert val.validate_range_args(1, 1) == (1, 1) 28 | 29 | assert val.validate_range_args(1, 2) == (1, 2) 30 | 31 | 32 | def test_validate_mic_arg(): 33 | with pytest.raises(ValueError): 34 | val.validate_mic_arg(None) 35 | 36 | with pytest.raises(TypeError): 37 | val.validate_mic_arg(123) 38 | 39 | with pytest.raises(ValueError): 40 | val.validate_mic_arg("12345") 41 | 42 | with pytest.raises(ValueError): 43 | val.validate_mic_arg("US.YE") 44 | 45 | assert val.validate_mic_arg("nyse") == "NYSE" 46 | assert val.validate_mic_arg("12Me") == "12ME" 47 | 48 | 49 | -------------------------------------------------------------------------------- /tests/utils.py: -------------------------------------------------------------------------------- 1 | import datetime as dt 2 | from zoneinfo import ZoneInfo 3 | 4 | 5 | def fromiso(iso: str, tz: str) -> dt.datetime: 6 | d = dt.datetime.fromisoformat(iso) 7 | return d.replace(tzinfo=ZoneInfo(tz)) 8 | --------------------------------------------------------------------------------