├── .coveragerc ├── .flake8 ├── .github └── workflows │ ├── auto_ci.yml │ ├── codeql-analysis.yml │ └── pypi_upload.yml ├── .gitignore ├── Examples.ipynb ├── LICENSE ├── MANIFEST.in ├── README.md ├── SECURITY.md ├── _config.yml ├── azure-pipelines.yml ├── codecov.yml ├── docs ├── Makefile ├── conf.py ├── index.rst ├── make.bat └── xbbg.png ├── feeds ├── __init__.py ├── pub.py └── sub.py ├── requirements.txt ├── setup.py ├── venv └── Pipfile └── xbbg ├── __init__.py ├── blp.py ├── const.py ├── core ├── __init__.py ├── conn.py ├── intervals.py ├── overrides.py ├── process.py ├── timezone.py ├── trials.py └── utils.py ├── io ├── __init__.py ├── cached.py ├── db.py ├── files.py ├── logs.py ├── param.py └── storage.py ├── markets ├── __init__.py ├── assets.yml ├── cached │ ├── Comdty_cfg.pkl │ ├── Curncy_cfg.pkl │ ├── Equity_cfg.pkl │ ├── Index_cfg.pkl │ ├── assets.pkl │ ├── ccy.pkl │ ├── ccy_cfg.pkl │ ├── exch.pkl │ └── exch_cfg.pkl ├── ccy.yml └── exch.yml ├── pipeline.py └── tests ├── __init__.py ├── conftest.py ├── data ├── Equity │ └── AAPL US Equity │ │ ├── DVD_Hist_All │ │ ├── asof=2018-11-02, DVD_Start_Dt=20180101, DVD_End_Dt=20180501.pkl │ │ └── asof=2021-01-02, DVD_Start_Dt=20180101, DVD_End_Dt=20180501.pkl │ │ └── TRADE │ │ └── 2018-11-02.parq ├── aapl.parq ├── asof=2018-11-02, DVD_Start_Dt=20180101, DVD_End_Dt=20180501.pkl ├── sample_bdib.parq ├── sample_bdp.pkl ├── sample_dvd.pkl ├── sample_dvd_mc.pkl ├── sample_dvd_mc_raw.pkl ├── sample_earning.pkl ├── sample_earning_amzn.pkl ├── sample_earning_header.pkl ├── sample_eur_ib.pkl ├── sample_indx_members_raw.pkl ├── sample_rms_ib0.pkl └── sample_rms_ib1.pkl ├── markets └── exch.yml └── xone.db /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | omit = 3 | xbbg/blp.py 4 | xbbg/markets/* 5 | xbbg/core/conn.py 6 | xbbg/core/trials.py 7 | xbbg/core/pdblp.py 8 | xbbg/io/files.py 9 | xbbg/io/cached.py 10 | xbbg/io/db.py 11 | xbbg/tests/* 12 | */__init__.py 13 | 14 | [html] 15 | directory = htmlcov 16 | -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | ignore = E701,E501 3 | exclude = xbbg/feeds/* 4 | show_source = True 5 | statistics = True 6 | -------------------------------------------------------------------------------- /.github/workflows/auto_ci.yml: -------------------------------------------------------------------------------- 1 | name: Auto CI 2 | 3 | on: 4 | push: 5 | branches: [ main ] 6 | pull_request: 7 | branches: [ main ] 8 | 9 | jobs: 10 | build: 11 | runs-on: ${{ matrix.os }} 12 | env: 13 | BLPAPI_ROOT: /home/bbg 14 | strategy: 15 | matrix: 16 | python-version: ["3.6.15", "3.7.17", "3.8.18"] 17 | os: [ubuntu-latest, ubuntu-20.04] 18 | exclude: 19 | - python-version: "3.6.15" 20 | os: ubuntu-latest 21 | 22 | steps: 23 | - uses: actions/checkout@v3 24 | - name: Set up Python ${{ matrix.python-version }} 25 | uses: actions/setup-python@v3 26 | with: 27 | python-version: ${{ matrix.python-version }} 28 | - name: Install dependencies 29 | run: | 30 | python -m pip install --upgrade pip 31 | pip install flake8 pytest pytest-cov codecov 32 | if [ -f requirements.txt ]; then pip install -r requirements.txt; fi 33 | - name: Lint with flake8 34 | run: | 35 | # stop the build if there are Python syntax errors or undefined names 36 | flake8 --extend-ignore=E701,E501 xbbg 37 | - name: Test with pytest 38 | run: | 39 | pytest --doctest-modules 
--cov -v xbbg 40 | - name: Coverage report 41 | run: | 42 | codecov --token=ff17768d-30bd-4917-98f2-a011606597ea 43 | -------------------------------------------------------------------------------- /.github/workflows/codeql-analysis.yml: -------------------------------------------------------------------------------- 1 | # For most projects, this workflow file will not need changing; you simply need 2 | # to commit it to your repository. 3 | # 4 | # You may wish to alter this file to override the set of languages analyzed, 5 | # or to provide custom queries or build logic. 6 | name: "CodeQL" 7 | 8 | on: 9 | push: 10 | branches: [main] 11 | pull_request: 12 | # The branches below must be a subset of the branches above 13 | branches: [main] 14 | schedule: 15 | - cron: '0 21 * * 3' 16 | 17 | jobs: 18 | analyze: 19 | name: Analyze 20 | runs-on: ubuntu-latest 21 | 22 | strategy: 23 | fail-fast: false 24 | matrix: 25 | # Override automatic language detection by changing the below list 26 | # Supported options are ['csharp', 'cpp', 'go', 'java', 'javascript', 'python'] 27 | language: ['python'] 28 | # Learn more... 29 | # https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection 30 | 31 | steps: 32 | - name: Checkout repository 33 | uses: actions/checkout@v3 34 | with: 35 | # We must fetch at least the immediate parents so that if this is 36 | # a pull request then we can checkout the head. 37 | fetch-depth: 2 38 | 39 | # If this run was triggered by a pull request event, then checkout 40 | # the head of the pull request instead of the merge commit. 41 | - run: git checkout HEAD^2 42 | if: ${{ github.event_name == 'pull_request' }} 43 | 44 | # Initializes the CodeQL tools for scanning. 45 | - name: Initialize CodeQL 46 | uses: github/codeql-action/init@v2 47 | with: 48 | languages: ${{ matrix.language }} 49 | # If you wish to specify custom queries, you can do so here or in a config file. 50 | # By default, queries listed here will override any specified in a config file. 51 | # Prefix the list here with "+" to use these queries and those in the config file. 52 | # queries: ./path/to/local/query, your-org/your-repo/queries@main 53 | 54 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). 55 | # If this step fails, then you should remove it and run the build manually (see below) 56 | - name: Autobuild 57 | uses: github/codeql-action/autobuild@v2 58 | 59 | # ℹ️ Command-line programs to run using the OS shell. 
60 | # 📚 https://git.io/JvXDl 61 | 62 | # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines 63 | # and modify them (or add more) to build your code if your project 64 | # uses a compiled language 65 | 66 | #- run: | 67 | # make bootstrap 68 | # make release 69 | 70 | - name: Perform CodeQL Analysis 71 | uses: github/codeql-action/analyze@v2 72 | -------------------------------------------------------------------------------- /.github/workflows/pypi_upload.yml: -------------------------------------------------------------------------------- 1 | name: Upload Python Package 2 | 3 | on: 4 | release: 5 | types: [ created ] 6 | 7 | jobs: 8 | deploy: 9 | 10 | runs-on: ubuntu-latest 11 | 12 | steps: 13 | - uses: actions/checkout@v3 14 | - name: Set up Python 15 | uses: actions/setup-python@v2 16 | with: 17 | python-version: 3.7 18 | - name: Install dependencies 19 | run: | 20 | pip install pip==20.0.2 21 | pip install setuptools wheel twine 22 | - name: Build and publish 23 | env: 24 | TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }} 25 | TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} 26 | run: | 27 | python setup.py sdist bdist_wheel 28 | twine upload dist/* 29 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | .pytest_cache/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | local_settings.py 57 | db.sqlite3 58 | 59 | # Flask stuff: 60 | instance/ 61 | .webassets-cache 62 | 63 | # Scrapy stuff: 64 | .scrapy 65 | 66 | # Sphinx documentation 67 | docs/_build/ 68 | 69 | # PyBuilder 70 | target/ 71 | 72 | # Jupyter Notebook 73 | .ipynb_checkpoints 74 | 75 | # pyenv 76 | .python-version 77 | 78 | # celery beat schedule file 79 | celerybeat-schedule 80 | 81 | # SageMath parsed files 82 | *.sage.py 83 | 84 | # Environments 85 | .env 86 | .venv 87 | env/ 88 | ENV/ 89 | env.bak/ 90 | venv.bak/ 91 | .idea/ 92 | 93 | # Spyder project settings 94 | .spyderproject 95 | .spyproject 96 | 97 | # Rope project settings 98 | .ropeproject 99 | 100 | # mkdocs documentation 101 | /site 102 | 103 | # mypy 104 | .mypy_cache/ 105 | 106 | # VS Code 107 | .idea/* 108 | .vscode/* 109 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 
8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 
179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | recursive-include xbbg *.yml -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![xbbg](https://raw.githubusercontent.com/alpha-xone/xbbg/main/docs/xbbg.png) 2 | 3 | # xbbg 4 | 5 | An intuitive Bloomberg API 6 | 7 | [![PyPI version](https://img.shields.io/pypi/v/xbbg.svg)](https://badge.fury.io/py/xbbg) 8 | [![PyPI version](https://img.shields.io/pypi/pyversions/xbbg.svg)](https://badge.fury.io/py/xbbg) 9 | [![PyPI - Downloads](https://img.shields.io/pypi/dm/xbbg)](https://pypistats.org/packages/xbbg) 10 | [![Gitter](https://badges.gitter.im/xbbg/community.svg)](https://gitter.im/xbbg/community?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge) 11 | 12 | [![Coffee](https://www.buymeacoffee.com/assets/img/custom_images/purple_img.png)](https://www.buymeacoffee.com/Lntx29Oof) 13 | 14 | ## Features 15 | 16 | Below are the main features. Jupyter notebook examples can be found [here](https://colab.research.google.com/drive/1YVVS5AiJAQGGEECmOFAb7DNQZMOHdXLR). 17 | 18 | - Excel compatible inputs 19 | - Straightforward intraday bar requests 20 | - Subscriptions 21 | 22 | ## Requirements 23 | 24 | - Bloomberg C++ SDK version 3.12.1 or higher: 25 | 26 | - Visit [Bloomberg API Library](https://www.bloomberg.com/professional/support/api-library/) and download the C++ Supported Release 27 | 28 | - In the `bin` folder of the downloaded zip file, copy `blpapi3_32.dll` and `blpapi3_64.dll` to the Bloomberg `BLPAPI_ROOT` folder (usually `blp/DAPI`) 29 | 30 | - Bloomberg official Python API: 31 | 32 | ```cmd 33 | pip install blpapi --index-url=https://bcms.bloomberg.com/pip/simple/ 34 | ``` 35 | 36 | - `numpy`, `pandas`, `ruamel.yaml` and `pyarrow` 37 | 38 | ## Installation 39 | 40 | ```cmd 41 | pip install xbbg 42 | ``` 43 | 44 | ## What's New 45 | 46 | _0.7.7a2_ - Custom `config`, etc. for reference exchange (author `hceh`) 47 | 48 | _0.7.6a2_ - Use `blp.connect` for alternative Bloomberg connection (author `anxl2008`) 49 | 50 | _0.7.2_ - Use `async` for live data feeds 51 | 52 | _0.7.0_ - `bdh` preserves column order (both tickers and flds).
53 | The `timeout` argument is available for all queries - `bdtick` usually takes longer to respond - 54 | use `timeout=1000`, for example, if you keep getting an empty DataFrame. 55 | 56 | _0.6.6_ - Add flexibility to use a reference exchange as the market hour definition 57 | (so that it's not necessary to add `.yml` for new tickers, provided that the exchange is defined 58 | in `/xbbg/markets/exch.yml`). See the `bdib` example below for more details. 59 | 60 | _0.6.0_ - Speed improvements and tick data availability 61 | 62 | _0.5.0_ - Rewritten library to add subscriptions and BEQS, simplify the interface, and remove the dependency on `pdblp` 63 | 64 | _0.1.22_ - Remove PyYAML dependency due to a security vulnerability 65 | 66 | _0.1.17_ - Add `adjust` argument in `bdh` for easier dividend / split adjustments 67 | 68 | ## Tutorial 69 | 70 | ```python 71 | In [1]: from xbbg import blp 72 | ``` 73 | 74 | ### Basics 75 | 76 | - ``BDP`` example: 77 | 78 | ```python 79 | In [2]: blp.bdp(tickers='NVDA US Equity', flds=['Security_Name', 'GICS_Sector_Name']) 80 | ``` 81 | 82 | ```pydocstring 83 | Out[2]: 84 | security_name gics_sector_name 85 | NVDA US Equity NVIDIA Corp Information Technology 86 | ``` 87 | 88 | - ``BDP`` with overrides: 89 | 90 | ```python 91 | In [3]: blp.bdp('AAPL US Equity', 'Eqy_Weighted_Avg_Px', VWAP_Dt='20181224') 92 | ``` 93 | 94 | ```pydocstring 95 | Out[3]: 96 | eqy_weighted_avg_px 97 | AAPL US Equity 148.75 98 | ``` 99 | 100 | - ``BDH`` example: 101 | 102 | ```python 103 | In [4]: blp.bdh( 104 | ...: tickers='SPX Index', flds=['high', 'low', 'last_price'], 105 | ...: start_date='2018-10-10', end_date='2018-10-20', 106 | ...: ) 107 | ``` 108 | 109 | ```pydocstring 110 | Out[4]: 111 | SPX Index 112 | high low last_price 113 | 2018-10-10 2,874.02 2,784.86 2,785.68 114 | 2018-10-11 2,795.14 2,710.51 2,728.37 115 | 2018-10-12 2,775.77 2,729.44 2,767.13 116 | 2018-10-15 2,775.99 2,749.03 2,750.79 117 | 2018-10-16 2,813.46 2,766.91 2,809.92 118 | 2018-10-17 2,816.94 2,781.81 2,809.21 119 | 2018-10-18 2,806.04 2,755.18 2,768.78 120 | 2018-10-19 2,797.77 2,760.27 2,767.78 121 | ``` 122 | 123 | - ``BDH`` example with Excel compatible inputs: 124 | 125 | ```python 126 | In [5]: blp.bdh( 127 | ...: tickers='SHCOMP Index', flds=['high', 'low', 'last_price'], 128 | ...: start_date='2018-09-26', end_date='2018-10-20', 129 | ...: Per='W', Fill='P', Days='A', 130 | ...: ) 131 | ``` 132 | 133 | ```pydocstring 134 | Out[5]: 135 | SHCOMP Index 136 | high low last_price 137 | 2018-09-28 2,827.34 2,771.16 2,821.35 138 | 2018-10-05 2,827.34 2,771.16 2,821.35 139 | 2018-10-12 2,771.94 2,536.66 2,606.91 140 | 2018-10-19 2,611.97 2,449.20 2,550.47 141 | ``` 142 | 143 | - ``BDH`` without adjustment for dividends and splits: 144 | 145 | ```python 146 | In [6]: blp.bdh( 147 | ...: 'AAPL US Equity', 'px_last', '20140605', '20140610', 148 | ...: CshAdjNormal=False, CshAdjAbnormal=False, CapChg=False 149 | ...: ) 150 | ``` 151 | 152 | ```pydocstring 153 | Out[6]: 154 | AAPL US Equity 155 | px_last 156 | 2014-06-05 647.35 157 | 2014-06-06 645.57 158 | 2014-06-09 93.70 159 | 2014-06-10 94.25 160 | ``` 161 | 162 | - ``BDH`` adjusted for dividends and splits: 163 | 164 | ```python 165 | In [7]: blp.bdh( 166 | ...: 'AAPL US Equity', 'px_last', '20140605', '20140610', 167 | ...: CshAdjNormal=True, CshAdjAbnormal=True, CapChg=True 168 | ...: ) 169 | ``` 170 | 171 | ```pydocstring 172 | Out[7]: 173 | AAPL US Equity 174 | px_last 175 | 2014-06-05 85.45 176 | 2014-06-06 85.22 177 | 2014-06-09 86.58 178 | 2014-06-10 87.09 179 | ``` 180 |
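- ``BDH`` with multiple tickers and fields - as noted in the 0.7.0 entry above, the output preserves the given ticker and field order, and the `timeout` argument can be attached to any query. A minimal sketch (output omitted; requires a live terminal, and the ticker/field combination here is only illustrative):

```python
blp.bdh(
    tickers=['SPX Index', 'SHCOMP Index'], flds=['high', 'low', 'last_price'],
    start_date='2018-10-10', end_date='2018-10-20', timeout=1000,
)
```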
181 | - ``BDS`` example: 182 | 183 | ```python 184 | In [8]: blp.bds('AAPL US Equity', 'DVD_Hist_All', DVD_Start_Dt='20180101', DVD_End_Dt='20180531') 185 | ``` 186 | 187 | ```pydocstring 188 | Out[8]: 189 | declared_date ex_date record_date payable_date dividend_amount dividend_frequency dividend_type 190 | AAPL US Equity 2018-05-01 2018-05-11 2018-05-14 2018-05-17 0.73 Quarter Regular Cash 191 | AAPL US Equity 2018-02-01 2018-02-09 2018-02-12 2018-02-15 0.63 Quarter Regular Cash 192 | ``` 193 | 194 | - Intraday bars ``BDIB`` example: 195 | 196 | ```python 197 | In [9]: blp.bdib(ticker='BHP AU Equity', dt='2018-10-17').tail() 198 | ``` 199 | 200 | ```pydocstring 201 | Out[9]: 202 | BHP AU Equity 203 | open high low close volume num_trds 204 | 2018-10-17 15:56:00+11:00 33.62 33.65 33.62 33.64 16660 126 205 | 2018-10-17 15:57:00+11:00 33.65 33.65 33.63 33.64 13875 156 206 | 2018-10-17 15:58:00+11:00 33.64 33.65 33.62 33.63 16244 159 207 | 2018-10-17 15:59:00+11:00 33.63 33.63 33.61 33.62 16507 167 208 | 2018-10-17 16:10:00+11:00 33.66 33.66 33.66 33.66 1115523 216 209 | ``` 210 | 211 | The above example works because 1) `AU` in the equity ticker is mapped to `EquityAustralia` in 212 | `markets/assets.yml`, and 2) `EquityAustralia` is defined in `markets/exch.yml`. 213 | To add new mappings, define `BBG_ROOT` in `os.environ` and add `assets.yml` and 214 | `exch.yml` under `BBG_ROOT/markets`. 215 | 216 | *New in 0.6.6* - if the exchange is defined in `/xbbg/markets/exch.yml`, you can use `ref` to look up 217 | the relevant exchange market hours. Both `ref='ES1 Index'` and `ref='CME'` work for this example: 218 | 219 | ```python 220 | In [10]: blp.bdib(ticker='ESM0 Index', dt='2020-03-20', ref='ES1 Index').tail() 221 | ``` 222 | 223 | ```pydocstring 224 | Out[10]: 225 | ESM0 Index 226 | open high low close volume num_trds value 227 | 2020-03-20 16:55:00-04:00 2,260.75 2,262.25 2,260.50 2,262.00 412 157 931,767.00 228 | 2020-03-20 16:56:00-04:00 2,262.25 2,267.00 2,261.50 2,266.75 812 209 1,838,823.50 229 | 2020-03-20 16:57:00-04:00 2,266.75 2,270.00 2,264.50 2,269.00 1136 340 2,576,590.25 230 | 2020-03-20 16:58:00-04:00 2,269.25 2,269.50 2,261.25 2,265.75 1077 408 2,439,276.00 231 | 2020-03-20 16:59:00-04:00 2,265.25 2,272.00 2,265.00 2,266.50 1271 378 2,882,978.25 232 | ``` 233 | 234 | - Intraday bars within market session: 235 | 236 | ```python 237 | In [11]: blp.bdib(ticker='7974 JT Equity', dt='2018-10-17', session='am_open_30').tail() 238 | ``` 239 | 240 | ```pydocstring 241 | Out[11]: 242 | 7974 JT Equity 243 | open high low close volume num_trds 244 | 2018-10-17 09:27:00+09:00 39,970.00 40,020.00 39,970.00 39,990.00 10800 44 245 | 2018-10-17 09:28:00+09:00 39,990.00 40,020.00 39,980.00 39,980.00 6300 33 246 | 2018-10-17 09:29:00+09:00 39,970.00 40,000.00 39,960.00 39,970.00 3300 21 247 | 2018-10-17 09:30:00+09:00 39,960.00 40,010.00 39,950.00 40,000.00 3100 19 248 | 2018-10-17 09:31:00+09:00 39,990.00 40,000.00 39,980.00 39,990.00 2000 15 249 | ``` 250 | 251 | - Corporate earnings: 252 | 253 | ```python 254 | In [12]: blp.earning('AMD US Equity', by='Geo', Eqy_Fund_Year=2017, Number_Of_Periods=1) 255 | ``` 256 | 257 | ```pydocstring 258 | Out[12]: 259 | level fy2017 fy2017_pct 260 | Asia-Pacific 1.00 3,540.00 66.43 261 | China 2.00 1,747.00 49.35 262 | Japan 2.00 1,242.00 35.08 263 | Singapore 2.00 551.00 15.56 264 | United States 1.00 1,364.00 25.60 265 | Europe 1.00 263.00 4.94 266 | Other Countries 1.00 162.00 3.04 267 | ``` 268 | 269 | - Dividends: 270 | 271 | ```python 272 | In [13]: blp.dividend(['C US
Equity', 'MS US Equity'], start_date='2018-01-01', end_date='2018-05-01') 273 | ``` 274 | 275 | ```pydocstring 276 | Out[13]: 277 | dec_date ex_date rec_date pay_date dvd_amt dvd_freq dvd_type 278 | C US Equity 2018-01-18 2018-02-02 2018-02-05 2018-02-23 0.32 Quarter Regular Cash 279 | MS US Equity 2018-04-18 2018-04-27 2018-04-30 2018-05-15 0.25 Quarter Regular Cash 280 | MS US Equity 2018-01-18 2018-01-30 2018-01-31 2018-02-15 0.25 Quarter Regular Cash 281 | ``` 282 | 283 | ----- 284 | 285 | *New in 0.1.17* - Dividend adjustment can be simplified to one parameter `adjust`: 286 | 287 | - ``BDH`` without adjustment for dividends and splits: 288 | 289 | ```python 290 | In [14]: blp.bdh('AAPL US Equity', 'px_last', '20140606', '20140609', adjust='-') 291 | ``` 292 | 293 | ```pydocstring 294 | Out[14]: 295 | AAPL US Equity 296 | px_last 297 | 2014-06-06 645.57 298 | 2014-06-09 93.70 299 | ``` 300 | 301 | - ``BDH`` adjusted for dividends and splits: 302 | 303 | ```python 304 | In [15]: blp.bdh('AAPL US Equity', 'px_last', '20140606', '20140609', adjust='all') 305 | ``` 306 | 307 | ```pydocstring 308 | Out[15]: 309 | AAPL US Equity 310 | px_last 311 | 2014-06-06 85.22 312 | 2014-06-09 86.58 313 | ``` 314 | 315 | ### Data Storage 316 | 317 | If `BBG_ROOT` is provided in `os.environ`, data can be saved locally. 318 | By default, local storage is preferred over Bloomberg for all queries. 319 | 320 | Note that local data usage must be compliant with the Bloomberg Datafeed Addendum 321 | (full description in `DAPI`): 322 | 323 | > To access Bloomberg data via the API (and use that data in Microsoft Excel), 324 | > your company must sign the 'Datafeed Addendum' to the Bloomberg Agreement. 325 | > This legally binding contract describes the terms and conditions of your use 326 | > of the data and information available via the API (the "Data"). 327 | > The most fundamental requirement regarding your use of Data is that it cannot 328 | > leave the local PC you use to access the BLOOMBERG PROFESSIONAL service.
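### Subscription

Subscriptions are listed as a feature above; `feeds/pub.py` in this repo consumes `blp.live` as an async generator. Below is a minimal sketch along the same lines - the `info` field list is an illustrative subset of `DEFAULT_FDLS` in `feeds/pub.py`, and running under plain `asyncio` (the bundled feeds use `trio`) is an assumption; a live terminal connection is required:

```python
import asyncio

from xbbg import blp


async def stream(tickers):
    # blp.live yields one dict per market data update (cf. feeds/pub.py);
    # the subscribed fields below are assumptions borrowed from DEFAULT_FDLS
    async for data in blp.live(tickers=tickers, info=['LAST_PRICE', 'VOLUME']):
        print(data)


asyncio.run(stream(['ESA Index', 'CLA Comdty']))
```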
329 | 330 | ## Star History 331 | 332 | [![Star History Chart](https://api.star-history.com/svg?repos=alpha-xone/xbbg&type=Date)](https://star-history.com/#alpha-xone/xbbg&Date) 333 | 334 | | | | 335 | | -------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 336 | | Docs | [![Documentation Status](https://readthedocs.org/projects/xbbg/badge/?version=latest)](https://xbbg.readthedocs.io/) | 337 | | Build | [![Actions Status](https://github.com/alpha-xone/xbbg/workflows/Auto%20CI/badge.svg)](https://github.com/alpha-xone/xbbg/actions) | 338 | | | [![Azure](https://dev.azure.com/alpha-xone/xbbg/_apis/build/status/alpha-xone.xbbg?branchName=main)](https://dev.azure.com/alpha-xone/xbbg/_build) | 339 | | Coverage | [![codecov](https://codecov.io/gh/alpha-xone/xbbg/branch/main/graph/badge.svg)](https://codecov.io/gh/alpha-xone/xbbg) | 340 | | Quality | [![Codacy Badge](https://app.codacy.com/project/badge/Grade/daec9f52ba344e3ea116c15f1fc6d541)](https://www.codacy.com/gh/alpha-xone/xbbg/) | 341 | | | [![CodeFactor](https://www.codefactor.io/repository/github/alpha-xone/xbbg/badge)](https://www.codefactor.io/repository/github/alpha-xone/xbbg) | 342 | | | [![codebeat badge](https://codebeat.co/badges/eef1f14d-72eb-445a-af53-12d3565385ec)](https://codebeat.co/projects/github-com-alpha-xone-xbbg-main) | 343 | | License | [![GitHub license](https://img.shields.io/github/license/alpha-xone/xbbg.svg)](https://github.com/alpha-xone/xbbg/blob/main/LICENSE) | 344 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | ## Supported Versions 4 | 5 | Use this section to tell people about which versions of your project are 6 | currently being supported with security updates. 7 | 8 | | Version | Supported | 9 | | ------- | ------------------ | 10 | | 5.1.x | :white_check_mark: | 11 | | 5.0.x | :x: | 12 | | 4.0.x | :white_check_mark: | 13 | | < 4.0 | :x: | 14 | 15 | ## Reporting a Vulnerability 16 | 17 | Use this section to tell people how to report a vulnerability. 18 | 19 | Tell them where to go, how often they can expect to get an update on a 20 | reported vulnerability, what to expect if the vulnerability is accepted or 21 | declined, etc. 
22 | -------------------------------------------------------------------------------- /_config.yml: -------------------------------------------------------------------------------- 1 | theme: jekyll-theme-leap-day -------------------------------------------------------------------------------- /azure-pipelines.yml: -------------------------------------------------------------------------------- 1 | # https://aka.ms/yaml 2 | jobs: 3 | - job: WinTest 4 | pool: 5 | vmImage: windows-latest 6 | strategy: 7 | matrix: 8 | Python36: 9 | python.version: '3.6' 10 | Python37: 11 | python.version: '3.7' 12 | Python38: 13 | python.version: '3.8' 14 | maxParallel: '3' 15 | 16 | steps: 17 | - task: UsePythonVersion@0 18 | inputs: 19 | versionSpec: '$(python.version)' 20 | architecture: 'x64' 21 | 22 | - script: | 23 | python -m pip install --upgrade pip setuptools wheel 24 | pip install -r requirements.txt 25 | displayName: Install prerequisites 26 | 27 | - script: | 28 | python -m pip install flake8 29 | flake8 xbbg 30 | displayName: Run Lint Tests 31 | 32 | - script: | 33 | pip install pytest pytest-cov coverage codecov 34 | pytest xbbg --doctest-modules --cov -v 35 | displayName: Run Doctests and Coverages 36 | -------------------------------------------------------------------------------- /codecov.yml: -------------------------------------------------------------------------------- 1 | ignore: 2 | - xbbg/blp.py 3 | - xbbg/core/conn.py 4 | - xbbg/core/process.py 5 | - xbbg/core/names.py 6 | - xbbg/core/trials.py 7 | - xbbg/io/files.py 8 | - xbbg/io/cached.py 9 | - "xbbg/markets/*" 10 | - "xbbg/tests/*" 11 | - "*/__init__.py" 12 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | SOURCEDIR = . 8 | BUILDDIR = build 9 | 10 | # Put it first so that "make" without argument is like "make help". 11 | help: 12 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 13 | 14 | .PHONY: help Makefile 15 | 16 | # Catch-all target: route all unknown targets to Sphinx using the new 17 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 18 | %: Makefile 19 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 20 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Configuration file for the Sphinx documentation builder. 4 | # 5 | # This file does only contain a selection of the most common options. For a 6 | # full list see the documentation: 7 | # http://www.sphinx-doc.org/en/master/config 8 | 9 | # -- Path setup -------------------------------------------------------------- 10 | 11 | # If extensions (or modules to document with autodoc) are in another directory, 12 | # add these directories to sys.path here. If the directory is relative to the 13 | # documentation root, use os.path.abspath to make it absolute, like shown here. 
14 | 15 | import os 16 | import sys 17 | 18 | ROOT_PATH = '/'.join(os.path.abspath(__file__).replace('\\', '/').split('/')[:-2]) 19 | 20 | # sys.path.insert(0, os.path.abspath('.')) 21 | sys.path.append(ROOT_PATH) 22 | 23 | 24 | def parse_version(package): 25 | 26 | init_file = '%s/%s/__init__.py' % (ROOT_PATH, package) 27 | with open(init_file, 'r', encoding='utf-8') as f: 28 | for line in f.readlines(): 29 | if '__version__' in line: 30 | return line.split('=')[1].strip()[1:-1] 31 | return '' 32 | 33 | 34 | # -- Project information ----------------------------------------------------- 35 | 36 | project = 'xbbg' 37 | copyright = '2018, Alpha x1' 38 | author = 'Alpha x1' 39 | 40 | # The short X.Y version 41 | version = '' 42 | # The full version, including alpha/beta/rc tags 43 | release = parse_version(project) 44 | 45 | # -- General configuration --------------------------------------------------- 46 | 47 | # If your documentation needs a minimal Sphinx version, state it here. 48 | # 49 | # needs_sphinx = '1.0' 50 | 51 | # Add any Sphinx extension module names here, as strings. They can be 52 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 53 | # ones. 54 | extensions = [ 55 | 'sphinx.ext.autodoc', 56 | 'sphinx.ext.todo', 57 | 'sphinx.ext.coverage', 58 | 'sphinx.ext.mathjax', 59 | 'sphinx.ext.ifconfig', 60 | 'sphinx.ext.viewcode', 61 | 'sphinx.ext.githubpages', 62 | # 'IPython.sphinxext.ipython_console_highlighting', 63 | # 'IPython.sphinxext.ipython_directive', 64 | # 'matplotlib.sphinxext.plot_directive', 65 | # 'sphinx.ext.intersphinx', 66 | ] 67 | 68 | # Add any paths that contain templates here, relative to this directory. 69 | templates_path = ['_templates'] 70 | 71 | # The suffix(es) of source filenames. 72 | # You can specify multiple suffix as a list of string: 73 | # 74 | # source_suffix = ['.rst', '.md'] 75 | source_suffix = '.rst' 76 | 77 | # The master toctree document. 78 | master_doc = 'index' 79 | 80 | # The language for content autogenerated by Sphinx. Refer to documentation 81 | # for a list of supported languages. 82 | # 83 | # This is also used if you do content translation via gettext catalogs. 84 | # Usually you set "language" from the command line for these cases. 85 | language = None 86 | 87 | # List of patterns, relative to source directory, that match files and 88 | # directories to ignore when looking for source files. 89 | # This pattern also affects html_static_path and html_extra_path. 90 | exclude_patterns = [] 91 | 92 | # The name of the Pygments (syntax highlighting) style to use. 93 | pygments_style = None 94 | 95 | # -- Options for HTML output ------------------------------------------------- 96 | 97 | # The theme to use for HTML and HTML Help pages. See the documentation for 98 | # a list of builtin themes. 99 | # 100 | # html_theme = 'alabaster' 101 | html_theme = "sphinx_rtd_theme" 102 | 103 | # Theme options are theme-specific and customize the look and feel of a theme 104 | # further. For a list of options available for each theme, see the 105 | # documentation. 106 | 107 | html_theme_options = { 108 | 'navigation_depth': 4, 109 | } 110 | 111 | # Add any paths that contain custom static files (such as style sheets) here, 112 | # relative to this directory. They are copied after the builtin static files, 113 | # so a file named "default.css" will overwrite the builtin "default.css". 
114 | html_static_path = ['_static'] 115 | 116 | # Custom sidebar templates, must be a dictionary that maps document names 117 | # to template names. 118 | # 119 | # The default sidebars (for documents that don't match any pattern) are 120 | # defined by the theme itself. Builtin themes are using these templates by 121 | # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', 122 | # 'searchbox.html']``. 123 | # 124 | # html_sidebars = {} 125 | 126 | # -- Options for HTMLHelp output --------------------------------------------- 127 | 128 | # Output file base name for HTML help builder. 129 | htmlhelp_basename = 'xbbgdoc' 130 | 131 | # -- Options for LaTeX output ------------------------------------------------ 132 | 133 | latex_elements = { 134 | # The paper size ('letterpaper' or 'a4paper'). 135 | # 136 | # 'papersize': 'letterpaper', 137 | 138 | # The font size ('10pt', '11pt' or '12pt'). 139 | # 140 | # 'pointsize': '10pt', 141 | 142 | # Additional stuff for the LaTeX preamble. 143 | # 144 | # 'preamble': '', 145 | 146 | # Latex figure (float) alignment 147 | # 148 | # 'figure_align': 'htbp', 149 | } 150 | 151 | # Grouping the document tree into LaTeX files. List of tuples 152 | # (source start file, target name, title, 153 | # author, documentclass [howto, manual, or own class]). 154 | latex_documents = [( 155 | master_doc, 'xbbg.tex', 'xbbg Documentation', 'Alpha x1', 'manual' 156 | )] 157 | 158 | # -- Options for manual page output ------------------------------------------ 159 | 160 | # One entry per manual page. List of tuples 161 | # (source start file, name, description, authors, manual section). 162 | man_pages = [( 163 | master_doc, 'xbbg', 'xbbg Documentation', [author], 1 164 | )] 165 | 166 | # -- Options for Texinfo output ---------------------------------------------- 167 | 168 | # Grouping the document tree into Texinfo files. List of tuples 169 | # (source start file, target name, title, author, 170 | # dir menu entry, description, category) 171 | texinfo_documents = [( 172 | master_doc, 'xbbg', 'xbbg Documentation', 173 | author, 'xbbg', 'One line description of project.', 'Miscellaneous' 174 | )] 175 | 176 | # -- Options for Epub output ------------------------------------------------- 177 | 178 | # Bibliographic Dublin Core info. 179 | epub_title = project 180 | 181 | # The unique identifier of the text. This can be an ISBN number 182 | # or the project homepage. 183 | # 184 | # epub_identifier = '' 185 | 186 | # A unique identification for the text. 187 | # 188 | # epub_uid = '' 189 | 190 | # A list of files that should not be packed into the epub file. 191 | epub_exclude_files = ['search.html'] 192 | 193 | # -- Extension configuration ------------------------------------------------- 194 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | |xbbg| 2 | 3 | xbbg 4 | ==== 5 | 6 | An intuitive Bloomberg API 7 | 8 | |pypi| |version| |download| |chat| 9 | 10 | |coffee| 11 | 12 | Features 13 | ======== 14 | 15 | Below are the main features. Jupyter notebook examples can be found here_.
16 | 17 | - Excel compatible inputs 18 | - Straightforward intraday bar requests 19 | - Subscriptions 20 | 21 | Requirements 22 | ============ 23 | 24 | - Bloomberg C++ SDK version 3.12.1 or higher 25 | 26 | - Visit `Bloomberg API Library`_ and download the C++ Supported Release 27 | 28 | - In the ``bin`` folder of the downloaded zip file, copy ``blpapi3_32.dll`` and ``blpapi3_64.dll`` to the Bloomberg ``BLPAPI_ROOT`` folder (usually ``blp/DAPI``) 29 | 30 | - Bloomberg official Python API: 31 | 32 | .. code-block:: console 33 | 34 | pip install blpapi --index-url=https://bcms.bloomberg.com/pip/simple/ 35 | 36 | - numpy, pandas, ruamel.yaml and pyarrow 37 | 38 | .. _download: https://bcms.bloomberg.com/BLPAPI-Generic/blpapi_cpp_3.16.1.1-windows.zip 39 | .. _here: https://colab.research.google.com/drive/1YVVS5AiJAQGGEECmOFAb7DNQZMOHdXLR 40 | 41 | Installation 42 | ============ 43 | 44 | .. code-block:: console 45 | 46 | pip install xbbg 47 | 48 | What's New 49 | ========== 50 | 51 | *0.7.7a2* - Custom `config`, etc. for reference exchange (author `hceh`) 52 | 53 | *0.7.6a2* - Use `blp.connect` for alternative Bloomberg connection (author `anxl2008`) 54 | 55 | *0.7.2* - Use `async` for live data feeds 56 | 57 | *0.7.0* - ``bdh`` preserves column order (both tickers and flds). 58 | The ``timeout`` argument is available for all queries - ``bdtick`` usually takes longer to respond - 59 | use ``timeout=1000``, for example, if you keep getting an empty DataFrame. 60 | 61 | *0.6.6* - Add flexibility to use a reference exchange as the market hour definition 62 | (so that it's not necessary to add ``.yml`` for new tickers, provided that the exchange is defined 63 | in ``/xbbg/markets/exch.yml``). See the ``bdib`` example below for more details. 64 | 65 | *0.6.0* - Speed improvements and tick data availability 66 | 67 | *0.5.0* - Rewritten library to add subscriptions and BEQS, simplify the interface, and remove the dependency on `pdblp` 68 | 69 | *0.1.22* - Remove PyYAML dependency due to a security vulnerability 70 | 71 | *0.1.17* - Add ``adjust`` argument in ``bdh`` for easier dividend / split adjustments 72 | 73 | Tutorial 74 | ======== 75 | 76 | .. code-block:: python 77 | 78 | In [1]: from xbbg import blp 79 | 80 | Basics 81 | ------ 82 | 83 | ``BDP`` example: 84 | 85 | .. code-block:: python 86 | 87 | In [2]: blp.bdp(tickers='NVDA US Equity', flds=['Security_Name', 'GICS_Sector_Name']) 88 | Out[2]: 89 | security_name gics_sector_name 90 | NVDA US Equity NVIDIA Corp Information Technology 91 | 92 | ``BDP`` with overrides: 93 | 94 | .. code-block:: python 95 | 96 | In [3]: blp.bdp('AAPL US Equity', 'Eqy_Weighted_Avg_Px', VWAP_Dt='20181224') 97 | Out[3]: 98 | eqy_weighted_avg_px 99 | AAPL US Equity 148.75 100 | 101 | ``BDH`` example: 102 | 103 | .. code-block:: python 104 | 105 | In [4]: blp.bdh( 106 | ...: tickers='SPX Index', flds=['High', 'Low', 'Last_Price'], 107 | ...: start_date='2018-10-10', end_date='2018-10-20', 108 | ...: ) 109 | Out[4]: 110 | SPX Index 111 | High Low Last_Price 112 | 2018-10-10 2,874.02 2,784.86 2,785.68 113 | 2018-10-11 2,795.14 2,710.51 2,728.37 114 | 2018-10-12 2,775.77 2,729.44 2,767.13 115 | 2018-10-15 2,775.99 2,749.03 2,750.79 116 | 2018-10-16 2,813.46 2,766.91 2,809.92 117 | 2018-10-17 2,816.94 2,781.81 2,809.21 118 | 2018-10-18 2,806.04 2,755.18 2,768.78 119 | 2018-10-19 2,797.77 2,760.27 2,767.78 120 | 121 | ``BDH`` example with Excel compatible inputs: 122 | 123 | ..
code-block:: python 124 | 125 | In [5]: blp.bdh( 126 | ...: tickers='SHCOMP Index', flds=['High', 'Low', 'Last_Price'], 127 | ...: start_date='2018-09-26', end_date='2018-10-20', 128 | ...: Per='W', Fill='P', Days='A', 129 | ...: ) 130 | Out[5]: 131 | SHCOMP Index 132 | High Low Last_Price 133 | 2018-09-28 2,827.34 2,771.16 2,821.35 134 | 2018-10-05 2,827.34 2,771.16 2,821.35 135 | 2018-10-12 2,771.94 2,536.66 2,606.91 136 | 2018-10-19 2,611.97 2,449.20 2,550.47 137 | 138 | ``BDH`` without adjustment for dividends and splits: 139 | 140 | .. code-block:: python 141 | 142 | In [6]: blp.bdh( 143 | ...: 'AAPL US Equity', 'Px_Last', '20140605', '20140610', 144 | ...: CshAdjNormal=False, CshAdjAbnormal=False, CapChg=False 145 | ...: ) 146 | Out[6]: 147 | AAPL US Equity 148 | Px_Last 149 | 2014-06-05 647.35 150 | 2014-06-06 645.57 151 | 2014-06-09 93.70 152 | 2014-06-10 94.25 153 | 154 | ``BDH`` adjusted for dividends and splits: 155 | 156 | .. code-block:: python 157 | 158 | In [7]: blp.bdh( 159 | ...: 'AAPL US Equity', 'Px_Last', '20140605', '20140610', 160 | ...: CshAdjNormal=True, CshAdjAbnormal=True, CapChg=True 161 | ...: ) 162 | Out[7]: 163 | AAPL US Equity 164 | Px_Last 165 | 2014-06-05 85.45 166 | 2014-06-06 85.22 167 | 2014-06-09 86.58 168 | 2014-06-10 87.09 169 | 170 | ``BDS`` example: 171 | 172 | .. code-block:: python 173 | 174 | In [8]: blp.bds('AAPL US Equity', 'DVD_Hist_All', DVD_Start_Dt='20180101', DVD_End_Dt='20180531') 175 | Out[8]: 176 | declared_date ex_date record_date payable_date dividend_amount dividend_frequency dividend_type 177 | AAPL US Equity 2018-05-01 2018-05-11 2018-05-14 2018-05-17 0.73 Quarter Regular Cash 178 | AAPL US Equity 2018-02-01 2018-02-09 2018-02-12 2018-02-15 0.63 Quarter Regular Cash 179 | 180 | Intraday bars ``BDIB`` example: 181 | 182 | .. code-block:: python 183 | 184 | In [9]: blp.bdib(ticker='BHP AU Equity', dt='2018-10-17').tail() 185 | Out[9]: 186 | BHP AU Equity 187 | open high low close volume num_trds 188 | 2018-10-17 15:56:00+11:00 33.62 33.65 33.62 33.64 16660 126 189 | 2018-10-17 15:57:00+11:00 33.65 33.65 33.63 33.64 13875 156 190 | 2018-10-17 15:58:00+11:00 33.64 33.65 33.62 33.63 16244 159 191 | 2018-10-17 15:59:00+11:00 33.63 33.63 33.61 33.62 16507 167 192 | 2018-10-17 16:10:00+11:00 33.66 33.66 33.66 33.66 1115523 216 193 | 194 | The above example works because 1) ``AU`` in the equity ticker is mapped to ``EquityAustralia`` in 195 | ``markets/assets.yml``, and 2) ``EquityAustralia`` is defined in ``markets/exch.yml``. 196 | To add new mappings, define ``BBG_ROOT`` in ``os.environ`` and add ``assets.yml`` and 197 | ``exch.yml`` under ``BBG_ROOT/markets``. 198 | 199 | *New in 0.6.6* - if the exchange is defined in ``/xbbg/markets/exch.yml``, you can use ``ref`` to look up 200 | the relevant exchange market hours. Both ``ref='ES1 Index'`` and ``ref='CME'`` work for this example: 201 | 202 | ..
code-block:: python 203 | 204 | In [10]: blp.bdib(ticker='ESM0 Index', dt='2020-03-20', ref='ES1 Index').tail() 205 | Out[10]: 206 | ESM0 Index 207 | open high low close volume num_trds value 208 | 2020-03-20 16:55:00-04:00 2,260.75 2,262.25 2,260.50 2,262.00 412 157 931,767.00 209 | 2020-03-20 16:56:00-04:00 2,262.25 2,267.00 2,261.50 2,266.75 812 209 1,838,823.50 210 | 2020-03-20 16:57:00-04:00 2,266.75 2,270.00 2,264.50 2,269.00 1136 340 2,576,590.25 211 | 2020-03-20 16:58:00-04:00 2,269.25 2,269.50 2,261.25 2,265.75 1077 408 2,439,276.00 212 | 2020-03-20 16:59:00-04:00 2,265.25 2,272.00 2,265.00 2,266.50 1271 378 2,882,978.25 213 | 214 | Intraday bars within market session: 215 | 216 | .. code-block:: python 217 | 218 | In [11]: blp.bdib(ticker='7974 JT Equity', dt='2018-10-17', session='am_open_30').tail() 219 | Out[11]: 220 | 7974 JT Equity 221 | open high low close volume num_trds 222 | 2018-10-17 09:27:00+09:00 39,970.00 40,020.00 39,970.00 39,990.00 10800 44 223 | 2018-10-17 09:28:00+09:00 39,990.00 40,020.00 39,980.00 39,980.00 6300 33 224 | 2018-10-17 09:29:00+09:00 39,970.00 40,000.00 39,960.00 39,970.00 3300 21 225 | 2018-10-17 09:30:00+09:00 39,960.00 40,010.00 39,950.00 40,000.00 3100 19 226 | 2018-10-17 09:31:00+09:00 39,990.00 40,000.00 39,980.00 39,990.00 2000 15 227 | 228 | Corporate earnings: 229 | 230 | .. code-block:: python 231 | 232 | In [12]: blp.earning('AMD US Equity', by='Geo', Eqy_Fund_Year=2017, Number_Of_Periods=1) 233 | Out[12]: 234 | level fy2017 fy2017_pct 235 | Asia-Pacific 1.00 3,540.00 66.43 236 | China 2.00 1,747.00 49.35 237 | Japan 2.00 1,242.00 35.08 238 | Singapore 2.00 551.00 15.56 239 | United States 1.00 1,364.00 25.60 240 | Europe 1.00 263.00 4.94 241 | Other Countries 1.00 162.00 3.04 242 | 243 | Dividends: 244 | 245 | .. code-block:: python 246 | 247 | In [13]: blp.dividend(['C US Equity', 'MS US Equity'], start_date='2018-01-01', end_date='2018-05-01') 248 | Out[13]: 249 | dec_date ex_date rec_date pay_date dvd_amt dvd_freq dvd_type 250 | C US Equity 2018-01-18 2018-02-02 2018-02-05 2018-02-23 0.32 Quarter Regular Cash 251 | MS US Equity 2018-04-18 2018-04-27 2018-04-30 2018-05-15 0.25 Quarter Regular Cash 252 | MS US Equity 2018-01-18 2018-01-30 2018-01-31 2018-02-15 0.25 Quarter Regular Cash 253 | 254 | ----- 255 | 256 | *New in 0.1.17* - Dividend adjustment can be simplified to one parameter ``adjust``: 257 | 258 | - ``BDH`` without adjustment for dividends and splits: 259 | 260 | .. code-block:: python 261 | 262 | In [14]: blp.bdh('AAPL US Equity', 'Px_Last', '20140606', '20140609', adjust='-') 263 | Out[14]: 264 | AAPL US Equity 265 | Px_Last 266 | 2014-06-06 645.57 267 | 2014-06-09 93.70 268 | 269 | - ``BDH`` adjusted for dividends and splits: 270 | 271 | .. code-block:: python 272 | 273 | In [15]: blp.bdh('AAPL US Equity', 'Px_Last', '20140606', '20140609', adjust='all') 274 | Out[15]: 275 | AAPL US Equity 276 | Px_Last 277 | 2014-06-06 85.22 278 | 2014-06-09 86.58 279 | 280 | Data Storage 281 | ------------ 282 | 283 | If ``BBG_ROOT`` is provided in ``os.environ``, data can be saved locally. 284 | By default, local storage is preferred over Bloomberg for all queries. 285 | 286 | Note that local data usage must be compliant with the Bloomberg Datafeed Addendum 287 | (full description in ``DAPI``): 288 | 289 | To access Bloomberg data via the API (and use that data in Microsoft Excel), 290 | your company must sign the 'Datafeed Addendum' to the Bloomberg Agreement.
291 | This legally binding contract describes the terms and conditions of your use 292 | of the data and information available via the API (the "Data"). 293 | The most fundamental requirement regarding your use of Data is that it cannot 294 | leave the local PC you use to access the BLOOMBERG PROFESSIONAL service. 295 | 296 | ============== ====================== 297 | Docs |docs| 298 | Build |actions| 299 | Coverage |codecov| 300 | Quality |codacy| 301 | \ |codeFactor| 302 | \ |codebeat| 303 | License |license| 304 | ============== ====================== 305 | 306 | .. |pypi| image:: https://img.shields.io/pypi/v/xbbg.svg 307 | :target: https://badge.fury.io/py/xbbg 308 | .. |version| image:: https://img.shields.io/pypi/pyversions/xbbg.svg 309 | :target: https://badge.fury.io/py/xbbg 310 | .. |actions| image:: https://github.com/alpha-xone/xbbg/workflows/Auto%20CI/badge.svg 311 | :target: https://github.com/alpha-xone/xbbg/actions 312 | :alt: GitHub Actions 313 | .. |azure| image:: https://dev.azure.com/alpha-xone/xbbg/_apis/build/status/alpha-xone.xbbg?branchName=main 314 | :target: https://dev.azure.com/alpha-xone/xbbg/_build 315 | :alt: Azure Pipeline 316 | .. |codecov| image:: https://codecov.io/gh/alpha-xone/xbbg/branch/main/graph/badge.svg 317 | :target: https://codecov.io/gh/alpha-xone/xbbg 318 | :alt: Codecov 319 | .. |docs| image:: https://readthedocs.org/projects/xbbg/badge/?version=latest 320 | :target: https://xbbg.readthedocs.io/ 321 | .. |codefactor| image:: https://www.codefactor.io/repository/github/alpha-xone/xbbg/badge 322 | :target: https://www.codefactor.io/repository/github/alpha-xone/xbbg 323 | :alt: CodeFactor 324 | .. |codacy| image:: https://app.codacy.com/project/badge/Grade/daec9f52ba344e3ea116c15f1fc6d541 325 | :target: https://www.codacy.com/gh/alpha-xone/xbbg 326 | .. |codebeat| image:: https://codebeat.co/badges/eef1f14d-72eb-445a-af53-12d3565385ec 327 | :target: https://codebeat.co/projects/github-com-alpha-xone-xbbg-main 328 | .. |license| image:: https://img.shields.io/github/license/alpha-xone/xbbg.svg 329 | :alt: GitHub license 330 | :target: https://github.com/alpha-xone/xbbg/blob/main/LICENSE 331 | .. |chat| image:: https://badges.gitter.im/xbbg/community.svg 332 | :target: https://gitter.im/xbbg/community 333 | .. |download| image:: https://img.shields.io/pypi/dm/xbbg 334 | :target: https://pypistats.org/packages/xbbg 335 | .. |coffee| image:: https://www.buymeacoffee.com/assets/img/custom_images/purple_img.png 336 | :target: https://www.buymeacoffee.com/Lntx29Oof 337 | .. _Bloomberg API Library: https://www.bloomberg.com/professional/support/api-library/ 338 | .. |xbbg| image:: https://raw.githubusercontent.com/alpha-xone/xbbg/main/docs/xbbg.png 339 | :alt: xbbg 340 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=build 12 | 13 | if "%1" == "" goto help 14 | 15 | %SPHINXBUILD% >NUL 2>NUL 16 | if errorlevel 9009 ( 17 | echo. 18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 19 | echo.installed, then set the SPHINXBUILD environment variable to point 20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 21 | echo.may add the Sphinx directory to PATH. 22 | echo.
23 |     echo.If you don't have Sphinx installed, grab it from
24 |     echo.http://sphinx-doc.org/
25 |     exit /b 1
26 | )
27 | 
28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
29 | goto end
30 | 
31 | :help
32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
33 | 
34 | :end
35 | popd
36 | 
--------------------------------------------------------------------------------
/docs/xbbg.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alpha-xone/xbbg/266d68a53bda61745191e1a86b0f1f32b33ae082/docs/xbbg.png
--------------------------------------------------------------------------------
/feeds/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alpha-xone/xbbg/266d68a53bda61745191e1a86b0f1f32b33ae082/feeds/__init__.py
--------------------------------------------------------------------------------
/feeds/pub.py:
--------------------------------------------------------------------------------
1 | import pynng
2 | import trio
3 | import fire
4 | import orjson
5 | 
6 | from xbbg import blp
7 | from functools import partial
8 | 
9 | DEFAULT_FLDS = [
10 |     'MKTDATA_EVENT_TYPE', 'EVT_TRADE_DATE_RT', 'TIME',
11 |     'TRADE_UPDATE_STAMP_RT', 'BID_UPDATE_STAMP_RT', 'ASK_UPDATE_STAMP_RT',
12 |     'LAST_PRICE', 'RT_PX_CHG_PCT_1D', 'IS_DELAYED_STREAM',
13 |     'VOLUME', 'EQY_TURNOVER_REALTIME',
14 | ]
15 | ADDRESS = 'ipc:///xbbg/stream'
16 | 
17 | 
18 | async def live(channel: str, tickers, **kwargs):
19 |     """
20 |     Broadcasts live data feeds
21 | 
22 |     Args:
23 |         channel: channel name
24 |         tickers: list of tickers
25 |         **kwargs: other parameters for `blp.live`
26 |     """
27 |     with pynng.Pub0() as pub:
28 |         pub.listen(address=f'{ADDRESS}/{channel}')
29 |         async for data in blp.live(tickers=tickers, **kwargs):
30 |             print(data)
31 |             await pub.asend(orjson.dumps(data))
32 | 
33 | 
34 | def main(**kwargs):
35 | 
36 |     kwargs['channel'] = kwargs.get('channel', 'futures')
37 |     kwargs['tickers'] = kwargs.get('tickers', ['ESA Index', 'CLA Comdty'])
38 |     kwargs['info'] = kwargs.get('info', DEFAULT_FLDS)
39 |     print(kwargs['tickers'])
40 |     run_live = partial(live, **kwargs)
41 |     trio.run(run_live)
42 | 
43 | 
44 | if __name__ == "__main__":
45 | 
46 |     # Example:
47 |     # python pub.py --channel=equity --tickers="['SPY US Equity','XLE US Equity']"
48 |     try:
49 |         fire.Fire(main)
50 |     except KeyboardInterrupt:
51 |         pass
52 | 
--------------------------------------------------------------------------------
/feeds/sub.py:
--------------------------------------------------------------------------------
1 | import pynng
2 | import trio
3 | import fire
4 | 
5 | from functools import partial
6 | 
7 | ADDRESS = 'ipc:///xbbg/stream'
8 | 
9 | 
10 | async def client(addr, max_msg=10):
11 |     with pynng.Sub0() as sock:
12 |         sock.subscribe('')
13 |         list(map(sock.dial, [f'{ADDRESS}/{_}' for _ in addr]))
14 |         while max_msg:
15 |             msg = await sock.arecv_msg()
16 |             print(msg.bytes)
17 |             # print(orjson.loads(msg.bytes.decode()))
18 |             max_msg -= 1
19 | 
20 | 
21 | def main(**kwargs):
22 | 
23 |     run_client = partial(client, **kwargs)
24 |     trio.run(run_client)
25 | 
26 | 
27 | if __name__ == "__main__":
28 | 
29 |     # Example:
30 |     # python sub.py --addr=[futures,equity]
31 |     try:
32 |         fire.Fire(main)
33 |     except KeyboardInterrupt:
34 |         pass
35 | 
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | 
-i https://pypi.org/simple 2 | --extra-index-url https://bcms.bloomberg.com/pip/simple/ 3 | numpy >= 1.15.0 4 | pandas >= 1.0.0, <=1.5.3 5 | pyarrow >= 1.0.1 6 | pytz >= 2020.4 7 | ruamel.yaml >= 0.15.0 8 | pytest 9 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import pathlib 2 | from os import path 3 | from setuptools import setup, find_packages 4 | 5 | # for pip >= 10 6 | try: 7 | from pip._internal.req import parse_requirements 8 | # for pip <= 9.0.3 9 | except ImportError: 10 | from pip.req import parse_requirements 11 | 12 | PACKAGE_ROOT = pathlib.Path(__file__).parent 13 | 14 | 15 | def parse_version(package): 16 | """ 17 | Parse versions 18 | """ 19 | init_file = f'{PACKAGE_ROOT}/{package}/__init__.py' 20 | with open(init_file, 'r', encoding='utf-8') as f: 21 | for line in f.readlines(): 22 | if '__version__' in line: 23 | return line.split('=')[1].strip()[1:-1] 24 | return '' 25 | 26 | 27 | def parse_markdown(): 28 | """ 29 | Parse markdown as description 30 | """ 31 | readme_file = f'{PACKAGE_ROOT}/README.md' 32 | if path.exists(readme_file): 33 | with open(readme_file, 'r', encoding='utf-8') as f: 34 | long_description = f.read() 35 | return long_description 36 | 37 | 38 | def parse_description(markdown=True): 39 | """ 40 | Parse the description in the README file 41 | """ 42 | if markdown: return parse_markdown() 43 | 44 | try: 45 | from pypandoc import convert 46 | 47 | readme_file = f'{PACKAGE_ROOT}/docs/index.rst' 48 | if not path.exists(readme_file): 49 | raise ImportError 50 | return convert(readme_file, 'rst') 51 | 52 | except ImportError: 53 | return parse_markdown() 54 | 55 | 56 | if __name__ == '__main__': 57 | 58 | setup( 59 | name='xbbg', 60 | version=parse_version('xbbg'), 61 | description='Intuitive Bloomberg data API', 62 | long_description=parse_description(), 63 | long_description_content_type='text/markdown', 64 | url='https://github.com/alpha-xone/xbbg', 65 | author='Alpha x1', 66 | author_email='alpha.xone@outlook.com', 67 | license='Apache', 68 | classifiers=[ 69 | "License :: OSI Approved :: Apache Software License", 70 | 'Programming Language :: Python :: 3.6', 71 | 'Programming Language :: Python :: 3.7', 72 | 'Programming Language :: Python :: 3.8', 73 | ], 74 | include_package_data=True, 75 | package_data={ 76 | 'yaml': ['xbbg/markets/*.yml'] 77 | }, 78 | install_requires=[ 79 | str(getattr(ir, 'req' if hasattr(ir, 'req') else 'requirement')) 80 | for ir in parse_requirements( 81 | f'{PACKAGE_ROOT}/requirements.txt', session='hack' 82 | ) 83 | ], 84 | packages=find_packages(include=['xbbg', 'xbbg.*']), 85 | dependency_links=[ 86 | 'https://bloomberg.bintray.com/pip/simple', 87 | ], 88 | ) 89 | 90 | print('\nBloomberg API') 91 | print('^^^^^^^^^^^^^\n') 92 | print('pip install --index-url=https://bloomberg.bintray.com/pip/simple blpapi') 93 | -------------------------------------------------------------------------------- /venv/Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | name = "pypi" 3 | url = "https://pypi.org/simple" 4 | verify_ssl = true 5 | 6 | [[source]] 7 | name = "bbg" 8 | url = "https://bloomberg.bintray.com/pip/simple" 9 | verify_ssl = true 10 | 11 | [dev-packages] 12 | ipykernel = "*" 13 | pytest = "*" 14 | xone = "*" 15 | pytest-cov = "*" 16 | coverage = "*" 17 | 18 | [packages] 19 | numpy = "*" 20 | pandas = "*" 21 | blpapi = "*" 22 | pdblp = "*" 
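# Editor's note (not in the original Pipfile): blpapi resolves from the "bbg"
# source defined above; it is not hosted on public PyPI.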
23 | pyarrow = "*" 24 | ruamel-yaml = "*" 25 | 26 | [requires] 27 | python_version = "3.6" 28 | -------------------------------------------------------------------------------- /xbbg/__init__.py: -------------------------------------------------------------------------------- 1 | """An intuitive Bloomberg API""" 2 | 3 | __version__ = '0.7.8a2' 4 | -------------------------------------------------------------------------------- /xbbg/blp.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | 3 | from functools import partial 4 | from itertools import product 5 | from contextlib import contextmanager 6 | 7 | from xbbg import __version__, const, pipeline 8 | from xbbg.io import logs, files, storage 9 | from xbbg.core import utils, conn, process 10 | from xbbg.core.conn import connect 11 | 12 | __all__ = [ 13 | '__version__', 14 | 'connect', 15 | 'bdp', 16 | 'bds', 17 | 'bdh', 18 | 'bdib', 19 | 'bdtick', 20 | 'earning', 21 | 'dividend', 22 | 'beqs', 23 | 'live', 24 | 'subscribe', 25 | 'adjust_ccy', 26 | 'turnover', 27 | ] 28 | 29 | 30 | def bdp(tickers, flds, **kwargs) -> pd.DataFrame: 31 | """ 32 | Bloomberg reference data 33 | 34 | Args: 35 | tickers: tickers 36 | flds: fields to query 37 | **kwargs: Bloomberg overrides 38 | 39 | Returns: 40 | pd.DataFrame 41 | """ 42 | logger = logs.get_logger(bdp, **kwargs) 43 | 44 | if isinstance(tickers, str): tickers = [tickers] 45 | if isinstance(flds, str): flds = [flds] 46 | 47 | request = process.create_request( 48 | service='//blp/refdata', 49 | request='ReferenceDataRequest', 50 | **kwargs, 51 | ) 52 | process.init_request(request=request, tickers=tickers, flds=flds, **kwargs) 53 | logger.debug(f'Sending request to Bloomberg ...\n{request}') 54 | conn.send_request(request=request, **kwargs) 55 | 56 | res = pd.DataFrame(process.rec_events(func=process.process_ref, **kwargs)) 57 | if kwargs.get('raw', False): return res 58 | if res.empty or any(fld not in res for fld in ['ticker', 'field']): 59 | return pd.DataFrame() 60 | 61 | return ( 62 | res 63 | .set_index(['ticker', 'field']) 64 | .unstack(level=1) 65 | .rename_axis(index=None, columns=[None, None]) 66 | .droplevel(axis=1, level=0) 67 | .loc[:, res.field.unique()] 68 | .pipe(pipeline.standard_cols, col_maps=kwargs.get('col_maps', None)) 69 | ) 70 | 71 | 72 | def bds(tickers, flds, use_port=False, **kwargs) -> pd.DataFrame: 73 | """ 74 | Bloomberg block data 75 | 76 | Args: 77 | tickers: ticker(s) 78 | flds: field 79 | use_port: use `PortfolioDataRequest` 80 | **kwargs: other overrides for query 81 | 82 | Returns: 83 | pd.DataFrame: block data 84 | """ 85 | logger = logs.get_logger(bds, **kwargs) 86 | 87 | part = partial(_bds_, fld=flds, logger=logger, use_port=use_port, **kwargs) 88 | if isinstance(tickers, str): tickers = [tickers] 89 | return pd.DataFrame(pd.concat(map(part, tickers), sort=False)) 90 | 91 | 92 | def _bds_( 93 | ticker: str, 94 | fld: str, 95 | logger: logs.logging.Logger, 96 | use_port: bool = False, 97 | **kwargs, 98 | ) -> pd.DataFrame: 99 | """ 100 | Get data of BDS of single ticker 101 | """ 102 | if 'has_date' not in kwargs: kwargs['has_date'] = True 103 | data_file = storage.ref_file(ticker=ticker, fld=fld, ext='pkl', **kwargs) 104 | if files.exists(data_file): 105 | logger.debug(f'Loading Bloomberg data from: {data_file}') 106 | return pd.DataFrame(pd.read_pickle(data_file)) 107 | 108 | request = process.create_request( 109 | service='//blp/refdata', 110 | request='PortfolioDataRequest' if use_port else 
'ReferenceDataRequest',
111 |         **kwargs,
112 |     )
113 |     process.init_request(request=request, tickers=ticker, flds=fld, **kwargs)
114 |     logger.debug(f'Sending request to Bloomberg ...\n{request}')
115 |     conn.send_request(request=request, **kwargs)
116 | 
117 |     res = pd.DataFrame(process.rec_events(func=process.process_ref, **kwargs))
118 |     if kwargs.get('raw', False): return res
119 |     if res.empty or any(fld not in res for fld in ['ticker', 'field']):
120 |         return pd.DataFrame()
121 | 
122 |     data = (
123 |         res
124 |         .set_index(['ticker', 'field'])
125 |         .droplevel(axis=0, level=1)
126 |         .rename_axis(index=None)
127 |         .pipe(pipeline.standard_cols, col_maps=kwargs.get('col_maps', None))
128 |     )
129 |     if data_file:
130 |         logger.debug(f'Saving Bloomberg data to: {data_file}')
131 |         files.create_folder(data_file, is_file=True)
132 |         data.to_pickle(data_file)
133 | 
134 |     return data
135 | 
136 | 
137 | def bdh(
138 |     tickers, flds=None, start_date=None, end_date='today', adjust=None, **kwargs
139 | ) -> pd.DataFrame:
140 |     """
141 |     Bloomberg historical data
142 | 
143 |     Args:
144 |         tickers: ticker(s)
145 |         flds: field(s)
146 |         start_date: start date
147 |         end_date: end date - default today
148 |         adjust: `all`, `dvd`, `normal`, `abn` (=abnormal), `split`, `-` or None
149 |             exact match of above words will adjust for corresponding events
150 |             Case 0: `-` no adjustment for dividend or split
151 |             Case 1: `dvd` or `normal|abn` will adjust for all dividends except splits
152 |             Case 2: `split` will adjust for splits and ignore all dividends
153 |             Case 3: `all` == `dvd|split` == adjust for all
154 |             Case 4: None == Bloomberg default OR use kwargs
155 |         **kwargs: overrides
156 | 
157 |     Returns:
158 |         pd.DataFrame
159 |     """
160 |     logger = logs.get_logger(bdh, **kwargs)
161 | 
162 |     if flds is None: flds = ['Last_Price']
163 |     e_dt = utils.fmt_dt(end_date, fmt='%Y%m%d')
164 |     if start_date is None: start_date = pd.Timestamp(e_dt) - pd.Timedelta(weeks=8)
165 |     s_dt = utils.fmt_dt(start_date, fmt='%Y%m%d')
166 | 
167 |     request = process.create_request(
168 |         service='//blp/refdata',
169 |         request='HistoricalDataRequest',
170 |         **kwargs,
171 |     )
172 |     process.init_request(
173 |         request=request, tickers=tickers, flds=flds,
174 |         start_date=s_dt, end_date=e_dt, adjust=adjust, **kwargs
175 |     )
176 |     logger.debug(f'Sending request to Bloomberg ...\n{request}')
177 |     conn.send_request(request=request, **kwargs)
178 | 
179 |     res = pd.DataFrame(process.rec_events(process.process_hist, **kwargs))
180 |     if kwargs.get('raw', False): return res
181 |     if res.empty or any(fld not in res for fld in ['ticker', 'date']):
182 |         return pd.DataFrame()
183 | 
184 |     return (
185 |         res
186 |         .set_index(['ticker', 'date'])
187 |         .unstack(level=0)
188 |         .rename_axis(index=None, columns=[None, None])
189 |         .swaplevel(0, 1, axis=1)
190 |         .reindex(columns=utils.flatten(tickers), level=0)
191 |         .reindex(columns=utils.flatten(flds), level=1)
192 |     )
193 | 
194 | 
195 | def bdib(ticker: str, dt, session='allday', typ='TRADE', **kwargs) -> pd.DataFrame:
196 |     """
197 |     Bloomberg intraday bar data
198 | 
199 |     Args:
200 |         ticker: ticker name
201 |         dt: date to download
202 |         session: [allday, day, am, pm, pre, post]
203 |         typ: [TRADE, BID, ASK, BID_BEST, ASK_BEST, BEST_BID, BEST_ASK]
204 |         **kwargs:
205 |             ref: reference ticker or exchange
206 |                 used as supplement if exchange info is not defined for `ticker`
207 |             batch: whether this is a batch process to download data
208 |             log: level of logs
209 | 
210 |     Returns:
211 |         pd.DataFrame
212 |     """
213 |     from
xbbg.core import trials 214 | 215 | logger = logs.get_logger(bdib, **kwargs) 216 | 217 | ex_info = const.exch_info(ticker=ticker, **kwargs) 218 | if ex_info.empty: raise KeyError(f'Cannot find exchange info for {ticker}') 219 | 220 | ss_rng = process.time_range(dt=dt, ticker=ticker, session=session, tz=ex_info.tz, **kwargs) 221 | data_file = storage.bar_file(ticker=ticker, dt=dt, typ=typ) 222 | if files.exists(data_file) and kwargs.get('cache', True) and (not kwargs.get('reload', False)): 223 | res = ( 224 | pd.read_parquet(data_file) 225 | .pipe(pipeline.add_ticker, ticker=ticker) 226 | .loc[ss_rng[0]:ss_rng[1]] 227 | ) 228 | if not res.empty: 229 | logger.debug(f'Loading Bloomberg intraday data from: {data_file}') 230 | return res 231 | 232 | if not process.check_current(dt=dt, logger=logger, **kwargs): return pd.DataFrame() 233 | 234 | cur_dt = pd.Timestamp(dt).strftime('%Y-%m-%d') 235 | q_tckr = ticker 236 | if ex_info.get('is_fut', False): 237 | is_sprd = ex_info.get('has_sprd', False) and (len(ticker[:-1]) != ex_info['tickers'][0]) 238 | if not is_sprd: 239 | q_tckr = fut_ticker(gen_ticker=ticker, dt=dt, freq=ex_info['freq']) 240 | if q_tckr == '': 241 | logger.error(f'cannot find futures ticker for {ticker} ...') 242 | return pd.DataFrame() 243 | 244 | info_log = f'{q_tckr} / {cur_dt} / {typ}' 245 | trial_kw = dict(ticker=ticker, dt=dt, typ=typ, func='bdib') 246 | num_trials = trials.num_trials(**trial_kw) 247 | if num_trials >= 2: 248 | if kwargs.get('batch', False): return pd.DataFrame() 249 | logger.info(f'{num_trials} trials with no data {info_log}') 250 | return pd.DataFrame() 251 | 252 | while conn.bbg_session(**kwargs).tryNextEvent(): pass 253 | time_rng = process.time_range(dt=dt, ticker=ticker, session='allday', **kwargs) 254 | request = process.create_request( 255 | service='//blp/refdata', 256 | request='IntradayBarRequest', 257 | settings=[ 258 | ('security', ticker), 259 | ('eventType', typ), 260 | ('interval', kwargs.get('interval', 1)), 261 | ('startDateTime', time_rng[0]), 262 | ('endDateTime', time_rng[1]), 263 | ], 264 | **kwargs, 265 | ) 266 | logger.debug(f'Sending request to Bloomberg ...\n{request}') 267 | conn.send_request(request=request, **kwargs) 268 | 269 | res = pd.DataFrame(process.rec_events(func=process.process_bar, **kwargs)) 270 | if res.empty or ('time' not in res): 271 | logger.warning(f'No data for {info_log} ...') 272 | trials.update_trials(cnt=num_trials + 1, **trial_kw) 273 | return pd.DataFrame() 274 | 275 | data = ( 276 | res 277 | .set_index('time') 278 | .rename_axis(index=None) 279 | .rename(columns={'numEvents': 'num_trds'}) 280 | .tz_localize('UTC') 281 | .tz_convert(ex_info.tz) 282 | .pipe(pipeline.add_ticker, ticker=ticker) 283 | ) 284 | if kwargs.get('cache', True): 285 | storage.save_intraday(data=data[ticker], ticker=ticker, dt=dt, typ=typ, **kwargs) 286 | 287 | return data.loc[ss_rng[0]:ss_rng[1]] 288 | 289 | 290 | def bdtick(ticker, dt, session='allday', time_range=None, types=None, **kwargs) -> pd.DataFrame: 291 | """ 292 | Bloomberg tick data 293 | 294 | Args: 295 | ticker: ticker name 296 | dt: date to download 297 | session: [allday, day, am, pm, pre, post] 298 | time_range: tuple of start and end time (must be converted into UTC) 299 | if this is given, `dt` and `session` will be ignored 300 | types: str or list, one or combinations of [ 301 | TRADE, AT_TRADE, BID, ASK, MID_PRICE, 302 | BID_BEST, ASK_BEST, BEST_BID, BEST_ASK, 303 | ] 304 | 305 | Returns: 306 | pd.DataFrame 307 | """ 308 | logger = logs.get_logger(bdtick, 
**kwargs) 309 | 310 | if types is None: types = ['TRADE'] 311 | exch = const.exch_info(ticker=ticker, **kwargs) 312 | if exch.empty: raise LookupError(f'Cannot find exchange info for {ticker}') 313 | 314 | if isinstance(time_range, (tuple, list)) and (len(time_range) == 2): 315 | cur_dt = pd.Timestamp(dt).strftime('%Y-%m-%d') 316 | time_rng = ( 317 | pd.DatetimeIndex([ 318 | f'{cur_dt} {time_range[0]}', 319 | f'{cur_dt} {time_range[1]}', 320 | ]) 321 | .tz_localize(exch.tz) 322 | .tz_convert(process.DEFAULT_TZ) 323 | .tz_convert('UTC') 324 | ) 325 | else: 326 | time_rng = process.time_range(dt=dt, ticker=ticker, session=session, **kwargs) 327 | 328 | while conn.bbg_session(**kwargs).tryNextEvent(): pass 329 | request = process.create_request( 330 | service='//blp/refdata', 331 | request='IntradayTickRequest', 332 | settings=[ 333 | ('security', ticker), 334 | ('startDateTime', time_rng[0]), 335 | ('endDateTime', time_rng[1]), 336 | ('includeConditionCodes', True), 337 | ('includeExchangeCodes', True), 338 | ('includeNonPlottableEvents', True), 339 | ('includeBrokerCodes', True), 340 | ('includeRpsCodes', True), 341 | ('includeTradeTime', True), 342 | ('includeActionCodes', True), 343 | ('includeIndicatorCodes', True), 344 | ], 345 | append={'eventTypes': types}, 346 | **kwargs, 347 | ) 348 | 349 | logger.debug(f'Sending request to Bloomberg ...\n{request}') 350 | conn.send_request(request=request) 351 | 352 | res = pd.DataFrame(process.rec_events(func=process.process_bar, typ='t', **kwargs)) 353 | if kwargs.get('raw', False): return res 354 | if res.empty or ('time' not in res): return pd.DataFrame() 355 | 356 | return ( 357 | res 358 | .set_index('time') 359 | .rename_axis(index=None) 360 | .tz_localize('UTC') 361 | .tz_convert(exch.tz) 362 | .pipe(pipeline.add_ticker, ticker=ticker) 363 | .rename(columns={ 364 | 'size': 'volume', 365 | 'type': 'typ', 366 | 'conditionCodes': 'cond', 367 | 'exchangeCode': 'exch', 368 | 'tradeTime': 'trd_time', 369 | }) 370 | ) 371 | 372 | 373 | def earning(ticker, by='Geo', typ='Revenue', ccy=None, level=None, **kwargs) -> pd.DataFrame: 374 | """ 375 | Earning exposures by Geo or Products 376 | 377 | Args: 378 | ticker: ticker name 379 | by: [G(eo), P(roduct)] 380 | typ: type of earning, start with `PG_` in Bloomberg FLDS - default `Revenue` 381 | `Revenue` - Revenue of the company 382 | `Operating_Income` - Operating Income (also named as EBIT) of the company 383 | `Assets` - Assets of the company 384 | `Gross_Profit` - Gross profit of the company 385 | `Capital_Expenditures` - Capital expenditures of the company 386 | ccy: currency of earnings 387 | level: hierarchy level of earnings 388 | 389 | Returns: 390 | pd.DataFrame 391 | """ 392 | kwargs.pop('raw', None) 393 | ovrd = 'G' if by[0].upper() == 'G' else 'P' 394 | new_kw = dict(Product_Geo_Override=ovrd) 395 | 396 | year = kwargs.pop('year', None) 397 | periods = kwargs.pop('periods', None) 398 | if year: kwargs['Eqy_Fund_Year'] = year 399 | if periods: kwargs['Number_Of_Periods'] = periods 400 | 401 | header = bds(tickers=ticker, flds='PG_Bulk_Header', **new_kw, **kwargs) 402 | if ccy: kwargs['Eqy_Fund_Crncy'] = ccy 403 | if level: kwargs['PG_Hierarchy_Level'] = level 404 | data = bds(tickers=ticker, flds=f'PG_{typ}', **new_kw, **kwargs) 405 | 406 | if data.empty or header.empty: return pd.DataFrame() 407 | if data.shape[1] != header.shape[1]: 408 | raise ValueError('Inconsistent shape of data and header') 409 | data.columns = ( 410 | header.iloc[0] 411 | .str.lower() 412 | .str.replace(' ', '_') 413 
| .str.replace('_20', '20') 414 | .tolist() 415 | ) 416 | 417 | if 'level' not in data: raise KeyError('Cannot find [level] in data') 418 | for yr in data.columns[data.columns.str.startswith('fy')]: 419 | process.earning_pct(data=data, yr=yr) 420 | 421 | return data 422 | 423 | 424 | def dividend(tickers, typ='all', start_date=None, end_date=None, **kwargs) -> pd.DataFrame: 425 | """ 426 | Bloomberg dividend / split history 427 | 428 | Args: 429 | tickers: list of tickers 430 | typ: dividend adjustment type 431 | `all`: `DVD_Hist_All` 432 | `dvd`: `DVD_Hist` 433 | `split`: `Eqy_DVD_Hist_Splits` 434 | `gross`: `Eqy_DVD_Hist_Gross` 435 | `adjust`: `Eqy_DVD_Adjust_Fact` 436 | `adj_fund`: `Eqy_DVD_Adj_Fund` 437 | `with_amt`: `DVD_Hist_All_with_Amt_Status` 438 | `dvd_amt`: `DVD_Hist_with_Amt_Status` 439 | `gross_amt`: `DVD_Hist_Gross_with_Amt_Stat` 440 | `projected`: `BDVD_Pr_Ex_Dts_DVD_Amts_w_Ann` 441 | start_date: start date 442 | end_date: end date 443 | **kwargs: overrides 444 | 445 | Returns: 446 | pd.DataFrame 447 | """ 448 | kwargs.pop('raw', None) 449 | if isinstance(tickers, str): tickers = [tickers] 450 | tickers = [t for t in tickers if ('Equity' in t) and ('=' not in t)] 451 | 452 | fld = const.DVD_TPYES.get(typ, typ) 453 | 454 | if (fld == 'Eqy_DVD_Adjust_Fact') and ('Corporate_Actions_Filter' not in kwargs): 455 | kwargs['Corporate_Actions_Filter'] = 'NORMAL_CASH|ABNORMAL_CASH|CAPITAL_CHANGE' 456 | 457 | if start_date: 458 | kwargs['DVD_Start_Dt'] = utils.fmt_dt(start_date, fmt='%Y%m%d') 459 | if end_date: 460 | kwargs['DVD_End_Dt'] = utils.fmt_dt(end_date, fmt='%Y%m%d') 461 | 462 | return bds(tickers=tickers, flds=fld, col_maps=const.DVD_COLS, **kwargs) 463 | 464 | 465 | def beqs(screen, asof=None, typ='PRIVATE', group='General', **kwargs) -> pd.DataFrame: 466 | """ 467 | Bloomberg equity screening 468 | 469 | Args: 470 | screen: screen name 471 | asof: as of date 472 | typ: GLOBAL/B (Bloomberg) or PRIVATE/C (Custom, default) 473 | group: group name if screen is organized into groups 474 | 475 | Returns: 476 | pd.DataFrame 477 | """ 478 | logger = logs.get_logger(beqs, **kwargs) 479 | 480 | request = process.create_request( 481 | service='//blp/refdata', 482 | request='BeqsRequest', 483 | settings=[ 484 | ('screenName', screen), 485 | ('screenType', 'GLOBAL' if typ[0].upper() in ['G', 'B'] else 'PRIVATE'), 486 | ('Group', group), 487 | ], 488 | ovrds=[('PiTDate', utils.fmt_dt(asof, '%Y%m%d'))] if asof else None, 489 | **kwargs, 490 | ) 491 | 492 | logger.debug(f'Sending request to Bloomberg ...\n{request}') 493 | conn.send_request(request=request, **kwargs) 494 | res = pd.DataFrame(process.rec_events(func=process.process_ref, **kwargs)) 495 | if res.empty: 496 | if kwargs.get('trial', 0): return pd.DataFrame() 497 | return beqs(screen=screen, asof=asof, typ=typ, group=group, trial=1, **kwargs) 498 | 499 | if kwargs.get('raw', False): return res 500 | cols = res.field.unique() 501 | return ( 502 | res 503 | .set_index(['ticker', 'field']) 504 | .unstack(level=1) 505 | .rename_axis(index=None, columns=[None, None]) 506 | .droplevel(axis=1, level=0) 507 | .loc[:, cols] 508 | .pipe(pipeline.standard_cols) 509 | ) 510 | 511 | 512 | @contextmanager 513 | def subscribe(tickers, flds=None, identity=None, options=None, **kwargs): 514 | """ 515 | Subscribe Bloomberg realtime data 516 | 517 | Args: 518 | tickers: list of tickers 519 | flds: fields to subscribe, default: Last_Price, Bid, Ask 520 | identity: Bloomberg identity 521 | """ 522 | logger = logs.get_logger(subscribe, **kwargs) 523 
|     if isinstance(tickers, str): tickers = [tickers]
524 |     if flds is None: flds = ['Last_Price', 'Bid', 'Ask']
525 |     if isinstance(flds, str): flds = [flds]
526 | 
527 |     sub_list = conn.blpapi.SubscriptionList()
528 |     for ticker in tickers:
529 |         topic = f'//blp/mktdata/{ticker}'
530 |         cid = conn.blpapi.CorrelationId(ticker)
531 |         logger.debug(f'Subscribing {cid} => {topic}')
532 |         sub_list.add(topic, flds, correlationId=cid, options=options)
533 | 
534 |     try:
535 |         conn.bbg_session(**kwargs).subscribe(sub_list, identity)
536 |         yield
537 |     finally:
538 |         conn.bbg_session(**kwargs).unsubscribe(sub_list)
539 | 
540 | 
541 | async def live(tickers, flds=None, info=None, max_cnt=0, options=None, **kwargs):
542 |     """
543 |     Subscribe to tickers and stream market data feeds from Bloomberg
544 | 
545 |     Args:
546 |         tickers: list of tickers
547 |         flds: fields to subscribe
548 |         info: list of keys of interests (ticker will be included)
549 |         max_cnt: max number of data points to receive
550 | 
551 |     Yields:
552 |         dict: Bloomberg market data
553 | 
554 |     Examples:
555 |         >>> # async for _ in live('SPY US Equity', info=const.LIVE_INFO): pass
556 |     """
557 |     from collections.abc import Iterable
558 | 
559 |     logger = logs.get_logger(live, **kwargs)
560 |     evt_typs = conn.event_types()
561 | 
562 |     if flds is None:
563 |         s_flds = ['LAST_PRICE', 'BID', 'ASK']
564 |     else:
565 |         if isinstance(flds, str): flds = [flds]
566 |         s_flds = [fld.upper() for fld in flds]
567 | 
568 |     if isinstance(info, str): info = [info]
569 |     if isinstance(info, Iterable): info = [key.upper() for key in info]
570 |     if info is None: info = const.LIVE_INFO
571 | 
572 |     sess = conn.bbg_session(**kwargs)
573 |     while sess.tryNextEvent(): pass
574 |     with subscribe(tickers=tickers, flds=s_flds, options=options, **kwargs):
575 |         cnt = 0
576 |         while cnt <= max_cnt:
577 |             try:
578 |                 ev = sess.tryNextEvent()
579 |                 if ev is None: continue
580 |                 if evt_typs[ev.eventType()] != 'SUBSCRIPTION_DATA': continue
581 | 
582 |                 for msg, fld in product(ev, s_flds):
583 |                     if not msg.hasElement(fld): continue
584 |                     if msg.getElement(fld).isNull(): continue
585 |                     yield {
586 |                         **{
587 |                             'TICKER': msg.correlationIds()[0].value(),
588 |                             'FIELD': fld,
589 |                         },
590 |                         **{
591 |                             str(elem.name()): process.elem_value(elem)
592 |                             for elem in msg.asElement().elements()
593 |                             if (True if not info else str(elem.name()) in info)
594 |                         },
595 |                     }
596 |                     if max_cnt: cnt += 1
597 | 
598 |             except ValueError as e: logger.debug(e)
599 |             except KeyboardInterrupt: break
600 | 
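# Editor's example (not part of the original source): a minimal sketch of
# consuming the async generator above. It assumes a reachable Bloomberg
# session; the ticker, field, and count below are illustrative only.
async def _example_live_stream():
    """Print a handful of live ticks (illustrative helper, safe to remove)."""
    async for tick in live(['SPY US Equity'], flds='Last_Price', max_cnt=5):
        print(tick['TICKER'], tick.get('LAST_PRICE'))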
601 | 
602 | def active_futures(ticker: str, dt, **kwargs) -> str:
603 |     """
604 |     Active futures contract
605 | 
606 |     Args:
607 |         ticker: futures ticker, i.e., ESA Index, Z A Index, CLA Comdty, etc.
608 |         dt: date
609 | 
610 |     Returns:
611 |         str: ticker name
612 |     """
613 |     t_info = ticker.split()
614 |     prefix, asset = ' '.join(t_info[:-1]), t_info[-1]
615 |     info = const.market_info(f'{prefix[:-1]}1 {asset}')
616 | 
617 |     f1, f2 = f'{prefix[:-1]}1 {asset}', f'{prefix[:-1]}2 {asset}'
618 |     fut_2 = fut_ticker(gen_ticker=f2, dt=dt, freq=info.get('freq', 'M'), **kwargs)
619 |     fut_1 = fut_ticker(gen_ticker=f1, dt=dt, freq=info.get('freq', 'M'), **kwargs)
620 | 
621 |     fut_tk = bdp(tickers=[fut_1, fut_2], flds='Last_Tradeable_Dt')
622 | 
623 |     if pd.Timestamp(dt).month < pd.Timestamp(fut_tk.last_tradeable_dt[0]).month: return fut_1
624 | 
625 |     dts = pd.bdate_range(end=dt, periods=10)
626 |     volume = bdh(fut_tk.index, flds='volume', start_date=dts[0], end_date=dts[-1])
627 |     if volume.empty: return fut_1
628 |     return volume.iloc[-1].idxmax()[0]
629 | 
630 | 
631 | def fut_ticker(gen_ticker: str, dt, freq: str, **kwargs) -> str:
632 |     """
633 |     Get proper ticker from generic ticker
634 | 
635 |     Args:
636 |         gen_ticker: generic ticker
637 |         dt: date
638 |         freq: futures contract frequency
639 | 
640 |     Returns:
641 |         str: exact futures ticker
642 |     """
643 |     logger = logs.get_logger(fut_ticker, **kwargs)
644 |     dt = pd.Timestamp(dt)
645 |     t_info = gen_ticker.split()
646 |     pre_dt = pd.bdate_range(end='today', periods=1)[-1]
647 |     same_month = (pre_dt.month == dt.month) and (pre_dt.year == dt.year)
648 | 
649 |     asset = t_info[-1]
650 |     if asset in ['Index', 'Curncy', 'Comdty']:
651 |         ticker = ' '.join(t_info[:-1])
652 |         prefix, idx, postfix = ticker[:-1], int(ticker[-1]) - 1, asset
653 | 
654 |     elif asset == 'Equity':
655 |         ticker = t_info[0]
656 |         prefix, idx, postfix = ticker[:-1], int(ticker[-1]) - 1, ' '.join(t_info[1:])
657 | 
658 |     else:
659 |         logger.error(f'unknown asset type for ticker: {gen_ticker}')
660 |         return ''
661 | 
662 |     month_ext = 4 if asset == 'Comdty' else 2
663 |     months = pd.date_range(start=dt, periods=max(idx + month_ext, 3), freq=freq)
664 |     logger.debug(f'pulling expiry dates for months: {months}')
665 | 
666 |     def to_fut(month):
667 |         return prefix + const.Futures[month.strftime('%b')] + \
668 |             month.strftime('%y')[-1 if same_month else -2:] + ' ' + postfix
669 | 
670 |     fut = [to_fut(m) for m in months]
671 |     logger.debug(f'trying futures: {fut}')
672 |     # noinspection PyBroadException
673 |     try:
674 |         fut_matu = bdp(tickers=fut, flds='last_tradeable_dt')
675 |     except Exception as e1:
676 |         logger.error(f'error downloading futures contracts (1st trial) {e1}:\n{fut}')
677 |         # noinspection PyBroadException
678 |         try:
679 |             fut = fut[:-1]
680 |             logger.debug(f'trying futures (2nd trial): {fut}')
681 |             fut_matu = bdp(tickers=fut, flds='last_tradeable_dt')
682 |         except Exception as e2:
683 |             logger.error(f'error downloading futures contracts (2nd trial) {e2}:\n{fut}')
684 |             return ''
685 | 
686 |     if 'last_tradeable_dt' not in fut_matu:
687 |         logger.warning(f'no futures found for {fut}')
688 |         return ''
689 | 
690 |     fut_matu.sort_values(by='last_tradeable_dt', ascending=True, inplace=True)
691 |     sub_fut = fut_matu[pd.DatetimeIndex(fut_matu.last_tradeable_dt) > dt]
692 |     logger.debug(f'futures full chain:\n{fut_matu.to_string()}')
693 |     logger.debug(f'getting index {idx} from:\n{sub_fut.to_string()}')
694 |     return sub_fut.index.values[idx]
695 | 
696 | 
697 | def adjust_ccy(data: pd.DataFrame, ccy: str = 'USD') -> pd.DataFrame:
698 |     """
699 |     Adjust data into the given currency
700 | 
701 |     Args:
702 |         data: daily price / turnover / etc.
to adjust 703 | ccy: currency to adjust to 704 | 705 | Returns: 706 | pd.DataFrame 707 | """ 708 | if data.empty: return pd.DataFrame() 709 | if ccy.lower() == 'local': return data 710 | tickers = data.columns.get_level_values(level=0).unique() 711 | start_date = data.index[0] 712 | end_date = data.index[-1] 713 | 714 | uccy = bdp(tickers=tickers, flds='crncy') 715 | if not uccy.empty: 716 | adj = ( 717 | uccy.crncy 718 | .map(lambda v: { 719 | 'ccy': None if v.upper() == ccy else f'{ccy}{v.upper()} Curncy', 720 | 'factor': 100. if v[-1].islower() else 1., 721 | }) 722 | .apply(pd.Series) 723 | .dropna(subset=['ccy']) 724 | ) 725 | else: adj = pd.DataFrame() 726 | 727 | if not adj.empty: 728 | fx = ( 729 | bdh(tickers=adj.ccy.unique(), start_date=start_date, end_date=end_date) 730 | .xs('Last_Price', axis=1, level=1) 731 | ) 732 | else: fx = pd.DataFrame() 733 | 734 | return ( 735 | pd.concat([ 736 | pd.Series( 737 | ( 738 | data[t] 739 | .dropna() 740 | .prod(axis=1) 741 | .div( 742 | (fx[adj.loc[t, 'ccy']] * adj.loc[t, 'factor']) 743 | if t in adj.index else 1., 744 | ) 745 | ), 746 | name=t, 747 | ) 748 | for t in tickers 749 | ], axis=1) 750 | ) 751 | 752 | 753 | def turnover( 754 | tickers, 755 | flds='Turnover', 756 | start_date=None, 757 | end_date=None, 758 | ccy: str = 'USD', 759 | factor: float = 1e6, 760 | ) -> pd.DataFrame: 761 | """ 762 | Currency adjusted turnover (in million) 763 | 764 | Args: 765 | tickers: ticker or list of tickers 766 | flds: override `flds`, 767 | start_date: start date, default 1 month prior to `end_date` 768 | end_date: end date, default T - 1 769 | ccy: currency - 'USD' (default), any currency, or 'local' (no adjustment) 770 | factor: adjustment factor, default 1e6 - return values in millions 771 | 772 | Returns: 773 | pd.DataFrame 774 | """ 775 | if end_date is None: 776 | end_date = pd.bdate_range(end='today', periods=2)[0] 777 | if start_date is None: 778 | start_date = pd.bdate_range(end=end_date, periods=2, freq='M')[0] 779 | if isinstance(tickers, str): tickers = [tickers] 780 | 781 | data = bdh(tickers=tickers, flds=flds, start_date=start_date, end_date=end_date) 782 | cols = data.columns.get_level_values(level=0).unique() 783 | 784 | # If turnover is not available, use volume and vwap for calculation 785 | use_volume = pd.DataFrame() 786 | if isinstance(flds, str) and (flds.lower() == 'turnover'): 787 | vol_tcks = [t for t in tickers if t not in cols] 788 | if vol_tcks: 789 | use_volume = turnover( 790 | tickers=vol_tcks, 791 | flds=['eqy_weighted_avg_px', 'volume'], 792 | start_date=start_date, 793 | end_date=end_date, 794 | ccy=ccy, 795 | factor=factor, 796 | ) 797 | 798 | if data.empty and use_volume.empty: return pd.DataFrame() 799 | return pd.concat([adjust_ccy(data=data, ccy=ccy).div(factor), use_volume], axis=1) 800 | -------------------------------------------------------------------------------- /xbbg/const.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | 3 | from collections import namedtuple 4 | from xbbg.core import timezone 5 | from xbbg.io import files, logs, param 6 | 7 | Futures = dict( 8 | Jan='F', Feb='G', Mar='H', Apr='J', May='K', Jun='M', 9 | Jul='N', Aug='Q', Sep='U', Oct='V', Nov='X', Dec='Z', 10 | ) 11 | CurrencyPair = namedtuple('CurrencyPair', ['ticker', 'factor', 'power']) 12 | ValidSessions = ['allday', 'day', 'am', 'pm', 'night', 'pre', 'post'] 13 | 14 | PKG_PATH = files.abspath(__file__, 0) 15 | 16 | ASSET_INFO = { 17 | 'Index': ['tickers'], 18 | 
'Comdty': ['tickers', 'key_month'], 19 | 'Curncy': ['tickers'], 20 | 'Equity': ['exch_codes'], 21 | } 22 | 23 | DVD_TPYES = { 24 | 'all': 'DVD_Hist_All', 25 | 'dvd': 'DVD_Hist', 26 | 'split': 'Eqy_DVD_Hist_Splits', 27 | 'gross': 'Eqy_DVD_Hist_Gross', 28 | 'adjust': 'Eqy_DVD_Adjust_Fact', 29 | 'adj_fund': 'Eqy_DVD_Adj_Fund', 30 | 'with_amt': 'DVD_Hist_All_with_Amt_Status', 31 | 'dvd_amt': 'DVD_Hist_with_Amt_Status', 32 | 'gross_amt': 'DVD_Hist_Gross_with_Amt_Stat', 33 | 'projected': 'BDVD_Pr_Ex_Dts_DVD_Amts_w_Ann', 34 | } 35 | 36 | DVD_COLS = { 37 | 'Declared Date': 'dec_date', 38 | 'Ex-Date': 'ex_date', 39 | 'Record Date': 'rec_date', 40 | 'Payable Date': 'pay_date', 41 | 'Dividend Amount': 'dvd_amt', 42 | 'Dividend Frequency': 'dvd_freq', 43 | 'Dividend Type': 'dvd_type', 44 | 'Amount Status': 'amt_status', 45 | 'Adjustment Date': 'adj_date', 46 | 'Adjustment Factor': 'adj_factor', 47 | 'Adjustment Factor Operator Type': 'adj_op', 48 | 'Adjustment Factor Flag': 'adj_flag', 49 | 'Amount Per Share': 'amt_ps', 50 | 'Projected/Confirmed': 'category', 51 | } 52 | 53 | LIVE_INFO = { 54 | # Common fields 55 | 'MKTDATA_EVENT_TYPE', 'MKTDATA_EVENT_SUBTYPE', 'IS_DELAYED_STREAM', 56 | # Last Price 57 | 'LAST_PRICE', 'RT_PX_CHG_PCT_1D', 'REALTIME_PERCENT_BID_ASK_SPREAD', 58 | 'EVT_TRADE_DATE_RT', 'TRADE_UPDATE_STAMP_RT', 59 | 'EQY_TURNOVER_REALTIME', 'VOLUME', 60 | # Bid 61 | 'BID', 'BID_UPDATE_STAMP_RT', 62 | # Ask 63 | 'ASK', 'ASK_UPDATE_STAMP_RT', 64 | # Common in bid / ask 65 | 'SPREAD_BA', 'MID', 66 | } 67 | 68 | LIVE_CHG = { 69 | 'RT_PX_CHG_PCT_1D', 'CHG_PCT_1M_RT', 'CHG_PCT_3M_RT', 70 | 'CHG_PCT_MTD_RT', 'CHG_PCT_QTD_RT', 'CHG_PCT_YTD_RT', 71 | 'REALTIME_2_DAY_CHANGE_PERCENT', 'REALTIME_5_DAY_CHANGE_PERCENT', 72 | 'REALTIME_15_SEC_PRICE_PCT_CHG', 'REALTIME_ONE_MIN_PRICE_PCT_CHG', 73 | # Equities only 74 | 'REALTIME_FIVE_MIN_PRICE_PCT_CHG', 'REALTIME_15_MIN_PRICE_PCT_CHG', 75 | 'REALTIME_ONE_HOUR_PRICE_PCT_CHG', 76 | } 77 | 78 | LIVE_VOL = { 79 | 'REALTIME_VOLUME_5_DAY_INTERVAL', 80 | # Real-time current volume as % change from N-day avg volume 81 | 'DELTA_AVAT_1_DAY_INTERVAL', 'DELTA_AVAT_5_DAY_INTERVAL', 82 | 'DELTA_AVAT_10_DAY_INTERVAL', 'DELTA_AVAT_20_DAY_INTERVAL', 83 | 'DELTA_AVAT_30_DAY_INTERVAL', 'DELTA_AVAT_100_DAY_INTERVAL', 84 | 'DELTA_AVAT_180_DAY_INTERVAL', 85 | # Real-time turnover as % change from N-day average turnover 86 | 'DELTA_ATAT_1_DAY_INTERVAL', 'DELTA_ATAT_5_DAY_INTERVAL', 87 | 'DELTA_ATAT_10_DAY_INTERVAL', 'DELTA_ATAT_20_DAY_INTERVAL', 88 | 'DELTA_ATAT_30_DAY_INTERVAL', 'DELTA_ATAT_100_DAY_INTERVAL', 89 | 'DELTA_ATAT_180_DAY_INTERVAL', 90 | } 91 | 92 | LIVE_RATIO = { 93 | 'PRICE_EARNINGS_RATIO_RT', 'PRICE_TO_BOOK_RATIO_RT', 94 | 'PRICE_TO_SALES_RATIO_RT', 'PRICE_CASH_FLOW_RT', 'PRICE_EBITDA_RT', 95 | } 96 | 97 | 98 | def exch_info(ticker: str, **kwargs) -> pd.Series: 99 | """ 100 | Exchange info for given ticker 101 | 102 | Args: 103 | ticker: ticker or exchange 104 | **kwargs: 105 | ref: reference ticker or exchange 106 | used as supplement if exchange info is not defined for `ticker` 107 | original: original ticker (for logging) 108 | config: info from exch.yml 109 | 110 | Returns: 111 | pd.Series 112 | 113 | Examples: 114 | >>> exch_info('SPY US Equity') 115 | tz America/New_York 116 | allday [04:00, 20:00] 117 | day [09:30, 16:00] 118 | post [16:01, 20:00] 119 | pre [04:00, 09:30] 120 | Name: EquityUS, dtype: object 121 | >>> exch_info('SPY US Equity', ref='EquityUS') 122 | tz America/New_York 123 | allday [04:00, 20:00] 124 | day [09:30, 16:00] 125 | post [16:01, 
20:00] 126 | pre [04:00, 09:30] 127 | Name: EquityUS, dtype: object 128 | >>> exch_info('ES1 Index') 129 | tz America/New_York 130 | allday [18:00, 17:00] 131 | day [08:00, 17:00] 132 | Name: CME, dtype: object 133 | >>> exch_info('ESM0 Index', ref='ES1 Index') 134 | tz America/New_York 135 | allday [18:00, 17:00] 136 | day [08:00, 17:00] 137 | Name: CME, dtype: object 138 | >>> exch_info('Z 1 Index') 139 | tz Europe/London 140 | allday [01:00, 21:00] 141 | day [01:00, 21:00] 142 | Name: FuturesFinancialsICE, dtype: object 143 | >>> exch_info('TESTTICKER Corp') 144 | Series([], dtype: object) 145 | >>> exch_info('US') 146 | tz America/New_York 147 | allday [04:00, 20:00] 148 | day [09:30, 16:00] 149 | post [16:01, 20:00] 150 | pre [04:00, 09:30] 151 | Name: EquityUS, dtype: object 152 | >>> exch_info('UXF1UXG1 Index') 153 | tz America/New_York 154 | allday [18:00, 17:00] 155 | day [18:00, 17:00] 156 | Name: FuturesCBOE, dtype: object 157 | >>> exch_info('TESTTICKER Index', original='TESTTICKER Index') 158 | Series([], dtype: object) 159 | >>> exch_info('TESTTCK Index') 160 | Series([], dtype: object) 161 | """ 162 | logger = logs.get_logger(exch_info, level='debug') 163 | 164 | if kwargs.get('ref', ''): 165 | return exch_info(ticker=kwargs['ref'], **{k: v for k, v in kwargs.items() if k != 'ref'}) 166 | 167 | exch = kwargs.get('config', param.load_config(cat='exch')) 168 | original = kwargs.get('original', '') 169 | 170 | # Case 1: Use exchange directly 171 | if ticker in exch.index: 172 | info = exch.loc[ticker].dropna() 173 | 174 | # Check required info 175 | if info.reindex(['allday', 'tz']).dropna().size < 2: 176 | logger.error( 177 | f'required info (allday + tz) cannot be found in ' 178 | f'{original if original else ticker} ...' 179 | ) 180 | return pd.Series(dtype=object) 181 | 182 | # Fill day session info if not provided 183 | if 'day' not in info: 184 | info['day'] = info['allday'] 185 | 186 | return info.dropna().apply(param.to_hours) 187 | 188 | if original: 189 | logger.error(f'exchange info cannot be found in {original} ...') 190 | return pd.Series(dtype=object) 191 | 192 | # Case 2: Use ticker to find exchange 193 | exch_name = market_info(ticker=ticker).get('exch', '') 194 | if not exch_name: return pd.Series(dtype=object) 195 | return exch_info( 196 | ticker=exch_name, 197 | original=ticker, 198 | config=exch, 199 | ) 200 | 201 | 202 | def market_info(ticker: str) -> pd.Series: 203 | """ 204 | Get info for given ticker 205 | 206 | Args: 207 | ticker: Bloomberg full ticker 208 | 209 | Returns: 210 | dict 211 | 212 | Examples: 213 | >>> market_info('SHCOMP Index').exch 214 | 'EquityChina' 215 | >>> market_info('SPY US Equity').exch 216 | 'EquityUS' 217 | >>> market_info('ICICIC=1 IS Equity').exch 218 | 'EquityFuturesIndia' 219 | >>> market_info('INT1 Curncy').exch 220 | 'CurrencyIndia' 221 | >>> market_info('CL1 Comdty').exch 222 | 'NYME' 223 | >>> incorrect_tickers = [ 224 | ... 'C XX Equity', 'XXX Comdty', 'Bond_ISIN Corp', 225 | ... 'XYZ Index', 'XYZ Curncy', 226 | ... 
] 227 | >>> pd.concat([market_info(_) for _ in incorrect_tickers]) 228 | Series([], dtype: object) 229 | """ 230 | t_info = ticker.split() 231 | exch_only = len(ticker) == 2 232 | if (not exch_only) and (t_info[-1] not in ['Equity', 'Comdty', 'Curncy', 'Index']): 233 | return pd.Series(dtype=object) 234 | 235 | a_info = asset_config(asset='Equity' if exch_only else t_info[-1]) 236 | 237 | # =========================================== # 238 | # Equity / Equity Futures # 239 | # =========================================== # 240 | 241 | if (t_info[-1] == 'Equity') or exch_only: 242 | is_fut = '==' if '=' in ticker else '!=' 243 | exch_sym = ticker if exch_only else t_info[-2] 244 | return take_first( 245 | data=a_info, 246 | query=f'exch_codes == "{exch_sym}" and is_fut {is_fut} True', 247 | ) 248 | 249 | # ================================================ # 250 | # Currency / Commodity / Index # 251 | # ================================================ # 252 | 253 | if t_info[0] in a_info.tickers.values: 254 | symbol = t_info[0] 255 | elif t_info[0][-1].isdigit(): 256 | end_idx = 2 if t_info[-2].isdigit() else 1 257 | symbol = t_info[0][:-end_idx].strip() 258 | # Special contracts 259 | if (symbol[:2] == 'UX') and (t_info[-1] == 'Index'): 260 | symbol = 'UX' 261 | else: 262 | symbol = t_info[0].split('+')[0] 263 | return take_first(data=a_info, query=f'tickers == "{symbol}"') 264 | 265 | 266 | def take_first(data: pd.DataFrame, query: str) -> pd.Series: 267 | """ 268 | Query and take the 1st row of result 269 | 270 | Args: 271 | data: pd.DataFrame 272 | query: query string 273 | 274 | Returns: 275 | pd.Series 276 | """ 277 | if data.empty: return pd.Series(dtype=object) 278 | res = data.query(query) 279 | if res.empty: return pd.Series(dtype=object) 280 | return res.reset_index(drop=True).iloc[0] 281 | 282 | 283 | def asset_config(asset: str) -> pd.DataFrame: 284 | """ 285 | Load info for given asset 286 | 287 | Args: 288 | asset: asset name 289 | 290 | Returns: 291 | pd.DataFrame 292 | """ 293 | cfg_files = param.config_files('assets') 294 | cache_cfg = f'{PKG_PATH}/markets/cached/{asset}_cfg.pkl' 295 | last_mod = max(map(files.modified_time, cfg_files)) 296 | if files.exists(cache_cfg) and files.modified_time(cache_cfg) > last_mod: 297 | return pd.read_pickle(cache_cfg) 298 | 299 | config = ( 300 | pd.concat([ 301 | explode( 302 | data=pd.DataFrame(param.load_yaml(cf).get(asset, [])), 303 | columns=ASSET_INFO[asset], 304 | ) 305 | for cf in cfg_files 306 | ], sort=False) 307 | .drop_duplicates(keep='last') 308 | .reset_index(drop=True) 309 | ) 310 | files.create_folder(cache_cfg, is_file=True) 311 | config.to_pickle(cache_cfg) 312 | return config 313 | 314 | 315 | def explode(data: pd.DataFrame, columns: list) -> pd.DataFrame: 316 | """ 317 | Explode data by columns 318 | 319 | Args: 320 | data: pd.DataFrame 321 | columns: columns to explode 322 | 323 | Returns: 324 | pd.DataFrame 325 | """ 326 | if data.empty: return pd.DataFrame() 327 | if len(columns) == 1: 328 | return data.explode(column=columns[0]) 329 | return explode( 330 | data=data.explode(column=columns[-1]), 331 | columns=columns[:-1], 332 | ) 333 | 334 | 335 | def ccy_pair(local, base='USD') -> CurrencyPair: 336 | """ 337 | Currency pair info 338 | 339 | Args: 340 | local: local currency 341 | base: base currency 342 | 343 | Returns: 344 | CurrencyPair 345 | 346 | Examples: 347 | >>> ccy_pair(local='HKD', base='USD') 348 | CurrencyPair(ticker='HKD Curncy', factor=1.0, power=1.0) 349 | >>> ccy_pair(local='GBp') 350 | 
CurrencyPair(ticker='GBP Curncy', factor=100.0, power=-1.0) 351 | >>> ccy_pair(local='USD', base='GBp') 352 | CurrencyPair(ticker='GBP Curncy', factor=0.01, power=1.0) 353 | >>> ccy_pair(local='XYZ', base='USD') 354 | CurrencyPair(ticker='', factor=1.0, power=1.0) 355 | >>> ccy_pair(local='GBP', base='GBp') 356 | CurrencyPair(ticker='', factor=0.01, power=1.0) 357 | >>> ccy_pair(local='GBp', base='GBP') 358 | CurrencyPair(ticker='', factor=100.0, power=1.0) 359 | """ 360 | ccy_param = param.load_config(cat='ccy') 361 | if f'{local}{base}' in ccy_param.index: 362 | info = ccy_param.loc[f'{local}{base}'].dropna() 363 | 364 | elif f'{base}{local}' in ccy_param.index: 365 | info = ccy_param.loc[f'{base}{local}'].dropna() 366 | info['factor'] = 1. / info.get('factor', 1.) 367 | info['power'] = -info.get('power', 1.) 368 | 369 | elif base.lower() == local.lower(): 370 | info = dict(ticker='') 371 | info['factor'] = 1. 372 | if base[-1].lower() == base[-1]: 373 | info['factor'] /= 100. 374 | if local[-1].lower() == local[-1]: 375 | info['factor'] *= 100. 376 | 377 | else: 378 | logger = logs.get_logger(ccy_pair, level='debug') 379 | logger.error(f'incorrect currency - local {local} / base {base}') 380 | return CurrencyPair(ticker='', factor=1., power=1.0) 381 | 382 | if 'factor' not in info: info['factor'] = 1. 383 | if 'power' not in info: info['power'] = 1. 384 | return CurrencyPair(**info) 385 | 386 | 387 | def market_timing(ticker, dt, timing='EOD', tz='local', **kwargs) -> str: 388 | """ 389 | Market close time for ticker 390 | 391 | Args: 392 | ticker: ticker name 393 | dt: date 394 | timing: [EOD (default), BOD] 395 | tz: conversion to timezone 396 | 397 | Returns: 398 | str: date & time 399 | 400 | Examples: 401 | >>> market_timing('7267 JT Equity', dt='2018-09-10') 402 | '2018-09-10 14:58' 403 | >>> market_timing('7267 JT Equity', dt='2018-09-10', tz=timezone.TimeZone.NY) 404 | '2018-09-10 01:58:00-04:00' 405 | >>> market_timing('7267 JT Equity', dt='2018-01-10', tz='NY') 406 | '2018-01-10 00:58:00-05:00' 407 | >>> market_timing('7267 JT Equity', dt='2018-09-10', tz='SPX Index') 408 | '2018-09-10 01:58:00-04:00' 409 | >>> market_timing('8035 JT Equity', dt='2018-09-10', timing='BOD') 410 | '2018-09-10 09:01' 411 | >>> market_timing('Z 1 Index', dt='2018-09-10', timing='FINISHED') 412 | '2018-09-10 21:00' 413 | >>> market_timing('TESTTICKER Corp', dt='2018-09-10') 414 | '' 415 | """ 416 | logger = logs.get_logger(market_timing, level='debug') 417 | exch = pd.Series(exch_info(ticker=ticker, **kwargs)) 418 | if any(req not in exch.index for req in ['tz', 'allday', 'day']): 419 | logger.error(f'required exchange info cannot be found in {ticker} ...') 420 | return '' 421 | 422 | mkt_time = { 423 | 'BOD': exch.day[0], 'FINISHED': exch.allday[-1] 424 | }.get(timing, exch.day[-1]) 425 | 426 | cur_dt = pd.Timestamp(str(dt)).strftime('%Y-%m-%d') 427 | if tz == 'local': return f'{cur_dt} {mkt_time}' 428 | 429 | return timezone.tz_convert(f'{cur_dt} {mkt_time}', to_tz=tz, from_tz=exch.tz) 430 | -------------------------------------------------------------------------------- /xbbg/core/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alpha-xone/xbbg/266d68a53bda61745191e1a86b0f1f32b33ae082/xbbg/core/__init__.py -------------------------------------------------------------------------------- /xbbg/core/conn.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import os 3 | 
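# Editor's note (not part of the original file): on Python 3.8 for Windows,
# the blpapi C++ DLL directory must be registered explicitly, which the
# try-block below handles. A typical setup, with an illustrative path, is:
#
#     os.environ['BBG_DLL'] = 'C:/blp/DAPI'  # set before importing xbbg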
4 | try:
5 |     ver = sys.version_info
6 |     if f'{ver.major}.{ver.minor}' == '3.8':
7 |         dll_path = os.environ.get('BBG_DLL', 'C:/blp/DAPI')
8 |         if os.path.exists(dll_path):
9 |             with os.add_dll_directory(dll_path):
10 |                 import blpapi
11 |         else:
12 |             raise ImportError(
13 |                 'Please set the BBG_DLL environment variable to the folder containing the blpapi DLL'
14 |             )
15 |     else:
16 |         import blpapi
17 | except (ImportError, AttributeError):
18 |     import pytest
19 |     blpapi = pytest.importorskip('blpapi')
20 | 
21 | from xbbg.io import logs
22 | 
23 | _CON_SYM_ = '_xcon_'
24 | _PORT_ = 8194
25 | 
26 | 
27 | def connect(max_attempt=3, auto_restart=True, **kwargs) -> blpapi.session.Session:
28 |     """
29 |     Use alternative method to connect to blpapi. If a session object is passed, arguments
30 |     max_attempt and auto_restart will be ignored.
31 | 
32 |     Refer to the blpapi example for the full list of available authentication methods:
33 |     https://github.com/msitt/blpapi-python/blob/master/examples/ConnectionAndAuthExample.py
34 |     """
35 |     if isinstance(kwargs.get('sess', None), blpapi.session.Session):
36 |         return bbg_session(sess=kwargs['sess'])
37 | 
38 |     sess_opts = blpapi.SessionOptions()
39 |     sess_opts.setNumStartAttempts(numStartAttempts=max_attempt)
40 |     sess_opts.setAutoRestartOnDisconnection(autoRestart=auto_restart)
41 | 
42 |     if isinstance(kwargs.get('auth_method', None), str):
43 |         auth_method = kwargs['auth_method']
44 |         auth = None
45 | 
46 |         if auth_method == 'user':
47 |             user = blpapi.AuthUser.createWithLogonName()
48 |             auth = blpapi.AuthOptions.createWithUser(user=user)
49 |         elif auth_method == 'app':
50 |             auth = blpapi.AuthOptions.createWithApp(appName=kwargs['app_name'])
51 |         elif auth_method == 'userapp':
52 |             user = blpapi.AuthUser.createWithLogonName()
53 |             auth = blpapi.AuthOptions.createWithUserAndApp(user=user, appName=kwargs['app_name'])
54 |         elif auth_method == 'dir':
55 |             user = blpapi.AuthUser.createWithActiveDirectoryProperty(propertyName=kwargs['dir_property'])
56 |             auth = blpapi.AuthOptions.createWithUser(user=user)
57 |         elif auth_method == 'manual':
58 |             user = blpapi.AuthUser.createWithManualOptions(userId=kwargs['user_id'], ipAddress=kwargs['ip_address'])
59 |             auth = blpapi.AuthOptions.createWithUserAndApp(user=user, appName=kwargs['app_name'])
60 |         else:
61 |             raise ValueError(
62 |                 'Received an invalid value for auth_method. '
63 |                 'auth_method must be one of the following: user, app, userapp, dir, manual'
64 |             )
65 | 
66 |         sess_opts.setSessionIdentityOptions(authOptions=auth)
67 | 
68 |     if isinstance(kwargs.get('server_host', None), str):
69 |         sess_opts.setServerHost(serverHost=kwargs['server_host'])
70 | 
71 |     if isinstance(kwargs.get('server_port', None), int):
72 |         sess_opts.setServerPort(serverPort=kwargs['server_port'])
73 | 
74 |     if isinstance(kwargs.get('tls_options', None), blpapi.sessionoptions.TlsOptions):
75 |         sess_opts.setTlsOptions(tlsOptions=kwargs['tls_options'])
76 | 
77 |     return bbg_session(sess=blpapi.Session(sess_opts))
78 | 
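# Editor's example (not part of the original file): a minimal sketch of opening
# an authenticated session via `connect`. The application name below is an
# illustrative placeholder, not a real endpoint.
def _example_connect() -> blpapi.session.Session:
    """Connect with application-name auth (illustrative helper, safe to remove)."""
    return connect(
        auth_method='app',
        app_name='myFirm:myApp',   # placeholder application name
        server_host='localhost',   # local Bloomberg endpoint
        server_port=_PORT_,        # default Bloomberg port (8194)
    )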
79 | 
80 | def connect_bbg(**kwargs) -> blpapi.session.Session:
81 |     """
82 |     Create Bloomberg session and make connection
83 |     """
84 |     logger = logs.get_logger(connect_bbg, **kwargs)
85 | 
86 |     if isinstance(kwargs.get('sess', None), blpapi.session.Session):
87 |         session = kwargs['sess']
88 |         logger.debug(f'Using Bloomberg session {session} ...')
89 |     else:
90 |         sess_opts = blpapi.SessionOptions()
91 |         sess_opts.setServerHost('localhost')
92 |         sess_opts.setServerPort(kwargs.get('port', _PORT_))
93 |         session = blpapi.Session(sess_opts)
94 | 
95 |     logger.debug('Connecting to Bloomberg ...')
96 |     if session.start(): return session
97 |     else: raise ConnectionError('Cannot connect to Bloomberg')
98 | 
99 | 
100 | def bbg_session(**kwargs) -> blpapi.session.Session:
101 |     """
102 |     Bloomberg session - initiate if not given
103 | 
104 |     Args:
105 |         **kwargs:
106 |             port: port number (default 8194)
107 |             restart: whether to restart session
108 | 
109 |     Returns:
110 |         Bloomberg session instance
111 |     """
112 |     port = kwargs.get('port', _PORT_)
113 |     con_sym = f'{_CON_SYM_}//{port}'
114 | 
115 |     if con_sym in globals():
116 |         if getattr(globals()[con_sym], '_Session__handle', None) is None:
117 |             del globals()[con_sym]
118 | 
119 |     if con_sym not in globals():
120 |         globals()[con_sym] = connect_bbg(**kwargs)
121 | 
122 |     return globals()[con_sym]
123 | 
124 | 
125 | def bbg_service(service: str, **kwargs) -> blpapi.service.Service:
126 |     """
127 |     Initiate service
128 | 
129 |     Args:
130 |         service: service name
131 |         **kwargs:
132 |             port: port number
133 | 
134 |     Returns:
135 |         Bloomberg service
136 |     """
137 |     logger = logs.get_logger(bbg_service, **kwargs)
138 | 
139 |     port = kwargs.get('port', _PORT_)
140 |     serv_sym = f'{_CON_SYM_}/{port}{service}'
141 | 
142 |     log_info = f'Initiating service {service} ...'
143 |     if serv_sym in globals():
144 |         if getattr(globals()[serv_sym], '_Service__handle', None) is None:
145 |             log_info = f'Restarting service {service} ...'
146 | del globals()[serv_sym] 147 | 148 | if serv_sym not in globals(): 149 | logger.debug(log_info) 150 | bbg_session(**kwargs).openService(service) 151 | globals()[serv_sym] = bbg_session(**kwargs).getService(service) 152 | 153 | return globals()[serv_sym] 154 | 155 | 156 | def event_types() -> dict: 157 | """ 158 | Bloomberg event types 159 | """ 160 | return { 161 | getattr(blpapi.Event, ev_typ): ev_typ 162 | for ev_typ in dir(blpapi.Event) if ev_typ.isupper() 163 | } 164 | 165 | 166 | def send_request(request: blpapi.request.Request, **kwargs): 167 | """ 168 | Send request to Bloomberg session 169 | 170 | Args: 171 | request: Bloomberg request 172 | """ 173 | logger = logs.get_logger(send_request, **kwargs) 174 | try: 175 | bbg_session(**kwargs).sendRequest(request=request) 176 | except blpapi.InvalidStateException as e: 177 | logger.exception(e) 178 | 179 | # Delete existing connection and send again 180 | port = kwargs.get('port', _PORT_) 181 | con_sym = f'{_CON_SYM_}//{port}' 182 | if con_sym in globals(): del globals()[con_sym] 183 | 184 | # No error handler for 2nd trial 185 | bbg_session(**kwargs).sendRequest(request=request) 186 | -------------------------------------------------------------------------------- /xbbg/core/intervals.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import numpy as np 3 | 4 | from collections import namedtuple 5 | 6 | from xbbg import const 7 | from xbbg.io import logs, param 8 | 9 | Session = namedtuple('Session', ['start_time', 'end_time']) 10 | SessNA = Session(None, None) 11 | 12 | 13 | def get_interval(ticker, session, **kwargs) -> Session: 14 | """ 15 | Get interval from defined session 16 | 17 | Args: 18 | ticker: ticker 19 | session: session 20 | 21 | Returns: 22 | Session of start_time and end_time 23 | 24 | Examples: 25 | >>> get_interval('005490 KS Equity', 'day_open_30') 26 | Session(start_time='09:00', end_time='09:30') 27 | >>> get_interval('005490 KS Equity', 'day_normal_30_20') 28 | Session(start_time='09:31', end_time='15:00') 29 | >>> get_interval('005490 KS Equity', 'day_close_20') 30 | Session(start_time='15:01', end_time='15:20') 31 | >>> get_interval('700 HK Equity', 'am_open_30') 32 | Session(start_time='09:30', end_time='10:00') 33 | >>> get_interval('700 HK Equity', 'am_normal_30_30') 34 | Session(start_time='10:01', end_time='11:30') 35 | >>> get_interval('700 HK Equity', 'am_close_30') 36 | Session(start_time='11:31', end_time='12:00') 37 | >>> get_interval('ES1 Index', 'day_exact_2130_2230') 38 | Session(start_time=None, end_time=None) 39 | >>> get_interval('ES1 Index', 'allday_exact_2130_2230') 40 | Session(start_time='21:30', end_time='22:30') 41 | >>> get_interval('ES1 Index', 'allday_exact_2130_0230') 42 | Session(start_time='21:30', end_time='02:30') 43 | >>> get_interval('AMLP US', 'day_open_30') 44 | Session(start_time=None, end_time=None) 45 | >>> get_interval('7974 JP Equity', 'day_normal_180_300') is SessNA 46 | True 47 | >>> get_interval('Z 1 Index', 'allday_normal_30_30') 48 | Session(start_time='01:31', end_time='20:30') 49 | >>> get_interval('GBP Curncy', 'day') 50 | Session(start_time='17:02', end_time='17:00') 51 | """ 52 | if '_' not in session: 53 | session = f'{session}_normal_0_0' 54 | interval = Intervals(ticker=ticker, **kwargs) 55 | ss_info = session.split('_') 56 | return getattr(interval, f'market_{ss_info.pop(1)}')(*ss_info) 57 | 58 | 59 | def shift_time(start_time, mins) -> str: 60 | """ 61 | Shift start time by mins 62 | 63 | 
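    Example (editor's illustration, not in the original docstring):
    `shift_time('09:30', 30)` returns '10:00'; `shift_time('10:00', -15)` returns '09:45'.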
    Args:
64 |         start_time: start time in terms of HH:MM string
65 |         mins: number of minutes (+ / -)
66 | 
67 |     Returns:
68 |         end time in terms of HH:MM string
69 |     """
70 |     s_time = pd.Timestamp(start_time)
71 |     e_time = s_time + np.sign(mins) * pd.Timedelta(f'00:{abs(mins)}:00')
72 |     return e_time.strftime('%H:%M')
73 | 
74 | 
75 | class Intervals(object):
76 | 
77 |     def __init__(self, ticker, **kwargs):
78 |         """
79 |         Args:
80 |             ticker: ticker
81 |         """
82 |         self.ticker = ticker
83 |         self.exch = const.exch_info(ticker=ticker, **kwargs)
84 | 
85 |     def market_open(self, session, mins) -> Session:
86 |         """
87 |         Time intervals for market open
88 | 
89 |         Args:
90 |             session: [allday, day, am, pm, night]
91 |             mins: minutes after open
92 | 
93 |         Returns:
94 |             Session of start_time and end_time
95 |         """
96 |         if session not in self.exch: return SessNA
97 |         start_time = self.exch[session][0]
98 |         return Session(start_time, shift_time(start_time, int(mins)))
99 | 
100 |     def market_close(self, session, mins) -> Session:
101 |         """
102 |         Time intervals for market close
103 | 
104 |         Args:
105 |             session: [allday, day, am, pm, night]
106 |             mins: minutes before close
107 | 
108 |         Returns:
109 |             Session of start_time and end_time
110 |         """
111 |         if session not in self.exch: return SessNA
112 |         end_time = self.exch[session][-1]
113 |         return Session(shift_time(end_time, -int(mins) + 1), end_time)
114 | 
115 |     def market_normal(self, session, after_open, before_close) -> Session:
116 |         """
117 |         Time intervals between market open and close
118 | 
119 |         Args:
120 |             session: [allday, day, am, pm, night]
121 |             after_open: mins after open
122 |             before_close: mins before close
123 | 
124 |         Returns:
125 |             Session of start_time and end_time
126 |         """
127 |         logger = logs.get_logger(self.market_normal)
128 | 
129 |         if session not in self.exch: return SessNA
130 |         ss = self.exch[session]
131 | 
132 |         s_time = shift_time(ss[0], int(after_open) + 1)
133 |         e_time = shift_time(ss[-1], -int(before_close))
134 | 
135 |         request_cross = pd.Timestamp(s_time) >= pd.Timestamp(e_time)
136 |         session_cross = pd.Timestamp(ss[0]) >= pd.Timestamp(ss[1])
137 |         if request_cross and (not session_cross):
138 |             logger.warning(f'end time {e_time} is earlier than {s_time} ...')
139 |             return SessNA
140 | 
141 |         return Session(s_time, e_time)
142 | 
143 |     def market_exact(self, session, start_time: str, end_time: str) -> Session:
144 |         """
145 |         Explicitly specify start time and end time
146 | 
147 |         Args:
148 |             session: predefined session
149 |             start_time: start time in terms of HHMM string
150 |             end_time: end time in terms of HHMM string
151 | 
152 |         Returns:
153 |             Session of start_time and end_time
154 |         """
155 |         if session not in self.exch: return SessNA
156 |         ss = self.exch[session]
157 | 
158 |         same_day = ss[0] < ss[-1]
159 | 
160 |         if not start_time: s_time = ss[0]
161 |         else:
162 |             s_time = param.to_hours(int(start_time))
163 |             if same_day: s_time = max(s_time, ss[0])
164 | 
165 |         if not end_time: e_time = ss[-1]
166 |         else:
167 |             e_time = param.to_hours(int(end_time))
168 |             if same_day: e_time = min(e_time, ss[-1])
169 | 
170 |         if same_day and (s_time > e_time): return SessNA
171 |         return Session(start_time=s_time, end_time=e_time)
172 | 
--------------------------------------------------------------------------------
/xbbg/core/overrides.py:
--------------------------------------------------------------------------------
1 | # Set os.environ['BBG_ROOT'] = '/your/bbg/data/path'
2 | # to enable xbbg saving data locally
3 | BBG_ROOT = 'BBG_ROOT'
4 | 
# Keyword arguments reserved for xbbg itself; these are stripped out before a
# Bloomberg request is built (see `proc_ovrds` / `proc_elms` below).
5 | PRSV_COLS = [
6 |     'raw',
'has_date', 'cache', 'cache_days', 'col_maps', 7 | 'keep_one', 'price_only', 'port', 'log', 'timeout', 'sess', 8 | ] 9 | 10 | ELEMENTS = [ 11 | 'periodicityAdjustment', 'periodicitySelection', 'currency', 12 | 'nonTradingDayFillOption', 'nonTradingDayFillMethod', 13 | 'maxDataPoints', 'returnEIDs', 'returnRelativeDate', 14 | 'overrideOption', 'pricingOption', 15 | 'adjustmentNormal', 'adjustmentAbnormal', 'adjustmentSplit', 16 | 'adjustmentFollowDPDF', 'calendarCodeOverride', 17 | ] 18 | 19 | ELEM_KEYS = dict( 20 | PeriodAdj='periodicityAdjustment', PerAdj='periodicityAdjustment', 21 | Period='periodicitySelection', Per='periodicitySelection', 22 | Currency='currency', Curr='currency', FX='currency', 23 | Days='nonTradingDayFillOption', Fill='nonTradingDayFillMethod', Points='maxDataPoints', 24 | # 'returnEIDs', 'returnRelativeDate', 25 | Quote='overrideOption', QuoteType='pricingOption', QtTyp='pricingOption', 26 | CshAdjNormal='adjustmentNormal', CshAdjAbnormal='adjustmentAbnormal', 27 | CapChg='adjustmentSplit', UseDPDF='adjustmentFollowDPDF', 28 | Calendar='calendarCodeOverride', 29 | ) 30 | 31 | ELEM_VALS = dict( 32 | periodicityAdjustment=dict( 33 | A='ACTUAL', C='CALENDAR', F='FISCAL', 34 | ), 35 | periodicitySelection=dict( 36 | D='DAILY', W='WEEKLY', M='MONTHLY', Q='QUARTERLY', S='SEMI_ANNUALLY', Y='YEARLY' 37 | ), 38 | nonTradingDayFillOption=dict( 39 | N='NON_TRADING_WEEKDAYS', W='NON_TRADING_WEEKDAYS', Weekdays='NON_TRADING_WEEKDAYS', 40 | C='ALL_CALENDAR_DAYS', A='ALL_CALENDAR_DAYS', All='ALL_CALENDAR_DAYS', 41 | T='ACTIVE_DAYS_ONLY', Trading='ACTIVE_DAYS_ONLY', 42 | ), 43 | nonTradingDayFillMethod=dict( 44 | C='PREVIOUS_VALUE', P='PREVIOUS_VALUE', Previous='PREVIOUS_VALUE', 45 | B='NIL_VALUE', Blank='NIL_VALUE', NA='NIL_VALUE', 46 | ), 47 | overrideOption=dict( 48 | A='OVERRIDE_OPTION_GPA', G='OVERRIDE_OPTION_GPA', Average='OVERRIDE_OPTION_GPA', 49 | C='OVERRIDE_OPTION_CLOSE', Close='OVERRIDE_OPTION_CLOSE', 50 | ), 51 | pricingOption=dict( 52 | P='PRICING_OPTION_PRICE', Price='PRICING_OPTION_PRICE', 53 | Y='PRICING_OPTION_YIELD', Yield='PRICING_OPTION_YIELD', 54 | ), 55 | ) 56 | 57 | 58 | def proc_ovrds(**kwargs): 59 | """ 60 | Bloomberg overrides 61 | 62 | Args: 63 | **kwargs: overrides 64 | 65 | Returns: 66 | list of tuples 67 | 68 | Examples: 69 | >>> list(proc_ovrds(DVD_Start_Dt='20180101')) 70 | [('DVD_Start_Dt', '20180101')] 71 | >>> list(proc_ovrds(DVD_Start_Dt='20180101', cache=True, has_date=True)) 72 | [('DVD_Start_Dt', '20180101')] 73 | """ 74 | excluded = list(ELEM_KEYS.keys()) + list(ELEM_KEYS.values()) + PRSV_COLS 75 | for k, v in kwargs.items(): 76 | if k not in excluded: 77 | yield k, v 78 | 79 | 80 | def proc_elms(**kwargs) -> list: 81 | """ 82 | Bloomberg overrides for elements 83 | 84 | Args: 85 | **kwargs: overrides 86 | 87 | Returns: 88 | list of tuples 89 | 90 | Examples: 91 | >>> list(proc_elms(PerAdj='A', Per='W')) 92 | [('periodicityAdjustment', 'ACTUAL'), ('periodicitySelection', 'WEEKLY')] 93 | >>> list(proc_elms(Days='A', Fill='B')) 94 | [('nonTradingDayFillOption', 'ALL_CALENDAR_DAYS'), ('nonTradingDayFillMethod', 'NIL_VALUE')] 95 | >>> list(proc_elms(CshAdjNormal=False, CshAdjAbnormal=True)) 96 | [('adjustmentNormal', False), ('adjustmentAbnormal', True)] 97 | >>> list(proc_elms(Per='W', Quote='Average', start_date='2018-01-10')) 98 | [('periodicitySelection', 'WEEKLY'), ('overrideOption', 'OVERRIDE_OPTION_GPA')] 99 | >>> list(proc_elms(QuoteType='Y')) 100 | [('pricingOption', 'PRICING_OPTION_YIELD')] 101 | >>> list(proc_elms(QuoteType='Y', 
cache=True))
102 |     [('pricingOption', 'PRICING_OPTION_YIELD')]
103 |     """
104 |     included = list(ELEM_KEYS.keys()) + list(ELEM_KEYS.values())
105 |     for k, v in kwargs.items():
106 |         if (k in included) and (k not in PRSV_COLS):
107 |             yield ELEM_KEYS.get(k, k), \
108 |                 ELEM_VALS.get(ELEM_KEYS.get(k, k), dict()).get(v, v)
109 | 
110 | 
111 | def info_qry(tickers, flds) -> str:
112 |     """
113 |     Logging info for given tickers and fields
114 | 
115 |     Args:
116 |         tickers: tickers
117 |         flds: fields
118 | 
119 |     Returns:
120 |         str
121 | 
122 |     Examples:
123 |         >>> print(info_qry(
124 |         ...     tickers=['NVDA US Equity'], flds=['Name', 'Security_Name']
125 |         ... ))
126 |         tickers: ['NVDA US Equity']
127 |         fields: ['Name', 'Security_Name']
128 |     """
129 |     full_list = '\n'.join([f'tickers: {tickers[:8]}'] + [
130 |         f'         {tickers[n:(n + 8)]}' for n in range(8, len(tickers), 8)
131 |     ])
132 |     return f'{full_list}\nfields: {flds}'
133 | 
--------------------------------------------------------------------------------
/xbbg/core/process.py:
--------------------------------------------------------------------------------
  1 | import pandas as pd
  2 | import numpy as np
  3 | 
  4 | import pytest
  5 | try: import blpapi
  6 | except ImportError: blpapi = pytest.importorskip('blpapi')
  7 | 
  8 | from itertools import starmap
  9 | from collections import OrderedDict
 10 | 
 11 | from xbbg import const
 12 | from xbbg.core.timezone import DEFAULT_TZ
 13 | from xbbg.core import intervals, overrides, conn
 14 | 
 15 | RESPONSE_ERROR = blpapi.Name("responseError")
 16 | SESSION_TERMINATED = blpapi.Name("SessionTerminated")
 17 | CATEGORY = blpapi.Name("category")
 18 | MESSAGE = blpapi.Name("message")
 19 | BAR_DATA = blpapi.Name('barData')
 20 | BAR_TICK = blpapi.Name('barTickData')
 21 | TICK_DATA = blpapi.Name('tickData')
 22 | 
 23 | 
 24 | def create_request(
 25 |         service: str,
 26 |         request: str,
 27 |         settings: list = None,
 28 |         ovrds: list = None,
 29 |         append: dict = None,
 30 |         **kwargs,
 31 | ) -> blpapi.request.Request:
 32 |     """
 33 |     Create request for query
 34 | 
 35 |     Args:
 36 |         service: service name
 37 |         request: request name
 38 |         settings: list of settings
 39 |         ovrds: list of overrides
 40 |         append: info to be appended to request directly
 41 |         kwargs: other overrides
 42 | 
 43 |     Returns:
 44 |         Bloomberg request
 45 |     """
 46 |     srv = conn.bbg_service(service=service, **kwargs)
 47 |     req = srv.createRequest(request)
 48 | 
 49 |     list(starmap(req.set, settings if settings else []))
 50 |     if ovrds:
 51 |         ovrd = req.getElement('overrides')
 52 |         for fld, val in ovrds:
 53 |             item = ovrd.appendElement()
 54 |             item.setElement('fieldId', fld)
 55 |             item.setElement('value', val)
 56 |     if append:
 57 |         for key, val in append.items():
 58 |             vals = [val] if isinstance(val, str) else val
 59 |             for v in vals: req.append(key, v)
 60 | 
 61 |     return req
 62 | 
 63 | 
 64 | def init_request(request: blpapi.request.Request, tickers, flds, **kwargs):
 65 |     """
 66 |     Initiate Bloomberg request instance
 67 | 
 68 |     Args:
 69 |         request: Bloomberg request to initiate and append
 70 |         tickers: tickers
 71 |         flds: fields
 72 |         **kwargs: overrides and other request elements
 73 |     """
 74 |     while conn.bbg_session(**kwargs).tryNextEvent(): pass
 75 | 
 76 |     if isinstance(tickers, str): tickers = [tickers]
 77 |     for ticker in tickers: request.append('securities', ticker)
 78 | 
 79 |     if isinstance(flds, str): flds = [flds]
 80 |     for fld in flds: request.append('fields', fld)
 81 | 
 82 |     adjust = kwargs.pop('adjust', None)
 83 |     if isinstance(adjust, str) and adjust:
 84 |         if adjust == 'all':
 85 |             kwargs['CshAdjNormal'] = True
 86 | 
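            # Shorthand for price adjustments, inferred from these branches:
            # 'all' turns on normal + abnormal cash dividends and capital
            # changes; otherwise 'normal' / 'abn' toggle the dividend flags
            # ('dvd' turns on both) and 'split' toggles capital changes.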
kwargs['CshAdjAbnormal'] = True
 87 |             kwargs['CapChg'] = True
 88 |         else:
 89 |             kwargs['CshAdjNormal'] = 'normal' in adjust or 'dvd' in adjust
 90 |             kwargs['CshAdjAbnormal'] = 'abn' in adjust or 'dvd' in adjust
 91 |             kwargs['CapChg'] = 'split' in adjust
 92 | 
 93 |     if 'start_date' in kwargs: request.set('startDate', kwargs.pop('start_date'))
 94 |     if 'end_date' in kwargs: request.set('endDate', kwargs.pop('end_date'))
 95 | 
 96 |     for elem_name, elem_val in overrides.proc_elms(**kwargs):
 97 |         request.set(elem_name, elem_val)
 98 | 
 99 |     ovrds = request.getElement('overrides')
100 |     for ovrd_fld, ovrd_val in overrides.proc_ovrds(**kwargs):
101 |         ovrd = ovrds.appendElement()
102 |         ovrd.setElement('fieldId', ovrd_fld)
103 |         ovrd.setElement('value', ovrd_val)
104 | 
105 | 
106 | def time_range(dt, ticker, session='allday', tz='UTC', **kwargs) -> intervals.Session:
107 |     """
108 |     Time range in UTC (for intraday bar) or other timezone
109 | 
110 |     Args:
111 |         dt: date
112 |         ticker: ticker
113 |         session: market session defined in xbbg/markets/exch.yml
114 |         tz: timezone
115 | 
116 |     Returns:
117 |         intervals.Session
118 |     """
119 |     ss = intervals.get_interval(ticker=ticker, session=session, **kwargs)
120 |     ex_info = const.exch_info(ticker=ticker, **kwargs)
121 |     cur_dt = pd.Timestamp(dt).strftime('%Y-%m-%d')
122 |     time_fmt = '%Y-%m-%dT%H:%M:%S'
123 |     time_idx = (
124 |         pd.DatetimeIndex([
125 |             f'{cur_dt} {ss.start_time}',
126 |             f'{cur_dt} {ss.end_time}'],
127 |         )
128 |         .tz_localize(ex_info.tz)
129 |         .tz_convert(DEFAULT_TZ)
130 |         .tz_convert(tz)
131 |     )
132 |     if time_idx[0] > time_idx[1]: time_idx -= pd.TimedeltaIndex(['1D', '0D'])
133 |     return intervals.Session(time_idx[0].strftime(time_fmt), time_idx[1].strftime(time_fmt))
134 | 
135 | 
136 | def rec_events(func, **kwargs):
137 |     """
138 |     Receive events from Bloomberg session
139 | 
140 |     Args:
141 |         func: must be generator function
142 |         **kwargs: arguments for input function
143 | 
144 |     Yields:
145 |         Elements of Bloomberg responses
146 |     """
147 |     timeout_counts = 0
148 |     responses = [blpapi.Event.PARTIAL_RESPONSE, blpapi.Event.RESPONSE]
149 |     timeout = kwargs.pop('timeout', 500)
150 |     while True:
151 |         ev = conn.bbg_session(**kwargs).nextEvent(timeout=timeout)
152 |         if ev.eventType() in responses:
153 |             for msg in ev:
154 |                 for r in func(msg=msg, **kwargs):
155 |                     yield r
156 |             if ev.eventType() == blpapi.Event.RESPONSE:
157 |                 break
158 |         elif ev.eventType() == blpapi.Event.TIMEOUT:
159 |             timeout_counts += 1
160 |             if timeout_counts > 20:
161 |                 break
162 |         else:
163 |             for _ in ev:
164 |                 if getattr(ev, 'messageType', lambda: None)() \
165 |                         == SESSION_TERMINATED: break
166 | 
167 | 
168 | def process_ref(msg: blpapi.message.Message, **kwargs) -> dict:
169 |     """
170 |     Process reference messages from Bloomberg
171 | 
172 |     Args:
173 |         msg: Bloomberg reference data messages from events
174 | 
175 |     Yields:
176 |         OrderedDict
177 |     """
178 |     kwargs.pop('(@_<)', None)
179 |     data = None
180 |     if msg.hasElement('securityData'):
181 |         data = msg.getElement('securityData')
182 |     elif msg.hasElement('data') and \
183 |             msg.getElement('data').hasElement('securityData'):
184 |         data = msg.getElement('data').getElement('securityData')
185 |     if not data: return iter([])
186 | 
187 |     for sec in data.values():
188 |         ticker = sec.getElement('security').getValue()
189 |         for fld in sec.getElement('fieldData').elements():
190 |             info = [('ticker', ticker), ('field', str(fld.name()))]
191 |             if fld.isArray():
192 |                 for item in fld.values():
193 |                     yield OrderedDict(info + [
194 |                         (
195 | 
str(elem.name()),
196 |                             None if elem.isNull() else elem.getValue()
197 |                         )
198 |                         for elem in item.elements()
199 |                     ])
200 |             else:
201 |                 yield OrderedDict(info + [
202 |                     ('value', None if fld.isNull() else fld.getValue()),
203 |                 ])
204 | 
205 | 
206 | def process_hist(msg: blpapi.message.Message, **kwargs) -> dict:
207 |     """
208 |     Process historical data messages from Bloomberg
209 | 
210 |     Args:
211 |         msg: Bloomberg historical data messages from events
212 | 
213 |     Yields:
214 |         OrderedDict
215 |     """
216 |     kwargs.pop('(>_<)', None)
217 |     if not msg.hasElement('securityData'): return {}
218 |     ticker = msg.getElement('securityData').getElement('security').getValue()
219 |     for val in msg.getElement('securityData').getElement('fieldData').values():
220 |         if val.hasElement('date'):
221 |             yield OrderedDict([('ticker', ticker)] + [
222 |                 (str(elem.name()), elem.getValue()) for elem in val.elements()
223 |             ])
224 | 
225 | 
226 | def process_bar(msg: blpapi.message.Message, typ='bar', **kwargs) -> OrderedDict:
227 |     """
228 |     Process Bloomberg intraday bar messages
229 | 
230 |     Args:
231 |         msg: Bloomberg intraday bar messages from events
232 |         typ: `bar` or `tick`
233 | 
234 |     Yields:
235 |         OrderedDict
236 |     """
237 |     kwargs.pop('(#_#)', None)
238 |     check_error(msg=msg)
239 |     if typ[0].lower() == 't':
240 |         lvls = [TICK_DATA, TICK_DATA]
241 |     else:
242 |         lvls = [BAR_DATA, BAR_TICK]
243 | 
244 |     if msg.hasElement(lvls[0]):
245 |         for bar in msg.getElement(lvls[0]).getElement(lvls[1]).values():
246 |             yield OrderedDict([
247 |                 (str(elem.name()), elem.getValue())
248 |                 for elem in bar.elements()
249 |             ])
250 | 
251 | 
252 | def check_error(msg):
253 |     """
254 |     Check error in message
255 |     """
256 |     if msg.hasElement(RESPONSE_ERROR):
257 |         error = msg.getElement(RESPONSE_ERROR)
258 |         raise ValueError(
259 |             f'[Intraday Bar Error] '
260 |             f'{error.getElementAsString(CATEGORY)}: '
261 |             f'{error.getElementAsString(MESSAGE)}'
262 |         )
263 | 
264 | 
265 | def elem_value(element: conn.blpapi.Element):
266 |     """
267 |     Get value from element
268 | 
269 |     Args:
270 |         element: Bloomberg element
271 | 
272 |     Returns:
273 |         value
274 |     """
275 |     if element.isNull(): return None
276 |     try: value = element.getValue()
277 |     except ValueError: return None
278 |     if isinstance(value, np.bool_): return bool(value)
279 |     if isinstance(value, conn.blpapi.name.Name): return str(value)
280 |     return value
281 | 
282 | 
283 | def earning_pct(data: pd.DataFrame, yr):
284 |     """
285 |     Calculate % of earnings by year
286 |     """
287 |     pct = f'{yr}_pct'
288 |     data.loc[:, pct] = np.nan
289 | 
290 |     # Calculate level 1 percentage
291 |     data.loc[data.level == 1, pct] = \
292 |         100 * data.loc[data.level == 1, yr] / data.loc[data.level == 1, yr].sum()
293 | 
294 |     # Calculate level 2 percentage (higher levels will be ignored)
295 |     sub_pct = []
296 |     for r, snap in data.reset_index()[::-1].iterrows():
297 |         if snap.level > 2: continue
298 |         if snap.level == 1:
299 |             if len(sub_pct) == 0: continue
300 |             data.iloc[sub_pct, data.columns.get_loc(pct)] = \
301 |                 100 * data[yr].iloc[sub_pct] / data[yr].iloc[sub_pct].sum()
302 |             sub_pct = []
303 |         if snap.level == 2: sub_pct.append(r)
304 | 
305 | 
306 | def check_current(dt, logger, **kwargs) -> bool:
307 |     """
308 |     Check current time against T-1
309 |     """
310 |     t_1 = pd.Timestamp('today').date() - pd.Timedelta('1D')
311 |     whole_day = pd.Timestamp(dt).date() < t_1
312 |     if (not whole_day) and kwargs.get('batch', False):
313 |         logger.warning(f'Querying date {t_1} is too close, ignoring download ...')
314 |         return False
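    # Reaching here means dt is at least two days old (or this is not a
    # batch run), so the intraday session is treated as complete.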
315 |     return True
316 | 
--------------------------------------------------------------------------------
/xbbg/core/timezone.py:
--------------------------------------------------------------------------------
  1 | import pandas as pd
  2 | 
  3 | import time
  4 | import pytz
  5 | 
  6 | from xbbg.io import logs
  7 | 
  8 | DEFAULT_TZ = pytz.FixedOffset(-time.timezone / 60)
  9 | 
 10 | 
 11 | def get_tz(tz) -> str:
 12 |     """
 13 |     Convert tz from ticker / shorthands to timezone
 14 | 
 15 |     Args:
 16 |         tz: ticker or timezone shorthands
 17 | 
 18 |     Returns:
 19 |         str: Python timezone
 20 | 
 21 |     Examples:
 22 |         >>> get_tz('NY')
 23 |         'America/New_York'
 24 |         >>> get_tz(TimeZone.NY)
 25 |         'America/New_York'
 26 |         >>> get_tz('BHP AU Equity')
 27 |         'Australia/Sydney'
 28 |     """
 29 |     from xbbg.const import exch_info
 30 | 
 31 |     if tz is None: return DEFAULT_TZ
 32 | 
 33 |     to_tz = tz
 34 |     if isinstance(tz, str):
 35 |         if hasattr(TimeZone, tz):
 36 |             to_tz = getattr(TimeZone, tz)
 37 |         else:
 38 |             exch = exch_info(ticker=tz)
 39 |             if 'tz' in exch.index:
 40 |                 to_tz = exch.tz
 41 | 
 42 |     return to_tz
 43 | 
 44 | 
 45 | def tz_convert(dt, to_tz, from_tz=None) -> str:
 46 |     """
 47 |     Convert to tz
 48 | 
 49 |     Args:
 50 |         dt: date time
 51 |         to_tz: to tz
 52 |         from_tz: from tz - will be ignored if tz from dt is given
 53 | 
 54 |     Returns:
 55 |         str: date & time
 56 | 
 57 |     Examples:
 58 |         >>> dt_1 = pd.Timestamp('2018-09-10 16:00', tz='Asia/Hong_Kong')
 59 |         >>> tz_convert(dt_1, to_tz='NY')
 60 |         '2018-09-10 04:00:00-04:00'
 61 |         >>> dt_2 = pd.Timestamp('2018-01-10 16:00')
 62 |         >>> tz_convert(dt_2, to_tz='HK', from_tz='NY')
 63 |         '2018-01-11 05:00:00+08:00'
 64 |         >>> dt_3 = '2018-09-10 15:00'
 65 |         >>> tz_convert(dt_3, to_tz='NY', from_tz='JP')
 66 |         '2018-09-10 02:00:00-04:00'
 67 |     """
 68 |     logger = logs.get_logger(tz_convert, level='debug')
 69 |     f_tz, t_tz = get_tz(from_tz), get_tz(to_tz)
 70 | 
 71 |     from_dt = pd.Timestamp(str(dt), tz=f_tz)
 72 |     logger.debug(f'converting {str(from_dt)} from {f_tz} to {t_tz} ...')
 73 |     return str(pd.Timestamp(str(from_dt), tz=t_tz))
 74 | 
 75 | 
 76 | class TimeZone(dict):
 77 |     """
 78 |     Python timezones
 79 |     """
 80 |     __getattr__ = dict.__getitem__
 81 | 
 82 |     NY = 'America/New_York'
 83 |     AU = 'Australia/Sydney'
 84 |     JP = 'Asia/Tokyo'
 85 |     SK = 'Asia/Seoul'
 86 |     HK = 'Asia/Hong_Kong'
 87 |     SH = 'Asia/Shanghai'
 88 |     TW = 'Asia/Taipei'
 89 |     SG = 'Asia/Singapore'
 90 |     IN = 'Asia/Calcutta'
 91 |     DB = 'Asia/Dubai'
 92 |     UK = 'Europe/London'
 93 | 
 94 | 
 95 | ALL_TIMEZONES = [
 96 |     'Africa/Abidjan',
 97 |     'Africa/Accra',
 98 |     'Africa/Addis_Ababa',
 99 |     'Africa/Algiers',
100 |     'Africa/Asmara',
101 |     'Africa/Asmera',
102 |     'Africa/Bamako',
103 |     'Africa/Bangui',
104 |     'Africa/Banjul',
105 |     'Africa/Bissau',
106 |     'Africa/Blantyre',
107 |     'Africa/Brazzaville',
108 |     'Africa/Bujumbura',
109 |     'Africa/Cairo',
110 |     'Africa/Casablanca',
111 |     'Africa/Ceuta',
112 |     'Africa/Conakry',
113 |     'Africa/Dakar',
114 |     'Africa/Dar_es_Salaam',
115 |     'Africa/Djibouti',
116 |     'Africa/Douala',
117 |     'Africa/El_Aaiun',
118 |     'Africa/Freetown',
119 |     'Africa/Gaborone',
120 |     'Africa/Harare',
121 |     'Africa/Johannesburg',
122 |     'Africa/Juba',
123 |     'Africa/Kampala',
124 |     'Africa/Khartoum',
125 |     'Africa/Kigali',
126 |     'Africa/Kinshasa',
127 |     'Africa/Lagos',
128 |     'Africa/Libreville',
129 |     'Africa/Lome',
130 |     'Africa/Luanda',
131 |     'Africa/Lubumbashi',
132 |     'Africa/Lusaka',
133 |     'Africa/Malabo',
134 |     'Africa/Maputo',
135 |     'Africa/Maseru',
136 |     'Africa/Mbabane',
137 |     'Africa/Mogadishu',
138 |     'Africa/Monrovia',
139 |     'Africa/Nairobi',
140 |     'Africa/Ndjamena',
141 | 
'Africa/Niamey', 142 | 'Africa/Nouakchott', 143 | 'Africa/Ouagadougou', 144 | 'Africa/Porto-Novo', 145 | 'Africa/Sao_Tome', 146 | 'Africa/Timbuktu', 147 | 'Africa/Tripoli', 148 | 'Africa/Tunis', 149 | 'Africa/Windhoek', 150 | 'America/Adak', 151 | 'America/Anchorage', 152 | 'America/Anguilla', 153 | 'America/Antigua', 154 | 'America/Araguaina', 155 | 'America/Argentina/Buenos_Aires', 156 | 'America/Argentina/Catamarca', 157 | 'America/Argentina/ComodRivadavia', 158 | 'America/Argentina/Cordoba', 159 | 'America/Argentina/Jujuy', 160 | 'America/Argentina/La_Rioja', 161 | 'America/Argentina/Mendoza', 162 | 'America/Argentina/Rio_Gallegos', 163 | 'America/Argentina/Salta', 164 | 'America/Argentina/San_Juan', 165 | 'America/Argentina/San_Luis', 166 | 'America/Argentina/Tucuman', 167 | 'America/Argentina/Ushuaia', 168 | 'America/Aruba', 169 | 'America/Asuncion', 170 | 'America/Atikokan', 171 | 'America/Atka', 172 | 'America/Bahia', 173 | 'America/Bahia_Banderas', 174 | 'America/Barbados', 175 | 'America/Belem', 176 | 'America/Belize', 177 | 'America/Blanc-Sablon', 178 | 'America/Boa_Vista', 179 | 'America/Bogota', 180 | 'America/Boise', 181 | 'America/Buenos_Aires', 182 | 'America/Cambridge_Bay', 183 | 'America/Campo_Grande', 184 | 'America/Cancun', 185 | 'America/Caracas', 186 | 'America/Catamarca', 187 | 'America/Cayenne', 188 | 'America/Cayman', 189 | 'America/Chicago', 190 | 'America/Chihuahua', 191 | 'America/Coral_Harbour', 192 | 'America/Cordoba', 193 | 'America/Costa_Rica', 194 | 'America/Creston', 195 | 'America/Cuiaba', 196 | 'America/Curacao', 197 | 'America/Danmarkshavn', 198 | 'America/Dawson', 199 | 'America/Dawson_Creek', 200 | 'America/Denver', 201 | 'America/Detroit', 202 | 'America/Dominica', 203 | 'America/Edmonton', 204 | 'America/Eirunepe', 205 | 'America/El_Salvador', 206 | 'America/Ensenada', 207 | 'America/Fort_Nelson', 208 | 'America/Fort_Wayne', 209 | 'America/Fortaleza', 210 | 'America/Glace_Bay', 211 | 'America/Godthab', 212 | 'America/Goose_Bay', 213 | 'America/Grand_Turk', 214 | 'America/Grenada', 215 | 'America/Guadeloupe', 216 | 'America/Guatemala', 217 | 'America/Guayaquil', 218 | 'America/Guyana', 219 | 'America/Halifax', 220 | 'America/Havana', 221 | 'America/Hermosillo', 222 | 'America/Indiana/Indianapolis', 223 | 'America/Indiana/Knox', 224 | 'America/Indiana/Marengo', 225 | 'America/Indiana/Petersburg', 226 | 'America/Indiana/Tell_City', 227 | 'America/Indiana/Vevay', 228 | 'America/Indiana/Vincennes', 229 | 'America/Indiana/Winamac', 230 | 'America/Indianapolis', 231 | 'America/Inuvik', 232 | 'America/Iqaluit', 233 | 'America/Jamaica', 234 | 'America/Jujuy', 235 | 'America/Juneau', 236 | 'America/Kentucky/Louisville', 237 | 'America/Kentucky/Monticello', 238 | 'America/Knox_IN', 239 | 'America/Kralendijk', 240 | 'America/La_Paz', 241 | 'America/Lima', 242 | 'America/Los_Angeles', 243 | 'America/Louisville', 244 | 'America/Lower_Princes', 245 | 'America/Maceio', 246 | 'America/Managua', 247 | 'America/Manaus', 248 | 'America/Marigot', 249 | 'America/Martinique', 250 | 'America/Matamoros', 251 | 'America/Mazatlan', 252 | 'America/Mendoza', 253 | 'America/Menominee', 254 | 'America/Merida', 255 | 'America/Metlakatla', 256 | 'America/Mexico_City', 257 | 'America/Miquelon', 258 | 'America/Moncton', 259 | 'America/Monterrey', 260 | 'America/Montevideo', 261 | 'America/Montreal', 262 | 'America/Montserrat', 263 | 'America/Nassau', 264 | 'America/New_York', 265 | 'America/Nipigon', 266 | 'America/Nome', 267 | 'America/Noronha', 268 | 'America/North_Dakota/Beulah', 
269 | 'America/North_Dakota/Center', 270 | 'America/North_Dakota/New_Salem', 271 | 'America/Ojinaga', 272 | 'America/Panama', 273 | 'America/Pangnirtung', 274 | 'America/Paramaribo', 275 | 'America/Phoenix', 276 | 'America/Port-au-Prince', 277 | 'America/Port_of_Spain', 278 | 'America/Porto_Acre', 279 | 'America/Porto_Velho', 280 | 'America/Puerto_Rico', 281 | 'America/Punta_Arenas', 282 | 'America/Rainy_River', 283 | 'America/Rankin_Inlet', 284 | 'America/Recife', 285 | 'America/Regina', 286 | 'America/Resolute', 287 | 'America/Rio_Branco', 288 | 'America/Rosario', 289 | 'America/Santa_Isabel', 290 | 'America/Santarem', 291 | 'America/Santiago', 292 | 'America/Santo_Domingo', 293 | 'America/Sao_Paulo', 294 | 'America/Scoresbysund', 295 | 'America/Shiprock', 296 | 'America/Sitka', 297 | 'America/St_Barthelemy', 298 | 'America/St_Johns', 299 | 'America/St_Kitts', 300 | 'America/St_Lucia', 301 | 'America/St_Thomas', 302 | 'America/St_Vincent', 303 | 'America/Swift_Current', 304 | 'America/Tegucigalpa', 305 | 'America/Thule', 306 | 'America/Thunder_Bay', 307 | 'America/Tijuana', 308 | 'America/Toronto', 309 | 'America/Tortola', 310 | 'America/Vancouver', 311 | 'America/Virgin', 312 | 'America/Whitehorse', 313 | 'America/Winnipeg', 314 | 'America/Yakutat', 315 | 'America/Yellowknife', 316 | 'Antarctica/Casey', 317 | 'Antarctica/Davis', 318 | 'Antarctica/DumontDUrville', 319 | 'Antarctica/Macquarie', 320 | 'Antarctica/Mawson', 321 | 'Antarctica/McMurdo', 322 | 'Antarctica/Palmer', 323 | 'Antarctica/Rothera', 324 | 'Antarctica/South_Pole', 325 | 'Antarctica/Syowa', 326 | 'Antarctica/Troll', 327 | 'Antarctica/Vostok', 328 | 'Arctic/Longyearbyen', 329 | 'Asia/Aden', 330 | 'Asia/Almaty', 331 | 'Asia/Amman', 332 | 'Asia/Anadyr', 333 | 'Asia/Aqtau', 334 | 'Asia/Aqtobe', 335 | 'Asia/Ashgabat', 336 | 'Asia/Ashkhabad', 337 | 'Asia/Atyrau', 338 | 'Asia/Baghdad', 339 | 'Asia/Bahrain', 340 | 'Asia/Baku', 341 | 'Asia/Bangkok', 342 | 'Asia/Barnaul', 343 | 'Asia/Beirut', 344 | 'Asia/Bishkek', 345 | 'Asia/Brunei', 346 | 'Asia/Calcutta', 347 | 'Asia/Chita', 348 | 'Asia/Choibalsan', 349 | 'Asia/Chongqing', 350 | 'Asia/Chungking', 351 | 'Asia/Colombo', 352 | 'Asia/Dacca', 353 | 'Asia/Damascus', 354 | 'Asia/Dhaka', 355 | 'Asia/Dili', 356 | 'Asia/Dubai', 357 | 'Asia/Dushanbe', 358 | 'Asia/Famagusta', 359 | 'Asia/Gaza', 360 | 'Asia/Harbin', 361 | 'Asia/Hebron', 362 | 'Asia/Ho_Chi_Minh', 363 | 'Asia/Hong_Kong', 364 | 'Asia/Hovd', 365 | 'Asia/Irkutsk', 366 | 'Asia/Istanbul', 367 | 'Asia/Jakarta', 368 | 'Asia/Jayapura', 369 | 'Asia/Jerusalem', 370 | 'Asia/Kabul', 371 | 'Asia/Kamchatka', 372 | 'Asia/Karachi', 373 | 'Asia/Kashgar', 374 | 'Asia/Kathmandu', 375 | 'Asia/Katmandu', 376 | 'Asia/Khandyga', 377 | 'Asia/Kolkata', 378 | 'Asia/Krasnoyarsk', 379 | 'Asia/Kuala_Lumpur', 380 | 'Asia/Kuching', 381 | 'Asia/Kuwait', 382 | 'Asia/Macao', 383 | 'Asia/Macau', 384 | 'Asia/Magadan', 385 | 'Asia/Makassar', 386 | 'Asia/Manila', 387 | 'Asia/Muscat', 388 | 'Asia/Nicosia', 389 | 'Asia/Novokuznetsk', 390 | 'Asia/Novosibirsk', 391 | 'Asia/Omsk', 392 | 'Asia/Oral', 393 | 'Asia/Phnom_Penh', 394 | 'Asia/Pontianak', 395 | 'Asia/Pyongyang', 396 | 'Asia/Qatar', 397 | 'Asia/Qyzylorda', 398 | 'Asia/Rangoon', 399 | 'Asia/Riyadh', 400 | 'Asia/Saigon', 401 | 'Asia/Sakhalin', 402 | 'Asia/Samarkand', 403 | 'Asia/Seoul', 404 | 'Asia/Shanghai', 405 | 'Asia/Singapore', 406 | 'Asia/Srednekolymsk', 407 | 'Asia/Taipei', 408 | 'Asia/Tashkent', 409 | 'Asia/Tbilisi', 410 | 'Asia/Tehran', 411 | 'Asia/Tel_Aviv', 412 | 'Asia/Thimbu', 413 | 'Asia/Thimphu', 
414 | 'Asia/Tokyo', 415 | 'Asia/Tomsk', 416 | 'Asia/Ujung_Pandang', 417 | 'Asia/Ulaanbaatar', 418 | 'Asia/Ulan_Bator', 419 | 'Asia/Urumqi', 420 | 'Asia/Ust-Nera', 421 | 'Asia/Vientiane', 422 | 'Asia/Vladivostok', 423 | 'Asia/Yakutsk', 424 | 'Asia/Yangon', 425 | 'Asia/Yekaterinburg', 426 | 'Asia/Yerevan', 427 | 'Atlantic/Azores', 428 | 'Atlantic/Bermuda', 429 | 'Atlantic/Canary', 430 | 'Atlantic/Cape_Verde', 431 | 'Atlantic/Faeroe', 432 | 'Atlantic/Faroe', 433 | 'Atlantic/Jan_Mayen', 434 | 'Atlantic/Madeira', 435 | 'Atlantic/Reykjavik', 436 | 'Atlantic/South_Georgia', 437 | 'Atlantic/St_Helena', 438 | 'Atlantic/Stanley', 439 | 'Australia/ACT', 440 | 'Australia/Adelaide', 441 | 'Australia/Brisbane', 442 | 'Australia/Broken_Hill', 443 | 'Australia/Canberra', 444 | 'Australia/Currie', 445 | 'Australia/Darwin', 446 | 'Australia/Eucla', 447 | 'Australia/Hobart', 448 | 'Australia/LHI', 449 | 'Australia/Lindeman', 450 | 'Australia/Lord_Howe', 451 | 'Australia/Melbourne', 452 | 'Australia/NSW', 453 | 'Australia/North', 454 | 'Australia/Perth', 455 | 'Australia/Queensland', 456 | 'Australia/South', 457 | 'Australia/Sydney', 458 | 'Australia/Tasmania', 459 | 'Australia/Victoria', 460 | 'Australia/West', 461 | 'Australia/Yancowinna', 462 | 'Brazil/Acre', 463 | 'Brazil/DeNoronha', 464 | 'Brazil/East', 465 | 'Brazil/West', 466 | 'CET', 467 | 'CST6CDT', 468 | 'Canada/Atlantic', 469 | 'Canada/Central', 470 | 'Canada/Eastern', 471 | 'Canada/Mountain', 472 | 'Canada/Newfoundland', 473 | 'Canada/Pacific', 474 | 'Canada/Saskatchewan', 475 | 'Canada/Yukon', 476 | 'Chile/Continental', 477 | 'Chile/EasterIsland', 478 | 'Cuba', 479 | 'EET', 480 | 'EST', 481 | 'EST5EDT', 482 | 'Egypt', 483 | 'Eire', 484 | 'Etc/GMT', 485 | 'Etc/GMT+0', 486 | 'Etc/GMT+1', 487 | 'Etc/GMT+10', 488 | 'Etc/GMT+11', 489 | 'Etc/GMT+12', 490 | 'Etc/GMT+2', 491 | 'Etc/GMT+3', 492 | 'Etc/GMT+4', 493 | 'Etc/GMT+5', 494 | 'Etc/GMT+6', 495 | 'Etc/GMT+7', 496 | 'Etc/GMT+8', 497 | 'Etc/GMT+9', 498 | 'Etc/GMT-0', 499 | 'Etc/GMT-1', 500 | 'Etc/GMT-10', 501 | 'Etc/GMT-11', 502 | 'Etc/GMT-12', 503 | 'Etc/GMT-13', 504 | 'Etc/GMT-14', 505 | 'Etc/GMT-2', 506 | 'Etc/GMT-3', 507 | 'Etc/GMT-4', 508 | 'Etc/GMT-5', 509 | 'Etc/GMT-6', 510 | 'Etc/GMT-7', 511 | 'Etc/GMT-8', 512 | 'Etc/GMT-9', 513 | 'Etc/GMT0', 514 | 'Etc/Greenwich', 515 | 'Etc/UCT', 516 | 'Etc/UTC', 517 | 'Etc/Universal', 518 | 'Etc/Zulu', 519 | 'Europe/Amsterdam', 520 | 'Europe/Andorra', 521 | 'Europe/Astrakhan', 522 | 'Europe/Athens', 523 | 'Europe/Belfast', 524 | 'Europe/Belgrade', 525 | 'Europe/Berlin', 526 | 'Europe/Bratislava', 527 | 'Europe/Brussels', 528 | 'Europe/Bucharest', 529 | 'Europe/Budapest', 530 | 'Europe/Busingen', 531 | 'Europe/Chisinau', 532 | 'Europe/Copenhagen', 533 | 'Europe/Dublin', 534 | 'Europe/Gibraltar', 535 | 'Europe/Guernsey', 536 | 'Europe/Helsinki', 537 | 'Europe/Isle_of_Man', 538 | 'Europe/Istanbul', 539 | 'Europe/Jersey', 540 | 'Europe/Kaliningrad', 541 | 'Europe/Kiev', 542 | 'Europe/Kirov', 543 | 'Europe/Lisbon', 544 | 'Europe/Ljubljana', 545 | 'Europe/London', 546 | 'Europe/Luxembourg', 547 | 'Europe/Madrid', 548 | 'Europe/Malta', 549 | 'Europe/Mariehamn', 550 | 'Europe/Minsk', 551 | 'Europe/Monaco', 552 | 'Europe/Moscow', 553 | 'Europe/Nicosia', 554 | 'Europe/Oslo', 555 | 'Europe/Paris', 556 | 'Europe/Podgorica', 557 | 'Europe/Prague', 558 | 'Europe/Riga', 559 | 'Europe/Rome', 560 | 'Europe/Samara', 561 | 'Europe/San_Marino', 562 | 'Europe/Sarajevo', 563 | 'Europe/Saratov', 564 | 'Europe/Simferopol', 565 | 'Europe/Skopje', 566 | 'Europe/Sofia', 567 | 
'Europe/Stockholm', 568 | 'Europe/Tallinn', 569 | 'Europe/Tirane', 570 | 'Europe/Tiraspol', 571 | 'Europe/Ulyanovsk', 572 | 'Europe/Uzhgorod', 573 | 'Europe/Vaduz', 574 | 'Europe/Vatican', 575 | 'Europe/Vienna', 576 | 'Europe/Vilnius', 577 | 'Europe/Volgograd', 578 | 'Europe/Warsaw', 579 | 'Europe/Zagreb', 580 | 'Europe/Zaporozhye', 581 | 'Europe/Zurich', 582 | 'GB', 583 | 'GB-Eire', 584 | 'GMT', 585 | 'GMT+0', 586 | 'GMT-0', 587 | 'GMT0', 588 | 'Greenwich', 589 | 'HST', 590 | 'Hongkong', 591 | 'Iceland', 592 | 'Indian/Antananarivo', 593 | 'Indian/Chagos', 594 | 'Indian/Christmas', 595 | 'Indian/Cocos', 596 | 'Indian/Comoro', 597 | 'Indian/Kerguelen', 598 | 'Indian/Mahe', 599 | 'Indian/Maldives', 600 | 'Indian/Mauritius', 601 | 'Indian/Mayotte', 602 | 'Indian/Reunion', 603 | 'Iran', 604 | 'Israel', 605 | 'Jamaica', 606 | 'Japan', 607 | 'Kwajalein', 608 | 'Libya', 609 | 'MET', 610 | 'MST', 611 | 'MST7MDT', 612 | 'Mexico/BajaNorte', 613 | 'Mexico/BajaSur', 614 | 'Mexico/General', 615 | 'NZ', 616 | 'NZ-CHAT', 617 | 'Navajo', 618 | 'PRC', 619 | 'PST8PDT', 620 | 'Pacific/Apia', 621 | 'Pacific/Auckland', 622 | 'Pacific/Bougainville', 623 | 'Pacific/Chatham', 624 | 'Pacific/Chuuk', 625 | 'Pacific/Easter', 626 | 'Pacific/Efate', 627 | 'Pacific/Enderbury', 628 | 'Pacific/Fakaofo', 629 | 'Pacific/Fiji', 630 | 'Pacific/Funafuti', 631 | 'Pacific/Galapagos', 632 | 'Pacific/Gambier', 633 | 'Pacific/Guadalcanal', 634 | 'Pacific/Guam', 635 | 'Pacific/Honolulu', 636 | 'Pacific/Johnston', 637 | 'Pacific/Kiritimati', 638 | 'Pacific/Kosrae', 639 | 'Pacific/Kwajalein', 640 | 'Pacific/Majuro', 641 | 'Pacific/Marquesas', 642 | 'Pacific/Midway', 643 | 'Pacific/Nauru', 644 | 'Pacific/Niue', 645 | 'Pacific/Norfolk', 646 | 'Pacific/Noumea', 647 | 'Pacific/Pago_Pago', 648 | 'Pacific/Palau', 649 | 'Pacific/Pitcairn', 650 | 'Pacific/Pohnpei', 651 | 'Pacific/Ponape', 652 | 'Pacific/Port_Moresby', 653 | 'Pacific/Rarotonga', 654 | 'Pacific/Saipan', 655 | 'Pacific/Samoa', 656 | 'Pacific/Tahiti', 657 | 'Pacific/Tarawa', 658 | 'Pacific/Tongatapu', 659 | 'Pacific/Truk', 660 | 'Pacific/Wake', 661 | 'Pacific/Wallis', 662 | 'Pacific/Yap', 663 | 'Poland', 664 | 'Portugal', 665 | 'ROC', 666 | 'ROK', 667 | 'Singapore', 668 | 'Turkey', 669 | 'UCT', 670 | 'US/Alaska', 671 | 'US/Aleutian', 672 | 'US/Arizona', 673 | 'US/Central', 674 | 'US/East-Indiana', 675 | 'US/Eastern', 676 | 'US/Hawaii', 677 | 'US/Indiana-Starke', 678 | 'US/Michigan', 679 | 'US/Mountain', 680 | 'US/Pacific', 681 | 'US/Samoa', 682 | 'UTC', 683 | 'Universal', 684 | 'W-SU', 685 | 'WET', 686 | 'Zulu', 687 | ] 688 | -------------------------------------------------------------------------------- /xbbg/core/trials.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from xbbg.io import files, db 4 | from xbbg.core import utils 5 | from xbbg.core.overrides import BBG_ROOT 6 | 7 | 8 | TRIALS_TABLE = """ 9 | CREATE TABLE IF NOT EXISTS trials ( 10 | func varchar(20), 11 | ticker varchar(30), 12 | dt varchar(10), 13 | typ varchar(20), 14 | cnt int, 15 | PRIMARY KEY (func, ticker, dt, typ) 16 | ) 17 | """ 18 | 19 | 20 | def root_path() -> str: 21 | """ 22 | Root data path of Bloomberg 23 | """ 24 | return os.environ.get(BBG_ROOT, '').replace('\\', '/') 25 | 26 | 27 | def convert_exisiting(): 28 | """ 29 | Update existing missing logs to database 30 | """ 31 | data_path = root_path() 32 | if not data_path: return 33 | 34 | with db.SQLite(f'{data_path}/Logs/xbbg.db') as con: 35 | con.execute(TRIALS_TABLE) 36 | for item in 
all_trials():
 37 |             con.execute(db.replace_into(table='trials', **item))
 38 | 
 39 | 
 40 | def all_trials() -> dict:
 41 |     """
 42 |     All missing logs
 43 | 
 44 |     Yields:
 45 |         dict
 46 |     """
 47 |     data_path = root_path()
 48 |     if data_path:
 49 |         for sub1 in files.all_folders(f'{data_path}/Logs/bdib'):
 50 |             for sub2 in files.all_folders(sub1, has_date=True):
 51 |                 for sub3 in files.all_folders(sub2):
 52 |                     cnt = len(files.all_files(sub3, ext='log'))
 53 |                     if cnt:
 54 |                         yield dict(
 55 |                             func='bdib',
 56 |                             ticker=sub1.split('/')[-1],
 57 |                             dt=sub2.split('/')[-1],
 58 |                             typ=sub3.split('/')[-1],
 59 |                             cnt=cnt,
 60 |                         )
 61 | 
 62 | 
 63 | def trail_info(**kwargs) -> dict:
 64 |     """
 65 |     Convert info to proper format for database
 66 | 
 67 |     Returns:
 68 |         dict
 69 |     """
 70 |     kwargs['func'] = kwargs.pop('func', 'unknown')
 71 |     if 'ticker' in kwargs:
 72 |         kwargs['ticker'] = kwargs['ticker'].replace('/', '_')
 73 |     for dt in ['dt', 'start_dt', 'end_dt', 'start_date', 'end_date']:
 74 |         if dt not in kwargs: continue
 75 |         kwargs[dt] = utils.fmt_dt(kwargs[dt])
 76 |     return kwargs
 77 | 
 78 | 
 79 | def missing_info(**kwargs) -> str:
 80 |     """
 81 |     Full information for missing query
 82 |     """
 83 |     func = kwargs.pop('func', 'unknown')
 84 |     if 'ticker' in kwargs: kwargs['ticker'] = kwargs['ticker'].replace('/', '_')
 85 |     for dt in ['dt', 'start_dt', 'end_dt', 'start_date', 'end_date']:
 86 |         if dt not in kwargs: continue
 87 |         kwargs[dt] = utils.fmt_dt(kwargs[dt])
 88 |     info = utils.to_str(kwargs, fmt='{value}', sep='/')[1:-1]
 89 |     return f'{func}/{info}'
 90 | 
 91 | 
 92 | def num_trials(**kwargs) -> int:
 93 |     """
 94 |     Check number of trials for missing values
 95 | 
 96 |     Returns:
 97 |         int: number of trials already tried
 98 |     """
 99 |     data_path = root_path()
100 |     if not data_path: return 0
101 | 
102 |     db_file = f'{data_path}/Logs/xbbg.db'
103 |     files.create_folder(db_file, is_file=True)
104 |     with db.SQLite(db_file) as con:
105 |         con.execute(TRIALS_TABLE)
106 |         num = con.execute(db.select(
107 |             table='trials',
108 |             **trail_info(**kwargs),
109 |         )).fetchall()
110 |     if not num: return 0
111 |     return num[0][-1]
112 | 
113 | 
114 | def update_trials(**kwargs):
115 |     """
116 |     Update number of trials for missing values
117 |     """
118 |     data_path = root_path()
119 |     if not data_path: return
120 | 
121 |     if 'cnt' not in kwargs:
122 |         kwargs['cnt'] = num_trials(**kwargs) + 1
123 | 
124 |     db_file = f'{data_path}/Logs/xbbg.db'
125 |     files.create_folder(db_file, is_file=True)
126 |     with db.SQLite(db_file) as con:
127 |         con.execute(TRIALS_TABLE)
128 |         con.execute(db.replace_into(
129 |             table='trials',
130 |             **trail_info(**kwargs),
131 |         ))
132 | 
133 | 
134 | def current_missing(**kwargs) -> int:
135 |     """
136 |     Number of missing-data logs already recorded for given query
137 | 
138 |     Returns:
139 |         int: count of existing log files
140 |     """
141 |     data_path = root_path()
142 |     if not data_path: return 0
143 |     return len(files.all_files(f'{data_path}/Logs/{missing_info(**kwargs)}'))
144 | 
145 | 
146 | def update_missing(**kwargs):
147 |     """
148 |     Record one more missing-data log for given query
149 |     """
150 |     data_path = root_path()
151 |     if not data_path: return
152 |     if len(kwargs) == 0: return
153 | 
154 |     log_path = f'{data_path}/Logs/{missing_info(**kwargs)}'
155 | 
156 |     cnt = len(files.all_files(log_path)) + 1
157 |     files.create_folder(log_path)
158 |     open(f'{log_path}/{cnt}.log', 'a').close()
159 | 
--------------------------------------------------------------------------------
/xbbg/core/utils.py:
--------------------------------------------------------------------------------
  1 | 
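# Generic helpers shared across xbbg: iterable flattening, date / time
# formatting, delayed f-string evaluation and dynamic module loading.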
import pandas as pd
  2 | 
  3 | import time
  4 | import pytz
  5 | import inspect
  6 | import sys
  7 | import datetime
  8 | 
  9 | from typing import Union
 10 | from pathlib import Path
 11 | 
 12 | DEFAULT_TZ = pytz.FixedOffset(-time.timezone / 60)
 13 | 
 14 | 
 15 | def flatten(iterable, maps=None, unique=False) -> list:
 16 |     """
 17 |     Flatten any array of items to list
 18 | 
 19 |     Args:
 20 |         iterable: any array or value
 21 |         maps: map items to values
 22 |         unique: drop duplicates
 23 | 
 24 |     Returns:
 25 |         list: flattened list
 26 | 
 27 |     References:
 28 |         https://stackoverflow.com/a/40857703/1332656
 29 | 
 30 |     Examples:
 31 |         >>> flatten('abc')
 32 |         ['abc']
 33 |         >>> flatten(1)
 34 |         [1]
 35 |         >>> flatten(1.)
 36 |         [1.0]
 37 |         >>> flatten(['ab', 'cd', ['xy', 'zz']])
 38 |         ['ab', 'cd', 'xy', 'zz']
 39 |         >>> flatten(['ab', ['xy', 'zz']], maps={'xy': '0x'})
 40 |         ['ab', '0x', 'zz']
 41 |     """
 42 |     if iterable is None: return []
 43 |     if maps is None: maps = dict()
 44 | 
 45 |     if isinstance(iterable, (str, int, float)):
 46 |         return [maps.get(iterable, iterable)]
 47 | 
 48 |     x = [maps.get(item, item) for item in _to_gen_(iterable)]
 49 |     return list(set(x)) if unique else x
 50 | 
 51 | 
 52 | def _to_gen_(iterable):
 53 |     """
 54 |     Recursively iterate lists and tuples
 55 |     """
 56 |     from collections.abc import Iterable
 57 | 
 58 |     for elm in iterable:
 59 |         if isinstance(elm, Iterable) and not isinstance(elm, (str, bytes)):
 60 |             yield from _to_gen_(elm)
 61 |         else: yield elm
 62 | 
 63 | 
 64 | def fmt_dt(dt, fmt='%Y-%m-%d') -> str:
 65 |     """
 66 |     Format date string
 67 | 
 68 |     Args:
 69 |         dt: any date format
 70 |         fmt: output date format
 71 | 
 72 |     Returns:
 73 |         str: date format
 74 | 
 75 |     Examples:
 76 |         >>> fmt_dt(dt='2018-12')
 77 |         '2018-12-01'
 78 |         >>> fmt_dt(dt='2018-12-31', fmt='%Y%m%d')
 79 |         '20181231'
 80 |     """
 81 |     return pd.Timestamp(dt).strftime(fmt)
 82 | 
 83 | 
 84 | def cur_time(typ='date', tz=DEFAULT_TZ) -> Union[datetime.date, str]:
 85 |     """
 86 |     Current time
 87 | 
 88 |     Args:
 89 |         typ: one of ['date', 'time', 'time_path', 'raw', '']
 90 |         tz: timezone
 91 | 
 92 |     Returns:
 93 |         relevant current time or date
 94 | 
 95 |     Examples:
 96 |         >>> cur_dt = pd.Timestamp('now')
 97 |         >>> cur_time(typ='date') == cur_dt.strftime('%Y-%m-%d')
 98 |         True
 99 |         >>> cur_time(typ='time') == cur_dt.strftime('%Y-%m-%d %H:%M:%S')
100 |         True
101 |         >>> cur_time(typ='time_path') == cur_dt.strftime('%Y-%m-%d/%H-%M-%S')
102 |         True
103 |         >>> isinstance(cur_time(typ='raw', tz='Europe/London'), pd.Timestamp)
104 |         True
105 |         >>> cur_time(typ='') == cur_dt.date()
106 |         True
107 |     """
108 |     dt = pd.Timestamp('now', tz=tz)
109 | 
110 |     if typ == 'date': return dt.strftime('%Y-%m-%d')
111 |     if typ == 'time': return dt.strftime('%Y-%m-%d %H:%M:%S')
112 |     if typ == 'time_path': return dt.strftime('%Y-%m-%d/%H-%M-%S')
113 |     if typ == 'raw': return dt
114 | 
115 |     return dt.date()
116 | 
117 | 
118 | class FString(object):
119 | 
120 |     def __init__(self, str_fmt):
121 |         self.str_fmt = str_fmt
122 | 
123 |     def __str__(self):
124 |         kwargs = inspect.currentframe().f_back.f_globals.copy()
125 |         kwargs.update(inspect.currentframe().f_back.f_locals)
126 |         return self.str_fmt.format(**kwargs)
127 | 
128 | 
129 | def fstr(fmt, **kwargs) -> str:
130 |     """
131 |     Delayed evaluation of f-strings
132 | 
133 |     Args:
134 |         fmt: f-string but in terms of normal string, i.e., '{path}/{file}.parq'
135 |         **kwargs: variables for f-strings, i.e., path, file = '/data', 'daily'
136 | 
137 |     Returns:
138 |         str: rendered string
139 | 
140 |     References:
141 |         https://stackoverflow.com/a/42497694/1332656
142 | 
https://stackoverflow.com/a/4014070/1332656
143 | 
144 |     Examples:
145 |         >>> fmt = '{data_path}/{data_file}.parq'
146 |         >>> fstr(fmt, data_path='your/data/path', data_file='sample')
147 |         'your/data/path/sample.parq'
148 |     """
149 |     locals().update(kwargs)
150 |     return f'{FString(str_fmt=fmt)}'
151 | 
152 | 
153 | def to_str(
154 |         data: dict, fmt='{key}={value}', sep=', ', public_only=True
155 | ) -> str:
156 |     """
157 |     Convert dict to string
158 | 
159 |     Args:
160 |         data: dict
161 |         fmt: how key and value being represented
162 |         sep: how pairs of key and value are separated
163 |         public_only: if display public members only
164 | 
165 |     Returns:
166 |         str: string representation of dict
167 | 
168 |     Examples:
169 |         >>> test_dict = dict(b=1, a=0, c=2, _d=3)
170 |         >>> to_str(test_dict)
171 |         '{b=1, a=0, c=2}'
172 |         >>> to_str(test_dict, sep='|')
173 |         '{b=1|a=0|c=2}'
174 |         >>> to_str(test_dict, public_only=False)
175 |         '{b=1, a=0, c=2, _d=3}'
176 |     """
177 |     if public_only: keys = list(filter(lambda vv: vv[0] != '_', data.keys()))
178 |     else: keys = list(data.keys())
179 |     return '{' + sep.join([
180 |         to_str(data=v, fmt=fmt, sep=sep)
181 |         if isinstance(v, dict) else fstr(fmt=fmt, key=k, value=v)
182 |         for k, v in data.items() if k in keys
183 |     ]) + '}'
184 | 
185 | 
186 | def func_scope(func) -> str:
187 |     """
188 |     Function scope name
189 | 
190 |     Args:
191 |         func: python function
192 | 
193 |     Returns:
194 |         str: module_name.func_name
195 | 
196 |     Examples:
197 |         >>> func_scope(flatten)
198 |         'xbbg.core.utils.flatten'
199 |         >>> func_scope(time.strftime)
200 |         'time.strftime'
201 |     """
202 |     cur_mod = sys.modules[func.__module__]
203 |     return f'{cur_mod.__name__}.{func.__name__}'
204 | 
205 | 
206 | def load_module(full_path):
207 |     """
208 |     Load module from full path
209 |     Args:
210 |         full_path: module full path name
211 |     Returns:
212 |         python module
213 |     References:
214 |         https://stackoverflow.com/a/67692/1332656
215 |     Examples:
216 |         >>> from pathlib import Path
217 |         >>>
218 |         >>> cur_path = Path(__file__).parent
219 |         >>> load_module(cur_path / 'timezone.py').__name__
220 |         'timezone'
221 |         >>> load_module(cur_path / 'timezone.pyc')
222 |         Traceback (most recent call last):
223 |         ImportError: not a python file: timezone.pyc
224 |     """
225 |     from importlib import util
226 | 
227 |     file_name = Path(full_path).name
228 |     if file_name[-3:] != '.py':
229 |         raise ImportError(f'not a python file: {file_name}')
230 |     module_name = file_name[:-3]
231 | 
232 |     spec = util.spec_from_file_location(name=module_name, location=full_path)
233 |     module = util.module_from_spec(spec=spec)
234 |     spec.loader.exec_module(module=module)
235 | 
236 |     return module
237 | 
--------------------------------------------------------------------------------
/xbbg/io/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alpha-xone/xbbg/266d68a53bda61745191e1a86b0f1f32b33ae082/xbbg/io/__init__.py
--------------------------------------------------------------------------------
/xbbg/io/cached.py:
--------------------------------------------------------------------------------
  1 | import pandas as pd
  2 | 
  3 | from itertools import product
  4 | from collections import namedtuple
  5 | 
  6 | from xbbg.core import utils
  7 | from xbbg.io import files, logs, storage
  8 | 
  9 | ToQuery = namedtuple('ToQuery', ['tickers', 'flds', 'cached_data'])
 10 | EXC_COLS = ['tickers', 'flds', 'raw', 'log', 'col_maps']
 11 | 
 12 | 
 13 | def bdp_bds_cache(func, tickers, flds, **kwargs) -> 
ToQuery:
 14 |     """
 15 |     Find cached `BDP` / `BDS` queries
 16 | 
 17 |     Args:
 18 |         func: function name - bdp or bds
 19 |         tickers: tickers
 20 |         flds: fields
 21 |         **kwargs: other kwargs
 22 | 
 23 |     Returns:
 24 |         ToQuery(tickers, flds, cached_data)
 25 |     """
 26 |     cache_data = []
 27 |     logger = logs.get_logger(bdp_bds_cache, **kwargs)
 28 |     kwargs['has_date'] = kwargs.pop('has_date', func == 'bds')
 29 |     kwargs['cache'] = kwargs.get('cache', True)
 30 | 
 31 |     tickers = utils.flatten(tickers)
 32 |     flds = utils.flatten(flds)
 33 |     loaded = pd.DataFrame(data=0, index=tickers, columns=flds)
 34 | 
 35 |     for ticker, fld in product(tickers, flds):
 36 |         data_file = storage.ref_file(
 37 |             ticker=ticker, fld=fld, ext='pkl', **{
 38 |                 k: v for k, v in kwargs.items() if k not in EXC_COLS
 39 |             }
 40 |         )
 41 |         if not files.exists(data_file): continue
 42 |         logger.debug(f'reading from {data_file} ...')
 43 |         cache_data.append(pd.read_pickle(data_file))
 44 |         loaded.loc[ticker, fld] = 1
 45 | 
 46 |     to_qry = loaded.where(loaded == 0)\
 47 |         .dropna(how='all', axis=1).dropna(how='all', axis=0)
 48 | 
 49 |     return ToQuery(
 50 |         tickers=to_qry.index.tolist(), flds=to_qry.columns.tolist(),
 51 |         cached_data=cache_data
 52 |     )
 53 | 
--------------------------------------------------------------------------------
/xbbg/io/db.py:
--------------------------------------------------------------------------------
  1 | import pandas as pd
  2 | 
  3 | import sqlite3
  4 | import json
  5 | 
  6 | WAL_MODE = 'PRAGMA journal_mode=WAL'
  7 | ALL_TABLES = 'SELECT name FROM sqlite_master WHERE type="table"'
  8 | 
  9 | 
 10 | class Singleton(type):
 11 | 
 12 |     _instances_ = {}
 13 | 
 14 |     def __call__(cls, *args, **kwargs):
 15 |         # Default values for class init
 16 |         default_keys = ['db_file', 'keep_live']
 17 |         kw = {**dict(zip(default_keys, args)), **kwargs}
 18 |         kw['keep_live'] = kw.get('keep_live', False)
 19 | 
 20 |         # Singleton instance
 21 |         key = json.dumps(kw)
 22 |         if key not in cls._instances_:
 23 |             cls._instances_[key] = super(Singleton, cls).__call__(**kw)
 24 |         return cls._instances_[key]
 25 | 
 26 | 
 27 | class SQLite(metaclass=Singleton):
 28 |     """
 29 |     Examples:
 30 |         >>> from xbbg.io import files
 31 |         >>>
 32 |         >>> db_file_ = f'{files.abspath(__file__, 1)}/tests/xone.db'
 33 |         >>> with SQLite(db_file_) as con_:
 34 |         ...     _ = con_.execute('DROP TABLE IF EXISTS xone')
 35 |         ...     _ = con_.execute('CREATE TABLE xone (rowid int)')
 36 |         >>> db_ = SQLite(db_file_)
 37 |         >>> db_.tables()
 38 |         ['xone']
 39 |         >>> db_.replace_into(table='xone', rowid=1)
 40 |         >>> db_.select(table='xone')
 41 |            rowid
 42 |         0      1
 43 |         >>> db_.replace_into(
 44 |         ...     table='xone',
 45 |         ...     data=pd.DataFrame([{'rowid': 2}, {'rowid': 3}])
 46 |         ... 
)
 47 |         >>> db_.select(table='xone')
 48 |            rowid
 49 |         0      1
 50 |         1      2
 51 |         2      3
 52 |     """
 53 | 
 54 |     def __init__(self, db_file, keep_live=False):
 55 | 
 56 |         self.db_file = db_file
 57 |         self.keep_live = keep_live
 58 |         self._con_ = None
 59 | 
 60 |     def tables(self) -> list:
 61 |         """
 62 |         All tables within database
 63 |         """
 64 |         keep_live = self.is_live
 65 |         res = self.con.execute(ALL_TABLES).fetchall()
 66 |         if not keep_live: self.close()
 67 |         return [r[0] for r in res]
 68 | 
 69 |     def select(self, table: str, cond='', **kwargs) -> pd.DataFrame:
 70 |         """
 71 |         SELECT query
 72 |         """
 73 |         keep_live = self.is_live
 74 |         q_str = select(table=table, cond=cond, **kwargs)
 75 |         data = self.con.execute(q_str).fetchall()
 76 |         if not keep_live: self.close()
 77 |         return pd.DataFrame(data, columns=self.columns(table=table))
 78 | 
 79 |     def select_recent(
 80 |             self,
 81 |             table: str,
 82 |             dateperiod: str,
 83 |             date_col: str = 'modified_date',
 84 |             cond='',
 85 |             **kwargs
 86 |     ) -> pd.DataFrame:
 87 |         """
 88 |         Select recent
 89 | 
 90 |         Args:
 91 |             table: table name
 92 |             dateperiod: time period, e.g., 1M, 1Q, etc.
 93 |             date_col: column for time period
 94 |             cond: conditions
 95 |             **kwargs: other select criteria
 96 | 
 97 |         Returns:
 98 |             pd.DataFrame
 99 |         """
100 |         cols = self.columns(table=table)
101 |         if date_col not in cols: return pd.DataFrame()
102 |         start_dt = (
103 |             pd.date_range(
104 |                 end='today', freq=dateperiod, periods=2, normalize=True,
105 |             )[0]
106 |             .strftime('%Y-%m-%d')
107 |         )
108 |         return (
109 |             self.select(table=table, cond=cond, **kwargs)
110 |             .query(f'{date_col} >= "{start_dt}"')
111 |             .reset_index(drop=True)
112 |         )
113 | 
114 |     def columns(self, table: str):
115 |         """
116 |         Table columns
117 |         """
118 |         return [
119 |             info[1] for info in (
120 |                 self.con.execute(f'PRAGMA table_info (`{table}`)').fetchall()
121 |             )
122 |         ]
123 | 
124 |     def replace_into(self, table: str, data: pd.DataFrame = None, **kwargs):
125 |         """
126 |         Replace records into table
127 | 
128 |         Args:
129 |             table: table name
130 |             data: DataFrame - if given, **kwargs will be ignored
131 |             **kwargs: record values
132 |         """
133 |         if isinstance(data, pd.DataFrame):
134 |             keep_live = self.is_live
135 |             cols = ', '.join(map(lambda v: f'`{v}`', data.columns))
136 |             vals = ', '.join(['?'] * data.shape[1])
137 |             # noinspection PyTypeChecker
138 |             self.con.executemany(
139 |                 f'REPLACE INTO `{table}` ({cols}) values ({vals})',
140 |                 data.apply(tuple, axis=1).tolist()
141 |             )
142 |         else:
143 |             keep_live = self.is_live or kwargs.get('_live_', False)
144 |             self.con.execute(replace_into(table=table, **{
145 |                 k: v for k, v in kwargs.items() if k != '_live_'
146 |             }))
147 |         if not keep_live: self.close()
148 | 
149 |     @property
150 |     def is_live(self) -> bool:
151 |         if not isinstance(self._con_, sqlite3.Connection):
152 |             return False
153 |         try:
154 |             self._con_.execute(ALL_TABLES)
155 |             return True
156 |         except sqlite3.Error:
157 |             return False
158 | 
159 |     @property
160 |     def con(self) -> sqlite3.Connection:
161 |         if not self.is_live:
162 |             self._con_ = sqlite3.connect(self.db_file)
163 |             self._con_.execute(WAL_MODE)
164 |         return self._con_
165 | 
166 |     def close(self, keep_live=False):
167 |         try:
168 |             self._con_.commit()
169 |             if not keep_live: self._con_.close()
170 |         except sqlite3.ProgrammingError:
171 |             pass
172 |         except sqlite3.Error as e:
173 |             print(e)
174 | 
175 |     def __enter__(self):
176 |         return self.con.cursor()
177 | 
178 |     def __exit__(self, exc_type, exc_val, exc_tb):
179 |         self.close(keep_live=self.keep_live)
180 | 
181 | 
182 | def db_value(val) -> 
str:
183 |     """
184 |     Database value as in query string
185 |     """
186 |     if isinstance(val, str):
187 |         return json.dumps(val.replace('\"', '').strip())
188 |     return json.dumps(val, default=str)
189 | 
190 | 
191 | def select(table: str, cond='', **kwargs) -> str:
192 |     """
193 |     Query string of SELECT statement
194 | 
195 |     Args:
196 |         table: table name
197 |         cond: conditions
198 |         **kwargs: data as kwargs
199 | 
200 |     Examples:
201 |         >>> q1 = select('daily', ticker='ES1 Index', price=3000)
202 |         >>> q1.splitlines()[-2].strip()
203 |         'ticker="ES1 Index" AND price=3000'
204 |         >>> q2 = select('daily', cond='price > 3000', ticker='ES1 Index')
205 |         >>> q2.splitlines()[-2].strip()
206 |         'price > 3000 AND ticker="ES1 Index"'
207 |         >>> q3 = select('daily', cond='price > 3000')
208 |         >>> q3.splitlines()[-2].strip()
209 |         'price > 3000'
210 |         >>> select('daily')
211 |         'SELECT * FROM `daily`'
212 |     """
213 |     all_cond = [cond] + [
214 |         f'{key}={db_value(value)}'
215 |         for key, value in kwargs.items()
216 |     ]
217 |     where = ' AND '.join(filter(bool, all_cond))
218 |     s = f'SELECT * FROM `{table}`'
219 |     if where:
220 |         return f"""
221 |             {s}
222 |             WHERE
223 |                 {where}
224 |         """
225 |     return s
226 | 
227 | 
228 | def replace_into(table: str, **kwargs) -> str:
229 |     """
230 |     Query string of REPLACE INTO statement
231 | 
232 |     Args:
233 |         table: table name
234 |         **kwargs: data as kwargs
235 | 
236 |     Examples:
237 |         >>> query = replace_into('daily', ticker='ES1 Index', price=3000)
238 |         >>> query.splitlines()[1].strip()
239 |         'REPLACE INTO `daily` (ticker, price)'
240 |         >>> query.splitlines()[2].strip()
241 |         'VALUES ("ES1 Index", 3000)'
242 |     """
243 |     return f"""
244 |         REPLACE INTO `{table}` ({', '.join(list(kwargs.keys()))})
245 |         VALUES ({', '.join(map(db_value, list(kwargs.values())))})
246 |     """
247 | 
--------------------------------------------------------------------------------
/xbbg/io/files.py:
--------------------------------------------------------------------------------
  1 | import pandas as pd
  2 | 
  3 | import os
  4 | import re
  5 | import time
  6 | 
  7 | from typing import List
  8 | from pathlib import Path
  9 | 
 10 | DATE_FMT = r'\d{4}-(0?[1-9]|1[012])-(0?[1-9]|[12][0-9]|3[01])'
 11 | 
 12 | 
 13 | def exists(path) -> bool:
 14 |     """
 15 |     Check path or file exists
 16 | 
 17 |     Args:
 18 |         path: path or file
 19 |     """
 20 |     if not path: return False
 21 |     return Path(path).is_dir() or Path(path).is_file()
 22 | 
 23 | 
 24 | def abspath(cur_file, parent=0) -> str:
 25 |     """
 26 |     Absolute path
 27 | 
 28 |     Args:
 29 |         cur_file: __file__ or file or path str
 30 |         parent: level of parent to look for
 31 | 
 32 |     Returns:
 33 |         str
 34 |     """
 35 |     p = Path(cur_file)
 36 |     cur_path = p.parent if p.is_file() else p
 37 |     if parent == 0: return str(cur_path).replace('\\', '/')
 38 |     return abspath(cur_file=cur_path.parent, parent=parent - 1)
 39 | 
 40 | 
 41 | def create_folder(path_name: str, is_file=False):
 42 |     """
 43 |     Make folder as well as all parent folders if not exists
 44 | 
 45 |     Args:
 46 |         path_name: full path name
 47 |         is_file: whether input is name of file
 48 |     """
 49 |     p = Path(path_name).parent if is_file else Path(path_name)
 50 |     p.mkdir(parents=True, exist_ok=True)
 51 | 
 52 | 
 53 | def all_files(
 54 |         path_name, keyword='', ext='', full_path=True,
 55 |         has_date=False, date_fmt=DATE_FMT
 56 | ) -> List[str]:
 57 |     """
 58 |     Search all files with criteria
 59 |     Use sort_by_modified to order the result by last modified time
 60 | 
 61 |     Args:
 62 |         path_name: full path name
 63 |         keyword: keyword to search
 64 |         ext: file extensions, split by ','
 65 |         full_path: whether return full path (default True)
 66 |         has_date: whether file name contains a date (default False)
 67 |         date_fmt: date format to check for has_date parameter
 68 | 
 69 |     Returns:
 70 |         list: all file names with criteria fulfilled
 71 |     """
 72 |     p = Path(path_name)
 73 |     if not p.is_dir(): return []
 74 | 
 75 |     keyword = f'*{keyword}*' if keyword else '*'
 76 |     keyword += f'.{ext}' if ext else '.*'
 77 |     r = re.compile(f'.*{date_fmt}.*')
 78 |     return [
 79 |         str(f).replace('\\', '/') if full_path else f.name
 80 |         for f in p.glob(keyword)
 81 |         if f.is_file() and (f.name[0] != '~') and ((not has_date) or r.match(f.name))
 82 |     ]
 83 | 
 84 | 
 85 | def all_folders(
 86 |         path_name, keyword='', has_date=False, date_fmt=DATE_FMT
 87 | ) -> List[str]:
 88 |     """
 89 |     Search all folders with criteria
 90 |     Use sort_by_modified to order the result by last modified time
 91 | 
 92 |     Args:
 93 |         path_name: full path name
 94 |         keyword: keyword to search
 95 |         has_date: whether folder name contains a date (default False)
 96 |         date_fmt: date format to check for has_date parameter
 97 | 
 98 |     Returns:
 99 |         list: all folder names with criteria fulfilled
100 |     """
101 |     p = Path(path_name)
102 |     if not p.is_dir(): return []
103 | 
104 |     r = re.compile(f'.*{date_fmt}.*')
105 |     return [
106 |         str(f).replace('\\', '/')
107 |         for f in p.glob(f'*{keyword}*' if keyword else '*')
108 |         if f.is_dir() and (f.name[0] != '~') and ((not has_date) or r.match(f.name))
109 |     ]
110 | 
111 | 
112 | def sort_by_modified(files_or_folders: list) -> list:
113 |     """
114 |     Sort files or folders by modified time
115 | 
116 |     Args:
117 |         files_or_folders: list of files or folders
118 | 
119 |     Returns:
120 |         list
121 |     """
122 |     return sorted(files_or_folders, key=os.path.getmtime, reverse=True)
123 | 
124 | 
125 | def filter_by_dates(files_or_folders: list, date_fmt=DATE_FMT) -> list:
126 |     """
127 |     Filter files or folders by date patterns
128 | 
129 |     Args:
130 |         files_or_folders: list of files or folders
131 |         date_fmt: date format
132 | 
133 |     Returns:
134 |         list
135 |     """
136 |     r = re.compile(f'.*{date_fmt}.*')
137 |     return list(filter(
138 |         lambda v: r.match(Path(v).name) is not None,
139 |         files_or_folders,
140 |     ))
141 | 
142 | 
143 | def latest_file(path_name, keyword='', ext='', **kwargs) -> str:
144 |     """
145 |     Latest modified file in folder
146 | 
147 |     Args:
148 |         path_name: full path name
149 |         keyword: keyword to search
150 |         ext: file extension
151 | 
152 |     Returns:
153 |         str: latest file name
154 |     """
155 |     files = sort_by_modified(
156 |         all_files(path_name=path_name, keyword=keyword, ext=ext, full_path=True)
157 |     )
158 | 
159 |     if not files:
160 |         from xbbg.io import logs
161 | 
162 |         logger = logs.get_logger(latest_file, level=kwargs.pop('log', 'warning'))
163 |         logger.debug(f'no file in folder: {path_name}')
164 |         return ''
165 | 
166 |     return str(files[0]).replace('\\', '/')
167 | 
168 | 
169 | def modified_time(file_name):
170 |     """
171 |     File modified time in python
172 | 
173 |     Args:
174 |         file_name: file name
175 | 
176 |     Returns:
177 |         pd.Timestamp
178 |     """
179 |     return pd.to_datetime(time.ctime(os.path.getmtime(filename=file_name)))
180 | 
--------------------------------------------------------------------------------
/xbbg/io/logs.py:
--------------------------------------------------------------------------------
  1 | import logging
  2 | 
  3 | from xbbg.core import utils
  4 | 
  5 | LOG_LEVEL = 'CRITICAL'
  6 | LOG_FMT = '%(asctime)s:%(name)s:%(levelname)s:%(message)s'
  7 | 
  8 | 
  9 | def get_logger(name_or_func, level=LOG_LEVEL, types='stream', **kwargs):
 10 |     """
 11 | 
Generate logger
 12 | 
 13 |     Args:
 14 |         name_or_func: logger name or current running function
 15 |         level: level of logs - debug, info, error
 16 |         types: file or stream, or both
 17 | 
 18 |     Returns:
 19 |         logger
 20 | 
 21 |     Examples:
 22 |         >>> get_logger(name_or_func='download_data', level='debug', types='stream')
 23 |         <Logger download_data (DEBUG)>
 24 |         >>> get_logger(name_or_func='preprocess', log_file='pre.log', types='file|stream')
 25 |         <Logger preprocess (CRITICAL)>
 26 |     """
 27 |     if 'log' in kwargs: level = kwargs['log']
 28 |     if isinstance(level, str): level = getattr(logging, level.upper())
 29 |     log_name = utils.func_scope(name_or_func) if callable(name_or_func) else name_or_func
 30 |     logger = logging.getLogger(name=log_name)
 31 |     logger.setLevel(level=level)
 32 | 
 33 |     if not len(logger.handlers):
 34 |         formatter = logging.Formatter(fmt=kwargs.get('fmt', LOG_FMT))
 35 | 
 36 |         if 'file' in types and 'log_file' in kwargs:
 37 |             file_handler = logging.FileHandler(kwargs['log_file'])
 38 |             file_handler.setFormatter(fmt=formatter)
 39 |             logger.addHandler(file_handler)
 40 | 
 41 |         if 'stream' in types:
 42 |             stream_handler = logging.StreamHandler()
 43 |             stream_handler.setFormatter(fmt=formatter)
 44 |             logger.addHandler(stream_handler)
 45 | 
 46 |     return logger
 47 | 
--------------------------------------------------------------------------------
/xbbg/io/param.py:
--------------------------------------------------------------------------------
  1 | import pandas as pd
  2 | 
  3 | import os
  4 | 
  5 | from typing import Union
  6 | from ruamel.yaml import YAML
  7 | from xbbg.io import files
  8 | 
  9 | PKG_PATH = files.abspath(__file__, 1)
 10 | 
 11 | 
 12 | def config_files(cat: str) -> list:
 13 |     """
 14 |     Category files
 15 | 
 16 |     Args:
 17 |         cat: category
 18 | 
 19 |     Returns:
 20 |         list of files that exist
 21 |     """
 22 |     return [
 23 |         f'{r}/markets/{cat}.yml'
 24 |         for r in [
 25 |             PKG_PATH,
 26 |             os.environ.get('BBG_ROOT', '').replace('\\', '/'),
 27 |         ]
 28 |         if files.exists(f'{r}/markets/{cat}.yml')
 29 |     ]
 30 | 
 31 | 
 32 | def load_config(cat: str) -> pd.DataFrame:
 33 |     """
 34 |     Load market info that can be applied to pd.Series directly
 35 | 
 36 |     Args:
 37 |         cat: category name
 38 | 
 39 |     Returns:
 40 |         pd.DataFrame
 41 |     """
 42 |     cfg_files = config_files(cat=cat)
 43 |     cache_cfg = f'{PKG_PATH}/markets/cached/{cat}_cfg.pkl'
 44 |     last_mod = max(map(files.modified_time, cfg_files))
 45 |     if files.exists(cache_cfg) and files.modified_time(cache_cfg) > last_mod:
 46 |         return pd.read_pickle(cache_cfg)
 47 | 
 48 |     config = (
 49 |         pd.concat([
 50 |             load_yaml(cf).apply(pd.Series)
 51 |             for cf in cfg_files
 52 |         ], sort=False)
 53 |     )
 54 |     files.create_folder(cache_cfg, is_file=True)
 55 |     config.to_pickle(cache_cfg)
 56 |     return config
 57 | 
 58 | 
 59 | def load_yaml(yaml_file: str) -> pd.Series:
 60 |     """
 61 |     Load yaml file, using pickle cache when it is newer
 62 | 
 63 |     Args:
 64 |         yaml_file: YAML file name
 65 | 
 66 |     Returns:
 67 |         pd.Series
 68 |     """
 69 |     cache_file = (
 70 |         yaml_file
 71 |         .replace('/markets/', '/markets/cached/')
 72 |         .replace('.yml', '.pkl')
 73 |     )
 74 |     cur_mod = files.modified_time(yaml_file)
 75 |     if files.exists(cache_file) and files.modified_time(cache_file) > cur_mod:
 76 |         return pd.read_pickle(cache_file)
 77 | 
 78 |     with open(yaml_file, 'r') as fp:
 79 |         data = pd.Series(YAML().load(fp))
 80 |     files.create_folder(cache_file, is_file=True)
 81 |     data.to_pickle(cache_file)
 82 |     return data
 83 | 
 84 | 
 85 | def to_hours(num_ts: Union[str, list, int, float]) -> Union[str, list]:
 86 |     """
 87 |     Convert YAML input to hours
 88 | 
 89 |     Args:
 90 |         num_ts: list of numbers in YAML file, e.g., 900, 1700, etc.
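            (integers are read as HHMM - value // 100 hours, value % 100
            minutes; lists convert element-wise; other strings pass through)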
84 | 85 | def to_hours(num_ts: Union[str, list, int, float]) -> Union[str, list]: 86 | """ 87 | Convert YAML input to hours 88 | 89 | Args: 90 | num_ts: number(s) from YAML file, e.g., 900, 1700, etc. 91 | 92 | Returns: 93 | str or list 94 | 95 | Examples: 96 | >>> to_hours([900, 1700]) 97 | ['09:00', '17:00'] 98 | >>> to_hours(901) 99 | '09:01' 100 | >>> to_hours('XYZ') 101 | 'XYZ' 102 | """ 103 | if isinstance(num_ts, str): return num_ts 104 | if isinstance(num_ts, (int, float)): 105 | return f'{int(num_ts / 100):02d}:{int(num_ts % 100):02d}' 106 | return [to_hours(num) for num in num_ts] 107 | -------------------------------------------------------------------------------- /xbbg/io/storage.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | 3 | import os 4 | 5 | from xbbg import const 6 | from xbbg.io import files, logs 7 | from xbbg.core import utils, overrides 8 | 9 | PKG_PATH = files.abspath(__file__, 1) 10 | 11 | 12 | def bar_file(ticker: str, dt, typ='TRADE') -> str: 13 | """ 14 | Data file location for Bloomberg historical data 15 | 16 | Args: 17 | ticker: ticker name 18 | dt: date 19 | typ: [TRADE, BID, ASK, BID_BEST, ASK_BEST, BEST_BID, BEST_ASK] 20 | 21 | Returns: 22 | file location 23 | 24 | Examples: 25 | >>> os.environ['BBG_ROOT'] = '' 26 | >>> bar_file(ticker='ES1 Index', dt='2018-08-01') == '' 27 | True 28 | >>> os.environ['BBG_ROOT'] = '/data/bbg' 29 | >>> bar_file(ticker='ES1 Index', dt='2018-08-01') 30 | '/data/bbg/Index/ES1 Index/TRADE/2018-08-01.parq' 31 | """ 32 | data_path = os.environ.get(overrides.BBG_ROOT, '').replace('\\', '/') 33 | if not data_path: return '' 34 | asset = ticker.split()[-1] 35 | proper_ticker = ticker.replace('/', '_') 36 | cur_dt = pd.Timestamp(dt).strftime('%Y-%m-%d') 37 | return f'{data_path}/{asset}/{proper_ticker}/{typ}/{cur_dt}.parq' 38 | 39 | 40 | def ref_file( 41 | ticker: str, fld: str, has_date=False, cache=False, ext='parq', **kwargs 42 | ) -> str: 43 | """ 44 | Data file location for Bloomberg reference data 45 | 46 | Args: 47 | ticker: ticker name 48 | fld: field 49 | has_date: whether to add current date to the file name 50 | cache: whether to use file cache - with has_date, the latest dated file is reused 51 | ext: file extension 52 | **kwargs: other overrides passed to ref function 53 | 54 | Returns: 55 | str: file location 56 | 57 | Examples: 58 | >>> import shutil 59 | >>> 60 | >>> os.environ['BBG_ROOT'] = '' 61 | >>> ref_file('BLT LN Equity', fld='Crncy') == '' 62 | True 63 | >>> os.environ['BBG_ROOT'] = '/data/bbg' 64 | >>> ref_file('BLT LN Equity', fld='Crncy', cache=True) 65 | '/data/bbg/Equity/BLT LN Equity/Crncy/ovrd=None.parq' 66 | >>> ref_file('BLT LN Equity', fld='Crncy') 67 | '' 68 | >>> cur_dt_ = utils.cur_time(tz=utils.DEFAULT_TZ) 69 | >>> ref_file( 70 | ... 'BLT LN Equity', fld='DVD_Hist_All', has_date=True, cache=True, 71 | ... ).replace(cur_dt_, '[cur_date]') 72 | '/data/bbg/Equity/BLT LN Equity/DVD_Hist_All/asof=[cur_date], ovrd=None.parq' 73 | >>> ref_file( 74 | ... 'BLT LN Equity', fld='DVD_Hist_All', has_date=True, 75 | ... cache=True, DVD_Start_Dt='20180101', 76 | ... ).replace(cur_dt_, '[cur_date]')[:-5] 77 | '/data/bbg/Equity/BLT LN Equity/DVD_Hist_All/asof=[cur_date], DVD_Start_Dt=20180101' 78 | >>> sample = 'asof=2018-11-02, DVD_Start_Dt=20180101, DVD_End_Dt=20180501.pkl' 79 | >>> root_path = 'xbbg/tests/data' 80 | >>> sub_path = f'{root_path}/Equity/AAPL US Equity/DVD_Hist_All' 81 | >>> os.environ['BBG_ROOT'] = root_path 82 | >>> for tmp_file in files.all_files(sub_path): os.remove(tmp_file) 83 | >>> files.create_folder(sub_path) 84 | >>> sample in shutil.copy(f'{root_path}/{sample}', sub_path) 85 | True 86 | >>> new_file = ref_file( 87 | ...
'AAPL US Equity', 'DVD_Hist_All', DVD_Start_Dt='20180101', 88 | ... has_date=True, cache=True, ext='pkl' 89 | ... ) 90 | >>> new_file.split('/')[-1] == f'asof={cur_dt_}, DVD_Start_Dt=20180101.pkl' 91 | True 92 | >>> old_file = 'asof=2018-11-02, DVD_Start_Dt=20180101, DVD_End_Dt=20180501.pkl' 93 | >>> old_full = '/'.join(new_file.split('/')[:-1] + [old_file]) 94 | >>> updated_file = old_full.replace('2018-11-02', cur_dt_) 95 | >>> updated_file in shutil.copy(old_full, updated_file) 96 | True 97 | >>> exist_file = ref_file( 98 | ... 'AAPL US Equity', 'DVD_Hist_All', DVD_Start_Dt='20180101', 99 | ... has_date=True, cache=True, ext='pkl' 100 | ... ) 101 | >>> exist_file == updated_file 102 | False 103 | >>> exist_file = ref_file( 104 | ... 'AAPL US Equity', 'DVD_Hist_All', DVD_Start_Dt='20180101', 105 | ... DVD_End_Dt='20180501', has_date=True, cache=True, ext='pkl' 106 | ... ) 107 | >>> exist_file == updated_file 108 | True 109 | """ 110 | data_path = os.environ.get(overrides.BBG_ROOT, '').replace('\\', '/') 111 | if (not data_path) or (not cache): return '' 112 | 113 | proper_ticker = ticker.replace('/', '_') 114 | cache_days = kwargs.pop('cache_days', 10) 115 | root = f'{data_path}/{ticker.split()[-1]}/{proper_ticker}/{fld}' 116 | 117 | ref_kw = {k: v for k, v in kwargs.items() if k not in overrides.PRSV_COLS} 118 | if len(ref_kw) > 0: info = utils.to_str(ref_kw)[1:-1].replace('|', '_') 119 | else: info = 'ovrd=None' 120 | 121 | # Scan cached files dated within the last cache_days days, newest first 122 | if has_date: 123 | cache_file = f'{root}/asof=[cur_date], {info}.{ext}' 124 | cur_dt = utils.cur_time() 125 | start_dt = pd.date_range(end=cur_dt, freq=f'{cache_days}D', periods=2)[0] 126 | for dt in pd.date_range(start=start_dt, end=cur_dt, normalize=True)[1:][::-1]: 127 | cur_file = cache_file.replace('[cur_date]', dt.strftime("%Y-%m-%d")) 128 | if files.exists(cur_file): return cur_file 129 | return cache_file.replace('[cur_date]', cur_dt) 130 | 131 | return f'{root}/{info}.{ext}' 132 | 133 |
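The dated-cache scan above is the heart of the caching behaviour; a condensed, standalone sketch of the same idea (find_cached and its arguments are hypothetical, not part of the module):

    import pandas as pd
    from xbbg.io import files

    def find_cached(root: str, info: str, ext: str, cache_days: int = 10) -> str:
        # Look for asof=YYYY-MM-DD files over the last cache_days days, newest first
        cur = pd.Timestamp('now')
        for dt in pd.date_range(end=cur, periods=cache_days, normalize=True)[::-1]:
            cand = f'{root}/asof={dt:%Y-%m-%d}, {info}.{ext}'
            if files.exists(cand): return cand
        # Fall back to a fresh file stamped with today
        return f'{root}/asof={cur:%Y-%m-%d}, {info}.{ext}'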
134 | def save_intraday(data: pd.DataFrame, ticker: str, dt, typ='TRADE', **kwargs): 135 | """ 136 | Save intraday data once the market day has finished 137 | 138 | Args: 139 | data: data 140 | ticker: ticker 141 | dt: date 142 | typ: [TRADE, BID, ASK, BID_BEST, ASK_BEST, BEST_BID, BEST_ASK] 143 | 144 | Examples: 145 | >>> os.environ['BBG_ROOT'] = f'{PKG_PATH}/tests/data' 146 | >>> sample = pd.read_parquet(f'{PKG_PATH}/tests/data/aapl.parq') 147 | >>> save_intraday(sample, 'AAPL US Equity', '2018-11-02') 148 | >>> # Invalid exchange 149 | >>> save_intraday(sample, 'AAPL XX Equity', '2018-11-02') 150 | >>> # Invalid empty data 151 | >>> save_intraday(pd.DataFrame(), 'AAPL US Equity', '2018-11-02') 152 | >>> # Invalid date - too close 153 | >>> cur_dt_ = utils.cur_time() 154 | >>> save_intraday(sample, 'AAPL US Equity', cur_dt_) 155 | """ 156 | cur_dt = pd.Timestamp(dt).strftime('%Y-%m-%d') 157 | logger = logs.get_logger(save_intraday, **kwargs) 158 | info = f'{ticker} / {cur_dt} / {typ}' 159 | data_file = bar_file(ticker=ticker, dt=dt, typ=typ) 160 | if not data_file: return 161 | 162 | if data.empty: 163 | logger.warning(f'data is empty for {info} ...') 164 | return 165 | 166 | exch = const.exch_info(ticker=ticker, **kwargs) 167 | if exch.empty: return 168 | 169 | end_time = pd.Timestamp( 170 | const.market_timing(ticker=ticker, dt=dt, timing='FINISHED', **kwargs) 171 | ).tz_localize(exch.tz) 172 | now = pd.Timestamp('now', tz=exch.tz) - pd.Timedelta('1H') 173 | 174 | if end_time > now: 175 | logger.debug(f'skip saving as market close ({end_time}) is later than now - 1H ({now}) ...') 176 | return 177 | 178 | logger.info(f'saving data to {data_file} ...') 179 | files.create_folder(data_file, is_file=True) 180 | data.to_parquet(data_file) 181 | -------------------------------------------------------------------------------- /xbbg/markets/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alpha-xone/xbbg/266d68a53bda61745191e1a86b0f1f32b33ae082/xbbg/markets/__init__.py -------------------------------------------------------------------------------- /xbbg/markets/assets.yml: -------------------------------------------------------------------------------- 1 | # Override or expand this list by providing 2 | # os.environ['BBG_ROOT']/markets/assets.yml 3 | # 4 | # New exchanges should be added to exch.yml 5 | 6 | Comdty: 7 | - tickers: [CL] 8 | exch: NYME 9 | freq: M 10 | is_fut: True 11 | - tickers: [CO, XW] 12 | exch: FuturesEuropeICE 13 | freq: M 14 | is_fut: True 15 | - tickers: [IOE] 16 | exch: CommoditiesDalian 17 | freq: M 18 | is_fut: True 19 | key_month: [J, K, U] 20 | - tickers: [RBT] 21 | exch: CommoditiesDalian 22 | freq: M 23 | is_fut: True 24 | key_month: [J, K, U] 25 | - tickers: [HG] 26 | exch: CMX 27 | freq: Q 28 | is_fut: True 29 | - tickers: [SM, W, C] 30 | exch: CBT 31 | freq: Q 32 | is_fut: True 33 | 34 | Curncy: 35 | - tickers: [ 36 | JPY, AUD, HKD, CNHHKD, CNH, EUR, GBP, GBPAUD, SEK, XAU, 37 | GBPEUR, AUDGBP, GBPHKD, KWN, IRN, NTN, CNH1M, ZAR, CHF 38 | ] 39 | exch: CurrencyGeneric 40 | - tickers: [KRW] 41 | exch: CurrencySouthKorea 42 | - tickers: [TWD] 43 | exch: CurrencyTaiwan 44 | - tickers: [CNYHKD, CNY] 45 | exch: CurrencyChina 46 | - tickers: [INR] 47 | exch: CurrencyIndia 48 | - tickers: [IRD] 49 | exch: CurrencyDubai 50 | freq: M 51 | is_fut: True 52 | - tickers: [XID] 53 | exch: CurrencySingapore 54 | freq: M 55 | is_fut: True 56 | - tickers: [INT] 57 | exch: CurrencyIndia 58 | freq: M 59 | is_fut: True 60 | - tickers: [DXY] 61 | exch: CurrencyICE 62 | 63 | Equity: 64 | - exch_codes: [AU] 65 | exch: EquityAustralia 66 | - exch_codes: [JT, JP] 67 | exch: EquityJapan 68 | - exch_codes: [KS] 69 | exch: EquitySouthKorea 70 | - exch_codes: [TT] 71 | exch: EquityTaiwan 72 | - exch_codes: [HK] 73 | exch: EquityHongKong 74 | - exch_codes: [CH, CG, CS] 75 | exch: EquityChina 76 | - exch_codes: [IN, IS, IB] 77 | exch: EquityIndia 78 | - exch_codes: [LI, LN, FP] 79 | exch: EquityLondon 80 | - exch_codes: [NA] 81 | exch: EquityAmsterdam 82 | - exch_codes: [ID, GR] 83 | exch: EquityDublin 84 | - exch_codes: [SQ, SM] 85 | exch: EquitySpain 86 | - exch_codes: [SS] 87 | exch: EquityStockholm 88 | - exch_codes: [US, UN] 89 | exch: EquityUS 90 | - exch_codes: [IS, IB, IN] 91 | exch: EquityFuturesIndia 92 | is_fut: True 93 | 94 | Index: 95 | - tickers: [XP] 96 | exch: FuturesAustralia 97 | freq: Q 98 | is_fut: True 99 | - tickers: [AS51] 100 | exch: IndexAustralia 101 | - tickers: [NKY, TPX] 102 | exch: EquityJapan 103 | - tickers: [NK, TP] 104 | exch: FuturesJapan 105 | freq: Q 106 | is_fut: True 107 | - tickers: [KOSPI2, KOSPBMET] 108 | exch: IndexSouthKorea 109 | - tickers: [KM] 110 | exch: FuturesSouthKorea 111 | freq: Q 112 | is_fut: True 113 | - tickers: [TWSE] 114 | exch: EquityTaiwan 115 | - tickers: [TW] 116 | exch: FuturesTaiwan 117 | freq: M 118 | is_fut: True 119 | - tickers: [HSI, HSCEI, HCT] 120 | exch: EquityHongKong 121 | - tickers: [HI, HC] 122 | exch: FuturesHongKong 123 | freq: M 124 | is_fut: True 125 | - tickers: [ 126 | SHSZ300, SHCOMP, SZCOMP,
SZ399006, SH000905 127 | ] 128 | exch: EquityChina 129 | - tickers: [ 130 | SPX, INDU, CCMP, RTY, RAY, SOX, 131 | S5FINL, S5INSU, S5TRAN, S5UTIL, S5ENRS, VIX, 132 | S5IOIL, S5STEL, S5ITEL, S5SECO, S5INFT, SOX, 133 | EWAIV, EWJIV, EWYIV, EWTIV, EWHIV, FXIIV, INDAIV, 134 | AIAIV, AAXJIV, EEMIV, 135 | ] 136 | exch: IndexUS 137 | - tickers: [XU] 138 | exch: FuturesSingapore 139 | freq: M 140 | is_fut: True 141 | - tickers: [NZ] 142 | exch: IndexFuturesIndia 143 | freq: M 144 | is_fut: True 145 | - tickers: [NIFTY] 146 | exch: IndexFuturesIndia 147 | - tickers: [ES, DM, NQ] 148 | exch: CME 149 | freq: Q 150 | is_fut: True 151 | - tickers: [Z] 152 | exch: FuturesFinancialsICE 153 | freq: Q 154 | is_fut: True 155 | - tickers: [OMX] 156 | exch: IndexLondon 157 | - tickers: [UX] 158 | exch: FuturesCBOE 159 | freq: M 160 | is_fut: True 161 | has_sprd: True 162 | - tickers: [UKX, SXXE] 163 | exch: IndexLondon 164 | - tickers: [SX5E, SX5P] 165 | exch: IndexEurope1 166 | - tickers: [BE500] 167 | exch: IndexEurope2 168 | - tickers: [MSER, MSPE] 169 | exch: IndexEurope3 170 | - tickers: [ 171 | USGG2YR, USGG10YR, USYC2Y10, USYC1030, USGG30YR 172 | ] 173 | exch: IndexUS 174 | - tickers: [MES] 175 | exch: FuturesNYFICE 176 | freq: Q 177 | is_fut: True 178 | - tickers: [TESTTCK] 179 | exch: TestExch 180 | 181 | Corp: 182 | - {} 183 | -------------------------------------------------------------------------------- /xbbg/markets/cached/Comdty_cfg.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alpha-xone/xbbg/266d68a53bda61745191e1a86b0f1f32b33ae082/xbbg/markets/cached/Comdty_cfg.pkl -------------------------------------------------------------------------------- /xbbg/markets/cached/Curncy_cfg.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alpha-xone/xbbg/266d68a53bda61745191e1a86b0f1f32b33ae082/xbbg/markets/cached/Curncy_cfg.pkl -------------------------------------------------------------------------------- /xbbg/markets/cached/Equity_cfg.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alpha-xone/xbbg/266d68a53bda61745191e1a86b0f1f32b33ae082/xbbg/markets/cached/Equity_cfg.pkl -------------------------------------------------------------------------------- /xbbg/markets/cached/Index_cfg.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alpha-xone/xbbg/266d68a53bda61745191e1a86b0f1f32b33ae082/xbbg/markets/cached/Index_cfg.pkl -------------------------------------------------------------------------------- /xbbg/markets/cached/assets.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alpha-xone/xbbg/266d68a53bda61745191e1a86b0f1f32b33ae082/xbbg/markets/cached/assets.pkl -------------------------------------------------------------------------------- /xbbg/markets/cached/ccy.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alpha-xone/xbbg/266d68a53bda61745191e1a86b0f1f32b33ae082/xbbg/markets/cached/ccy.pkl -------------------------------------------------------------------------------- /xbbg/markets/cached/ccy_cfg.pkl: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/alpha-xone/xbbg/266d68a53bda61745191e1a86b0f1f32b33ae082/xbbg/markets/cached/ccy_cfg.pkl -------------------------------------------------------------------------------- /xbbg/markets/cached/exch.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alpha-xone/xbbg/266d68a53bda61745191e1a86b0f1f32b33ae082/xbbg/markets/cached/exch.pkl -------------------------------------------------------------------------------- /xbbg/markets/cached/exch_cfg.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alpha-xone/xbbg/266d68a53bda61745191e1a86b0f1f32b33ae082/xbbg/markets/cached/exch_cfg.pkl -------------------------------------------------------------------------------- /xbbg/markets/ccy.yml: -------------------------------------------------------------------------------- 1 | # ======================================== 2 | # FX = (BDP(ticker) / factor) ** power 3 | # 4 | # where: 5 | # factor = power = 1 if left blank 6 | # ======================================== 7 | 8 | # Override or expand this list by providing 9 | # os.environ['BBG_ROOT']/markets/ccy.yml 10 | 11 | AUDUSD: 12 | ticker: AUD Curncy 13 | power: -1 14 | 15 | JPYUSD: 16 | ticker: JPY Curncy 17 | 18 | KRWUSD: 19 | ticker: KRW Curncy 20 | 21 | KWN+1MUSD: 22 | ticker: KRW+1M Curncy 23 | 24 | TWDUSD: 25 | ticker: TWD Curncy 26 | 27 | NTN+1MUSD: 28 | ticker: NTN+1M Curncy 29 | 30 | CNYHKD: 31 | ticker: CNYHKD Curncy 32 | power: -1 33 | 34 | CNHHKD: 35 | ticker: CNHHKD Curncy 36 | power: -1 37 | 38 | HKDUSD: 39 | ticker: HKD Curncy 40 | 41 | EURUSD: 42 | ticker: EUR Curncy 43 | power: -1 44 | 45 | GBPUSD: 46 | ticker: GBP Curncy 47 | power: -1 48 | 49 | GBpUSD: 50 | ticker: GBP Curncy 51 | factor: 100 52 | power: -1 53 | 54 | GBPAUD: 55 | ticker: GBPAUD Curncy 56 | power: -1 57 | 58 | GBpAUD: 59 | ticker: GBPAUD Curncy 60 | factor: 100 61 | power: -1 62 | 63 | GBPEUR: 64 | ticker: GBPEUR Curncy 65 | power: -1 66 | 67 | GBpEUR: 68 | ticker: GBPEUR Curncy 69 | factor: 100 70 | power: -1 71 | 72 | GBPHKD: 73 | ticker: GBPHKD Curncy 74 | power: -1 75 | 76 | GBpHKD: 77 | ticker: GBPHKD Curncy 78 | factor: 100 79 | power: -1 80 | 81 | INT1USD: 82 | ticker: INT1 Curncy 83 | 84 | INT2USD: 85 | ticker: INT2 Curncy 86 | 87 | IRD1USD: 88 | ticker: IRD1 Curncy 89 | factor: 10000 90 | power: -1 91 | 92 | IRD2USD: 93 | ticker: IRD2 Curncy 94 | factor: 10000 95 | power: -1 96 | 97 | XID1USD: 98 | ticker: XID1 Curncy 99 | factor: 10000 100 | power: -1 101 | 102 | XID2USD: 103 | ticker: XID2 Curncy 104 | factor: 10000 105 | power: -1 106 | -------------------------------------------------------------------------------- /xbbg/markets/exch.yml: -------------------------------------------------------------------------------- 1 | # ========================================= 2 | # Session definitions for intraday bars 3 | # 4 | # [allday, day, am, pm, night, pre, post] 5 | # ========================================= 6 | 7 | # Override or expand this list by providing 8 | # os.environ['BBG_ROOT']/markets/exch.yml 9 | 10 | # ------------ 11 | # Equities 12 | # ------------ 13 | 14 | EquityAustralia: 15 | tz: Australia/Sydney 16 | allday: [0959, 1616] 17 | day: [1000, 1600] 18 | post: [1601, 1616] 19 | 20 | EquityJapan: 21 | tz: Asia/Tokyo 22 | allday: [800, 1545] 23 | day: [901, 1458] 24 | am: [901, 1130] 25 | pm: [1230, 1458] 26 | pre: [0800, 901] 27 | post: [1459, 1545] 28 | 29 | EquitySouthKorea: 
30 | tz: Asia/Seoul 31 | allday: [900, 1535] 32 | day: [900, 1520] 33 | post: [1521, 1535] 34 | 35 | EquityTaiwan: 36 | tz: Asia/Taipei 37 | allday: [900, 1335] 38 | day: [900, 1325] 39 | post: [1326, 1335] 40 | 41 | EquityHongKong: 42 | tz: Asia/Hong_Kong 43 | allday: [845, 1615] 44 | day: [930, 1600] 45 | am: [930, 1200] 46 | pm: [1300, 1600] 47 | pre: [845, 930] 48 | post: [1601, 1615] 49 | 50 | EquityChina: 51 | tz: Asia/Shanghai 52 | allday: [915, 1505] 53 | day: [930, 1500] 54 | am: [930, 1130] 55 | pm: [1300, 1500] 56 | pre: [0915, 930] 57 | 58 | EquityIndia: 59 | tz: Asia/Calcutta 60 | allday: [900, 1710] 61 | day: [900, 1530] 62 | post: [1531, 1710] 63 | 64 | EquityLondon: 65 | tz: Europe/London 66 | allday: [800, 1700] 67 | day: [800, 1630] 68 | post: [1631, 1700] 69 | 70 | EquityDublin: 71 | tz: Europe/London 72 | allday: [800, 1700] 73 | day: [800, 1630] 74 | post: [1631, 1700] 75 | 76 | EquityAmsterdam: 77 | tz: Europe/London 78 | allday: [800, 1700] 79 | day: [800, 1630] 80 | post: [1631, 1700] 81 | 82 | EquitySpain: 83 | tz: Europe/London 84 | allday: [800, 1700] 85 | day: [800, 1630] 86 | post: [1631, 1700] 87 | 88 | EquityFrance: 89 | tz: Europe/London 90 | allday: [800, 1700] 91 | day: [800, 1630] 92 | post: [1631, 1700] 93 | 94 | EquityStockholm: 95 | tz: Europe/London 96 | allday: [800, 1700] 97 | day: [800, 1630] 98 | post: [1631, 1700] 99 | 100 | EquityUS: 101 | tz: America/New_York 102 | allday: [400, 2000] 103 | day: [0930, 1600] 104 | pre: [400, 0930] 105 | post: [1601, 2000] 106 | 107 | # -------------- 108 | # Currencies 109 | # -------------- 110 | 111 | CurrencyGeneric: 112 | tz: America/New_York 113 | allday: [1701, 1700] 114 | 115 | CurrencySouthKorea: 116 | tz: Asia/Seoul 117 | allday: [900, 1530] 118 | 119 | CurrencyTaiwan: 120 | tz: Asia/Taipei 121 | allday: [900, 1600] 122 | 123 | CurrencyChina: 124 | tz: Asia/Shanghai 125 | allday: [930, 2330] 126 | 127 | CurrencyIndia: 128 | tz: Asia/Calcutta 129 | allday: [900, 1700] 130 | 131 | CurrencyDubai: 132 | tz: Asia/Dubai 133 | allday: [700, 2359] 134 | 135 | CurrencySingapore: 136 | tz: Asia/Singapore 137 | allday: [1950, 1935] 138 | day: [725, 1935] 139 | night: [1950, 445] 140 | 141 | CurrencyICE: 142 | tz: America/New_York 143 | allday: [1830, 1730] 144 | 145 | # ----------------------------- 146 | # Index|Commodity (Futures) 147 | # ----------------------------- 148 | 149 | FuturesAustralia: 150 | tz: Australia/Sydney 151 | allday: [1710, 1630] 152 | day: [950, 1630] 153 | night: [1710, 700] 154 | 155 | IndexAustralia: 156 | tz: Australia/Sydney 157 | allday: [1000, 1620] 158 | 159 | FuturesJapan: 160 | tz: Asia/Tokyo 161 | allday: [1630, 1515] 162 | day: [845, 1515] 163 | night: [1630, 530] 164 | 165 | FuturesSouthKorea: 166 | tz: Asia/Seoul 167 | allday: [1800, 1545] 168 | day: [900, 1545] 169 | night: [1800, 500] 170 | 171 | IndexSouthKorea: 172 | tz: Asia/Seoul 173 | allday: [830, 1535] 174 | day: [900, 1520] 175 | post: [1521, 1535] 176 | 177 | FuturesTaiwan: 178 | tz: Asia/Taipei 179 | allday: [1415, 1350] 180 | day: [845, 1350] 181 | night: [1415, 450] 182 | 183 | FuturesHongKong: 184 | tz: Asia/Hong_Kong 185 | allday: [1715, 1630] 186 | day: [915, 1630] 187 | night: [1715, 100] 188 | 189 | CommoditiesShanghai: 190 | tz: Asia/Shanghai 191 | allday: [2100, 1500] 192 | day: [900, 1500] 193 | am: [900, 1130] 194 | pm: [1330, 1500] 195 | night: [2100, 2300] 196 | 197 | CommoditiesDalian: 198 | tz: Asia/Shanghai 199 | allday: [2100, 1500] 200 | day: [900, 1130] 201 | am: [900, 1130] 202 | pm: [1330, 
1500] 203 | night: [2100, 2330] 204 | 205 | FuturesSingapore: 206 | tz: Asia/Singapore 207 | allday: [1700, 1635] 208 | day: [900, 1635] 209 | night: [1700, 445] 210 | 211 | EquityFuturesIndia: 212 | tz: Asia/Calcutta 213 | allday: [915, 1645] 214 | day: [915, 1530] 215 | post: [1531, 1645] 216 | 217 | IndexFuturesIndia: 218 | tz: Asia/Calcutta 219 | allday: [915, 1530] 220 | 221 | IndexLondon: 222 | tz: Europe/London 223 | allday: [800, 1635] 224 | day: [800, 1630] 225 | post: [1631, 1635] 226 | 227 | IndexEurope1: 228 | tz: Europe/London 229 | allday: [800, 1700] 230 | 231 | IndexEurope2: 232 | tz: Europe/London 233 | allday: [800, 1715] 234 | 235 | IndexEurope3: 236 | tz: Europe/London 237 | allday: [800, 1830] 238 | 239 | FuturesFinancialsICE: 240 | tz: Europe/London 241 | allday: [100, 2100] 242 | 243 | FuturesEuropeICE: 244 | tz: Europe/London 245 | allday: [100, 2300] 246 | 247 | IndexUS: 248 | tz: America/New_York 249 | allday: [930, 1600] 250 | 251 | CME: 252 | tz: America/New_York 253 | allday: [1800, 1700] 254 | day: [800, 1700] 255 | 256 | FuturesNYFICE: 257 | tz: America/New_York 258 | allday: [2000, 1800] 259 | 260 | IndexVIX: 261 | tz: America/New_York 262 | allday: [300, 1630] 263 | day: [315, 1630] 264 | 265 | FuturesCBOE: 266 | tz: America/New_York 267 | allday: [1800, 1700] 268 | 269 | IndexYieldCurve: 270 | tz: America/New_York 271 | allday: [1800, 1720] 272 | 273 | NYME: 274 | tz: America/New_York 275 | allday: [1800, 1700] 276 | 277 | CMX: 278 | tz: America/New_York 279 | allday: [1800, 1700] 280 | 281 | CBT: 282 | tz: America/New_York 283 | allday: [1800, 1700] 284 | 285 | TestExch: 286 | tz: America/New_York 287 | -------------------------------------------------------------------------------- /xbbg/pipeline.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import numpy as np 3 | 4 | from typing import Union 5 | 6 | 7 | def get_series(data: Union[pd.Series, pd.DataFrame], col='close') -> pd.DataFrame: 8 | """ 9 | Get selected column (default close) from intraday data 10 | 11 | Args: 12 | data: intraday data 13 | col: column to return 14 | 15 | Returns: 16 | pd.DataFrame 17 | """ 18 | if isinstance(data, pd.Series): return pd.DataFrame(data) 19 | if not isinstance(data.columns, pd.MultiIndex): return data 20 | return data.xs(col, axis=1, level=1) 21 |
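A small sketch of get_series on multi-ticker intraday data (the frame below is synthetic, not repo data):

    import pandas as pd
    from xbbg import pipeline

    idx = pd.date_range('2021-01-04 09:30', periods=3, freq='min')
    cols = pd.MultiIndex.from_product([['SPY US Equity'], ['open', 'close']])
    intraday = pd.DataFrame(1., index=idx, columns=cols)
    closes = pipeline.get_series(intraday, col='close')  # columns: ['SPY US Equity']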
22 | 23 | def standard_cols(data: pd.DataFrame, col_maps: dict = None) -> pd.DataFrame: 24 | """ 25 | Rename data columns to snake case 26 | 27 | Args: 28 | data: input data 29 | col_maps: column maps 30 | 31 | Returns: 32 | pd.DataFrame 33 | 34 | Examples: 35 | >>> dvd = pd.read_pickle('xbbg/tests/data/sample_dvd_mc_raw.pkl').iloc[:, :4] 36 | >>> dvd 37 | Declared Date Ex-Date Record Date Payable Date 38 | MC FP Equity 2019-07-24 2019-12-06 2019-12-09 2019-12-10 39 | MC FP Equity 2019-01-29 2019-04-25 2019-04-26 2019-04-29 40 | MC FP Equity 2018-07-24 2018-12-04 2018-12-05 2018-12-06 41 | MC FP Equity 2018-01-25 2018-04-17 2018-04-18 2018-04-19 42 | >>> dvd.pipe(standard_cols) 43 | declared_date ex_date record_date payable_date 44 | MC FP Equity 2019-07-24 2019-12-06 2019-12-09 2019-12-10 45 | MC FP Equity 2019-01-29 2019-04-25 2019-04-26 2019-04-29 46 | MC FP Equity 2018-07-24 2018-12-04 2018-12-05 2018-12-06 47 | MC FP Equity 2018-01-25 2018-04-17 2018-04-18 2018-04-19 48 | >>> dvd.pipe(standard_cols, col_maps={'Declared Date': 'dec_date'}) 49 | dec_date ex_date record_date payable_date 50 | MC FP Equity 2019-07-24 2019-12-06 2019-12-09 2019-12-10 51 | MC FP Equity 2019-01-29 2019-04-25 2019-04-26 2019-04-29 52 | MC FP Equity 2018-07-24 2018-12-04 2018-12-05 2018-12-06 53 | MC FP Equity 2018-01-25 2018-04-17 2018-04-18 2018-04-19 54 | """ 55 | if col_maps is None: col_maps = dict() 56 | return data.rename( 57 | columns=lambda vv: col_maps.get( 58 | vv, vv.lower().replace(' ', '_').replace('-', '_') 59 | ) 60 | ) 61 | 62 | 63 | def apply_fx( 64 | data: Union[pd.Series, pd.DataFrame], 65 | fx: Union[int, float, pd.Series, pd.DataFrame], 66 | power=-1., 67 | ) -> pd.DataFrame: 68 | """ 69 | Apply FX to data 70 | 71 | Args: 72 | data: price data 73 | fx: FX price data 74 | power: exponent applied to the FX rate (default -1) 75 | 76 | Returns: 77 | Price * FX ** Power 78 | where FX uses latest available price 79 | 80 | Examples: 81 | >>> pd.options.display.precision = 2 82 | >>> rms = ( 83 | ... pd.read_pickle('xbbg/tests/data/sample_rms_ib1.pkl') 84 | ... .pipe(get_series, col='close') 85 | ... .apply(pd.to_numeric, errors='ignore') 86 | ... .rename_axis(columns=None) 87 | ... .pipe(dropna) 88 | ... ).tail() 89 | >>> eur = pd.read_pickle('xbbg/tests/data/sample_eur_ib.pkl') 90 | >>> rms 91 | RMS FP Equity 92 | 2020-01-17 16:26:00+00:00 725.4 93 | 2020-01-17 16:27:00+00:00 725.2 94 | 2020-01-17 16:28:00+00:00 725.4 95 | 2020-01-17 16:29:00+00:00 725.0 96 | 2020-01-17 16:35:00+00:00 725.6 97 | >>> rms.iloc[:, 0].pipe(apply_fx, fx=eur) 98 | RMS FP Equity 99 | 2020-01-17 16:26:00+00:00 653.98 100 | 2020-01-17 16:27:00+00:00 653.80 101 | 2020-01-17 16:28:00+00:00 653.98 102 | 2020-01-17 16:29:00+00:00 653.57 103 | 2020-01-17 16:35:00+00:00 654.05 104 | >>> rms.pipe(apply_fx, fx=1.1090) 105 | RMS FP Equity 106 | 2020-01-17 16:26:00+00:00 654.10 107 | 2020-01-17 16:27:00+00:00 653.92 108 | 2020-01-17 16:28:00+00:00 654.10 109 | 2020-01-17 16:29:00+00:00 653.74 110 | 2020-01-17 16:35:00+00:00 654.28 111 | """ 112 | if isinstance(data, pd.Series): data = pd.DataFrame(data) 113 | 114 | if isinstance(fx, (int, float)): 115 | return data.dropna(how='all').mul(fx ** power) 116 | 117 | add_fx = pd.concat([data, fx.pipe(get_series).iloc[:, -1]], axis=1) 118 | add_fx.iloc[:, -1] = add_fx.iloc[:, -1].ffill() 119 | return data.mul(add_fx.iloc[:, -1].pow(power), axis=0).dropna(how='all') 120 | 121 |
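A scalar sanity check of the Price * FX ** power convention (values invented); this mirrors the factor/power setup documented in markets/ccy.yml:

    import pandas as pd
    from xbbg import pipeline

    px = pd.Series([100., 102.], name='X US Equity')
    # power=-1 (the default) divides prices by the FX rate
    usd = pipeline.apply_fx(px, fx=2.)
    assert usd.iloc[0, 0] == 50.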
122 | def daily_stats(data: Union[pd.Series, pd.DataFrame], **kwargs) -> pd.DataFrame: 123 | """ 124 | Daily stats for given data 125 | 126 | Examples: 127 | >>> pd.options.display.precision = 2 128 | >>> ( 129 | ... pd.concat([ 130 | ... pd.read_pickle('xbbg/tests/data/sample_rms_ib0.pkl'), 131 | ... pd.read_pickle('xbbg/tests/data/sample_rms_ib1.pkl'), 132 | ... ], sort=False) 133 | ... .pipe(get_series, col='close') 134 | ... .pipe(daily_stats) 135 | ... )['RMS FP Equity'].iloc[:, :5] 136 | count mean std min 10% 137 | 2020-01-16 00:00:00+00:00 434.0 711.16 1.11 708.6 709.6 138 | 2020-01-17 00:00:00+00:00 437.0 721.53 1.66 717.0 719.0 139 | """ 140 | if data.empty: return pd.DataFrame() 141 | if 'percentiles' not in kwargs: kwargs['percentiles'] = [.1, .25, .5, .75, .9] 142 | return data.groupby(data.index.floor('d')).describe(**kwargs) 143 | 144 | 145 | def dropna( 146 | data: Union[pd.Series, pd.DataFrame], 147 | cols: Union[int, list] = 0, 148 | ) -> Union[pd.Series, pd.DataFrame]: 149 | """ 150 | Drop rows that are all-NA in the selected columns 151 | """ 152 | if isinstance(data, pd.Series): return data.dropna() 153 | if isinstance(cols, int): cols = [cols] 154 | return data.dropna(how='all', subset=data.columns[cols]) 155 | 156 | 157 | def format_raw(data: pd.DataFrame) -> pd.DataFrame: 158 | """ 159 | Convert object columns to numeric or datetime where possible 160 | 161 | Examples: 162 | >>> dvd = pd.read_pickle('xbbg/tests/data/sample_dvd_mc_raw.pkl') 163 | >>> dvd.dtypes 164 | Declared Date object 165 | Ex-Date object 166 | Record Date object 167 | Payable Date object 168 | Dividend Amount float64 169 | Dividend Frequency object 170 | Dividend Type object 171 | dtype: object 172 | >>> dvd.pipe(format_raw).dtypes 173 | Declared Date datetime64[ns] 174 | Ex-Date datetime64[ns] 175 | Record Date datetime64[ns] 176 | Payable Date datetime64[ns] 177 | Dividend Amount float64 178 | Dividend Frequency object 179 | Dividend Type object 180 | dtype: object 181 | """ 182 | res = data.apply(pd.to_numeric, errors='ignore') 183 | dtypes = data.dtypes 184 | cols = dtypes.loc[ 185 | dtypes.isin([np.dtype('O')]) | data.columns.str.contains('UPDATE_STAMP') 186 | ].index 187 | if not cols.empty: 188 | res.loc[:, cols] = data.loc[:, cols].apply(pd.to_datetime, errors='ignore') 189 | return res 190 | 191 | 192 | def add_ticker(data: pd.DataFrame, ticker: str) -> pd.DataFrame: 193 | """ 194 | Add ticker as first layer of multi-index 195 | 196 | Args: 197 | data: raw data 198 | ticker: ticker 199 | 200 | Returns: 201 | pd.DataFrame 202 | 203 | Examples: 204 | >>> ( 205 | ... pd.read_parquet('xbbg/tests/data/sample_bdib.parq') 206 | ... .pipe(add_ticker, ticker='SPY US Equity') 207 | ... .pipe(get_series, col='close') 208 | ... ) 209 | SPY US Equity 210 | 2018-12-28 09:30:00-05:00 249.67 211 | 2018-12-28 09:31:00-05:00 249.54 212 | 2018-12-28 09:32:00-05:00 249.22 213 | 2018-12-28 09:33:00-05:00 249.01 214 | 2018-12-28 09:34:00-05:00 248.86 215 | """ 216 | data.columns = pd.MultiIndex.from_product([ 217 | [ticker], data.head().rename(columns={'numEvents': 'num_trds'}).columns 218 | ]) 219 | return data 220 | 221 | 222 | def since_year(data: pd.DataFrame, year: int) -> pd.DataFrame: 223 | """ 224 | Remove columns prior to the given year. 225 | To make this work, column names must contain the year explicitly.
226 | 227 | Args: 228 | data: raw data 229 | year: starting year 230 | 231 | Returns: 232 | pd.DataFrame 233 | 234 | Examples: 235 | >>> pd.options.display.width = 120 236 | >>> pd.options.display.max_columns = 10 237 | >>> pd.options.display.precision = 2 238 | >>> amzn = pd.read_pickle('xbbg/tests/data/sample_earning_amzn.pkl') 239 | >>> amzn.query('level == 1').pipe(since_year, year=2017) 240 | segment_name level fy2018 fy2017 fy2018_pct fy2017_pct 241 | AMZN US Equity North America 1 141366.0 106110.0 60.70 59.66 242 | AMZN US Equity International 1 65866.0 54297.0 28.28 30.53 243 | AMZN US Equity AWS 1 25655.0 17459.0 11.02 9.82 244 | >>> amzn.query('level == 1').pipe(since_year, year=2018) 245 | segment_name level fy2018 fy2018_pct 246 | AMZN US Equity North America 1 141366.0 60.70 247 | AMZN US Equity International 1 65866.0 28.28 248 | AMZN US Equity AWS 1 25655.0 11.02 249 | """ 250 | return data.loc[:, ~data.columns.str.contains( 251 | '|'.join(map(str, range(year - 20, year))) 252 | )] 253 | 254 | 255 | def perf(data: Union[pd.Series, pd.DataFrame]) -> Union[pd.Series, pd.DataFrame]: 256 | """ 257 | Performance rebased to 100 258 | 259 | Examples: 260 | >>> ( 261 | ... pd.DataFrame({ 262 | ... 's1': [1., np.nan, 1.01, 1.03, .99], 263 | ... 's2': [np.nan, 1., .99, 1.04, 1.1], 264 | ... }) 265 | ... .pipe(perf) 266 | ... ) 267 | s1 s2 268 | 0 100.0 NaN 269 | 1 NaN 100.0 270 | 2 101.0 99.0 271 | 3 103.0 104.0 272 | 4 99.0 110.0 273 | """ 274 | if isinstance(data, pd.Series): 275 | return ( 276 | data 277 | .dropna() 278 | .pct_change() 279 | .fillna(0) 280 | .add(1) 281 | .cumprod() 282 | .mul(100) 283 | ) 284 | return data.apply(perf, axis=0) 285 | -------------------------------------------------------------------------------- /xbbg/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alpha-xone/xbbg/266d68a53bda61745191e1a86b0f1f32b33ae082/xbbg/tests/__init__.py -------------------------------------------------------------------------------- /xbbg/tests/conftest.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | 4 | def pytest_addoption(parser): 5 | 6 | parser.addoption( 7 | '--with_bbg', action='store_true', default=False, 8 | help='Run tests with Bloomberg connections' 9 | ) 10 | 11 | 12 | def pytest_configure(config): 13 | 14 | print(config) 15 | sys.pytest_call = True 16 | 17 | 18 | def pytest_unconfigure(config): 19 | 20 | print(config) 21 | if hasattr(sys, 'pytest_call'): 22 | del sys.pytest_call 23 | -------------------------------------------------------------------------------- /xbbg/tests/data/Equity/AAPL US Equity/DVD_Hist_All/asof=2018-11-02, DVD_Start_Dt=20180101, DVD_End_Dt=20180501.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alpha-xone/xbbg/266d68a53bda61745191e1a86b0f1f32b33ae082/xbbg/tests/data/Equity/AAPL US Equity/DVD_Hist_All/asof=2018-11-02, DVD_Start_Dt=20180101, DVD_End_Dt=20180501.pkl -------------------------------------------------------------------------------- /xbbg/tests/data/Equity/AAPL US Equity/DVD_Hist_All/asof=2021-01-02, DVD_Start_Dt=20180101, DVD_End_Dt=20180501.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alpha-xone/xbbg/266d68a53bda61745191e1a86b0f1f32b33ae082/xbbg/tests/data/Equity/AAPL US Equity/DVD_Hist_All/asof=2021-01-02, DVD_Start_Dt=20180101, 
DVD_End_Dt=20180501.pkl -------------------------------------------------------------------------------- /xbbg/tests/data/Equity/AAPL US Equity/TRADE/2018-11-02.parq: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alpha-xone/xbbg/266d68a53bda61745191e1a86b0f1f32b33ae082/xbbg/tests/data/Equity/AAPL US Equity/TRADE/2018-11-02.parq -------------------------------------------------------------------------------- /xbbg/tests/data/aapl.parq: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alpha-xone/xbbg/266d68a53bda61745191e1a86b0f1f32b33ae082/xbbg/tests/data/aapl.parq -------------------------------------------------------------------------------- /xbbg/tests/data/asof=2018-11-02, DVD_Start_Dt=20180101, DVD_End_Dt=20180501.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alpha-xone/xbbg/266d68a53bda61745191e1a86b0f1f32b33ae082/xbbg/tests/data/asof=2018-11-02, DVD_Start_Dt=20180101, DVD_End_Dt=20180501.pkl -------------------------------------------------------------------------------- /xbbg/tests/data/sample_bdib.parq: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alpha-xone/xbbg/266d68a53bda61745191e1a86b0f1f32b33ae082/xbbg/tests/data/sample_bdib.parq -------------------------------------------------------------------------------- /xbbg/tests/data/sample_bdp.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alpha-xone/xbbg/266d68a53bda61745191e1a86b0f1f32b33ae082/xbbg/tests/data/sample_bdp.pkl -------------------------------------------------------------------------------- /xbbg/tests/data/sample_dvd.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alpha-xone/xbbg/266d68a53bda61745191e1a86b0f1f32b33ae082/xbbg/tests/data/sample_dvd.pkl -------------------------------------------------------------------------------- /xbbg/tests/data/sample_dvd_mc.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alpha-xone/xbbg/266d68a53bda61745191e1a86b0f1f32b33ae082/xbbg/tests/data/sample_dvd_mc.pkl -------------------------------------------------------------------------------- /xbbg/tests/data/sample_dvd_mc_raw.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alpha-xone/xbbg/266d68a53bda61745191e1a86b0f1f32b33ae082/xbbg/tests/data/sample_dvd_mc_raw.pkl -------------------------------------------------------------------------------- /xbbg/tests/data/sample_earning.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alpha-xone/xbbg/266d68a53bda61745191e1a86b0f1f32b33ae082/xbbg/tests/data/sample_earning.pkl -------------------------------------------------------------------------------- /xbbg/tests/data/sample_earning_amzn.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alpha-xone/xbbg/266d68a53bda61745191e1a86b0f1f32b33ae082/xbbg/tests/data/sample_earning_amzn.pkl -------------------------------------------------------------------------------- /xbbg/tests/data/sample_earning_header.pkl: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/alpha-xone/xbbg/266d68a53bda61745191e1a86b0f1f32b33ae082/xbbg/tests/data/sample_earning_header.pkl -------------------------------------------------------------------------------- /xbbg/tests/data/sample_eur_ib.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alpha-xone/xbbg/266d68a53bda61745191e1a86b0f1f32b33ae082/xbbg/tests/data/sample_eur_ib.pkl -------------------------------------------------------------------------------- /xbbg/tests/data/sample_indx_members_raw.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alpha-xone/xbbg/266d68a53bda61745191e1a86b0f1f32b33ae082/xbbg/tests/data/sample_indx_members_raw.pkl -------------------------------------------------------------------------------- /xbbg/tests/data/sample_rms_ib0.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alpha-xone/xbbg/266d68a53bda61745191e1a86b0f1f32b33ae082/xbbg/tests/data/sample_rms_ib0.pkl -------------------------------------------------------------------------------- /xbbg/tests/data/sample_rms_ib1.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alpha-xone/xbbg/266d68a53bda61745191e1a86b0f1f32b33ae082/xbbg/tests/data/sample_rms_ib1.pkl -------------------------------------------------------------------------------- /xbbg/tests/markets/exch.yml: -------------------------------------------------------------------------------- 1 | # Override example 2 | 3 | EquityUS: 4 | tz: America/New_York 5 | allday: [300, 2100] 6 | day: [0930, 1600] 7 | pre: [400, 0930] 8 | post: [1601, 2000] 9 | -------------------------------------------------------------------------------- /xbbg/tests/xone.db: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/alpha-xone/xbbg/266d68a53bda61745191e1a86b0f1f32b33ae082/xbbg/tests/xone.db --------------------------------------------------------------------------------