├── .devcontainer ├── Dockerfile └── devcontainer.json ├── .github ├── ISSUE_TEMPLATE │ ├── config.yml │ ├── enhancements.yml │ └── issue.yml └── workflows │ ├── publish.yml │ └── run-tests.yml ├── .gitignore ├── .travis.yml ├── LICENSE.YOLO.md ├── README.md ├── blog_run_test.sh ├── docs └── index.rst ├── jugaad_data ├── __init__.py ├── cli.py ├── holidays.py ├── nse │ ├── __init__.py │ ├── archives.py │ ├── history.py │ └── live.py ├── rbi │ └── __init__.py └── util.py ├── pyproject.toml ├── requirements.dev.txt ├── requirements.txt ├── run_tests.sh └── tests ├── __init__.py ├── test_bhav.py ├── test_cli.py ├── test_holidays.py ├── test_nse.py ├── test_nse_live.py ├── test_rbi.py └── test_util.py /.devcontainer/Dockerfile: -------------------------------------------------------------------------------- 1 | # [Choice] Python version (use -bullseye variants on local arm64/Apple Silicon): 3, 3.10, 3.9, 3.8, 3.7, 3.6, 3-bullseye, 3.10-bullseye, 3.9-bullseye, 3.8-bullseye, 3.7-bullseye, 3.6-bullseye, 3-buster, 3.10-buster, 3.9-buster, 3.8-buster, 3.7-buster, 3.6-buster 2 | ARG VARIANT=3-bullseye 3 | FROM mcr.microsoft.com/vscode/devcontainers/python:${VARIANT} 4 | 5 | # [Choice] Node.js version: none, lts/*, 16, 14, 12, 10 6 | ARG NODE_VERSION="none" 7 | RUN if [ "${NODE_VERSION}" != "none" ]; then su vscode -c "umask 0002 && . /usr/local/share/nvm/nvm.sh && nvm install ${NODE_VERSION} 2>&1"; fi 8 | 9 | # [Optional] If your pip requirements rarely change, uncomment this section to add them to the image. 10 | COPY requirements.txt /tmp/pip-tmp/ 11 | COPY requirements.dev.txt /tmp/pip-tmp/ 12 | RUN pip3 --disable-pip-version-check --no-cache-dir install -r /tmp/pip-tmp/requirements.txt \ 13 | && pip3 --disable-pip-version-check --no-cache-dir install -r /tmp/pip-tmp/requirements.dev.txt \ 14 | && rm -rf /tmp/pip-tmp 15 | 16 | # [Optional] Uncomment this section to install additional OS packages. 
17 | # RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ 18 | # && apt-get -y install --no-install-recommends 19 | 20 | # [Optional] Uncomment this line to install global node packages. 21 | # RUN su vscode -c "source /usr/local/share/nvm/nvm.sh && npm install -g " 2>&1 -------------------------------------------------------------------------------- /.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Python 3", 3 | "build": { 4 | "dockerfile": "Dockerfile", 5 | "context": "..", 6 | "args": { 7 | // Update 'VARIANT' to pick a Python version: 3, 3.10, 3.9, 3.8, 3.7, 3.6 8 | // Append -bullseye or -buster to pin to an OS version. 9 | // Use -bullseye variants on local on arm64/Apple Silicon. 10 | "VARIANT": "3.10-bullseye", 11 | // Options 12 | "NODE_VERSION": "lts/*" 13 | } 14 | }, 15 | 16 | // Configure tool-specific properties. 17 | "customizations": { 18 | // Configure properties specific to VS Code. 19 | "vscode": { 20 | // Set *default* container specific settings.json values on container create. 21 | "settings": { 22 | "python.defaultInterpreterPath": "/usr/local/bin/python", 23 | "python.linting.enabled": true, 24 | "python.linting.pylintEnabled": true, 25 | "python.formatting.autopep8Path": "/usr/local/py-utils/bin/autopep8", 26 | "python.formatting.blackPath": "/usr/local/py-utils/bin/black", 27 | "python.formatting.yapfPath": "/usr/local/py-utils/bin/yapf", 28 | "python.linting.banditPath": "/usr/local/py-utils/bin/bandit", 29 | "python.linting.flake8Path": "/usr/local/py-utils/bin/flake8", 30 | "python.linting.mypyPath": "/usr/local/py-utils/bin/mypy", 31 | "python.linting.pycodestylePath": "/usr/local/py-utils/bin/pycodestyle", 32 | "python.linting.pydocstylePath": "/usr/local/py-utils/bin/pydocstyle", 33 | "python.linting.pylintPath": "/usr/local/py-utils/bin/pylint" 34 | }, 35 | 36 | // Add the IDs of extensions you want installed when the container is created. 
37 | "extensions": [ 38 | "ms-python.python", 39 | "ms-python.vscode-pylance" 40 | ] 41 | } 42 | }, 43 | 44 | // Use 'forwardPorts' to make a list of ports inside the container available locally. 45 | // "forwardPorts": [], 46 | 47 | // Use 'postCreateCommand' to run commands after the container is created. 48 | // "postCreateCommand": "pip3 install --user -r requirements.txt", 49 | 50 | // Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root. 51 | "remoteUser": "vscode" 52 | } -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: true 2 | contact_links: 3 | - name: Contact 4 | about: contact@marketsetup.in 5 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/enhancements.yml: -------------------------------------------------------------------------------- 1 | name: Feature requests 2 | description: Describe your feature request 3 | labels: [enhancement] 4 | body: 5 | - type: markdown 6 | attributes: 7 | value: | 8 | Thanks for your interest in Jugaad-data! 🚀 9 | - type: textarea 10 | id: description 11 | attributes: 12 | label: Describe the required feature 13 | description: | 14 | Describe what data extraction feature you are proposing. Is this data currently available on nseindia.com or niftyindices.com website. 
15 | placeholder: | 16 | The bug description 17 | validations: 18 | required: true 19 | - type: textarea 20 | id: url 21 | attributes: 22 | label: URL where you observed this data 23 | description: | 24 | Share the url where you have seen this data 25 | placeholder: | 26 | https://nseindia.com/abc/xyz 27 | validations: 28 | required: true 29 | - type: textarea 30 | id: endpoint 31 | attributes: 32 | label: API endpoint involved 33 | description: | 34 | Were you able to locate the exact endpoint and the data exchange using developer tools? if yes, please share the API endpoint and parameters. 35 | placeholder: | 36 | url - https://nseindia.com/api/stockDataEndpoint 37 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/issue.yml: -------------------------------------------------------------------------------- 1 | name: Bug or issues 2 | description: Describe your problem in detail 3 | labels: [bug] 4 | body: 5 | - type: markdown 6 | attributes: 7 | value: | 8 | Thanks for your interest in Jugaad-data! 🚀 9 | - type: textarea 10 | id: description 11 | attributes: 12 | label: Issue description 13 | description: | 14 | Describe what happened and what you expected 15 | placeholder: | 16 | The bug description 17 | validations: 18 | required: true 19 | - type: textarea 20 | id: example 21 | attributes: 22 | label: Example Code 23 | description: | 24 | Please add a self-contained, [minimal, reproducible, example](https://stackoverflow.com/help/minimal-reproducible-example) with your use case. 25 | 26 | If I (or someone) can copy it, run it, and see it right away, there's a much higher chance I (or someone) will be able to help you. 
27 | 28 | placeholder: | 29 | from jugaad_data.nse import bhavcopy_save, bhavcopy_fo_save 30 | render: python 31 | validations: 32 | required: true 33 | - type: textarea 34 | id: error 35 | attributes: 36 | label: Error snippet 37 | description: | 38 | Please add your error message 39 | placeholder: | 40 | Exception 41 | render: python 42 | validations: 43 | required: true 44 | -------------------------------------------------------------------------------- /.github/workflows/publish.yml: -------------------------------------------------------------------------------- 1 | # This workflow will upload a Python Package using Twine when a release is created 2 | # For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries 3 | 4 | name: Upload Python Package 5 | 6 | on: 7 | workflow_run: 8 | workflows: ["run-tests"] 9 | types: 10 | - completed 11 | branches: 12 | - master 13 | jobs: 14 | deploy: 15 | runs-on: ubuntu-latest 16 | steps: 17 | - name: Dump GitHub context 18 | env: 19 | GITHUB_CONTEXT: ${{ toJson(github) }} 20 | run: echo "$GITHUB_CONTEXT" 21 | - uses: actions/checkout@v4 22 | - name: Set up Python 23 | uses: actions/setup-python@v4 24 | with: 25 | python-version: "3.10" 26 | # Issue ref: https://github.com/actions/setup-python/issues/436 27 | # cache: "pip" 28 | cache-dependency-path: pyproject.toml 29 | - uses: actions/cache@v3 30 | id: cache 31 | with: 32 | path: ${{ env.pythonLocation }} 33 | key: ${{ runner.os }}-python-${{ env.pythonLocation }}-${{ hashFiles('pyproject.toml') }}-publish 34 | - name: Install build dependencies 35 | if: steps.cache.outputs.cache-hit != 'true' 36 | run: pip install build 37 | - name: Build distribution 38 | run: python -m build 39 | - name: Publish 40 | uses: pypa/gh-action-pypi-publish@v1.8.10 41 | with: 42 | password: ${{ secrets.PYPI_API_TOKEN }} 43 | - name: Dump GitHub context 44 | env: 45 | GITHUB_CONTEXT: ${{ 
toJson(github) }} 46 | run: echo "$GITHUB_CONTEXT" -------------------------------------------------------------------------------- /.github/workflows/run-tests.yml: -------------------------------------------------------------------------------- 1 | # This workflow will install Python dependencies, run tests and lint with a single version of Python 2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python 3 | 4 | name: run-tests 5 | 6 | on: 7 | push: 8 | branches: [ "master" ] 9 | pull_request: 10 | branches: [ "master" ] 11 | 12 | permissions: 13 | contents: read 14 | 15 | jobs: 16 | build: 17 | 18 | runs-on: ubuntu-latest 19 | 20 | steps: 21 | - uses: actions/checkout@v3 22 | - name: Set up Python 3.10 23 | uses: actions/setup-python@v3 24 | with: 25 | python-version: "3.10" 26 | - name: Install dependencies 27 | run: | 28 | python -m pip install --upgrade pip 29 | pip install flake8 pytest 30 | if [ -f requirements.txt ]; then pip install -r requirements.txt; fi 31 | if [ -f requirements.dev.txt ]; then pip install -r requirements.dev.txt; fi 32 | - name: Lint with flake8 33 | run: | 34 | # stop the build if there are Python syntax errors or undefined names 35 | # flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics 36 | # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide 37 | # flake8 . 
--count --exit-zero --max-complexity=10 --max-line-length=127 --statistics 38 | echo "Disabled linting" 39 | - name: Test with pytest 40 | run: | 41 | pytest 42 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.csv 2 | .testmondata 3 | temp.py 4 | *.ipynb 5 | *.swp 6 | # Byte-compiled / optimized / DLL files 7 | __pycache__/ 8 | *.py[cod] 9 | *$py.class 10 | 11 | # C extensions 12 | *.so 13 | # vim config 14 | .ycm_extra_conf.py 15 | 16 | # Distribution / packaging 17 | .Python 18 | build/ 19 | develop-eggs/ 20 | dist/ 21 | downloads/ 22 | eggs/ 23 | .eggs/ 24 | lib/ 25 | lib64/ 26 | parts/ 27 | sdist/ 28 | var/ 29 | wheels/ 30 | share/python-wheels/ 31 | *.egg-info/ 32 | .installed.cfg 33 | *.egg 34 | MANIFEST 35 | 36 | # PyInstaller 37 | # Usually these files are written by a python script from a template 38 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
39 | *.manifest 40 | *.spec 41 | 42 | # Installer logs 43 | pip-log.txt 44 | pip-delete-this-directory.txt 45 | 46 | # Unit test / coverage reports 47 | htmlcov/ 48 | .tox/ 49 | .nox/ 50 | .coverage 51 | .coverage.* 52 | .cache 53 | nosetests.xml 54 | coverage.xml 55 | *.cover 56 | *.py,cover 57 | .hypothesis/ 58 | .pytest_cache/ 59 | cover/ 60 | 61 | # Translations 62 | *.mo 63 | *.pot 64 | 65 | # Django stuff: 66 | *.log 67 | local_settings.py 68 | db.sqlite3 69 | db.sqlite3-journal 70 | 71 | # Flask stuff: 72 | instance/ 73 | .webassets-cache 74 | 75 | # Scrapy stuff: 76 | .scrapy 77 | 78 | # Sphinx documentation 79 | docs/_build/ 80 | 81 | # PyBuilder 82 | .pybuilder/ 83 | target/ 84 | 85 | # Jupyter Notebook 86 | .ipynb_checkpoints 87 | 88 | # IPython 89 | profile_default/ 90 | ipython_config.py 91 | 92 | # pyenv 93 | # For a library or package, you might want to ignore these files since the code is 94 | # intended to run in multiple environments; otherwise, check them in: 95 | # .python-version 96 | 97 | # pipenv 98 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 99 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 100 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 101 | # install all needed dependencies. 102 | #Pipfile.lock 103 | 104 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 105 | __pypackages__/ 106 | 107 | # Celery stuff 108 | celerybeat-schedule 109 | celerybeat.pid 110 | 111 | # VsCode 112 | .vscode/ 113 | 114 | # SageMath parsed files 115 | *.sage.py 116 | 117 | # Environments 118 | .env 119 | .venv 120 | env/ 121 | venv/ 122 | ENV/ 123 | env.bak/ 124 | venv.bak/ 125 | 126 | # Spyder project settings 127 | .spyderproject 128 | .spyproject 129 | 130 | # Rope project settings 131 | .ropeproject 132 | 133 | # mkdocs documentation 134 | /site 135 | 136 | # mypy 137 | .mypy_cache/ 138 | .dmypy.json 139 | dmypy.json 140 | 141 | # Pyre type checker 142 | .pyre/ 143 | 144 | # pytype static type analyzer 145 | .pytype/ 146 | 147 | # Cython debug symbols 148 | cython_debug/ 149 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | env: 2 | - PYTHONWARNINGS="ignore:Unverified HTTPS request" 3 | language: python 4 | python: 5 | - "3.6" # current default Python on Travis CI 6 | install: 7 | - pip install -r requirements.dev.txt 8 | - pip install -r requirements.txt 9 | 10 | script: 11 | - pytest 12 | -------------------------------------------------------------------------------- /LICENSE.YOLO.md: -------------------------------------------------------------------------------- 1 | YOLO LICENSE 2 | Version 2, July 29 2016 3 | 4 | THIS SOFTWARE LICENSE IS PROVIDED "ALL CAPS" SO THAT YOU KNOW IT IS SUPER 5 | SERIOUS AND YOU DON'T MESS AROUND WITH COPYRIGHT LAW BECAUSE YOU WILL GET IN 6 | TROUBLE HERE ARE SOME OTHER BUZZWORDS COMMONLY IN THESE THINGS WARRANTIES 7 | LIABILITY CONTRACT TORT LIABLE CLAIMS RESTRICTION MERCHANTABILITY. NOW HERE'S 8 | THE REAL LICENSE: 9 | 10 | 0. jugaad-data is in public domain. 11 | 1. Do whatever you want with it. 12 | 2. Stop emailing me about it! 
-------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Documentation 2 | 3 | https://marketsetup.in/documentation/jugaad-data/ 4 | 5 | # Introduction 6 | 7 | `jugaad-data` is a python library to download historical/live stock, index as well as economic data from NSE and RBI website using. 8 | 9 | ![Build Badge](https://github.com/jugaad-py/jugaad-data/actions/workflows/run-tests.yml/badge.svg) 10 | 11 | 12 | # Features 13 | 14 | * Supports [new NSE website](https://www.nseindia.com/), (All libraries based on old NSE website might stop working) 15 | * Powerful CLI (Command line interface), Even non-coders can use it easily 16 | * Built-in caching mechanism to play nice with NSE. Avoid making un-necessary requests to NSE's website and getting blocked 17 | * Optional `pandas` support 18 | 19 | **Road map** 20 | 21 | | Website | Segment | Supported? | 22 | |----------|------------|------------| 23 | | NSE | Stocks | Yes | 24 | | NSE | Stocks F&O | Yes | 25 | | NSE | Index | Yes | 26 | | NSE | Index F&O | Yes | 27 | | RBI | Current Rates| Yes | 28 | 29 | # Installation 30 | 31 | `pip install jugaad-data` 32 | 33 | # Getting started 34 | 35 | ## Python inteface 36 | 37 | ### Historical data 38 | 39 | ```python 40 | from datetime import date 41 | from jugaad_data.nse import bhavcopy_save, bhavcopy_fo_save 42 | 43 | # Download bhavcopy 44 | bhavcopy_save(date(2020,1,1), "/path/to/directory") 45 | 46 | # Download bhavcopy for futures and options 47 | bhavcopy_fo_save(date(2020,1,1), "/path/to/directory") 48 | 49 | # Download stock data to pandas dataframe 50 | from jugaad_data.nse import stock_df 51 | df = stock_df(symbol="SBIN", from_date=date(2020,1,1), 52 | to_date=date(2020,1,30), series="EQ") 53 | ``` 54 | ### Live data 55 | 56 | ```python 57 | from jugaad_data.nse import NSELive 58 | n = NSELive() 59 | q = n.stock_quote("HDFC") 60 | 
print(q['priceInfo']) 61 | ``` 62 | 63 | ``` 64 | {'lastPrice': 2635, 65 | 'change': -49.05000000000018, 66 | 'pChange': -1.8274622305843848, 67 | 'previousClose': 2684.05, 68 | 'open': 2661, 69 | 'close': 2632.75, 70 | 'vwap': 2645.57, 71 | 'lowerCP': '2415.65', 72 | 'upperCP': '2952.45', 73 | 'pPriceBand': 'No Band', 74 | 'basePrice': 2684.05, 75 | 'intraDayHighLow': {'min': 2615.6, 'max': 2688.45, 'value': 2635}, 76 | 'weekHighLow': {'min': 1473.45, 77 | 'minDate': '24-Mar-2020', 78 | 'max': 2777.15, 79 | 'maxDate': '13-Jan-2021', 80 | 'value': 2635}} 81 | ``` 82 | 83 | ## Command line interface 84 | 85 | ``` 86 | $ jdata stock --help 87 | 88 | Usage: jdata stock [OPTIONS] 89 | 90 | Download historical stock data 91 | 92 | $jdata stock --symbol STOCK1 -f yyyy-mm-dd -t yyyy-mm-dd --o file_name.csv 93 | 94 | Options: 95 | -s, --symbol TEXT [required] 96 | -f, --from TEXT [required] 97 | -t, --to TEXT [required] 98 | -S, --series TEXT [default: EQ] 99 | -o, --output TEXT 100 | --help Show this message and exit. 
101 | ``` 102 | 103 | ``` 104 | $ jdata stock -s SBIN -f 2020-01-01 -t 2020-01-31 -o SBIN-Jan.csv 105 | SBIN [####################################] 100% 106 | 107 | Saved file to : SBIN-Jan.csv 108 | ``` 109 | 110 | ## Download historical derivatives (F&O) data 111 | 112 | ``` 113 | $ jdata deriviatives --help 114 | Usage: cli.py derivatives [OPTIONS] 115 | 116 | Sample usage- 117 | 118 | Download stock futures- 119 | 120 | jdata derivatives -s SBIN -f 2020-01-01 -t 2020-01-30 -e 2020-01-30 -i FUTSTK -o file_name.csv 121 | 122 | Download index futures- 123 | 124 | jdata derivatives -s NIFTY -f 2020-01-01 -t 2020-01-30 -e 2020-01-30 -i FUTIDX -o file_name.csv 125 | 126 | Download stock options- 127 | 128 | jdata derivatives -s SBIN -f 2020-01-01 -t 2020-01-30 -e 2020-01-30 -i OPTSTK -p 330 --ce -o file_name.csv 129 | 130 | Download index options- 131 | 132 | jdata derivatives -s NIFTY -f 2020-01-01 -t 2020-01-30 -e 2020-01-23 -i OPTIDX -p 11000 --pe -o file_name.csv 133 | 134 | Options: 135 | -s, --symbol TEXT Stock/Index symbol [required] 136 | -f, --from TEXT From date - yyyy-mm-dd [required] 137 | -t, --to TEXT To date - yyyy-mm-dd [required] 138 | -e, --expiry TEXT Expiry date - yyyy-mm-dd [required] 139 | -i, --instru TEXT FUTSTK - Stock futures, FUTIDX - Index Futures, OPTSTK - 140 | Stock Options, OPTIDX - Index Options [required] 141 | 142 | -p, --price TEXT Strike price (Only for OPTSTK and OPTIDX) 143 | --ce / --pe --ce for call and --pe for put (Only for OPTSTK and 144 | OPTIDX) 145 | 146 | -o, --output TEXT Full path of output file 147 | --help Show this message and exit. 
148 | ``` 149 | 150 | ## Buy me a coffee 151 | 152 | If my work has helped you in anyway, you can buy me a coffee 153 | 154 | [!["Buy Me A Coffee"](https://www.buymeacoffee.com/assets/img/custom_images/orange_img.png)](https://www.buymeacoffee.com/Jugaader) 155 | -------------------------------------------------------------------------------- /blog_run_test.sh: -------------------------------------------------------------------------------- 1 | while true; do 2 | inotifywait jugaad_data/ tests/ -q -e create -e close_write -e attrib -e move 3 | clear 4 | env/bin/python3 -m unittest tests.test_util 5 | done 6 | ``` 7 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | Introduction to jugaad-data 2 | =========================== 3 | 4 | ``jugad-data`` is a library to fetch live as well as historical stock data. The library supports below functionalities- 5 | 6 | - Download bhavcopy for stocks, index and derivatives 7 | - Download historical stock data 8 | - Download historical derivatives data 9 | - Fetch live quotes for stocks and derivatives 10 | - Fetch live index and turnover data 11 | - Fetch option chains 12 | 13 | Currently the library supports NSE. 14 | 15 | Documentation and Resources 16 | =========================== 17 | 18 | Detailed documentation: https://marketsetup.in/documentation/jugaad-data/ 19 | 20 | Example usage: https://marketsetup.in/tags/jugaad-data/ 21 | 22 | Installation 23 | ============ 24 | 25 | ``pip install jugaad-data`` 26 | 27 | 28 | Quick Start 29 | =========== 30 | 31 | .. 
code-block:: python 32 | 33 | from datetime import date 34 | from jugaad_data.nse import bhavcopy_save, bhavcopy_fo_save 35 | 36 | # Download bhavcopy 37 | bhavcopy_save(date(2020,1,1), "/path/to/directory") 38 | 39 | # Download bhavcopy for futures and options 40 | bhavcopy_fo_save(date(2020,1,1), "/path/to/directory") 41 | 42 | # Download stock data to pandas dataframe 43 | from jugaad_data.nse import stock_df 44 | df = stock_df(symbol="SBIN", from_date=date(2020,1,1), 45 | to_date=date(2020,1,30), series="EQ") 46 | -------------------------------------------------------------------------------- /jugaad_data/__init__.py: -------------------------------------------------------------------------------- 1 | __version__= "0.26" -------------------------------------------------------------------------------- /jugaad_data/cli.py: -------------------------------------------------------------------------------- 1 | import os 2 | import click 3 | from datetime import date, datetime, timedelta 4 | from concurrent.futures import ThreadPoolExecutor 5 | import requests 6 | from jugaad_data import nse 7 | 8 | 9 | 10 | @click.group() 11 | def cli(): 12 | """ This is a command line tool to download stock market data to csv files. 
13 | 14 | """ 15 | 16 | def bhavcopy_wrapper(bhavcopy_function, dt, dest): 17 | try: 18 | bhavcopy_function(dt, dest) 19 | return True 20 | except: 21 | return False 22 | 23 | 24 | 25 | @cli.command("bhavcopy") 26 | @click.option("--dest", "-d", help="Destination directory path", required=True, type=click.Path(exists=True, file_okay=False, dir_okay=True)) 27 | @click.option("--from", "-f", "from_", help="From date", type=click.DateTime(["%Y-%m-%d"])) 28 | @click.option("--to", "-t", help="To date", type=click.DateTime(["%Y-%m-%d"])) 29 | @click.option("--fo/--no-fo", help="Downloads F&O bhavcopy", default=False, type=bool) 30 | @click.option("--idx/--no-idx", help="Downloads Index bhavcopy", default=False, type=bool) 31 | @click.option("--full/--no-full", help="Full Bhavcopy", default=False, type=bool) 32 | def bhavcopy(from_, to, dest, fo, idx, full): 33 | """Downloads bhavcopy from NSE's website 34 | 35 | Download today's bhavcopy 36 | 37 | $ jdata bhavcopy -d /path/to/dir 38 | 39 | Download bhavcopy for a date 40 | 41 | $ jdata bhavcopy -d /path/to/dir -f 2020-01-01 42 | 43 | Downlad bhavcopy for a date range 44 | 45 | $ jdata bhavcopy -d /path/to/dir -f 2020-01-01 -t 2020-02-01 46 | 47 | """ 48 | 49 | downloader = nse.bhavcopy_save 50 | if full: 51 | downloader = nse.full_bhavcopy_save 52 | if idx: 53 | downloader = nse.bhavcopy_index_save 54 | if fo: 55 | downloader = nse.bhavcopy_fo_save 56 | 57 | if not from_: 58 | dt = date.today() 59 | try: 60 | path = downloader(dt, dest) 61 | click.echo("Saved to : " + path) 62 | except requests.exceptions.ReadTimeout: 63 | click.echo("""Error: Timeout while downloading, This may be due to- 64 | \b1. Bad internet connection 65 | \b2. 
Today is holiday or file is not ready yet""", err=True) 66 | 67 | if from_ and not to: 68 | # if from_ provided but not to 69 | dt = from_.date() 70 | try: 71 | path = downloader(dt, dest) 72 | click.echo("Saved to : " + path) 73 | except requests.exceptions.ReadTimeout: 74 | click.echo("""Error: Timeout while downloading, This may be due to- 75 | \b1. Bad internet connection 76 | \b2. {} is holiday or file is not ready yet""".format(dt), err=True) 77 | 78 | if from_ and to: 79 | failed_downloads = [] 80 | date_range = [] 81 | delta = to - from_ 82 | for i in range(delta.days + 1): 83 | dt = from_ + timedelta(days=i) 84 | w = dt.weekday() 85 | if w not in [5,6]: 86 | date_range.append(dt.date()) 87 | 88 | with ThreadPoolExecutor() as executor: 89 | futures = [executor.submit(bhavcopy_wrapper, downloader, dt, dest) for dt in date_range] 90 | 91 | with click.progressbar(futures, label="Downloading Bhavcopies") as bar: 92 | for i, future in enumerate(bar): 93 | result = future.result() 94 | if not result: 95 | failed_downloads.append(date_range[i]) 96 | 97 | 98 | """ 99 | for dt in bar: 100 | try: 101 | nse.bhavcopy_save(dt, dest) 102 | except requests.exceptions.ReadTimeout: 103 | failed_downloads.append(dt) 104 | """ 105 | click.echo("Saved to : " + dest) 106 | if failed_downloads: 107 | click.echo("Failed to download for below dates, these might be holidays, please check -") 108 | for dt in failed_downloads: 109 | click.echo(dt) 110 | 111 | @cli.command("stock") 112 | @click.option("--symbol", "-s", required=True, help="Stock symbol") 113 | @click.option("--from", "-f", "from_", required=True, help="From date - yyyy-mm-dd") 114 | @click.option("--to", "-t", required=True, help="From date - yyyy-mm-dd") 115 | @click.option("--series", "-S", default="EQ", show_default=True, help="Series - EQ, BE etc.") 116 | @click.option("--output", "-o", default="", help="Full path for output file") 117 | def stock(symbol, from_, to, series, output): 118 | """Download historical 
stock data 119 | 120 | 121 | $jdata stock --symbol STOCK1 -f yyyy-mm-dd -t yyyy-mm-dd -o file_name.csv 122 | """ 123 | import traceback 124 | from_date = datetime.strptime(from_, "%Y-%m-%d").date() 125 | to_date = datetime.strptime(to, "%Y-%m-%d").date() 126 | try: 127 | o = nse.stock_csv(symbol, from_date, to_date, series, output, show_progress=True) 128 | except Exception as e: 129 | print(e) 130 | traceback.print_exc() 131 | click.echo("\nSaved file to : {}".format(o)) 132 | 133 | @cli.command("index") 134 | @click.option("--symbol", "-s", required=True, help="Stock symbol") 135 | @click.option("--from", "-f", "from_", required=True, help="From date - yyyy-mm-dd") 136 | @click.option("--to", "-t", required=True, help="From date - yyyy-mm-dd") 137 | @click.option("--output", "-o", default="", help="Full path for output file") 138 | def index(symbol, from_, to, output): 139 | """Download historical index data 140 | 141 | 142 | $jdata index --symbol "NIFTY 50" -f yyyy-mm-dd -t yyyy-mm-dd -o file_name.csv 143 | """ 144 | import traceback 145 | from_date = datetime.strptime(from_, "%Y-%m-%d").date() 146 | to_date = datetime.strptime(to, "%Y-%m-%d").date() 147 | try: 148 | o = nse.index_csv(symbol, from_date, to_date, output, show_progress=True) 149 | except Exception as e: 150 | print(e) 151 | traceback.print_exc() 152 | click.echo("\nSaved file to : {}".format(o)) 153 | 154 | @cli.command("derivatives") 155 | @click.option("--symbol", "-s", required=True, help="Stock/Index symbol") 156 | @click.option("--from", "-f", "from_", required=True, help="From date - yyyy-mm-dd") 157 | @click.option("--to", "-t", required=True, help="To date - yyyy-mm-dd") 158 | @click.option("--expiry", "-e", required=True, help="Expiry date - yyyy-mm-dd") 159 | @click.option("--instru", "-i", required=True, help="""FUTSTK - Stock futures, FUTIDX - Index Futures,\tOPTSTK - Stock Options, OPTIDX - Index Options""") 160 | @click.option("--price", "-p", help="Strike price (Only for OPTSTK and 
OPTIDX)") 161 | @click.option("--ce/--pe", help="--ce for call and --pe for put (Only for OPTSTK and OPTIDX)") 162 | @click.option("--output", "-o", default="", help="Full path of output file") 163 | def stock(symbol, from_, to, expiry, instru, price, ce, output): 164 | """Sample usage- 165 | 166 | Download stock futures- 167 | 168 | \b 169 | jdata derivatives -s SBIN -f 2020-01-01 -t 2020-01-30 -e 2020-01-30 -i FUTSTK -o file_name.csv 170 | 171 | Download index futures- 172 | 173 | \b 174 | jdata derivatives -s NIFTY -f 2020-01-01 -t 2020-01-30 -e 2020-01-30 -i FUTIDX -o file_name.csv 175 | 176 | Download stock options- 177 | 178 | \b 179 | jdata derivatives -s SBIN -f 2020-01-01 -t 2020-01-30 -e 2020-01-30 -i OPTSTK -p 330 --ce -o file_name.csv 180 | 181 | Download index options- 182 | 183 | \b 184 | jdata derivatives -s NIFTY -f 2020-01-01 -t 2020-01-30 -e 2020-01-23 -i OPTIDX -p 11000 --pe -o file_name.csv 185 | 186 | 187 | """ 188 | import traceback 189 | import sys 190 | from_date = datetime.strptime(from_, "%Y-%m-%d").date() 191 | to_date = datetime.strptime(to, "%Y-%m-%d").date() 192 | expiry = datetime.strptime(expiry, "%Y-%m-%d").date() 193 | if "OPT" in instru: 194 | if ce: 195 | ot = "CE" 196 | else: 197 | ot = "PE" 198 | 199 | if price: 200 | price = float(price) 201 | else: 202 | ot = None 203 | o = nse.derivatives_csv(symbol, from_date, to_date, expiry, instru, price, ot, 204 | output, show_progress=True) 205 | 206 | click.echo("\nSaved file to : {}".format(o)) 207 | 208 | 209 | 210 | 211 | if __name__ == "__main__": 212 | cli() 213 | 214 | 215 | -------------------------------------------------------------------------------- /jugaad_data/holidays.py: -------------------------------------------------------------------------------- 1 | from datetime import time, date, datetime 2 | 3 | """ 4 | Holiday list based on Zipline calendar library's holiday calendar 5 | Reference: 
https://github.com/quantopian/trading_calendars/blob/master/trading_calendars/exchange_calendar_xbom.py 6 | """ 7 | 8 | holidays_str = [ 9 | '1997-01-23', 10 | '1997-03-07', 11 | '1997-03-24', 12 | '1997-04-08', 13 | '1997-04-14', 14 | '1997-04-16', 15 | '1997-04-18', 16 | '1997-05-01', 17 | '1997-05-08', 18 | '1997-08-15', 19 | '1997-08-18', 20 | '1997-08-25', 21 | '1997-10-02', 22 | '1997-10-28', 23 | '1997-10-29', 24 | '1997-10-31', 25 | '1997-12-25', 26 | '1998-04-09', 27 | '1998-04-14', 28 | '1998-04-28', 29 | '1998-12-25', 30 | '1999-01-01', 31 | '1999-01-20', 32 | '1999-01-26', 33 | '1999-03-02', 34 | '1999-03-18', 35 | '1999-03-25', 36 | '1999-03-29', 37 | '1999-04-02', 38 | '1999-04-14', 39 | '1999-04-27', 40 | '1999-04-30', 41 | '1999-09-13', 42 | '1999-10-19', 43 | '1999-11-08', 44 | '1999-11-10', 45 | '1999-11-23', 46 | '1999-12-31', 47 | '2000-01-26', 48 | '2000-03-17', 49 | '2000-03-20', 50 | '2000-04-14', 51 | '2000-04-21', 52 | '2000-05-01', 53 | '2000-08-15', 54 | '2000-09-01', 55 | '2000-10-02', 56 | '2000-12-25', 57 | '2001-01-01', 58 | '2001-01-26', 59 | '2001-03-06', 60 | '2001-04-05', 61 | '2001-04-13', 62 | '2001-05-01', 63 | '2001-08-15', 64 | '2001-08-22', 65 | '2001-10-02', 66 | '2001-10-26', 67 | '2001-11-16', 68 | '2001-11-30', 69 | '2001-12-17', 70 | '2001-12-25', 71 | '2002-03-25', 72 | '2002-03-29', 73 | '2002-05-01', 74 | '2002-08-15', 75 | '2002-09-10', 76 | '2002-10-02', 77 | '2002-10-15', 78 | '2002-11-06', 79 | '2002-11-19', 80 | '2002-12-25', 81 | '2003-02-13', 82 | '2003-03-14', 83 | '2003-03-18', 84 | '2003-04-14', 85 | '2003-04-18', 86 | '2003-05-01', 87 | '2003-08-15', 88 | '2003-10-02', 89 | '2003-11-26', 90 | '2003-12-25', 91 | '2004-01-01', 92 | '2004-01-26', 93 | '2004-02-02', 94 | '2004-03-02', 95 | '2004-04-09', 96 | '2004-04-14', 97 | '2004-04-26', 98 | '2004-10-13', 99 | '2004-10-22', 100 | '2004-11-15', 101 | '2004-11-26', 102 | '2005-01-21', 103 | '2005-01-26', 104 | '2005-03-25', 105 | '2005-04-14', 106 | 
'2005-07-28', 107 | '2005-08-15', 108 | '2005-09-07', 109 | '2005-10-12', 110 | '2005-11-03', 111 | '2005-11-04', 112 | '2005-11-15', 113 | '2006-01-11', 114 | '2006-01-26', 115 | '2006-02-09', 116 | '2006-03-15', 117 | '2006-04-06', 118 | '2006-04-11', 119 | '2006-04-14', 120 | '2006-05-01', 121 | '2006-08-15', 122 | '2006-10-02', 123 | '2006-10-24', 124 | '2006-10-25', 125 | '2006-12-25', 126 | '2007-01-01', 127 | '2007-01-26', 128 | '2007-01-30', 129 | '2007-02-16', 130 | '2007-03-27', 131 | '2007-04-06', 132 | '2007-05-01', 133 | '2007-05-02', 134 | '2007-08-15', 135 | '2007-10-02', 136 | '2007-12-21', 137 | '2007-12-25', 138 | '2008-03-06', 139 | '2008-03-20', 140 | '2008-03-21', 141 | '2008-04-14', 142 | '2008-04-18', 143 | '2008-05-01', 144 | '2008-05-19', 145 | '2008-08-15', 146 | '2008-09-03', 147 | '2008-10-02', 148 | '2008-10-09', 149 | '2008-10-30', 150 | '2008-11-13', 151 | '2008-11-27', 152 | '2008-12-09', 153 | '2008-12-25', 154 | '2009-01-08', 155 | '2009-01-26', 156 | '2009-02-23', 157 | '2009-03-10', 158 | '2009-03-11', 159 | '2009-04-03', 160 | '2009-04-07', 161 | '2009-04-10', 162 | '2009-04-14', 163 | '2009-04-30', 164 | '2009-05-01', 165 | '2009-09-21', 166 | '2009-09-28', 167 | '2009-10-02', 168 | '2009-10-13', 169 | '2009-10-19', 170 | '2009-11-02', 171 | '2009-12-25', 172 | '2009-12-28', 173 | '2010-01-01', 174 | '2010-01-26', 175 | '2010-02-12', 176 | '2010-03-01', 177 | '2010-03-24', 178 | '2010-04-02', 179 | '2010-04-14', 180 | '2010-09-10', 181 | '2010-11-17', 182 | '2010-12-17', 183 | '2011-01-26', 184 | '2011-03-02', 185 | '2011-04-12', 186 | '2011-04-14', 187 | '2011-04-22', 188 | '2011-08-15', 189 | '2011-08-31', 190 | '2011-09-01', 191 | '2011-10-06', 192 | '2011-10-27', 193 | '2011-11-07', 194 | '2011-11-10', 195 | '2011-12-06', 196 | '2012-01-26', 197 | '2012-02-20', 198 | '2012-03-08', 199 | '2012-04-05', 200 | '2012-04-06', 201 | '2012-05-01', 202 | '2012-08-15', 203 | '2012-08-20', 204 | '2012-09-19', 205 | '2012-10-02', 206 | 
'2012-10-24', 207 | '2012-11-14', 208 | '2012-11-28', 209 | '2012-12-25', 210 | '2013-03-27', 211 | '2013-03-29', 212 | '2013-04-19', 213 | '2013-04-24', 214 | '2013-05-01', 215 | '2013-08-09', 216 | '2013-08-15', 217 | '2013-09-09', 218 | '2013-10-02', 219 | '2013-10-16', 220 | '2013-11-04', 221 | '2013-11-15', 222 | '2013-12-25', 223 | '2014-02-27', 224 | '2014-03-17', 225 | '2014-04-08', 226 | '2014-04-14', 227 | '2014-04-18', 228 | '2014-04-24', 229 | '2014-05-01', 230 | '2014-07-29', 231 | '2014-08-15', 232 | '2014-08-29', 233 | '2014-10-02', 234 | '2014-10-03', 235 | '2014-10-06', 236 | '2014-10-15', 237 | '2014-10-24', 238 | '2014-11-04', 239 | '2014-11-06', 240 | '2014-12-25', 241 | '2015-01-26', 242 | '2015-02-17', 243 | '2015-03-06', 244 | '2015-04-02', 245 | '2015-04-03', 246 | '2015-04-14', 247 | '2015-05-01', 248 | '2015-09-17', 249 | '2015-09-25', 250 | '2015-10-02', 251 | '2015-10-22', 252 | '2015-11-12', 253 | '2015-11-25', 254 | '2015-12-25', 255 | '2016-01-26', 256 | '2016-03-07', 257 | '2016-03-24', 258 | '2016-03-25', 259 | '2016-04-14', 260 | '2016-04-15', 261 | '2016-04-19', 262 | '2016-07-06', 263 | '2016-08-15', 264 | '2016-09-05', 265 | '2016-09-13', 266 | '2016-10-11', 267 | '2016-10-12', 268 | '2016-10-31', 269 | '2016-11-14', 270 | '2017-01-26', 271 | '2017-02-24', 272 | '2017-03-13', 273 | '2017-04-04', 274 | '2017-04-14', 275 | '2017-05-01', 276 | '2017-06-26', 277 | '2017-08-15', 278 | '2017-08-25', 279 | '2017-10-02', 280 | '2017-10-20', 281 | '2017-12-25', 282 | '2018-01-26', 283 | '2018-02-13', 284 | '2018-03-02', 285 | '2018-03-29', 286 | '2018-03-30', 287 | '2018-05-01', 288 | '2018-08-15', 289 | '2018-08-22', 290 | '2018-09-13', 291 | '2018-09-20', 292 | '2018-10-02', 293 | '2018-10-18', 294 | '2018-11-08', 295 | '2018-11-23', 296 | '2018-12-25', 297 | '2019-01-26', 298 | '2019-03-02', 299 | '2019-03-04', 300 | '2019-03-21', 301 | '2019-04-17', 302 | '2019-04-19', 303 | '2019-04-29', 304 | '2019-05-01', 305 | '2019-06-05', 306 | 
'2019-08-12', 307 | '2019-08-15', 308 | '2019-09-02', 309 | '2019-09-10', 310 | '2019-10-02', 311 | '2019-10-08', 312 | '2019-10-21', 313 | '2019-10-28', 314 | '2019-11-12', 315 | '2019-12-25', 316 | '2020-02-21', 317 | '2020-03-10', 318 | '2020-04-02', 319 | '2020-04-06', 320 | '2020-04-10', 321 | '2020-04-14', 322 | '2020-05-01', 323 | '2020-05-25', 324 | '2020-07-31', 325 | '2020-10-02', 326 | '2020-11-16', 327 | '2020-11-30', 328 | '2020-12-25', 329 | '2021-01-26', 330 | '2021-03-11', 331 | '2021-03-29', 332 | '2021-04-02', 333 | '2021-04-14', 334 | '2021-04-21', 335 | '2021-05-13', 336 | '2021-07-21', 337 | '2021-08-19', 338 | '2021-09-10', 339 | '2021-10-15', 340 | '2021-11-05', 341 | '2021-11-19', 342 | '2022-01-26', 343 | '2022-03-01', 344 | '2022-03-18', 345 | '2022-04-14', 346 | '2022-04-15', 347 | '2022-05-03', 348 | '2022-08-09', 349 | '2022-08-15', 350 | '2022-08-31', 351 | '2022-10-05', 352 | '2022-10-24', 353 | '2022-10-26', 354 | '2022-11-08', 355 | '2023-01-26', 356 | '2023-02-18', # weekend 357 | '2023-03-07', 358 | '2023-03-30', 359 | '2023-04-04', 360 | '2023-04-07', 361 | '2023-04-14', 362 | '2023-04-22', # weekend 363 | '2023-05-01', 364 | '2023-06-29', 365 | '2023-07-29', # weekend 366 | '2023-08-15', 367 | '2023-09-19', 368 | '2023-10-02', 369 | '2023-10-24', 370 | '2023-11-12', # weekend 371 | '2023-11-14', 372 | '2023-11-27', 373 | '2023-12-25' 374 | ] 375 | 376 | 377 | def holidays(year=None, month=None): 378 | h = [datetime.strptime(d, "%Y-%m-%d").date() for d in holidays_str] 379 | if year: 380 | h = [d for d in h if d.year==year] 381 | if month: 382 | h = [d for d in h if d.month==month] 383 | return h 384 | 385 | 386 | -------------------------------------------------------------------------------- /jugaad_data/nse/__init__.py: -------------------------------------------------------------------------------- 1 | from .history import * 2 | from .archives import * 3 | from .live import * 4 | 
"""
Implements functionality to download archival data such as Bhavcopy, bulk
deals from NSE and NSEIndices website
"""
from datetime import datetime, date
import os
import io
import csv
import zipfile
import requests
import pprint


def unzip(function):
    """Decorator: treat the wrapped function's return value as raw ZIP bytes
    and return the UTF-8 decoded text of the archive's first member."""
    def unzipper(*args, **kwargs):
        blob = function(*args, **kwargs)
        with zipfile.ZipFile(file=io.BytesIO(blob)) as zf:
            first_member = zf.namelist()[0]
            with zf.open(first_member) as fp_bh:
                return fp_bh.read().decode('utf-8')
    return unzipper


class NSEArchives:
    """Downloads daily archives (bhavcopies, bulk deals) from NSE.

    Route placeholder conventions:
        d    - 1, 12 (without leading zero)
        dd   - 01, 21 (day of the month with leading zero)
        mm   - 01, 12 (month with leading zero)
        m    - 1, 12 (month without leading zero)
        MMM  - JAN, DEC
        yy   - 19, 20
        yyyy - 2020, 2030
    """
    base_url = "https://nsearchives.nseindia.com/"
    timeout = 4  # seconds, per request

    def __init__(self):
        self.s = requests.Session()
        h = {
            "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.125 Safari/537.36",
            "accept-encoding": "gzip, deflate",
            "accept":
            """text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9""",
        }
        self.s.headers.update(h)
        self._routes = {
            "bhavcopy": "/content/historical/EQUITIES/{yyyy}/{MMM}/cm{dd}{MMM}{yyyy}bhav.csv.zip",
            "bhavcopy_full": "/products/content/sec_bhavdata_full_{dd}{mm}{yyyy}.csv",
            "bulk_deals": "/content/equities/bulk.csv",
            "bhavcopy_fo": "/content/historical/DERIVATIVES/{yyyy}/{MMM}/fo{dd}{MMM}{yyyy}bhav.csv.zip"
        }

    def get(self, rout, **params):
        """Fetch the route named `rout`, formatting its URL with `params`."""
        url = self.base_url + self._routes[rout].format(**params)
        self.r = self.s.get(url, timeout=self.timeout)
        return self.r

    @unzip
    def bhavcopy_raw(self, dt):
        """Downloads raw bhavcopy text for a specific date"""
        dd = dt.strftime('%d')
        MMM = dt.strftime('%b').upper()
        yyyy = dt.year
        r = self.get("bhavcopy", yyyy=yyyy, MMM=MMM, dd=dd)
        return r.content

    def bhavcopy_save(self, dt, dest, skip_if_present=True):
        """Downloads and saves raw bhavcopy csv file for a specific date"""
        fmt = "cm%d%b%Ybhav.csv"
        fname = os.path.join(dest, dt.strftime(fmt))
        if os.path.isfile(fname) and skip_if_present:
            return fname
        text = self.bhavcopy_raw(dt)
        with open(fname, 'w') as fp:
            fp.write(text)
        return fname

    def full_bhavcopy_raw(self, dt):
        """Downloads full raw bhavcopy text for a specific date.

        Raises requests.exceptions.ReadTimeout when the request times out;
        for dates before 2020 the message notes that the file may simply not
        be published for that date.
        """
        dd = dt.strftime('%d')
        mm = dt.strftime('%m')
        yyyy = dt.year
        try:
            r = self.get("bhavcopy_full", yyyy=yyyy, mm=mm, dd=dd)
        except requests.exceptions.ReadTimeout:
            if dt < date(2020, 1, 1):  # Receiving timeouts for dates before 2020
                raise requests.exceptions.ReadTimeout("""Either request timed
                                            out or full bhavcopy file is
                                            not available for given
                                            date (2019 and prior
                                            dates)""")
            # BUGFIX: previously the timeout was swallowed here and the
            # following `return r.text` raised NameError (`r` unbound).
            raise
        return r.text

    def full_bhavcopy_save(self, dt, dest, skip_if_present=True):
        """Downloads and saves the full bhavcopy csv for a specific date."""
        fmt = "sec_bhavdata_full_%d%b%Ybhav.csv"
        fname = os.path.join(dest, dt.strftime(fmt))
        # BUGFIX: a second, unconditional isfile check used to follow this
        # one, which made skip_if_present=False a no-op for existing files.
        if os.path.isfile(fname) and skip_if_present:
            return fname
        text = self.full_bhavcopy_raw(dt)
        with open(fname, 'w') as fp:
            fp.write(text)
        return fname

    def bulk_deals_raw(self):
        """Downloads the bulk-deals csv as text."""
        r = self.get("bulk_deals")
        return r.text

    def bulk_deals_save(self, fname):
        """Downloads the bulk-deals csv and writes it to `fname`."""
        text = self.bulk_deals_raw()
        with open(fname, 'w') as fp:
            fp.write(text)

    @unzip
    def bhavcopy_fo_raw(self, dt):
        """Downloads raw derivatives (F&O) bhavcopy text for a specific date"""
        dd = dt.strftime('%d')
        MMM = dt.strftime('%b').upper()
        yyyy = dt.year
        r = self.get("bhavcopy_fo", yyyy=yyyy, MMM=MMM, dd=dd)
        return r.content

    def bhavcopy_fo_save(self, dt, dest, skip_if_present=True):
        """ Saves Derivatives Bhavcopy to a directory """
        fmt = "fo%d%b%Ybhav.csv"
        fname = os.path.join(dest, dt.strftime(fmt))
        if os.path.isfile(fname) and skip_if_present:
            return fname
        text = self.bhavcopy_fo_raw(dt)
        with open(fname, 'w') as fp:
            fp.write(text)
        return fname


class NSEIndicesArchives(NSEArchives):
    """Downloads index close snapshots from the niftyindices.com website."""

    def __init__(self):
        super().__init__()
        self.base_url = "https://www.niftyindices.com"
        self._routes = {
            "bhavcopy": "/Daily_Snapshot/ind_close_all_{dd}{mm}{yyyy}.csv"
        }
        self.h = {
            "Host": "www.niftyindices.com",
            "Referer": "https://www.nseindia.com",
            "X-Requested-With": "XMLHttpRequest",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36",
            "Accept": "*/*",
            "Accept-Encoding": "gzip, deflate",
            "Accept-Language": "en-GB,en-US;q=0.9,en;q=0.8",
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
        }
        self.s.headers.update(self.h)

    def bhavcopy_index_raw(self, dt):
        """Downloads raw index bhavcopy text for a specific date"""
        dd = dt.strftime('%d')
        mm = dt.strftime('%m')  # numeric month; the old .upper() was a no-op
        yyyy = dt.year
        r = self.get("bhavcopy", yyyy=yyyy, mm=mm, dd=dd)
        return r.text

    def bhavcopy_index_save(self, dt, dest, skip_if_present=True):
        """Downloads and saves index bhavcopy csv for a specific date"""
        fmt = "ind_close_all_%d%m%Y.csv"
        fname = os.path.join(dest, dt.strftime(fmt))
        if os.path.isfile(fname) and skip_if_present:
            return fname
        text = self.bhavcopy_index_raw(dt)
        with open(fname, 'w') as fp:
            fp.write(text)
        return fname


# Module-level convenience singletons and aliases (public API).
a = NSEArchives()
bhavcopy_raw = a.bhavcopy_raw
bhavcopy_save = a.bhavcopy_save
full_bhavcopy_raw = a.full_bhavcopy_raw
full_bhavcopy_save = a.full_bhavcopy_save
bhavcopy_fo_raw = a.bhavcopy_fo_raw
bhavcopy_fo_save = a.bhavcopy_fo_save
ia = NSEIndicesArchives()
bhavcopy_index_raw = ia.bhavcopy_index_raw
bhavcopy_index_save = ia.bhavcopy_index_save


def expiry_dates(dt, instrument_type="", symbol="", contracts=0):
    """Return distinct expiry dates seen in the F&O bhavcopy of `dt`.

    Optionally filter by instrument type (column 0), symbol (column 1) and
    a minimum traded-contract count (column 10, exclusive lower bound).
    """
    txt = bhavcopy_fo_raw(dt)
    rows = txt.split("\n")
    rows.pop(0)  # Remove headers
    if len(rows[-1].split(',')) <= 10:
        rows.pop(-1)  # Remove last blank row
    cells = [row.split(',') for row in rows]
    if instrument_type:
        cells = filter(lambda x: x[0] == instrument_type, cells)
    if symbol:
        cells = filter(lambda x: x[1] == symbol, cells)
    cells = filter(lambda x: int(x[10]) > contracts, cells)
    dts_txt = [row[2] for row in cells]
    dts = [datetime.strptime(d, "%d-%b-%Y").date() for d in dts_txt]
    return list(set(dts))


if __name__ == "__main__":
    # Ad-hoc manual check of the niftyindices endpoint.
    # NOTE(review): verify=False disables TLS certificate verification —
    # acceptable only for this debug snippet, never for library code.
    url = "https://www.niftyindices.com/Daily_Snapshot/ind_close_all_20082020.csv"
    headers = {
        "Host": "www.niftyindices.com",
        "Referer": "https://www.nseindia.com",
        "X-Requested-With": "XMLHttpRequest",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.135 Safari/537.36",
        "Accept": "*/*",
        "Accept-Encoding": "gzip, deflate",
        "Accept-Language": "en-GB,en-US;q=0.9,en;q=0.8",
        "Cache-Control": "no-cache",
        "Connection": "keep-alive",
    }
    d = requests.get(url, stream=True, timeout=10, headers=headers, verify=False)
    for chunk in d.iter_content(chunk_size=1024):
        print("Received")
        print(len(chunk))
"""
Implements functionality to download historical stock, index and
derivatives data from NSE and NSEIndices website
"""
import os
import json
import itertools
import csv
from pprint import pprint
from urllib.parse import urljoin
from requests import Session
import click
try:
    import pandas as pd
    import numpy as np
except:
    pd = None

from jugaad_data import util as ut
from .archives import (bhavcopy_raw, bhavcopy_save,
                       full_bhavcopy_raw, full_bhavcopy_save,
                       bhavcopy_fo_raw, bhavcopy_fo_save,
                       bhavcopy_index_raw, bhavcopy_index_save, expiry_dates)

APP_NAME = "nsehistory"


class NSEHistory:
    """Fetches historical equity and derivatives quotes from nseindia.com,
    handling the cookie bootstrap and month-wise request chunking."""

    def __init__(self):
        self.headers = {
            "Host": "www.nseindia.com",
            "Referer": "https://www.nseindia.com/get-quotes/equity?symbol=SBIN",
            "X-Requested-With": "XMLHttpRequest",
            "pragma": "no-cache",
            "sec-fetch-dest": "empty",
            "sec-fetch-mode": "cors",
            "sec-fetch-site": "same-origin",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36",
            "Accept": "*/*",
            "Accept-Encoding": "gzip, deflate",
            "Accept-Language": "en-GB,en-US;q=0.9,en;q=0.8",
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
        }
        self.path_map = {
            "stock_history": "/api/historical/cm/equity",
            "derivatives": "/api/historical/fo/derivatives",
            "equity_quote_page": "/get-quotes/equity",
        }
        self.base_url = "https://www.nseindia.com"
        self.cache_dir = ".cache"
        self.workers = 2          # thread-pool size for month-chunk fetches
        self.use_threads = True
        self.show_progress = False

        self.s = Session()
        self.s.headers.update(self.headers)
        self.ssl_verify = True

    def _get(self, path_name, params):
        """GET an API path; first visits the quote page to obtain the
        `nseappid` cookie NSE requires for API calls."""
        if "nseappid" not in self.s.cookies:
            page = urljoin(self.base_url, self.path_map["equity_quote_page"])
            self.s.get(page, verify=self.ssl_verify)
        url = urljoin(self.base_url, self.path_map[path_name])
        self.r = self.s.get(url, params=params, verify=self.ssl_verify)
        return self.r

    @ut.cached(APP_NAME + '-stock')
    def _stock(self, symbol, from_date, to_date, series="EQ"):
        """Fetch one chunk of equity history; cached on disk by arguments."""
        params = {
            'symbol': symbol,
            'from': from_date.strftime('%d-%m-%Y'),
            'to': to_date.strftime('%d-%m-%Y'),
            'series': '["{}"]'.format(series),
        }
        self.r = self._get("stock_history", params)
        return self.r.json()['data']

    @ut.cached(APP_NAME + '-derivatives')
    def _derivatives(self, symbol, from_date, to_date, expiry_date, instrument_type, strike_price=None, option_type=None):
        """Fetch one chunk of derivatives history; cached on disk.

        Raises:
            Exception: for an invalid instrument_type, or when OPT* is
                requested without both strike_price and option_type.
        """
        valid_instrument_types = ["OPTIDX", "OPTSTK", "FUTIDX", "FUTSTK"]
        if instrument_type not in valid_instrument_types:
            raise Exception("Invalid instrument_type, should be one of {}".format(", ".join(valid_instrument_types)))

        params = {
            'symbol': symbol,
            'from': from_date.strftime('%d-%m-%Y'),
            'to': to_date.strftime('%d-%m-%Y'),
            'expiryDate': expiry_date.strftime('%d-%b-%Y').upper(),
            'instrumentType': instrument_type
        }
        if "OPT" in instrument_type:
            if not (strike_price and option_type):
                raise Exception("Missing argument for OPTIDX or OPTSTK, require both strike_price and option_type")
            params['strikePrice'] = "{:.2f}".format(strike_price)
            params['optionType'] = option_type

        self.r = self._get("derivatives", params)
        return self.r.json()['data']

    def stock_raw(self, symbol, from_date, to_date, series="EQ"):
        """Return raw equity history rows, fetched month-by-month (newest first)."""
        date_ranges = ut.break_dates(from_date, to_date)
        params = [(symbol, x[0], x[1], series) for x in reversed(date_ranges)]
        chunks = ut.pool(self._stock, params, max_workers=self.workers)
        return list(itertools.chain.from_iterable(chunks))

    def derivatives_raw(self, symbol, from_date, to_date, expiry_date, instrument_type, strike_price, option_type):
        """Return raw derivatives history rows, fetched month-by-month (newest first)."""
        date_ranges = ut.break_dates(from_date, to_date)
        params = [(symbol, x[0], x[1], expiry_date, instrument_type, strike_price, option_type) for x in reversed(date_ranges)]
        chunks = ut.pool(self._derivatives, params, max_workers=self.workers)
        return list(itertools.chain.from_iterable(chunks))


# Module-level singleton and public aliases.
h = NSEHistory()
stock_raw = h.stock_raw
derivatives_raw = h.derivatives_raw

# Column mapping between NSE's raw JSON keys and the csv/df headers.
stock_select_headers = ["CH_TIMESTAMP", "CH_SERIES",
                        "CH_OPENING_PRICE", "CH_TRADE_HIGH_PRICE",
                        "CH_TRADE_LOW_PRICE", "CH_PREVIOUS_CLS_PRICE",
                        "CH_LAST_TRADED_PRICE", "CH_CLOSING_PRICE",
                        "VWAP", "CH_52WEEK_HIGH_PRICE", "CH_52WEEK_LOW_PRICE",
                        "CH_TOT_TRADED_QTY", "CH_TOT_TRADED_VAL", "CH_TOTAL_TRADES",
                        "CH_SYMBOL"]
stock_final_headers = ["DATE", "SERIES",
                       "OPEN", "HIGH",
                       "LOW", "PREV. CLOSE",
                       "LTP", "CLOSE",
                       "VWAP", "52W H", "52W L",
                       "VOLUME", "VALUE", "NO OF TRADES", "SYMBOL"]
stock_dtypes = [ut.np_date, str,
                ut.np_float, ut.np_float,
                ut.np_float, ut.np_float,
                ut.np_float, ut.np_float,
                ut.np_float, ut.np_float, ut.np_float,
                ut.np_int, ut.np_float, ut.np_int, str]


def stock_csv(symbol, from_date, to_date, series="EQ", output="", show_progress=True):
    """Download equity history and write it as CSV; returns the file path."""
    if show_progress:
        h = NSEHistory()
        h.show_progress = show_progress
        date_ranges = ut.break_dates(from_date, to_date)
        params = [(symbol, x[0], x[1], series) for x in reversed(date_ranges)]
        with click.progressbar(params, label=symbol) as ps:
            chunks = []
            for p in ps:
                r = h.stock_raw(*p)
                chunks.append(r)
            raw = list(itertools.chain.from_iterable(chunks))
    else:
        raw = stock_raw(symbol, from_date, to_date, series)

    if not output:
        output = "{}-{}-{}-{}.csv".format(symbol, from_date, to_date, series)
    if raw:
        with open(output, 'w') as fp:
            fp.write(",".join(stock_final_headers) + '\n')
            for row in raw:
                row_select = [str(row[x]) for x in stock_select_headers]
                fp.write(",".join(row_select) + '\n')
    return output


def stock_df(symbol, from_date, to_date, series="EQ"):
    """Download equity history and return it as a typed pandas DataFrame."""
    if not pd:
        raise ModuleNotFoundError("Please install pandas using \n pip install pandas")
    raw = stock_raw(symbol, from_date, to_date, series)
    df = pd.DataFrame(raw)[stock_select_headers]
    df.columns = stock_final_headers
    for i, col in enumerate(stock_final_headers):
        df[col] = df[col].apply(stock_dtypes[i])
    return df


futures_select_headers = ["FH_TIMESTAMP", "FH_EXPIRY_DT",
                          "FH_OPENING_PRICE", "FH_TRADE_HIGH_PRICE",
                          "FH_TRADE_LOW_PRICE", "FH_CLOSING_PRICE",
                          "FH_LAST_TRADED_PRICE", "FH_SETTLE_PRICE", "FH_TOT_TRADED_QTY", "FH_MARKET_LOT",
                          "FH_TOT_TRADED_VAL", "FH_OPEN_INT", "FH_CHANGE_IN_OI",
                          "FH_SYMBOL"]
futures_final_headers = ["DATE", "EXPIRY",
                         "OPEN", "HIGH",
                         "LOW", "CLOSE",
                         "LTP", "SETTLE PRICE", "TOTAL TRADED QUANTITY", "MARKET LOT",
                         "PREMIUM VALUE", "OPEN INTEREST", "CHANGE IN OI",
                         "SYMBOL"]

options_select_headers = ["FH_TIMESTAMP", "FH_EXPIRY_DT", "FH_OPTION_TYPE", "FH_STRIKE_PRICE",
                          "FH_OPENING_PRICE", "FH_TRADE_HIGH_PRICE",
                          "FH_TRADE_LOW_PRICE", "FH_CLOSING_PRICE",
                          "FH_LAST_TRADED_PRICE", "FH_SETTLE_PRICE", "FH_TOT_TRADED_QTY", "FH_MARKET_LOT",
                          "FH_TOT_TRADED_VAL", "FH_OPEN_INT", "FH_CHANGE_IN_OI",
                          "FH_SYMBOL"]
options_final_headers = ["DATE", "EXPIRY", "OPTION TYPE", "STRIKE PRICE",
                         "OPEN", "HIGH",
                         "LOW", "CLOSE",
                         "LTP", "SETTLE PRICE", "TOTAL TRADED QUANTITY", "MARKET LOT",
                         "PREMIUM VALUE", "OPEN INTEREST", "CHANGE IN OI",
                         "SYMBOL"]


def derivatives_csv(symbol, from_date, to_date, expiry_date, instrument_type, strike_price=None, option_type=None, output="", show_progress=False):
    """Download derivatives history and write it as CSV; returns the file path."""
    if show_progress:
        h = NSEHistory()
        h.show_progress = show_progress
        date_ranges = ut.break_dates(from_date, to_date)
        params = [(symbol, x[0], x[1], expiry_date, instrument_type, strike_price, option_type) for x in reversed(date_ranges)]
        with click.progressbar(params, label=symbol) as ps:
            chunks = []
            for p in ps:
                r = h.derivatives_raw(*p)
                chunks.append(r)
            raw = list(itertools.chain.from_iterable(chunks))
    else:
        raw = derivatives_raw(symbol, from_date, to_date, expiry_date, instrument_type, strike_price, option_type)
    if not output:
        # BUGFIX: this previously referenced an undefined name `series`
        # (copied from stock_csv) and raised NameError whenever no output
        # path was supplied; use the instrument type in the default name.
        output = "{}-{}-{}-{}.csv".format(symbol, from_date, to_date, instrument_type)
    if "FUT" in instrument_type:
        final_headers = futures_final_headers
        select_headers = futures_select_headers
    if "OPT" in instrument_type:
        final_headers = options_final_headers
        select_headers = options_select_headers
    if raw:
        with open(output, 'w') as fp:
            fp.write(",".join(final_headers) + '\n')
            for row in raw:
                row_select = [str(row[x]) for x in select_headers]
                fp.write(",".join(row_select) + '\n')
    return output


def derivatives_df(symbol, from_date, to_date, expiry_date, instrument_type, strike_price=None, option_type=None):
    """Download derivatives history and return it as a typed pandas DataFrame."""
    if not pd:
        raise ModuleNotFoundError("Please install pandas using \n pip install pandas")
    raw = derivatives_raw(symbol, from_date, to_date, expiry_date, instrument_type,
                          strike_price=strike_price, option_type=option_type)
    futures_dtype = [ut.np_date, ut.np_date,
                     ut.np_float, ut.np_float,
                     ut.np_float, ut.np_float,
                     ut.np_float, ut.np_float,
                     ut.np_int, ut.np_int,
                     ut.np_float, ut.np_float, ut.np_float,
                     str]
    options_dtype = [ut.np_date, ut.np_date, str, ut.np_float,
                     ut.np_float, ut.np_float,
                     ut.np_float, ut.np_float,
                     ut.np_float, ut.np_float,
                     ut.np_int, ut.np_int,
                     ut.np_float, ut.np_float, ut.np_float,
                     str]

    if "FUT" in instrument_type:
        final_headers = futures_final_headers
        select_headers = futures_select_headers
        dtypes = futures_dtype
    if "OPT" in instrument_type:
        final_headers = options_final_headers
        select_headers = options_select_headers
        dtypes = options_dtype
    df = pd.DataFrame(raw)[select_headers]
    df.columns = final_headers
    for i, col in enumerate(final_headers):
        df[col] = df[col].apply(dtypes[i])
    return df


class NSEIndexHistory(NSEHistory):
    """Fetches index (and index P/E) history from niftyindices.com, which
    uses JSON POST endpoints instead of NSE's GET API."""

    def __init__(self):
        super().__init__()
        self.headers = {
            "Host": "niftyindices.com",
            "Referer": "niftyindices.com",
            "X-Requested-With": "XMLHttpRequest",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36",
            "Origin": "https://niftyindices.com",
            "Accept": "*/*",
            "Accept-Encoding": "gzip, deflate",
            "Accept-Language": "en-GB,en-US;q=0.9,en;q=0.8",
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
            "Content-Type": "application/json; charset=UTF-8"
        }
        self.path_map = {
            "index_history": "/Backpage.aspx/getHistoricaldatatabletoString",
            "index_pe_history": "/Backpage.aspx/getpepbHistoricaldataDBtoString"
        }
        self.base_url = "https://niftyindices.com"
        self.s = Session()
        self.s.headers.update(self.headers)
        self.ssl_verify = True

    def _post_json(self, path_name, params):
        """POST `params` as JSON to the named path and return the response."""
        url = urljoin(self.base_url, self.path_map[path_name])
        self.r = self.s.post(url, json=params, verify=self.ssl_verify)
        return self.r

    @ut.cached(APP_NAME + '-index')
    def _index(self, symbol, from_date, to_date):
        params = {'name': symbol,
                  'startDate': from_date.strftime("%d-%b-%Y"),
                  'endDate': to_date.strftime("%d-%b-%Y")
                  }
        r = self._post_json("index_history", params=params)
        # The payload is a JSON string embedded in the 'd' field.
        return json.loads(r.json()['d'])

    def index_raw(self, symbol, from_date, to_date):
        """Return raw index history rows, fetched month-by-month."""
        date_ranges = ut.break_dates(from_date, to_date)
        params = [(symbol, x[0], x[1]) for x in reversed(date_ranges)]
        chunks = ut.pool(self._index, params, max_workers=self.workers)
        return list(itertools.chain.from_iterable(chunks))

    @ut.cached(APP_NAME + '-index_pe')
    def _index_pe(self, symbol, from_date, to_date):
        params = {'name': symbol,
                  'startDate': from_date.strftime("%d-%b-%Y"),
                  'endDate': to_date.strftime("%d-%b-%Y")
                  }
        r = self._post_json("index_pe_history", params=params)
        return json.loads(r.json()['d'])

    def index_pe_raw(self, symbol, from_date, to_date):
        """Return raw index P/E-P/B-dividend-yield rows, fetched month-by-month."""
        date_ranges = ut.break_dates(from_date, to_date)
        params = [(symbol, x[0], x[1]) for x in reversed(date_ranges)]
        chunks = ut.pool(self._index_pe, params, max_workers=self.workers)
        return list(itertools.chain.from_iterable(chunks))


ih = NSEIndexHistory()
index_raw = ih.index_raw
index_pe_raw = ih.index_pe_raw


def index_csv(symbol, from_date, to_date, output="", show_progress=False):
    """Download index history and write it as CSV; returns the file path."""
    if show_progress:
        h = NSEIndexHistory()
        date_ranges = ut.break_dates(from_date, to_date)
        params = [(symbol, x[0], x[1]) for x in reversed(date_ranges)]
        with click.progressbar(params, label=symbol) as ps:
            chunks = []
            for p in ps:
                r = h._index(*p)
                chunks.append(r)
            raw = list(itertools.chain.from_iterable(chunks))
    else:
        raw = index_raw(symbol, from_date, to_date)

    if not output:
        output = "{}-{}-{}.csv".format(symbol, from_date, to_date)

    if raw:
        with open(output, 'w') as fp:
            fieldnames = ["INDEX_NAME", "HistoricalDate", "OPEN", "HIGH", "LOW", "CLOSE"]
            writer = csv.DictWriter(fp, fieldnames=fieldnames, extrasaction='ignore')
            writer.writeheader()
            writer.writerows(raw)
    return output


def index_df(symbol, from_date, to_date):
    """Download index history and return it as a typed pandas DataFrame."""
    if not pd:
        raise ModuleNotFoundError("Please install pandas using \n pip install pandas")
    raw = index_raw(symbol, from_date, to_date)
    df = pd.DataFrame(raw)
    index_dtypes = {'OPEN': ut.np_float, 'HIGH': ut.np_float, 'LOW': ut.np_float, 'CLOSE': ut.np_float,
                    'Index Name': str, 'INDEX_NAME': str, 'HistoricalDate': ut.np_date}
    for col, dtype in index_dtypes.items():
        # ROBUSTNESS: the API does not always return every mapped column
        # (e.g. 'Index Name' vs 'INDEX_NAME'); skip absent ones instead of
        # raising KeyError.
        if col in df.columns:
            df[col] = df[col].apply(dtype)
    return df


def index_pe_df(symbol, from_date, to_date):
    """Download index P/E-P/B-div-yield history as a typed pandas DataFrame."""
    if not pd:
        raise ModuleNotFoundError("Please install pandas using \n pip install pandas")
    raw = index_pe_raw(symbol, from_date, to_date)
    df = pd.DataFrame(raw)
    index_dtypes = {'pe': ut.np_float, 'pb': ut.np_float, 'divYield': ut.np_float,
                    'Index Name': str, 'DATE': ut.np_date}
    for col, dtype in index_dtypes.items():
        if col in df.columns:  # see index_df: tolerate absent columns
            df[col] = df[col].apply(dtype)
    return df
"""
Implements live data fetch functionality
"""
from datetime import datetime
from requests import Session
from ..util import live_cache


class NSELive:
    """Thin client over nseindia.com's live JSON API endpoints.

    A session is bootstrapped by visiting a quote page once (to obtain the
    cookies NSE requires); most methods are memoized briefly via live_cache.
    """
    time_out = 5
    base_url = "https://www.nseindia.com/api"
    page_url = "https://www.nseindia.com/get-quotes/equity?symbol=LT"
    _routes = {
        "stock_meta": "/equity-meta-info",
        "stock_quote": "/quote-equity",
        "stock_derivative_quote": "/quote-derivative",
        "market_status": "/marketStatus",
        "chart_data": "/chart-databyindex",
        "market_turnover": "/market-turnover",
        "equity_derivative_turnover": "/equity-stock",
        "all_indices": "/allIndices",
        "live_index": "/equity-stockIndices",
        "index_option_chain": "/option-chain-indices",
        "equity_option_chain": "/option-chain-equities",
        "currency_option_chain": "/option-chain-currency",
        "pre_open_market": "/market-data-pre-open",
        "holiday_list": "/holiday-master?type=trading",
        "corporate_announcements": "/corporate-announcements"
    }

    def __init__(self):
        self.s = Session()
        h = {
            "Host": "www.nseindia.com",
            "Referer": "https://www.nseindia.com/get-quotes/equity?symbol=SBIN",
            "X-Requested-With": "XMLHttpRequest",
            "pragma": "no-cache",
            "sec-fetch-dest": "empty",
            "sec-fetch-mode": "cors",
            "sec-fetch-site": "same-origin",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36",
            "Accept": "*/*",
            "Accept-Encoding": "gzip, deflate",
            "Accept-Language": "en-GB,en-US;q=0.9,en;q=0.8",
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
        }
        self.s.headers.update(h)
        self.s.get(self.page_url)  # bootstrap cookies

    def get(self, route, payload=None):
        """GET the named route with `payload` as query params, return JSON.

        BUGFIX: the default used to be a mutable ``payload={}`` shared
        across all calls; ``None`` avoids that Python pitfall.
        """
        url = self.base_url + self._routes[route]
        r = self.s.get(url, params=payload or {})
        return r.json()

    @live_cache
    def stock_quote(self, symbol):
        data = {"symbol": symbol}
        return self.get("stock_quote", data)

    @live_cache
    def stock_quote_fno(self, symbol):
        data = {"symbol": symbol}
        return self.get("stock_derivative_quote", data)

    @live_cache
    def trade_info(self, symbol):
        data = {"symbol": symbol, "section": "trade_info"}
        return self.get("stock_quote", data)

    @live_cache
    def market_status(self):
        return self.get("market_status", {})

    @live_cache
    def chart_data(self, symbol, indices=False):
        # Equities use the "<SYMBOL>EQN" chart key; indices use the raw name.
        data = {"index": symbol + "EQN"}
        if indices:
            data["index"] = symbol
            data["indices"] = "true"
        return self.get("chart_data", data)

    @live_cache
    def tick_data(self, symbol, indices=False):
        return self.chart_data(symbol, indices)

    @live_cache
    def market_turnover(self):
        return self.get("market_turnover")

    @live_cache
    def eq_derivative_turnover(self, type="allcontracts"):
        # NOTE: parameter name `type` shadows the builtin but is kept for
        # backward compatibility with existing keyword callers.
        data = {"index": type}
        return self.get("equity_derivative_turnover", data)

    @live_cache
    def all_indices(self):
        return self.get("all_indices")

    def live_index(self, symbol="NIFTY 50"):
        data = {"index": symbol}
        return self.get("live_index", data)

    @live_cache
    def index_option_chain(self, symbol="NIFTY"):
        data = {"symbol": symbol}
        return self.get("index_option_chain", data)

    @live_cache
    def equities_option_chain(self, symbol):
        data = {"symbol": symbol}
        return self.get("equity_option_chain", data)

    @live_cache
    def currency_option_chain(self, symbol="USDINR"):
        data = {"symbol": symbol}
        return self.get("currency_option_chain", data)

    @live_cache
    def live_fno(self):
        return self.live_index("SECURITIES IN F&O")

    @live_cache
    def pre_open_market(self, key="NIFTY"):
        data = {"key": key}
        return self.get("pre_open_market", data)

    @live_cache
    def holiday_list(self):
        return self.get("holiday_list", {})

    def corporate_announcements(self, segment='equities', from_date=None, to_date=None, symbol=None):
        """
        This function returns the corporate annoucements
        (https://www.nseindia.com/companies-listing/corporate-filings-announcements)

        Dates are formatted as dd-mm-YYYY; both or neither must be given.
        """
        payload = {"index": segment}
        if from_date and to_date:
            payload['from_date'] = from_date.strftime("%d-%m-%Y")
            payload['to_date'] = to_date.strftime("%d-%m-%Y")
        elif from_date or to_date:
            raise Exception("Please provide both from_date and to_date")
        if symbol:
            payload['symbol'] = symbol
        return self.get("corporate_announcements", payload)


# ---- jugaad_data/rbi/__init__.py -------------------------------------------
from requests import Session
from bs4 import BeautifulSoup


def tr_to_json(wrapper):
    """Convert the <tr> rows inside `wrapper` into a {label: value} dict,
    stripping the ':', '*' and '#' markers RBI decorates values with."""
    op = {}
    for tr in wrapper.find_all("tr"):
        tds = tr.find_all('td')
        if len(tds) >= 2:
            key = tds[0].text.strip()
            val = tds[1].text.replace(':', '').replace('*', '').replace('#', '').strip()
            op[key] = val
    return op


class RBI:
    """Scrapes the current policy/reference rates from the RBI home page."""
    base_url = "https://www.rbi.org.in/"

    def __init__(self):
        self.s = Session()

    def current_rates(self):
        """Return the current rates table as a {name: value} dict."""
        r = self.s.get(self.base_url)
        bs = BeautifulSoup(r.text, "html.parser")
        wrapper = bs.find('div', {"id": "wrapper"})
        return tr_to_json(wrapper)
self.s.get(self.base_url) 28 | 29 | bs = BeautifulSoup(r.text, "html.parser") 30 | wrapper = bs.find('div', {"id": "wrapper"}) 31 | trs = wrapper.find_all('tr') 32 | return tr_to_json(wrapper) 33 | 34 | 35 | -------------------------------------------------------------------------------- /jugaad_data/util.py: -------------------------------------------------------------------------------- 1 | import os 2 | import collections 3 | import json 4 | import pickle 5 | import time 6 | from datetime import datetime, timedelta, date 7 | from concurrent.futures import ThreadPoolExecutor 8 | import click 9 | from appdirs import user_cache_dir 10 | 11 | import calendar 12 | 13 | import math 14 | 15 | try: 16 | import numpy as np 17 | except: 18 | np = None 19 | 20 | def np_exception(function): 21 | def wrapper(*args, **kwargs): 22 | if not np: 23 | raise ModuleNotFoundError("Please install pandas and numpy using \n pip install pandas") 24 | return function(*args, **kwargs) 25 | 26 | return wrapper 27 | 28 | @np_exception 29 | def np_float(num): 30 | try: 31 | return np.float64(num) 32 | except: 33 | return np.nan 34 | 35 | @np_exception 36 | def np_date(dt): 37 | try: 38 | return np.datetime64(dt) 39 | except: 40 | pass 41 | 42 | try: 43 | dt = datetime.strptime(dt, "%d-%b-%Y").date() 44 | return np.datetime64(dt) 45 | except: 46 | pass 47 | 48 | try: 49 | dt = datetime.strptime(dt, "%d %b %Y").date() 50 | return np.datetime64(dt) 51 | except: 52 | pass 53 | 54 | 55 | 56 | return np.datetime64('nat') 57 | 58 | 59 | @np_exception 60 | def np_int(num): 61 | try: 62 | return np.int64(num) 63 | except: 64 | return 0 65 | 66 | def break_dates(from_date, to_date): 67 | if from_date.replace(day=1) == to_date.replace(day=1): 68 | return [(from_date, to_date)] 69 | date_ranges = [] 70 | month_start = from_date 71 | month_end = month_start.replace(day=calendar.monthrange(month_start.year, from_date.month)[1]) 72 | while(month_end < to_date): 73 | date_ranges.append((month_start, 
month_end)) 74 | month_start = month_end + timedelta(days=1) 75 | month_end = month_start.replace(day=calendar.monthrange(month_start.year, month_start.month)[1]) 76 | if month_end >= to_date: 77 | date_ranges.append((month_start, to_date)) 78 | return date_ranges 79 | 80 | 81 | def kw_to_fname(**kw): 82 | name = "-".join([str(kw[k]) for k in sorted(kw) if k != "self"]) 83 | return name 84 | 85 | 86 | 87 | def cached(app_name): 88 | """ 89 | Note to self: 90 | This is a russian doll 91 | wrapper - actual caching mechanism 92 | _cached - actual decorator 93 | cached - wrapper around decorator to make 'app_name' dynamic 94 | """ 95 | def _cached(function): 96 | def wrapper(*args, **kw): 97 | kw.update(zip(function.__code__.co_varnames, args)) 98 | env_dir = os.environ.get("J_CACHE_DIR") 99 | if not env_dir: 100 | cache_dir = user_cache_dir(app_name, app_name) 101 | else: 102 | cache_dir = os.path.join(env_dir, app_name) 103 | 104 | file_name = kw_to_fname(**kw) 105 | path = os.path.join(cache_dir, file_name) 106 | if not os.path.isfile(path): 107 | if not os.path.exists(cache_dir): 108 | os.makedirs(cache_dir) 109 | j = function(**kw) 110 | with open(path, 'wb') as fp: 111 | pickle.dump(j, fp) 112 | else: 113 | with open(path, 'rb') as fp: 114 | j = pickle.load(fp) 115 | return j 116 | return wrapper 117 | return _cached 118 | 119 | 120 | def pool(function, params, use_threads=True, max_workers=2): 121 | if use_threads: 122 | with ThreadPoolExecutor(max_workers=max_workers) as ex: 123 | dfs = ex.map(function, *zip(*params)) 124 | else: 125 | dfs = [] 126 | for param in params: 127 | try: 128 | r = function(*param) 129 | except: 130 | raise 131 | dfs.append(r) 132 | return dfs 133 | 134 | def live_cache(app_name): 135 | """Caches the output for time_out specified. This is done in order to 136 | prevent hitting live quote requests to NSE too frequently. 
This wrapper 137 | will fetch the quote/live result first time and return the same result for 138 | any calls within 'time_out' seconds. 139 | 140 | Logic: 141 | key = concat of args 142 | try: 143 | cached_value = self._cache[key] 144 | if now - self._cache['tstamp'] < time_out 145 | return cached_value['value'] 146 | except AttributeError: # _cache attribute has not been created yet 147 | self._cache = {} 148 | finally: 149 | val = fetch-new-value 150 | new_value = {'tstamp': now, 'value': val} 151 | self._cache[key] = new_value 152 | return val 153 | 154 | """ 155 | def wrapper(self, *args, **kwargs): 156 | """Wrapper function which calls the function only after the timeout, 157 | otherwise returns value from the cache. 158 | 159 | """ 160 | # Get key by just concating the list of args and kwargs values and hope 161 | # that it does not break the code :P 162 | inputs = [str(a) for a in args] + [str(kwargs[k]) for k in kwargs] 163 | key = app_name.__name__ + '-'.join(inputs) 164 | now = datetime.now() 165 | time_out = self.time_out 166 | try: 167 | cache_obj = self._cache[key] 168 | if now - cache_obj['timestamp'] < timedelta(seconds=time_out): 169 | return cache_obj['value'] 170 | except: 171 | self._cache = {} 172 | value = app_name(self, *args, **kwargs) 173 | self._cache[key] = {'value': value, 'timestamp': now} 174 | return value 175 | 176 | return wrapper 177 | 178 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools >= 61.0"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [project] 6 | name = "jugaad-data" 7 | version = "0.27" 8 | requires-python = ">= 3.6" 9 | authors = [{"name"= "jugaad-coder", "email"="abc@xyz.com"}] 10 | description = "Free Zerodha API python library" 11 | readme = "README.md" 12 | license = {file = "LICENSE.YOLO.md"} 13 | keywords = ["NSE", "Live", 
"Bhavcopy", "History", "Futures", "Options", "Stock Data"] 14 | dynamic = ["dependencies"] 15 | 16 | 17 | [project.scripts] 18 | jdata = "jugaad_data.cli:cli" 19 | 20 | [project.urls] 21 | Homepage = "https://marketsetup.in/documentation/jugaad-data/" 22 | Documentation = "https://marketsetup.in/documentation/jugaad-data/" 23 | Repository = "https://github.com/jugaad-py/jugaad-data" 24 | Issues = "https://github.com/jugaad-py/jugaad-data/issues" 25 | 26 | [tool.setuptools.dynamic] 27 | dependencies = {file = ["requirements.txt"]} -------------------------------------------------------------------------------- /requirements.dev.txt: -------------------------------------------------------------------------------- 1 | pytest==7.1.2 2 | pytest-testmon 3 | pytest-watch 4 | pyfakefs 5 | pandas 6 | jupyterlab 7 | twine 8 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | requests 2 | click==7.1.2 3 | appdirs==1.4.4 4 | beautifulsoup4==4.9.3 5 | 6 | -------------------------------------------------------------------------------- /run_tests.sh: -------------------------------------------------------------------------------- 1 | export PYTHONWARNINGS="ignore:Unverified HTTPS request" 2 | CNT=0 3 | while true; do 4 | OUTPUT=$(inotifywait jugaad_data/ tests/ -q -e create -e close_write -e attrib -e move ) 5 | clear 6 | echo $OUTPUT 7 | #TEST_OP=$(env/bin/python -m unittest tests.test_cli 2>&1) 8 | TEST_OP=$(env/bin/python -m unittest discover 2>&1) 9 | echo "$TEST_OP" 10 | CNT=$((CNT+1)) 11 | echo $CNT 12 | 13 | 14 | done 15 | 16 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jugaad-py/jugaad-data/2eb865ffb82e78ab73d1ac1de4173159ea9ab914/tests/__init__.py 
-------------------------------------------------------------------------------- /tests/test_bhav.py: -------------------------------------------------------------------------------- 1 | from datetime import date 2 | from jugaad_data.nse import bhavcopy_raw, full_bhavcopy_raw, bhavcopy_fo_raw, bhavcopy_index_raw, expiry_dates 3 | import pytest 4 | import requests 5 | 6 | def test_bhavcopy(): 7 | r = bhavcopy_raw(date(2020,1,1)) 8 | header = "SYMBOL,SERIES,OPEN,HIGH,LOW,CLOSE,LAST,PREVCLOSE,TOTTRDQTY,TOTTRDVAL,TIMESTAMP,TOTALTRADES,ISIN" 9 | assert "RELIANCE" in r 10 | assert header in r 11 | 12 | # def test_full_bhavcopy(): 13 | # r = full_bhavcopy_raw(date(2020,1,1)) 14 | # header = "SYMBOL, SERIES, DATE1, PREV_CLOSE, OPEN_PRICE, HIGH_PRICE, LOW_PRICE, LAST_PRICE, CLOSE_PRICE, AVG_PRICE, TTL_TRD_QNTY, TURNOVER_LACS, NO_OF_TRADES, DELIV_QTY, DELIV_PER" 15 | # assert "SBIN" in r 16 | # assert header in r 17 | 18 | # with pytest.raises(requests.exceptions.ReadTimeout) as e: 19 | # r = full_bhavcopy_raw(date(2019,1,1)) 20 | # assert '2019' in e.value.args[0] 21 | 22 | def test_bhavcopy_fo(): 23 | r = bhavcopy_fo_raw(date(2020,1,1)) 24 | header = "INSTRUMENT,SYMBOL,EXPIRY_DT,STRIKE_PR,OPTION_TYP,OPEN,HIGH,LOW,CLOSE,SETTLE_PR,CONTRACTS,VAL_INLAKH,OPEN_INT,CHG_IN_O" 25 | assert "SBIN" in r 26 | assert header in r 27 | 28 | # def test_bhavcopy_index(): 29 | # r = bhavcopy_index_raw(date(2020,1,1)) 30 | # header = "Index Name,Index Date,Open Index Value,High Index Value,Low Index Value,Closing Index Value,Points Change,Change(%)" 31 | # assert "NIFTY" in r 32 | # assert header in r 33 | 34 | def test_expiry_dates(): 35 | dt = date(2020, 9, 28) 36 | dts = expiry_dates(dt) 37 | assert date(2020, 10, 1) in dts 38 | assert date(2020, 10, 8) in dts 39 | dts = expiry_dates(dt, "OPTIDX", "NIFTY", 10000) 40 | assert date(2020, 10, 1) in dts 41 | assert date(2020, 10, 8) in dts 42 | dts = expiry_dates(dt, "FUTIDX", "NIFTY") 43 | assert len(dts) == 3 44 | dts = expiry_dates(dt, 
"FUTSTK", "RELIANCE") 45 | assert len(dts) == 3 46 | dts = expiry_dates(dt, "OPTSTK", "RELIANCE") 47 | assert date(2020, 10, 29) in dts 48 | assert date(2020, 11, 26) in dts 49 | 50 | """ 51 | def test_bhavcopy_on_holiday(): 52 | r = bhavcopy_raw(date(2020,1,5)) 53 | header = "SYMBOL,SERIES,OPEN,HIGH,LOW,CLOSE,LAST,PREVCLOSE,TOTTRDQTY,TOTTRDVAL,TIMESTAMP,TOTALTRADES,ISIN" 54 | assert "RELIANCE" in r 55 | assert header in r 56 | 57 | """ 58 | -------------------------------------------------------------------------------- /tests/test_cli.py: -------------------------------------------------------------------------------- 1 | import warnings 2 | import csv 3 | from click.testing import CliRunner 4 | from pyfakefs.fake_filesystem_unittest import TestCase 5 | from appdirs import user_cache_dir 6 | from jugaad_data.cli import cli 7 | 8 | class TestCli(TestCase): 9 | def setUp(self): 10 | """ 11 | FakeFS creates a fake file systems and in process looses the CA Certs 12 | Which fails the test while running stocks 13 | To fix that CA certificates will be read and then placed back 14 | """ 15 | import certifi 16 | self.path = certifi.where() 17 | with open(self.path) as fp: 18 | self.certs = fp.read() 19 | self.setUpPyfakefs() 20 | ## Restoring the CA certs 21 | self.fs.create_file(self.path) 22 | with open(self.path, "w") as fp: 23 | fp.write(self.certs) 24 | 25 | # def test_stock_cli(self): 26 | # symbol = "RELIANCE" 27 | # from_ = "2020-07-01" 28 | # to = "2020-07-07" 29 | # output = "/tmp/abc.csv" 30 | # runner = CliRunner() 31 | # cmd = "stock -s {} -f {} -t {} -o {}".format(symbol, from_, to, output) 32 | # result = runner.invoke(cli, cmd.split()) 33 | # print(cmd) 34 | # assert result.exit_code == 0 35 | # with open(output) as fp: 36 | # reader = csv.reader(fp) 37 | # rows = list(reader) 38 | # assert rows[1][0] == to 39 | # assert rows[-1][0] == from_ 40 | # assert len(rows) == 6 41 | 42 | # from_ = "2019-07-01" 43 | # to = "2020-07-07" 44 | # output = 
"/tmp/abc.csv" 45 | # runner = CliRunner() 46 | # cmd = "stock -s {} -f {} -t {} -o {}".format(symbol, from_, to, output) 47 | # result = runner.invoke(cli, cmd.split()) 48 | # print(cmd) 49 | # assert result.exit_code == 0 50 | # with open(output) as fp: 51 | # reader = csv.reader(fp) 52 | # rows = list(reader) 53 | # assert rows[1][0] == to 54 | # assert rows[-1][0] == from_ 55 | # assert len(rows) > 200 and len(rows) < 260 56 | 57 | # def test_derivatives_cli(self): 58 | # runner = CliRunner() 59 | # output = "file_name.csv" 60 | # cmd = "derivatives -s SBIN -f 2020-01-01 -t 2020-01-30 -e 2020-01-30 -i FUTSTK -o file_name.csv" 61 | # result = runner.invoke(cli, cmd.split()) 62 | # assert result.exit_code == 0 63 | # with open(output) as fp: 64 | # reader = csv.reader(fp) 65 | # rows = list(reader) 66 | # assert rows[1][0] == "30-Jan-2020" 67 | # assert rows[-1][0] == "01-JAN-2020" 68 | # assert len(rows) == 23 69 | # cmd = "derivatives -s NIFTY -f 2020-01-01 -t 2020-01-23 -e 2020-01-23 -i OPTIDX --pe -p 12000 -o file_name.csv" 70 | # result = runner.invoke(cli, cmd.split()) 71 | # assert result.exit_code == 0 72 | # with open(output) as fp: 73 | # reader = csv.reader(fp) 74 | # rows = list(reader) 75 | # assert rows[1][0] == "23-Jan-2020" 76 | # warnings.warn("Test cannot be completed, NSE's website is providing only partial data") 77 | 78 | # def test_index_cli(self): 79 | # symbol = "NIFTY 50" 80 | # from_ = "2020-01-01" 81 | 82 | # to = "2020-03-31" 83 | # output = "/tmp/abc.csv" 84 | # runner = CliRunner() 85 | # cmd = "index,-s,{},-f,{},-t,{},-o,{}".format(symbol, from_, to, output) 86 | # result = runner.invoke(cli, cmd.split(',')) 87 | # print(cmd.split('.')) 88 | # print(result.output) 89 | # assert result.exit_code == 0 90 | # with open(output) as fp: 91 | # reader = csv.reader(fp) 92 | # rows = list(reader) 93 | # assert rows[1][1] == "31 Mar 2020" 94 | # assert rows[-1][1] == "01 Jan 2020" 95 | # assert len(rows) > 50 96 | 97 | 
-------------------------------------------------------------------------------- /tests/test_holidays.py: -------------------------------------------------------------------------------- 1 | from datetime import date 2 | from jugaad_data.holidays import holidays 3 | 4 | 5 | def test_holidays(): 6 | # Check for random holiday 7 | assert date(2018,11,8) in holidays() 8 | assert date(2020,12,25) in holidays(year=2020) 9 | assert date(2020,12,25) in holidays(year=2020) 10 | assert date(2020,11,30) in holidays(year=2020) 11 | assert date(2018,11,8) not in holidays(year=2020) 12 | assert date(2020,12,25) in holidays(year=2020, month=12) 13 | assert date(2020,11,30) not in holidays(year=2020, month=12) 14 | -------------------------------------------------------------------------------- /tests/test_nse.py: -------------------------------------------------------------------------------- 1 | import os 2 | import json 3 | import csv 4 | from datetime import date, datetime, timedelta 5 | from pprint import pprint 6 | 7 | from pyfakefs.fake_filesystem_unittest import TestCase 8 | import pytest 9 | import numpy as np 10 | import pandas as pd 11 | from appdirs import user_cache_dir 12 | from jugaad_data import nse 13 | import click 14 | import warnings 15 | h = nse.NSEHistory() 16 | 17 | 18 | def get_data(symbol, from_date, to_date, series): 19 | params = { 20 | 'symbol': symbol, 21 | 'from': from_date.strftime('%d-%m-%Y'), 22 | 'to': to_date.strftime('%d-%m-%Y'), 23 | 'series': '["{}"]'.format(series), 24 | } 25 | 26 | return h._get("stock_history", params) 27 | 28 | def test_cookie(): 29 | r = h._get("equity_quote_page", params={}) 30 | assert r.status_code == 200 31 | assert "nseappid" in r.cookies 32 | symbol = "RELIANCE" 33 | from_date = date(2019,1,1) 34 | to_date = date(2019,1,31) 35 | series = "EQ" 36 | d = get_data(symbol, from_date, to_date, series) 37 | j = json.loads(d.text) 38 | assert 'data' in j 39 | assert j['data'][0]["CH_TIMESTAMP"] == "2019-01-31" 40 | assert 
j['data'][-1]["CH_TIMESTAMP"] == "2019-01-01" 41 | 42 | 43 | def test__get(): 44 | symbol = "RELIANCE" 45 | from_date = date(2019,1,1) 46 | to_date = date(2019,1,31) 47 | series = "EQ" 48 | d = get_data(symbol, from_date, to_date, series) 49 | print(d.text) 50 | j = json.loads(d.text) 51 | assert 'data' in j 52 | assert j['data'][0]["CH_TIMESTAMP"] == "2019-01-31" 53 | assert j['data'][-1]["CH_TIMESTAMP"] == "2019-01-01" 54 | 55 | def test__get_http_bin(): 56 | h = nse.NSEHistory() 57 | h.base_url = "https://httpbin.org" 58 | h.path_map['bin'] = '/get' 59 | 60 | params = {"p1":'1' , "p2": "a"} 61 | r = h._get("bin", params) 62 | _params = json.loads(r.text)['args'] 63 | assert params == _params 64 | 65 | def setup_test(self): 66 | """ 67 | FakeFS creates a fake file systems and in process looses the CA Certs 68 | Which fails the test while running stocks 69 | To fix that CA certificates will be read and then placed back 70 | """ 71 | import certifi 72 | self.path = certifi.where() 73 | with open(self.path) as fp: 74 | self.certs = fp.read() 75 | self.setUpPyfakefs() 76 | ## Restoring the CA certs 77 | self.fs.create_file(self.path) 78 | with open(self.path, "w") as fp: 79 | fp.write(self.certs) 80 | 81 | 82 | 83 | class TestNSECache(TestCase): 84 | def setUp(self): 85 | setup_test(self) 86 | """ 87 | FakeFS creates a fake file systems and in process looses the CA Certs 88 | Which fails the test while running stocks 89 | To fix that CA certificates will be read and then placed back 90 | import certifi 91 | self.path = certifi.where() 92 | with open(self.path) as fp: 93 | self.certs = fp.read() 94 | self.setUpPyfakefs() 95 | ## Restoring the CA certs 96 | self.fs.create_file(self.path) 97 | with open(self.path, "w") as fp: 98 | fp.write(self.certs) 99 | """ 100 | def test__stock(self): 101 | d = h._stock("SBIN", date(2001,1,1), date(2001,1,31)) 102 | assert d[0]["CH_TIMESTAMP"] == "2001-01-31" 103 | assert d[-1]["CH_TIMESTAMP"] == "2001-01-01" 104 | # Check if 
there's no data 105 | d = h._stock("SBIN", date(2020,7,4), date(2020,7,5)) 106 | assert len(d) == 0 107 | # Check future date 108 | from_date = datetime.now().date() + timedelta(days=1) 109 | to_date = from_date + timedelta(days=10) 110 | d = h._stock("SBIN", from_date, to_date) 111 | assert len(d) == 0 112 | 113 | def test_stock_raw(self): 114 | from_date = date(2001,1,15) 115 | to_date = date(2002,1,15) 116 | d = nse.stock_raw("SBIN", from_date, to_date) 117 | assert len(d) > 240 118 | assert len(d) < 250 119 | all_dates = [datetime.strptime(k["CH_TIMESTAMP"], "%Y-%m-%d").date() for k in d] 120 | assert to_date in all_dates 121 | assert from_date in all_dates 122 | assert d[-1]["CH_TIMESTAMP"] == str(from_date) 123 | assert d[0]["CH_TIMESTAMP"] == str(to_date) 124 | app_name = nse.APP_NAME + '-stock' 125 | files = os.listdir(user_cache_dir(app_name, app_name)) 126 | assert len(files) == 13 127 | 128 | def test_stock_csv(self): 129 | from_date = date(2001,1,15) 130 | to_date = date(2002,1,15) 131 | raw = nse.stock_raw("SBIN", from_date, to_date) 132 | output = nse.stock_csv("SBIN", from_date, to_date) 133 | with open(output) as fp: 134 | text = fp.read() 135 | rows = [x.split(',') for x in text.split('\n')] 136 | headers = [ "DATE", "SERIES", 137 | "OPEN", "HIGH", 138 | "LOW", "PREV. 
CLOSE", 139 | "LTP", "CLOSE", 140 | "VWAP", "52W H", "52W L", 141 | "VOLUME", "VALUE", "NO OF TRADES", "SYMBOL"] 142 | assert headers == rows[0] 143 | assert raw[0]['CH_TIMESTAMP'] == rows[1][0] 144 | assert raw[0]['CH_OPENING_PRICE'] == int(rows[1][2]) 145 | 146 | def test_stock_df(self): 147 | from_date = date(2001,1,15) 148 | to_date = date(2002,1,15) 149 | raw = nse.stock_raw("SBIN", from_date, to_date) 150 | df = nse.stock_df("SBIN", from_date, to_date) 151 | 152 | assert len(raw) == len(df) 153 | assert df['DATE'].iloc[0] == np.datetime64("2002-01-15") 154 | assert df['DATE'].iloc[-1] == np.datetime64("2001-01-15") 155 | assert df['OPEN'].iloc[0] == 220 156 | 157 | class TestDerivatives(TestCase): 158 | def setUp(self): 159 | setup_test(self) 160 | 161 | 162 | class TestIndexHistory(TestCase): 163 | def setUp(self): 164 | setup_test(self) 165 | 166 | def test__post(self): 167 | h = nse.NSEIndexHistory() 168 | h.base_url = "https://httpbin.org" 169 | h.path_map['mypath'] = '/post' 170 | params = {'a': 'b'} 171 | r = h._post_json("mypath", params=params) 172 | assert json.loads(r.json()['data']) == params 173 | 174 | """ 175 | def test_index_raw(self): 176 | symbol = "NIFTY 50" 177 | from_date = date(2020, 6, 1) 178 | to_date = date(2020, 7, 30) 179 | d = nse.index_raw(symbol, from_date, to_date) 180 | assert d[0]['Index Name'] == 'Nifty 50' 181 | assert d[0]['HistoricalDate'] == '30 Jul 2020' 182 | assert d[-1]['HistoricalDate'] == '01 Jun 2020' 183 | app_name = nse.APP_NAME + '-index' 184 | files = os.listdir(user_cache_dir(app_name, app_name)) 185 | assert len(files) == 2 186 | 187 | def test_index_csv(self): 188 | from_date = date(2001,1,15) 189 | to_date = date(2001,6,15) 190 | raw = nse.index_raw("NIFTY 50", from_date, to_date) 191 | output = nse.index_csv("NIFTY 50", from_date, to_date) 192 | with open(output) as fp: 193 | text = fp.read() 194 | rows = [x.split(',') for x in text.split('\n')] 195 | assert rows[1][2] == raw[0]['OPEN'] 196 | 197 | def 
test_index_df(self): 198 | from_date = date(2001,1,15) 199 | to_date = date(2001,6,15) 200 | index_df = nse.index_df("NIFTY 50", from_date, to_date) 201 | assert len(index_df) > 100 202 | assert list(index_df.columns) == ['Index Name', 'INDEX_NAME', 'HistoricalDate', 'OPEN', 'HIGH', 203 | 'LOW', 'CLOSE'] 204 | """ 205 | 206 | def test_expiry_dates(): 207 | dt = date(2020,1,1) 208 | expiry_dts = nse.expiry_dates(dt, "FUTIDX", "NIFTY") 209 | 210 | 211 | -------------------------------------------------------------------------------- /tests/test_nse_live.py: -------------------------------------------------------------------------------- 1 | from jugaad_data.nse.live import NSELive 2 | from datetime import date, datetime 3 | n = NSELive() 4 | def test_stock_quote(): 5 | r = n.stock_quote("HDFC") 6 | assert r['info']['symbol'] == 'HDFC' 7 | 8 | def test_stock_quote_fno(): 9 | r = n.stock_quote_fno("HDFC") 10 | assert 'strikePrices' in r 11 | assert 'info' in r 12 | assert 'stocks' in r 13 | 14 | def test_trade_info(): 15 | r = n.trade_info("HDFC") 16 | assert "bulkBlockDeals" in r 17 | assert "marketDeptOrderBook" in r 18 | 19 | def test_market_status(): 20 | r = n.market_status() 21 | assert "marketState" in r 22 | 23 | def test_tick_data(): 24 | d = n.tick_data("HDFC") 25 | assert "grapthData" in d 26 | d = n.tick_data("NIFTY 50", True) 27 | assert "grapthData" in d 28 | """ 29 | def test_market_turnover(): 30 | d = n.market_turnover() 31 | assert "data" in d 32 | assert len(d['data']) > 1 33 | assert 'name' in d['data'][0] 34 | """ 35 | def test_eq_derivative_turnover(): 36 | d = n.eq_derivative_turnover() 37 | assert "value" in d 38 | assert "volume" in d 39 | assert len(d['value']) > 1 40 | assert len(d['volume']) > 1 41 | 42 | d = n.eq_derivative_turnover(type="fu_nifty50") 43 | assert "value" in d 44 | assert "volume" in d 45 | assert len(d['value']) > 1 46 | assert len(d['volume']) > 1 47 | 48 | def test_all_indices(): 49 | d = n.all_indices() 50 | assert 
"advances" in d 51 | assert "declines" in d 52 | assert len(d['data']) > 1 53 | 54 | def test_live_index(): 55 | d = n.live_index("NIFTY 50") 56 | assert "advance" in d 57 | assert len(d['data']) == 51 58 | 59 | def test_index_option_chain(): 60 | d = n.index_option_chain("NIFTY") 61 | assert "filtered" in d 62 | assert "records" in d 63 | 64 | def test_equities_option_chain(): 65 | d = n.equities_option_chain("RELIANCE") 66 | assert "filtered" in d 67 | assert "records" in d 68 | assert "data" in d["records"] 69 | 70 | def test_currency_option_chain(): 71 | d = n.currency_option_chain("USDINR") 72 | assert "filtered" in d 73 | assert "records" in d 74 | assert "data" in d["records"] 75 | 76 | def test_live_fno(): 77 | d = n.live_fno() 78 | assert "SECURITIES IN F&O" == d['name'] 79 | 80 | def test_pre_open_market(): 81 | d = n.pre_open_market("NIFTY") 82 | assert "declines" in d 83 | assert "unchanged" in d 84 | assert "advances" in d 85 | 86 | def test_corporate_announcements(): 87 | d = n.corporate_announcements() 88 | assert type(d) == list 89 | if len(d) > 0: 90 | row = d[0] 91 | assert 'symbol' in row.keys() 92 | 93 | from_date = date(2024,1,1) 94 | to_date = date(2024,1,2) 95 | d = n.corporate_announcements(from_date=from_date, to_date=to_date) 96 | assert len(d) > 0 97 | for x in d: 98 | print(x['symbol']) 99 | if len(d) > 0: 100 | assert 'symbol' in d[0].keys() 101 | d = n.corporate_announcements(from_date=from_date, to_date=to_date, symbol='NESCO') 102 | 103 | assert d[0]['symbol'] == 'NESCO' -------------------------------------------------------------------------------- /tests/test_rbi.py: -------------------------------------------------------------------------------- 1 | from jugaad_data.rbi import RBI 2 | import pytest 3 | 4 | def test_current_rates(): 5 | r = RBI() 6 | rates = r.current_rates() 7 | assert '91 day T-bills' in rates 8 | assert 'Policy Repo Rate' in rates 9 | assert 'Savings Deposit Rate' in rates 10 | # Below should not raise 
exception 11 | val = float(rates['91 day T-bills'].replace('%',"")) 12 | -------------------------------------------------------------------------------- /tests/test_util.py: -------------------------------------------------------------------------------- 1 | import os 2 | import math 3 | import pickle 4 | import pytest 5 | from jugaad_data import util as ut 6 | from datetime import date, datetime, timedelta 7 | import time 8 | from pyfakefs.fake_filesystem_unittest import TestCase 9 | from appdirs import user_cache_dir 10 | 11 | def test_break_dates(): 12 | from_date = date(2000, 12, 14) 13 | to_date = date(2005, 1, 20) 14 | dates = ut.break_dates(from_date, to_date) 15 | assert from_date== dates[0][0] 16 | assert to_date == dates[-1][1] 17 | assert len(dates) == 50 18 | 19 | from_date = date(2019, 1, 1) 20 | to_date = date(2020, 1, 31) 21 | dates = ut.break_dates(from_date, to_date) 22 | assert from_date == dates[0][0] 23 | assert to_date == dates[-1][1] 24 | assert len(dates) == 13 25 | 26 | def test_np_float(): 27 | assert 3.3 == pytest.approx(ut.np_float("3.3")) 28 | assert math.isnan(ut.np_float("-")) 29 | 30 | 31 | def test_np_int(): 32 | assert 3 == ut.np_int('3') 33 | assert 0 == ut.np_int('-') 34 | 35 | def test_np_date(): 36 | assert date(2020,1,1) == ut.np_date("2020-01-01") 37 | assert date(2020,7,30) == datetime.strptime("30-Jul-2020", "%d-%b-%Y").date() 38 | assert date(2020,7,30) == ut.np_date("30-Jul-2020") 39 | assert ut.np_date("20 Aug 2020") == date(2020, 8, 20) 40 | 41 | def test_kw_to_fname(): 42 | x = ut.kw_to_fname(self=[0], z='last', a='first') 43 | assert x == 'first-last' 44 | x = ut.kw_to_fname(z='last', a='first', self=[0]) 45 | assert x == 'first-last' 46 | x = ut.kw_to_fname(self=[], symbol="SBIN", from_date=date(2020,1,1), to_date=date(2020,1,31)) 47 | assert x == "2020-01-01-SBIN-2020-01-31" 48 | 49 | def demo_for_pool(a, b): 50 | return (a + b)**2 51 | 52 | class DemoForPool: 53 | def demo_for_pool(self, a, b): 54 | return (a + 
b)**2 55 | 56 | def pooled(self, params, use_threds): 57 | return ut.pool(self.demo_for_pool, params, use_threds) 58 | 59 | def test_pool(): 60 | for use_threads in [True, False]: 61 | params = [ (0, 1), 62 | (1, 2), 63 | (2, 3)] 64 | expected = [1, 9, 25] 65 | actual = ut.pool(demo_for_pool, params, use_threads) 66 | assert expected == list(actual) 67 | d = DemoForPool() 68 | actual = d.pooled(params, use_threads) 69 | assert expected == list(actual) 70 | 71 | 72 | 73 | 74 | @ut.cached("testapp") 75 | def demo_function(self, x, y): 76 | return {'x': x, 'y': y} 77 | 78 | class DemoClass: 79 | @ut.cached("testapp") 80 | def demo_method(self, x, y): 81 | return {'x': x, 'y': y} 82 | 83 | @ut.cached("testapp") 84 | def demo_crash(a, b): 85 | raise Exception("Terrible") 86 | 87 | class TestCache(TestCase): 88 | def setUp(self): 89 | self.setUpPyfakefs() 90 | 91 | def test_demo_function(self): 92 | # Check if function reeturns correct value 93 | x = demo_function([0], 'v1', 'v2') 94 | self.assertEqual(x, {'x': 'v1', 'y': 'v2'}) 95 | # Check if path exists 96 | path = os.path.join(user_cache_dir("testapp"), 'v1-v2') 97 | self.assertTrue(os.path.isfile(path)) 98 | # Next time it should read from cache, let us see if cache reading works 99 | # update the file with new values 100 | j = {'x': 'x1', 'y': 'y1'} 101 | with open(path, 'wb') as fp: 102 | pickle.dump(j, fp) 103 | # run the function 104 | x = demo_function([0], 'v1', 'v2') 105 | self.assertEqual(x, j) 106 | 107 | def test_demo_method(self): 108 | d = DemoClass() 109 | x = d.demo_method('v1', 'v2') 110 | self.assertEqual(x, {'x': 'v1', 'y': 'v2'}) 111 | # Check if path exists 112 | path = os.path.join(user_cache_dir("testapp"), 'v1-v2') 113 | self.assertTrue(os.path.isfile(path)) 114 | # Next time it should read from cache, let us see if cache reading works 115 | # update the file with new values 116 | j = {'x': 'x1', 'y': 'y1'} 117 | with open(path, 'wb') as fp: 118 | pickle.dump(j, fp) 119 | # run the function 120 
| x = d.demo_method('v1', 'v2') 121 | self.assertEqual(x, j) 122 | 123 | def test_demo_crashed(self): 124 | with pytest.raises(Exception): 125 | demo_crashed('fiz', 'buzz') 126 | demo_function([0], 'lorem', 'ipsem') 127 | path = os.path.join(user_cache_dir("testapp"), 'lorem-ipsem') 128 | assert os.path.isfile(path) 129 | try: 130 | demo_crashed('buzz', 'fizz') 131 | except: 132 | pass 133 | path = os.path.join(user_cache_dir("testapp"), 'buzz-fizz') 134 | assert not os.path.isfile(path) 135 | 136 | def test_demo_with_environment_var(self): 137 | os.environ['J_CACHE_DIR'] = '/tmp/' 138 | x = demo_function([0], 'v1', 'v2') 139 | self.assertEqual(x, {'x': 'v1', 'y': 'v2'}) 140 | 141 | # Check if path exists 142 | path = os.path.join("/tmp", 'testapp', 'v1-v2') 143 | self.assertTrue(os.path.isfile(path)) 144 | # Next time it should read from cache, let us see if cache reading works 145 | # update the file with new values 146 | j = {'x': 'x1', 'y': 'y1'} 147 | with open(path, 'wb') as fp: 148 | pickle.dump(j, fp) 149 | # run the function 150 | x = demo_function([0], 'v1', 'v2') 151 | self.assertEqual(x, j) 152 | 153 | class QuoteApp: 154 | time_out = 3 155 | @ut.live_cache 156 | def rt_quote(self): 157 | return datetime.now() 158 | 159 | def test_live_cache(): 160 | q = QuoteApp() 161 | r = q.rt_quote() 162 | v = q._cache['rt_quote']['value'] 163 | ts = q._cache['rt_quote']['timestamp'] 164 | assert q.rt_quote() == v 165 | time.sleep(3) 166 | assert q.rt_quote() > v 167 | 168 | --------------------------------------------------------------------------------