├── .gitignore ├── README.rst ├── VERSION.txt ├── docs ├── Makefile ├── make.bat ├── requirements.txt └── source │ ├── conf.py │ └── index.rst ├── examples ├── MA_Strategy.py └── __init__.py ├── requirements.txt ├── rqalpha_mod_fxdayu_source ├── __init__.py ├── const.py ├── data │ └── index_symbol_map.csv ├── data_source │ ├── __init__.py │ ├── bundle.py │ ├── common │ │ ├── __init__.py │ │ ├── cache.py │ │ ├── minite.py │ │ ├── odd.py │ │ └── realtime.py │ ├── mongo.py │ └── quantos.py ├── event_source.py ├── inday_bars │ ├── __init__.py │ ├── base.py │ ├── quantos.py │ └── redis.py ├── mod.py ├── price_board.py ├── share │ ├── __init__.py │ ├── astock_minute_reader.py │ ├── mongo_handler.py │ ├── trading_calendar.py │ ├── trading_session.py │ └── utils.py └── utils │ ├── __init__.py │ ├── asyncio.py │ ├── converter.py │ ├── instrument.py │ ├── mongo.py │ └── quantos.py ├── setup.py └── tests ├── __init__.py ├── common ├── __init__.py └── source.py ├── mongo ├── __init__.py ├── test_cache_run.py ├── test_run.py └── test_source.py ├── quantos ├── __init__.py ├── test_cache_run.py ├── test_realtime.py ├── test_run.py └── test_source.py ├── strategies ├── __init__.py └── simple.py ├── test_cache_source.py ├── test_events_source.py ├── test_real_strategy.py ├── test_redis_data_source.py ├── test_run_realtime.py └── utils ├── __init__.py └── quotation.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Created by .ignore support plugin (hsz.mobi) 2 | ### Python template 3 | # Byte-compiled / optimized / DLL files 4 | __pycache__/ 5 | *.py[cod] 6 | *$py.class 7 | 8 | # C extensions 9 | *.so 10 | 11 | # Distribution / packaging 12 | .Python 13 | env/ 14 | build/ 15 | develop-eggs/ 16 | dist/ 17 | downloads/ 18 | eggs/ 19 | .eggs/ 20 | lib/ 21 | lib64/ 22 | parts/ 23 | sdist/ 24 | var/ 25 | wheels/ 26 | *.egg-info/ 27 | .installed.cfg 28 | *.egg 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python 
script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *,cover 49 | .hypothesis/ 50 | 51 | # Translations 52 | *.mo 53 | *.pot 54 | 55 | # Django stuff: 56 | *.log 57 | local_settings.py 58 | 59 | # Flask stuff: 60 | instance/ 61 | .webassets-cache 62 | 63 | # Scrapy stuff: 64 | .scrapy 65 | 66 | # Sphinx documentation 67 | docs/_build/ 68 | 69 | # PyBuilder 70 | target/ 71 | 72 | # Jupyter Notebook 73 | .ipynb_checkpoints 74 | 75 | # pyenv 76 | .python-version 77 | 78 | # celery beat schedule file 79 | celerybeat-schedule 80 | 81 | # SageMath parsed files 82 | *.sage.py 83 | 84 | # dotenv 85 | .env 86 | 87 | # virtualenv 88 | .venv 89 | venv/ 90 | ENV/ 91 | 92 | # Spyder project settings 93 | .spyderproject 94 | 95 | # Rope project settings 96 | .ropeproject 97 | 98 | # vscode 99 | .vscode/ 100 | .idea/ 101 | 102 | .cache/ 103 | .persist/ 104 | .report/ -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | ************************************************ 2 | Expanded Rqalpha Data and Event Source by Fxdayu 3 | ************************************************ 4 | 5 | 介绍 6 | ======== 7 | rqalpha是一款开源的基于事件驱动的交易执行引擎,其架构高度解耦,支持各种mod拓展。 8 | 9 | 本mod主要拓展了rqalpha数据源和事件源,数据源共有3种方案,前2种为公司内部使用, 10 | 分别基于mongodb和bcolz的压缩格式文件,外部很难接入。最新加入第3种对接tushare-pro开源财经数据源(感谢米哥), 11 | 使用起来非常方便。 12 | 13 | 特点 14 | ======= 15 | + 依托于tushare-pro的数据服务,只要有网络通畅就可以进行回测和实时交易,无需额外数据文件下载或数据库搭建 16 | + 使用简单,只需安装、激活此mod,加入相应配置,并在quantos得到使用tushare-pro数据服务的权限即可 17 | + 支持按多种时间频率获取数据 18 | 19 | ======= ================== 20 | \*d 任意天,1d,5d等 21 | \*h quantos数据暂未实现 22 | \*m     
任意分钟,1m,5m,10m等 23 | ======= ================== 24 | 25 | + 内置简单的数据缓存(Beta) 26 | 27 | 安装 28 | ====== 29 | .. code-block:: bash 30 | 31 | $ pip install git+https://github.com/xingetouzi/rqalpha-mod-fxdayu-source.git 32 | $ rqalpha mod install fxdayu_source 33 | 34 | 用例 35 | ====== 36 | strategy.py 37 | 38 | .. code-block:: python 39 | 40 | # -*- coding: utf-8 -*- 41 | import itertools 42 | import os 43 | from rqalpha.api import * 44 | import pandas as pd 45 | from rqalpha.utils.datetime_func import convert_dt_to_int 46 | from rqalpha import run 47 | 48 | def init(context): 49 | logger.info("init") 50 | context.s1 = "000001.XSHE" 51 | update_universe(context.s1) 52 | context.fired = False 53 | 54 | def before_trading(context): 55 | pass 56 | 57 | def handle_bar(context, bar_dict): 58 | bar = bar_dict[context.s1] 59 | print(bar) 60 | assert bar.datetime == context.now 61 | lengths = [5] 62 | frequencies = ["1m"] 63 | for l, f in itertools.product(lengths, frequencies): 64 | # print(pd.DataFrame(history_bars(context.s1, 5, "1d", include_now=True))) 65 | df = pd.DataFrame(history_bars(context.s1, l, f)) 66 | print(df) 67 | assert len(df) == l 68 | assert convert_dt_to_int(context.now) == df["datetime"].iloc[-1] 69 | if not context.fired: 70 | # order_percent并且传入1代表买入该股票并且使其占有投资组合的100% 71 | order_percent(context.s1, 1) 72 | context.fired = True 73 | 74 | config = { 75 | "base": { 76 | "start_date": "2016-06-01", 77 | "end_date": "2016-06-05", 78 | "accounts": {"stock": 100000}, 79 | "frequency": "1m", 80 | "benchmark": None, 81 | "strategy_file": __file__ 82 | }, 83 | "extra": { 84 | "log_level": "verbose", 85 | }, 86 | "mod": { 87 | "sys_analyser": { 88 | "enabled": True, 89 | # "report_save_path": ".", 90 | "plot": True 91 | }, 92 | "sys_simulation": { 93 | "enabled": True, 94 | # "matching_type": "last" 95 | }, 96 | "fxdayu_source": { 97 | "enabled": True, 98 | "source": "quantos", 99 | "quantos_user": "139xxxxxxxx", # 填入您的quantos用户名 100 | "quantos_token": 
"xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" # 填入您的quantos Token 101 | # 其他配置参数 102 | } 103 | } 104 | } 105 | 106 | if __name__ == "__main__": 107 | # 您可以指定您要传递的参数 108 | run(config=config) 109 | 110 | 运行strategy.py可以看到结果,没有发生AssertionError表示能够正常读取数据。 111 | 112 | .. code-block:: bash 113 | 114 | $ python strategy.py 115 | 116 | 117 | 配置选项 118 | ======== 119 | ============================= ============================== ================= ======================================= 120 | 选项 默认值 适用数据源类型 含义 121 | ============================= ============================== ================= ======================================= 122 | fxdayu_source.enabled "quantos" 通用 行情源类型,可选值为"mongo","bundle","quantos" 123 | fxdayu_source.bundle_path None bundle bundle数据文件位置,默认取"~/.fxdayu/bundle", 可以用环境变量覆盖,取值为"$FXDAYU_ROOT/bundle" 124 | fxdayu_source.mongo_url "mongodb://localhost:27017" mongo mongodb数据库地址 125 | fxdayu_source.enable_cache True 通用 bool型,是否开启分页读取缓存优化功能(缓存优化适用于回测)。 126 | fxdayu_source.cache_length 1000 通用 当开启缓存优化时,指定单页缓存的条目数 127 | fxdayu_source.quantos_url "tcp://data.quantos.org:8910" quantos 可选,tushare服务器地址,默认不需要配置 128 | fxdayu_source.quantos_user None quantos 必填,quantos用户名,可以从环境变量QUANTOS_USER传入 129 | fxdayu_source.quantos_token None quantos 必填,quantos Token,可以从环境变量QUANTOS_TOKEN传入 130 | ============================= ============================== ================= ======================================= 131 | 132 | 说明 133 | ========= 134 | 由于此mod使用了一些原来内部方案中的代码,故没有单独作为独立的模块。暂时不打算走正常的发布流程(旧的代码短期内可能有很大改动),也不会发布到pypi,只分为master和dev分支,master为稳定分支,dev为开发分支, 135 | 功能变更将按日期写入changelog中。 136 | 137 | 加入开发 138 | ========= 139 | github地址_ 140 | 141 | .. 
_github地址: https://github.com/xingetouzi/rqalpha-mod-fxdayu-source 142 | 143 | 欢迎提交各种Issue和Pull Request。 144 | -------------------------------------------------------------------------------- /VERSION.txt: -------------------------------------------------------------------------------- 1 | 0.1.0 -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | SPHINXPROJ = rqalpha-mod-fxdayu-source 8 | SOURCEDIR = source 9 | BUILDDIR = build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=source 11 | set BUILDDIR=build 12 | set SPHINXPROJ=rqalpha-mod-fxdayu-source 13 | 14 | if "%1" == "" goto help 15 | 16 | if "%1" == "livehtml" goto livehtml 17 | 18 | if "%1" == "watch" goto watch 19 | 20 | %SPHINXBUILD% >NUL 2>NUL 21 | if errorlevel 9009 ( 22 | echo. 23 | echo.The 'sphinx-build' command was not found. 
Make sure you have Sphinx 24 | echo.installed, then set the SPHINXBUILD environment variable to point 25 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 26 | echo.may add the Sphinx directory to PATH. 27 | echo. 28 | echo.If you don't have Sphinx installed, grab it from 29 | echo.http://sphinx-doc.org/ 30 | exit /b 1 31 | ) 32 | 33 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 34 | goto end 35 | 36 | :help 37 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 38 | goto end 39 | 40 | :livehtml 41 | sphinx-autobuild -b html -B %SOURCEDIR% %BUILDDIR%/html %SPHINXOPTS% 42 | goto end 43 | 44 | :watch 45 | watchmedo shell-command \ 46 | --patterns="*.rst" \ 47 | --ignore-pattern="%BUILDDIR%" \ 48 | --recursive \ 49 | --command='make html' \ 50 | %SOURCEDIR% 51 | goto end 52 | 53 | :end 54 | popd 55 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | Sphinx 2 | watchdog 3 | sphinx-autobuild 4 | sphinx_rtd_theme 5 | nbsphinx 6 | jupyter_client -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Configuration file for the Sphinx documentation builder. 4 | # 5 | # This file does only contain a selection of the most common options. For a 6 | # full list see the documentation: 7 | # http://www.sphinx-doc.org/en/stable/config 8 | 9 | # -- Path setup -------------------------------------------------------------- 10 | 11 | # If extensions (or modules to document with autodoc) are in another directory, 12 | # add these directories to sys.path here. If the directory is relative to the 13 | # documentation root, use os.path.abspath to make it absolute, like shown here. 
14 | # 15 | import os 16 | # import sys 17 | # sys.path.insert(0, os.path.abspath('.')) 18 | 19 | 20 | # -- Project information ----------------------------------------------------- 21 | 22 | project = 'rqalpha-mod-fxdayu-source' 23 | copyright = '2018, BurdenBear' 24 | author = 'BurdenBear' 25 | 26 | # The short X.Y version 27 | version = '' 28 | # The full version, including alpha/beta/rc tags 29 | release = '' 30 | 31 | 32 | # -- General configuration --------------------------------------------------- 33 | 34 | # If your documentation needs a minimal Sphinx version, state it here. 35 | # 36 | # needs_sphinx = '1.0' 37 | 38 | # Add any Sphinx extension module names here, as strings. They can be 39 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 40 | # ones. 41 | extensions = [ 42 | 'sphinx.ext.viewcode', 43 | 'sphinx.ext.githubpages', 44 | ] 45 | 46 | # Add any paths that contain templates here, relative to this directory. 47 | templates_path = ['_templates'] 48 | 49 | # The suffix(es) of source filenames. 50 | # You can specify multiple suffix as a list of string: 51 | # 52 | source_suffix = ['.rst', '.md'] 53 | # source_suffix = '.rst' 54 | 55 | # The master toctree document. 56 | master_doc = 'index' 57 | 58 | # The language for content autogenerated by Sphinx. Refer to documentation 59 | # for a list of supported languages. 60 | # 61 | # This is also used if you do content translation via gettext catalogs. 62 | # Usually you set "language" from the command line for these cases. 63 | language = None 64 | 65 | # List of patterns, relative to source directory, that match files and 66 | # directories to ignore when looking for source files. 67 | # This pattern also affects html_static_path and html_extra_path . 68 | exclude_patterns = [] 69 | 70 | # The name of the Pygments (syntax highlighting) style to use. 
71 | pygments_style = 'sphinx' 72 | 73 | 74 | # -- Options for HTML output ------------------------------------------------- 75 | 76 | # The theme to use for HTML and HTML Help pages. See the documentation for 77 | # a list of builtin themes. 78 | # 79 | # html_theme = 'alabaster' 80 | 81 | # import sphinx_rtd_theme 82 | # html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] 83 | 84 | on_rtd = os.environ.get('READTHEDOCS', None) == 'True' 85 | if not on_rtd: # only import and set the theme if we're building docs locally 86 | try: 87 | import sphinx_rtd_theme 88 | except ImportError: 89 | html_theme = 'default' 90 | html_theme_path = [] 91 | else: 92 | html_theme = 'sphinx_rtd_theme' 93 | html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] 94 | 95 | # Theme options are theme-specific and customize the look and feel of a theme 96 | # further. For a list of options available for each theme, see the 97 | # documentation. 98 | # 99 | # html_theme_options = {} 100 | 101 | # Add any paths that contain custom static files (such as style sheets) here, 102 | # relative to this directory. They are copied after the builtin static files, 103 | # so a file named "default.css" will overwrite the builtin "default.css". 104 | html_static_path = ['_static'] 105 | 106 | # If true, links to the reST sources are added to the pages. 107 | html_show_sourcelink = True 108 | 109 | # Custom sidebar templates, must be a dictionary that maps document names 110 | # to template names. 111 | # 112 | # The default sidebars (for documents that don't match any pattern) are 113 | # defined by theme itself. Builtin themes are using these templates by 114 | # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', 115 | # 'searchbox.html']``. 116 | # 117 | # html_sidebars = {} 118 | 119 | 120 | # -- Options for HTMLHelp output --------------------------------------------- 121 | 122 | # Output file base name for HTML help builder. 
123 | htmlhelp_basename = 'rqalpha-mod-fxdayu-sourcedoc' 124 | 125 | 126 | # -- Options for LaTeX output ------------------------------------------------ 127 | 128 | latex_elements = { 129 | # The paper size ('letterpaper' or 'a4paper'). 130 | # 131 | # 'papersize': 'letterpaper', 132 | 133 | # The font size ('10pt', '11pt' or '12pt'). 134 | # 135 | # 'pointsize': '10pt', 136 | 137 | # Additional stuff for the LaTeX preamble. 138 | # 139 | # 'preamble': '', 140 | 141 | # Latex figure (float) alignment 142 | # 143 | # 'figure_align': 'htbp', 144 | } 145 | 146 | # Grouping the document tree into LaTeX files. List of tuples 147 | # (source start file, target name, title, 148 | # author, documentclass [howto, manual, or own class]). 149 | latex_documents = [ 150 | (master_doc, 'rqalpha-mod-fxdayu-source.tex', 'rqalpha-mod-fxdayu-source Documentation', 151 | 'BurdenBear', 'manual'), 152 | ] 153 | 154 | 155 | # -- Options for manual page output ------------------------------------------ 156 | 157 | # One entry per manual page. List of tuples 158 | # (source start file, name, description, authors, manual section). 159 | man_pages = [ 160 | (master_doc, 'rqalpha-mod-fxdayu-source', 'rqalpha-mod-fxdayu-source Documentation', 161 | [author], 1) 162 | ] 163 | 164 | 165 | # -- Options for Texinfo output ---------------------------------------------- 166 | 167 | # Grouping the document tree into Texinfo files. 
List of tuples 168 | # (source start file, target name, title, author, 169 | # dir menu entry, description, category) 170 | texinfo_documents = [ 171 | (master_doc, 'rqalpha-mod-fxdayu-source', 'rqalpha-mod-fxdayu-source Documentation', 172 | author, 'rqalpha-mod-fxdayu-source', 'One line description of project.', 173 | 'Miscellaneous'), 174 | ] 175 | 176 | 177 | # -- Extension configuration ------------------------------------------------- -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../../README.rst -------------------------------------------------------------------------------- /examples/MA_Strategy.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # __author__ = "BurdenBear" 3 | 4 | import os 5 | 6 | import rqalpha 7 | import talib 8 | from rqalpha.api import * 9 | 10 | frequency = "1h" 11 | 12 | 13 | def init(context): 14 | context.s1 = "000001.XSHE" 15 | context.SHORTPERIOD = 10 16 | context.LONGPERIOD = 60 17 | 18 | 19 | def handle_bar(context, bar_dict): 20 | prices = history_bars(context.s1, context.LONGPERIOD + 1, frequency, 'close') 21 | short_avg = talib.SMA(prices, context.SHORTPERIOD) 22 | long_avg = talib.SMA(prices, context.LONGPERIOD) 23 | 24 | # 计算现在portfolio中股票的仓位 25 | cur_position = context.portfolio.positions[context.s1].quantity 26 | avg_price = context.portfolio.positions[context.s1].avg_price 27 | capital = cur_position * avg_price 28 | # 计算现在portfolio中的现金可以购买多少股票 29 | shares = context.portfolio.cash / bar_dict[context.s1].close 30 | # 图形显示当前占用资金 31 | plot('capital', capital) 32 | 33 | # 如果短均线从上往下跌破长均线,而上一个bar的短线平均值高于长线平均值 34 | if short_avg[-1] - long_avg[-1] < 0 < long_avg[-2] - short_avg[-2] and cur_position > 0: 35 | # 进行清仓 36 | order_target_value(context.s1, 0) 37 | 38 | # 如果短均线从下往上突破长均线,为入场信号 39 | if short_avg[-1] - 
long_avg[-1] > 0 > long_avg[-2] - short_avg[-2]: 40 | # 满仓入股 41 | order_shares(context.s1, shares) 42 | 43 | 44 | config = { 45 | "base": { 46 | "start_date": "2010-06-01", 47 | "end_date": "2016-12-01", 48 | "accounts": {'stock': 1000000}, 49 | "benchmark": "000300.XSHG", 50 | "frequency": frequency, 51 | "strategy_file_path": os.path.abspath(__file__) 52 | }, 53 | "extra": { 54 | "log_level": "verbose", 55 | }, 56 | "mod": { 57 | "sys_analyser": { 58 | "enabled": True, 59 | "plot": True 60 | }, 61 | "mongo_datasource": { 62 | "enabled": True, 63 | "plot": True, 64 | } 65 | } 66 | } 67 | 68 | # 您可以指定您要传递的参数 69 | rqalpha.run_func(init=init, handle_bar=handle_bar, config=config) 70 | -------------------------------------------------------------------------------- /examples/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # __author__ = "BurdenBear" 3 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | jaqs>=0.6.9 2 | rqalpha>=3.0.9 3 | redis>=2.2.5 4 | pymongo>=3.6.0 5 | motor>=1.2.1 6 | numpy>=1.11.1 7 | pandas==0.20.0 8 | lru_dict>=1.1.6 9 | numba>=0.33.0 10 | -------------------------------------------------------------------------------- /rqalpha_mod_fxdayu_source/__init__.py: -------------------------------------------------------------------------------- 1 | import click 2 | import os 3 | from rqalpha import cli 4 | 5 | __config__ = { 6 | "source": "quantos", 7 | "mongo_url": os.environ.get("MONGO_URL", "mongodb://127.0.0.1:27017"), 8 | "redis_url": os.environ.get("REDIS_URL", "redis://127.0.0.1:6379"), 9 | "bundle_path": None, 10 | # quantos 11 | "quantos_url": None, 12 | "quantos_user": None, 13 | "quantos_token": None, 14 | # cache 15 | "enable_cache": True, 16 | "cache_length": None, 17 | "max_cache_space": None, 18 | # other 19 | "fps": 60, 20 | 
"persist_path": ".persist", 21 | "priority": 200, 22 | } 23 | 24 | 25 | def load_mod(): 26 | from .mod import FxdayuSourceMod 27 | return FxdayuSourceMod() 28 | 29 | 30 | """ 31 | --force-init 32 | """ 33 | 34 | cli.commands['run'].params.append( 35 | click.Option( 36 | ('--force-init/--no-force-init', 'extra__force_run_init_when_pt_resume'), 37 | is_flag=True, default=False, show_default=True, 38 | help="[fxdayu_source]force run init when paper trading resume or not" 39 | ) 40 | ) 41 | -------------------------------------------------------------------------------- /rqalpha_mod_fxdayu_source/const.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | 4 | class DataSourceType(Enum): 5 | MONGO = "mongo" 6 | BUNDLE = "bundle" 7 | QUANTOS = "quantos" 8 | REAL_TIME = "real_time" 9 | -------------------------------------------------------------------------------- /rqalpha_mod_fxdayu_source/data/index_symbol_map.csv: -------------------------------------------------------------------------------- 1 | name,symbol_ricequant,symbol_tushare 2 | 300运输,399957.XSHE,399957.SZ 3 | 300运输,399957.XSHE,000957.SH 4 | 上证商品,000066.XSHG,000066.SH 5 | 上证能源,000032.XSHG,000032.SH 6 | 上证RP,H50041.XSHG,h50041.CSI 7 | 中小低波,399663.XSHE,399663.SZ 8 | 大盘高贝,399405.XSHE,399405.SZ 9 | 国证医药,399394.XSHE,399394.SZ 10 | 国证医药,399394.XSHE,S99394.JZ 11 | 创业板指,399006.XSHE,399006.SZ 12 | 创业板指,399006.XSHE,S99006.JZ 13 | 深市精选,399013.XSHE,399013.SZ 14 | 深成信息,399687.XSHE,399687.SZ 15 | 内地低碳,000977.XSHG,399977.SZ 16 | 内地低碳,000977.XSHG,000977.SH 17 | 中小成长,399602.XSHE,399602.SZ 18 | 中小成长,399602.XSHE,159917.SZ 19 | 央视生态,399556.XSHE,399556.SZ 20 | 中证200,399904.XSHE,000904.SH 21 | 深证金融,399619.XSHE,399619.SZ 22 | 300基建,000950.XSHG,399950.SZ 23 | 300基建,000950.XSHG,000950.SH 24 | 基本面50,399925.XSHE,399925.SZ 25 | 基本面50,399925.XSHE,000925.SH 26 | 可选等权,000073.XSHG,000073.SH 27 | 上证沪企,000062.XSHG,000062.SH 28 | 深证综指,399106.XSHE,399106.SZ 29 | 
金融等权,000076.XSHG,000076.SH 30 | 上证资源,000068.XSHG,000068.SH 31 | 中证材料,000929.XSHG,399929.SZ 32 | 中证材料,000929.XSHG,000929.SH 33 | 中证材料,000929.XSHG,000929.CSI 34 | 上证电信,000040.XSHG,000040.SH 35 | 300材料,399909.XSHE,000909.SH 36 | 300材料,399909.XSHE,L11502.CSI 37 | 内地地产,000948.XSHG,399948.SZ 38 | 内地地产,000948.XSHG,000948.SH 39 | 内地地产,000948.XSHG,000948.CSI 40 | 中证700,000907.XSHG,000907.SH 41 | 中证700,000907.XSHG,000907.CSI 42 | 深证装备,399636.XSHE,399636.SZ 43 | 消费领先,000148.XSHG,000148.SH 44 | 深证龙头,399653.XSHE,399653.SZ 45 | 上证全指,000047.XSHG,000047.SH 46 | 医药100,399978.XSHE,399978.SZ 47 | 医药100,399978.XSHE,000978.SH 48 | 科技100,399608.XSHE,399608.SZ 49 | 300公用,399917.XSHE,000917.SH 50 | 300公用,399917.XSHE,L11525.CSI 51 | 300价值,399919.XSHE,000919.SH 52 | 300价值,399919.XSHE,519671.SH 53 | 深证成指,399001.XSHE,399001.SZ 54 | 深证成指,399001.XSHE,S99001.JZ 55 | 800医药,000841.XSHG,000841.SH 56 | 持续产业,000114.XSHG,000114.SH 57 | 380金融,000110.XSHG,000110.SH 58 | 国证1000,399311.XSHE,399311.SZ 59 | 全R成长,000059.XSHG,000059.SH 60 | 中证金融,000934.XSHG,399934.SZ 61 | 中证金融,000934.XSHG,000934.SH 62 | 深证GDP,399648.XSHE,399648.SZ 63 | 国证有色,399395.XSHE,399395.SZ 64 | 国证有色,399395.XSHE,S99395.JZ 65 | 中证500,CSI500.INDX,000905.SH 66 | 中证500,CSI500.INDX,512500.SH 67 | 中证500,CSI500.INDX,S00905.JZ 68 | 国证治理,399322.XSHE,399322.SZ 69 | 申万传媒,801221.INDX,163117.SZ 70 | 深成可选,399683.XSHE,399683.SZ 71 | 380R成长,000119.XSHG,000119.SH 72 | 沪深300,CSI300.INDX,399300.SZ 73 | 沪深300,CSI300.INDX,000300.SH 74 | 沪深300,CSI300.INDX,S00300.JZ 75 | 上证材料,000033.XSHG,000033.SH 76 | 消费50,000126.XSHG,000126.SH 77 | 中证环保,000827.XSHG,000827.SH 78 | 中证环保,000827.XSHG,S00827.JZ 79 | 中小板指,399005.XSHE,399005.SZ 80 | 中小板指,399005.XSHE,S99005.JZ 81 | 上证可选,000035.XSHG,000035.SH 82 | 环渤海,399357.XSHE,399357.SZ 83 | 深成金融,399686.XSHE,399686.SZ 84 | 中证消费,000932.XSHG,399932.SZ 85 | 中证消费,000932.XSHG,000932.SH 86 | 中证国企,000955.XSHG,000955.SH 87 | 中证国企,000955.XSHG,000955.CSI 88 | 技术领先,000973.XSHG,000973.SH 89 | 材料等权,000071.XSHG,000071.SH 90 | 
小盘低波,399408.XSHE,399408.SZ 91 | 中创成长,399626.XSHE,399626.SZ 92 | 300动态,000843.XSHG,000843.SH 93 | 中证红利,000922.XSHG,399922.SZ 94 | 中证红利,000922.XSHG,000922.SH 95 | 中证200,000904.XSHG,000904.SH 96 | 500高贝,000830.XSHG,000830.SH 97 | 央视回报,399553.XSHE,399553.SZ 98 | 中证消费,399932.XSHE,399932.SZ 99 | 中证消费,399932.XSHE,000932.SH 100 | 中小创新,399015.XSHE,399015.SZ 101 | 红利100,399411.XSHE,399411.SZ 102 | 深证可选,399616.XSHE,399616.SZ 103 | 基本400,000966.XSHG,000966.SH 104 | 中证TMT,000998.XSHG,000998.SH 105 | 180稳定,000125.XSHG,000125.SH 106 | 小盘高贝,399409.XSHE,399409.SZ 107 | 大盘价值,399373.XSHE,399373.SZ 108 | 上证环保,000158.XSHG,000158.SH 109 | 5年信用,000101.XSHG,000101.SH 110 | 380美元对冲,H50063.XSHG,h50063.CSI 111 | 300低贝,000829.XSHG,000829.SH 112 | 细分农业,000809.XSHG,000809.SH 113 | 细分农业,000809.XSHG,000809.CSI 114 | 300信息,000915.XSHG,000915.SH 115 | 中盘低波,399406.XSHE,399406.SZ 116 | 100低波,399645.XSHE,399645.SZ 117 | 国证基建,399359.XSHE,399359.SZ 118 | 深证责任,399341.XSHE,399341.SZ 119 | 大盘低波,399404.XSHE,399404.SZ 120 | CBN-兴全,399369.XSHE,399369.SZ 121 | 治理指数,000019.XSHG,000019.SH 122 | 1000材料,399382.XSHE,399382.SZ 123 | 1000材料,399382.XSHE,h30440.CSI 124 | 民企红利,000826.XSHG,000826.SH 125 | 国证50,399310.XSHE,399310.SZ 126 | 小盘价值,399377.XSHE,399377.SZ 127 | 小盘成长,399376.XSHE,399376.SZ 128 | 深互联网,399675.XSHE,399675.SZ 129 | 300能源,399908.XSHE,000908.SH 130 | 300能源,399908.XSHE,L11501.CSI 131 | 国证通信,399389.XSHE,399389.SZ 132 | 中证电信,000936.XSHG,399936.SZ 133 | 中证电信,000936.XSHG,000936.SH 134 | 中证电信,000936.XSHG,000936.CSI 135 | 中证工业,000930.XSHG,399930.SZ 136 | 中证工业,000930.XSHG,000930.SH 137 | 中证工业,000930.XSHG,000930.CSI 138 | 基金指数,000011.XSHG,399305.SZ 139 | 基金指数,000011.XSHG,000011.SH 140 | 大盘成长,399372.XSHE,399372.SZ 141 | 上证国企,000056.XSHG,000056.SH 142 | 180金融,000018.XSHG,000018.SH 143 | 深证ETF,399306.XSHE,399306.SZ 144 | 深证ETF,399306.XSHE,159943.SZ 145 | 中证金融净收益美元,H30314.XSHG,h30314.CSI 146 | 中证可选,000931.XSHG,399931.SZ 147 | 中证可选,000931.XSHG,000931.SH 148 | 申万证券,801193.INDX,163113.SZ 149 | 
央视50,399550.XSHE,399550.SZ 150 | 380公用,000113.XSHG,000113.SH 151 | 创业基础,399640.XSHE,399640.SZ 152 | 食品饮料,000807.XSHG,000807.SH 153 | 300沪市,000972.XSHG,000972.SH 154 | 300沪市,000972.XSHG,000972.CSI 155 | 投资时钟,399391.XSHE,399391.SZ 156 | 中证信息净收益美元,H30315.XSHG,h30315.CSI 157 | 智能家居,399996.XSHE,399996.SZ 158 | 智能家居,399996.XSHE,165524.SZ 159 | 上证央企,000042.XSHG,000042.SH 160 | 中证军工,399967.XSHE,399967.SZ 161 | 中证军工,399967.XSHE,512560.SH 162 | 中证军工,399967.XSHE,S99967.JZ 163 | 中证军工,399967.XSHE,h30229.CSI 164 | 巨潮中盘,399315.XSHE,399315.SZ 165 | 上证银行,000134.XSHG,000134.SH 166 | 民企200,399939.XSHE,399939.SZ 167 | 民企200,399939.XSHE,000939.SH 168 | 内地农业,000949.XSHG,399949.SZ 169 | 内地农业,000949.XSHG,000949.SH 170 | 内地农业,000949.XSHG,000949.CSI 171 | 180R价值,000031.XSHG,000031.SH 172 | 380高贝,000137.XSHG,000137.SH 173 | 深证新兴,399641.XSHE,399641.SZ 174 | 能源等权,000070.XSHG,000070.SH 175 | 300地产,000952.XSHG,399952.SZ 176 | 300地产,000952.XSHG,000952.SH 177 | 深公司债,399302.XSHE,399302.SZ 178 | 上证金融,000038.XSHG,000038.SH 179 | 中证能源,000928.XSHG,399928.SZ 180 | 中证能源,000928.XSHG,000928.SH 181 | 国证食品,399396.XSHE,399396.SZ 182 | 国证食品,399396.XSHE,S99396.JZ 183 | 300地产,399952.XSHE,399952.SZ 184 | 300地产,399952.XSHE,000952.SH 185 | 周期100,399402.XSHE,399402.SZ 186 | 500等权,000982.XSHG,399982.SZ 187 | 500等权,000982.XSHG,000982.SH 188 | 500等权,000982.XSHG,502000.SH 189 | 深成指R,399002.XSHE,399002.SZ 190 | 中证100,399903.XSHE,399903.SZ 191 | 中证100,399903.XSHE,000903.SH 192 | 500深市,399802.XSHE,399802.SZ 193 | 300医药,399913.XSHE,000913.SH 194 | 沪投资品,000102.XSHG,000102.SH 195 | 细分化工,000813.XSHG,000813.SH 196 | 细分化工,000813.XSHG,000813.CSI 197 | 国证2000,399303.XSHE,399303.SZ 198 | 180价值,000029.XSHG,000029.SH 199 | 380稳定,000142.XSHG,000142.SH 200 | 上央红利,000152.XSHG,000152.SH 201 | 中小责任,399651.XSHE,399651.SZ 202 | 深证绩效,399655.XSHE,399655.SZ 203 | 上证380,000009.XSHG,000009.SH 204 | 深信用债,399301.XSHE,399301.SZ 205 | 上证海外,000054.XSHG,000054.SH 206 | 国证民营,399362.XSHE,399362.SZ 207 | 小康指数,399901.XSHE,399901.SZ 208 | 
小康指数,399901.XSHE,000901.SH 209 | 中证医药,399933.XSHE,399933.SZ 210 | 中证医药,399933.XSHE,000933.SH 211 | 全指能源,000986.XSHG,159945.SZ 212 | 全指能源,000986.XSHG,000986.SH 213 | 中创低波,399665.XSHE,399665.SZ 214 | 全指材料,000987.XSHG,159944.SZ 215 | 全指材料,000987.XSHG,000987.SH 216 | 国证军工,399368.XSHE,399368.SZ 217 | 中小板综,399101.XSHE,399101.SZ 218 | 新综指,000017.XSHG,000017.SH 219 | 内地消费,000942.XSHG,399942.SZ 220 | 内地消费,000942.XSHG,000942.SH 221 | 内地消费,000942.XSHG,000942.CSI 222 | 中小板EW,399634.XSHE,399634.SZ 223 | 300医药,000913.XSHG,000913.SH 224 | 300可选,399911.XSHE,000911.SH 225 | 内地资源,399944.XSHE,399944.SZ 226 | 内地资源,399944.XSHE,000944.SH 227 | 300金融,000914.XSHG,000914.SH 228 | 深成能源,399680.XSHE,399680.SZ 229 | 巨潮大盘,399314.XSHE,399314.SZ 230 | 中小治理,399650.XSHE,399650.SZ 231 | 细分有色,000811.XSHG,000811.SH 232 | 细分有色,000811.XSHG,000811.CSI 233 | 央视成长,399552.XSHE,399552.SZ 234 | 380能源,000104.XSHG,000104.SH 235 | 深证治理,399328.XSHE,399328.SZ 236 | 380动态,000141.XSHG,000141.SH 237 | 上证F500,000100.XSHG,000100.SH 238 | 深成材料,399681.XSHE,399681.SZ 239 | 创业板EW,399635.XSHE,399635.SZ 240 | 央视责任,399555.XSHE,399555.SZ 241 | 上证上游,000094.XSHG,000094.SH 242 | 300R成长,000920.XSHG,399920.SZ 243 | 300R成长,000920.XSHG,000920.SH 244 | 中证工业净收益美元,H30316.XSHG,h30316.CSI 245 | 380医药,000109.XSHG,000109.SH 246 | 深消费50,399646.XSHE,399646.SZ 247 | 大消费,000997.XSHG,000997.SH 248 | 上证流通,000090.XSHG,000090.SH 249 | 国证基金,399379.XSHE,399379.SZ 250 | 300公用,000917.XSHG,000917.SH 251 | 300公用,000917.XSHG,L11525.CSI 252 | 国债指数,000012.XSHG,000012.SH 253 | 细分能源,000810.XSHG,000810.SH 254 | 细分能源,000810.XSHG,000810.CSI 255 | 深证300,399007.XSHE,399007.SZ 256 | 中小基础,399623.XSHE,399623.SZ 257 | 深医药50,399647.XSHE,399647.SZ 258 | 300高贝,000828.XSHG,000828.SH 259 | 中盘成长,399374.XSHE,399374.SZ 260 | 优势成长,399698.XSHE,399698.SZ 261 | 钱江30,000975.XSHG,000975.SH 262 | 红利指数,000015.XSHG,000015.SH 263 | 国证ETF,399380.XSHE,399380.SZ 264 | 中创400,399624.XSHE,399624.SZ 265 | 中创400,399624.XSHE,159918.SZ 266 | 300等权,000984.XSHG,159924.SZ 267 | 
300等权,000984.XSHG,000984.SH 268 | 国证300,399312.XSHE,399312.SZ 269 | 国证转债,399413.XSHE,399413.SZ 270 | 公用指数,000007.XSHG,000007.SH 271 | 深周期50,399670.XSHE,399670.SZ 272 | 创业板R,399606.XSHE,399606.SZ 273 | 180高贝,000135.XSHG,000135.SH 274 | 180治理,000021.XSHG,000021.SH 275 | 中盘价值,399375.XSHE,399375.SZ 276 | 上证民企,000049.XSHG,000049.SH 277 | 300成长,399918.XSHE,000918.SH 278 | 中证能源,399928.XSHE,399928.SZ 279 | 中证能源,399928.XSHE,000928.SH 280 | 优势制造,000146.XSHG,000146.SH 281 | 300银行,000951.XSHG,399951.SZ 282 | 300银行,000951.XSHG,000951.SH 283 | 300银行,000951.XSHG,L11516.CSI 284 | 高端装备,000097.XSHG,000097.SH 285 | 深证F200,399703.XSHE,399703.SZ 286 | 绩效指数,399398.XSHE,399398.SZ 287 | 上证消费,000036.XSHG,000036.SH 288 | 深证电信,399621.XSHE,399621.SZ 289 | 上证周期,000063.XSHG,000063.SH 290 | 中证100,000903.XSHG,399903.SZ 291 | 中证100,000903.XSHG,000903.SH 292 | 大宗商品,399979.XSHE,399979.SZ 293 | 大宗商品,399979.XSHE,161715.SZ 294 | 大宗商品,399979.XSHE,000979.SH 295 | 民企200,000939.XSHG,399939.SZ 296 | 民企200,000939.XSHG,000939.SH 297 | 上证180,SSE180.INDX,000010.SH 298 | 上证180,SSE180.INDX,S00010.JZ 299 | 公用等权,000079.XSHG,000079.SH 300 | 深证材料,399614.XSHE,399614.SZ 301 | 180ERC,H50067.XSHG,h50067.CSI 302 | 深证F60,399701.XSHE,399701.SZ 303 | 市值百强,000155.XSHG,000155.SH 304 | 国证新兴,399392.XSHE,399392.SZ 305 | 南方低碳,399378.XSHE,399378.SZ 306 | 中创价值,399627.XSHE,399627.SZ 307 | 800等权,000842.XSHG,000842.SH 308 | 中金消费,399364.XSHE,399364.SZ 309 | 深证300R,399344.XSHE,399344.SZ 310 | 中小价值,399604.XSHE,399604.SZ 311 | 上证指数,000001.XSHG,000001.SH 312 | 300基建,399950.XSHE,399950.SZ 313 | 300基建,399950.XSHE,000950.SH 314 | OCT文化,399397.XSHE,399397.SZ 315 | 上证健康,H50044.XSHG,h50044.CSI 316 | 中证500净收益美元,H30310.XSHG,h30310.CSI 317 | 500红利,000822.XSHG,000822.SH 318 | 深证公用,399622.XSHE,399622.SZ 319 | 全指金融,000992.XSHG,159940.SZ 320 | 全指金融,000992.XSHG,000992.SH 321 | 深成医药,399685.XSHE,399685.SZ 322 | 深证红利,399324.XSHE,399324.SZ 323 | 380波动,000130.XSHG,000130.SH 324 | 央企100,000927.XSHG,399927.SZ 325 | 央企100,000927.XSHG,000927.SH 326 | 
央企100,000927.XSHG,000927.CSI 327 | 浙江民企,000840.XSHG,000840.SH 328 | 500波动,000804.XSHG,000804.SH 329 | 500波动,000804.XSHG,000804.CSI 330 | 基本面50,000925.XSHG,399925.SZ 331 | 基本面50,000925.XSHG,000925.SH 332 | 深证环保,399638.XSHE,399638.SZ 333 | 中盘高贝,399407.XSHE,399407.SZ 334 | 创业新兴,399643.XSHE,399643.SZ 335 | 中小新兴,399642.XSHE,399642.SZ 336 | 深A医药,399674.XSHE,399674.SZ 337 | 深证创新,399016.XSHE,399332.SZ 338 | 深证创新,399016.XSHE,399016.SZ 339 | 申万证券,851931.INDX,163113.SZ 340 | 500美元对冲,H30406.XSHG,h30406.CSI 341 | 煤炭指数,000820.XSHG,000820.SH 342 | 煤炭指数,000820.XSHG,000820.CSI 343 | 中型综指,000020.XSHG,000020.SH 344 | 上证军工,H50036.XSHG,h50036.CSI 345 | 中创高贝,399666.XSHE,399666.SZ 346 | 内地金融,000946.XSHG,399946.SZ 347 | 内地金融,000946.XSHG,000946.SH 348 | 内地金融,000946.XSHG,000946.CSI 349 | 资源优势,399319.XSHE,399319.SZ 350 | 国证高铁,399419.XSHE,399419.SZ 351 | 基本600,000967.XSHG,000967.SH 352 | 基本600,000967.XSHG,000967.CSI 353 | 财富大盘,000940.XSHG,000940.SH 354 | 上证新兴,000067.XSHG,000067.SH 355 | 深证医药,399618.XSHE,399618.SZ 356 | 成长40,399326.XSHE,399326.SZ 357 | 国企红利,000824.XSHG,000824.SH 358 | 全指医药,000991.XSHG,000991.SH 359 | 中证可选净收益美元,H30311.XSHG,h30311.CSI 360 | 180资源,000026.XSHG,000026.SH 361 | 等权90,000971.XSHG,000971.SH 362 | 深证中游,399705.XSHE,399705.SZ 363 | 180成长,000028.XSHG,000028.SH 364 | 深红利50,399672.XSHE,399672.SZ 365 | 深证农业,399669.XSHE,399669.SZ 366 | 上证转债,000139.XSHG,000139.SH 367 | 上证150,000133.XSHG,000133.SH 368 | 资源80,000801.XSHG,000801.SH 369 | 资源80,000801.XSHG,000801.CSI 370 | 中经GDP,399399.XSHE,399399.SZ 371 | 上国改革,H50052.XSHG,h50052.CSI 372 | 国证价值,399371.XSHE,399371.SZ 373 | 两岸三地,000999.XSHG,000999.SH 374 | 两岸三地,000999.XSHG,h11108.CSI 375 | 国证红利,399321.XSHE,399321.SZ 376 | 上民红利,000153.XSHG,000153.SH 377 | 巨潮100,399313.XSHE,399313.SZ 378 | 上证养老,H50043.XSHG,h50043.CSI 379 | 300R价值,000921.XSHG,000921.SH 380 | 长三角,H30002.XSHG,399355.SZ 381 | 新能源,000941.XSHG,399941.SZ 382 | 新能源,000941.XSHG,160640.SZ 383 | 新能源,000941.XSHG,000941.SH 384 | 新能源,000941.XSHG,000941.CSI 385 | 
深证大宗,399639.XSHE,399639.SZ 386 | 中证地企,000953.XSHG,399953.SZ 387 | 中证地企,000953.XSHG,000953.SH 388 | 中证地企,000953.XSHG,000953.CSI 389 | 巨潮小盘,399316.XSHE,399316.SZ 390 | 1000公用,399390.XSHE,399390.SZ 391 | 1000公用,399390.XSHE,h30448.CSI 392 | 深证下游,399706.XSHE,399706.SZ 393 | 上证工业,000034.XSHG,000034.SH 394 | 全指消费,000990.XSHG,159946.SZ 395 | 全指消费,000990.XSHG,000990.SH 396 | 创业板G,399667.XSHE,399667.SZ 397 | 领先行业,000996.XSHG,000996.SH 398 | 中创EW,399660.XSHE,399660.SZ 399 | 消费80,000069.XSHG,000069.SH 400 | 深证上游,399704.XSHE,399704.SZ 401 | 中证医药,000933.XSHG,399933.SZ 402 | 中证医药,000933.XSHG,000933.SH 403 | 深证时钟,399644.XSHE,399644.SZ 404 | 380等权,000115.XSHG,000115.SH 405 | 中高企债,000833.XSHG,000833.SH 406 | 1000工业,399383.XSHE,399383.SZ 407 | 1000工业,399383.XSHE,h30441.CSI 408 | 责任指数,000048.XSHG,000048.SH 409 | 380红利,000150.XSHG,000150.SH 410 | 300电信,000916.XSHG,000916.SH 411 | 180等权,000051.XSHG,000051.SH 412 | 180波动,000129.XSHG,000129.SH 413 | 上证中游,000095.XSHG,000095.SH 414 | 新兴成指,000171.XSHG,000171.SH 415 | 新兴成指,000171.XSHG,000171.CSI 416 | 50美元对冲,H50061.XSHG,h50061.CSI 417 | 深医药EW,399676.XSHE,399676.SZ 418 | 大中盘,399400.XSHE,399400.SZ 419 | 沪股通,000159.XSHG,000159.SH 420 | 巨潮地产,399367.XSHE,399367.SZ 421 | 农业主题,000122.XSHG,000122.SH 422 | 180分层,000093.XSHG,000093.SH 423 | 超大盘,000043.XSHG,000043.SH 424 | 深证F120,399702.XSHE,399702.SZ 425 | 中证可选,399931.XSHE,399931.SZ 426 | 中证可选,399931.XSHE,000931.SH 427 | 创业板50,399673.XSHE,399673.SZ 428 | 创业板50,399673.XSHE,159949.SZ 429 | 上国红利,000151.XSHG,000151.SH 430 | 380ERC,H50068.XSHG,h50068.CSI 431 | 消费等权,000074.XSHG,000074.SH 432 | 180动量,H50057.XSHG,h50057.CSI 433 | 800动量,H30400.XSHG,h30400.CSI 434 | 食品饮料,801120.INDX,000807.SH 435 | 中证红利,399922.XSHE,399922.SZ 436 | 中证红利,399922.XSHE,000922.SH 437 | 国证大宗,399366.XSHE,399366.SZ 438 | 创业300,399012.XSHE,399012.SZ 439 | SME创新,399017.XSHE,399017.SZ 440 | 300材料,000909.XSHG,000909.SH 441 | 300材料,000909.XSHG,L11502.CSI 442 | 中证信息,399935.XSHE,399935.SZ 443 | 中证信息,399935.XSHE,000935.SH 444 | 
中证信息,399935.XSHE,S99935.JZ 445 | 深证100,399330.XSHE,399330.SZ 446 | 深证100,399330.XSHE,161227.SZ 447 | 优势资源,000145.XSHG,000145.SH 448 | 深100EW,399632.XSHE,399632.SZ 449 | 深证200R,399679.XSHE,399679.SZ 450 | 中证上游,000961.XSHG,399961.SZ 451 | 中证上游,000961.XSHG,000961.SH 452 | 380低贝,000138.XSHG,000138.SH 453 | 1000消费,399385.XSHE,399385.SZ 454 | 1000消费,399385.XSHE,h30443.CSI 455 | 上证信息,000039.XSHG,000039.SH 456 | 新硬件,399360.XSHE,399360.SZ 457 | 沪深300,000300.XSHG,399300.SZ 458 | 沪深300,000300.XSHG,000300.SH 459 | 沪深300,000300.XSHG,S00300.JZ 460 | 国证农业,399365.XSHE,399365.SZ 461 | 上证高新,000131.XSHG,000131.SH 462 | 中创高新,399652.XSHE,399652.SZ 463 | 上证50,000016.XSHG,000016.SH 464 | 上证50,000016.XSHG,510800.SH 465 | 上证50,000016.XSHG,S00016.JZ 466 | 上证50,000016.XSHG,K00016.JZ 467 | 中证下游,000963.XSHG,399963.SZ 468 | 中证下游,000963.XSHG,000963.SH 469 | 300价值,000919.XSHG,000919.SH 470 | 300价值,000919.XSHG,519671.SH 471 | 细分医药,000814.XSHG,000814.SH 472 | 沪新丝路,000160.XSHG,000160.SH 473 | 细分机械,000812.XSHG,000812.SH 474 | 细分机械,000812.XSHG,000812.CSI 475 | 深成指EW,399659.XSHE,399659.SZ 476 | 全指可选,000989.XSHG,000989.SH 477 | 环渤海,H30001.XSHG,399357.SZ 478 | 内地低碳,399977.XSHE,399977.SZ 479 | 内地低碳,399977.XSHE,000977.SH 480 | 细分地产,000816.XSHG,000816.SH 481 | 中小绩效,399658.XSHE,399658.SZ 482 | 沪大农业,H50055.XSHG,h50055.CSI 483 | 深次新股,399678.XSHE,399678.SZ 484 | 300能源,000908.XSHG,000908.SH 485 | 300能源,000908.XSHG,L11501.CSI 486 | 300深市,399972.XSHE,399972.SZ 487 | 50基本,000052.XSHG,000052.SH 488 | 深证文化,399654.XSHE,399654.SZ 489 | 中小红利,399649.XSHE,399649.SZ 490 | 中证公用,000937.XSHG,399937.SZ 491 | 中证公用,000937.XSHG,000937.SH 492 | 中证公用,000937.XSHG,000937.CSI 493 | 房地产,801180.INDX,160628.SZ 494 | 房地产,801180.INDX,512200.SH 495 | 央企红利,000825.XSHG,000825.SH 496 | 有色金属,801050.INDX,165316.SZ 497 | 有色金属,801050.INDX,000819.SH 498 | 有色金属,801050.INDX,512400.SH 499 | 有色金属,801050.INDX,CFNMSA.JZ 500 | 有色金属,801050.INDX,h30191.CSI 501 | 180基建,000025.XSHG,000025.SH 502 | 军工指数,399959.XSHE,399959.SZ 503 | 深证消费,399617.XSHE,399617.SZ 
504 | 中证材料净收益美元,H30317.XSHG,h30317.CSI 505 | 深证转债,399307.XSHE,399307.SZ 506 | 工业指数,000004.XSHG,000004.SH 507 | 300消费,399912.XSHE,000912.SH 508 | 深证700,399010.XSHE,399010.SZ 509 | 创业成长,399958.XSHE,399958.SZ 510 | 创业成长,399958.XSHE,000958.SH 511 | 创业成长,000958.XSHG,399958.SZ 512 | 创业成长,000958.XSHG,000958.SH 513 | 中证金融,399934.XSHE,399934.SZ 514 | 中证金融,399934.XSHE,000934.SH 515 | 180运输,000027.XSHG,000027.SH 516 | 上证移动,H50053.XSHG,h50053.CSI 517 | 300非周,000969.XSHG,399969.SZ 518 | 300非周,000969.XSHG,000969.SH 519 | 信息等权,000077.XSHG,000077.SH 520 | 深证100R,399004.XSHE,399004.SZ 521 | 深证100R,399004.XSHE,S99004.JZ 522 | 小康指数,000901.XSHG,399901.SZ 523 | 小康指数,000901.XSHG,000901.SH 524 | 内地运输,000945.XSHG,399945.SZ 525 | 内地运输,000945.XSHG,000945.SH 526 | 内地运输,000945.XSHG,000945.CSI 527 | 医药等权,000075.XSHG,000075.SH 528 | 300工业,399910.XSHE,000910.SH 529 | 深成电信,399688.XSHE,399688.SZ 530 | 全指电信,000994.XSHG,000994.SH 531 | 全指电信,000994.XSHG,000994.CSI 532 | 深证地产,399637.XSHE,399637.SZ 533 | 有色金属,000819.XSHG,165316.SZ 534 | 有色金属,000819.XSHG,000819.SH 535 | 有色金属,000819.XSHG,512400.SH 536 | 有色金属,000819.XSHG,CFNMSA.JZ 537 | 有色金属,000819.XSHG,h30191.CSI 538 | 1000医药,399386.XSHE,399386.SZ 539 | 1000医药,399386.XSHE,h30444.CSI 540 | 中证800,000906.XSHG,000906.SH 541 | 中证800,000906.XSHG,S00906.JZ 542 | 中证信息,000935.XSHG,399935.SZ 543 | 中证信息,000935.XSHG,000935.SH 544 | 中证信息,000935.XSHG,S99935.JZ 545 | 中证超大,000980.XSHG,399980.SZ 546 | 中证超大,000980.XSHG,000980.SH 547 | 中证超大,000980.XSHG,000980.CSI 548 | 创业创新,399018.XSHE,399018.SZ 549 | 信用100,000116.XSHG,000116.SH 550 | 深证创新,399332.XSHE,399332.SZ 551 | 深证创新,399332.XSHE,399016.SZ 552 | 地企100,000954.XSHG,399954.SZ 553 | 地企100,000954.XSHG,000954.SH 554 | 地企100,000954.XSHG,000954.CSI 555 | 国证商业,399361.XSHE,399361.SZ 556 | 智能资产,000983.XSHG,000983.SH 557 | 380消费,000108.XSHG,000108.SH 558 | 长三角,399355.XSHE,399355.SZ 559 | TMT50,399610.XSHE,399610.SZ 560 | 沪财中小,000091.XSHG,000091.SH 561 | 380电信,000112.XSHG,000112.SH 562 | 中证医药净收益美元,H30313.XSHG,h30313.CSI 563 | 
300运输,000957.XSHG,399957.SZ 564 | 300运输,000957.XSHG,000957.SH 565 | 深证200,399009.XSHE,399009.SZ 566 | 银河99,000959.XSHG,000959.SH 567 | 中创100R,399611.XSHE,399611.SZ 568 | 央视创新,399551.XSHE,399551.SZ 569 | 消费服务,000806.XSHG,000806.SH 570 | 180红利,000149.XSHG,000149.SH 571 | 家用电器,801110.INDX,930697.CSI 572 | 380成长,000117.XSHG,000117.SH 573 | 综合指数,000008.XSHG,000008.SH 574 | 深证创投,399696.XSHE,399696.SZ 575 | 中证龙头,000960.XSHG,399960.SZ 576 | 中证龙头,000960.XSHG,000960.SH 577 | 中证龙头,000960.XSHG,000960.CSI 578 | 中创100,399612.XSHE,399612.SZ 579 | 中创100,399612.XSHE,159942.SZ 580 | 深证1000,399011.XSHE,399011.SZ 581 | 深证价值,399348.XSHE,399348.SZ 582 | 大宗商品,000979.XSHG,399979.SZ 583 | 大宗商品,000979.XSHG,161715.SZ 584 | 大宗商品,000979.XSHG,000979.SH 585 | 深报综指,399352.XSHE,399352.SZ 586 | 安防产业,399693.XSHE,399693.SZ 587 | 商业指数,000005.XSHG,000005.SH 588 | 医药主题,000121.XSHG,000121.SH 589 | 中小300,399008.XSHE,399008.SZ 590 | 中小300,399008.XSHE,159907.SZ 591 | 医药100,000978.XSHG,399978.SZ 592 | 医药100,000978.XSHG,000978.SH 593 | 上证小盘,000045.XSHG,000045.SH 594 | 内地银行,000947.XSHG,399947.SZ 595 | 内地银行,000947.XSHG,000947.SH 596 | 内地银行,000947.XSHG,000947.CSI 597 | 上证地企,000055.XSHG,000055.SH 598 | 全指信息,000993.XSHG,000993.SH 599 | 深防御50,399671.XSHE,399671.SZ 600 | 深证科技,399339.XSHE,399339.SZ 601 | 上证F200,000098.XSHG,000098.SH 602 | 沪港AH溢价,H50066.XSHG,h50066.CSI 603 | 中证新兴,000964.XSHG,399964.SZ 604 | 中证新兴,000964.XSHG,000964.SH 605 | 细分食品,000815.XSHG,000815.SH 606 | 细分食品,000815.XSHG,000815.CSI 607 | 中小板R,399333.XSHE,399333.SZ 608 | 380动量,H50058.XSHG,h50058.CSI 609 | 100绩效,399656.XSHE,399656.SZ 610 | 非周期,000064.XSHG,000064.SH 611 | 计算机指,399363.XSHE,399363.SZ 612 | 中证转债,000832.XSHG,161826.SZ 613 | 中证转债,000832.XSHG,000832.SH 614 | 央视治理,399554.XSHE,399554.SZ 615 | 创业板V,399668.XSHE,399668.SZ 616 | 800地产,399965.XSHE,399965.SZ 617 | 央视文化,399557.XSHE,399557.SZ 618 | 300工业,000910.XSHG,000910.SH 619 | 300消费,000912.XSHG,000912.SH 620 | 500低贝,000831.XSHG,000831.SH 621 | 深证央企,399335.XSHE,399335.SZ 622 | 
中关村60,399697.XSHE,399697.SZ 623 | 国证服务,399320.XSHE,399320.SZ 624 | 乐富指数,399103.XSHE,399103.SZ 625 | 中证上游,399961.XSHE,399961.SZ 626 | 中证上游,399961.XSHE,000961.SH 627 | 深证民营,399337.XSHE,399337.SZ 628 | 中证民企,000938.XSHG,399938.SZ 629 | 中证民企,000938.XSHG,000938.SH 630 | 中证民企,000938.XSHG,000938.CSI 631 | 300成长,000918.XSHG,000918.SH 632 | 380信息,000111.XSHG,000111.SH 633 | 沪消费品,000103.XSHG,000103.SH 634 | 国证新能,399412.XSHE,399412.SZ 635 | 上证中小,000046.XSHG,000046.SH 636 | 中小高贝,399664.XSHE,399664.SZ 637 | 深证信息,399620.XSHE,399620.SZ 638 | 中证中游,000962.XSHG,399962.SZ 639 | 中证中游,000962.XSHG,000962.SH 640 | 中证中游,000962.XSHG,000962.CSI 641 | 深300EW,399633.XSHE,399633.SZ 642 | 医药生物,801150.INDX,163118.SZ 643 | 医药生物,801150.INDX,000808.SH 644 | 中证央企,000926.XSHG,399926.SZ 645 | 中证央企,000926.XSHG,000926.SH 646 | 中证央企,000926.XSHG,000926.CSI 647 | 内地基建,000943.XSHG,399943.SZ 648 | 内地基建,000943.XSHG,000943.SH 649 | 内地基建,000943.XSHG,000943.CSI 650 | 工业等权,000072.XSHG,000072.SH 651 | 300美元对冲,H30405.XSHG,h30405.CSI 652 | 防御100,399403.XSHE,399403.SZ 653 | 国证地产,399393.XSHE,399393.SZ 654 | 国证地产,399393.XSHE,S99393.JZ 655 | 180基本,000053.XSHG,000053.SH 656 | 基本200,000965.XSHG,000965.SH 657 | 基本200,000965.XSHG,000965.CSI 658 | 新华金牛,000976.XSHG,000976.SH 659 | 300分层,000981.XSHG,000981.SH 660 | 300分层,000981.XSHG,000981.CSI 661 | 300可选,000911.XSHG,000911.SH 662 | 深报指数,399351.XSHE,399351.SZ 663 | 300红利,000821.XSHG,000821.SH 664 | 中证下游,399963.XSHE,399963.SZ 665 | 中证下游,399963.XSHE,000963.SH 666 | 资源50,000092.XSHG,000092.SH 667 | 380价值,000118.XSHG,000118.SH 668 | 380工业,000106.XSHG,000106.SH 669 | 380基本,000128.XSHG,000128.SH 670 | 地产指数,000006.XSHG,399241.SZ 671 | 地产指数,000006.XSHG,000006.SH 672 | 全指成长,000057.XSHG,000057.SH 673 | 皖江30,399350.XSHE,399350.SZ 674 | 380材料,000105.XSHG,000105.SH 675 | 国证物流,399353.XSHE,399353.SZ 676 | 180R成长,000030.XSHG,000030.SH 677 | 上证下游,000096.XSHG,000096.SH 678 | 50等权,000050.XSHG,000050.SH 679 | 50等权,000050.XSHG,510430.SH 680 | 380可选,000107.XSHG,000107.SH 681 | 
全指价值,000058.XSHG,000058.SH 682 | 中证全指,000985.XSHG,000985.SH 683 | 中证全指,000985.XSHG,000985.CSI 684 | 内地资源,000944.XSHG,399944.SZ 685 | 内地资源,000944.XSHG,000944.SH 686 | 深成工业,399682.XSHE,399682.SZ 687 | 深证低波,399661.XSHE,399661.SZ 688 | 上证100,000132.XSHG,000132.SH 689 | 珠三角,399356.XSHE,399356.SZ 690 | 国证成长,399370.XSHE,399370.SZ 691 | 500沪市,000802.XSHG,000802.SH 692 | 500沪市,000802.XSHG,510440.SH 693 | 1000金融,399387.XSHE,399387.SZ 694 | 1000金融,399387.XSHE,h30445.CSI 695 | 上证180,000010.XSHG,000010.SH 696 | 上证180,000010.XSHG,S00010.JZ 697 | 深成公用,399689.XSHE,399689.SZ 698 | 上证医药,000037.XSHG,000037.SH 699 | 180低贝,000136.XSHG,000136.SH 700 | 创业专利,399691.XSHE,399691.SZ 701 | A股资源,000805.XSHG,000805.SH 702 | 300非周,399969.XSHE,399969.SZ 703 | 300非周,399969.XSHE,000969.SH 704 | 300波动,000803.XSHG,000803.SH 705 | 300波动,000803.XSHG,000803.CSI 706 | 优势消费,000147.XSHG,000147.SH 707 | 中证流通,000902.XSHG,000902.SH 708 | 中证流通,000902.XSHG,000902.CSI 709 | 上证休闲,H50054.XSHG,h50054.CSI 710 | 300稳定,000844.XSHG,000844.SH 711 | 180动态,000123.XSHG,000123.SH 712 | 上证中盘,000044.XSHG,000044.SH 713 | 中小盘,399401.XSHE,399401.SZ 714 | 1000能源,399381.XSHE,399381.SZ 715 | 1000能源,399381.XSHE,h30439.CSI 716 | 细分金融,000818.XSHG,000818.SH 717 | 细分金融,000818.XSHG,000818.CSI 718 | 300银行,399951.XSHE,399951.SZ 719 | 300银行,399951.XSHE,000951.SH 720 | 300银行,399951.XSHE,L11516.CSI 721 | 深互联EW,399677.XSHE,399677.SZ 722 | 深证成长,399346.XSHE,399346.SZ 723 | 180美元对冲,H50062.XSHG,h50062.CSI 724 | 上证50,SSE50.INDX,000016.SH 725 | 上证50,SSE50.INDX,510800.SH 726 | 上证50,SSE50.INDX,S00016.JZ 727 | 上证50,SSE50.INDX,K00016.JZ 728 | 深成消费,399684.XSHE,399684.SZ 729 | 1000可选,399384.XSHE,399384.SZ 730 | 1000可选,399384.XSHE,h30442.CSI 731 | 1000美元对冲,H30454.XSHG,h30454.CSI 732 | 中创500,399625.XSHE,399625.SZ 733 | 中证新兴,399964.XSHE,399964.SZ 734 | 中证新兴,399964.XSHE,000964.SH 735 | 全指工业,000988.XSHG,000988.SH 736 | 全指工业,000988.XSHG,000988.CSI 737 | 深证工业,399615.XSHE,399615.SZ 738 | 港股通,H50069.XSHG,h50069.CSI 739 | 全指公用,000995.XSHG,000995.SH 740 | 
全指公用,000995.XSHG,000995.CSI 741 | 深证高贝,399662.XSHE,399662.SZ 742 | 深证能源,399613.XSHE,399613.SZ 743 | 上证龙头,000065.XSHG,000065.SH 744 | 300绩效,399657.XSHE,399657.SZ 745 | 全R价值,000060.XSHG,000060.SH 746 | 上证F300,000099.XSHG,000099.SH 747 | 国企200,000956.XSHG,399956.SZ 748 | 国企200,000956.XSHG,000956.SH 749 | 国企200,000956.XSHG,000956.CSI 750 | 浙企综指,000839.XSHG,000839.SH 751 | 300金融,399914.XSHE,000914.SH 752 | 中小专利,399690.XSHE,399690.SZ 753 | 1000信息,399388.XSHE,399388.SZ 754 | 1000信息,399388.XSHE,h30446.CSI 755 | 苏州率先,399410.XSHE,399410.SZ 756 | 500等权,399982.XSHE,399982.SZ 757 | 500等权,399982.XSHE,000982.SH 758 | 500等权,399982.XSHE,502000.SH 759 | 380R价值,000120.XSHG,000120.SH 760 | 电信等权,000078.XSHG,000078.SH 761 | 医药生物,000808.XSHG,163118.SZ 762 | 医药生物,000808.XSHG,000808.SH 763 | 300周期,000968.XSHG,399968.SZ 764 | 300周期,000968.XSHG,000968.SH 765 | 300周期,000968.XSHG,000968.CSI 766 | 沪企债30,000061.XSHG,000061.SH 767 | 创业价值,000838.XSHG,000838.SH 768 | 上证公用,000041.XSHG,000041.SH 769 | -------------------------------------------------------------------------------- /rqalpha_mod_fxdayu_source/data_source/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xingetouzi/rqalpha-mod-fxdayu-source/1595df9cc409c3bc8bad2bfcfda5b37d8d42739b/rqalpha_mod_fxdayu_source/data_source/__init__.py -------------------------------------------------------------------------------- /rqalpha_mod_fxdayu_source/data_source/bundle.py: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | from rqalpha_mod_fxdayu_source.data_source.common import CacheMixin 4 | from rqalpha_mod_fxdayu_source.data_source.common.odd import OddFrequencyBaseDataSource 5 | from rqalpha_mod_fxdayu_source.share.astock_minute_reader import AStockBcolzMinuteBarReader 6 | from rqalpha_mod_fxdayu_source.utils import Singleton 7 | 8 | 9 | class BundleDataSource(OddFrequencyBaseDataSource): 10 | __metaclass__ = 
Singleton 11 | 12 | def __init__(self, path, bundle_path): 13 | super(BundleDataSource, self).__init__(path) 14 | self._bundle_reader = AStockBcolzMinuteBarReader(bundle_path) 15 | 16 | def raw_history_bars(self, instrument, frequency, start_dt=None, end_dt=None, length=None): 17 | sid = instrument.order_book_id 18 | data = self._bundle_reader.raw_history_bars(sid, start_dt, end_dt, length) 19 | return data 20 | 21 | def available_data_range(self, frequency): 22 | return self._bundle_reader.available_data_range() 23 | 24 | def is_base_frequency(self, instrument, freq): 25 | return freq in ["1m"] 26 | 27 | 28 | class BundleCacheDataSource(BundleDataSource, CacheMixin): 29 | def __init__(self, path, bundle_path): 30 | super(BundleCacheDataSource, self).__init__(path, bundle_path) 31 | CacheMixin.__init__(self) 32 | -------------------------------------------------------------------------------- /rqalpha_mod_fxdayu_source/data_source/common/__init__.py: -------------------------------------------------------------------------------- 1 | from .cache import CacheMixin 2 | from .odd import OddFrequencyDataSource, OddFrequencyBaseDataSource, CompleteAbstractDataSource 3 | 4 | __all__ = ["CacheMixin", "OddFrequencyDataSource", "OddFrequencyBaseDataSource", 5 | "CompleteAbstractDataSource"] 6 | -------------------------------------------------------------------------------- /rqalpha_mod_fxdayu_source/data_source/common/cache.py: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | import functools 3 | from datetime import timedelta 4 | from lru import LRU 5 | from weakref import proxy 6 | 7 | import numpy as np 8 | from rqalpha.utils.datetime_func import convert_dt_to_int, convert_int_to_datetime 9 | from rqalpha.utils.logger import system_log 10 | 11 | 12 | class Cache(object): 13 | def __init__(self, source, chunk, instrument, frequency): 14 | self._source = proxy(source) 15 | self._data = None 16 | self._finished = 
False 17 | self._chunk = chunk 18 | self._instrument = instrument 19 | self._frequency = frequency 20 | 21 | def __len__(self): 22 | return len(self._data) if self._data is not None else 0 23 | 24 | @property 25 | def last_dt(self): 26 | if len(self): 27 | return convert_int_to_datetime(self._data[-1]["datetime"]) 28 | else: 29 | return None 30 | 31 | @property 32 | def chunk(self): 33 | return self._chunk 34 | 35 | @property 36 | def instrument(self): 37 | return self._instrument 38 | 39 | @property 40 | def frequency(self): 41 | return self._frequency 42 | 43 | @property 44 | def finished(self): 45 | return self._finished 46 | 47 | def raw_history_bars(self, start_dt=None, end_dt=None, length=None, updated=False): 48 | bars = self._data 49 | if bars is not None: 50 | if end_dt: 51 | if self._frequency.endswith("d"): 52 | # 日线策略替换为收盘时间 53 | end_dt = end_dt.replace(hour=15, minute=00) 54 | end_dti = np.uint64(convert_dt_to_int(end_dt)) 55 | end_pos = bars["datetime"].searchsorted(end_dti, side="right") 56 | if start_dt: 57 | start_dti = np.uint64(convert_dt_to_int(start_dt)) 58 | start_pos = bars["datetime"].searchsorted(start_dti, side="left") 59 | if start_dt and end_dt: 60 | if end_pos < len(bars) or bars[-1]["datetime"] == end_dti: 61 | if start_pos == 0 and bars[0]["datetime"] != start_dti: # start datetime is early than cache 62 | return None 63 | else: 64 | return bars[start_pos:end_pos] 65 | # else update the cache 66 | elif length is not None: 67 | if end_dt: 68 | if end_pos < len(bars) or bars[-1]["datetime"] == end_dti: 69 | if end_pos - length < 0: 70 | return None 71 | else: 72 | return bars[end_pos - length: end_pos] 73 | # else update the cache 74 | elif start_dt: 75 | if start_pos == 0 and bars[0]["datetime"] != start_dti: 76 | return None 77 | if start_pos + length <= len(bars): 78 | return bars[start_pos: start_pos + length] 79 | # else update the cache 80 | # update the cache 81 | if not self._finished and not updated: 82 | 
self._source.update_cache(self, end_dt or start_dt) 83 | return self.raw_history_bars(start_dt, end_dt, length, updated=True) 84 | return None 85 | 86 | def update_bars(self, bars, count): 87 | system_log.debug("缓存更新,品种:[{}],时间:[{}, {}]".format(self.instrument.order_book_id, 88 | bars["datetime"][0], bars["datetime"][-1])) 89 | old = self._data 90 | if old is not None and bars is not None: 91 | self._data = np.concatenate((self._data, bars), axis=0) 92 | else: 93 | if old is not None: 94 | self._data = old 95 | elif bars is not None: 96 | self._data = bars 97 | # self._data should never be None 98 | if self._data is not None and len(self._data) > self._chunk * 2: # 保留两倍缓存长度的空间到内存 99 | left = len(self._data) - self._chunk * 2 100 | self._data = self._data[left:] 101 | self._finished = bars is None or len(bars) < count 102 | # import pandas as pd 103 | # system_log.debug(pd.DataFrame(self._data)) 104 | 105 | def close(self): 106 | self._finished = True 107 | 108 | 109 | class CacheMixin(object): 110 | MAX_CACHE_SPACE = 40000000 111 | CACHE_LENGTH = 10000 112 | 113 | def __init__(self, *args, **kwargs): 114 | super(CacheMixin, self).__init__(*args, **kwargs) 115 | self._caches = None 116 | self.clear_cache() 117 | self._raw_history_bars = self.raw_history_bars 118 | self.raw_history_bars = self.decorator_raw_history_bars(self.raw_history_bars) 119 | 120 | @classmethod 121 | def set_cache_length(cls, value): 122 | cls.CACHE_LENGTH = value 123 | 124 | @classmethod 125 | def set_max_cache_space(cls, value): 126 | cls.MAX_CACHE_SPACE = value 127 | 128 | def clear_cache(self): 129 | if self._caches is None: 130 | self._caches = LRU(self.MAX_CACHE_SPACE // self.CACHE_LENGTH) 131 | else: 132 | self._caches.clear() 133 | 134 | def update_cache(self, cache, dt): 135 | if len(cache): 136 | last = cache.last_dt + timedelta(seconds=1) 137 | else: 138 | bar_data = self._raw_history_bars(cache.instrument, cache.frequency, 139 | end_dt=dt - timedelta(seconds=1), length=cache.chunk) 
140 | if bar_data is not None and len(bar_data): 141 | cache.update_bars(bar_data, len(bar_data)) 142 | last = dt 143 | bar_data = self._raw_history_bars(cache.instrument, cache.frequency, start_dt=last, length=cache.chunk) 144 | if bar_data is not None and len(bar_data): 145 | cache.update_bars(bar_data, cache.chunk) 146 | else: 147 | cache.close() 148 | 149 | def decorator_raw_history_bars(self, func): 150 | @functools.wraps(func) 151 | def wrapped(instrument, frequency, start_dt=None, end_dt=None, length=None): 152 | key = (instrument.order_book_id, frequency) 153 | if key not in self._caches: 154 | self._caches[key] = Cache(self, self.CACHE_LENGTH, instrument, frequency) 155 | data = self._caches[key].raw_history_bars(start_dt, end_dt, length) 156 | if data is not None: 157 | return data 158 | else: 159 | system_log.debug("缓存未命中: 品种[{}]频率[{}] from {} to {}, length {}".format( 160 | instrument.order_book_id, frequency, start_dt, end_dt, length 161 | )) 162 | return func(instrument, frequency, start_dt=start_dt, end_dt=end_dt, length=length) 163 | 164 | return wrapped 165 | 166 | def raw_history_bars(self, instrument, frequency, start_dt=None, end_dt=None, length=None): 167 | raise NotImplementedError 168 | -------------------------------------------------------------------------------- /rqalpha_mod_fxdayu_source/data_source/common/minite.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | import numpy as np 4 | import pandas as pd 5 | from rqalpha.const import INSTRUMENT_TYPE 6 | from rqalpha.data.base_data_source import BaseDataSource 7 | from rqalpha.utils.datetime_func import convert_date_to_int, convert_int_to_date 8 | from rqalpha.utils.logger import system_log 9 | 10 | 11 | def parse_time_int(n): 12 | hour, n = n // 10000, n % 10000 13 | minuter, second = n // 100, n % 100 14 | return hour, minuter, second 15 | 16 | 17 | def bar_count_in_section(start, end, base=60, offset=0): 18 | hs, ms, ss = 
parse_time_int(start) 19 | he, me, se = parse_time_int(end) 20 | n_s = (hs * 3600 + ms * 60 + ss - 1 + offset) // base + 1 21 | n_e = (he * 3600 + me * 60 + offset) // base 22 | return max(n_e - n_s, 0) 23 | 24 | 25 | def safe_searchsorted(a, v, side='left', sorter=None): 26 | assert side in ["left", "right"] 27 | if not len(a): 28 | raise RuntimeError("Can't search in a empty array!") 29 | pos = np.searchsorted(a, v, side=side, sorter=sorter) 30 | if pos >= len(a): 31 | system_log.warning(RuntimeWarning( 32 | "Value to search [%s] beyond array range [ %s - %s ], there may be some data missing." 33 | % (v, a[0], a[-1]) 34 | )) 35 | return len(a) - 1 if side == "left" else len(a) 36 | return pos 37 | 38 | 39 | class MiniteBarDataSourceMixin(BaseDataSource): 40 | def _dates_index(self, instrument, skip_suspend=True): 41 | if skip_suspend: 42 | bars = self._filtered_day_bars(instrument) 43 | else: 44 | bars = self._all_day_bars_of(instrument) 45 | dts = bars["datetime"] 46 | return dts 47 | 48 | def _get_bars_in_days(self, instrument, frequency, days): 49 | raise NotImplementedError 50 | 51 | def _post_handle_bars(self, bars): 52 | return bars 53 | 54 | def get_bar_count_in_day(self, instrument, frequency, trade_date=None, start_time=0, end_time=150000): 55 | """ 56 | Get bar count of given instrument and frequency in a signle trading day, 57 | supporting frequency of Xm and Xh. 58 | 59 | Parameters 60 | ---------- 61 | instrument: rqalpha.model.instrument.Instrument 62 | Instrument to query. 63 | frequency: 64 | Frequency to query. 65 | trade_date: date 66 | Trade date to query. 67 | start_time: int 68 | Int to represent start time, inf format "HHMMSS". 69 | end_time: int 70 | Int to represent end time, inf format "HHMMSS". 71 | Returns 72 | ------- 73 | int: Return bar count in a single trading day. 
74 | """ 75 | 76 | if instrument.enum_type in [INSTRUMENT_TYPE.CS, INSTRUMENT_TYPE.INDX]: 77 | unit = frequency[-1] 78 | number = int(frequency[:-1]) 79 | if unit == "m": 80 | offset = 0 81 | factor = 60 82 | elif unit == "h": 83 | offset = 30 * 60 # 30min for A stock 84 | factor = 60 * 60 85 | start_time = max(93000, start_time) 86 | end_time = min(150000, end_time) 87 | if start_time > 113000: 88 | return bar_count_in_section(start_time, end_time, number * factor, offset) 89 | elif end_time <= 113000: 90 | return bar_count_in_section(start_time, end_time, number * factor, offset) 91 | else: 92 | end_time = max(end_time, 130000) 93 | return bar_count_in_section(start_time, 113000, number * factor, offset) + \ 94 | bar_count_in_section(130000, end_time, number * factor, offset) 95 | else: 96 | raise RuntimeError("Only stock is supported!") 97 | 98 | def raw_history_bars(self, instrument, frequency, start_dt=None, end_dt=None, length=None): 99 | if frequency[-1] == "m": 100 | days = [] 101 | if start_dt and end_dt: 102 | assert start_dt <= end_dt, "start datetime later then end datetime!" 
103 | s_date, s_time = start_dt.date(), start_dt.time() 104 | e_date, e_time = end_dt.date(), end_dt.time() 105 | s_date_int = convert_date_to_int(s_date) 106 | e_date_int = convert_date_to_int(e_date) 107 | dates = self._dates_index(instrument) 108 | s_pos = safe_searchsorted(dates, s_date_int) 109 | e_pos = safe_searchsorted(dates, e_date_int, side="right") - 1 110 | if s_pos == e_pos: 111 | days.append(dict( 112 | trade_date=convert_int_to_date(dates[s_pos]), 113 | start_time=s_time, end_time=e_time, 114 | )) 115 | else: 116 | days.append(dict(trade_date=convert_int_to_date(dates[s_pos]), start_time=s_time)) 117 | days.extend(map( 118 | lambda x: dict(trade_date=convert_int_to_date(x)), 119 | dates[s_pos + 1: e_pos])) 120 | days.append(dict(trade_date=convert_int_to_date(dates[e_pos]), end_time=e_time)) 121 | post_handler = lambda x: x 122 | elif start_dt and length: 123 | s_date, s_time = start_dt.date(), int(start_dt.strftime("%H%M%S")) 124 | dates = self._dates_index(instrument) 125 | s_date_int = convert_date_to_int(s_date) 126 | s_pos = safe_searchsorted(dates, s_date_int) 127 | s_bar_count = self.get_bar_count_in_day(instrument, frequency, 128 | trade_date=s_date, start_time=s_time) 129 | total_bar_count = self.get_bar_count_in_day(instrument, frequency) 130 | extra_days = (max(length - s_bar_count, 0) - 1) // total_bar_count + 1 131 | days.append(dict(trade_date=s_date, start_time=s_time)) 132 | days.extend(map( 133 | lambda x: dict(trade_date=convert_int_to_date(x)), 134 | dates[s_pos + 1: s_pos + 1 + extra_days])) 135 | post_handler = lambda x: x[:length] 136 | elif end_dt and length: 137 | e_date, e_time = end_dt.date(), int(end_dt.strftime("%H%M%S")) 138 | dates = self._dates_index(instrument) 139 | e_date_int = convert_date_to_int(e_date) 140 | e_pos = safe_searchsorted(dates, e_date_int, side="right") - 1 141 | e_bar_count = self.get_bar_count_in_day(instrument, frequency, 142 | trade_date=e_date, end_time=e_time) 143 | total_bar_count = 
self.get_bar_count_in_day(instrument, frequency) 144 | extra_days = (max(length - e_bar_count, 0) - 1) // total_bar_count + 1 145 | days.extend(map( 146 | lambda x: dict(trade_date=convert_int_to_date(x)), 147 | dates[max(e_pos - extra_days, 0): e_pos])) 148 | days.append(dict(trade_date=e_date, end_time=e_time)) 149 | post_handler = lambda x: x[-length:] 150 | else: 151 | raise RuntimeError("At least two of [start_dt,end_dt,length] should be given.") 152 | data = post_handler(self._get_bars_in_days(instrument, frequency, days)) 153 | return data 154 | else: 155 | return None 156 | -------------------------------------------------------------------------------- /rqalpha_mod_fxdayu_source/data_source/common/odd.py: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | import pandas as pd 4 | from rqalpha.data.adjust import FIELDS_REQUIRE_ADJUSTMENT, adjust_bars 5 | from rqalpha.data.base_data_source import BaseDataSource 6 | from rqalpha.interface import AbstractDataSource 7 | from rqalpha.utils.datetime_func import convert_dt_to_int 8 | 9 | from rqalpha_mod_fxdayu_source.utils.converter import DataFrameConverter 10 | 11 | RESAMPLE_TAG_MAP = { 12 | "m": "T", 13 | "h": "h", 14 | "d": "d", 15 | } 16 | 17 | TIME_TOLERANCE = { 18 | "m": 100, 19 | "h": 10000, 20 | "d": 1000000, 21 | } 22 | 23 | 24 | class OddFrequencyDataSource(AbstractDataSource): 25 | def __init__(self, *args, **kwargs): 26 | super(OddFrequencyDataSource, self).__init__(*args, **kwargs) 27 | 28 | @staticmethod 29 | def _resample_bars(bars, frequency): 30 | num = int(frequency[:-1]) 31 | freq = frequency[-1] 32 | bar_data = DataFrameConverter.np2df(bars) 33 | bar_data = bar_data.set_index(bar_data["datetime"].values) 34 | resample_freq = str(num) + RESAMPLE_TAG_MAP[freq] 35 | resample_group = bar_data.resample(resample_freq, closed="right", label="right") 36 | resample_data = pd.DataFrame() 37 | resample_data["high"] = 
resample_group["high"].max().dropna()
        # (continuation of _resample_bars: aggregate each resample bucket into
        # one OHLCV bar; the statement head lies before this chunk)
        resample_data["low"] = resample_group["low"].min().dropna()
        resample_data["close"] = resample_group["close"].last().dropna()
        resample_data["open"] = resample_group["open"].first().dropna()
        resample_data["volume"] = resample_group["volume"].sum().dropna()
        resample_data["datetime"] = resample_group["datetime"].last().dropna()
        bar_data = resample_data.reset_index(list(range(len(resample_data))), drop=True)
        bar_data = DataFrameConverter.df2np(bar_data)
        return bar_data

    def get_bar(self, instrument, dt, frequency):
        """Return the single bar of `instrument` at `dt` for `frequency`.

        Non-base minute frequencies ("5m", "15m", ...) are built by fetching
        `num` one-minute bars ending at `dt` and resampling them; every other
        non-base frequency falls through to the parent implementation.
        """
        num = int(frequency[:-1])
        freq = frequency[-1]
        if self.is_base_frequency(instrument, frequency):
            bars = self.raw_history_bars(instrument, frequency, end_dt=dt, length=1)
        else:
            if freq == "m":
                bars = self.raw_history_bars(instrument, "1" + freq, end_dt=dt, length=num)
                bars = self._resample_bars(bars, frequency)
            else:
                return super(OddFrequencyDataSource, self).get_bar(instrument, dt, frequency)
        if bars is None or not bars.size:
            # Nothing usable from the raw source: defer to the parent.
            return super(OddFrequencyDataSource, self).get_bar(
                instrument, dt, frequency
            )
        else:
            dti = convert_dt_to_int(dt)
            # TODO num * TIME_TOLERANCE[freq] maybe some problem in "d" frequency
            if abs(bars[-1]["datetime"] - dti) < num * TIME_TOLERANCE[freq]:
                # Last raw bar is close enough to `dt` to stand in for it.
                return bars[-1]
            else:
                # Synthesize a flat zero-volume bar at `dt` from the last close.
                data = bars[-1].copy()
                data["datetime"] = dti
                data["open"] = data["close"]
                data["high"] = data["close"]
                data["low"] = data["close"]
                data["volume"] = 0
                return data

    def history_bars(self, instrument, bar_count, frequency, fields, dt,
                     skip_suspended=True, include_now=False,
                     adjust_type='pre', adjust_orig=None):
        """Return up to `bar_count` bars ending at `dt`, resampling odd minute
        frequencies from 1-minute data and applying price adjustment.
        """
        if self.is_base_frequency(instrument, frequency):
            bars = self.raw_history_bars(instrument, frequency, end_dt=dt, length=bar_count)
        else:
            num = int(frequency[:-1])
            freq = frequency[-1]
            if freq == "m":
                # Fetch one extra period of 1-minute bars so trimming the
                # (possibly partial) last resampled bar still leaves bar_count.
                lower_bar_count = (bar_count + 1) * num
                bars = self.raw_history_bars(instrument, "1" + freq, end_dt=dt, length=lower_bar_count)
                if bars is None:
                    return super(OddFrequencyDataSource, self).history_bars(
                        instrument, bar_count, frequency, fields, dt,
                        skip_suspended=skip_suspended, include_now=include_now,
                        adjust_type=adjust_type, adjust_orig=adjust_orig
                    )
                else:
                    if bars.size:
                        bars = self._resample_bars(bars, frequency)
                        dti = convert_dt_to_int(dt)
                        if bars["datetime"][-1] != dti and not include_now:
                            # Drop the trailing bar when it does not close
                            # exactly at `dt` and partial bars are not wanted.
                            bars = bars[:-1]
                            bars = bars[-bar_count:]
                        else:
                            bars = bars[-bar_count:]
                        # TODO: skip suspended bars
            else:
                return super(OddFrequencyDataSource, self).history_bars(
                    instrument, bar_count, frequency, fields, dt,
                    skip_suspended=skip_suspended, include_now=include_now,
                    adjust_type=adjust_type, adjust_orig=adjust_orig
                )
        # if fields is not None:
        #     if not isinstance(fields, six.string_types):
        #         fields = [field for field in fields if field in bar_data]
        if adjust_type == "none" or instrument.type in {"Future", "INDX"}:
            # Futures/indexes carry no ex-rights factors; return unadjusted.
            return bars if fields is None else bars[fields]
        if isinstance(fields, str) and fields not in FIELDS_REQUIRE_ADJUSTMENT:
            return bars if fields is None else bars[fields]
        return adjust_bars(bars, self.get_ex_cum_factor(instrument.order_book_id),
                           fields, adjust_type, adjust_orig)

    def get_ex_cum_factor(self, order_book_id):
        """Return cumulative ex-rights factors; implemented by subclasses."""
        raise NotImplementedError

    def raw_history_bars(self, *args, **kwargs):
        """Fetch raw bars from the backing store; implemented by subclasses."""
        raise NotImplementedError

    def is_base_frequency(self, instrument, freq):
        """A frequency is "base" when its multiplier is 1 (e.g. "1m", "1d")."""
        num = int(freq[:-1])
        return num == 1


class CompleteAbstractDataSource(AbstractDataSource):
    """AbstractDataSource plus the ST/suspension queries rqalpha expects."""

    def is_st_stock(self, order_book_id, dates):
        raise NotImplementedError

    def is_suspended(self, order_book_id, dates):
        raise NotImplementedError

class OddFrequencyBaseDataSource(OddFrequencyDataSource, BaseDataSource):
    """Bundle-backed data source with odd-frequency support."""

    def get_ex_cum_factor(self, order_book_id):
        # Delegate explicitly to BaseDataSource, bypassing the abstract
        # OddFrequencyDataSource.get_ex_cum_factor in the MRO.
        return BaseDataSource.get_ex_cum_factor(self, order_book_id)


# --- rqalpha_mod_fxdayu_source/data_source/common/realtime.py ---
from datetime import datetime, time

import numpy as np
from rqalpha.environment import Environment
from rqalpha.utils.datetime_func import convert_dt_to_int, convert_date_to_date_int

from rqalpha_mod_fxdayu_source.data_source.common import OddFrequencyDataSource
from rqalpha_mod_fxdayu_source.data_source.common.odd import CompleteAbstractDataSource

EMPTY_BARS = None


class RealtimeDataSource(OddFrequencyDataSource, CompleteAbstractDataSource):
    """Data source that stitches a historical source with an in-day bar feed.

    Bars strictly before today come from ``hist_source``; today's bars come
    from ``inday_bars``. Everything except ``raw_history_bars`` and
    ``available_data_range`` is delegated to the historical source.
    """

    def __init__(self, inday_bars, hist_source):
        """
        Parameters
        ----------
        inday_bars: rqalpha_mod_fxdayu_source.inday_bars.base.AbstractIndayBars
        hist_source: rqalpha_mod_fxdayu_source.data_source.common.OddFrequencyBaseDataSource
        """
        super(RealtimeDataSource, self).__init__()
        self._inday_bars = inday_bars
        self._hist_source = hist_source

    # --- plain delegation to the historical source ---
    def is_suspended(self, order_book_id, dates):
        return self._hist_source.is_suspended(order_book_id, dates)

    def is_st_stock(self, order_book_id, dates):
        return self._hist_source.is_st_stock(order_book_id, dates)

    def get_trading_calendar(self):
        return self._hist_source.get_trading_calendar()

    def get_trading_minutes_for(self, instrument, trading_dt):
        return self._hist_source.get_trading_minutes_for(instrument, trading_dt)

    def get_all_instruments(self):
        return self._hist_source.get_all_instruments()

    def get_merge_ticks(self, order_book_id_list, trading_date, last_dt=None):
        return self._hist_source.get_merge_ticks(order_book_id_list, trading_date, last_dt)

    def current_snapshot(self, instrument, frequency, dt):
        return self._hist_source.current_snapshot(instrument, frequency, dt)

    def get_yield_curve(self, start_date, end_date, tenor=None):
        return self._hist_source.get_yield_curve(start_date, end_date, tenor)

    def get_settle_price(self, instrument, date):
        return self._hist_source.get_settle_price(instrument, date)

    def get_margin_info(self, instrument):
        return self._hist_source.get_margin_info(instrument)

    def get_split(self, order_book_id):
        return self._hist_source.get_split(order_book_id)

    def get_commission_info(self, instrument):
        return self._hist_source.get_commission_info(instrument)

    def get_dividend(self, order_book_id):
        return self._hist_source.get_dividend(order_book_id)

    def get_ex_cum_factor(self, order_book_id):
        return self._hist_source.get_ex_cum_factor(order_book_id)

    def raw_history_bars(self, instrument, frequency, start_dt=None, end_dt=None, length=None):
        """Fetch raw bars, merging historical bars with today's in-day bars.

        Exactly two of (start_dt, end_dt, length) must be given; otherwise
        RuntimeError is raised.
        """
        env = Environment.get_instance()
        now = env.calendar_dt
        today = now.date()
        today_int = convert_date_to_date_int(today)
        yesterday = datetime.combine(env.data_proxy.get_previous_trading_date(today),
                                     time=time(hour=23, minute=59, second=59))
        history_bars = EMPTY_BARS
        today_bars = EMPTY_BARS
        if end_dt and start_dt:
            end_dt = min(now, end_dt)
            if start_dt > end_dt:
                return EMPTY_BARS
            # BUG FIX: was `end_dt.date == today` — comparing the bound method
            # itself to a date is always False, so today's in-day bars were
            # never fetched in this branch. Call the method.
            if end_dt.date() == today:
                start_time = convert_dt_to_int(start_dt) % 1000000 if start_dt.date() == today else None
                end_time = convert_dt_to_int(end_dt) % 1000000
                # BUG FIX: was `self._inday_bars.bars(...)` — AbstractIndayBars
                # defines `get_bars`, which the other two branches already use.
                today_bars = self._inday_bars.get_bars(instrument, frequency, today_int,
                                                       start_time, end_time)
            if start_dt.date() < today:
                history_bars = self._hist_source.raw_history_bars(
                    instrument, frequency,
                    start_dt=start_dt,
                    end_dt=min(end_dt, yesterday)
                )
        elif start_dt and length:
            if start_dt.date() > today:
                return EMPTY_BARS
            if start_dt.date() < today:
                history_bars = self._hist_source.raw_history_bars(
                    instrument, frequency, start_dt=start_dt, length=length)
            # Fill the remainder of the request from today's feed.
            left = length - len(history_bars) if history_bars is not None else length
            start_time = convert_dt_to_int(start_dt) % 1000000 if start_dt.date() == today else None
            today_bars = self._inday_bars.get_bars(instrument, frequency,
                                                   today_int, start_time)[:left]
        elif end_dt and length:
            end_dt = min(now, end_dt)
            if end_dt.date() == today:
                end_time = convert_dt_to_int(end_dt) % 1000000
                today_bars = self._inday_bars.get_bars(instrument, frequency, today_int,
                                                       end_time=end_time)[-length:]
            left = length - len(today_bars) if today_bars is not None else length
            if left > 0:
                history_bars = self._hist_source.raw_history_bars(
                    instrument, frequency, end_dt=min(end_dt, yesterday), length=left)
        else:
            raise RuntimeError
        if history_bars is not None and today_bars is not None:
            return np.concatenate([history_bars, today_bars])
        elif history_bars is not None:
            return history_bars
        else:
            return today_bars

    # TODO logic of include_now was write in OddFrequencyDataSource and only support 1X frequencies
    # def is_base_frequency(self, instrument, freq):
    #     return self._hist_source.is_base_frequency(instrument, freq)

    def available_data_range(self, frequency):
        # Historical range, but extend the end to "now" since today's bars
        # come from the realtime feed.
        start, end = self._hist_source.available_data_range(frequency)
        end = datetime.now().date()
        return start, end
# --- rqalpha_mod_fxdayu_source/data_source/mongo.py ---
# encoding: utf-8
from datetime import date, datetime, time

import motor.motor_asyncio
import numpy as np
import pandas as pd
from dateutil.relativedelta import relativedelta
from rqalpha.const import INSTRUMENT_TYPE
from rqalpha.model.instrument import Instrument
from rqalpha.utils.datetime_func import convert_date_to_int
from rqalpha.utils.py2 import lru_cache

from rqalpha_mod_fxdayu_source.data_source.common import CacheMixin
from rqalpha_mod_fxdayu_source.data_source.common.minite import MiniteBarDataSourceMixin
from rqalpha_mod_fxdayu_source.data_source.common.odd import OddFrequencyBaseDataSource
from rqalpha_mod_fxdayu_source.utils import Singleton
from rqalpha_mod_fxdayu_source.utils.asyncio import get_asyncio_event_loop
from rqalpha_mod_fxdayu_source.utils.converter import DataFrameConverter

INSTRUMENT_TYPE_MAP = {
    INSTRUMENT_TYPE.CS: "stock",
    INSTRUMENT_TYPE.INDX: "stock",
}


class NoneDataError(Exception):
    # FIX: was `BaseException` — deriving from Exception lets generic
    # `except Exception` boundaries see it; existing `except NoneDataError`
    # handlers are unaffected.
    pass


class MongoDataSource(OddFrequencyBaseDataSource, MiniteBarDataSourceMixin):
    """Data source backed by a MongoDB instance (sync reads via MongoHandler,
    batched minute-bar reads via an async motor client)."""

    __metaclass__ = Singleton

    def __init__(self, path, mongo_url):
        super(MongoDataSource, self).__init__(path)
        from rqalpha_mod_fxdayu_source.share.mongo_handler import MongoHandler
        self._handler = MongoHandler(mongo_url)
        self._client = motor.motor_asyncio.AsyncIOMotorClient(mongo_url)
        self._db_map = self._get_frequency_db_map()

    def _get_frequency_db_map(self):
        # meta.db_map documents map instrument type -> {frequency: db name}.
        map_ = self._handler.client.get_database("meta").get_collection("db_map").find()
        dct = {item["type"]: item["map"] for item in map_}
        return dct

    def _get_db(self, instrument, frequency):
        """Resolve the database name holding `frequency` data for `instrument`.

        Raises NoneDataError when no database is mapped.
        """
        try:
            if isinstance(instrument, Instrument):
                instrument_type = instrument.enum_type
            else:
                instrument_type = instrument
            type_ = INSTRUMENT_TYPE_MAP[instrument_type]
            return self._db_map[type_][frequency]
        except KeyError:
            message = instrument.order_book_id if isinstance(instrument, Instrument) else instrument
            raise NoneDataError("MongoDB 中没有品种%s的%s数据" % (message, frequency))

    async def _do_get_bars(self, db, collection, filters, projection, fill=np.nan):
        """Gather per-day bar documents into one DataFrame sorted by datetime.

        Each document stores parallel field arrays plus `_l`, its row count.
        """
        dct = {}
        l = 0
        async for doc in self._client[db][collection].find(filters, projection):
            _l = doc.pop('_l')
            l += _l
            for key, values in doc.items():
                if isinstance(values, list) and (len(values) == _l):
                    dct.setdefault(key, []).extend(values)
        for values in dct.values():
            if len(values) != l:
                # BUG FIX: was `values.extend([fill] * l)`, which appended l
                # extra entries and left columns with unequal lengths; pad the
                # short column up to exactly l rows instead.
                values.extend([fill] * (l - len(values)))
        df = pd.DataFrame(dct)
        if df.size:
            return df.sort_values("datetime")
        else:
            return None

    def _get_bars_in_days(self, instrument, frequency, params):
        # params: per-day dicts with "trade_date" and optional start/end_time.
        s_date = params[0]["trade_date"]
        e_date = params[-1]["trade_date"]
        s_time = params[0]["start_time"] if "start_time" in params[0] else 0
        e_time = params[-1]["end_time"] if "end_time" in params[-1] else 150000
        s_dt_int = convert_date_to_int(s_date) + s_time
        e_dt_int = convert_date_to_int(e_date) + e_time
        db = self._get_db(instrument=instrument, frequency=frequency)
        collection = instrument.order_book_id
        filters = {"_d": {"$gte": datetime.combine(s_date, time=time()), "$lte": datetime.combine(e_date, time=time())}}
        projection = {"_id": 0, "_d": 0}
        loop = get_asyncio_event_loop()
        bars = loop.run_until_complete(self._do_get_bars(db, collection, filters, projection))
        if bars is not None and bars.size:
            bars = DataFrameConverter.df2np(bars)
        else:
            bars = DataFrameConverter.empty()
        # Trim to the exact intraday [start, end] window.
        s_pos = np.searchsorted(bars["datetime"], s_dt_int)
        e_pos = np.searchsorted(bars["datetime"], e_dt_int, side="right")
        return bars[s_pos:e_pos]

    def raw_history_bars(self, instrument, frequency, start_dt=None, end_dt=None, length=None):
        # Dispatch to the locally built MongoDB layout: minute data goes
        # through the day-sliced mixin, everything else is a direct read.
        if frequency.endswith("m"):
            return MiniteBarDataSourceMixin.raw_history_bars(
                self, instrument, frequency, start_dt=start_dt, end_dt=end_dt, length=length)
        else:
            code = instrument.order_book_id
            db = self._get_db(instrument, frequency)
            data = self._handler.read(code, db=db, start=start_dt, end=end_dt, length=length, sort=[("datetime", 1)]). \
                reset_index()
            if data is not None and data.size:
                return DataFrameConverter.df2np(data)
            else:
                return DataFrameConverter.empty()

    def is_base_frequency(self, instrument, frequency):
        # A frequency is "base" here when a MongoDB database stores it natively.
        if isinstance(instrument, Instrument):
            instrument_type = instrument.enum_type
        else:
            instrument_type = instrument
        type_ = INSTRUMENT_TYPE_MAP[instrument_type]
        return type_ in self._db_map and frequency in self._db_map[type_]

    def current_snapshot(self, instrument, frequency, dt):
        pass

    def _get_date_range(self, frequency):
        # Probe 600000.XSHG (always-listed benchmark stock) for the stored span.
        from pymongo import DESCENDING
        try:
            db = self._get_db(INSTRUMENT_TYPE.CS, frequency)
        except NoneDataError:
            db = self._get_db(INSTRUMENT_TYPE.CS, "1" + frequency[-1])
        key = "_d" if frequency.endswith("m") else "datetime"
        try:
            start = self._handler.client.get_database(db).get_collection("600000.XSHG").find() \
                .sort(key).limit(1)[0][key]
            end = self._handler.client.get_database(db).get_collection("600000.XSHG").find() \
                .sort(key, direction=DESCENDING).limit(1)[0][key]
        except IndexError:
            raise RuntimeError("无法从MongoDb获取数据时间范围")
        return start.date(), end.date()

    @lru_cache(maxsize=10)
    def available_data_range(self, frequency):
        if frequency.endswith("d") or frequency.endswith("h"):
            return date(2012, 6, 1), date.today() - relativedelta(days=1)
        return self._get_date_range(frequency)


class MongoCacheDataSource(MongoDataSource, CacheMixin):
    """MongoDataSource with the in-memory bar cache mixed in."""

    def __init__(self, path, mongo_url):
        super(MongoCacheDataSource, self).__init__(path, mongo_url)
        CacheMixin.__init__(self)
# --- rqalpha_mod_fxdayu_source/data_source/quantos.py ---
from datetime import date

import asyncio
import pandas as pd
from dateutil.relativedelta import relativedelta
from rqalpha.utils import lru_cache
from rqalpha.utils.datetime_func import convert_date_to_int
from rqalpha.utils.logger import system_log

from rqalpha_mod_fxdayu_source.data_source.common import CacheMixin
from rqalpha_mod_fxdayu_source.data_source.common.minite import safe_searchsorted, MiniteBarDataSourceMixin
from rqalpha_mod_fxdayu_source.data_source.common.odd import OddFrequencyBaseDataSource
from rqalpha_mod_fxdayu_source.utils import Singleton
from rqalpha_mod_fxdayu_source.utils.asyncio import get_asyncio_event_loop
from rqalpha_mod_fxdayu_source.utils.converter import QuantOsConverter
from rqalpha_mod_fxdayu_source.utils.instrument import instrument_to_tushare
from rqalpha_mod_fxdayu_source.utils.quantos import QuantOsDataApiMixin


class QuantOsSource(OddFrequencyBaseDataSource, MiniteBarDataSourceMixin, QuantOsDataApiMixin):
    """Data source backed by the quantos (tushare-pro) data API."""

    __metaclass__ = Singleton

    def __init__(self, path, api_url=None, user=None, token=None):
        super(QuantOsSource, self).__init__(path)
        QuantOsDataApiMixin.__init__(self, api_url, user, token)

    async def _get_bars_in_day(self, instrument=None, frequency=None, trade_date=None, start_time=0, end_time=150000):
        """Fetch one trading day of minute bars via the quantos bar API.

        Returns the raw (DataFrame, msg) pair from the API.
        """
        # TODO retry when net error occurs
        symbol = instrument_to_tushare(instrument)
        trade_date = convert_date_to_int(trade_date) // 1000000
        # Clamp to the API's supported intraday window.
        start_time = max(start_time, 80000)
        end_time = min(end_time, 160000)
        return self._api.bar(symbol=symbol, freq=frequency[:-1] + frequency[-1].upper(),
                             trade_date=trade_date, start_time=start_time, end_time=end_time)

    def _get_bars_in_days(self, instrument, frequency, days):
        """Fetch several days of bars concurrently and concatenate them.

        Raises RuntimeError on the first non-success API message.
        """
        loop = get_asyncio_event_loop()
        tasks = [self._get_bars_in_day(instrument=instrument, frequency=frequency, **day) for day in days]
        results = loop.run_until_complete(asyncio.gather(*tasks))
        dfs, msgs = zip(*results)
        for msg in msgs:
            if msg and msg != "0,":  # "0," is the API's success code
                raise RuntimeError(msg)
        bars = pd.concat(dfs, axis=0)
        if bars is not None and bars.size:
            return QuantOsConverter.df2np(bars)
        else:
            return QuantOsConverter.empty()

    def raw_history_bars(self, instrument, frequency, start_dt=None, end_dt=None, length=None):
        """Fetch raw bars: daily data straight from the API, minute data via
        the day-sliced mixin. Exactly two of (start_dt, end_dt, length) must
        be given for daily requests."""
        symbol = instrument_to_tushare(instrument)
        if frequency in ["1d"]:
            if start_dt and end_dt:
                s_date_int = convert_date_to_int(start_dt.date())
                e_date_int = convert_date_to_int(end_dt.date())
            elif start_dt and length:
                # Resolve the window against the instrument's trading dates.
                dates = self._dates_index(instrument)
                s_date_int = convert_date_to_int(start_dt.date())
                s_pos = safe_searchsorted(dates, s_date_int)
                s_date_int = int(dates[s_pos])
                e_date_int = int(dates[min(s_pos + length, len(dates)) - 1])
            elif end_dt and length:
                dates = self._dates_index(instrument)
                e_date_int = convert_date_to_int(end_dt.date())
                e_pos = safe_searchsorted(dates, e_date_int, side="right")
                s_date_int = int(dates[max(e_pos - length, 0)])
                e_date_int = int(dates[e_pos - 1])
            else:
                raise RuntimeError("At least two of [start_dt,end_dt,length] should be given.")
            data, msg = self._api.daily(symbol, freq=frequency, adjust_mode=None,
                                        start_date=s_date_int // 1000000,
                                        end_date=e_date_int // 1000000)
            if isinstance(data, pd.DataFrame) and data.size:
                data = data[data["volume"] > 0]  # TODO skip_suspended?
                return QuantOsConverter.df2np(data)
            else:
                if msg:
                    system_log.warning(msg)
                return QuantOsConverter.empty()
        else:
            return MiniteBarDataSourceMixin.raw_history_bars(
                self, instrument, frequency, start_dt=start_dt, end_dt=end_dt, length=length
            )

    def is_base_frequency(self, instrument, frequency):
        # Frequencies the quantos API serves natively.
        return frequency in ["1d", "1m", "5m", "15m"]

    def current_snapshot(self, instrument, frequency, dt):
        pass

    @lru_cache(maxsize=10)
    def available_data_range(self, frequency):
        return date(2012, 6, 1), date.today() - relativedelta(days=1)


class QuantOsCacheSource(QuantOsSource, CacheMixin):
    """QuantOsSource with the in-memory bar cache mixed in."""

    def __init__(self, *args, **kwargs):
        super(QuantOsCacheSource, self).__init__(*args, **kwargs)
        CacheMixin.__init__(self)


# --- rqalpha_mod_fxdayu_source/event_source.py ---
# encoding: utf-8
import time
import datetime
import re
from itertools import islice

try:
    from Queue import Empty
except ImportError:
    from queue import Empty

from rqalpha.const import DEFAULT_ACCOUNT_TYPE
from rqalpha.events import Event, EVENT
from rqalpha.mod.rqalpha_mod_sys_simulation.simulation_event_source import SimulationEventSource
from rqalpha.mod.rqalpha_mod_sys_stock_realtime.event_source import RealtimeEventSource
from rqalpha.mod.rqalpha_mod_sys_stock_realtime.utils import is_holiday_today
from rqalpha.utils.i18n import gettext as _
from rqalpha.utils.logger import system_log

from rqalpha_mod_fxdayu_source.utils import InDayTradingPointIndexer

_unit_freq_template = re.compile("[^0-9]+")
# BUG FIX: the pattern had lost its group names ("(?Ph|m|d)" is invalid regex
# and raises re.error at import time). events() reads groups "freq1" and
# "freq2", so those names are restored here.
_freq_template = re.compile(r"[0-9]+(?P<freq1>h|m|d)|(?P<freq2>tick)")
class IntervalEventSource(SimulationEventSource):
    """Simulation event source that supports interval frequencies such as
    "5m", "2h" or "3d" in addition to the standard 1x frequencies."""

    def __init__(self, env):
        super(IntervalEventSource, self).__init__(env)
        # Helper that enumerates intraday bar timestamps per frequency.
        self._indexer = InDayTradingPointIndexer()

    def _get_trading_points(self, trading_date, frequency):
        # Union of bar timestamps for every configured account type, sorted.
        indexer = self._indexer
        trading_points = set()
        for account_type in self._config.base.accounts:
            if account_type == DEFAULT_ACCOUNT_TYPE.STOCK.name:
                trading_points.update(indexer.get_a_stock_trading_points(trading_date, frequency))
            elif account_type == DEFAULT_ACCOUNT_TYPE.FUTURE.name:
                trading_points.update(indexer.get_future_trading_points(self._env, trading_date, frequency))
        return sorted(list(trading_points))

    def _get_events_for_d(self, start_date, end_date, frequency):
        """Yield the daily event cycle every `num` trading days."""
        num = int(frequency[:-1])
        for day in islice(self._env.data_proxy.get_trading_dates(start_date, end_date), None, None, num):
            date = day.to_pydatetime()
            dt_before_trading = date.replace(hour=0, minute=0)
            dt_bar = date.replace(hour=15, minute=0)
            dt_after_trading = date.replace(hour=15, minute=30)
            dt_settlement = date.replace(hour=17, minute=0)
            yield Event(EVENT.BEFORE_TRADING, calendar_dt=dt_before_trading, trading_dt=dt_before_trading)
            yield Event(EVENT.BAR, calendar_dt=dt_bar, trading_dt=dt_bar)
            yield Event(EVENT.AFTER_TRADING, calendar_dt=dt_after_trading, trading_dt=dt_after_trading)
            yield Event(EVENT.SETTLEMENT, calendar_dt=dt_settlement, trading_dt=dt_settlement)

    def _get_events_in_day(self, start_date, end_date, frequency):
        """Yield BAR events at each intraday trading point, restarting the
        point scan whenever the universe changes mid-day."""
        for day in self._env.data_proxy.get_trading_dates(start_date, end_date):
            before_trading_flag = True
            date = day.to_pydatetime()
            last_dt = None
            done = False

            dt_before_day_trading = date.replace(hour=8, minute=30)

            while True:
                if done:
                    break
                exit_loop = True
                trading_points = self._get_trading_points(date, frequency)
                for calendar_dt in trading_points:
                    # Skip points already emitted before a universe change.
                    if last_dt is not None and calendar_dt < last_dt:
                        continue

                    if calendar_dt < dt_before_day_trading:
                        # Pre-8:30 points (night session) belong to this
                        # trading day; re-stamp them onto `date`.
                        trading_dt = calendar_dt.replace(year=date.year,
                                                         month=date.month,
                                                         day=date.day)
                    else:
                        trading_dt = calendar_dt
                    if before_trading_flag:
                        before_trading_flag = False
                        yield Event(EVENT.BEFORE_TRADING,
                                    calendar_dt=calendar_dt - datetime.timedelta(minutes=30),
                                    trading_dt=trading_dt - datetime.timedelta(minutes=30))
                    if self._universe_changed:
                        # Universe changed: remember where we were and rescan.
                        self._universe_changed = False
                        last_dt = calendar_dt
                        exit_loop = False
                        break
                    # yield handle bar
                    yield Event(EVENT.BAR, calendar_dt=calendar_dt, trading_dt=trading_dt)
                if exit_loop:
                    done = True

            dt = date.replace(hour=15, minute=30)
            yield Event(EVENT.AFTER_TRADING, calendar_dt=dt, trading_dt=dt)

            dt = date.replace(hour=17, minute=0)
            yield Event(EVENT.SETTLEMENT, calendar_dt=dt, trading_dt=dt)

    def _get_events_for_h(self, start_date, end_date, frequency):
        return self._get_events_in_day(start_date, end_date, frequency)

    def _get_events_for_m(self, start_date, end_date, frequency):
        return self._get_events_in_day(start_date, end_date, frequency)

    def _get_events_for_tick(self, start_date, end_date, frequency):
        """Yield TICK events from the merged tick stream, restarting the
        stream whenever the universe changes."""
        data_proxy = self._env.data_proxy
        for day in data_proxy.get_trading_dates(start_date, end_date):
            date = day.to_pydatetime()
            last_tick = None
            last_dt = None
            dt_before_day_trading = date.replace(hour=8, minute=30)
            while True:
                for tick in data_proxy.get_merge_ticks(self._get_universe(), date, last_dt):
                    # find before trading time
                    if last_tick is None:
                        last_tick = tick
                        dt = tick.datetime
                        before_trading_dt = dt - datetime.timedelta(minutes=30)
                        yield Event(EVENT.BEFORE_TRADING, calendar_dt=before_trading_dt,
                                    trading_dt=before_trading_dt)

                    dt = tick.datetime

                    if dt < dt_before_day_trading:
                        trading_dt = dt.replace(year=date.year, month=date.month, day=date.day)
                    else:
                        trading_dt = dt

                    yield Event(EVENT.TICK, calendar_dt=dt, trading_dt=trading_dt, tick=tick)

                    if self._universe_changed:
                        self._universe_changed = False
                        last_dt = dt
                        break
                else:
                    # Stream exhausted without a universe change: day is done.
                    break

            dt = date.replace(hour=15, minute=30)
            yield Event(EVENT.AFTER_TRADING, calendar_dt=dt, trading_dt=dt)

            dt = date.replace(hour=17, minute=0)
            yield Event(EVENT.SETTLEMENT, calendar_dt=dt, trading_dt=dt)

    def events(self, start_date, end_date, frequency):
        """Dispatch to the generator matching the frequency unit (d/h/m/tick)."""
        try:
            result = _freq_template.match(frequency)
            freq = result.group("freq1") or result.group("freq2")
            return getattr(self, "_get_events_for_" + freq)(start_date, end_date, frequency)
        except Exception:
            raise NotImplementedError(_("Frequency {} is not support.").format(frequency))


# NOTE: should call handle_bar at 13:00:00?
def is_tradetime_now():
    """Return True during A-share trading sessions (09:15-11:30, 13:01-15:00)."""
    # FIX unknown fatal error due to call time.localtime
    now_time = datetime.datetime.now()
    now = now_time.strftime("%H%M")
    if "0915" <= now <= "1130" or "1301" <= now <= "1500":
        return True
    return False


class RealTimeEventSource(RealtimeEventSource):
    """Realtime event source driven by a wall-clock worker thread."""

    def clock_worker(self):
        """Worker loop: push BEFORE_TRADING / BAR / AFTER_TRADING / SETTLEMENT
        events onto the queue at the appropriate wall-clock times."""
        once_before_trading = False
        while True:
            # time.sleep(self.fps)
            if is_holiday_today():
                time.sleep(60)
                continue

            dt = datetime.datetime.now()
            # Align the next firing time onto the fps grid (5s grace).
            next_dt = datetime.datetime.fromtimestamp((dt.timestamp() - 5) // self.fps * self.fps + self.fps)
            # NOTE: In real time debug, comment below code block
            # if next_dt > dt:
            #     time.sleep(next_dt.timestamp() - dt.timestamp())
            #     dt = datetime.datetime.now()
            # END
            if dt.strftime("%H:%M:%S") >= "08:30:00" and dt.date() > self.before_trading_fire_date:
                self.event_queue.put((dt, EVENT.BEFORE_TRADING))
                self.before_trading_fire_date = dt.date()
                once_before_trading = True
            elif dt.strftime("%H:%M:%S") >= "15:10:00" and dt.date() > self.after_trading_fire_date:
                self.event_queue.put((dt, EVENT.AFTER_TRADING))
                self.after_trading_fire_date = dt.date()
            elif dt.strftime("%H:%M:%S") >= "15:10:00" and dt.date() > self.settlement_fire_date:
                # NOTE(review): because of the elif chain, SETTLEMENT fires on
                # the loop pass after AFTER_TRADING — presumably intentional.
                self.event_queue.put((dt, EVENT.SETTLEMENT))
                self.settlement_fire_date = dt.date()

            if not once_before_trading and self._env.config.extra.force_run_init_when_pt_resume \
                    and "09:00:00" <= dt.strftime("%H:%M:%S") <= "15:00:00":
                # restart with init during trading time, re-running before_trading
                self.event_queue.put((dt, EVENT.BEFORE_TRADING))
                self.before_trading_fire_date = dt.date()
                once_before_trading = True
                continue
            if dt.strftime("%H:%M:%S") >= "09:31:00" and is_tradetime_now():
                self.event_queue.put((dt, EVENT.BAR))
            dt = datetime.datetime.now()
            if next_dt.timestamp() + self.fps > dt.timestamp():
                time.sleep(next_dt.timestamp() + self.fps - dt.timestamp())

    def events(self, start_date, end_date, frequency):
        """Drain the clock-worker queue forever, yielding each queued event."""
        running = True

        self.clock_engine_thread.start()

        # With a redis quotation feed there is no local quotation engine.
        if not self.mod_config.redis_uri:
            self.quotation_engine_thread.start()

        while running:
            while True:
                try:
                    dt, event_type = self.event_queue.get(timeout=1)
                    break
                except Empty:
                    continue
            real_dt = datetime.datetime.now()
            system_log.debug("real_dt {}, dt {}, event {}", real_dt, dt, event_type)
            yield Event(event_type, calendar_dt=real_dt, trading_dt=dt)
# rqalpha_mod_fxdayu_source/inday_bars/__init__.py ->
# https://raw.githubusercontent.com/xingetouzi/rqalpha-mod-fxdayu-source/1595df9cc409c3bc8bad2bfcfda5b37d8d42739b/rqalpha_mod_fxdayu_source/inday_bars/__init__.py

# --- rqalpha_mod_fxdayu_source/inday_bars/base.py ---
class AbstractIndayBars(object):
    """Interface of an intraday (current trading day) bar provider."""

    def get_bars(self, instrument, frequency, trade_date=None, start_time=None, end_time=None):
        raise NotImplementedError


# --- rqalpha_mod_fxdayu_source/inday_bars/quantos.py ---
import time

import pandas as pd
from rqalpha.utils.logger import user_system_log

from rqalpha_mod_fxdayu_source.inday_bars.base import AbstractIndayBars
from rqalpha_mod_fxdayu_source.utils.converter import QuantOsConverter
from rqalpha_mod_fxdayu_source.utils.instrument import instrument_to_tushare
from rqalpha_mod_fxdayu_source.utils.quantos import QuantOsDataApiMixin, QuantOsQueryError, ensure_api_login


class QuantOsIndayBars(AbstractIndayBars, QuantOsDataApiMixin):
    """Intraday bars fetched live from the quantos bar API, with retries."""

    MAX_RETRY = 3

    def __init__(self, api_url, user, token):
        super(QuantOsIndayBars, self).__init__()
        QuantOsDataApiMixin.__init__(self, api_url, user, token)

    @ensure_api_login
    def get_bars(self, instrument, frequency, trade_date=None, start_time=None, end_time=None):
        """Fetch today's bars (trade_date=0 means "current day" to the API).

        Retries up to MAX_RETRY times with linear backoff; raises
        QuantOsQueryError when all retries fail.
        """
        symbol = instrument_to_tushare(instrument)
        kwargs = {}
        if start_time is not None:
            kwargs["start_time"] = start_time
        elif end_time is not None:
            kwargs["end_time"] = end_time
        retry = 0
        while retry < self.MAX_RETRY:
            retry += 1
            try:
                freq = frequency[:-1] + frequency[-1].upper()
                params = dict(symbol=symbol, freq=freq, trade_date=0, **kwargs)
                bars, msg = self._api.bar(**params)
                code = msg.split(",")[0]
                if not isinstance(bars, pd.DataFrame) or code != "0":
                    raise QuantOsQueryError(msg)
                else:
                    break
            except QuantOsQueryError as e:
                # BUG FIX: was `if retry <= self.MAX_RETRY`, which is always
                # true here, so the final failure was swallowed and df2np()
                # ran on an invalid `bars`. Re-raise once retries run out.
                if retry < self.MAX_RETRY:
                    user_system_log.warning("[japs] Exception occurs when call api.bar with param [%s]: %s" % (params, e))
                    time.sleep(retry)
                else:
                    raise e
        return QuantOsConverter.df2np(bars)


# --- rqalpha_mod_fxdayu_source/inday_bars/redis.py ---
from bisect import bisect_left, bisect_right
from collections import OrderedDict
from datetime import datetime

import numpy as np
from dateutil.parser import parse
from rqalpha.data.converter import StockBarConverter
from rqalpha.utils import Singleton
from rqalpha.utils.datetime_func import convert_int_to_datetime, convert_dt_to_int
from rqalpha.utils.logger import system_log

from rqalpha_mod_fxdayu_source.inday_bars.base import AbstractIndayBars
from rqalpha_mod_fxdayu_source.utils import InDayTradingPointIndexer

CONVERTER = StockBarConverter


class InDayIndexCache(object):
    """Per-day cache of intraday trading-point indexes."""

    __metaclass__ = Singleton

    def __init__(self):
        self._index = {}
        self._index_date = None

    def _trans_order_book_id(self, order_book_id):
        return "STOCK"
        # TODO need rqalpha environment to be create first
        # if get_account_type(order_book_id) == DEFAULT_ACCOUNT_TYPE.STOCK:
        #     return "STOCK"
        # else:
        #     return order_book_id

    def _ensure_index(self, frequency, order_book_id):
        """Build (or reuse) today's trading-point index for the frequency."""
        today = datetime.now().date()
        if self._index_date != today:
            # New day: all cached indexes are stale.
            self._index.clear()
            self._index_date = today
        order_book_id = self._trans_order_book_id(order_book_id)
        if order_book_id not in self._index:
            self._index[order_book_id] = {}
        # BUG FIX: was `frequency not in self._index` — tested the outer dict
        # (keyed by order_book_id), so the cache never hit and the index was
        # rebuilt on every call.
        if frequency not in self._index[order_book_id]:
            if order_book_id == "STOCK":
                self._index[order_book_id][frequency] = \
                    sorted(InDayTradingPointIndexer.get_a_stock_trading_points(today, frequency))
            else:
                raise RuntimeError("Future not support now")
        return self._index[order_book_id][frequency]

    def get_index(self, frequency, order_book_id):
        return self._ensure_index(frequency, order_book_id)


class RedisClient(object):
    """Thin factory wrapping one redis connection."""

    __metaclass__ = Singleton

    def __init__(self, redis_url):
        import redis
        self._client = redis.from_url(redis_url)

    def get(self, order_book_id, frequency):
        return RedisBars(self._client, order_book_id, frequency)


class RedisBars(object):
    """Read-only view of one instrument's intraday bars stored as parallel
    redis lists keyed "<order_book_id>:<field>"."""

    ALL_FIELDS = [
        "datetime", "open", "high", "low", "close", "volume"
    ]

    def __init__(self, client, order_book_id, frequency, indexer=None):
        """
        Parameters
        ----------
        client: redis.Redis
            redis connection
        order_book_id: str
            order book id of instruments
        frequency:
            frequency of data
        indexer: InDayIndexCache, optional
            precomputed trading-point index; falls back to reading the
            datetime list from redis when absent.
        """
        self._client = client
        self._order_book_id = order_book_id
        self._frequency = frequency
        # BUG FIX: was `self._indexer = None`, silently discarding the
        # `indexer` argument.
        self._indexer = indexer
        self._converter = CONVERTER

    def _get_redis_key(self, key):
        return ":".join([self._order_book_id, key])

    @property
    def index(self):
        if self._indexer:
            return self._indexer.get_index(self._frequency, self._order_book_id)
        else:
            return [parse(item) for item in self._client.lrange(self._get_redis_key("datetime"), 0, -1)]

    def bars(self, l, r, fields=None):
        """Return bars [l, r) as a structured numpy array."""
        if fields is None:
            fields = self.ALL_FIELDS
        dtype = OrderedDict([(f, np.uint64 if f == "datetime" else np.float64) for f in fields])
        length = r - l
        result = np.empty(shape=(length,), dtype=list(dtype.items()))
        if not length:
            return result
        result.fill(np.nan)
        for field in fields:
            value = self._client.lrange(self._get_redis_key(field), l, r - 1)
            if field == "datetime":
                value = list(map(lambda x: convert_dt_to_int(parse(x.decode())), value))
            else:
                # FIX: np.str was a deprecated alias of builtin str (removed
                # in NumPy >= 1.24); behavior is identical.
                value = np.array(list(map(lambda x: x.decode(), value)), dtype=str)
                value = value.astype(np.float64)
            result[:len(value)][field] = value[:]
        return result

    def __len__(self):
        return

    def start(self):
        return

    def end(self):
        return

    def find(self, date, side="left"):
        """Binary-search `date` in the datetime index; side as in bisect."""
        dts = self.index
        if side == "left":
            index = bisect_left(dts, date)
        elif side == "right":
            index = bisect_right(dts, date)
        else:
            raise RuntimeError("unsupported side of find method, please use [left, right]")
        return index


class RedisIndayBars(AbstractIndayBars):
    """Intraday bar provider reading from a redis quotation feed."""

    def __init__(self, redis_url):
        # BUG FIX: was super(AbstractIndayBars, self) — starts MRO lookup
        # after AbstractIndayBars instead of after this class.
        super(RedisIndayBars, self).__init__()
        if not (redis_url.startswith("redis://") or redis_url.startswith("tcp://")):
            # BUG FIX: str has no `splits` method (AttributeError); use split.
            redis_url = "redis://" + redis_url.split("//")[-1]
        system_log.info("Connected to Redis on: %s" % redis_url)
        self._client = RedisClient(redis_url)

    def get_bars(self, instrument, frequency, trade_date=None, start_time=None, end_time=None):
        start_time = 0 if start_time is None else start_time
        end_time = 235959 if end_time is None else end_time
        start_dt = convert_int_to_datetime(trade_date * 1000000 + start_time)
        end_dt = convert_int_to_datetime(trade_date * 1000000 + end_time)
        bars = self._client.get(instrument.order_book_id, frequency)
        start_pos = bars.find(start_dt)
        end_pos = bars.find(end_dt)
        return bars.bars(start_pos, end_pos)

# --- rqalpha_mod_fxdayu_source/mod.py follows ---
encoding: utf-8 2 | from datetime import datetime 3 | 4 | from rqalpha.const import RUN_TYPE, PERSIST_MODE 5 | from rqalpha.interface import AbstractMod 6 | from rqalpha.utils.disk_persist_provider import DiskPersistProvider 7 | from rqalpha.utils.i18n import gettext as _ 8 | from rqalpha.utils.logger import user_system_log, system_log 9 | 10 | from rqalpha_mod_fxdayu_source.const import DataSourceType 11 | from rqalpha_mod_fxdayu_source.data_source.common import CacheMixin 12 | from rqalpha_mod_fxdayu_source.data_source.common.realtime import RealtimeDataSource 13 | from rqalpha_mod_fxdayu_source.event_source import IntervalEventSource, RealTimeEventSource 14 | from rqalpha_mod_fxdayu_source.inday_bars.quantos import QuantOsIndayBars 15 | from rqalpha_mod_fxdayu_source.inday_bars.redis import RedisIndayBars 16 | from rqalpha_mod_fxdayu_source.price_board import StockLimitUpDownPriceBoard 17 | 18 | 19 | class FxdayuSourceMod(AbstractMod): 20 | def __init__(self): 21 | self._old_cache_length = CacheMixin.CACHE_LENGTH 22 | self._old_max_cache_space = CacheMixin.MAX_CACHE_SPACE 23 | 24 | def start_up(self, env, mod_config): 25 | env.set_price_board(StockLimitUpDownPriceBoard()) 26 | type_ = DataSourceType(mod_config.source) 27 | if type_ in [DataSourceType.MONGO, DataSourceType.REAL_TIME]: 28 | from rqalpha_mod_fxdayu_source.data_source.mongo import MongoDataSource, MongoCacheDataSource 29 | args = (env.config.base.data_bundle_path, mod_config.mongo_url) 30 | data_source_cls = MongoCacheDataSource if mod_config.enable_cache else MongoDataSource 31 | elif type_ == DataSourceType.BUNDLE: 32 | from rqalpha_mod_fxdayu_source.data_source.bundle import BundleCacheDataSource, BundleDataSource 33 | args = (env.config.base.data_bundle_path, mod_config.bundle_path) 34 | data_source_cls = BundleCacheDataSource if mod_config.enable_cache else BundleDataSource 35 | elif type_ == DataSourceType.QUANTOS: 36 | from rqalpha_mod_fxdayu_source.data_source.quantos import QuantOsSource, 
QuantOsCacheSource 37 | args = (env.config.base.data_bundle_path, mod_config.quantos_url, 38 | mod_config.quantos_user, mod_config.quantos_token) 39 | data_source_cls = QuantOsCacheSource if mod_config.enable_cache else QuantOsSource 40 | else: 41 | raise RuntimeError("data source type [%s] is not supported" % mod_config.source) 42 | if mod_config.enable_cache: 43 | if mod_config.cache_length: 44 | CacheMixin.set_cache_length(int(mod_config.cache_length)) 45 | if mod_config.max_cache_space: 46 | CacheMixin.set_max_cache_space(int(mod_config.max_cache_space)) 47 | data_source = data_source_cls(*args) 48 | mod_config.redis_uri = mod_config.redis_url # fit rqalpha 49 | if env.config.base.run_type is RUN_TYPE.BACKTEST and env.config.base.persist_mode == PERSIST_MODE.ON_NORMAL_EXIT: 50 | # generate user context using backtest 51 | persist_provider = DiskPersistProvider(mod_config.persist_path) 52 | env.set_persist_provider(persist_provider) 53 | 54 | is_real_time = env.config.base.run_type in (RUN_TYPE.PAPER_TRADING, RUN_TYPE.LIVE_TRADING) 55 | if is_real_time or type_ == DataSourceType.REAL_TIME: 56 | user_system_log.warn(_("[Warning] When you use this version of RealtimeTradeMod, history_bars can only " 57 | "get data from yesterday.")) 58 | if type_ == DataSourceType.QUANTOS: 59 | inday_bars = QuantOsIndayBars(mod_config.quantos_url, 60 | mod_config.quantos_user, 61 | mod_config.quantos_token) 62 | elif mod_config.redis_url: 63 | inday_bars = RedisIndayBars(mod_config.redis_url) 64 | system_log.info(_("RealtimeTradeMod using market from redis")) 65 | else: 66 | raise RuntimeError("No Inday bar data source with valid config") 67 | data_source = RealtimeDataSource(inday_bars=inday_bars, hist_source=data_source) 68 | if is_real_time: 69 | event_source = RealTimeEventSource(mod_config.fps, mod_config) 70 | # add persist 71 | persist_provider = DiskPersistProvider(mod_config.persist_path) 72 | env.set_persist_provider(persist_provider) 73 | 74 | env.config.base.persist = 
True 75 | env.config.base.persist_mode = PERSIST_MODE.REAL_TIME 76 | else: 77 | event_source = IntervalEventSource(env) 78 | env.set_data_source(data_source) 79 | # a patch to start_date allowed it to be setted large than last trade date. 80 | if env.config.base.start_date == datetime.now().date(): 81 | trading_dates = data_source.get_trading_calendar() 82 | pos = trading_dates.searchsorted(env.config.base.start_date) 83 | if trading_dates[pos].to_pydatetime().date() != env.config.base.start_date: 84 | env.config.base.start_date = trading_dates[max(0, pos - 1)].to_pydatetime().date() 85 | env.set_event_source(event_source) 86 | 87 | def tear_down(self, code, exception=None): 88 | CacheMixin.set_cache_length(self._old_cache_length) 89 | CacheMixin.set_max_cache_space(self._old_max_cache_space) 90 | -------------------------------------------------------------------------------- /rqalpha_mod_fxdayu_source/price_board.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from rqalpha.const import INSTRUMENT_TYPE 3 | from rqalpha.core.bar_dict_price_board import BarDictPriceBoard 4 | 5 | 6 | class StockLimitUpDownPriceBoard(BarDictPriceBoard): 7 | def __init__(self): 8 | super(StockLimitUpDownPriceBoard, self).__init__() 9 | self._previous_close = {} 10 | 11 | def _get_prev_close(self, instrument): 12 | order_book_id = instrument.order_book_id 13 | date = self._env.data_proxy.get_previous_trading_date(self._env.calendar_dt) 14 | if order_book_id not in self._previous_close or date > self._previous_close[order_book_id][0]: 15 | bar = self._env.data_source.history_bars( 16 | instrument, 1, "1d", "close", date, adjust_type="none" 17 | ) 18 | if bar is not None: 19 | prev_close = np.squeeze(bar) 20 | else: 21 | prev_close = np.nan 22 | self._previous_close[order_book_id] = (date, prev_close) 23 | return self._previous_close[order_book_id][1] 24 | 25 | def _get_limit_up(self, instrument): 26 | return 
round(self._get_prev_close(instrument) * 1.1, 2) 27 | 28 | def _get_limit_down(self, instrument): 29 | return round(self._get_prev_close(instrument) * 0.9, 2) 30 | 31 | def get_limit_up(self, order_book_id): 32 | instrument = self._env.get_instrument(order_book_id) 33 | if instrument.enum_type in [INSTRUMENT_TYPE.CS, INSTRUMENT_TYPE.INDX]: 34 | return self._get_limit_up(instrument) 35 | else: 36 | return super(StockLimitUpDownPriceBoard, self).get_limit_up(order_book_id) 37 | 38 | def get_limit_down(self, order_book_id): 39 | instrument = self._env.get_instrument(order_book_id) 40 | if instrument.enum_type in [INSTRUMENT_TYPE.CS, INSTRUMENT_TYPE.INDX]: 41 | return self._get_limit_down(instrument) 42 | else: 43 | return super(StockLimitUpDownPriceBoard, self).get_limit_down(order_book_id) 44 | -------------------------------------------------------------------------------- /rqalpha_mod_fxdayu_source/share/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xingetouzi/rqalpha-mod-fxdayu-source/1595df9cc409c3bc8bad2bfcfda5b37d8d42739b/rqalpha_mod_fxdayu_source/share/__init__.py -------------------------------------------------------------------------------- /rqalpha_mod_fxdayu_source/share/astock_minute_reader.py: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | import os 4 | from lru import LRU 5 | 6 | import bcolz 7 | import numba as nb 8 | import numpy as np 9 | import pandas as pd 10 | import six 11 | from rqalpha.data.converter import StockBarConverter 12 | from rqalpha.utils.datetime_func import convert_dt_to_int 13 | from zipline.data._minute_bar_internal import find_position_of_minute 14 | from zipline.data.minute_bars import BcolzMinuteBarReader 15 | from zipline.gens.sim_engine import NANOS_IN_MINUTE 16 | 17 | from .utils import FXDAYU_BUNDLE_PATH 18 | from .trading_calendar import ASTOCK_TRADING_CALENDAR as _ 19 | from 
.trading_session import ASTOCK_TRADING_SESSION 20 | from .utils import sid_subdir_path as _sid_subdir_path, calc_minute_index as _calc_minute_index 21 | 22 | _ # register the ASTOCK_TRADING_CALENDAR 23 | 24 | 25 | class AStockBcolzMinuteBarReader(BcolzMinuteBarReader): 26 | FIELDS = ["open", "high", "low", "close", "volume"] 27 | 28 | def __init__(self, rootdir=FXDAYU_BUNDLE_PATH, sid_cache_size=1000, converter=StockBarConverter, 29 | trading_session=ASTOCK_TRADING_SESSION): 30 | minutes_dir = os.path.join(rootdir if rootdir is not None else FXDAYU_BUNDLE_PATH, "minutes") 31 | super(AStockBcolzMinuteBarReader, self).__init__(minutes_dir, sid_cache_size=sid_cache_size) 32 | self._index_skip_suspending = LRU(sid_cache_size) 33 | self._converter = converter 34 | self._minute_index = _calc_minute_index(self._market_opens, trading_session) 35 | 36 | def _get_carray_path(self, sid, field): 37 | sid, pa_dir = sid.split(".") 38 | return os.path.join(self._rootdir, pa_dir, _sid_subdir_path(int(sid)), field) 39 | 40 | def _open_minute_file(self, field, sid): 41 | try: 42 | carray = self._carrays[field][sid] 43 | except KeyError: 44 | carray = self._carrays[field][sid] = \ 45 | bcolz.carray(rootdir=self._get_carray_path(sid, field), 46 | mode='r') 47 | 48 | return carray 49 | 50 | def _find_position_of_minute(self, minute_dt): 51 | """ 52 | Internal method that returns the position of the given minute in the 53 | list of every trading minute since market open of the first trading 54 | day. Adjusts non market minutes to the last close. 55 | 56 | ex. this method would return 1 for 2002-01-02 9:32 AM Eastern, if 57 | 2002-01-02 is the first trading day of the dataset. 58 | 59 | Parameters 60 | ---------- 61 | minute_dt: pd.Timestamp 62 | The minute whose position should be calculated. 63 | 64 | Returns 65 | ------- 66 | int: The position of the given minute in the list of all trading 67 | minutes since market open on the first trading day. 
68 | """ 69 | return find_position_of_minute( 70 | self._market_open_values, 71 | self._market_close_values, 72 | minute_dt.value / NANOS_IN_MINUTE, 73 | self._minutes_per_day, 74 | True, 75 | ) 76 | 77 | def _filtered_index(self, instrument): 78 | # TODO 确认是否跳过日内涨跌停 79 | if instrument not in self._index_skip_suspending: 80 | carray = self._open_minute_file("close", instrument) 81 | sub_index = bcolz.eval("carray != 0", vm="numexpr") 82 | index = self._minute_index[:len(sub_index)][sub_index[:]] 83 | self._index_skip_suspending[instrument] = index 84 | return self._index_skip_suspending[instrument] 85 | 86 | def get_dt_slice(self, instrument, start_dt=None, end_dt=None, length=None, skip_suspended=True): 87 | """ 88 | 89 | Parameters 90 | ---------- 91 | instrument 92 | start_dt 93 | end_dt 94 | length 95 | skip_suspended 96 | 97 | Returns 98 | ------- 99 | 100 | """ 101 | if not start_dt and not end_dt and not length: 102 | raise RuntimeError("At least two of start_dt, end_dt and length must be given") 103 | if not (start_dt and end_dt): 104 | if skip_suspended: 105 | index = self._filtered_index(instrument) 106 | else: 107 | index = self._minute_index 108 | if end_dt and length: 109 | start_dt = index[max(0, np.searchsorted(index, end_dt, side="right") - length)] 110 | elif start_dt and length: 111 | end_dt = index[min(index.size - 1, np.searchsorted(index, start_dt) + length)] 112 | slicer = self._minute_index.slice_indexer(start_dt, end_dt) 113 | return slicer.start, max(slicer.stop, slicer.start) 114 | 115 | @staticmethod 116 | @nb.jit 117 | def numba_loops_ffill(arr): 118 | """Numba decorator solution provided by shx2. 
119 | 120 | Parameters 121 | ---------- 122 | arr 123 | """ 124 | out = arr.copy() 125 | for row_idx in range(out.shape[0]): 126 | for col_idx in range(1, out.shape[1]): 127 | if np.isnan(out[row_idx, col_idx]): 128 | out[row_idx, col_idx] = out[row_idx, col_idx - 1] 129 | return out 130 | 131 | @staticmethod 132 | @nb.jit 133 | def numba_loops_dropna(arr): 134 | mask = np.full((arr.shape[0], len(arr.dtype)), True, dtype=np.bool) 135 | for n, name in enumerate(arr.dtype.names): 136 | mask[:, n] = ~np.isnan(arr[name]) 137 | mask = mask.min(axis=1) 138 | return arr[mask] 139 | 140 | def raw_history_bars(self, instrument, start_dt=None, end_dt=None, length=None, fields=None, skip_suspended=True): 141 | """ 142 | 143 | Parameters 144 | ---------- 145 | instrument 146 | start_dt 147 | end_dt 148 | length 149 | fields 150 | skip_suspended 151 | 152 | Returns 153 | ------- 154 | 155 | """ 156 | start_idx, end_idx = self.get_dt_slice(instrument, start_dt, end_dt, length, skip_suspended) 157 | if fields is None: 158 | fields_ = self.FIELDS 159 | elif isinstance(fields, six.string_types): 160 | fields_ = [fields] 161 | else: 162 | fields_ = [field for field in fields if field != "datetime"] 163 | num_minutes = end_idx - start_idx 164 | types = {f: self._converter.field_type(f, np.float64) for f in fields_} 165 | dtype = np.dtype([("datetime", np.uint64)] + 166 | [(f, self._converter.field_type(f, np.float64)) for f in fields_]) 167 | shape = (num_minutes,) 168 | result = np.empty((num_minutes,), dtype=dtype) 169 | for field in fields_: 170 | if field != 'volume': 171 | out = np.full(shape, np.nan, dtype=types[field]) 172 | else: 173 | out = np.zeros(shape, dtype=types[field]) 174 | carray = self._open_minute_file(field, instrument) 175 | values = carray[start_idx: end_idx] 176 | where = values != 0 177 | if field != 'volume': 178 | out[:len(where)][where] = values[where] * self._ohlc_ratio_inverse_for_sid(instrument) 179 | else: 180 | out[:len(where)][where] = values[where] 
181 | result[field] = out 182 | result["datetime"] = list(map(convert_dt_to_int, self._minute_index[start_idx: end_idx].to_pydatetime())) 183 | result = result if fields is None else result[fields] 184 | return self.numba_loops_dropna(result) if skip_suspended else self.numba_loops_ffill(result) 185 | 186 | def load_raw_arrays(self, instruments, start_dt=None, end_dt=None, fields=None, length=None): 187 | """ 188 | Load raw arrays from bundles 189 | Mainly used for Data API 190 | 191 | Parameters 192 | ---------- 193 | instruments: 194 | list of instrument, The asset identifiers in the window. 195 | start_dt: Timestamp 196 | Beginning of the window range. 197 | end_dt: Timestamp 198 | End of the window range. 199 | length: 200 | Length of the window range. 201 | fields : list of str 202 | 'open', 'high', 'low', 'close', or 'volume' 203 | 204 | Returns 205 | ------- 206 | list of np.ndarray 207 | A list with an entry per field of ndarrays with shape 208 | (minutes in range, sids) with a dtype of float64, containing the 209 | values for the respective field over start and end dt range. 
210 | """ 211 | # 修改部分 212 | ### 213 | if not (start_dt or end_dt or length): 214 | raise RuntimeError("At least two of start_dt, end_dt and length must be given") 215 | if end_dt is not None: 216 | end_idx = self._find_position_of_minute(end_dt) 217 | if length is not None: 218 | start_idx = end_idx - length + 1 219 | else: 220 | start_idx = self._find_position_of_minute(start_dt) 221 | else: 222 | start_idx = self._find_position_of_minute(start_dt) 223 | end_idx = start_idx + length - 1 224 | ### 225 | 226 | num_minutes = (end_idx - start_idx + 1) 227 | 228 | results = [] 229 | 230 | # 修改部分 231 | ### 232 | # indices_to_exclude = self._exclusion_indices_for_range( 233 | # start_idx, end_idx) 234 | indices_to_exclude = None # 暂时不处理交易时间奇异的情况 235 | if indices_to_exclude is not None: 236 | for excl_start, excl_stop in indices_to_exclude: 237 | length = excl_stop - excl_start + 1 238 | num_minutes -= length 239 | ### 240 | 241 | shape = num_minutes, len(instruments) 242 | if fields is None: 243 | fields = ["open", "high", "low", "close", "volume"] 244 | for field in fields: 245 | if field != 'volume': 246 | out = np.full(shape, np.nan) 247 | else: 248 | out = np.zeros(shape, dtype=np.uint32) 249 | 250 | for i, sid in enumerate(instruments): 251 | carray = self._open_minute_file(field, sid) 252 | values = carray[start_idx:end_idx + 1] 253 | if indices_to_exclude is not None: 254 | for excl_start, excl_stop in indices_to_exclude[::-1]: 255 | excl_slice = np.s_[ 256 | excl_start - start_idx:excl_stop - start_idx + 1] 257 | values = np.delete(values, excl_slice) 258 | 259 | where = values != 0 260 | # first slice down to len(where) because we might not have 261 | # written data for all the minutes requested 262 | if field != 'volume': 263 | out[:len(where), i][where] = ( 264 | values[where] * self._ohlc_ratio_inverse_for_sid(sid)) 265 | else: 266 | out[:len(where), i][where] = values[where] 267 | results.append(out) 268 | return results 269 | 270 | def 
available_data_range(self): 271 | return self.calendar.first_session.to_pydatetime().date(), self.calendar.last_session.to_pydatetime().date() 272 | -------------------------------------------------------------------------------- /rqalpha_mod_fxdayu_source/share/mongo_handler.py: -------------------------------------------------------------------------------- 1 | # encoding:utf-8 2 | from pymongo.mongo_client import database 3 | import pandas as pd 4 | import pymongo 5 | 6 | 7 | class DataHandler(object): 8 | def write(self, *args, **kwargs): 9 | pass 10 | 11 | def read(self, *args, **kwargs): 12 | pass 13 | 14 | def inplace(self, *args, **kwargs): 15 | pass 16 | 17 | def update(self, *args, **kwargs): 18 | pass 19 | 20 | def delete(self, *args, **kwargs): 21 | pass 22 | 23 | def table_names(self, *args, **kwargs): 24 | pass 25 | 26 | 27 | class MongoHandler(DataHandler): 28 | def __init__(self, host='localhost', port=27017, users=None, db=None, **kwargs): 29 | self.client = pymongo.MongoClient(host, port, **kwargs) 30 | self.db = self.client[db] if db else None 31 | 32 | if isinstance(users, dict): 33 | for db in users: 34 | self.client[db].authenticate(users[db]['id'], users[db]['password']) 35 | 36 | def _locate(self, collection, db=None): 37 | if isinstance(collection, database.Collection): 38 | return collection 39 | else: 40 | if db is None: 41 | return self.db[collection] 42 | elif isinstance(db, database.Database): 43 | return db[collection] 44 | else: 45 | return self.client[db][collection] 46 | 47 | def write(self, data, collection, db=None, index=None): 48 | """ 49 | 50 | :param data(DataFrame|list(dict)): 要存的数据 51 | :param collection(str): 表名 52 | :param db(str): 数据库名 53 | :param index(str): 以index值建索引, None不建索引 54 | :return: 55 | """ 56 | collection = self._locate(collection, db) 57 | data = self.normalize(data, index) 58 | collection.insert_many(data) 59 | if index: 60 | collection.create_index(index) 61 | return {'collection': collection.name, 
'start': data[0], 'end': data[-1]} 62 | 63 | def read(self, collection, db=None, index='datetime', start=None, end=None, length=None, **kwargs): 64 | """ 65 | 66 | :param collection(str): 表名 67 | :param db(str): 数据库名 68 | :param index(str): 读取索引方式 69 | :param start(datetime): 70 | :param end(datetime): 71 | :param length(int): 72 | :param kwargs: 73 | :return: 74 | """ 75 | 76 | if index: 77 | if start: 78 | fter = {index: {'$gte': start}} 79 | if end: 80 | fter[index]['$lte'] = end 81 | elif length: 82 | kwargs['limit'] = length 83 | kwargs['filter'] = fter 84 | elif length: 85 | kwargs['sort'] = [(index, -1)] 86 | kwargs['limit'] = length 87 | if end: 88 | kwargs['filter'] = {index: {'$lte': end}} 89 | elif end: 90 | kwargs['filter'] = {index: {'$lte': end}} 91 | 92 | db = self.db if db is None else self.client[db] 93 | 94 | if isinstance(collection, str): 95 | # print(collection) 96 | return self._read(db[collection], index, **kwargs) 97 | if isinstance(collection, database.Collection): 98 | return self._read(collection, index, **kwargs) 99 | elif isinstance(collection, (list, tuple)): 100 | panel = {} 101 | for col in collection: 102 | try: 103 | if isinstance(col, database.Collection): 104 | panel[col.name] = self._read(col, index, **kwargs) 105 | else: 106 | panel[col] = self._read(db[col], index, **kwargs) 107 | except KeyError as ke: 108 | if index in str(ke): 109 | pass 110 | else: 111 | raise ke 112 | return pd.Panel.from_dict(panel) 113 | else: 114 | return self._read(db[collection], index, **kwargs) 115 | 116 | @staticmethod 117 | def _read(collection, index=None, **kwargs): 118 | data = list(collection.find(**kwargs)) 119 | 120 | for key, value in kwargs.get('sort', []): 121 | if value < 0: 122 | data.reverse() 123 | data = pd.DataFrame(data) 124 | if not data.size: 125 | return data 126 | if index: 127 | data.index = data.pop(index) 128 | data.pop('_id') 129 | return data 130 | 131 | def inplace(self, data, collection, db=None, index='datetime'): 132 
| """ 133 | 以替换的方式存(存入不重复) 134 | 135 | :param data(DataFrame|list(dict)): 要存的数据 136 | :param collection(str): 表名 137 | :param db(str): 数据库名 138 | :param index(str): 默认以datetime为索引替换 139 | :return: 140 | """ 141 | 142 | collection = self._locate(collection, db) 143 | data = self.normalize(data, index) 144 | 145 | collection.delete_many({index: {'$gte': data[0][index], '$lte': data[-1][index]}}) 146 | collection.insert_many(data) 147 | collection.create_index(index) 148 | return {'collection': collection.name, 'start': data[0], 'end': data[-1]} 149 | 150 | def update(self, data, collection, db=None, index='datetime', how='$set'): 151 | collection = self._locate(collection, db) 152 | 153 | if isinstance(data, pd.DataFrame): 154 | if index in data.columns: 155 | data.index = data[index] 156 | for name, doc in data.iterrows(): 157 | collection.update_one({index: name}, {how: doc.to_dict()}) 158 | else: 159 | for doc in data: 160 | collection.update_one({index: doc.pop(index)}, doc) 161 | 162 | def delete(self, filter, collection, db=None): 163 | collection = self._locate(collection, db) 164 | collection.delete_many(filter) 165 | 166 | def normalize(self, data, index=None): 167 | if isinstance(data, pd.DataFrame): 168 | if index and (index not in data.columns): 169 | data[index] = data.index 170 | return [doc[1].to_dict() for doc in data.iterrows()] 171 | elif isinstance(data, dict): 172 | key, value = list(map(lambda *args: args, *data.iteritems())) 173 | return list(map(lambda *args: dict(map(lambda x, y: (x, y), key, args)), *value)) 174 | elif isinstance(data, pd.Series): 175 | if data.name is None: 176 | raise ValueError('name of series: data is None') 177 | name = data.name 178 | if index is not None: 179 | return list(map(lambda k, v: {index: k, name: v}, data.index, data)) 180 | else: 181 | return list(map(lambda v: {data.name: v}, data)) 182 | else: 183 | return data 184 | 185 | def table_names(self, db=None): 186 | if not db: 187 | return 
self.db.collection_names() 188 | else: 189 | return self.client[db].collection_names() 190 | -------------------------------------------------------------------------------- /rqalpha_mod_fxdayu_source/share/trading_calendar.py: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | import os 4 | from datetime import time 5 | from lru import LRU 6 | 7 | import numpy as np 8 | import pandas as pd 9 | from pytz import timezone 10 | from rqalpha.data.base_data_source import BaseDataSource 11 | from zipline.utils.calendars import register_calendar 12 | from zipline.utils.calendars.trading_calendar import TradingCalendar, days_at_time 13 | 14 | RQALPHA_ROOT = os.environ.get("RQALPHA_ROOT", os.path.expanduser("~/.rqalpha")) 15 | RQALPHA_BUNDLE_PATH = os.path.join(RQALPHA_ROOT, "bundle") 16 | _CALENDAR_NAME = "ASTOCK" 17 | 18 | start_default = pd.Timestamp('2012-05-01', tz='UTC') 19 | end_base = pd.Timestamp('today', tz='UTC') 20 | # Give an aggressive buffer for logic that needs to use the next trading 21 | # day or minute. 22 | end_default = end_base + pd.Timedelta(days=365) 23 | 24 | 25 | class RqalphaAStockTradingCalendar(TradingCalendar): 26 | def __init__(self, start=start_default, end=end_default, path=RQALPHA_BUNDLE_PATH): 27 | super(RqalphaAStockTradingCalendar, self).__init__() 28 | self._data_source = BaseDataSource(path) 29 | _all_days = self._data_source.get_trading_calendar() 30 | _all_days = _all_days[_all_days.slice_indexer(start, end)] 31 | # `DatetimeIndex`s of standard opens/closes for each day. 32 | self._opens = days_at_time(_all_days, self.open_time, self.tz, 33 | self.open_offset) 34 | self._closes = days_at_time( 35 | _all_days, self.close_time, self.tz, self.close_offset 36 | ) 37 | 38 | # In pandas 0.16.1 _opens and _closes will lose their timezone 39 | # information. This looks like it has been resolved in 0.17.1. 
40 | # http://pandas.pydata.org/pandas-docs/stable/whatsnew.html#datetime-with-tz # noqa 41 | self.schedule = pd.DataFrame( 42 | index=_all_days, 43 | columns=['market_open', 'market_close'], 44 | data={ 45 | 'market_open': self._opens, 46 | 'market_close': self._closes, 47 | }, 48 | dtype='datetime64[ns]', 49 | ) 50 | 51 | # Simple cache to avoid recalculating the same minute -> session in 52 | # "next" mode. Analysis of current zipline code paths show that 53 | # `minute_to_session_label` is often called consecutively with the same 54 | # inputs. 55 | self._minute_to_session_label_cache = LRU(1) 56 | 57 | self.market_opens_nanos = self.schedule.market_open.values. \ 58 | astype(np.int64) 59 | 60 | self.market_closes_nanos = self.schedule.market_close.values. \ 61 | astype(np.int64) 62 | 63 | self._trading_minutes_nanos = self.all_minutes.values. \ 64 | astype(np.int64) 65 | 66 | self.first_trading_session = _all_days[0] 67 | self.last_trading_session = _all_days[-1] 68 | 69 | @property 70 | def name(self): 71 | return _CALENDAR_NAME 72 | 73 | @property 74 | def tz(self): 75 | return timezone("UTC") 76 | 77 | @property 78 | def open_time(self): 79 | return time(9, 31) 80 | 81 | @property 82 | def close_time(self): 83 | return time(15, 00) 84 | 85 | 86 | ASTOCK_TRADING_CALENDAR = RqalphaAStockTradingCalendar() 87 | register_calendar(_CALENDAR_NAME, ASTOCK_TRADING_CALENDAR) 88 | -------------------------------------------------------------------------------- /rqalpha_mod_fxdayu_source/share/trading_session.py: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | from abc import ABCMeta, abstractmethod 4 | from datetime import date, datetime, time 5 | 6 | from six import with_metaclass 7 | 8 | 9 | class TradingSession(with_metaclass(ABCMeta)): 10 | def __init__(self): 11 | self._minuter_per_day = None 12 | 13 | @property 14 | @abstractmethod 15 | def sessions(self): 16 | raise NotImplementedError 17 | 18 | 
@property 19 | def minute_per_day(self): 20 | total = 0 21 | for offset, number in self.sessions: 22 | total += number 23 | return total 24 | 25 | 26 | class AStockTradingSession(TradingSession): 27 | @property 28 | def sessions(self): 29 | return [ 30 | (0, self.cal_delta_minute(time(9, 31), time(11, 30))), 31 | (self.cal_delta_minute(time(9, 31), time(13, 00)), self.cal_delta_minute(time(13, 1), time(15, 00))) 32 | ] 33 | 34 | def cal_delta_minute(self, start, end): 35 | """ 36 | 37 | Args: 38 | start(datetime.time): period start 39 | end(datetime.time): period end 40 | Returns: 41 | int: how many minutes between this period 42 | """ 43 | dt = datetime.combine(date.today(), end) - datetime.combine(date.today(), start) 44 | result = (dt.days * 24 * 60 + dt.seconds // 60) + 1 45 | if result < 0: 46 | raise RuntimeError("period end should be after period start") 47 | else: 48 | return result 49 | 50 | 51 | ASTOCK_TRADING_SESSION = AStockTradingSession() 52 | -------------------------------------------------------------------------------- /rqalpha_mod_fxdayu_source/share/utils.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import numpy as np 4 | import pandas as pd 5 | 6 | 7 | def sid_subdir_path(sid): 8 | """ 9 | Format subdir path to limit the number directories in any given 10 | subdirectory to 100. 11 | 12 | The number in each directory is designed to support at least 100000 13 | equities. 14 | 15 | Parameters 16 | ---------- 17 | sid : int 18 | Asset identifier. 19 | 20 | Returns 21 | ------- 22 | out : string 23 | A path for the bcolz rootdir, including subdirectory prefixes based on 24 | the padded string representation of the given sid. 25 | 26 | e.g. 
1 is formatted as 00/00/000001.bcolz 27 | 28 | """ 29 | padded_sid = format(sid, '06') 30 | return os.path.join( 31 | # subdir 2 00/XX 32 | padded_sid[0:2], 33 | # subdir 2 XXX/0 34 | padded_sid[2:4], 35 | "{0}.bcolz".format(str(padded_sid)) 36 | ) 37 | 38 | 39 | def calc_minute_index(market_opens, trading_session): 40 | """ 41 | Cal all trading minutes according to input daily market open and trading session information. 42 | 43 | Parameters 44 | ---------- 45 | market_opens: datetime64 array 46 | array of every day market open. 47 | trading_session: set -> list 48 | list of time offset in minutes for every trading session in a day. 49 | Returns 50 | ------- 51 | out : datetime64 array 52 | all trading minutes. 53 | """ 54 | minutes_per_day = trading_session.minute_per_day 55 | minutes = np.zeros(len(market_opens) * minutes_per_day, dtype="datetime64[ns]") 56 | deltas_lst = [] 57 | session_offsets = [] 58 | for offset, duration in trading_session.sessions: 59 | deltas_lst.append(np.arange(0, duration, dtype="timedelta64[m]")) 60 | session_offsets.append(pd.Timedelta(minutes=offset)) 61 | for i, marker_open in enumerate(market_opens): 62 | start = marker_open.asm8 63 | sessions = [] 64 | for deltas, session_offset in zip(deltas_lst, session_offsets): 65 | sessions.append(deltas + start + session_offset) 66 | minute_values = np.concatenate(sessions) 67 | start_ix = minutes_per_day * i 68 | end_ix = start_ix + minutes_per_day 69 | minutes[start_ix:end_ix] = minute_values 70 | return pd.to_datetime(minutes, utc=True, box=True) 71 | 72 | 73 | FXDAYU_ROOT = os.environ.get("FXDAYU_ROOT", os.path.expanduser("~/.fxdayu")) 74 | FXDAYU_BUNDLE_PATH = os.path.join(FXDAYU_ROOT, "bundle") 75 | -------------------------------------------------------------------------------- /rqalpha_mod_fxdayu_source/utils/__init__.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | 3 | import numpy as np 4 | import pandas as pd 5 | from 
rqalpha.const import DEFAULT_ACCOUNT_TYPE 6 | from rqalpha.utils import get_account_type 7 | from rqalpha.utils.datetime_func import convert_int_to_datetime 8 | 9 | 10 | class Singleton(type): 11 | SINGLETON_ENABLED = True 12 | 13 | def __init__(cls, *args, **kwargs): 14 | cls._instance = None 15 | super(Singleton, cls).__init__(*args, **kwargs) 16 | 17 | def __call__(cls, *args, **kwargs): 18 | if cls.SINGLETON_ENABLED: 19 | if cls._instance is None: 20 | cls._instance = super(Singleton, cls).__call__(*args, **kwargs) 21 | return cls._instance 22 | else: 23 | return cls._instance 24 | else: 25 | return super(Singleton, cls).__call__(*args, **kwargs) 26 | 27 | 28 | _freq_map = { 29 | "m": "T", 30 | "h": "H", 31 | "d": "D" 32 | } 33 | 34 | 35 | def _cal_date_range(start, end, freq): 36 | unit_freq = freq[-1] 37 | dates = pd.date_range(start, end, freq=freq[:-1] + _freq_map[unit_freq]) - pd.Timedelta(minutes=1) 38 | dates = dates.to_pydatetime() 39 | if dates.size: 40 | dates = dates[1:] 41 | if not dates.size or dates[-1] != end: 42 | dates = np.concatenate([dates, [end]]) 43 | return dates 44 | 45 | 46 | class InDayTradingPointIndexer(object): 47 | @staticmethod 48 | def get_a_stock_trading_points(trading_date, frequency): 49 | trading_points = set() 50 | current_dt = datetime.datetime.combine(trading_date, datetime.time(9, 31)) 51 | am_end_dt = current_dt.replace(hour=11, minute=30) 52 | pm_start_dt = current_dt.replace(hour=13, minute=1) 53 | pm_end_dt = current_dt.replace(hour=15, minute=0) 54 | sessions = [(current_dt, am_end_dt), (pm_start_dt, pm_end_dt)] 55 | for start, end in sessions: 56 | trading_points.update(_cal_date_range(start, end, frequency)) 57 | return trading_points 58 | 59 | @staticmethod 60 | def get_future_trading_points(env, trading_date, frequency): 61 | if frequency == "1m": 62 | trading_minutes = set() 63 | universe = env.get_universe() 64 | for order_book_id in universe: 65 | if get_account_type(order_book_id) == 
DEFAULT_ACCOUNT_TYPE.STOCK: 66 | continue 67 | trading_minutes.update(env.data_proxy.get_trading_minutes_for(order_book_id, trading_date)) 68 | return set([convert_int_to_datetime(minute) for minute in trading_minutes]) 69 | # TODO future hours 70 | return set() 71 | -------------------------------------------------------------------------------- /rqalpha_mod_fxdayu_source/utils/asyncio.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | 4 | def get_asyncio_event_loop(): 5 | try: 6 | return asyncio.get_event_loop() 7 | except RuntimeError: 8 | loop = asyncio.new_event_loop() 9 | asyncio.set_event_loop(loop) 10 | return loop 11 | -------------------------------------------------------------------------------- /rqalpha_mod_fxdayu_source/utils/converter.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | 3 | import numpy as np 4 | import pandas as pd 5 | from rqalpha.data.converter import StockBarConverter 6 | from rqalpha.utils.datetime_func import convert_dt_to_int, convert_int_to_datetime 7 | 8 | 9 | class DataFrameConverter(object): 10 | @classmethod 11 | def df2np(cls, df, fields=None): 12 | if fields is None: 13 | fields = ["datetime", "open", "high", "low", "close", "volume"] 14 | dtypes = [(f, StockBarConverter.field_type(f, df[f].dtype)) if f != "datetime" else ('datetime', np.uint64) 15 | for f in fields] 16 | if "datetime" in fields: 17 | dt = df["datetime"] 18 | df["datetime"] = np.empty(len(df), dtype=np.uint64) 19 | result = df[fields].values.ravel().view(dtype=np.dtype(dtypes)) 20 | if "datetime" in fields: 21 | result["datetime"] = dt.apply(convert_dt_to_int) 22 | return result[fields] 23 | 24 | @classmethod 25 | def empty(cls, fields=None): 26 | if fields is None: 27 | fields = ["datetime", "open", "high", "low", "close", "volume"] 28 | dtypes = [(f, StockBarConverter.field_type(f, np.float64)) if f != "datetime" else 
('datetime', np.uint64) 29 | for f in fields] 30 | return np.empty((0,), dtype=dtypes) 31 | 32 | @classmethod 33 | def np2df(cls, np_arr): 34 | df = pd.DataFrame(np_arr) 35 | df["datetime"] = df["datetime"].apply(convert_int_to_datetime) 36 | return df 37 | 38 | 39 | class QuantOsConverter(DataFrameConverter): 40 | @classmethod 41 | def df2np(cls, df, fields=None): 42 | # daily bar 43 | if "time" not in df or (df["time"] == 0).all(): 44 | df["time"] = 150000 45 | df["datetime"] = (df["trade_date"] * 1000000 + df["time"]).astype("int64").apply( 46 | lambda x: datetime.strptime(str(x), "%Y%m%d%H%M%S") 47 | ) 48 | return super(QuantOsConverter, cls).df2np(df, fields) 49 | -------------------------------------------------------------------------------- /rqalpha_mod_fxdayu_source/utils/instrument.py: -------------------------------------------------------------------------------- 1 | import os 2 | from pathlib import Path 3 | 4 | import pandas as pd 5 | from rqalpha.const import INSTRUMENT_TYPE 6 | from rqalpha.utils.logger import system_log 7 | 8 | path = Path(os.path.abspath(__file__)).parent.parent / "data" / "index_symbol_map.csv" 9 | _map_instrument_to_tushare = pd.read_csv(path).drop_duplicates("symbol_ricequant").set_index("symbol_ricequant") 10 | _suffix_map = { 11 | "XSHE": "SZ", 12 | "XSHG": "SH" 13 | } 14 | 15 | 16 | def instrument_to_tushare(instrument): 17 | """ 18 | 19 | Parameters 20 | ---------- 21 | instrument: rqalpha.model.instrument.Instrument 22 | 23 | Returns 24 | ------- 25 | string: tushare code of the instrument 26 | """ 27 | if instrument.enum_type in [INSTRUMENT_TYPE.INDX, INSTRUMENT_TYPE.CS]: 28 | if instrument.enum_type == INSTRUMENT_TYPE.INDX: 29 | try: 30 | return _map_instrument_to_tushare["symbol_tushare"].loc[instrument.order_book_id] 31 | except KeyError: 32 | # raise system_log.warning("Index %s may be not supported!" 
% instrument.order_book_id) 33 | pass 34 | code, suffix = instrument.order_book_id.split(".") 35 | return ".".join([code, _suffix_map[suffix]]) 36 | else: 37 | # TODO 期货等 38 | raise RuntimeError("Unsupported instrument type.") 39 | -------------------------------------------------------------------------------- /rqalpha_mod_fxdayu_source/utils/mongo.py: -------------------------------------------------------------------------------- 1 | from pymongo import MongoClient 2 | import pandas as pd 3 | from collections import Iterable 4 | import numpy as np 5 | 6 | 7 | def read(collection, filters=None, projection=None): 8 | return pd.DataFrame(get_docs(collection, filters, projection)).set_index("datetime") 9 | 10 | 11 | def get_docs(collection, filters=None, projection=None, fill=np.NaN): 12 | dct = {} 13 | if isinstance(projection, dict): 14 | projection['_id'] = 0 15 | projection["_l"] = 1 16 | elif isinstance(projection, Iterable): 17 | projection = dict.fromkeys(projection, 1) 18 | projection["_id"] = 0 19 | projection["_l"] = 1 20 | else: 21 | projection = {"_id": 0} 22 | cursor = collection.find(filters, projection) 23 | LENGTH = 0 24 | for doc in cursor: 25 | l = doc.pop('_l') 26 | LENGTH += l 27 | for key, values in doc.items(): 28 | if isinstance(values, list) and (len(values) == l): 29 | dct.setdefault(key, []).extend(values) 30 | for values in dct.values(): 31 | if len(values) != LENGTH: 32 | values.extend([fill]*l) 33 | return dct 34 | 35 | 36 | if __name__ == '__main__': 37 | client = MongoClient("192.168.0.102") 38 | print(read(client["Stock_1M"]["000001.XSHE"])) -------------------------------------------------------------------------------- /rqalpha_mod_fxdayu_source/utils/quantos.py: -------------------------------------------------------------------------------- 1 | import os 2 | import time 3 | import functools 4 | 5 | from rqalpha.utils.logger import user_system_log 6 | 7 | _api = None 8 | _user = None 9 | _token = None 10 | _max_retry = 3 11 | 12 | 
def ensure_api_login(func): 13 | @functools.wraps(func) 14 | def wrapper(*args, **kwargs): 15 | try: 16 | return func(*args, **kwargs) 17 | except QuantOsQueryError: 18 | api_login() 19 | return func(*args, **kwargs) 20 | return wrapper 21 | 22 | def api_login(): 23 | global _api 24 | 25 | retry = 0 26 | while retry < _max_retry: 27 | retry += 1 28 | try: 29 | _, msg = _api.login(_user, _token) 30 | code = msg.split(",")[0] 31 | if code != "0": 32 | raise QuantOsQueryError(msg) 33 | else: 34 | break 35 | except QuantOsQueryError as e: 36 | user_system_log.warn("[japs] Exception occurs when call api.login: %s" % e) 37 | if retry > _max_retry: 38 | raise e 39 | else: 40 | time.sleep(retry) 41 | 42 | class QuantOsQueryError(Exception): 43 | """Error occurrs when make query from quantos.""" 44 | 45 | class QuantOsDataApiMixin(object): 46 | def __init__(self, api_url=None, user=None, token=None): 47 | global _api, _user, _token 48 | from jaqs.data import DataApi 49 | if _api is None: 50 | url = api_url or os.environ.get("QUANTOS_URL", "tcp://data.quantos.org:8910") 51 | _user = user or os.environ.get("QUANTOS_USER") 52 | _token = token or os.environ.get("QUANTOS_TOKEN") 53 | _api = DataApi(addr=url) 54 | api_login() 55 | self._api = _api -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | try: 2 | from pip._internal.req import parse_requirements 3 | except ImportError: 4 | from pip.req import parse_requirements 5 | from os.path import dirname, join 6 | from setuptools import ( 7 | find_packages, 8 | setup, 9 | ) 10 | 11 | with open(join(dirname(__file__), 'VERSION.txt'), 'rb') as f: 12 | version = f.read().decode('ascii').strip() 13 | 14 | requirements = [str(ir.req) for ir in parse_requirements("requirements.txt", session=False)] 15 | setup( 16 | name='rqalpha-mod-fxdayu-source', 17 | version=version, 18 | description='RQAlpha DataSource Mod 
supporting reading day or minute bar from mongodb', 19 | packages=find_packages(exclude=["examples", "tests", "tests.*", "docs"]), 20 | author='BurdenBear', 21 | author_email='public@fxdayu.com', 22 | license='Apache License v2', 23 | package_data={'rqalpha_mod_fxdayu_source': ['data/*.*']}, 24 | url='https://github.com/xingetouzi/rqalpha-mod-fxdayu-source', 25 | install_requires=requirements, 26 | zip_safe=False, 27 | classifiers=[ 28 | 'Programming Language :: Python', 29 | 'Operating System :: Microsoft :: Windows', 30 | 'Operating System :: Unix', 31 | 'Programming Language :: Python :: 3.5', 32 | 'Programming Language :: Python :: 3.6', 33 | ], 34 | ) 35 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xingetouzi/rqalpha-mod-fxdayu-source/1595df9cc409c3bc8bad2bfcfda5b37d8d42739b/tests/__init__.py -------------------------------------------------------------------------------- /tests/common/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xingetouzi/rqalpha-mod-fxdayu-source/1595df9cc409c3bc8bad2bfcfda5b37d8d42739b/tests/common/__init__.py -------------------------------------------------------------------------------- /tests/common/source.py: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | import pathlib 4 | import unittest 5 | import itertools 6 | from functools import lru_cache 7 | from datetime import datetime, date, time 8 | 9 | import numpy as np 10 | import pandas as pd 11 | from rqalpha.const import INSTRUMENT_TYPE 12 | from rqalpha.model.instrument import Instrument 13 | from rqalpha.utils.datetime_func import convert_dt_to_int 14 | 15 | from rqalpha_mod_fxdayu_source.data_source.quantos import QuantOsSource 16 | 17 | indexes = { 18 | 
"000300.XSHG", # 中小板指 19 | "000016.XSHG", # 上证50 20 | "000905.XSHG", # 中证500 21 | "399006.XSHE", # 创业板指 22 | "399005.XSHE", # 中小板指 23 | } 24 | 25 | lengths = [10, 100] 26 | frequencies = ["1d", "1m", "5m", "15m"] 27 | 28 | 29 | class TestDataSourceMixin(object): 30 | # 初始化工作 31 | def __init__(self, *args, **kwargs): 32 | super(TestDataSourceMixin, self).__init__(*args, **kwargs) 33 | self.source = None 34 | 35 | @lru_cache(None) 36 | def get_stock(self): 37 | date = self.get_last_trading_date() 38 | for i in self.source.get_all_instruments(): 39 | if i.enum_type == INSTRUMENT_TYPE.CS: 40 | if not self.source.is_suspended(i.order_book_id, [date])[0]: 41 | return i 42 | 43 | raise RuntimeError("Data is missing!") 44 | 45 | @lru_cache(None) 46 | def get_letv(self): 47 | for i in self.source.get_all_instruments(): 48 | if i.order_book_id == "300104.XSHE": 49 | return i 50 | 51 | @lru_cache(None) 52 | def get_indexes(self): 53 | result = [] 54 | for i in self.source.get_all_instruments(): 55 | if i.enum_type == INSTRUMENT_TYPE.INDX and i.order_book_id in indexes: 56 | result.append(i) 57 | if len(result) == len(indexes): 58 | break 59 | return result 60 | 61 | @lru_cache(None) 62 | def get_last_trading_date(self): 63 | dates = self.source.get_trading_calendar() 64 | d = dates[np.searchsorted(dates, datetime.now()) - 2] 65 | return datetime.combine(d, time=time(hour=15)) 66 | 67 | def test_instrument(self): 68 | i1 = self.get_stock() 69 | i2 = self.get_letv() 70 | i3 = self.get_indexes() 71 | assert isinstance(i1, Instrument) and i1.enum_type == INSTRUMENT_TYPE.CS 72 | assert isinstance(i2, Instrument) and i2.order_book_id == "300104.XSHE" 73 | assert len(i3) == len(indexes) 74 | 75 | def test_data_range(self): 76 | start, end = self.source.available_data_range("1m") 77 | print(start, end) 78 | assert isinstance(start, date) and isinstance(end, date) 79 | 80 | def test_get_bar(self): 81 | instrument = self.get_stock() 82 | dt = self.get_last_trading_date() 83 | a1 = 
self.source.get_bar(instrument, dt, "1d") 84 | print(a1) 85 | a2 = self.source.get_bar(instrument, dt, "1m") 86 | print(a2) 87 | assert convert_dt_to_int(dt) == a1[0] 88 | assert convert_dt_to_int(dt) == a2[0] 89 | 90 | def test_stock(self): 91 | fields = ["datetime", "close", "low", "high", "open", "volume"] 92 | instrument = self.get_stock() 93 | dt = self.get_last_trading_date() 94 | for l, f in itertools.product(lengths, frequencies): 95 | data = self.source.history_bars(instrument, l, f, fields, dt, adjust_type=None) 96 | df = pd.DataFrame(data) 97 | print(instrument, l, f) 98 | print(df) 99 | assert set(df.columns) == set(fields) 100 | assert len(df) == l 101 | assert convert_dt_to_int(dt) == df["datetime"].iloc[-1] 102 | 103 | def test_suspended(self): 104 | fields = ["datetime", "close", "low", "high", "open", "volume"] 105 | instrument = self.get_letv() 106 | dt = datetime(year=2018, month=1, day=24, hour=15) 107 | for l, f in itertools.product(lengths, frequencies): 108 | data = self.source.history_bars(instrument, l, f, fields, dt, adjust_type=None) 109 | df = pd.DataFrame(data) 110 | print(instrument, l, f) 111 | print(df) 112 | assert set(df.columns) == set(fields) 113 | assert len(df) == l 114 | assert convert_dt_to_int(dt) == df["datetime"].iloc[-1] 115 | 116 | def test_index(self): 117 | fields = ["datetime", "close", "low", "high", "open", "volume"] 118 | instruments = self.get_indexes() 119 | dt = self.get_last_trading_date() 120 | for i, l, f in itertools.product(instruments, lengths, frequencies): 121 | data = self.source.history_bars(i, l, f, fields, dt, adjust_type=None) 122 | df = pd.DataFrame(data) 123 | print(i, l, f) 124 | print(df) 125 | assert set(df.columns) == set(fields) 126 | assert len(df) == l 127 | assert convert_dt_to_int(dt) == df["datetime"].iloc[-1] 128 | 129 | 130 | if __name__ == '__main__': 131 | unittest.main() 132 | -------------------------------------------------------------------------------- /tests/mongo/__init__.py: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/xingetouzi/rqalpha-mod-fxdayu-source/1595df9cc409c3bc8bad2bfcfda5b37d8d42739b/tests/mongo/__init__.py -------------------------------------------------------------------------------- /tests/mongo/test_cache_run.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import os 3 | import time 4 | from pathlib import Path 5 | 6 | from rqalpha import run 7 | 8 | path = Path(os.path.abspath(__file__)).parent.parent / "strategies" / "simple.py" 9 | frequency = "1m" 10 | 11 | config = { 12 | "base": { 13 | "start_date": "2015-12-17", 14 | "end_date": "2015-12-31", 15 | "accounts": {"stock": 100000}, 16 | "frequency": frequency, 17 | "benchmark": None, 18 | "strategy_file": str(path) 19 | }, 20 | "extra": { 21 | "log_level": "verbose", 22 | }, 23 | "mod": { 24 | "sys_analyser": { 25 | "enabled": True, 26 | # "report_save_path": ".", 27 | "plot": True 28 | }, 29 | "sys_simulation": { 30 | "enabled": True, 31 | # "matching_type": "last" 32 | }, 33 | "fxdayu_source": { 34 | "enabled": True, 35 | "source": "mongo", 36 | "mongo_url": "mongodb://192.168.0.101:27017", 37 | "enable_cache": True, 38 | "cache_length": 10000 39 | } 40 | } 41 | } 42 | 43 | 44 | if __name__ == "__main__": 45 | start = time.time() 46 | run(config=config) 47 | print("Time Cost: %s seconds" % (time.time() - start)) 48 | -------------------------------------------------------------------------------- /tests/mongo/test_run.py: -------------------------------------------------------------------------------- 1 | import os 2 | from pathlib import Path 3 | 4 | from rqalpha import run 5 | 6 | path = Path(os.path.abspath(__file__)).parent.parent / "strategies" / "simple.py" 7 | 8 | config = { 9 | "base": { 10 | "start_date": "2016-06-01", 11 | "end_date": "2016-06-05", 12 | "accounts": {"stock": 100000}, 13 | "frequency": "1m", 14 | 
"benchmark": None, 15 | "strategy_file": str(path) 16 | }, 17 | "extra": { 18 | "log_level": "verbose", 19 | }, 20 | "mod": { 21 | "sys_analyser": { 22 | "enabled": True, 23 | # "report_save_path": ".", 24 | "plot": True 25 | }, 26 | "sys_simulation": { 27 | "enabled": True, 28 | # "matching_type": "last" 29 | }, 30 | "fxdayu_source": { 31 | "enabled": True, 32 | "mongo_url": "mongodb://192.168.0.101:27017" 33 | } 34 | } 35 | } 36 | 37 | if __name__ == "__main__": 38 | # 您可以指定您要传递的参数 39 | run(config=config) 40 | -------------------------------------------------------------------------------- /tests/mongo/test_source.py: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | import os 3 | import pathlib 4 | import unittest 5 | 6 | from rqalpha_mod_fxdayu_source.data_source.mongo import MongoDataSource 7 | from tests.common.source import TestDataSourceMixin 8 | 9 | 10 | class TestMongoDataSource(unittest.TestCase, TestDataSourceMixin): 11 | # 初始化工作 12 | def setUp(self): 13 | path = pathlib.Path("~/.rqalpha/bundle").expanduser() 14 | self.source = MongoDataSource(str(path), os.environ.get("MONGO_URL")) 15 | 16 | def test_index(self): 17 | pass 18 | 19 | if __name__ == '__main__': 20 | unittest.main() 21 | -------------------------------------------------------------------------------- /tests/quantos/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xingetouzi/rqalpha-mod-fxdayu-source/1595df9cc409c3bc8bad2bfcfda5b37d8d42739b/tests/quantos/__init__.py -------------------------------------------------------------------------------- /tests/quantos/test_cache_run.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import os 3 | import time 4 | from pathlib import Path 5 | 6 | from rqalpha import run 7 | 8 | path = Path(os.path.abspath(__file__)).parent.parent / "strategies" / 
"simple.py" 9 | frequency = "1m" 10 | 11 | config = { 12 | "base": { 13 | "start_date": "2015-12-17", 14 | "end_date": "2015-12-31", 15 | "accounts": {"stock": 100000}, 16 | "frequency": frequency, 17 | "benchmark": None, 18 | "strategy_file": str(path) 19 | }, 20 | "extra": { 21 | "log_level": "verbose", 22 | }, 23 | "mod": { 24 | "sys_analyser": { 25 | "enabled": True, 26 | # "report_save_path": ".", 27 | "plot": True 28 | }, 29 | "sys_simulation": { 30 | "enabled": True, 31 | # "matching_type": "last" 32 | }, 33 | "fxdayu_source": { 34 | "enabled": True, 35 | "source": "quantos", 36 | } 37 | } 38 | } 39 | 40 | 41 | if __name__ == "__main__": 42 | start = time.time() 43 | run(config=config) 44 | print("Time Cost: %s seconds" % (time.time() - start)) 45 | -------------------------------------------------------------------------------- /tests/quantos/test_realtime.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import itertools 3 | 4 | from datetime import timedelta 5 | from rqalpha.api import * 6 | from rqalpha import run_func 7 | import pandas as pd 8 | from rqalpha.utils.datetime_func import convert_dt_to_int 9 | 10 | 11 | def init(context): 12 | logger.info("init") 13 | context.s1 = "000001.XSHE" 14 | update_universe(context.s1) 15 | context.fired = False 16 | 17 | 18 | def before_trading(context): 19 | pass 20 | 21 | 22 | def handle_bar(context, bar_dict): 23 | logger.info(bar_dict[context.s1]) 24 | bar = bar_dict[context.s1] 25 | now_int = convert_dt_to_int(context.now) 26 | bar_int = convert_dt_to_int(bar.datetime) 27 | try: 28 | assert - timedelta(minutes=1) < bar.datetime - context.now < timedelta(minutes=1) 29 | except AssertionError as e: 30 | print(now_int) 31 | print(bar) 32 | raise e 33 | frequencies = ["1m", "5m", "15m"] 34 | lengths = [10, 100] 35 | for l, f in itertools.product(lengths, frequencies): 36 | df1 = pd.DataFrame(history_bars(context.s1, l, f)) 37 | df2 = 
pd.DataFrame(history_bars(context.s1, l, f, include_now=True)) 38 | try: 39 | assert df2["datetime"].iloc[-1] == bar_int 40 | assert df1["datetime"].iloc[-1] == df2["datetime"].iloc[-1] or \ 41 | df1["datetime"].iloc[-1] == df2["datetime"].iloc[-2] 42 | except AssertionError as e: 43 | print(now_int) 44 | print(bar_int) 45 | print(df1) 46 | print(df2) 47 | raise e 48 | 49 | # test order 50 | if not context.fired: 51 | # order_percent并且传入1代表买入该股票并且使其占有投资组合的100% 52 | order_percent(context.s1, 1) 53 | context.fired = True 54 | else: 55 | order_percent(context.s1, 0) 56 | context.fired = False 57 | 58 | 59 | config = { 60 | "base": { 61 | "start_date": "2016-06-01", 62 | "end_date": "2016-06-05", 63 | "accounts": {"stock": 100000}, 64 | "frequency": "1m", 65 | "benchmark": None, 66 | "strategy_file": __file__, 67 | "run_type": "p" 68 | }, 69 | "extra": { 70 | "log_level": "verbose", 71 | }, 72 | "mod": { 73 | "sys_analyser": { 74 | "enabled": True, 75 | # "report_save_path": ".", 76 | "plot": True 77 | }, 78 | "fxdayu_source": { 79 | "enabled": True, 80 | "source": "quantos", 81 | "enable_cache": False 82 | } 83 | } 84 | } 85 | 86 | # 您可以指定您要传递的参数 87 | run_func(init=init, before_trading=before_trading, handle_bar=handle_bar, config=config) 88 | 89 | # 如果你的函数命名是按照 API 规范来,则可以直接按照以下方式来运行 90 | # run_func(**globals()) 91 | -------------------------------------------------------------------------------- /tests/quantos/test_run.py: -------------------------------------------------------------------------------- 1 | import os 2 | from pathlib import Path 3 | 4 | from rqalpha import run 5 | 6 | path = Path(os.path.abspath(__file__)).parent.parent / "strategies" / "simple.py" 7 | 8 | config = { 9 | "base": { 10 | "start_date": "2016-06-01", 11 | "end_date": "2016-06-05", 12 | "accounts": {"stock": 100000}, 13 | "frequency": "1m", 14 | "benchmark": None, 15 | "strategy_file": str(path) 16 | }, 17 | "extra": { 18 | "log_level": "verbose", 19 | }, 20 | "mod": { 21 | 
"sys_analyser": { 22 | "enabled": True, 23 | # "report_save_path": ".", 24 | "plot": True 25 | }, 26 | "sys_simulation": { 27 | "enabled": True, 28 | # "matching_type": "last" 29 | }, 30 | "fxdayu_source": { 31 | "enabled": True, 32 | "source": "quantos", 33 | } 34 | } 35 | } 36 | 37 | if __name__ == "__main__": 38 | # 您可以指定您要传递的参数 39 | run(config=config) 40 | -------------------------------------------------------------------------------- /tests/quantos/test_source.py: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | import pathlib 4 | import unittest 5 | 6 | from rqalpha_mod_fxdayu_source.data_source.quantos import QuantOsSource 7 | from tests.common.source import TestDataSourceMixin 8 | 9 | 10 | class TestQuantOsDataSource(unittest.TestCase, TestDataSourceMixin): 11 | # 初始化工作 12 | def setUp(self): 13 | path = pathlib.Path("~/.rqalpha/bundle").expanduser() 14 | self.source = QuantOsSource(str(path)) 15 | 16 | 17 | if __name__ == '__main__': 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /tests/strategies/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xingetouzi/rqalpha-mod-fxdayu-source/1595df9cc409c3bc8bad2bfcfda5b37d8d42739b/tests/strategies/__init__.py -------------------------------------------------------------------------------- /tests/strategies/simple.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import itertools 3 | 4 | from rqalpha.api import * 5 | import pandas as pd 6 | from rqalpha.utils.datetime_func import convert_dt_to_int 7 | 8 | 9 | def init(context): 10 | logger.info("init") 11 | context.s1 = "000001.XSHE" 12 | update_universe(context.s1) 13 | context.fired = False 14 | 15 | 16 | def before_trading(context): 17 | pass 18 | 19 | 20 | def handle_bar(context, bar_dict): 21 | bar 
= bar_dict[context.s1] 22 | print(bar) 23 | assert bar.datetime == context.now 24 | lengths = [5] 25 | frequencies = ["1m"] 26 | for l, f in itertools.product(lengths, frequencies): 27 | # print(pd.DataFrame(history_bars(context.s1, 5, "1d", include_now=True))) 28 | df = pd.DataFrame(history_bars(context.s1, l, f)) 29 | print(df) 30 | assert len(df) == l 31 | assert convert_dt_to_int(context.now) == df["datetime"].iloc[-1] 32 | if not context.fired: 33 | # order_percent并且传入1代表买入该股票并且使其占有投资组合的100% 34 | order_percent(context.s1, 1) 35 | context.fired = True 36 | -------------------------------------------------------------------------------- /tests/test_cache_source.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pathlib 3 | import unittest 4 | from datetime import datetime, timedelta 5 | 6 | import pandas as pd 7 | from dateutil.parser import parse 8 | from rqalpha.utils.datetime_func import convert_int_to_datetime, convert_dt_to_int 9 | 10 | from rqalpha_mod_fxdayu_source.data_source.mongo import MongoCacheDataSource 11 | 12 | 13 | class mytest(unittest.TestCase): 14 | # 初始化工作 15 | def setUp(self): 16 | self.path = pathlib.Path("~/.rqalpha/bundle").expanduser() 17 | self.mongo_url = os.environ.get("MONGO_URL") 18 | self._instrument = MongoCacheDataSource(self.path, self.mongo_url).get_all_instruments()[0] 19 | 20 | # 退出清理工作 21 | def tearDown(self): 22 | pass 23 | 24 | def test_instrument(self): 25 | print(self._instrument) 26 | 27 | def test_data_range(self): 28 | source = MongoCacheDataSource(self.path, self.mongo_url) 29 | print(source.available_data_range("1m")) 30 | 31 | def test_get_bar(self): 32 | source = MongoCacheDataSource(self.path, self.mongo_url) 33 | print(type(source.get_bar(self._instrument, datetime.now() - timedelta(days=2), "1d"))) 34 | print(source.get_bar(self._instrument, datetime.now(), "1m")) 35 | 36 | def test_history_bars(self): 37 | source = MongoCacheDataSource(self.path, 
self.mongo_url) 38 | data = source.history_bars(self._instrument, 10, "1m", 39 | ["datetime", "close", "low", "high", "open", "volume"], 40 | datetime.now()) 41 | source.clear_cache() 42 | print(pd.DataFrame(data)) 43 | 44 | def get_cache_info(self, source, frequency): 45 | cache = source._caches[(self._instrument.order_book_id, frequency)] 46 | return cache._data[0]["datetime"], cache._data[-1]["datetime"], len(cache._data) 47 | 48 | def test_raw_history_bars(self): 49 | source = MongoCacheDataSource(self.path, self.mongo_url) 50 | start = parse("2012-06-01 9:31:00") 51 | si = convert_dt_to_int(start) 52 | frequency = "1m" 53 | first = source.raw_history_bars(self._instrument, frequency, start_dt=start, 54 | length=source.CACHE_LENGTH) 55 | s, e, l = self.get_cache_info(source, frequency) 56 | assert s == si and l == source.CACHE_LENGTH 57 | data = source.raw_history_bars(self._instrument, frequency, 58 | end_dt=convert_int_to_datetime(first["datetime"][-1]), 59 | length=source.CACHE_LENGTH) 60 | s, e, l = self.get_cache_info(source, frequency) 61 | assert s == si and l == source.CACHE_LENGTH 62 | next_ = source.raw_history_bars(self._instrument, frequency, 63 | start_dt=convert_int_to_datetime(first["datetime"][5]), 64 | length=source.CACHE_LENGTH) 65 | s, e, l = self.get_cache_info(source, frequency) 66 | assert s == si and l == source.CACHE_LENGTH * 2 67 | assert (first == data).all() 68 | print(pd.DataFrame(next_)) 69 | 70 | 71 | if __name__ == '__main__': 72 | unittest.main() 73 | -------------------------------------------------------------------------------- /tests/test_events_source.py: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | import datetime 4 | import unittest 5 | 6 | import pandas as pd 7 | 8 | from rqalpha_mod_fxdayu_source.event_source import IntervalEventSource 9 | 10 | 11 | class TestEventSource(unittest.TestCase): 12 | # 初始化工作 13 | def setUp(self): 14 | pass 15 | 16 | # 退出清理工作 17 
    def tearDown(self):
        pass

    def test_trading_points(self):
        """Smoke-test trading-point generation for an odd 13-minute frequency."""
        date = datetime.datetime.now().date()
        # The trading points come back unordered; sort so the printed frame
        # is chronological.
        data = pd.DataFrame(sorted(list(IntervalEventSource._get_stock_trading_points(date, "13m"))))
        print(data)

if __name__ == '__main__':
    unittest.main()
-------------------------------------------------------------------------------- /tests/test_real_strategy.py: --------------------------------------------------------------------------------
# encoding:utf-8

import talib as ta
from rqalpha import run_file
from rqalpha.api import *

frequency = "1m"
report_path = ".report"


def calculate(close, period):
    """Return 1 if MA(period) is above MA(5*period) on the last bar, else 0."""
    # NOTE(review): original inline comment here was "第三发" ("third round");
    # its intent is unclear — confirm with the author.
    mas = ta.MA(close, period)
    mal = ta.MA(close, 5 * period)
    if mas[-1] > mal[-1]:
        return 1
    else:
        return 0


def init(context):
    """rqalpha init hook: set the traded stock and strategy parameters."""
    context.s1 = "000001.XSHE"
    context.PERIOD = 80
    context.stoplossmultipler = 0.97  # stop-loss threshold relative to avg cost
    context.takepofitmultipler = 4    # take-profit threshold relative to avg cost
    scheduler.run_daily(run_daily)


def statistic(close):
    """Score MA alignment over 20 lookback scales; result lies in [0, 100]."""
    count = 0
    for i in range(1, 21):
        count += calculate(close[-i * 5:], i)
    return count * 5


def run_daily(context, bar_dict):
    print("run daily :{}".format(context.now))


def handle_bar(context, bar_dict):
    # NOTE(review): "heatbeat" is a typo for "heartbeat"; left as-is because
    # it is runtime output, not a comment.
    print("heatbeat: {}".format(context.now))
    stop_loss(context, bar_dict)
    entry_exit(context, bar_dict)


def entry_exit(context, bar_dict):
    """Open/close the position when the MA score crosses its thresholds."""
    close = history_bars(context.s1, context.PERIOD + 1, frequency, 'close')
    if len(close) == context.PERIOD + 1:
        ma_statistic1 = statistic(close[:-1])  # score as of one bar ago
        ma_statistic0 = statistic(close[1:])   # score as of the current bar
        cur_position = context.portfolio.positions[context.s1].quantity
        shares = context.portfolio.cash / bar_dict[context.s1].close
        # Exit when the score drops through 50; enter when it rises through 65.
        if ma_statistic1 > 50 > ma_statistic0 and cur_position > 0:
            order_target_value(context.s1, 0)
        if ma_statistic1 < 65 < ma_statistic0 and cur_position
== 0:
            order_shares(context.s1, shares)


def stop_loss(context, bar_dict):
    """Close any position breaching the stop-loss or take-profit level."""
    for stock in context.portfolio.positions:
        avg_price = context.portfolio.positions[stock].avg_price
        if bar_dict[stock].last < avg_price * context.stoplossmultipler:
            order_target_percent(stock, 0)  # stop loss
        elif bar_dict[stock].last > avg_price * context.takepofitmultipler:
            order_target_percent(stock, 0)  # take profit


# rqalpha run configuration: 1-minute backtest over 2012-2016 against the
# fxdayu mongo data source, writing the analyser report under .report.
config = {
    "base": {
        "start_date": "2012-01-01",
        "end_date": "2016-12-01",
        "accounts": {'stock': 100000},
        "benchmark": None,
        "frequency": frequency,
        # "strategy_file_path": os.path.abspath(__file__)
    },
    "extra": {
        "log_level": "verbose",
    },
    "mod": {
        "sys_analyser": {
            "enabled": True,
            "report_save_path": ".report",
            "plot": True
        },
        "fxdayu_source": {
            "enabled": True,
            "mongo_url": "mongodb://192.168.0.101:27017,192.168.0.102:27017",
            "enable_cache": True,
            "cache_length": 10000
        }
    }
}

# Alternative data-source configuration kept for reference:
# config["mod"]["fxdayu_source"] = {
#     "enabled": True,
#     "source": "bundle",
#     "enable_cache": True,
#     "cache_length": 10000
# }

if __name__ == "__main__":
    import time
    import os

    st = time.time()
    os.makedirs(report_path, exist_ok=True)
    run_file(__file__, config)
    print("Time Cost: %s seconds" % (time.time() - st))
-------------------------------------------------------------------------------- /tests/test_redis_data_source.py: --------------------------------------------------------------------------------
import os
import unittest
from datetime import datetime, timedelta, time

from rqalpha.utils.datetime_func import convert_dt_to_int

from rqalpha_mod_fxdayu_source.data_source.common.realtime import RedisDataSource
from rqalpha_mod_fxdayu_source.data_source.mongo import MongoDataSource

# Test endpoints come from the environment so different hosts can be used.
RQALPHA_ROOT =
os.environ.get("RQALPHA_ROOT")
REDIS_URL = os.environ.get("REDIS_URL")


class TestRawHistoryBars(unittest.TestCase):
    # Build a realtime Redis source backed by a Mongo history source.
    def setUp(self):
        path = os.path.join(RQALPHA_ROOT, "bundle")
        history_source = self.source = MongoDataSource(str(path), mongo_url="mongodb://192.168.0.101:27017")
        self.source = RedisDataSource(path, REDIS_URL, history_source)
        self.instrument = self.source.get_all_instruments()[0]

    def test_end_length_with_history(self):
        """(end, length) query long enough to span back into yesterday's history."""
        try:
            start = None
            end = datetime.now()
            ei = convert_dt_to_int(end)
            today_i = ei // 1000000 * 1000000  # int-encoded midnight of today
            yesterday_i = convert_dt_to_int((end - timedelta(days=1)).replace(hour=0, minute=0, second=0))
            frequency = "1m"
            length = 300
            data = self.source.raw_history_bars(self.instrument, frequency, end_dt=end,
                                                length=length)
            dts = data["datetime"]
            # First bar of today is 09:31; last bar of yesterday is 15:00.
            assert dts[dts > today_i][0] == today_i + 93100
            assert dts[dts < today_i][-1] == yesterday_i + 150000
            assert len(data) == length
            assert 0 < dts[-1] - ei < 100
        except Exception as e:
            # Dump the query context before re-raising to ease debugging.
            print("start: {}".format(start))
            print("end: {}".format(end))
            print("length: {}".format(length))
            print("data:\n{}".format(data))
            raise e

    def test_end_length_without_history(self):
        """(end, length) query small enough to be served from Redis alone."""
        end = datetime.now()
        ei = convert_dt_to_int(end)
        frequency = "1m"
        data = self.source.raw_history_bars(self.instrument, frequency, end_dt=end,
                                            length=2)
        assert len(data) == 2
        assert data["datetime"][-1] - ei < 100

    def test_start_end_with_history(self):
        """(start, end) query whose start lies in yesterday's history."""
        end = datetime.now()
        start = datetime.combine(end.date() - timedelta(days=1), time(hour=9, minute=33))
        frequency = "1m"
        ei = convert_dt_to_int(end)
        si = convert_dt_to_int(start)
        today_i = ei // 1000000 * 1000000
        yesterday_i = si // 1000000 * 1000000
        data = self.source.raw_history_bars(self.instrument, frequency, start_dt=start, end_dt=end)
        dts = data["datetime"]
        assert dts[dts > today_i][0] == today_i + 93100
        assert dts[dts < today_i][-1] == yesterday_i + 150000
        assert dts[0] == si
        assert 0 < dts[-1] - ei < 100

    def test_start_end_without_history(self):
        """(start, end) query entirely inside today's realtime data."""
        end = datetime.now()
        start = datetime.combine(end.date(), time=time(hour=9, minute=33))
        ei = convert_dt_to_int(end)
        si = convert_dt_to_int(start)
        frequency = "1m"
        data = self.source.raw_history_bars(self.instrument, frequency, start_dt=start, end_dt=end)
        dts = data["datetime"]
        if start > end:
            # Test ran before 09:33: the requested window is empty by construction.
            assert len(data) == 0
        else:
            assert dts[0] == si
            assert 0 < dts[-1] - ei < 100

    def test_start_length_with_history(self):
        """(start, length) query starting at yesterday's opening bar."""
        end = datetime.now()
        start = datetime.combine(end.date() - timedelta(days=1), time=time(hour=9, minute=31))
        ei = convert_dt_to_int(end)
        si = convert_dt_to_int(start)
        today_i = ei // 1000000 * 1000000
        yesterday_i = si // 1000000 * 1000000
        length = 300
        frequency = "1m"
        data = self.source.raw_history_bars(self.instrument, frequency, start_dt=start, length=length)
        dts = data["datetime"]
        assert dts[dts > today_i][0] == today_i + 93100
        assert dts[dts < today_i][-1] == yesterday_i + 150000
        assert dts[0] == si
        assert len(dts) == length

    def test_start_length_without_history(self):
        """(start, length) query served from today's realtime data only."""
        end = datetime.now()
        start = datetime.combine(end.date(), time=time(hour=9, minute=31))
        ei = convert_dt_to_int(end)
        si = convert_dt_to_int(start)
        frequency = "1m"
        length = 2
        data = self.source.raw_history_bars(self.instrument, frequency, start_dt=start, length=length)
        dts = data["datetime"]
        if len(dts) and start <= end:
            assert dts[0] == si
            assert len(dts) == length


if __name__ == '__main__':
    unittest.main()
--------------------------------------------------------------------------------
/tests/test_run_realtime.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | from rqalpha.api import * 4 | from rqalpha import run_func 5 | import pandas as pd 6 | 7 | 8 | def init(context): 9 | logger.info("init") 10 | context.s1 = "000001.XSHE" 11 | update_universe(context.s1) 12 | context.fired = False 13 | 14 | 15 | def before_trading(context): 16 | pass 17 | 18 | 19 | def handle_bar(context, bar_dict): 20 | logger.info(bar_dict[context.s1]) 21 | print(bar_dict[context.s1]) 22 | # print(pd.DataFrame(history_bars(context.s1, 5, "1d", include_now=True))) 23 | print(pd.DataFrame(history_bars(context.s1, 5, "5m"))) 24 | print(pd.DataFrame(history_bars(context.s1, 5, "5m", include_now=True))) 25 | if not context.fired: 26 | # order_percent并且传入1代表买入该股票并且使其占有投资组合的100% 27 | order_percent(context.s1, 1) 28 | context.fired = True 29 | else: 30 | order_percent(context.s1, 0) 31 | context.fired = False 32 | 33 | 34 | config = { 35 | "base": { 36 | "start_date": "2016-06-01", 37 | "end_date": "2016-06-05", 38 | "accounts": {"stock": 100000}, 39 | "frequency": "1m", 40 | "benchmark": None, 41 | "strategy_file": __file__, 42 | "run_type": "p" 43 | }, 44 | "extra": { 45 | "log_level": "verbose", 46 | }, 47 | "mod": { 48 | "sys_analyser": { 49 | "enabled": True, 50 | # "report_save_path": ".", 51 | "plot": True 52 | }, 53 | "fxdayu_source": { 54 | "enabled": True, 55 | "source": "quantos", 56 | "quantos_user": "", 57 | "quantos_token": "", 58 | } 59 | } 60 | } 61 | 62 | # 您可以指定您要传递的参数 63 | run_func(init=init, before_trading=before_trading, handle_bar=handle_bar, config=config) 64 | 65 | # 如果你的函数命名是按照 API 规范来,则可以直接按照以下方式来运行 66 | # run_func(**globals()) 67 | -------------------------------------------------------------------------------- /tests/utils/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/xingetouzi/rqalpha-mod-fxdayu-source/1595df9cc409c3bc8bad2bfcfda5b37d8d42739b/tests/utils/__init__.py -------------------------------------------------------------------------------- /tests/utils/quotation.py: -------------------------------------------------------------------------------- 1 | 2 | --------------------------------------------------------------------------------