├── .gitignore ├── LICENSE ├── QARealtimeCollector ├── __init__.py ├── calculator │ └── stock_calculator.py ├── clients │ ├── __init__.py │ ├── subscribe_clients.py │ └── test.py ├── collectors │ ├── __init__.py │ ├── crawlercollector.py │ ├── ctpbeecollector.py │ ├── ctpbeecollector_second.py │ ├── simcollector.py │ ├── simmarket.py │ ├── stockbarcollector.py │ ├── stockcollector.py │ └── wscollector.py ├── connector │ └── QATdx_adv.py ├── datahandler │ ├── __init__.py │ ├── realtime_resampler.py │ └── stock_resampler.py ├── management.py ├── setting.py ├── util.py ├── utils │ ├── common.py │ └── logconf.py └── webserver.py ├── README.md ├── docker ├── Dockerfile ├── start_collector.sh └── wait_for_it.sh ├── example └── stock_sub.py ├── requirements.txt └── setup.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | .pytest_cache/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | local_settings.py 57 | db.sqlite3 58 | 59 | # Flask stuff: 60 | instance/ 61 | .webassets-cache 62 | 63 | # Scrapy stuff: 64 | .scrapy 65 | 66 | # Sphinx documentation 67 | docs/_build/ 68 | 69 | # PyBuilder 70 | target/ 71 | 72 | # Jupyter Notebook 73 | .ipynb_checkpoints 74 | 75 | # pyenv 76 | .python-version 77 | 78 | # celery beat schedule file 79 | celerybeat-schedule 80 | 81 | # SageMath parsed files 82 | *.sage.py 83 | 84 | # Environments 85 | .env 86 | .venv 87 | env/ 88 | venv/ 89 | ENV/ 90 | env.bak/ 91 | venv.bak/ 92 | 93 | # Spyder project settings 94 | .spyderproject 95 | .spyproject 96 | 97 | # Rope project settings 98 | .ropeproject 99 | 100 | # mkdocs documentation 101 | /site 102 | 103 | # mypy 104 | .mypy_cache/ 105 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 Vincent yu 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /QARealtimeCollector/__init__.py: -------------------------------------------------------------------------------- 1 | __version__ = '0.0.10' 2 | __author__ = 'yutiansut' 3 | 4 | import click 5 | import threading 6 | from QARealtimeCollector.clients import QARTC_Clients 7 | from QARealtimeCollector.collectors import (QARTC_CtpBeeCollector, 8 | QARTC_CTPTickCollector, 9 | QARTC_RandomTick, QARTC_Stock, 10 | QARTC_WsCollector) 11 | from QARealtimeCollector.datahandler import QARTC_Resampler 12 | 13 | 14 | @click.command() 15 | @click.option('--code', default='rb2001') 16 | def start(code): 17 | r = QARTC_CtpBeeCollector(code) 18 | r.start() 19 | 20 | 21 | @click.command() 22 | @click.option('--code', default='rb2001') 23 | def start_ctp(code): 24 | r = QARTC_CTPTickCollector(code) 25 | r.start() 26 | 27 | 28 | @click.command() 29 | @click.option('--code', default='rb2001') 30 | def faststart(code): 31 | r = QARTC_CtpBeeCollector(code) 32 | r.start() 33 | r1 = QARTC_Resampler(code, '1min', 'tb') 34 | r1.start() 35 | r2 = QARTC_Resampler(code, '5min', 'tb') 36 | r2.start() 37 | r3 = QARTC_Resampler(code, '15min', 'tb') 38 | r3.start() 39 | r4 = QARTC_Resampler(code, '30min', 'tb') 40 | r4.start() 41 | r5 = QARTC_Resampler(code, '60min', 'tb') 42 | r5.start() 43 | 44 | 45 | @click.command() 46 | @click.option('--code', default='rb2001') 47 | @click.option('--freq', default='5min') 48 | @click.option('--model', default='tb') 49 | def resample(code, freq, model): 50 | r = QARTC_Resampler(code, freq, model) 51 | r.start() 52 | 53 | 54 | @click.command() 55 | @click.option('--code', default='rb2001') 56 | @click.option('--date', default='20191119') 57 | @click.option('--price', default=3646) 58 | @click.option('--interval', default=0) 59 | def random(code, date, price, interval): 60 | r = QARTC_RandomTick(code, date, price, interval) 61 | r.start() 62 | 63 | 64 | def stock_collector(): 65 | QARTC_Stock().start() 66 | -------------------------------------------------------------------------------- /QARealtimeCollector/calculator/stock_calculator.py: -------------------------------------------------------------------------------- 1 | # **************************************************************************** # 2 | # # 3 | # ::: :::::::: # 4 | # stock_calculator.py :+: :+: :+: # 5 | # +:+ +:+ +:+ # 6 | # By: zhongjy1992 +#+ +:+ +#+ # 7 | # +#+#+#+#+#+ +#+ # 8 | # Created: 2019/10/02 13:44:50 by zhongjy1992 #+# #+# # 9 | # Updated: 2019/10/02 21:41:13 by zhongjy1992 ### ########.fr # 10 | # # 11 | # **************************************************************************** # 12 | import json 13 | import pandas as pd 14 | import threading 15 | import time 16 | 17 | from QAPUBSUB.consumer import subscriber, subscriber_routing 18 | from QAPUBSUB.producer import publisher, publisher_topic 19 | from QUANTAXIS.QAEngine.QAThreadEngine import QA_Thread 20 | from QARealtimeCollector.setting import eventmq_ip 21 | from 
QARealtimeCollector.datahandler.realtime_resampler import NpEncoder 22 | from QUANTAXIS import QA_indicator_BOLL 23 | 24 | class RTCCaluator(QA_Thread): 25 | # 只写了个样例框架 26 | def __init__(self, code_list: list, frequency='60min', strategy="HS300Enhance", init_data=None): 27 | """ 28 | 29 | :param code_list: 30 | :param indicator_fun: 31 | :param frequency: 32 | :param strategy: 33 | """ 34 | super().__init__() 35 | if isinstance(frequency, float): 36 | self.frequency = int(frequency) 37 | elif isinstance(frequency, str): 38 | _frequency = frequency.replace('min', '') 39 | if str.isnumeric(_frequency): 40 | self.frequency = int(_frequency) 41 | else: 42 | print("unknown frequency: %s" % frequency) 43 | return 44 | elif isinstance(frequency, int): 45 | self.frequency = frequency 46 | else: 47 | print("unknown frequency: %s" % frequency) 48 | return 49 | self.market_data = init_data 50 | self.stock_code_list = code_list 51 | self.strategy = strategy 52 | 53 | # 接收stock 重采样的数据 54 | self.sub = subscriber( 55 | host=eventmq_ip, exchange='realtime_stock_{}_min'.format(self.frequency)) 56 | self.sub.callback = self.stock_min_callback 57 | # 发送stock indicator result 58 | self.pub = publisher_topic( 59 | host=eventmq_ip, exchange='realtime_stock_calculator_{}_{}_min'.format(self.strategy, self.frequency)) 60 | threading.Thread(target=self.sub.start).start() 61 | 62 | print("REALTIME_STOCK_CACULATOR INIT, strategy: %s frequency: %s" % (self.strategy, self.frequency)) 63 | 64 | def unsubscribe(self, item): 65 | # remove code from market data 66 | pass 67 | 68 | def stock_min_callback(self, a, b, c, data): 69 | latest_data = json.loads(str(data, encoding='utf-8')) 70 | # print("latest data", latest_data) 71 | context = pd.DataFrame(latest_data) 72 | 73 | # merge update 74 | if self.market_data is None: 75 | self.market_data = context 76 | else: 77 | self.market_data.update(context) 78 | # print(self.market_data) 79 | 80 | # calculate indicator 81 | ind = self.market_data.groupby(['code']).apply(QA_indicator_BOLL) 82 | res = ind.join(self.market_data).dropna().round(2) 83 | res.set_value(index=res[res['LB'] >= res.close].index, col='buyorsell', value=1) # 买入信号 84 | res.set_value(index=res[res['UB'] < res.close].index, col='buyorsell', value=-1) # 卖出信号 85 | res['change'] = res['buyorsell'].diff() # 计算指标信号是否反转 86 | res = res.groupby('code').tail(1) # 取最新的信号 87 | # Buy信号的股票池 88 | res_buy: pd.DataFrame = res[res.change > 0].reset_index() 89 | # res_buy_code_list = res_buy['code'] 90 | print("calculator.buy", res_buy) 91 | # Sell信号的股票池 92 | res_sell: pd.DataFrame = res[res.change < 0].reset_index() 93 | # res_sell_code_list = res_sell['code'] 94 | print("calculator.sell", res_sell) 95 | 96 | self.pub.pub(json.dumps(res_buy.to_dict(), cls=NpEncoder), routing_key="calculator.buy") 97 | self.pub.pub(json.dumps(res_sell.to_dict(), cls=NpEncoder), routing_key="calculator.sell") 98 | 99 | def run(self): 100 | import datetime 101 | while True: 102 | print(datetime.datetime.now(), "realtime stock calculator is running") 103 | time.sleep(1) 104 | 105 | 106 | if __name__ == '__main__': 107 | import QUANTAXIS as QA 108 | from QUANTAXIS import SUM 109 | code_list = ['000001', '000002'] # TODO HS300 STOCK CODE LIST 110 | start_date = '2019-09-29' 111 | end_date = '2019-09-30' 112 | # TODO 若遇上当天除权除息可能出现计算错误 113 | # TODO should resample and the data format is same to the mq_min_data 114 | init_min_data = QA.QA_fetch_stock_min_adv(code_list, start_date, end_date) 115 | if init_min_data is not None: 116 | init_min_data = 
init_min_data.data 117 | from QUANTAXIS import QA_indicator_BOLL 118 | RTCCaluator( 119 | code_list=code_list, frequency='5min', strategy="HS300Enhance", init_data=init_min_data 120 | ).start() 121 | -------------------------------------------------------------------------------- /QARealtimeCollector/clients/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | from QARealtimeCollector.clients.subscribe_clients import QARTC_Clients -------------------------------------------------------------------------------- /QARealtimeCollector/clients/subscribe_clients.py: -------------------------------------------------------------------------------- 1 | from QAPUBSUB.consumer import subscriber 2 | 3 | class QARTC_Clients(): 4 | def __init__(self): 5 | pass -------------------------------------------------------------------------------- /QARealtimeCollector/clients/test.py: -------------------------------------------------------------------------------- 1 | 2 | from QAPUBSUB.consumer import subscriber 3 | 4 | if __name__ == "__main__": 5 | 6 | sub = subscriber(host='192.168.2.116',user='admin', password='admin' ,exchange= 'realtime_60min_rb1910') 7 | 8 | sub.start() -------------------------------------------------------------------------------- /QARealtimeCollector/collectors/__init__.py: -------------------------------------------------------------------------------- 1 | from QARealtimeCollector.collectors.ctpbeecollector import QARTC_CtpBeeCollector 2 | from QARealtimeCollector.collectors.wscollector import QARTC_WsCollector 3 | from QARealtimeCollector.collectors.stockcollector import QARTC_Stock 4 | from QARealtimeCollector.collectors.simmarket import QARTC_RandomTick 5 | from QARealtimeCollector.collectors.simcollector import QARTC_CTPTickCollector -------------------------------------------------------------------------------- /QARealtimeCollector/collectors/crawlercollector.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yutiansut/QUANTAXIS_RealtimeCollector/316113cb049539b4cba5bca8bfbc05f8f14e7508/QARealtimeCollector/collectors/crawlercollector.py -------------------------------------------------------------------------------- /QARealtimeCollector/collectors/ctpbeecollector.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import json 3 | 4 | from QAPUBSUB import consumer, producer 5 | from QUANTAXIS.QAUtil.QALogs import QA_util_log_info 6 | from QARealtimeCollector.setting import mongo_ip, eventmq_ip, market_data_password, market_data_user 7 | from QUANTAXIS.QAEngine.QAThreadEngine import QA_Thread 8 | 9 | 10 | class QARTC_CtpBeeCollector(QA_Thread): 11 | """这是接收重采样部分 12 | 13 | Returns: 14 | [type] -- [description] 15 | """ 16 | 17 | def __init__(self, code): 18 | super().__init__() 19 | self.data = {} 20 | self.min5_data = {} 21 | 22 | self.pro = producer.publisher(host=eventmq_ip, exchange='bar_1min_{}'.format(code), 23 | user=market_data_user, password=market_data_password) 24 | self.pro_realtimemin = producer.publisher(host=eventmq_ip, exchange='realtime_min_{}'.format( 25 | code), user=market_data_user, password=market_data_password) 26 | self.is_send = False 27 | self.last_volume = 0 28 | self.c = consumer.subscriber_routing(host=eventmq_ip, 29 | exchange='CTPX', routing_key=code, user=market_data_user, password=market_data_password) 30 | 31 | def create_new(self, new_tick): 32 | """ 33 | {'gateway_name': 
'ctp', 'symbol': 'au2004', 'exchange': 'SHFE', 34 | 'datetime': '2019-07-02 23:40:19.500000', 'name': '黄金2004', 35 | 'volume': 918, 'last_price': 318.35, 'last_volume': 0, 36 | 'limit_up': 325.95, 'limit_down': 300.9, 'open_interest':4940.0, 37 | 'average_price': 315256.2091503268, 'preSettlementPrice': 313.45, 38 | 'open_price': 314.0, 'high_price': 318.35, 'low_price': 313.9, 39 | 'pre_close': 314.05, 'bid_price_1': 318.25, 'bid_price_2': 0, 'bid_price_3': 0, 40 | 'bid_price_4': 0, 'bid_price_5': 0, 'ask_price_1': 318.45, 'ask_price_2': 0, 41 | 'ask_price_3': 0, 'ask_price_4': 0, 'ask_price_5': 0, 'bid_volume_1': 6, 42 | 'bid_volume_2': 0, 'bid_volume_3': 0, 'bid_volume_4': 0, 'bid_volume_5': 0, 43 | 'ask_volume_1': 3, 'ask_volume_2': 0, 'ask_volume_3': 0, 'ask_volume_4': 0, 44 | 'ask_volume_5': 0, 'vt_symbol': 'au2004.SHFE'} 45 | """ 46 | # time = '{}-{}-{} '.format(new_tick['ActionDay'][0:4], new_tick['ActionDay'][4:6], new_tick['ActionDay'] 47 | # [6:8]) + new_tick['datetime'] + str('%.6f' % (new_tick['UpdateMillisec']/1000000))[1:] 48 | self.data[new_tick['symbol']] = {'open': new_tick['last_price'], 49 | 'high': new_tick['last_price'], 50 | 'low': new_tick['last_price'], 51 | 'close': new_tick['last_price'], 52 | 'code': str(new_tick['symbol']).upper(), 53 | 'datetime': new_tick['datetime'], 54 | 'volume': new_tick['volume']-self.last_volume} 55 | 56 | def update_bar(self, new_tick): 57 | 58 | time = new_tick['datetime'] 59 | old_data = self.data[new_tick['symbol']] 60 | # print(old_data) 61 | old_data['close'] = new_tick['last_price'] 62 | old_data['high'] = old_data['high'] if old_data['high'] > new_tick['last_price'] else new_tick['last_price'] 63 | old_data['low'] = old_data['low'] if old_data['low'] < new_tick['last_price'] else new_tick['last_price'] 64 | old_data['datetime'] = time 65 | old_data['volume'] = new_tick['volume'] - self.last_volume 66 | self.data[new_tick['symbol']] = old_data 67 | return old_data 68 | 69 | def publish_bar(self, symbol): 70 | QA_util_log_info('=================================') 71 | QA_util_log_info('publish') 72 | QA_util_log_info('=================================') 73 | print(self.data[symbol]) 74 | self.pro.pub(json.dumps(self.data[symbol])) 75 | self.is_send = True 76 | 77 | def upcoming_data(self, new_tick): 78 | curtime = new_tick['datetime'] 79 | time = curtime 80 | if curtime[11:13] in ['00', '01', '02', 81 | '09', '10', '11', 82 | '13', '14', '15', 83 | '21', '22', '23']: 84 | 85 | try: 86 | if new_tick['datetime'][17:19] == '00' and len(new_tick['datetime']) == 19: 87 | # print(True) 88 | old_data = self.update_bar(new_tick) 89 | self.last_volume = new_tick['volume'] 90 | self.publish_bar(new_tick['symbol']) 91 | self.pro_realtimemin.pub(json.dumps(old_data)) 92 | self.data[new_tick['symbol']] = {} 93 | self.data[new_tick['symbol']]['datetime'] = time 94 | 95 | elif new_tick['datetime'][17:19] == '00' and len(new_tick['datetime']) > 19: 96 | if self.is_send: 97 | self.is_send = False 98 | else: 99 | self.publish_bar(new_tick['symbol']) 100 | 101 | QA_util_log_info('xxx') 102 | self.create_new(new_tick) 103 | self.pro_realtimemin.pub(json.dumps( 104 | self.data[new_tick['symbol']])) 105 | QA_util_log_info(self.data) 106 | else: 107 | try: 108 | self.update_bar(new_tick) 109 | except: 110 | self.create_new(new_tick) 111 | self.pro_realtimemin.pub(json.dumps( 112 | self.data[new_tick['symbol']])) 113 | except Exception as e: 114 | print(e) 115 | 116 | def callback(self, a, b, c, body): 117 | self.upcoming_data(json.loads(body)) 118 | 119 
| def run(self): 120 | self.c.callback = self.callback 121 | self.c.start() 122 | 123 | 124 | if __name__ == '__main__': 125 | pass 126 | # import click 127 | # @click.command() 128 | # @click.option('--code', default='au1910') 129 | # def handler(code): 130 | # r = QARealtimeCollector_CtpBeeCollector(code) 131 | 132 | # r.start() 133 | 134 | # handler() 135 | -------------------------------------------------------------------------------- /QARealtimeCollector/collectors/ctpbeecollector_second.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import json 3 | 4 | from QAPUBSUB import consumer, producer 5 | from QUANTAXIS.QAUtil.QALogs import QA_util_log_info 6 | from QARealtimeCollector.setting import mongo_ip, eventmq_ip, market_data_password, market_data_user 7 | from QUANTAXIS.QAEngine.QAThreadEngine import QA_Thread 8 | 9 | 10 | class QARTC_CtpBeeCollector(QA_Thread): 11 | """这是接收重采样部分 ==> 变成秒级策略 12 | 13 | Returns: 14 | [type] -- [description] 15 | """ 16 | 17 | def __init__(self, code): 18 | super().__init__() 19 | self.data = {} 20 | self.min5_data = {} 21 | 22 | self.pro = producer.publisher(host=eventmq_ip, exchange='bar_1min_{}'.format(code), 23 | user=market_data_user, password=market_data_password) 24 | self.pro_realtimemin = producer.publisher(host=eventmq_ip, exchange='realtime_min_{}'.format( 25 | code), user=market_data_user, password=market_data_password) 26 | self.is_send = False 27 | self.last_volume = 0 28 | self.c = consumer.subscriber_routing(host=eventmq_ip, 29 | exchange='CTPX', routing_key=code, user=market_data_user, password=market_data_password) 30 | 31 | def create_new(self, new_tick): 32 | """ 33 | {'gateway_name': 'ctp', 'symbol': 'au2004', 'exchange': 'SHFE', 34 | 'datetime': '2019-07-02 23:40:19.500000', 'name': '黄金2004', 35 | 'volume': 918, 'last_price': 318.35, 'last_volume': 0, 36 | 'limit_up': 325.95, 'limit_down': 300.9, 'open_interest':4940.0, 37 | 'average_price': 315256.2091503268, 'preSettlementPrice': 313.45, 38 | 'open_price': 314.0, 'high_price': 318.35, 'low_price': 313.9, 39 | 'pre_close': 314.05, 'bid_price_1': 318.25, 'bid_price_2': 0, 'bid_price_3': 0, 40 | 'bid_price_4': 0, 'bid_price_5': 0, 'ask_price_1': 318.45, 'ask_price_2': 0, 41 | 'ask_price_3': 0, 'ask_price_4': 0, 'ask_price_5': 0, 'bid_volume_1': 6, 42 | 'bid_volume_2': 0, 'bid_volume_3': 0, 'bid_volume_4': 0, 'bid_volume_5': 0, 43 | 'ask_volume_1': 3, 'ask_volume_2': 0, 'ask_volume_3': 0, 'ask_volume_4': 0, 44 | 'ask_volume_5': 0, 'vt_symbol': 'au2004.SHFE'} 45 | """ 46 | # time = '{}-{}-{} '.format(new_tick['ActionDay'][0:4], new_tick['ActionDay'][4:6], new_tick['ActionDay'] 47 | # [6:8]) + new_tick['datetime'] + str('%.6f' % (new_tick['UpdateMillisec']/1000000))[1:] 48 | self.data[new_tick['symbol']] = {'open': new_tick['last_price'], 49 | 'high': new_tick['last_price'], 50 | 'low': new_tick['last_price'], 51 | 'close': new_tick['last_price'], 52 | 'code': str(new_tick['symbol']).upper(), 53 | 'datetime': new_tick['datetime'], 54 | 'volume': new_tick['volume']-self.last_volume} 55 | 56 | def update_bar(self, new_tick): 57 | 58 | time = new_tick['datetime'] 59 | old_data = self.data[new_tick['symbol']] 60 | # print(old_data) 61 | old_data['close'] = new_tick['last_price'] 62 | old_data['high'] = old_data['high'] if old_data['high'] > new_tick['last_price'] else new_tick['last_price'] 63 | old_data['low'] = old_data['low'] if old_data['low'] < new_tick['last_price'] else new_tick['last_price'] 64 | old_data['datetime'] = 
time 65 | old_data['volume'] = new_tick['volume'] - self.last_volume 66 | self.data[new_tick['symbol']] = old_data 67 | return old_data 68 | 69 | def publish_bar(self, symbol): 70 | QA_util_log_info('=================================') 71 | QA_util_log_info('publish') 72 | QA_util_log_info('=================================') 73 | print(self.data[symbol]) 74 | self.pro.pub(json.dumps(self.data[symbol])) 75 | self.is_send = True 76 | 77 | def upcoming_data(self, new_tick): 78 | curtime = new_tick['datetime'] 79 | time = curtime 80 | if curtime[11:13] in ['00', '01', '02', 81 | '09', '10', '11', 82 | '13', '14', '15', 83 | '21', '22', '23']: 84 | 85 | try: 86 | if new_tick['datetime'][17:19] == '00' and len(new_tick['datetime']) == 19: 87 | # print(True) 88 | old_data = self.update_bar(new_tick) 89 | self.last_volume = new_tick['volume'] 90 | self.publish_bar(new_tick['symbol']) 91 | self.pro_realtimemin.pub(json.dumps(old_data)) 92 | self.data[new_tick['symbol']] = {} 93 | self.data[new_tick['symbol']]['datetime'] = time 94 | 95 | elif new_tick['datetime'][17:19] == '00' and len(new_tick['datetime']) > 19: 96 | if self.is_send: 97 | self.is_send = False 98 | else: 99 | self.publish_bar(new_tick['symbol']) 100 | 101 | QA_util_log_info('xxx') 102 | self.create_new(new_tick) 103 | self.pro_realtimemin.pub(json.dumps( 104 | self.data[new_tick['symbol']])) 105 | QA_util_log_info(self.data) 106 | else: 107 | try: 108 | self.update_bar(new_tick) 109 | except: 110 | self.create_new(new_tick) 111 | self.pro_realtimemin.pub(json.dumps( 112 | self.data[new_tick['symbol']])) 113 | except Exception as e: 114 | print(e) 115 | 116 | def callback(self, a, b, c, body): 117 | self.upcoming_data(json.loads(body)) 118 | 119 | def run(self): 120 | self.c.callback = self.callback 121 | self.c.start() 122 | 123 | 124 | if __name__ == '__main__': 125 | pass 126 | # import click 127 | # @click.command() 128 | # @click.option('--code', default='au1910') 129 | # def handler(code): 130 | # r = QARealtimeCollector_CtpBeeCollector(code) 131 | 132 | # r.start() 133 | 134 | # handler() 135 | -------------------------------------------------------------------------------- /QARealtimeCollector/collectors/simcollector.py: -------------------------------------------------------------------------------- 1 | import click 2 | import datetime 3 | import json 4 | 5 | from QAPUBSUB import consumer, producer 6 | from QARealtimeCollector.setting import eventmq_ip, mongo_ip 7 | from QUANTAXIS.QAUtil.QALogs import QA_util_log_info 8 | 9 | 10 | class QARTC_CTPTickCollector(): 11 | def __init__(self, code, subexchange='tick'): 12 | self.data = {} 13 | self.is_send = False 14 | self.last_volume = 0 15 | 16 | self.pro = producer.publisher(exchange='bar_1min_{}'.format( 17 | code), user='admin', password='admin', host=eventmq_ip) 18 | self.pro_realtimemin = producer.publisher(exchange='realtime_min_{}'.format( 19 | code), user='admin', password='admin', host=eventmq_ip) 20 | self.c = consumer.subscriber_routing( 21 | exchange='tick', routing_key=code, user='admin', password='admin', host=eventmq_ip) 22 | 23 | print('start ctptick collector {}'.format(code)) 24 | 25 | def create_new(self, new_tick): 26 | 27 | time = '{}-{}-{} '.format(str(new_tick['TradingDay'])[0:4], str(new_tick['TradingDay'])[4:6], str(new_tick['TradingDay']) 28 | [6:8]) + new_tick['UpdateTime'] + str('%.6f' % (new_tick['UpdateMillisec']/1000))[1:] 29 | print(time) 30 | self.data[new_tick['InstrumentID']] = {'open': new_tick['LastPrice'], 31 | 'high': 
new_tick['LastPrice'], 32 | 'low': new_tick['LastPrice'], 33 | 'close': new_tick['LastPrice'], 34 | 'code': str(new_tick['InstrumentID']).upper(), 35 | 'datetime': time, 36 | 'volume': new_tick['Volume']-self.last_volume} 37 | 38 | def update_bar(self, new_tick): 39 | time = '{}-{}-{} '.format(str(new_tick['TradingDay'])[0:4], str(new_tick['TradingDay'])[4:6], str(new_tick['TradingDay']) 40 | [6:8]) + new_tick['UpdateTime'] + str('%.6f' % (new_tick['UpdateMillisec']/1000))[1:] 41 | old_data = self.data[new_tick['InstrumentID']] 42 | old_data['close'] = new_tick['LastPrice'] 43 | old_data['high'] = old_data['high'] if old_data['high'] > new_tick['LastPrice'] else new_tick['LastPrice'] 44 | old_data['low'] = old_data['low'] if old_data['low'] < new_tick['LastPrice'] else new_tick['LastPrice'] 45 | old_data['datetime'] = time 46 | old_data['volume'] = new_tick['Volume'] - self.last_volume 47 | self.data[new_tick['InstrumentID']] = old_data 48 | return old_data 49 | 50 | def publish_bar(self, InstrumentID): 51 | QA_util_log_info('=================================') 52 | QA_util_log_info('publish bar') 53 | QA_util_log_info('=================================') 54 | print(self.data) 55 | self.pro.pub(json.dumps(self.data[InstrumentID])) 56 | self.is_send = True 57 | 58 | def publish_realtime(self, data): 59 | QA_util_log_info('=================================') 60 | QA_util_log_info('publish realtime') 61 | QA_util_log_info('=================================') 62 | print(data) 63 | self.pro_realtimemin.pub(json.dumps(data)) 64 | 65 | def upcoming_data(self, new_tick): 66 | 67 | curtime = '{}-{}-{} '.format(str(new_tick['TradingDay'])[0:4], str(new_tick['TradingDay'])[4:6], str(new_tick['TradingDay']) 68 | [6:8]) + new_tick['UpdateTime'] + str('%.6f' % (new_tick['UpdateMillisec']/1000))[1:] 69 | time = curtime 70 | 71 | print('{} === get update tick {}'.format(time, new_tick)) 72 | if new_tick['UpdateTime'][-2:] == '00' and new_tick['UpdateMillisec'] == 0: 73 | 74 | old_data=self.update_bar(new_tick) 75 | self.last_volume=new_tick['Volume'] 76 | self.publish_bar(new_tick['InstrumentID']) 77 | self.publish_realtime(old_data) 78 | 79 | self.data[new_tick['InstrumentID']]={} 80 | self.data[new_tick['InstrumentID']]['datetime']=time 81 | 82 | elif new_tick['UpdateTime'][-2:] == '00' and new_tick['UpdateMillisec'] == 500: 83 | if self.is_send: 84 | self.is_send=False 85 | else: 86 | self.publish_bar(new_tick['InstrumentID']) 87 | self.create_new(new_tick) 88 | self.publish_realtime(self.data[new_tick['InstrumentID']]) 89 | QA_util_log_info(self.data) 90 | else: 91 | try: 92 | self.update_bar(new_tick) 93 | except: 94 | self.create_new(new_tick) 95 | self.publish_realtime(self.data[new_tick['InstrumentID']]) 96 | 97 | def callback(self, a, b, c, body): 98 | self.upcoming_data(json.loads(body)) 99 | 100 | def start(self): 101 | self.c.callback=self.callback 102 | 103 | self.c.start() 104 | -------------------------------------------------------------------------------- /QARealtimeCollector/collectors/simmarket.py: -------------------------------------------------------------------------------- 1 | # 2 | import json 3 | import time 4 | 5 | from QAPUBSUB.producer import publisher_routing 6 | from QUANTAXIS_RandomPrice import get_random_price 7 | from QARealtimeCollector.setting import eventmq_ip, mongo_ip 8 | 9 | 10 | class QARTC_RandomTick(): 11 | def __init__(self, code, date, price, interval): 12 | self.code = code 13 | self.date = date 14 | self.price = price 15 | self.interval = interval 16 | 17 | 
self.pub = publisher_routing( 18 | exchange='tick', routing_key=code, host=eventmq_ip, user='admin', password='admin') 19 | 20 | @property 21 | def data(self): 22 | return get_random_price(self.price, self.code, self.date) 23 | 24 | def start(self): 25 | for _, item in self.data.iterrows(): 26 | print(item.to_dict()) 27 | time.sleep(self.interval) 28 | self.pub.pub( 29 | json.dumps(item.to_dict()), routing_key=self.code) 30 | -------------------------------------------------------------------------------- /QARealtimeCollector/collectors/stockbarcollector.py: -------------------------------------------------------------------------------- 1 | # **************************************************************************** # 2 | # # 3 | # ::: :::::::: # 4 | # stockBarCollector.py :+: :+: :+: # 5 | # +:+ +:+ +:+ # 6 | # By: zhongjy1992 +#+ +:+ +#+ # 7 | # +#+#+#+#+#+ +#+ # 8 | # Created: 2019/10/01 22:07:05 by zhongjy1992 #+# #+# # 9 | # Updated: 2020/03/07 13:10:45 by zhongjy1992 ### ########.fr # 10 | # # 11 | # **************************************************************************** # 12 | 13 | import datetime 14 | import json 15 | import logging 16 | import os 17 | import threading 18 | import time 19 | 20 | import click 21 | from QAPUBSUB.consumer import subscriber_routing 22 | from QAPUBSUB.producer import publisher 23 | from QARealtimeCollector.setting import eventmq_ip 24 | from QUANTAXIS.QAFetch.QAQuery_Advance import QA_fetch_stock_min_adv, QA_fetch_stock_day_adv, QA_fetch_index_day_adv 25 | from QUANTAXIS.QAUtil.QADate_trade import QA_util_get_pre_trade_date 26 | from pandas import concat, DataFrame, DatetimeIndex 27 | 28 | from QARealtimeCollector.utils.QATdx_adv import QA_Tdx_Executor 29 | # from utils.TdxAdv import QA_Tdx_Executor 30 | from QARealtimeCollector.utils.common import util_is_trade_time, get_file_name_by_date, logging_csv 31 | 32 | logger = logging.getLogger(__name__) 33 | 34 | 35 | class QARTCStockBar(QA_Tdx_Executor): 36 | # TODO tdx的问题请自行修正,此处只是给出一个分钟bar的采集分发重采样的思路 37 | # TODO 股票订阅请按文档中说明进行http请求 38 | def __init__(self, delay=10.5, date: datetime.datetime = None, log_dir='./log', debug=False): 39 | super().__init__(name='QA_REALTIME_COLLECTOR_STOCK_BAR', thread_num=None, timeout=0.5) 40 | cur_time = datetime.datetime.now() if date is None else date 41 | # set qa_tdx_excutor is debug mode 42 | self.debug = debug 43 | self.cur_year = cur_time.year 44 | self.cur_month = cur_time.month 45 | self.cur_day = cur_time.day 46 | self.isRequesting = False 47 | self.delay = delay # 数据获取请求间隔 48 | self.code_list = [] 49 | self.sub = subscriber_routing(host=eventmq_ip, exchange='QARealtime_Market', routing_key='stock') 50 | self.sub.callback = self.callback 51 | self.pub = publisher(host=eventmq_ip, exchange='realtime_stock_min') 52 | self.log_dir = log_dir 53 | self.pre_market_data = None 54 | self.last_update_time = cur_time 55 | threading.Thread(target=self.sub.start, daemon=True).start() 56 | logger.info("QA_REALTIME_COLLECTOR_STOCK_BAR INIT, delay %s" % self.delay) 57 | 58 | def subscribe_callback(self, code): 59 | """ 60 | 订阅回调 61 | :param code: 62 | :return: 63 | """ 64 | if not isinstance(code, str): 65 | logger.error('not string , %s' % code) 66 | return 67 | today = datetime.datetime(self.cur_year, self.cur_month, self.cur_day).isoformat()[:10] 68 | end_date = QA_util_get_pre_trade_date(cursor_date=today, n=1)[:10] 69 | if code not in self.code_list: 70 | self.code_list.append(code) 71 | # ETF or Stock, 获取前天的收盘价格 72 | logger.info("try fetch %s ,%s" % (code, 
end_date)) 73 | if code.startswith('5') or code.startswith('1'): 74 | _data = QA_fetch_index_day_adv(code, end_date, end_date) 75 | else: 76 | _data = QA_fetch_stock_day_adv(code, end_date, end_date) 77 | if _data is not None: 78 | self.pre_market_data = concat([self.pre_market_data, _data.data.reset_index()]) 79 | logger.info("fetch %s" % _data.data.to_csv(header=False)) 80 | # initial data from server 81 | # self.get_history_data(code, frequency="1min") 82 | 83 | def unsubscribe_callback(self, code): 84 | """ 85 | 取消订阅回调 86 | :param code: 87 | :return: 88 | """ 89 | self.code_list.remove(code) 90 | 91 | def publish_msg(self, msg): 92 | self.pub.pub(msg) 93 | 94 | def callback(self, a, b, c, data): 95 | """ 96 | 监听订阅信息的回调处理 97 | :param a: 98 | :param b: 99 | :param c: 100 | :param data: 101 | :return: 102 | """ 103 | data = json.loads(data) 104 | if data['topic'].lower() == 'subscribe': 105 | logger.info('stock bar collector service receive new subscribe: {}'.format(data['code'])) 106 | new_ins = data['code'].replace('_', '.').split(',') 107 | 108 | if isinstance(new_ins, list): 109 | for item in new_ins: 110 | self.subscribe_callback(item) 111 | else: 112 | self.subscribe_callback(new_ins) 113 | elif data['topic'].lower() == 'unsubscribe': 114 | logger.info('stock bar collector service receive new unsubscribe: {}'.format(data['code'])) 115 | new_ins = data['code'].replace('_', '.').split(',') 116 | 117 | if isinstance(new_ins, list): 118 | for item in new_ins: 119 | self.unsubscribe_callback(item) 120 | else: 121 | self.unsubscribe_callback(new_ins) 122 | 123 | def get_data(self, frequency="1min", lens=5): 124 | """ 125 | 调用tdx获取数据 126 | :param frequency: 127 | :param lens: increasing data len , default: 获取当前及上一bar 128 | :return: 129 | """ 130 | cur_time = datetime.datetime.now() 131 | data = self.get_security_bar_concurrent(self.code_list, frequency, lens) 132 | if len(data) > 0: 133 | self.last_update_time = datetime.datetime.now() 134 | end_time = datetime.datetime.now() 135 | cost_time = (end_time - cur_time).total_seconds() 136 | logger.info("request请求数据完成,耗时, cost: %s 秒" % cost_time) 137 | return concat(data, sort=False).drop_duplicates() 138 | 139 | def get_history_data(self, code_list, frequency="1min", n=1): 140 | """ 141 | 获取历史数据 142 | :param code_list: 143 | :param frequency: k线数据级别 144 | :param n: (当天)前n个交易日 n = QA_util_get_trade_gap(start_date, today_) 145 | :return: 146 | """ 147 | # TODO 历史数据部分应放在策略计算,而不是数据采集部分 148 | # TODO get history bar data 149 | # TODO 调用QA_fetch_stock_min_adv(code, start, end) 从数据库获取数据 150 | today = datetime.datetime(self.cur_year, self.cur_month, self.cur_day).isoformat()[:10] 151 | start_date = QA_util_get_pre_trade_date(cursor_date=today, n=n)[:10] 152 | end_date = QA_util_get_pre_trade_date(cursor_date=today, n=1)[:10] 153 | # start='2019-05-08', end='2019-05-09' means start from 2019-05-08 9:30 and end to 2019-05-09 15:00 154 | data = None 155 | try: 156 | data = QA_fetch_stock_min_adv(code_list, start=start_date, end=end_date) 157 | except Exception as e: 158 | logger.error("fetch stock min history data failure. 
" + e.__str__()) 159 | 160 | if data is not None: 161 | for code in data.code.to_list(): 162 | qa_data = data.select_code(code) 163 | if qa_data is not None: 164 | # TODO 规定标准columns 165 | self.publish_msg(qa_data.data.to_msgpack()) 166 | else: 167 | lens = 0 # initial data len 168 | if frequency in ['5', '5m', '5min', 'five']: 169 | lens = 48 * n 170 | elif frequency in ['1', '1m', '1min', 'one']: 171 | lens = 240 * n 172 | elif frequency in ['15', '15m', '15min', 'fifteen']: 173 | lens = 16 * n 174 | elif frequency in ['30', '30m', '30min', 'half']: 175 | lens = 8 * n 176 | elif frequency in ['60', '60m', '60min', '1h']: 177 | lens = 4 * n 178 | lens = 20800 if lens > 20800 else lens 179 | # TODO 如果获取失败则在线获取 参考save stock min 180 | # data = self.get_security_bar_concurrent(code_list, frequency, lens) 181 | # TODO 规定标准columns 182 | # self.publish_msg(qa_data.data.to_msgpack()) 183 | pass 184 | 185 | def update_date(self, date: datetime.datetime = None): 186 | # TODO auto update every day 187 | cur_time = datetime.datetime.now() if date is None else date 188 | self.cur_year = cur_time.year 189 | self.cur_month = cur_time.month 190 | self.cur_day = cur_time.day 191 | 192 | def length(self): 193 | """ 194 | 返回当前订阅列表的大小 195 | :return: 196 | """ 197 | return len(self.code_list) 198 | 199 | def update_data_job(self): 200 | cur_time = datetime.datetime.now() 201 | context = self.get_data() 202 | if "code" not in context.columns or "datetime" not in context.columns: 203 | logger.info("the requested data has no columns name like 'code'") 204 | return 205 | if context.shape[0] == 0: 206 | logger.info("the requested data has no rows") 207 | return 208 | # 修正tdx在11:30的数据的时间直接跳至13:00的问题 209 | # if isinstance(context.code[0], str): 210 | context.datetime = context.datetime.apply(lambda x: datetime.datetime.fromisoformat( 211 | x.replace('13:00', '11:30'))) 212 | # TODO tdx实时获取可能存在非正常的数据: 1.非交易时间错误 2.OHLC振幅超过上以交易日的10% 213 | # Fixed: 1.非交易时间错误 214 | if "year" in context.columns: 215 | context = context[ 216 | (context.year == self.cur_year) & (context.month == self.cur_month) & ( 217 | context.day <= self.cur_day)] 218 | # 自动补充0开头的完整股票代码 219 | # context["code"] = context["code"].apply(fill_stock_code) 220 | # TODO 过滤振幅异常的数据 221 | context = context.merge(self.pre_market_data[['code', 'close']], on='code', suffixes=('', '_y')) 222 | # 异常的数据 223 | _context = context[ 224 | ( 225 | (context.open / context.close_y - 1).abs() >= 0.101 226 | ) & ( 227 | (context.high / context.close_y - 1).abs() >= 0.101 228 | ) & ( 229 | (context.low / context.close_y - 1).abs() >= 0.101 230 | ) & ( 231 | (context.close / context.close_y - 1).abs() >= 0.101 232 | ) 233 | ] 234 | if _context.shape[0] > 0: 235 | logger.info("异常数据输出START") 236 | logger.info(_context.to_csv()) 237 | logger.info("异常数据输出END") 238 | # 过滤异常数据 239 | context = context[ 240 | ( 241 | (context.open / context.close_y - 1).abs() < 0.101 242 | ) & ( 243 | (context.high / context.close_y - 1).abs() < 0.101 244 | ) & ( 245 | (context.low / context.close_y - 1).abs() < 0.101 246 | ) & ( 247 | (context.close / context.close_y - 1).abs() < 0.101 248 | ) 249 | ] 250 | # 转换日期数据格式 datetime data type from str to Timestamp('2019-10-24 13:00:00', freq='1T') 251 | context["datetime"] = DatetimeIndex(context.datetime).to_list() 252 | context = context.drop([ 253 | "year", "month", "day", "hour", "minute", "close_y"], axis=1 254 | ).reset_index(drop=True).set_index(["datetime", "code"]).sort_index() 255 | # TODO context.groupby(code) 256 | end_time = 
datetime.datetime.now() 257 | self.last_update_time = end_time 258 | cost_time = (end_time - cur_time).total_seconds() 259 | logger.info("clean数据初步清洗, 耗时, cost: %s 秒" % cost_time) 260 | # 数据原始记录输出到csv 261 | logger.info(context.to_csv(float_format='%.3f')) 262 | filename = get_file_name_by_date('stock.collector.%s.csv', self.log_dir) 263 | logging_csv(context, filename, index=True) 264 | self.publish_msg(context.to_msgpack()) # send with maspack 265 | del context 266 | 267 | def run(self): 268 | # 循环定时获取数据 269 | count = 0 270 | while 1: 271 | # code list not empty 272 | count += 1 273 | logger.info("stock bar collector service requested data start. count %s" % count) 274 | if self.length() <= 0: 275 | logger.info("code list is empty") 276 | time.sleep(1) 277 | continue 278 | self.isRequesting = True 279 | # 9:15 - 11:31 and 12:58 - 15:00 获取 280 | cur_time = datetime.datetime.now() 281 | _pass = (cur_time - self.last_update_time).total_seconds() 282 | if self.debug or util_is_trade_time(cur_time): # 如果在交易时间 283 | if _pass > 55: 284 | logger.warning("超时未收到更新数据") 285 | self.update_data_job() 286 | else: 287 | logger.info('current time %s not in trade time' % cur_time.isoformat()) 288 | 289 | logger.info("stock bar collector service requested data end. count %s" % count) 290 | self.isRequesting = False 291 | time.sleep(self.delay) 292 | 293 | 294 | @click.command() 295 | # @click.argument() 296 | @click.option('-t', '--delay', default=20.5, help="fetch data interval, float", type=click.FLOAT) 297 | @click.option('-log', '--logfile', help="log file path", type=click.Path(exists=False)) 298 | @click.option('-log_dir', '--log_dir', help="log path", type=click.Path(exists=False)) 299 | def main(delay: float = 20.5, logfile: str = None, log_dir: str = None): 300 | try: 301 | from QARealtimeCollector.utils.logconf import update_log_file_config 302 | logfile = 'stock.collector.log' if logfile is None else logfile 303 | logging.config.dictConfig(update_log_file_config(logfile)) 304 | except Exception as e: 305 | print(e.__str__()) 306 | QARTCStockBar(delay=delay, log_dir=log_dir.replace('~', os.path.expanduser('~')), debug=False).start() 307 | 308 | 309 | if __name__ == "__main__": 310 | # normal 311 | main() 312 | -------------------------------------------------------------------------------- /QARealtimeCollector/collectors/stockcollector.py: -------------------------------------------------------------------------------- 1 | import json 2 | import threading 3 | import datetime 4 | 5 | from QAPUBSUB.consumer import subscriber_routing 6 | from QAPUBSUB.producer import publisher, publisher_routing 7 | from QARealtimeCollector.setting import eventmq_ip 8 | from QUANTAXIS.QAARP.QAUser import QA_User 9 | from QUANTAXIS.QAEngine.QAThreadEngine import QA_Thread 10 | from QUANTAXIS.QAFetch.QATdx_adv import QA_Tdx_Executor 11 | from QUANTAXIS.QAUtil.QATransform import QA_util_to_json_from_pandas 12 | 13 | 14 | class QARTC_Stock(QA_Tdx_Executor): 15 | def __init__(self): 16 | super().__init__(name='QAREALTIME_COLLECTOR_STOCK') 17 | self.codelist = [] 18 | self.sub = subscriber_routing(host=eventmq_ip, 19 | exchange='QARealtime_Market', routing_key='stock') 20 | self.sub.callback = self.callback 21 | self.pub = publisher( 22 | host=eventmq_ip, exchange='stocktransaction') 23 | threading.Thread(target=self.sub.start, daemon=True).start() 24 | 25 | def subscribe(self, code): 26 | """继续订阅 27 | 28 | Arguments: 29 | code {[type]} -- [description] 30 | """ 31 | if code not in self.codelist: 32 | 
self.codelist.append(code) 33 | 34 | def unsubscribe(self, code): 35 | self.codelist.remove(code) 36 | 37 | def callback(self, a, b, c, data): 38 | data = json.loads(data) 39 | if data['topic'] == 'subscribe': 40 | print('receive new subscribe: {}'.format(data['code'])) 41 | new_ins = data['code'].replace('_', '.').split(',') 42 | 43 | import copy 44 | if isinstance(new_ins, list): 45 | for item in new_ins: 46 | self.subscribe(item) 47 | else: 48 | self.subscribe(new_ins) 49 | if data['topic'] == 'unsubscribe': 50 | print('receive new unsubscribe: {}'.format(data['code'])) 51 | new_ins = data['code'].replace('_', '.').split(',') 52 | 53 | import copy 54 | if isinstance(new_ins, list): 55 | for item in new_ins: 56 | self.unsubscribe(item) 57 | else: 58 | self.unsubscribe(new_ins) 59 | 60 | def get_data(self): 61 | data, time = self.get_realtime_concurrent(self.codelist) 62 | data = QA_util_to_json_from_pandas(data.reset_index()) 63 | self.pub.pub(json.dumps(data)) 64 | 65 | def run(self): 66 | while 1: 67 | self.get_data() 68 | import time 69 | print(datetime.datetime.now()) 70 | time.sleep(1) 71 | 72 | 73 | if __name__ == "__main__": 74 | r = QARTC_Stock() 75 | r.subscribe('000001') 76 | r.subscribe('000002') 77 | r.start() 78 | 79 | r.subscribe('600010') 80 | 81 | import json 82 | import time 83 | time.sleep(2) 84 | publisher_routing(exchange='QARealtime_Market', routing_key='stock').pub(json.dumps({ 85 | 'topic': 'subscribe', 86 | 'code': '600012' 87 | }), routing_key='stock') 88 | 89 | r.unsubscribe('000001') 90 | -------------------------------------------------------------------------------- /QARealtimeCollector/collectors/wscollector.py: -------------------------------------------------------------------------------- 1 | # 2 | from QAPUBSUB.producer import publisher_routing 3 | from QAPUBSUB.consumer import subscriber_routing 4 | from QUANTAXIS.QAEngine import QA_Thread 5 | from QA_OTGBroker import on_pong, on_message, on_error, subscribe_quote, on_close, login, peek 6 | import websocket 7 | import threading 8 | import click 9 | import time 10 | import json 11 | import pymongo 12 | from QARealtimeCollector.util import fix_dict 13 | from QARealtimeCollector.setting import mongo_ip, eventmq_ip 14 | 15 | class QARTC_WsCollector(QA_Thread): 16 | def __init__(self): 17 | 18 | super().__init__() 19 | self.ws = websocket.WebSocketApp('wss://openmd.shinnytech.com/t/md/front/mobile', 20 | on_pong=on_pong, 21 | on_message=self.on_message, 22 | on_error=on_error, 23 | on_close=on_close) 24 | 25 | def _onopen(ws): 26 | def run(): 27 | ws.send(peek()) 28 | threading.Thread(target=run, daemon=False).start() 29 | 30 | self.quoteclient = pymongo.MongoClient(host=mongo_ip).QAREALTIME.realtimeQuote 31 | self.ws.on_open = _onopen 32 | self.data = {} 33 | self.subscribe_list = ['SHFE.rb1910', 'DCE.j1909'] 34 | self.sub = subscriber_routing(host=eventmq_ip, exchange='QARealtime_Market', routing_key='future') 35 | self.sub.callback = self.callback 36 | threading.Thread(target=self.ws.run_forever, 37 | name='market_websock', daemon=False).start() 38 | threading.Thread(target=self.sub.start, 39 | name='market_subscriber', daemon=True).start() 40 | 41 | def on_message(self, message): 42 | print(message) 43 | message = json.loads(message) 44 | if 'data' in message.keys(): 45 | data = message['data'][0] 46 | if 'quotes' in data.keys(): 47 | data = data['quotes'] 48 | for items in data.keys(): 49 | try: 50 | item = items.replace('.', '_') 51 | if item not in self.data.keys(): 52 | self.data[item] = data[items] 
53 | else: 54 | for keys in data[items].keys(): 55 | self.data[item][keys] = data[items][keys] 56 | self.data[item]['instrument_id'] = item 57 | self.quoteclient.update_one({'instrument_id': item}, 58 | {'$set': self.data[item]}, upsert=True) 59 | except Exception as e: 60 | print(e) 61 | 62 | self.ws.send(peek()) 63 | 64 | def callback(self, a, b, c, data): 65 | data = json.loads(data) 66 | if data['topic'] == 'subscribe': 67 | new_ins = data['code'].replace('_', '.').split(',') 68 | import copy 69 | 70 | old = len(self.subscribe_list) 71 | self.subscribe_list.extend(new_ins) 72 | self.subscribe_list = list( 73 | set(self.subscribe_list)) 74 | if old < len(self.subscribe_list): 75 | self.ws.send(subscribe_quote(','.join(self.subscribe_list))) 76 | 77 | def run(self): 78 | time.sleep(2) 79 | self.ws.send(subscribe_quote('SHFE.rb1910,DCE.j1909')) 80 | while True: 81 | time.sleep(1) 82 | 83 | 84 | if __name__ == "__main__": 85 | QARTC_WsCollector().start() 86 | -------------------------------------------------------------------------------- /QARealtimeCollector/connector/QATdx_adv.py: -------------------------------------------------------------------------------- 1 | # **************************************************************************** # 2 | # # 3 | # ::: :::::::: # 4 | # QATdx_adv.py :+: :+: :+: # 5 | # +:+ +:+ +:+ # 6 | # By: zhongjy1992 +#+ +:+ +#+ # 7 | # +#+#+#+#+#+ +#+ # 8 | # Created: 2020/03/06 13:42:58 by zhongjy1992 #+# #+# # 9 | # Updated: 2020/03/06 13:43:02 by zhongjy1992 ### ########.fr # 10 | # # 11 | # **************************************************************************** # 12 | 13 | # coding:utf-8 14 | # 15 | # The MIT License (MIT) 16 | # 17 | # Copyright (c) 2016-2019 yutiansut/QUANTAXIS 18 | # 19 | # Permission is hereby granted, free of charge, to any person obtaining a copy 20 | # of this software and associated documentation files (the "Software"), to deal 21 | # in the Software without restriction, including without limitation the rights 22 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 23 | # copies of the Software, and to permit persons to whom the Software is 24 | # furnished to do so, subject to the following conditions: 25 | # 26 | # The above copyright notice and this permission notice shall be included in all 27 | # copies or substantial portions of the Software. 28 | # 29 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 30 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 31 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 32 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 33 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 34 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 35 | # SOFTWARE. 
36 | 37 | 38 | import datetime 39 | import os 40 | import queue 41 | import logging 42 | import time 43 | from concurrent.futures import ThreadPoolExecutor 44 | from threading import Thread, Timer 45 | 46 | import click 47 | import pandas as pd 48 | from QUANTAXIS.QAEngine.QAThreadEngine import QA_Thread 49 | from QUANTAXIS.QAUtil.QADate_trade import QA_util_if_tradetime 50 | from QUANTAXIS.QAUtil.QASetting import DATABASE, stock_ip_list 51 | from QUANTAXIS.QAUtil.QASql import QA_util_sql_mongo_sort_ASCENDING 52 | from QUANTAXIS.QAUtil.QATransform import QA_util_to_json_from_pandas 53 | from pytdx.hq import TdxHq_API 54 | 55 | from QARealtimeCollector.utils.common import get_file_name_by_date, logging_csv 56 | logger = logging.getLogger(__name__) 57 | 58 | """ 59 | A connection-pool style Executor that keeps several pytdx connections alive. 60 | When data is fetched continuously or in bulk, this reduces the load on any single server and allows faster parallel processing. 61 | """ 62 | 63 | 64 | class QA_Tdx_Executor(QA_Thread): 65 | def __init__(self, thread_num=2, timeout: float = 1, sleep_time=1, *args, **kwargs): 66 | super().__init__(name='QATdxExecutor') 67 | self.thread_num = thread_num 68 | self._queue = queue.Queue(maxsize=200) 69 | self.api_no_connection = TdxHq_API() 70 | self._api_worker = Thread( 71 | target=self.api_worker, args=(), name='API Worker') 72 | self._api_worker.start() 73 | self.timeout = timeout 74 | self.executor = ThreadPoolExecutor(self.thread_num) 75 | self.sleep_time = sleep_time 76 | 77 | def __getattr__(self, item): 78 | try: 79 | api = self.get_available() 80 | func = api.__getattribute__(item) 81 | 82 | def wrapper(*args, **kwargs): 83 | res = self.executor.submit(func, *args, **kwargs) 84 | self._queue.put(api) 85 | return res 86 | return wrapper 87 | except: 88 | return self.__getattr__(item) 89 | 90 | def _queue_clean(self): 91 | self._queue = queue.Queue(maxsize=200) 92 | 93 | def _test_speed(self, ip, port=7709): 94 | 95 | api = TdxHq_API(raise_exception=True, auto_retry=False) 96 | _time = datetime.datetime.now() 97 | # print(self.timeout) 98 | try: 99 | with api.connect(ip, port, time_out=1): 100 | # api.get_security_list(0, 1) 101 | # res = api.get_security_list(0, 1) 102 | # print(res) 103 | # print(len(res)) 104 | if len(api.get_security_list(0, 1)) > 800: 105 | return (datetime.datetime.now() - _time).total_seconds() 106 | else: 107 | return datetime.timedelta(9, 9, 0).total_seconds() 108 | except: 109 | return datetime.timedelta(9, 9, 0).total_seconds() 110 | 111 | def get_market(self, code): 112 | code = str(code) 113 | if code[0] in ['5', '6', '9'] or code[:3] in ["009", "126", "110", "201", "202", "203", "204"]: 114 | return 1 115 | return 0 116 | 117 | def get_frequence(self, frequence): 118 | if frequence in ['day', 'd', 'D', 'DAY', 'Day']: 119 | frequence = 9 120 | elif frequence in ['w', 'W', 'Week', 'week']: 121 | frequence = 5 122 | elif frequence in ['month', 'M', 'm', 'Month']: 123 | frequence = 6 124 | elif frequence in ['Q', 'Quarter', 'q']: 125 | frequence = 10 126 | elif frequence in ['y', 'Y', 'year', 'Year']: 127 | frequence = 11 128 | elif str(frequence) in ['5', '5m', '5min', 'five']: 129 | frequence = 0 130 | elif str(frequence) in ['1', '1m', '1min', 'one']: 131 | frequence = 8 132 | elif str(frequence) in ['15', '15m', '15min', 'fifteen']: 133 | frequence = 1 134 | elif str(frequence) in ['30', '30m', '30min', 'half']: 135 | frequence = 2 136 | elif str(frequence) in ['60', '60m', '60min', '1h']: 137 | frequence = 3 138 | 139 | return frequence 140 | 141 | @property 142 | def ipsize(self): 143 | return self._queue.qsize() 144 | 145 | @property 146 | def 
api(self): 147 | return self.get_available() 148 | 149 | def get_available(self): 150 | 151 | if self._queue.empty() is False: 152 | return self._queue.get_nowait() 153 | else: 154 | Timer(0, self.api_worker).start() 155 | return self._queue.get() 156 | 157 | def api_worker(self): 158 | # data = [] 159 | if self._queue.qsize() < 80: 160 | for item in stock_ip_list: 161 | if self._queue.full(): 162 | break 163 | _sec = self._test_speed(ip=item['ip'], port=item['port']) 164 | if _sec < self.timeout*3: 165 | try: 166 | self._queue.put(TdxHq_API(heartbeat=False).connect( 167 | ip=item['ip'], port=item['port'], time_out=self.timeout*2)) 168 | except: 169 | pass 170 | else: 171 | self._queue_clean() 172 | Timer(0, self.api_worker).start() 173 | Timer(300, self.api_worker).start() 174 | 175 | def _singal_job(self, context, id_, code, time_out=0.7): 176 | try: 177 | _api = self.get_available() 178 | # self.api_no_connection.to_df is used only to convert the result to a DataFrame 179 | __data = context.append(self.api_no_connection.to_df(_api.get_security_quotes( 180 | [(self.get_market(x), x) for x in code[80 * id_:80 * (id_ + 1)]]))) 181 | __data['datetime'] = datetime.datetime.now() 182 | self._queue.put(_api) # return the connection to the pool 183 | return __data 184 | except: 185 | return self._singal_job(context, id_, code) 186 | 187 | def get_realtime(self, code): 188 | context = pd.DataFrame() 189 | 190 | code = [code] if isinstance(code, str) else code 191 | try: 192 | for id_ in range(int(len(code) / 80) + 1): 193 | context = self._singal_job(context, id_, code) 194 | 195 | data = context[['datetime', 'last_close', 'code', 'open', 'high', 'low', 'price', 'cur_vol', 196 | 's_vol', 'b_vol', 'vol', 'ask1', 'ask_vol1', 'bid1', 'bid_vol1', 'ask2', 'ask_vol2', 197 | 'bid2', 'bid_vol2', 'ask3', 'ask_vol3', 'bid3', 'bid_vol3', 'ask4', 198 | 'ask_vol4', 'bid4', 'bid_vol4', 'ask5', 'ask_vol5', 'bid5', 'bid_vol5']] 199 | data['datetime'] = data['datetime'].apply(lambda x: str(x)) 200 | return data.set_index('code', drop=False, inplace=False) 201 | except: 202 | return None 203 | 204 | def get_realtime_concurrent(self, code): 205 | code = [code] if isinstance(code, str) else code 206 | 207 | try: 208 | data = {self.get_security_quotes([(self.get_market( 209 | x), x) for x in code[80 * pos:80 * (pos + 1)]]) for pos in range(int(len(code) / 80) + 1)} 210 | return (pd.concat([self.api_no_connection.to_df(i.result()) for i in data]), datetime.datetime.now()) 211 | except: 212 | pass 213 | 214 | def get_security_bar_concurrent(self, code_list, _type, lens): 215 | """ 216 | 217 | :param code_list: 218 | :param _type: 219 | :param lens: 220 | :return: [Dataframe, df, df] 221 | """ 222 | try: 223 | context = [] 224 | for item in code_list: 225 | _code = str(item) 226 | _request_time = datetime.datetime.now().isoformat(sep=' ', timespec='seconds') 227 | # print(_code, lens) 228 | _data = {self.get_security_bars(self.get_frequence(_type), self.get_market(_code), _code, 0, lens)} 229 | data = pd.concat([ 230 | pd.DataFrame(i.result()) for i in _data if i is not None 231 | ]).assign(code=_code, update=_request_time) 232 | # data = pd.concat([ 233 | # self.api_no_connection.to_df(i.result()) for i in _data if i is not None 234 | # ]).assign(code=_code, update=_request_time) 235 | context.append(data) 236 | # record the data 237 | filename = get_file_name_by_date('stock.pytdx.%s.csv', os.path.join(os.path.expanduser('~'), './log/')) 238 | logging_csv(data, filename, index=False, mode='a') 239 | return context 240 | except: 241 | raise Exception 242 | 243 | def 
_get_security_bars(self, context, code, _type, lens): 244 | try: 245 | _api = self.get_available() 246 | _code = str(code) if not isinstance(code, str) else code 247 | for i in range(1, int(lens / 800) + 2): 248 | context.extend(_api.get_security_bars(self.get_frequence( 249 | _type), self.get_market(_code), _code, (i - 1) * 800, 800)) 250 | self._queue.put(_api) 251 | return context 252 | except Exception as e: 253 | logger.error('pytdx get bars failure' + e.__str__()) 254 | return self._get_security_bars(context, code, _type, lens) 255 | 256 | def get_security_bar(self, code, _type, lens): 257 | code = [code] if isinstance(code, str) is str else code 258 | context = [] 259 | try: 260 | for item in code: 261 | context = self._get_security_bars(context, item, _type, lens) 262 | return context 263 | except Exception as e: 264 | raise e 265 | 266 | def save_mongo(self, data, client=DATABASE): 267 | database = DATABASE.get_collection( 268 | 'realtime_{}'.format(datetime.date.today())) 269 | 270 | database.insert_many(QA_util_to_json_from_pandas(data)) 271 | 272 | def run(self): 273 | 274 | sleep = int(self.sleep_time) 275 | _time1 = datetime.datetime.now() 276 | database = DATABASE.get_collection( 277 | 'realtime_{}'.format(datetime.date.today())) 278 | database.create_index([('code', QA_util_sql_mongo_sort_ASCENDING)]) 279 | database.create_index([('datetime', QA_util_sql_mongo_sort_ASCENDING)]) 280 | 281 | from QUANTAXIS.QAFetch.QAQuery_Advance import QA_fetch_stock_block_adv 282 | code = QA_fetch_stock_block_adv().code 283 | 284 | while True: 285 | _time = datetime.datetime.now() 286 | if QA_util_if_tradetime(_time): # 如果在交易时间 287 | data = self.get_realtime_concurrent(code) 288 | 289 | data[0]['datetime'] = data[1] 290 | self.save_mongo(data[0]) 291 | 292 | logger.debug('Cost Time {}'.format( 293 | (datetime.datetime.now() - _time).total_seconds())) 294 | time.sleep(sleep) 295 | logger.debug('Connection Pool NOW LEFT {} Available IP'.format( 296 | self._queue.qsize())) 297 | logger.debug('Program Last Time {}'.format( 298 | (datetime.datetime.now() - _time1).total_seconds())) 299 | else: 300 | logger.warning('Not Trading time {}'.format(_time)) 301 | time.sleep(sleep) 302 | 303 | 304 | def get_bar(timeout=1, sleep=1): 305 | sleep = int(sleep) 306 | _time1 = datetime.datetime.now() 307 | from QUANTAXIS.QAFetch.QAQuery_Advance import QA_fetch_stock_block_adv 308 | code = QA_fetch_stock_block_adv().code 309 | x = QA_Tdx_Executor(timeout=float(timeout)) 310 | 311 | while True: 312 | _time = datetime.datetime.now() 313 | if QA_util_if_tradetime(_time): # 如果在交易时间 314 | data = x.get_security_bar_concurrent(code, 'day', 1) 315 | 316 | logger.debug('Cost Time {}'.format( 317 | (datetime.datetime.now() - _time).total_seconds())) 318 | time.sleep(sleep) 319 | logger.debug('Connection Pool NOW LEFT {} Available IP'.format( 320 | x._queue.qsize())) 321 | logger.debug('Program Last Time {}'.format( 322 | (datetime.datetime.now() - _time1).total_seconds())) 323 | 324 | return data 325 | else: 326 | logger.warning('Not Trading time {}'.format(_time)) 327 | time.sleep(sleep) 328 | 329 | 330 | def get_day_once(): 331 | 332 | _time1 = datetime.datetime.now() 333 | from QUANTAXIS.QAFetch.QAQuery_Advance import QA_fetch_stock_block_adv 334 | code = QA_fetch_stock_block_adv().code 335 | x = QA_Tdx_Executor() 336 | return x.get_security_bar_concurrent(code, 'day', 1) 337 | 338 | 339 | @click.command() 340 | @click.option('--timeout', default=0.2, help='timeout param') 341 | @click.option('--sleep', default=1, 
help='sleep step') 342 | def bat(timeout=0.2, sleep=1): 343 | QA_Tdx_Executor(timeout=timeout, sleep_time=sleep).start() 344 | 345 | 346 | if __name__ == '__main__': 347 | QA_Tdx_Executor().start() 348 | -------------------------------------------------------------------------------- /QARealtimeCollector/datahandler/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | 3 | from QARealtimeCollector.datahandler.realtime_resampler import QARTC_Resampler -------------------------------------------------------------------------------- /QARealtimeCollector/datahandler/realtime_resampler.py: -------------------------------------------------------------------------------- 1 | # 2 | from QAPUBSUB.consumer import subscriber 3 | from QAPUBSUB.producer import publisher 4 | from QUANTAXIS.QAEngine.QAThreadEngine import QA_Thread 5 | from QUANTAXIS.QAData.data_resample import QA_data_futuremin_resample, QA_data_futuremin_resample_tb_kq 6 | from QUANTAXIS.QAUtil.QADate_trade import QA_util_future_to_tradedatetime 7 | from QARealtimeCollector.setting import eventmq_ip 8 | import json 9 | import pandas as pd 10 | import numpy as np 11 | import threading 12 | import time 13 | 14 | 15 | class NpEncoder(json.JSONEncoder): 16 | def default(self, obj): 17 | if isinstance(obj, np.integer): 18 | return int(obj) 19 | elif isinstance(obj, np.floating): 20 | return float(obj) 21 | elif isinstance(obj, np.ndarray): 22 | return obj.tolist() 23 | elif isinstance(obj, pd.Timestamp): 24 | return str(obj) 25 | else: 26 | return super(NpEncoder, self).default(obj) 27 | 28 | 29 | class QARTC_Resampler(QA_Thread): 30 | def __init__(self, code='rb1910', freqence='60min', model='tb'): 31 | super().__init__() 32 | self.code = code 33 | self.freqence = freqence 34 | self.sub = subscriber( 35 | host=eventmq_ip, exchange='realtime_min_{}'.format(self.code)) 36 | self.pub = publisher( 37 | host=eventmq_ip, exchange='realtime_{}_{}'.format(self.freqence, self.code)) 38 | self.sub.callback = self.callback 39 | self.market_data = [] 40 | self.dt = None 41 | self.model = model 42 | 43 | def callback(self, a, b, c, data): 44 | lastest_data = json.loads(str(data, encoding='utf-8')) 45 | print(lastest_data) 46 | if self.dt != lastest_data['datetime'][15:16] or len(self.market_data) < 1: 47 | self.dt = lastest_data['datetime'][15:16] 48 | # print('new') 49 | self.market_data.append(lastest_data) 50 | else: 51 | # print('update') 52 | self.market_data[-1] = lastest_data 53 | df = pd.DataFrame(self.market_data) 54 | df = df.assign(datetime=pd.to_datetime(df.datetime), code=self.code, position=0, 55 | tradetime=df.datetime.apply(QA_util_future_to_tradedatetime)).set_index('datetime') 56 | # print(df) 57 | if self.model == 'tb': 58 | 59 | res = QA_data_futuremin_resample_tb_kq(df, self.freqence) 60 | else: 61 | res = QA_data_futuremin_resample(df, self.freqence) 62 | # print(res) 63 | # print(res.iloc[-1].to_dict()) 64 | self.pub.pub(json.dumps( 65 | res.reset_index().iloc[-1].to_dict(), cls=NpEncoder)) 66 | 67 | def run(self): 68 | self.sub.start() 69 | 70 | 71 | if __name__ == "__main__": 72 | QARTC_Resampler().start() 73 | -------------------------------------------------------------------------------- /QARealtimeCollector/datahandler/stock_resampler.py: -------------------------------------------------------------------------------- 1 | # **************************************************************************** # 2 | # # 3 | # ::: :::::::: # 4 | # stockResampler.py :+: :+: :+: # 5 | # 
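Once a QARTC_Resampler is running for a contract, any client can read the resampled bars straight off RabbitMQ. A minimal consumer sketch, assuming QAPUBSUB and the RabbitMQ host from setting.py, and that a resampler is publishing to an exchange named by the realtime_{freq}_{code} convention above; the exchange name here is illustrative:

```python
# Hypothetical consumer of QARTC_Resampler output.
import json

from QAPUBSUB.consumer import subscriber
from QARealtimeCollector.setting import eventmq_ip

sub = subscriber(host=eventmq_ip, exchange='realtime_1min_rb1910')

def on_bar(channel, method, properties, body):
    # the resampler publishes the latest bar as a single JSON object
    bar = json.loads(body)
    print(bar['datetime'], bar['open'], bar['high'], bar['low'], bar['close'])

sub.callback = on_bar
sub.start()  # blocking; wrap it in a thread if it has to share a process
```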
+:+ +:+ +:+ # 6 | # By: zhongjy1992 +#+ +:+ +#+ # 7 | # +#+#+#+#+#+ +#+ # 8 | # Created: 2020/03/03 22:19:37 by zhongjy1992 #+# #+# # 9 | # Updated: 2020/03/06 14:06:35 by zhongjy1992 ### ########.fr # 10 | # # 11 | # **************************************************************************** # 12 | import datetime 13 | import json 14 | import logging 15 | import multiprocessing 16 | import os 17 | import threading 18 | import time 19 | 20 | import click 21 | import pandas as pd 22 | from QAPUBSUB.consumer import subscriber, subscriber_routing 23 | from QAPUBSUB.producer import publisher 24 | from QARealtimeCollector.setting import eventmq_ip 25 | from QUANTAXIS.QAEngine.QAThreadEngine import QA_Thread 26 | 27 | from utils.common import create_empty_stock_df, tdx_stock_bar_resample_parallel, util_is_trade_time, \ 28 | get_file_name_by_date, logging_csv 29 | 30 | logger = logging.getLogger(__name__) 31 | 32 | 33 | class QARTCStockBarResampler(QA_Thread): 34 | """ 35 | 应启动一个线程单独重采样1min的数据,然后按需求根据1min的数据重新采样为多周期数据 36 | 若有一个内存数据库,则可以把数据先写入数据库,然后再根据订阅读取进行拉取(redis, mongo?) 37 | """ 38 | 39 | def __init__(self, frequency='5min', date: datetime.datetime = None, log_dir='./log'): 40 | """ 41 | 暂时不支持单个股票重采样 42 | :param frequency: 43 | """ 44 | super().__init__() 45 | logger.info("QA实时股票Bar重采样,初始化...周期: %s" % frequency) 46 | if isinstance(frequency, float): 47 | self.frequency = int(frequency) 48 | elif isinstance(frequency, str): 49 | _frequency = frequency.replace('min', '') 50 | if str.isnumeric(_frequency): 51 | self.frequency = int(_frequency) 52 | else: 53 | logger.error("不支持的周期 unknownFrequency: %s" % frequency) 54 | return 55 | elif isinstance(frequency, int): 56 | self.frequency = frequency 57 | else: 58 | logger.error("不支持的周期 unknownFrequency: %s" % frequency) 59 | return 60 | 61 | self.market_data = None 62 | 63 | # 接收stock tick 数据 64 | self.sub = subscriber( 65 | host=eventmq_ip, exchange='realtime_stock_min') 66 | self.sub.callback = self.on_message_callback 67 | self.stock_sub = subscriber_routing(host=eventmq_ip, exchange='QARealtime_Market', routing_key='stock') 68 | self.stock_sub.callback = self.on_stock_subscribe_message_callback 69 | # 发送重采样的数据 70 | self.pub = publisher(host=eventmq_ip, exchange='realtime_stock_{}_min'.format(self.frequency)) 71 | self.count = 0 72 | self.code_list = [] 73 | cur_time = datetime.datetime.now() if date is None else date 74 | self.cur_year = cur_time.year 75 | self.cur_month = cur_time.month 76 | self.cur_day = cur_time.day 77 | # 多进程计算 78 | self.cpu_count = multiprocessing.cpu_count() - 1 79 | self.log_dir = log_dir 80 | threading.Thread(target=self.sub.start, daemon=True).start() 81 | threading.Thread(target=self.stock_sub.start, daemon=True).start() 82 | 83 | 84 | def publish_msg(self, text): 85 | self.pub.pub(text) 86 | 87 | def on_stock_subscribe_message_callback(self, channel, method, properties, data): 88 | data = json.loads(data) 89 | if data['topic'].lower() == 'subscribe': 90 | logger.info('股票重采样,新的订阅: {}'.format(data['code'])) 91 | new_ins = data['code'].replace('_', '.').split(',') 92 | 93 | if isinstance(new_ins, list): 94 | for item in new_ins: 95 | self.subscribe_callback(item) 96 | else: 97 | self.subscribe_callback(new_ins) 98 | if data['topic'].lower() == 'unsubscribe': 99 | logger.info('股票重采样,取消订阅: {}'.format(data['code'])) 100 | new_ins = data['code'].replace('_', '.').split(',') 101 | 102 | if isinstance(new_ins, list): 103 | for item in new_ins: 104 | self.unsubscribe_callback(item) 105 | else: 106 | 
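The callback above listens on the QARealtime_Market exchange, so adding or removing codes is just a routed JSON message. A sketch of the control messages it expects, mirroring what webserver.py and example/stock_sub.py publish; a RabbitMQ broker at eventmq_ip is assumed and the codes are illustrative:

```python
# Control messages for the stock collector/resampler.
import json

from QAPUBSUB.producer import publisher_routing
from QARealtimeCollector.setting import eventmq_ip

ctrl = publisher_routing(host=eventmq_ip, exchange='QARealtime_Market', routing_key='stock')

# start tracking two codes (several codes go into one comma-separated string)
ctrl.pub(json.dumps({'topic': 'subscribe', 'code': '000001,600000'}), routing_key='stock')

# stop tracking one of them again
ctrl.pub(json.dumps({'topic': 'unsubscribe', 'code': '600000'}), routing_key='stock')
```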
self.unsubscribe_callback(new_ins) 107 | 108 | def subscribe_callback(self, code): 109 | if code not in self.code_list: 110 | self.code_list.append(code) 111 | # initial time series data 112 | # date=datetime.datetime(2019, 5, 9) 113 | self.market_data = pd.concat([ 114 | self.market_data, create_empty_stock_df(code, date=datetime.datetime(self.cur_year, self.cur_month, 115 | self.cur_day)) 116 | ]) 117 | logger.info("当日数据初始化中,%s" % code) 118 | pass 119 | 120 | def unsubscribe_callback(self, item): 121 | # remove code from market data 122 | pass 123 | 124 | def on_message_callback(self, channel, method, properties, body): 125 | context = pd.read_msgpack(body) 126 | # merge update 127 | if self.market_data is None: 128 | # self.market_data = context 129 | pass 130 | else: 131 | logger.info("Before market_data, concat and update start, 合并市场数据") 132 | cur_time = datetime.datetime.now() 133 | self.market_data.update(context) 134 | end_time = datetime.datetime.now() 135 | cost_time = (end_time - cur_time).total_seconds() 136 | logger.info("Before market_data, concat and update end, 合并市场数据, 耗时,cost: %s s" % cost_time) 137 | logger.info(self.market_data.to_csv(float_format='%.3f')) 138 | filename = get_file_name_by_date('stock.market.%s.csv', self.log_dir) 139 | # 不追加,复写 140 | logging_csv(self.market_data, filename, index=True, mode='w') 141 | 142 | # group by code and resample 143 | try: 144 | cur_time = datetime.datetime.now() 145 | bar_data: pd.DataFrame = tdx_stock_bar_resample_parallel( 146 | self.market_data[self.market_data.close > 0], self.frequency, jobs=self.cpu_count 147 | ) 148 | end_time = datetime.datetime.now() 149 | cost_time = (end_time - cur_time).total_seconds() 150 | logger.info("数据重采样耗时,cost: %s" % cost_time) 151 | logger.info("发送重采样数据中start") 152 | self.publish_msg(bar_data.to_msgpack()) 153 | logger.info("发送重采样数据完毕end") 154 | 155 | logger.info(bar_data.to_csv(float_format='%.3f')) 156 | filename = get_file_name_by_date('stock.bar.%s.csv', self.log_dir) 157 | # 不追加,复写 158 | logging_csv(bar_data, filename, index=True, mode='w') 159 | del bar_data 160 | except Exception as e: 161 | logger.error("failure股票重采样数据. 
" + e.__str__()) 162 | finally: 163 | logger.info("重采样计数 count : %s" % self.count) 164 | self.count += 1 165 | del context 166 | 167 | def run(self): 168 | while True: 169 | # 9:15 - 11:31 and 12:58 - 15:00 获取 170 | cur_time = datetime.datetime.now() 171 | if util_is_trade_time(cur_time): # 如果在交易时间 172 | time.sleep(0.2) 173 | else: 174 | time.sleep(1) 175 | 176 | 177 | @click.command() 178 | # @click.argument() 179 | @click.option('-F', '--frequency', default='5min', help='calculate frequency', type=click.STRING) 180 | @click.option('-log', '--logfile', help="log file path", type=click.Path(exists=False)) 181 | @click.option('-log_dir', '--log_dir', help="log path", type=click.Path(exists=False)) 182 | def main(frequency: str, logfile: str = None, log_dir: str = None): 183 | try: 184 | from utils.logconf import update_log_file_config 185 | logfile = 'stock.resample.log' if logfile is None else logfile 186 | logging.config.dictConfig(update_log_file_config(logfile)) 187 | except Exception as e: 188 | print(e.__str__()) 189 | # TODO suuport codelist file 190 | QARTCStockBarResampler(frequency=frequency, log_dir=log_dir.replace('~', os.path.expanduser('~'))).run() 191 | 192 | 193 | if __name__ == '__main__': 194 | main() 195 | -------------------------------------------------------------------------------- /QARealtimeCollector/management.py: -------------------------------------------------------------------------------- 1 | # 用于拉起进程 2 | import datetime 3 | import json 4 | import threading 5 | import time 6 | 7 | import pymongo 8 | 9 | # 定义一个准备作为线程任务的函数 10 | 11 | import QUANTAXIS as QA 12 | from QUANTAXIS.QAEngine.QAThreadEngine import QA_Thread 13 | from QARealtimeCollector.collectors import QARTC_CtpBeeCollector 14 | from QARealtimeCollector.datahandler import QARTC_Resampler 15 | 16 | 17 | class QARC_Management(QA_Thread): 18 | def __init__(self, group): 19 | super().__init__(name='QAPBManagementGroup {}'.format(group), daemon=False) 20 | self.group = group 21 | -------------------------------------------------------------------------------- /QARealtimeCollector/setting.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | 4 | mongo_ip = os.environ.get('MONGODB', '127.0.0.1') 5 | eventmq_ip = os.environ.get('EventMQ_IP', '127.0.0.1') 6 | market_data_user = 'admin' 7 | market_data_password = 'admin' -------------------------------------------------------------------------------- /QARealtimeCollector/util.py: -------------------------------------------------------------------------------- 1 | import copy 2 | def fix_dict(datax, ignore_duplicate_key=False): 3 | """ 4 | Removes dots "." from keys, as mongo doesn't like that. 5 | If the key is already there without the dot, the dot-value get's lost. 6 | This modifies the existing dict! 7 | 8 | :param ignore_duplicate_key: True: if the replacement key is already in the dict, now the dot-key value will be ignored. 9 | False: raise ValueError in that case. 10 | """ 11 | #datax = copy.deepcopy(data) 12 | 13 | if isinstance(datax, (list, tuple)): 14 | list2 = list() 15 | for e in datax: 16 | list2.append(fix_dict(e)) 17 | # end if 18 | return list2 19 | if isinstance(datax, dict): 20 | # end if 21 | for key, value in datax.items(): 22 | value = fix_dict(value) 23 | old_key = key 24 | if "." 
in key: 25 | key = old_key.replace(".", "_") 26 | #if key not in datax: 27 | datax[key] = value 28 | # else: 29 | # error_msg = "Dict key {key} containing a \".\" was ignored, as {replacement} already exists".format( 30 | # key=old_key, replacement=key) 31 | # # if force: 32 | # import warnings 33 | # warnings.warn(error_msg, category=RuntimeWarning) 34 | # # else: 35 | # # raise ValueError(error_msg) 36 | # # end if 37 | # end if 38 | del datax[old_key] 39 | # end if 40 | datax[key] = value 41 | # end for 42 | return datax 43 | # end if 44 | return datax 45 | # end def 46 | -------------------------------------------------------------------------------- /QARealtimeCollector/utils/common.py: -------------------------------------------------------------------------------- 1 | # **************************************************************************** # 2 | # # 3 | # ::: :::::::: # 4 | # common.py :+: :+: :+: # 5 | # +:+ +:+ +:+ # 6 | # By: zhongjy1992 +#+ +:+ +#+ # 7 | # +#+#+#+#+#+ +#+ # 8 | # Created: 2019/10/13 16:08:30 by zhongjy1992 #+# #+# # 9 | # Updated: 2020/03/05 13:31:13 by zhongjy1992 ### ########.fr # 10 | # # 11 | # **************************************************************************** # 12 | import datetime 13 | import os 14 | 15 | from QUANTAXIS.QAUtil.QADate_trade import QA_util_if_trade 16 | from QUANTAXIS.QAUtil.QAParameter import MARKET_TYPE 17 | from joblib import Parallel, delayed 18 | from pandas import concat, date_range, DataFrame, DatetimeIndex 19 | 20 | 21 | def create_empty_stock_df(code, date: datetime.datetime = None, frequency=1): 22 | """ 23 | 创建空K线表并填0 24 | :param code: 25 | :param date: 26 | :param frequency: 27 | :return: 28 | """ 29 | if isinstance(code, list): 30 | return 31 | code = fill_stock_code(code) 32 | cur_date = datetime.datetime.now() if date is None else date 33 | cur_day = cur_date.isoformat()[:10] 34 | # TODO confirm , 9:31 - 11:29, 13:00 - 15:00 35 | # morning = date_range('%s 9:31' % cur_day, periods=119, freq='T').to_list() 36 | morning = date_range('%s 9:30' % cur_day, end='%s 11:30' % cur_day, freq='%sT' % frequency).to_list()[1:] 37 | # afternoon = date_range('%s 13:00' % cur_day, periods=121, freq='T').to_list() 38 | afternoon = date_range('%s 13:00' % cur_day, end='%s 15:00' % cur_day, freq='%sT' % frequency).to_list()[1:] 39 | # if frequency == 1: 40 | # # 1min remove 11:30 41 | # morning = morning[:-1] 42 | # elif frequency in [15, 30, 60, 120]: 43 | # # remove 13:00 44 | # afternoon = afternoon[1:] 45 | morning.extend(afternoon) 46 | # datetime type is Timestamp('2019-10-24 13:00:00', freq='1T') 47 | df = DataFrame({'datetime': morning}) 48 | df['code'] = code 49 | df['open'] = 0 50 | df['high'] = 0 51 | df['low'] = 0 52 | df['close'] = 0 53 | df['vol'] = 0 54 | df['amount'] = 0 55 | # df['year'] = cur_date.year 56 | # df['month'] = cur_date.month 57 | # df['day'] = cur_date.day 58 | # hour , minute, month 59 | return df.set_index(['datetime', 'code']) 60 | 61 | 62 | def fill_stock_code(data): 63 | """ 64 | 深市代码不足6位补0 65 | :param data: 66 | :return: 67 | """ 68 | if not isinstance(data, str): 69 | data = str(data) 70 | length = len(data) 71 | if length < 6: 72 | return "0" * (6 - length) + data 73 | return data 74 | 75 | 76 | def get_file_name_by_date(filename='stock.%s.log', log_dir='./log'): 77 | """ 78 | 返回填充日期的文件名 79 | :param filename: 80 | :param log_dir: 81 | :return: 82 | """ 83 | _filename = filename % datetime.datetime.today().isoformat()[:10] 84 | if log_dir is None: 85 | return _filename 86 | else: 87 | if not 
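The helpers above are easy to exercise on their own. A small sketch, assuming the module is importable as QARealtimeCollector.utils.common (the package layout of this repo); the code and date are arbitrary:

```python
# Illustrative use of fill_stock_code and create_empty_stock_df.
import datetime

from QARealtimeCollector.utils.common import create_empty_stock_df, fill_stock_code

print(fill_stock_code(7))  # '000007': Shenzhen codes are zero-padded to six digits

# zero-filled 1-minute OHLCV skeleton for one trading day, indexed by (datetime, code)
frame = create_empty_stock_df('000001', date=datetime.datetime(2020, 3, 6))
print(frame.head())
```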
os.path.exists(log_dir): 88 | os.system('mkdir -p %s' % log_dir) 89 | return os.path.join(log_dir, _filename) 90 | 91 | 92 | def logging_csv(df, filename, float_format='%.3f', index=False, mode='a'): 93 | """ 94 | dataframe 输出为csv 格式,追加模型 95 | :param df: pd.DataFrame 96 | :param filename: 97 | :param float_format: 98 | :param index: 99 | :param mode: a/w, a+/w+, 100 | :return: 101 | """ 102 | if os.path.exists(filename): 103 | df.to_csv(filename, float_format=float_format, index=index, mode=mode, header=False) 104 | else: 105 | df.to_csv(filename, float_format=float_format, index=index, mode=mode, header=True) 106 | 107 | 108 | def tdx_bar_data_stock_resample(min_data, period=5): 109 | """ 110 | 1min 分钟线采样成 1,5,15,30,60,120 级别的分钟线 111 | TODO 240时间戳有问题 112 | :param min_data: 113 | :param period: 114 | :return: 115 | """ 116 | min_data = min_data.reset_index() 117 | if 'datetime' not in min_data.columns: 118 | return None 119 | 120 | if isinstance(period, float): 121 | period = int(period) 122 | elif isinstance(period, str): 123 | period = int(period.replace('min', '')) 124 | elif isinstance(period, int): 125 | pass 126 | _period = '%sT' % period 127 | # TODO 确认时间格式 yyyy-mm-dd HH:MM:SS 128 | # min_data.datetime = min_data.datetime.apply(datetime.datetime.fromisoformat) 129 | min_data = min_data.set_index('datetime') 130 | # 9:30 - 11:30 131 | min_data_morning = min_data.loc[datetime.time(9, 30):datetime.time(11, 30)] 132 | min_data_morning.index = DatetimeIndex(min_data_morning.index).to_period('T') 133 | # 13:00 - 15:00 134 | min_data_afternoon = min_data.loc[datetime.time(13, 00):datetime.time(15, 00)] 135 | min_data_afternoon.index = DatetimeIndex(min_data_afternoon.index).to_period('T') 136 | 137 | _conversion = { 138 | 'code' : 'first', 139 | 'open' : 'first', 140 | 'high' : 'max', 141 | 'low' : 'min', 142 | 'close': 'last', 143 | } 144 | if 'vol' in min_data.columns: 145 | _conversion["vol"] = "sum" 146 | elif 'volume' in min_data.columns: 147 | _conversion["volume"] = "sum" 148 | if 'amount' in min_data.columns: 149 | _conversion['amount'] = 'sum' 150 | _base = 0 151 | if period > 60: 152 | _base = 60 153 | res = concat([ 154 | min_data_morning.resample( 155 | _period, label="right", closed="right", kind="period", loffset="0min", base=30 + _base).apply( 156 | _conversion), 157 | min_data_afternoon.resample( 158 | _period, label="right", closed="right", kind="period", loffset="0min", base=_base).apply( 159 | _conversion) 160 | ]) 161 | return res.dropna().reset_index().set_index(["datetime", "code"]).sort_index() 162 | 163 | 164 | def tdx_bar_data_stock_resample_parallel(min_data, period=5): 165 | """ 166 | 1min 分钟线采样成 1,5,15,30,60,120 级别的分钟线 167 | TODO 240时间戳有问题 168 | :param min_data: 169 | :param period: 170 | :return: 171 | """ 172 | min_data = min_data.reset_index() 173 | if 'datetime' not in min_data.columns: 174 | return None 175 | 176 | if isinstance(period, float): 177 | period = int(period) 178 | elif isinstance(period, str): 179 | period = int(period.replace('min', '')) 180 | elif isinstance(period, int): 181 | pass 182 | _period = '%sT' % period 183 | # TODO 确认时间格式 yyyy-mm-dd HH:MM:SS 184 | # min_data.datetime = min_data.datetime.apply(datetime.datetime.fromisoformat) 185 | min_data = min_data.set_index('datetime') 186 | # 9:30 - 11:30 187 | min_data_morning = min_data.loc[datetime.time(9, 30):datetime.time(11, 30)] 188 | min_data_morning.index = DatetimeIndex(min_data_morning.index).to_period('T') 189 | # 13:00 - 15:00 190 | min_data_afternoon = 
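tdx_bar_data_stock_resample() takes a 1-minute frame with datetime/code/OHLCV columns and aggregates it onto the requested period, keeping the morning and afternoon sessions separate. A toy sketch, again assuming the pandas version this project targets (it relies on the base/loffset resample arguments used above); all values are made up:

```python
# Toy 1-minute bars for a single code, resampled to 5-minute bars.
import pandas as pd

from QARealtimeCollector.utils.common import tdx_bar_data_stock_resample

minutes = pd.date_range('2020-03-06 09:31', periods=15, freq='1T').append(
    pd.date_range('2020-03-06 13:01', periods=15, freq='1T'))
one_min = pd.DataFrame({
    'datetime': minutes,
    'code': '000001',
    'open': 10.0, 'high': 10.1, 'low': 9.9, 'close': 10.05,
    'vol': 1000, 'amount': 10000,
})

five_min = tdx_bar_data_stock_resample(one_min, period=5)
print(five_min)  # OHLCV aggregated per 5-minute bucket, indexed by (datetime, code)
```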
min_data.loc[datetime.time(13, 00):datetime.time(15, 00)] 191 | min_data_afternoon.index = DatetimeIndex(min_data_afternoon.index).to_period('T') 192 | 193 | _conversion = { 194 | 'code' : 'first', 195 | 'open' : 'first', 196 | 'high' : 'max', 197 | 'low' : 'min', 198 | 'close': 'last', 199 | } 200 | if 'vol' in min_data.columns: 201 | _conversion["vol"] = "sum" 202 | elif 'volume' in min_data.columns: 203 | _conversion["volume"] = "sum" 204 | if 'amount' in min_data.columns: 205 | _conversion['amount'] = 'sum' 206 | _base = 0 207 | if period > 60: 208 | _base = 60 209 | return [ 210 | min_data_morning.resample( 211 | _period, label="right", closed="right", kind="period", loffset="0min", base=30 + _base).apply( 212 | _conversion), 213 | min_data_afternoon.resample( 214 | _period, label="right", closed="right", kind="period", loffset="0min", base=_base).apply( 215 | _conversion) 216 | ] 217 | 218 | 219 | def pandas_apply_parallel(df_grouped, func, period: int or str, jobs: int = 2): 220 | ret_lst = Parallel(n_jobs=jobs)(delayed(func)(group, period) for name, group in df_grouped) 221 | ret = [] 222 | for i in ret_lst: 223 | ret.extend(i) 224 | return concat(ret).dropna().reset_index().set_index(["datetime", "code"]).sort_index() 225 | 226 | 227 | def tdx_stock_bar_resample_parallel(data, frequency: int or str = "5min", jobs: int = 2): 228 | return pandas_apply_parallel( 229 | data.reset_index().groupby('code'), tdx_bar_data_stock_resample_parallel, frequency, jobs) 230 | 231 | 232 | def util_is_trade_time( 233 | _time=datetime.datetime.now(), 234 | market=MARKET_TYPE.STOCK_CN, 235 | code=None 236 | ): 237 | """判断当前是否为交易时间""" 238 | date_today = _time.isoformat()[0:10] 239 | if market is MARKET_TYPE.STOCK_CN: 240 | if QA_util_if_trade(date_today): 241 | if _time.hour in [10, 13, 14]: 242 | return True 243 | elif _time.hour == 9 and _time.minute >= 15: # 修改成9:15 加入 9:15-9:30的盘前竞价时间 244 | return True 245 | elif _time.hour == 11 and _time.minute <= 32: # 11:30 -> 11:31 也刷新数据 246 | return True 247 | # elif _time.hour == 12 and _time.minute >= 58: # 12:58 - 13:00 也刷新数据 248 | # return True 249 | elif _time.hour == 15 and _time.minute <= 2: # 15:00 - 15:02 也刷新数据 250 | return True 251 | else: 252 | return False 253 | else: 254 | return False 255 | elif market is MARKET_TYPE.FUTURE_CN: 256 | date_yesterday = str((_time - datetime.timedelta(days=1)).date()) 257 | 258 | is_today_open = QA_util_if_trade(date_today) 259 | is_yesterday_open = QA_util_if_trade(date_yesterday) 260 | 261 | # 考虑周六日的期货夜盘情况 262 | if is_today_open == False: # 可能是周六或者周日 263 | if is_yesterday_open == False or (_time.hour > 2 or _time.hour == 2 and _time.minute > 30): 264 | return False 265 | 266 | shortName = "" # i , p 267 | for i in range(len(code)): 268 | ch = code[i] 269 | if ch.isdigit(): # ch >= 48 and ch <= 57: 270 | break 271 | shortName += code[i].upper() 272 | 273 | period = [ 274 | [9, 0, 10, 15], 275 | [10, 30, 11, 30], 276 | [13, 30, 15, 0] 277 | ] 278 | 279 | if (shortName in ["IH", 'IF', 'IC']): 280 | period = [ 281 | [9, 30, 11, 30], 282 | [13, 0, 15, 0] 283 | ] 284 | elif (shortName in ["T", "TF"]): 285 | period = [ 286 | [9, 15, 11, 30], 287 | [13, 0, 15, 15] 288 | ] 289 | 290 | if 0 <= _time.weekday <= 4: 291 | for i in range(len(period)): 292 | p = period[i] 293 | if ((_time.hour > p[0] or (_time.hour == p[0] and _time.minute >= p[1])) and ( 294 | _time.hour < p[2] or (_time.hour == p[2] and _time.minute < p[3]))): 295 | return True 296 | 297 | # 最新夜盘时间表_2019.03.29 298 | nperiod = [ 299 | [ 300 | ['AU', 'AG', 
'SC'], 301 | [21, 0, 2, 30] 302 | ], 303 | [ 304 | ['CU', 'AL', 'ZN', 'PB', 'SN', 'NI'], 305 | [21, 0, 1, 0] 306 | ], 307 | [ 308 | ['RU', 'RB', 'HC', 'BU', 'FU', 'SP'], 309 | [21, 0, 23, 0] 310 | ], 311 | [ 312 | ['A', 'B', 'Y', 'M', 'JM', 'J', 'P', 'I', 'L', 'V', 'PP', 'EG', 'C', 'CS'], 313 | [21, 0, 23, 0] 314 | ], 315 | [ 316 | ['SR', 'CF', 'RM', 'MA', 'TA', 'ZC', 'FG', 'IO', 'CY'], 317 | [21, 0, 23, 30] 318 | ], 319 | ] 320 | 321 | for i in range(len(nperiod)): 322 | for j in range(len(nperiod[i][0])): 323 | if nperiod[i][0][j] == shortName: 324 | p = nperiod[i][1] 325 | condA = _time.hour > p[0] or (_time.hour == p[0] and _time.minute >= p[1]) 326 | condB = _time.hour < p[2] or (_time.hour == p[2] and _time.minute < p[3]) 327 | # in one day 328 | if p[2] >= p[0]: 329 | if ((_time.weekday >= 0 and _time.weekday <= 4) and condA and condB): 330 | return True 331 | else: 332 | if (((_time.weekday >= 0 and _time.weekday <= 4) and condA) or ( 333 | (_time.weekday >= 1 and _time.weekday <= 5) and condB)): 334 | return True 335 | return False 336 | return False 337 | -------------------------------------------------------------------------------- /QARealtimeCollector/utils/logconf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # The MIT License (MIT) 3 | # 4 | # Copyright 2019 zhongjy 5 | # 6 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this software 7 | # and associated documentation files (the "Software"), to deal in the Software without 8 | # restriction, including without limitation the rights to use, copy, modify, merge, publish, 9 | # distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the 10 | # Software is furnished to do so, subject to the following conditions: 11 | # 12 | # The above copyright notice and this permission notice shall be included in all copies or 13 | # substantial portions of the Software. 14 | # 15 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | # THE SOFTWARE. 
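For many codes at once, the parallel variant above splits the frame by code and fans the per-code resample out over joblib workers, and util_is_trade_time() is the gate QARTCStockBarResampler uses to decide when to bother. A sketch with two made-up codes, under the same pandas assumptions as the previous sketch:

```python
# Multi-code parallel resample plus the trading-time check; data is synthetic.
import datetime

import pandas as pd

from QARealtimeCollector.utils.common import (tdx_stock_bar_resample_parallel,
                                              util_is_trade_time)

minutes = pd.date_range('2020-03-06 09:31', periods=15, freq='1T').append(
    pd.date_range('2020-03-06 13:01', periods=15, freq='1T'))
frames = [
    pd.DataFrame({'datetime': minutes, 'code': code,
                  'open': 10.0, 'high': 10.1, 'low': 9.9, 'close': 10.05,
                  'vol': 1000, 'amount': 10000})
    for code in ('000001', '600000')
]
market_data = pd.concat(frames).set_index(['datetime', 'code'])

print(util_is_trade_time(datetime.datetime.now()))  # True only during A-share trading hours

bars = tdx_stock_bar_resample_parallel(market_data, frequency='5min', jobs=2)
print(bars.tail())
```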
22 | 23 | # **************************************************************************** # 24 | # # 25 | # ::: :::::::: # 26 | # logconf.py :+: :+: :+: # 27 | # +:+ +:+ +:+ # 28 | # By: zhongjy1992 +#+ +:+ +#+ # 29 | # +#+#+#+#+#+ +#+ # 30 | # Created: 2019/05/03 02:33:45 by zhongjy1992 #+# #+# # 31 | # Updated: 2019/11/10 20:55:34 by zhongjy1992 ### ########.fr # 32 | # # 33 | # **************************************************************************** # 34 | import os 35 | import logging.config 36 | 37 | # 其中name为getlogger指定的名字 38 | standard_format = '[%(asctime)s][%(threadName)s:%(thread)d][task_id:%(name)s][%(filename)s:%(lineno)d][%(levelname)s][%(message)s]' 39 | 40 | simple_format = '[%(levelname)s][%(asctime)s][%(filename)s:%(lineno)d]%(message)s' 41 | 42 | id_simple_format = '[%(levelname)s][%(asctime)s] %(message)s' 43 | 44 | # logfile_dir = os.path.dirname(os.path.abspath(__file__)) 45 | logfile_dir = os.getcwd() 46 | 47 | logfile_name = 'test.log' 48 | 49 | if not os.path.isdir(logfile_dir): 50 | os.mkdir(logfile_dir) 51 | 52 | # logfile_path = os.path.join(logfile_dir, logfile_name) 53 | 54 | 55 | def getLoggingConfigDict(filepath): 56 | return { 57 | 'version' : 1, 58 | 'disable_existing_loggers': False, 59 | 'formatters' : { 60 | 'verbose' : { 61 | 'format' : "[%(asctime)s] %(levelname)s [%(filename)s:%(lineno)s] %(message)s", 62 | 'datefmt': "%Y-%m-%d %H:%M:%S" 63 | }, 64 | 'simple' : { 65 | 'format': '%(levelname)s %(message)s' 66 | }, 67 | 'standard': { 68 | 'format': standard_format 69 | }, 70 | }, 71 | 'handlers' : { 72 | 'null' : { 73 | 'level': 'DEBUG', 74 | 'class': 'logging.NullHandler', 75 | }, 76 | 'console': { 77 | 'level' : 'INFO', 78 | 'class' : 'logging.StreamHandler', 79 | 'formatter': 'verbose' 80 | }, 81 | 'file' : { 82 | 'level' : 'INFO', 83 | 'class' : 'logging.handlers.RotatingFileHandler', 84 | # 当达到100MB时分割日志 85 | 'maxBytes' : 1024 * 1 * 100, 86 | # 最多保留50份文件 87 | 'backupCount': 0, 88 | # If delay is true, then file opening is deferred until the first call to emit(). 
89 | 'delay' : True, 90 | 'filename' : filepath, 91 | 'formatter' : 'verbose', 92 | 'encoding' : 'utf-8' 93 | }, 94 | 'file2' : { 95 | 'level' : 'INFO', 96 | 'class' : 'logging.handlers.TimedRotatingFileHandler', 97 | 'formatter' : 'verbose', 98 | 'encoding' : 'utf-8', 99 | 'utc' : False, 100 | 'filename' : filepath, 101 | 'when' : 'h', 102 | 'interval' : 1, 103 | 'backupCount': 0, 104 | # 'delay': True, 105 | } 106 | }, 107 | 'loggers' : { 108 | '': { 109 | 'handlers': ['console', 'file2'], 110 | 'level' : 'INFO', 111 | # 向上(更高level的logger)传递 112 | # 'propagate': True, 113 | }, 114 | } 115 | } 116 | 117 | 118 | def update_log_file_config(logfilepath:str): 119 | """ 120 | 更新日志文件路径 121 | :param logfilepath: logfile.log, ./log/logfile.log, /tmp/logifle.log 122 | :return: 123 | """ 124 | root = os.getcwd() 125 | logfile_dir = os.path.join(root, './log') 126 | if logfilepath.startswith('/'): 127 | logfile_dir = '/'.join(logfilepath.split('/')[:-1]) 128 | elif logfilepath.startswith('./'): 129 | logfile_dir = os.path.join(root, '/'.join(logfilepath.split('/')[:-1])) 130 | else: 131 | logfilepath = os.path.join(logfile_dir, logfilepath) 132 | 133 | logfile_dir = os.path.abspath(logfile_dir) 134 | if not os.path.exists(logfile_dir): 135 | os.system('mkdir -p ' + logfile_dir) 136 | 137 | return getLoggingConfigDict(logfilepath) 138 | 139 | 140 | if __name__ == '__main__': 141 | import logging 142 | 143 | # 导入上面定义的logging配置 144 | logging.config.dictConfig(getLoggingConfigDict(logfile_name)) 145 | # 生成一个log实例 146 | logger = logging.getLogger(__name__) 147 | # 记录该文件的运行状态 148 | for i in range(1, 1000000): 149 | logger.info('It works!') 150 | -------------------------------------------------------------------------------- /QARealtimeCollector/webserver.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import os 3 | import sys 4 | import threading 5 | import json 6 | import tornado 7 | from tornado.options import (define, options, parse_command_line, 8 | parse_config_file) 9 | from tornado.web import Application, RequestHandler, authenticated 10 | 11 | import QUANTAXIS as QA 12 | from QAPUBSUB.producer import publisher, publisher_routing 13 | from QARealtimeCollector.collectors import (QARTC_CtpBeeCollector, 14 | QARTC_CTPTickCollector, 15 | QARTC_RandomTick, QARTC_Stock, 16 | QARTC_WsCollector) 17 | from QARealtimeCollector.datahandler import QARTC_Resampler 18 | from QARealtimeCollector.setting import eventmq_ip 19 | from QAWebServer import QABaseHandler, QAWebSocketHandler 20 | 21 | 22 | class SUBSCRIBE_SERVER(QABaseHandler): 23 | handler = {'stock_cn': {}, 'future_cn': {}} 24 | resampler = {'stock_cn': {}, 'future_cn': {}} 25 | 26 | def get(self): 27 | action = self.get_argument('action') 28 | if action == 'get_current_handler': 29 | print(self.handler) 30 | self.write({'result': 31 | {'stock_cn': list(self.handler['stock_cn'].keys()), 32 | 'future_cn': list(self.handler['future_cn'].keys())}}) 33 | elif action == 'get_current_resampler': 34 | print(self.resampler) 35 | self.write({'result': 36 | {'stock_cn': list(self.resampler['stock_cn'].keys()), 37 | 'future_cn': list(self.resampler['future_cn'].keys())}}) 38 | 39 | def post(self): 40 | action = self.get_argument('action') 41 | market_type = self.get_argument('market_type') 42 | code = self.get_argument('code') 43 | if action == 'new_handler': 44 | 45 | if code not in self.handler.keys(): 46 | if market_type == 'future_cn': 47 | self.handler[market_type][code] = QARTC_CtpBeeCollector( 48 
| code) 49 | self.handler[market_type][code].start() 50 | self.write({'result': 'success'}) 51 | else: 52 | publisher_routing(host=eventmq_ip, exchange='QARealtime_Market', routing_key='stock').pub(json.dumps({ 53 | 'topic': 'subscribe', 54 | 'code': code 55 | }), routing_key='stock') 56 | self.handler[market_type][code] = True 57 | self.write({'result': 'success'}) 58 | 59 | else: 60 | self.write({'result': 'already exist'}) 61 | elif action == 'new_resampler': 62 | frequence = self.get_argument('frequence') 63 | if (code, frequence) not in self.resampler.keys(): 64 | if market_type == 'future_cn': 65 | self.resampler[market_type][(code, frequence)] = QARTC_Resampler( 66 | code, frequence) 67 | self.resampler[market_type][(code, frequence)].start() 68 | self.write({'result': 'success'}) 69 | else: 70 | pass 71 | else: 72 | self.write({'result': 'already exist'}) 73 | 74 | 75 | handlers = [ 76 | (r"/", 77 | SUBSCRIBE_SERVER) 78 | ] 79 | 80 | 81 | def main(): 82 | asyncio.set_event_loop(asyncio.new_event_loop()) 83 | define("port", default=8011, type=int, help="服务器监听端口号") 84 | 85 | define("address", default='0.0.0.0', type=str, help='服务器地址') 86 | define("content", default=[], type=str, multiple=True, help="控制台输出内容") 87 | parse_command_line() 88 | apps = Application( 89 | handlers=handlers, 90 | debug=True, 91 | autoreload=True, 92 | compress_response=True 93 | ) 94 | port = options.port 95 | 96 | # stock_coll = QARTC_Stock(username='quantaxis', password='quantaxis') 97 | 98 | # threading.Thread(target=) 99 | 100 | http_server = tornado.httpserver.HTTPServer(apps) 101 | http_server.bind(port=options.port, address=options.address) 102 | """增加了对于非windows下的机器多进程的支持 103 | """ 104 | http_server.start(1) 105 | tornado.ioloop.IOLoop.current().start() 106 | 107 | 108 | if __name__ == '__main__': 109 | main() 110 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # QAREALTIMEMARKETCOLLECTOR 2 | 3 | > quantaxis 实时行情采集/分发。本项目从QAREALTIME_SOLUTION 拆出来 4 | 5 | - 流程 6 | 7 | 策略订阅(行情端) --> 基于QATrader的账户信息 --> 下单到EventMQ 业务总线 8 | 9 | ![image.png](http://pic.yutiansut.com/Flzp4Cr6qSDZmkd9H43-Quugm5oO) 10 | 11 | ## 数据 12 | 13 | > 期货源 14 | 15 | - 快期的5挡websocket行情 16 | - ctp直连的tick 17 | 18 | > 股票源 19 | 20 | - 股票的5挡行情订阅推送(包括A股/指数/创业板) 21 | 22 | > RabbitMQ 23 | 24 | - 可订阅的实时tick exchange 25 | - 可订阅的实时bar exchange 26 | 27 | ## 安装 28 | 29 | ```bash 30 | pip install qarealtime_collector 31 | # ubuntu install rabbitmq-server 32 | sudo apt install rabbitmq-server -y 33 | sudo rabbitmq-plugins enable rabbitmq_management 34 | sudo service rabbitmq-server restart 35 | # 为QAREALTIMEMARKETCOLLECTOR增加一个rabbitmq的账户 36 | sudo rabbitmqctl add_user admin admin 37 | # 用户组具体请根据需要调整 38 | sudo rabbitmqctl set_user_tags admin administrator 39 | # 用户权具体请根据需要调整 40 | sudo rabbitmqctl set_permissions -p / admin '.*' '.*' '.*' 41 | # 期货需要安装QACTPBEE、QACTPBeeBroker 42 | pip install ctpbee==0.24 43 | pip install git+https://github.com/yutiansut/QACTPBeeBroker 44 | sudo locale-gen zh_CN.GB18030 45 | ``` 46 | 47 | ## 系统环境 48 | 49 | > 为了适配 QA_Service的docker采用了以下默认参数(若非docker请按实际情况修改QARealtimeCollector/setting.py) 50 | 51 | ```python 52 | mongo_ip = os.environ.get('MONGODB', '127.0.0.1') 53 | eventmq_ip = os.environ.get('EventMQ_IP', '127.0.0.1') 54 | market_data_user = 'admin' 55 | market_data_password = 'admin' 56 | ``` 57 | 58 | ## 启动服务 59 | 60 | - realtime 61 | 62 | ```bash 63 | QARC_WEBSERVER 64 | # 股票 65 | 
QARC_Stock 66 | # 期货 67 | nohup QACTPBEE --userid 133496 >> ./output_ctpbee.log 2>&1 & 68 | # 缺少说明 69 | nohup QARC_Start --code rb1910 >> ./output_qarcCollect.log 2>&1 & 70 | # 缺少说明 71 | nohup QARC_Resample --code rb1910 --freq 60min >> ./output_resample.log 2>&1 & 72 | ``` 73 | 74 | - 虚拟行情 75 | 76 | ```bash 77 | QARC_WEBSERVER 78 | # 虚拟行情测试, 切记: 此命令会污染实时行情源, 切记不能和实时行情同时运行 79 | # price是设定的初始价格, 会基于ou行情伪造实时tick 80 | # interval是tick间隔, 1 指的是1秒一个 81 | nohup QARC_Random --code rb1910 --date 20190619 --price 3800 --interval 1 82 | ``` 83 | 84 | ## 关于订阅申请 85 | 86 | 此环节已经被docker集成, 具体参见QUANTAXIS的 qaservice [https://github.com/QUANTAXIS/QUANTAXIS/tree/master/docker/qa-service-future] 87 | 88 | > 标准化订阅topic合约流程: 89 | 90 | - 1.发起订阅请求 91 | 92 | ```bash 93 | # 期货订阅请求 94 | curl -X POST "http://127.0.0.1:8011?action=new_handler&market_type=future_cn&code=au1911" 95 | ```bash 96 | # 股票订阅请求 97 | curl -X POST "http://127.0.0.1:8011?action=new_handler&market_type=stock_cn&code=000001" 98 | # 二次采样请求 99 | curl -X POST "http://127.0.0.1:8011?action=new_resampler&market_type=future_cn&code=au1911&frequence=2min" 100 | ``` 101 | 102 | - 2.开始订阅数据 103 | 104 | ```bash 105 | # eventmq: 股票的主推的 exchange 为 stocktransction 106 | qaps_sub --exchange stocktransaction --model fanout 107 | ``` 108 | 109 | - 3.取消订阅(系统释放资源) 110 | 111 | ## EXCHANGE格式 112 | 113 | > 格式: $type_$freq_$code 114 | 115 | - example: realtime_1min_rb1910, bar_15min_jm1909 116 | 117 | > 期货 118 | 119 | |key|value|comment| 120 | |:-|:-|:-| 121 | |type|realtime,bar|realtime就是在这个级别下的实时更新| 122 | |freq|1min,5min, 15min, 30min ,60min|周期| 123 | |code|rb1910,j1909, etc.|期货合约代码| 124 | 125 | > 股票 126 | 127 | |key|value|comment| 128 | |:-|:-|:-| 129 | |type|realtime,tick, bar|realtime就是在这个级别下的实时更新| 130 | |freq|1min,5min, 15min, 30min ,60min|周期| 131 | |code|000001,000002, etc.|股票代码| 132 | 133 | ## 数据格式 134 | 135 | - realtime 136 | 137 | |key|value_type|comment| 138 | |:-|:-|:-| 139 | |open|float|开盘价| 140 | |high|float|最高价| 141 | |low|float|最低价| 142 | |close|float|收盘价| 143 | |code|str|代码,UPPERCASE(大写)| 144 | |datetime|str|2019-08-16 09:25:00:500000| 145 | |volume|float|成交量| 146 | 147 | ```python 148 | { 149 | 'open': float, 150 | 'high': float, 151 | 'low': float, 152 | 'close': float, 153 | 'code': str, 154 | 'datetime': str '2019-08-16 09:25:00:500000' 155 | 'volume': float 156 | } 157 | ``` 158 | -------------------------------------------------------------------------------- /docker/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM daocloud.io/quantaxis/qactpbee:latest 2 | 3 | RUN cd /root \ 4 | && pip install simplejson \ 5 | && pip install https://github.com/yutiansut/tornado_http2/archive/master.zip\ 6 | && pip install tornado==5.1.1\ 7 | && pip install quantaxis-servicedetect\ 8 | && git clone https://github.com/yutiansut/QUANTAXIS_RealtimeCollector \ 9 | && cd /root/QUANTAXIS_RealtimeCollector && pip install -e . 
\ 10 | && chmod +x /root/QUANTAXIS_RealtimeCollector/docker/start_collector.sh \ 11 | && chmod +x /root/QUANTAXIS_RealtimeCollector/docker/wait_for_it.sh 12 | 13 | 14 | 15 | EXPOSE 8011 16 | CMD ["bash", "/root/QUANTAXIS_RealtimeCollector/docker/start_collector.sh"] 17 | -------------------------------------------------------------------------------- /docker/start_collector.sh: -------------------------------------------------------------------------------- 1 | QARC_WEBSERVER & 2 | QARC_Stock -------------------------------------------------------------------------------- /docker/wait_for_it.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Use this script to test if a given TCP host/port are available 3 | 4 | WAITFORIT_cmdname=${0##*/} 5 | 6 | echoerr() { if [[ $WAITFORIT_QUIET -ne 1 ]]; then echo "$@" 1>&2; fi } 7 | 8 | usage() 9 | { 10 | cat << USAGE >&2 11 | Usage: 12 | $WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args] 13 | -h HOST | --host=HOST Host or IP under test 14 | -p PORT | --port=PORT TCP port under test 15 | Alternatively, you specify the host and port as host:port 16 | -s | --strict Only execute subcommand if the test succeeds 17 | -q | --quiet Don't output any status messages 18 | -t TIMEOUT | --timeout=TIMEOUT 19 | Timeout in seconds, zero for no timeout 20 | -- COMMAND ARGS Execute command with args after the test finishes 21 | USAGE 22 | exit 1 23 | } 24 | 25 | wait_for() 26 | { 27 | if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then 28 | echoerr "$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT" 29 | else 30 | echoerr "$WAITFORIT_cmdname: waiting for $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout" 31 | fi 32 | WAITFORIT_start_ts=$(date +%s) 33 | while : 34 | do 35 | if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then 36 | nc -z $WAITFORIT_HOST $WAITFORIT_PORT 37 | WAITFORIT_result=$? 38 | else 39 | (echo > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1 40 | WAITFORIT_result=$? 41 | fi 42 | if [[ $WAITFORIT_result -eq 0 ]]; then 43 | WAITFORIT_end_ts=$(date +%s) 44 | echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) seconds" 45 | break 46 | fi 47 | sleep 1 48 | done 49 | return $WAITFORIT_result 50 | } 51 | 52 | wait_for_wrapper() 53 | { 54 | # In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692 55 | if [[ $WAITFORIT_QUIET -eq 1 ]]; then 56 | timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --quiet --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT & 57 | else 58 | timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT & 59 | fi 60 | WAITFORIT_PID=$! 61 | trap "kill -INT -$WAITFORIT_PID" INT 62 | wait $WAITFORIT_PID 63 | WAITFORIT_RESULT=$? 
64 | if [[ $WAITFORIT_RESULT -ne 0 ]]; then 65 | echoerr "$WAITFORIT_cmdname: timeout occurred after waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT" 66 | fi 67 | return $WAITFORIT_RESULT 68 | } 69 | 70 | # process arguments 71 | while [[ $# -gt 0 ]] 72 | do 73 | case "$1" in 74 | *:* ) 75 | WAITFORIT_hostport=(${1//:/ }) 76 | WAITFORIT_HOST=${WAITFORIT_hostport[0]} 77 | WAITFORIT_PORT=${WAITFORIT_hostport[1]} 78 | shift 1 79 | ;; 80 | --child) 81 | WAITFORIT_CHILD=1 82 | shift 1 83 | ;; 84 | -q | --quiet) 85 | WAITFORIT_QUIET=1 86 | shift 1 87 | ;; 88 | -s | --strict) 89 | WAITFORIT_STRICT=1 90 | shift 1 91 | ;; 92 | -h) 93 | WAITFORIT_HOST="$2" 94 | if [[ $WAITFORIT_HOST == "" ]]; then break; fi 95 | shift 2 96 | ;; 97 | --host=*) 98 | WAITFORIT_HOST="${1#*=}" 99 | shift 1 100 | ;; 101 | -p) 102 | WAITFORIT_PORT="$2" 103 | if [[ $WAITFORIT_PORT == "" ]]; then break; fi 104 | shift 2 105 | ;; 106 | --port=*) 107 | WAITFORIT_PORT="${1#*=}" 108 | shift 1 109 | ;; 110 | -t) 111 | WAITFORIT_TIMEOUT="$2" 112 | if [[ $WAITFORIT_TIMEOUT == "" ]]; then break; fi 113 | shift 2 114 | ;; 115 | --timeout=*) 116 | WAITFORIT_TIMEOUT="${1#*=}" 117 | shift 1 118 | ;; 119 | --) 120 | shift 121 | WAITFORIT_CLI=("$@") 122 | break 123 | ;; 124 | --help) 125 | usage 126 | ;; 127 | *) 128 | echoerr "Unknown argument: $1" 129 | usage 130 | ;; 131 | esac 132 | done 133 | 134 | if [[ "$WAITFORIT_HOST" == "" || "$WAITFORIT_PORT" == "" ]]; then 135 | echoerr "Error: you need to provide a host and port to test." 136 | usage 137 | fi 138 | 139 | WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-15} 140 | WAITFORIT_STRICT=${WAITFORIT_STRICT:-0} 141 | WAITFORIT_CHILD=${WAITFORIT_CHILD:-0} 142 | WAITFORIT_QUIET=${WAITFORIT_QUIET:-0} 143 | 144 | # check to see if timeout is from busybox? 145 | WAITFORIT_TIMEOUT_PATH=$(type -p timeout) 146 | WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH) 147 | if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then 148 | WAITFORIT_ISBUSY=1 149 | WAITFORIT_BUSYTIMEFLAG="-t" 150 | 151 | else 152 | WAITFORIT_ISBUSY=0 153 | WAITFORIT_BUSYTIMEFLAG="" 154 | fi 155 | 156 | if [[ $WAITFORIT_CHILD -gt 0 ]]; then 157 | wait_for 158 | WAITFORIT_RESULT=$? 159 | exit $WAITFORIT_RESULT 160 | else 161 | if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then 162 | wait_for_wrapper 163 | WAITFORIT_RESULT=$? 164 | else 165 | wait_for 166 | WAITFORIT_RESULT=$? 
167 | fi 168 | fi 169 | 170 | if [[ $WAITFORIT_CLI != "" ]]; then 171 | if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then 172 | echoerr "$WAITFORIT_cmdname: strict mode, refusing to execute subprocess" 173 | exit $WAITFORIT_RESULT 174 | fi 175 | exec "${WAITFORIT_CLI[@]}" 176 | else 177 | exit $WAITFORIT_RESULT 178 | fi -------------------------------------------------------------------------------- /example/stock_sub.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | import pandas as pd 4 | import threading 5 | from QAPUBSUB.consumer import subscriber 6 | from QAPUBSUB.producer import publisher_routing 7 | from QUANTAXIS.QAEngine import QA_Thread, QA_Task 8 | 9 | # 接受并分析 10 | 11 | 12 | class client(QA_Thread): 13 | 14 | def __init__(self): 15 | super().__init__(name='qasubclient') 16 | 17 | self.req = publisher_routing( 18 | exchange='QARealtime_Market', routing_key='stock') 19 | self.last_ab = pd.DataFrame() 20 | self.sub = subscriber(exchange='stocktransaction') 21 | self.sub.callback = self.callback 22 | 23 | def subscribe(self, code='000007'): 24 | req.pub(json.dumps({'topic': 'subscribe', 'code': code}), 25 | routing_key='stock') 26 | 27 | def callback(self, a, b, c, data): 28 | data = json.loads(data) 29 | 30 | data = pd.DataFrame(data).set_index(['code']).loc[:, [ 31 | 'ask_vol2', 'ask2', 'ask_vol1', 'ask1', 'price', 'bid1', 'bid_vol1', 'bid2', 'bid_vol2']] 32 | 33 | self.put({'topic': 'new_data', 34 | 'data': data}) 35 | 36 | def run(self): 37 | threading.Thread(target=self.sub.start, daemon=True).start() 38 | while True: 39 | try: 40 | jobs = self.queue.get_nowait() 41 | if jobs['topic'] == 'new_data': 42 | print(jobs['data'] - self.last_ab) 43 | 44 | self.last_ab = jobs['data'] 45 | 46 | except Exception as e: 47 | print(e) 48 | 49 | import time 50 | time.sleep(1) 51 | 52 | client().start() 53 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | quantaxis 2 | quantaxis_pubsub 3 | quantaxis_otgbroker 4 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | 2 | import codecs 3 | import io 4 | import os 5 | import re 6 | import sys 7 | import webbrowser 8 | import platform 9 | import configparser 10 | try: 11 | from setuptools import setup 12 | except: 13 | from distutils.core import setup 14 | """ 15 | """ 16 | 17 | if sys.version_info.major != 3 or sys.version_info.minor not in [4, 5, 6, 7, 8]: 18 | print('wrong version, should be 3.4/3.5/3.6/3.7/3.8 version') 19 | sys.exit() 20 | 21 | with io.open('QARealtimeCollector/__init__.py', 'rt', encoding='utf8') as f: 22 | context = f.read() 23 | VERSION = re.search(r'__version__ = \'(.*?)\'', context).group(1) 24 | AUTHOR = re.search(r'__author__ = \'(.*?)\'', context).group(1) 25 | 26 | 27 | def read(fname): 28 | 29 | return codecs.open(os.path.join(os.path.dirname(__file__), fname)).read() 30 | 31 | 32 | NAME = "qarealtime_collector" 33 | """ 34 | """ 35 | PACKAGES = ["QARealtimeCollector", "QARealtimeCollector.collectors", 36 | "QARealtimeCollector.clients", "QARealtimeCollector.datahandler"] 37 | """ 38 | """ 39 | 40 | DESCRIPTION = "QARealtimeCollector: QUANTAXIS REALTIME MARKETDATA COLLECTORS" 41 | 42 | 43 | KEYWORDS = ["quantaxis", "quant", "finance", "Backtest", 'Framework'] 44 | """ 45 | """ 46 | 47 | 
AUTHOR_EMAIL = "yutiansut@qq.com" 48 | 49 | URL = "https://github.com/yutiansut/QUANTAXIS_RealtimeCollector" 50 | 51 | 52 | LICENSE = "MIT" 53 | 54 | 55 | setup( 56 | name=NAME, 57 | version=VERSION, 58 | description=DESCRIPTION, 59 | long_description=DESCRIPTION, 60 | classifiers=[ 61 | 'License :: OSI Approved :: MIT License', 62 | 'Programming Language :: Python', 63 | 'Intended Audience :: Developers', 64 | 'Operating System :: OS Independent', 65 | ], 66 | install_requires=['quantaxis', 'quantaxis_pubsub', 67 | 'quantaxis-otgbroker', 'quantaxis-randomprice','quantaxis_webserver'], 68 | entry_points={ 69 | 'console_scripts': [ 70 | 'QARC_Start = QARealtimeCollector.__init__:start', 71 | 'QARC_Resample = QARealtimeCollector.__init__:resample', 72 | 'QARC_Random = QARealtimeCollector.__init__:random', 73 | 'QARC_Fast = QARealtimeCollector.__init__:faststart', 74 | 'QARC_CTP = QARealtimeCollector.__init__:start_ctp', 75 | 'QARC_Stock = QARealtimeCollector.__init__:stock_collector', 76 | 'QARC_WEBSERVER = QARealtimeCollector.webserver:main' 77 | ] 78 | }, 79 | # install_requires=requirements, 80 | keywords=KEYWORDS, 81 | author=AUTHOR, 82 | author_email=AUTHOR_EMAIL, 83 | url=URL, 84 | license=LICENSE, 85 | packages=PACKAGES, 86 | include_package_data=True, 87 | zip_safe=True 88 | ) 89 | --------------------------------------------------------------------------------