├── .gitignore
├── LICENSE
├── README.md
├── setup.py
└── simple
    ├── __init__.py
    ├── czsc
    │   ├── CzscModelEngine.py
    │   ├── CzscModelEngineFenbi.py
    │   ├── CzscModelEngineFenxing.py
    │   ├── CzscModelEngineHebing.py
    │   ├── CzscModelEngineXianduan.py
    │   ├── CzscModelEngineZhongshu.py
    │   ├── CzscModelEngineZhongshuXd.py
    │   ├── CzscRealtimeModelEngineFenbi.py
    │   ├── CzscRealtimeModelEngineXianduan.py
    │   ├── CzscRealtimeModelEngineZhongshu.py
    │   ├── Utils.py
    │   └── __init__.py
    ├── draw
    │   ├── HtmlAutoFit.py
    │   ├── KdrawGrid.py
    │   ├── KdrawRealtimeMultiPeriod.py
    │   └── __init__.py
    ├── factor
    │   ├── FactorAbstract.py
    │   ├── Macd.py
    │   ├── XianduanSanmai.py
    │   └── __init__.py
    ├── logger
    │   ├── __init__.py
    │   └── logger.py
    ├── pusher
    │   ├── __init__.py
    │   └── serverj.py
    └── requirements.txt

/.gitignore:
--------------------------------------------------------------------------------
1 | build/
2 | dist/
3 | *.egg-info/
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 | 
3 | Copyright (c) [2022-2023] [simple-trade]
4 | 
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 | 
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 | 
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Simple Chan Theory (缠论) Quantitative Toolkit
2 | 
3 | ## Introduction
4 | 
5 | Chan theory is only a tool; please do not deify it. Discovering objective regularities in the market is far more meaningful than chasing some ethereal "miracle theory".
6 | 
7 | This model is implemented in a simple, controllable way and does not match the original writings in every detail: a few adjustments were made so that the model can be derived step by step. The model is designed with zero parameters throughout, so there is no need to worry about which parameter values a strategy should use or whether it is overfitted.
8 | 
9 | Chan theory is a structural method of analysis, but everyone understands it differently; anyone with a programming background will quantify it in their own way, and no two implementations will be exactly alike, so there is no need to agonize over whether this one matches yours. Once a student of Chan theory gets tangled in details and chases ever more theory, precision and perfection, they are usually lost in it. Seen from another angle, Chan theory is a visual representation: a quantified display of the market's staged tops and bottoms and of its trading congestion zones. This model focuses on trading, and it implements a realtime model while guaranteeing that no future function (look-ahead) is used: watch closely and you will see that the last stroke (笔) and line segment (线段) are dynamic and change as the market evolves, while components that have been confirmed never change afterwards. This guarantees stable backtests, and strategies built on this model according to its rules show no drift.
10 | 
11 | This model is open-sourced to provide a parameter-free quantitative implementation that enthusiasts can keep exploring to build their own technical-analysis system. Knowing technical analysis alone is far from enough in the market, but for retail traders who lack the time to read research reports or do field research, technical analysis is still an effective way to interpret the market. Building a technical-analysis system means that, facing a seemingly chaotic market, you have your own language for reading it, so you can converse with it and keep improving; going further, you can form a trading strategy, know under which conditions your system loses and when it wins, and build a durable, adjustable trading framework on a probabilistic basis. I hope this helps you find your own methodology.
12 | 
13 | 
14 | ![Shanghai Composite Index](https://user-images.githubusercontent.com/104715342/166645675-89aff9e2-826d-47a8-aef6-2e3200098f2a.png)
15 | 
16 | Demo: http://simple-trade.cn:18188/  Account: guest/1
17 | 
18 | 
19 | ## Overview
20 | 
21 | This project implements Chan model computation, visualization, stock screening and strategy templates; some features are still under development.
22 | 
23 | ![Quant architecture - logical architecture](https://user-images.githubusercontent.com/104715342/166646137-5af0371d-3e2a-4776-86db-10450d251879.png)
24 | 
25 | ## Installation
26 | pip install simple-czsc==0.1.0
27 | 
28 | ## Strategy and Stock-Screening Reference Demo
29 | https://github.com/simple-trade/simple-czsc-demo
30 | 
31 | ## Contact
32 | ![WechatIMG9](https://user-images.githubusercontent.com/104715342/166646837-8c63702f-518a-463e-99af-c5c2c832cd5c.jpeg)
33 | 
34 | 
35 | 
36 | 
37 | 
38 | 
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | 
4 | # Note: To use the 'upload' functionality of this file, you must:
5 | #   $ pipenv install twine --dev
6 | 
7 | import io
8 | import os
9 | import sys
10 | from shutil import rmtree
11 | 
12 | from setuptools import find_packages, setup, Command
13 | 
14 | # Package meta-data.
15 | NAME = 'simple-czsc'
16 | DESCRIPTION = '缠论量化工具集'
17 | URL = 'https://github.com/simple-trade/simple-czsc'
18 | EMAIL = 'simple_czsc@163.com'
19 | AUTHOR = 'simple-trade'
20 | REQUIRES_PYTHON = '>=3.6.0'
21 | VERSION = '0.1.0'
22 | 
23 | # What packages are required for this module to be executed?
24 | REQUIRED = [
25 |     'numpy', 'pandas', 'pyecharts', 'requests', 'selenium', 'snapshot_selenium', 'TA_Lib'
26 | ]
27 | 
28 | # What packages are optional?
29 | EXTRAS = {
30 |     # 'fancy feature': ['django'],
31 | }
32 | 
33 | # The rest you shouldn't have to touch too much :)
34 | # ------------------------------------------------
35 | # Except, perhaps the License and Trove Classifiers!
36 | # If you do change the License, remember to change the Trove Classifier for that!
37 | 
38 | here = os.path.abspath(os.path.dirname(__file__))
39 | 
40 | # Import the README and use it as the long-description.
41 | # Note: this will only work if 'README.md' is present in your MANIFEST.in file!
42 | try:
43 |     with io.open(os.path.join(here, 'README.md'), encoding='utf-8') as f:
44 |         long_description = '\n' + f.read()
45 | except FileNotFoundError:
46 |     long_description = DESCRIPTION
47 | 
48 | # Load the package's __version__.py module as a dictionary.
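# Illustration only: if VERSION above were left empty, the fallback below would
# derive project_slug from NAME ("simple_czsc") and exec() the file
# <project_slug>/__version__.py, which would need to contain a single assignment,
# e.g. (hypothetical file, not shipped in this repo):
#
#     __version__ = '0.1.0'
#
# VERSION is set explicitly here, so that branch is never taken.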
49 | about = {} 50 | if not VERSION: 51 | project_slug = NAME.lower().replace("-", "_").replace(" ", "_") 52 | with open(os.path.join(here, project_slug, '__version__.py')) as f: 53 | exec(f.read(), about) 54 | else: 55 | about['__version__'] = VERSION 56 | 57 | 58 | class UploadCommand(Command): 59 | """Support setup.py upload.""" 60 | 61 | description = 'Build and publish the package.' 62 | user_options = [] 63 | 64 | @staticmethod 65 | def status(s): 66 | """Prints things in bold.""" 67 | print('\033[1m{0}\033[0m'.format(s)) 68 | 69 | def initialize_options(self): 70 | pass 71 | 72 | def finalize_options(self): 73 | pass 74 | 75 | def run(self): 76 | try: 77 | self.status('Removing previous builds…') 78 | rmtree(os.path.join(here, 'dist')) 79 | except OSError: 80 | pass 81 | 82 | self.status('Building Source and Wheel (universal) distribution…') 83 | os.system('{0} setup.py sdist bdist_wheel --universal'.format(sys.executable)) 84 | 85 | self.status('Uploading the package to PyPI via Twine…') 86 | os.system('twine upload dist/*') 87 | 88 | self.status('Pushing git tags…') 89 | os.system('git tag v{0}'.format(about['__version__'])) 90 | os.system('git push --tags') 91 | 92 | sys.exit() 93 | 94 | 95 | # Where the magic happens: 96 | setup( 97 | name=NAME, 98 | version=about['__version__'], 99 | description=DESCRIPTION, 100 | long_description=long_description, 101 | long_description_content_type='text/markdown', 102 | author=AUTHOR, 103 | author_email=EMAIL, 104 | python_requires=REQUIRES_PYTHON, 105 | url=URL, 106 | packages=find_packages(exclude=["tests", "*.tests", "*.tests.*", "tests.*"]), 107 | # If your package is a single module, use this instead of 'packages': 108 | # py_modules=['mypackage'], 109 | 110 | # entry_points={ 111 | # 'console_scripts': ['mycli=mymodule:cli'], 112 | # }, 113 | install_requires=REQUIRED, 114 | extras_require=EXTRAS, 115 | include_package_data=True, 116 | license='MIT', 117 | classifiers=[ 118 | # Trove classifiers 119 | # Full list: https://pypi.python.org/pypi?%3Aaction=list_classifiers 120 | 'License :: OSI Approved :: MIT License', 121 | 'Programming Language :: Python', 122 | 'Programming Language :: Python :: 3', 123 | 'Programming Language :: Python :: 3.6', 124 | 'Programming Language :: Python :: 3.7', 125 | 'Programming Language :: Python :: Implementation :: CPython', 126 | 'Programming Language :: Python :: Implementation :: PyPy' 127 | ], 128 | # $ setup.py publish support. 
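    # Registering UploadCommand (defined above) under the name 'upload' enables the
    # following release workflow (assuming twine is installed and PyPI credentials
    # are configured):
    #
    #     $ python setup.py upload
    #
    # which removes any previous dist/, builds an sdist and a universal wheel,
    # uploads them with twine, then creates and pushes a "v<version>" git tag.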
129 | cmdclass={ 130 | 'upload': UploadCommand, 131 | }, 132 | ) 133 | -------------------------------------------------------------------------------- /simple/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | from simple.czsc.CzscModelEngine import CzscModelEngine 3 | from simple.czsc.Utils import * 4 | from simple.draw.KdrawGrid import KdrawGrid 5 | from simple.draw.KdrawRealtimeMultiPeriod import KdrawRealtimeMultiPeriod 6 | from simple.logger.logger import LoggerFactory 7 | from simple.pusher.serverj import wx_push -------------------------------------------------------------------------------- /simple/czsc/CzscModelEngine.py: -------------------------------------------------------------------------------- 1 | # -*- encoding: utf-8 -*- 2 | 3 | from simple.czsc.CzscModelEngineZhongshu import CzscModelEngineZhongshu 4 | from simple.czsc.CzscModelEngineXianduan import CzscModelEngineXianduan 5 | from simple.czsc.CzscModelEngineFenbi import CzscModelEngineFenbi 6 | from simple.czsc.CzscModelEngineFenxing import CzscModelEngineFenxing 7 | from simple.czsc.CzscModelEngineHebing import CzscModelEngineHebing 8 | from simple.czsc.CzscRealtimeModelEngineFenbi import CzscRealtimeModelEngineFenbi 9 | from simple.czsc.CzscRealtimeModelEngineXianduan import CzscRealtimeModelEngineXianduan 10 | from simple.czsc.CzscRealtimeModelEngineZhongshu import CzscRealtimeModelEngineZhongshu 11 | import simple.factor as factor 12 | import datetime 13 | import pandas as pd 14 | import numpy as np 15 | from simple.logger.logger import LoggerFactory 16 | 17 | 18 | logger = LoggerFactory.getLogger(__name__) 19 | 20 | class CzscModelEngine: 21 | def __init__( 22 | self, 23 | stock_code, 24 | frequency, 25 | is_debug=False, 26 | realtime_drive_time='', 27 | subscribe_factors=[], 28 | ): 29 | self.stock_code = stock_code 30 | self.frequency = frequency 31 | self.is_debug = is_debug 32 | self.realtime_drive_time = realtime_drive_time 33 | self.last_history_k_index = None 34 | 35 | self.hebing_engine = CzscModelEngineHebing(stock_code, frequency) 36 | self.fenxing_engine = CzscModelEngineFenxing(stock_code, frequency) 37 | self.fenbi_engine = CzscModelEngineFenbi(stock_code, frequency) 38 | self.xianduan_engine = CzscModelEngineXianduan(stock_code, frequency) 39 | self.zhongshu_engine = CzscModelEngineZhongshu(stock_code, frequency) 40 | self.realtime_fenbi_engine = CzscRealtimeModelEngineFenbi(stock_code, frequency) 41 | self.realtime_xianduan_engine = CzscRealtimeModelEngineXianduan(stock_code, frequency) 42 | self.realtime_zhongshu_engine = CzscRealtimeModelEngineZhongshu(stock_code, frequency) 43 | 44 | self.stock_df = pd.DataFrame(columns=['open', 'close', 'high', 'low']) 45 | self.hebing_df = pd.DataFrame(columns=['current_time', 'open', 'close', 'high', 'low']) 46 | self.fenxing_df = pd.DataFrame(columns=['price', 'type']) 47 | 48 | self.fenbi_df = pd.DataFrame( 49 | columns=[ 50 | 'confirm_time', 51 | 'price', 52 | 'type', 53 | 'status', 54 | 'price2', 55 | ] 56 | ) 57 | 58 | self.xianduan_df = pd.DataFrame( 59 | columns=[ 60 | 'confirm_time', 61 | 'start_point', 62 | 'end_point', 63 | 'dynamic_end_point', 64 | 'type', 65 | 'status', 66 | 'forward_break_point', 67 | 'reverse_break_point', 68 | 'fenbi_idx', 69 | 'fenbi_idx_snapshot', 70 | 'real_reverse_break_point', 71 | 'real_forward_break_point', 72 | ] 73 | ) 74 | self.zhongshu_df = pd.DataFrame( 75 | columns=[ 76 | 'start_point', 77 | 'end_point', 78 | 'high_point', 79 | 'last_high_point', 80 | 'low_point', 
81 | 'last_low_point', 82 | 'top_point', 83 | 'bottom_point', 84 | 'type', 85 | 'status', 86 | ] 87 | ) 88 | 89 | self.realtime_fenbi_df = self.fenbi_df.copy(deep=True) 90 | self.realtime_xianduan_df = self.xianduan_df.copy(deep=True) 91 | self.realtime_zhongshu_df = self.zhongshu_df.copy(deep=True) 92 | self.factor_result_dic = {} 93 | self.factor_engines = [] 94 | if subscribe_factors: 95 | sub_ft_np = np.array(subscribe_factors) 96 | 97 | if (sub_ft_np == factor.factors['xianduansanmai']).any(): 98 | self.factor_engines.append(factor.XianduanSanmai.XianduanSanmai(self)) 99 | if (sub_ft_np == factor.factors['macd']).any(): 100 | self.factor_engines.append(factor.Macd.Macd(self)) 101 | logger.info( 102 | '初始化引擎完成 %s %s ,实时结构计算开始时间: %s ,因子订阅:%s', 103 | stock_code, 104 | frequency, 105 | '无限制' if ('' == self.realtime_drive_time) else self.realtime_drive_time, 106 | subscribe_factors, 107 | ) 108 | 109 | 110 | def k_receive(self, k_df, is_realtime=False): 111 | ''' 112 | tick或k线数据接收 113 | ''' 114 | if self.is_debug: 115 | logger.debug('============接收到【%s】k线============\n%s' % (('实时' if (is_realtime) else '历史'), k_df)) 116 | starttime = datetime.datetime.now() 117 | k_df_ochl = k_df.loc[:, ['open', 'close', 'high', 'low']] 118 | self.stock_df.loc[k_df_ochl.index[0], :] = k_df_ochl.values[0] 119 | if not is_realtime: 120 | if not self.last_history_k_index is None and self.last_history_k_index == k_df_ochl.index[0]: 121 | logger.warn('当前k线【%s】【%s】历史结构计算已经执行过,不再重复执行', self.stock_code, self.last_history_k_index) 122 | else: 123 | self.__confirm_model(k_df_ochl) 124 | self.last_history_k_index = k_df_ochl.index[0] 125 | 126 | if self.realtime_drive_time == '' or k_df.index[-1] >= pd.to_datetime(self.realtime_drive_time).tz_localize('Asia/Shanghai'): 127 | self.__realtime_model(k_df_ochl) 128 | self.__execute_factors() 129 | 130 | endtime = datetime.datetime.now() 131 | if self.is_debug: 132 | logger.debug('计算一根k线耗时: %d ms' % ((endtime - starttime).seconds * 1000 + (endtime - starttime).microseconds / 1000)) 133 | 134 | 135 | 136 | def __confirm_model(self, k_df): 137 | if self.is_debug: 138 | logger.debug('====开始历史结构计算====') 139 | 140 | hebing_result = self.hebing_engine.execute(k_df=k_df, hebing_df=self.hebing_df) 141 | 142 | if 1 == hebing_result: 143 | return 144 | 145 | fenxing_result = self.fenxing_engine.execute(self.hebing_df, self.fenxing_df) 146 | if 1 != fenxing_result: 147 | return 148 | 149 | fenbi_result = self.fenbi_engine.execute(self.hebing_df, self.fenxing_df, self.fenbi_df) 150 | if 1 != fenbi_result: 151 | return 152 | 153 | xianduan_result = self.xianduan_engine.execute(self.fenbi_df, self.xianduan_df) 154 | if 2 != xianduan_result: 155 | return 156 | 157 | zhongshu_result = self.zhongshu_engine.execute(self.stock_df, self.xianduan_df, self.zhongshu_df) 158 | 159 | 160 | def __realtime_model(self, k_df): 161 | self.realtime_fenbi_df = self.realtime_fenbi_engine.execute(stock_df=self.stock_df, k_df=k_df, fenbi_df=self.fenbi_df) 162 | self.realtime_xianduan_df = self.xianduan_df.copy(deep=True) 163 | self.realtime_zhongshu_df = self.zhongshu_df.copy(deep=True) 164 | if not self.realtime_fenbi_df.empty and self.fenbi_df.shape[0] > 3 and not self.realtime_xianduan_df.empty: 165 | self.realtime_fenbi_df.loc[self.fenbi_df.index[-4], ['confirm_time', 'price', 'type', 'status', 'price2']] = [ 166 | self.fenbi_df.iloc[-4]['confirm_time'], 167 | self.fenbi_df.iloc[-4]['price'], 168 | self.fenbi_df.iloc[-4]['type'], 169 | self.fenbi_df.iloc[-4]['status'], 170 | np.nan, 171 | ] 172 | 
self.realtime_fenbi_df.loc[self.fenbi_df.index[-3], ['confirm_time', 'price', 'type', 'status', 'price2']] = [ 173 | self.fenbi_df.iloc[-3]['confirm_time'], 174 | self.fenbi_df.iloc[-3]['price'], 175 | self.fenbi_df.iloc[-3]['type'], 176 | self.fenbi_df.iloc[-3]['status'], 177 | np.nan, 178 | ] 179 | self.realtime_fenbi_df = self.realtime_fenbi_df.sort_index() 180 | 181 | self.realtime_xianduan_engine.execute(self.xianduan_engine, self.realtime_fenbi_df, self.realtime_xianduan_df) 182 | 183 | temp_fenbi_df = self.fenbi_df.copy(deep=True) 184 | temp_fenbi_df.drop(axis=0, index=temp_fenbi_df.index[-1], inplace=True) 185 | self.realtime_fenbi_df.drop(axis=0, index=self.realtime_fenbi_df.index[0:3], inplace=True) 186 | self.realtime_fenbi_df = pd.concat([temp_fenbi_df, self.realtime_fenbi_df], axis=0) 187 | self.realtime_zhongshu_engine.execute(self.zhongshu_engine, self.stock_df, self.realtime_xianduan_df, self.realtime_zhongshu_df) 188 | 189 | def __execute_factors(self): 190 | for e in self.factor_engines: 191 | self.factor_result_dic[e.get_factor_name()] = e.execute() 192 | 193 | def get_factor_result(self): 194 | return self.factor_result_dic 195 | 196 | 197 | def get_containers(self): 198 | return ( 199 | self.hebing_df, 200 | self.fenxing_df, 201 | self.fenbi_df, 202 | self.xianduan_df, 203 | self.zhongshu_df, 204 | self.realtime_fenbi_df, 205 | self.realtime_xianduan_df, 206 | self.realtime_zhongshu_df, 207 | ) 208 | 209 | def get_hebing_df(self): 210 | return self.hebing_df 211 | 212 | def get_fenxing_df(self): 213 | return self.fenxing_df 214 | 215 | def get_fenbi_df(self): 216 | return self.fenbi_df 217 | 218 | def get_xianduan_df(self): 219 | return self.xianduan_df 220 | 221 | def get_zhongshu_df(self): 222 | return self.zhongshu_df 223 | 224 | def get_realtime_fenbi_df(self): 225 | return self.realtime_fenbi_df 226 | 227 | def get_realtime_xianduan_df(self): 228 | return self.realtime_xianduan_df 229 | 230 | def get_realtime_zhongshu_df(self): 231 | return self.realtime_zhongshu_df 232 | 233 | def get_klines(self): 234 | return self.stock_df 235 | 236 | 237 | def print_containers(self): 238 | 239 | pd.set_option('display.max_columns', None) 240 | 241 | pd.set_option('display.max_rows', None) 242 | 243 | pd.set_option('display.width', 100000) 244 | 245 | pd.set_option('display.max_colwidth', 10000) 246 | 247 | logger.info('====zhongshu_df==== \n %s', self.zhongshu_df) 248 | 249 | logger.info('====realtime_zhongshu_df==== \n %s', self.realtime_zhongshu_df) 250 | -------------------------------------------------------------------------------- /simple/czsc/CzscModelEngineFenbi.py: -------------------------------------------------------------------------------- 1 | # -*- encoding: utf-8 -*- 2 | 3 | import numpy as np 4 | from simple.logger.logger import LoggerFactory 5 | logger = LoggerFactory.getLogger(__name__) 6 | 7 | 8 | class CzscModelEngineFenbi: 9 | def __init__(self, stock_code, frequency, is_debug=False): 10 | self.is_debug = is_debug 11 | 12 | 13 | def human_result(self, result): 14 | return '分笔' 15 | 16 | def execute(self, hebing_df, fenxing_df, fenbi_df): 17 | 18 | knocking_price = fenxing_df.iloc[-1]['price'] 19 | knocking_type = fenxing_df.iloc[-1]['type'] 20 | knocking_index = fenxing_df.index[-1] 21 | 22 | if fenxing_df.shape[0] == 1: 23 | 24 | fenbi_df.loc[knocking_index, ['confirm_time', 'price', 'type', 'status']] = [ 25 | np.nan, knocking_price, knocking_type, 0] 26 | return 0 27 | 28 | last_to_confirm_type = fenbi_df.iloc[-1]['type'] 29 | last_to_confirm_price = 
fenbi_df.iloc[-1]['price'] 30 | last_to_confirm_index = fenbi_df.index[-1] 31 | 32 | if -1 == last_to_confirm_type: 33 | if 1 == knocking_type: 34 | if self.__distanceSatisfied(hebing_df, last_to_confirm_index, knocking_index) \ 35 | or (fenbi_df.shape[0] > 1 and knocking_price <= fenbi_df.iloc[-2]['price']): 36 | 37 | min_price_idx=self.__lowerBottomFenxingExistBetween(fenxing_df,fenbi_df.index[-1],knocking_index,hebing_df,fenbi_df) 38 | if min_price_idx is None: 39 | return 0 40 | fenbi_df.iloc[-1]['confirm_time'] = knocking_index 41 | fenbi_df.iloc[-1]['status'] = 1 42 | fenbi_df.loc[min_price_idx, ['confirm_time', 'price', 'type', 'status']] = [ 43 | np.nan, fenxing_df.loc[min_price_idx,'price'], 1, 0 44 | ] 45 | return 1 46 | else: 47 | return 0 48 | else: 49 | if knocking_price >= last_to_confirm_price: 50 | 51 | fenbi_df.drop(axis=0, labels=[ 52 | fenbi_df.index[-1]], inplace=True) 53 | fenbi_df.loc[knocking_index, ['confirm_time', 'price', 'type', 'status']] = [ 54 | np.nan, knocking_price, knocking_type, 0 55 | ] 56 | return 2 57 | else: 58 | return 0 59 | 60 | if 1 == last_to_confirm_type: 61 | if -1 == knocking_type: 62 | if self.__distanceSatisfied(hebing_df, last_to_confirm_index, knocking_index) \ 63 | or (fenbi_df.shape[0] > 1 and knocking_price >= fenbi_df.iloc[-2]['price']): 64 | 65 | max_price_idx=self.__higherTopFenxingExistBetween(fenxing_df,fenbi_df.index[-1],knocking_index,hebing_df,fenbi_df) 66 | if max_price_idx is None: 67 | return 0 68 | fenbi_df.iloc[-1]['confirm_time'] = knocking_index 69 | fenbi_df.iloc[-1]['status'] = 1 70 | fenbi_df.loc[max_price_idx, ['confirm_time', 'price', 'type', 'status']] = [ 71 | np.nan, fenxing_df.loc[max_price_idx,'price'], -1, 0 72 | ] 73 | return 1 74 | else: 75 | return 0 76 | else: 77 | if knocking_price <= last_to_confirm_price: 78 | 79 | fenbi_df.drop(axis=0, labels=[ 80 | fenbi_df.index[-1]], inplace=True) 81 | 82 | fenbi_df.loc[knocking_index, ['confirm_time', 'price', 'type', 'status']] = [ 83 | np.nan, knocking_price, knocking_type, 0 84 | ] 85 | return 2 86 | else: 87 | return 0 88 | 89 | raise Exception('Mars Area') 90 | 91 | def __distanceSatisfied(self, hebing_df, index1, index2): 92 | distance = hebing_df[(hebing_df.index >= index1) & ( 93 | hebing_df.index <= index2)].shape[0] 94 | if distance >= 5: 95 | return True 96 | else: 97 | return False 98 | 99 | def __higherTopFenxingExistBetween(self, fenxing_df, last_fenbi_confirm_index, knocking_index, hebing_df,fenbi_df): 100 | tmp_df=fenxing_df[(fenxing_df.index >= last_fenbi_confirm_index) & ( 101 | fenxing_df.index <= knocking_index)].loc[:, 'price'].infer_objects() 102 | tmp_df=tmp_df.reindex(index=tmp_df.index[::-1]) 103 | max_price_idx = tmp_df.idxmax() 104 | if max_price_idx > last_fenbi_confirm_index and max_price_idx < knocking_index: 105 | max_price = fenxing_df.loc[max_price_idx,'price'] 106 | N=5 107 | high_between_bottomfenxing_and_backwardN = hebing_df.loc[:fenbi_df.index[-1]].iloc[-N:]['high'].max() 108 | low_between_bottomfenxing_and_backwardN = hebing_df.loc[:fenbi_df.index[-1]].iloc[-N:]['low'].min() 109 | diff1=max_price - low_between_bottomfenxing_and_backwardN 110 | diff2=high_between_bottomfenxing_and_backwardN - low_between_bottomfenxing_and_backwardN 111 | if diff1 < 0 or diff2 < 0: raise Exception('Mars Area') 112 | vision_satisfied = diff1> diff2 113 | if vision_satisfied: 114 | return max_price_idx 115 | else: 116 | return None 117 | return knocking_index 118 | 119 | def __lowerBottomFenxingExistBetween(self, fenxing_df, 
last_fenbi_confirm_index, knocking_index, hebing_df,fenbi_df): 120 | 121 | tmp_df = fenxing_df[(fenxing_df.index >= last_fenbi_confirm_index) & (fenxing_df.index <= knocking_index)].loc[:, 'price'].infer_objects() 122 | tmp_df=tmp_df.reindex(index=tmp_df.index[::-1]) 123 | min_price_index = tmp_df.idxmin() 124 | if min_price_index > last_fenbi_confirm_index and min_price_index < knocking_index: 125 | min_price = fenxing_df.loc[min_price_index,'price'] 126 | N=5 127 | high_between_bottomfenxing_and_backwardN = hebing_df.loc[:fenbi_df.index[-1]].iloc[-N:]['high'].max() 128 | low_between_bottomfenxing_and_backwardN = hebing_df.loc[:fenbi_df.index[-1]].iloc[-N:]['low'].min() 129 | diff1=high_between_bottomfenxing_and_backwardN - min_price 130 | diff2=high_between_bottomfenxing_and_backwardN - low_between_bottomfenxing_and_backwardN 131 | if diff1 < 0 or diff2 < 0: raise Exception('Mars Area') 132 | vision_satisfied = diff1 > diff2 133 | if vision_satisfied: 134 | return min_price_index 135 | else: 136 | return None 137 | return knocking_index 138 | -------------------------------------------------------------------------------- /simple/czsc/CzscModelEngineFenxing.py: -------------------------------------------------------------------------------- 1 | # -*- encoding: utf-8 -*- 2 | 3 | from simple.logger.logger import LoggerFactory 4 | logger = LoggerFactory.getLogger(__name__) 5 | 6 | class CzscModelEngineFenxing: 7 | def __init__(self,stock_code,frequency,is_debug=False): 8 | self.is_debug = is_debug 9 | 10 | def human_result(self, result): 11 | return '分型' 12 | def execute(self, hebing_df,fenxing_df): 13 | if hebing_df.shape[0] < 3: 14 | return 0 15 | 16 | h0 = hebing_df.iloc[-3]['high'] 17 | h1 = hebing_df.iloc[-2]['high'] 18 | h2 = hebing_df.iloc[-1]['high'] 19 | l0 = hebing_df.iloc[-3]['low'] 20 | l1 = hebing_df.iloc[-2]['low'] 21 | l2 = hebing_df.iloc[-1]['low'] 22 | 23 | if h1 > h0 and h1 > h2 and l1 > l0 and l1 > l2: 24 | fenxing_df.loc[hebing_df.index[-2],['price','type']]=[h1,-1] 25 | return 1 26 | 27 | if h1 < h0 and h1 < h2 and l1 < l0 and l1 < l2: 28 | fenxing_df.loc[hebing_df.index[-2],['price','type']]=[l1,1] 29 | return 1 30 | 31 | return 0 32 | 33 | 34 | -------------------------------------------------------------------------------- /simple/czsc/CzscModelEngineHebing.py: -------------------------------------------------------------------------------- 1 | # -*- encoding: utf-8 -*- 2 | 3 | from simple.logger.logger import LoggerFactory 4 | logger = LoggerFactory.getLogger(__name__) 5 | class CzscModelEngineHebing: 6 | def __init__(self,stock_code,frequency,is_debug=False): 7 | pass 8 | def human_result(self, result): 9 | return '合并' 10 | 11 | def execute(self, 12 | k_df, 13 | hebing_df, 14 | ): 15 | hebing_df_size=hebing_df.shape[0] 16 | if hebing_df_size >= 2: 17 | if self.__contain(hebing_df.iloc[-1]['high'],hebing_df.iloc[-1]['low'],k_df.iloc[-1]['high'],k_df.iloc[-1]['low']): 18 | if 1==self.__trend(hebing_df): 19 | higher_index=k_df.index[-1] 20 | if hebing_df.iloc[-1]['high'] > k_df.iloc[-1]['high']: 21 | higher_index = hebing_df.index[-1] 22 | h = max(hebing_df.iloc[-1]['high'],k_df.iloc[-1]['high']) 23 | l = max(hebing_df.iloc[-1]['low'],k_df.iloc[-1]['low']) 24 | o,c = l,h 25 | hebing_df.drop(axis=0,labels=[hebing_df.index[-1]],inplace=True) 26 | hebing_df.loc[higher_index,['current_time','open','close','high','low']]=[k_df.index[0],o,c,h,l] 27 | return 1 28 | else: 29 | lower_index=k_df.index[-1] 30 | if hebing_df.iloc[-1]['low'] < k_df.iloc[-1]['low']: 31 | 
lower_index = hebing_df.index[-1] 32 | h = min(hebing_df.iloc[-1]['high'],k_df.iloc[-1]['high']) 33 | l = min(hebing_df.iloc[-1]['low'],k_df.iloc[-1]['low']) 34 | o,c = h,l 35 | hebing_df.drop(axis=0,labels=[hebing_df.index[-1]],inplace=True) 36 | hebing_df.loc[lower_index,['current_time','open','close','high','low']]=[k_df.index[0],o,c,h,l] 37 | return 1 38 | else: 39 | hebing_df.loc[k_df.index[0],['open','close','high','low']]=k_df.values[0] 40 | hebing_df.loc[k_df.index[0],['current_time']]=k_df.index[0] 41 | return 0 42 | elif hebing_df_size == 1: 43 | if self.__contain(hebing_df.iloc[-1]['high'],hebing_df.iloc[-1]['low'],k_df.iloc[-1]['high'],k_df.iloc[-1]['low']): 44 | higher_index=k_df.index[-1] 45 | if hebing_df.iloc[-1]['high'] > k_df.iloc[-1]['high']: 46 | higher_index = hebing_df.index[-1] 47 | h = max(hebing_df.iloc[-1]['high'],k_df.iloc[-1]['high']) 48 | l = max(hebing_df.iloc[-1]['low'],k_df.iloc[-1]['low']) 49 | o,c = l,h 50 | hebing_df.drop(axis=0,labels=[hebing_df.index[-1]],inplace=True) 51 | hebing_df.loc[higher_index,['current_time','open','close','high','low']]=[k_df.index[0],o,c,h,l] #设置新合并后的k线 52 | return 1 53 | else: 54 | hebing_df.loc[k_df.index[0],['open','close','high','low']]=k_df.values[0] 55 | hebing_df.loc[k_df.index[0],['current_time']]=k_df.index[0] 56 | return 0 57 | else: 58 | hebing_df.loc[k_df.index[0],['open','close','high','low']]=k_df.values[0] 59 | hebing_df.loc[k_df.index[0],['current_time']]=k_df.index[0] 60 | return 0 61 | 62 | 63 | def __contain(self, h1, l1, h2, l2): 64 | if (h1 >= h2 and l1 <= l2) or (h1 <= h2 and l1 >= l2): 65 | return True 66 | return False 67 | 68 | def __trend(self, hebing_df): 69 | if hebing_df.iloc[-1]['high'] > hebing_df.iloc[-2]['high'] and hebing_df.iloc[-1]['low'] > hebing_df.iloc[-2]['low']: 70 | return 1 71 | if hebing_df.iloc[-1]['high'] < hebing_df.iloc[-2]['high'] and hebing_df.iloc[-1]['low'] < hebing_df.iloc[-2]['low']: 72 | return -1 73 | return 1 74 | -------------------------------------------------------------------------------- /simple/czsc/CzscModelEngineXianduan.py: -------------------------------------------------------------------------------- 1 | # -*- encoding: utf-8 -*- 2 | 3 | import copy 4 | from simple.logger.logger import LoggerFactory 5 | 6 | logger = LoggerFactory.getLogger(__name__) 7 | 8 | class CzscModelEngineXianduan: 9 | def __init__(self, stock_code, frequency): 10 | pass 11 | 12 | def human_result(self, result): 13 | return '线段' 14 | 15 | 16 | def execute(self, fenbi_df, xianduan_df, is_realtime=False 17 | ): 18 | 19 | if is_realtime: 20 | confirmed_fenbi_df = fenbi_df 21 | else: 22 | confirmed_fenbi_df = fenbi_df.drop(axis=0, labels=[fenbi_df.index[-1]], inplace=False) 23 | 24 | if confirmed_fenbi_df.shape[0] < 2: 25 | return 0 26 | 27 | if confirmed_fenbi_df.shape[0] == 2: 28 | 29 | xianduan_df.loc[ 30 | confirmed_fenbi_df.index[-2], 31 | [ 32 | 'start_point', 33 | 'end_point', 34 | 'dynamic_end_point', 35 | 'type', 36 | 'status', 37 | 'forward_break_point', 38 | 'reverse_break_point', 39 | 'fenbi_idx', 40 | 'real_reverse_break_point', 41 | 'real_forward_break_point', 42 | ] 43 | ] = [ 44 | [confirmed_fenbi_df.index[-2], confirmed_fenbi_df.iloc[-2]['price']], 45 | [confirmed_fenbi_df.index[-1], confirmed_fenbi_df.iloc[-1]['price']], 46 | [confirmed_fenbi_df.index[-1], confirmed_fenbi_df.iloc[-1]['price']], 47 | confirmed_fenbi_df.iloc[-2]['type'], 48 | 0, 49 | [confirmed_fenbi_df.index[-1], confirmed_fenbi_df.iloc[-1]['price']], 50 | [confirmed_fenbi_df.index[-2], 
confirmed_fenbi_df.iloc[-2]['price']], 51 | [confirmed_fenbi_df.index[-2], confirmed_fenbi_df.index[-1]], 52 | [confirmed_fenbi_df.index[-2], confirmed_fenbi_df.iloc[-2]['price']], 53 | [confirmed_fenbi_df.index[-1], confirmed_fenbi_df.iloc[-1]['price']] 54 | ] 55 | return 1 56 | 57 | 58 | knocking_fenbi_idx = confirmed_fenbi_df.index[-1] 59 | knocking_fenbi_price = confirmed_fenbi_df.iloc[-1]['price'] 60 | knocking_fenbi_type = confirmed_fenbi_df.iloc[-1]['type'] 61 | to_confirm_type = xianduan_df.iloc[-1]['type'] 62 | to_confirm_forward_break_price = xianduan_df.iloc[-1]['forward_break_point'][1] 63 | to_confirm_reverse_break_price = xianduan_df.iloc[-1]['reverse_break_point'][1] 64 | if to_confirm_type == -knocking_fenbi_type: 65 | xianduan_df.iloc[-1]['forward_break_point'] = [knocking_fenbi_idx, knocking_fenbi_price] 66 | xianduan_df.iloc[-1]['dynamic_end_point'] = [knocking_fenbi_idx, knocking_fenbi_price] 67 | xianduan_df.iloc[-1]['fenbi_idx'].append(knocking_fenbi_idx) 68 | if -1 == to_confirm_type: 69 | if knocking_fenbi_price <= to_confirm_forward_break_price: 70 | xianduan_df.iloc[-1]['end_point'] = [knocking_fenbi_idx, knocking_fenbi_price] 71 | if is_realtime: 72 | xianduan_df.iloc[-1]['real_reverse_break_point'] = [ 73 | xianduan_df.iloc[-1]['reverse_break_point'][0], 74 | xianduan_df.iloc[-1]['reverse_break_point'][1]] 75 | return 3 76 | else: 77 | if is_realtime: 78 | xianduan_df.iloc[-1]['real_reverse_break_point'] = [ 79 | xianduan_df.iloc[-1]['reverse_break_point'][0], 80 | xianduan_df.iloc[-1]['reverse_break_point'][1]] 81 | return 4 82 | else: 83 | if knocking_fenbi_price >= to_confirm_forward_break_price: 84 | xianduan_df.iloc[-1]['end_point'] = [knocking_fenbi_idx, knocking_fenbi_price] 85 | if is_realtime: 86 | xianduan_df.iloc[-1]['real_reverse_break_point'] = [ 87 | xianduan_df.iloc[-1]['reverse_break_point'][0], 88 | xianduan_df.iloc[-1]['reverse_break_point'][1]] 89 | return 3 90 | else: 91 | if is_realtime: 92 | xianduan_df.iloc[-1]['real_reverse_break_point'] = [ 93 | xianduan_df.iloc[-1]['reverse_break_point'][0], 94 | xianduan_df.iloc[-1]['reverse_break_point'][1]] 95 | return 4 96 | else: 97 | if -1 == to_confirm_type: 98 | if knocking_fenbi_price >= to_confirm_reverse_break_price: 99 | self.__break_xianduan(xianduan_df, knocking_fenbi_idx, confirmed_fenbi_df) 100 | return 2 101 | else: 102 | if is_realtime: 103 | xianduan_df.iloc[-1]['real_reverse_break_point'] = [ 104 | xianduan_df.iloc[-1]['reverse_break_point'][0], 105 | xianduan_df.iloc[-1]['reverse_break_point'][1]] 106 | xianduan_df.iloc[-1]['reverse_break_point'] = [knocking_fenbi_idx, knocking_fenbi_price] 107 | xianduan_df.iloc[-1]['dynamic_end_point'] = [knocking_fenbi_idx, knocking_fenbi_price] 108 | xianduan_df.iloc[-1]['fenbi_idx'].append(knocking_fenbi_idx) 109 | return 4 110 | else: 111 | if knocking_fenbi_price <= to_confirm_reverse_break_price: 112 | self.__break_xianduan(xianduan_df, knocking_fenbi_idx, confirmed_fenbi_df) 113 | return 2 114 | else: 115 | if is_realtime: 116 | xianduan_df.iloc[-1]['real_reverse_break_point'] = [ 117 | xianduan_df.iloc[-1]['reverse_break_point'][0], 118 | xianduan_df.iloc[-1]['reverse_break_point'][1]] 119 | xianduan_df.iloc[-1]['reverse_break_point'] = [knocking_fenbi_idx, knocking_fenbi_price] 120 | xianduan_df.iloc[-1]['dynamic_end_point'] = [knocking_fenbi_idx, knocking_fenbi_price] 121 | xianduan_df.iloc[-1]['fenbi_idx'].append(knocking_fenbi_idx) 122 | return 4 123 | 124 | def __break_xianduan(self, xianduan_df, knocking_fenbi_idx, 
confirmed_fenbi_df): 125 | 126 | xianduan_df.iloc[-1]['fenbi_idx_snapshot'] = copy.deepcopy(xianduan_df.iloc[-1]['fenbi_idx']) 127 | xianduan_df.iloc[-1]['confirm_time'] = knocking_fenbi_idx 128 | xianduan_df.iloc[-1]['status'] = 1 129 | 130 | xianduan_end_idx = xianduan_df.iloc[-1]['end_point'][0] 131 | xianduan_end_price = xianduan_df.iloc[-1]['end_point'][1] 132 | fenbi_idx_end_idx = xianduan_df.iloc[-1]['fenbi_idx'].index(xianduan_end_idx) 133 | xianduan_df.iloc[-1]['fenbi_idx'] = xianduan_df.iloc[-1]['fenbi_idx'][:fenbi_idx_end_idx + 1] 134 | 135 | 136 | 137 | xianduan_df.loc[ 138 | xianduan_end_idx, 139 | [ 140 | 'start_point', 141 | 'end_point', 142 | 'dynamic_end_point', 143 | 'type', 144 | 'status', 145 | 'forward_break_point', 146 | 'reverse_break_point', 147 | 'fenbi_idx', 148 | 'real_reverse_break_point', 149 | 'real_forward_break_point', 150 | ] 151 | ] = [ 152 | [xianduan_end_idx, xianduan_end_price], 153 | [confirmed_fenbi_df.index[-1], confirmed_fenbi_df.iloc[-1]['price']], 154 | [confirmed_fenbi_df.index[-1], confirmed_fenbi_df.iloc[-1]['price']], 155 | confirmed_fenbi_df.iloc[-2]['type'], 156 | 0, 157 | [confirmed_fenbi_df.index[-1], confirmed_fenbi_df.iloc[-1]['price']], 158 | [confirmed_fenbi_df.index[-2], confirmed_fenbi_df.iloc[-2]['price']], 159 | [confirmed_fenbi_df.index[-2], confirmed_fenbi_df.index[-1]], 160 | [confirmed_fenbi_df.index[-2], confirmed_fenbi_df.iloc[-2]['price']], 161 | [confirmed_fenbi_df.index[-1], confirmed_fenbi_df.iloc[-1]['price']] 162 | ] 163 | 164 | -------------------------------------------------------------------------------- /simple/czsc/CzscModelEngineZhongshu.py: -------------------------------------------------------------------------------- 1 | # -*- encoding: utf-8 -*- 2 | 3 | from simple.logger.logger import LoggerFactory 4 | logger = LoggerFactory.getLogger(__name__) 5 | class CzscModelEngineZhongshu: 6 | def __init__(self,stock_code,frequency,is_debug=False): 7 | self.is_debug=is_debug 8 | pass 9 | 10 | def human_result(self,result): 11 | return '中枢' 12 | 13 | def execute(self, stock_df, xianduan_df,zhongshu_df,is_realtime=False): 14 | 15 | if is_realtime: 16 | confirmed_xianduan_df = xianduan_df 17 | else: 18 | confirmed_xianduan_df = xianduan_df.drop(axis=0,labels=[xianduan_df.index[-1]],inplace=False) 19 | if confirmed_xianduan_df.shape[0] < 3: 20 | return -1 21 | 22 | 23 | if -1 == confirmed_xianduan_df.iloc[-1]['type']: 24 | if zhongshu_df.shape[0] > 0 and zhongshu_df.iloc[-1]['status']==0 and confirmed_xianduan_df.iloc[-1]['end_point'][1]>zhongshu_df.iloc[-1]['top_point'][1]: 25 | zhongshu_df.iloc[-1]['status']=1 26 | 27 | zhongshu_df.iloc[-1]['end_point']=confirmed_xianduan_df.iloc[-2]['start_point'] 28 | 29 | lowest_point = self.__find_lowest_point(zhongshu_df.iloc[-1]['start_point'][0],zhongshu_df.iloc[-1]['end_point'][0],stock_df) 30 | zhongshu_df.iloc[-1]['low_point']=lowest_point 31 | highest_point = self.__find_highest_point(zhongshu_df.iloc[-1]['start_point'][0],zhongshu_df.iloc[-1]['end_point'][0],stock_df) 32 | zhongshu_df.iloc[-1]['high_point']=highest_point 33 | if self.is_debug: 34 | logger.info(zhongshu_df) 35 | return 2 36 | else: 37 | pass 38 | 39 | 40 | low1_point=confirmed_xianduan_df.iloc[-1]['end_point'] 41 | low2_point=confirmed_xianduan_df.iloc[-3]['end_point'] 42 | high1_point=confirmed_xianduan_df.iloc[-1]['start_point'] 43 | high2_point=confirmed_xianduan_df.iloc[-3]['start_point'] 44 | 45 | bottom_point = low1_point if(low1_point[1]>low2_point[1]) else low2_point 46 | top_point = high1_point 
if(high1_point[1] zhongshu_df.iloc[-1]['top_point'][1] and bottom_point[1] < top_point[1]: 54 | is_create_zhongshu=True 55 | if is_create_zhongshu: 56 | 57 | 58 | highest_point = self.__find_highest_point(high2_point[0],low1_point[0],stock_df) 59 | lowest_point = self.__find_lowest_point(high2_point[0],low1_point[0],stock_df) 60 | 61 | _type = 1 62 | if zhongshu_df.shape[0]>=1: 63 | if bottom_point[1] > zhongshu_df.iloc[-1]['top_point'][1]: _type = 1 64 | if top_point[1] < zhongshu_df.iloc[-1]['bottom_point'][1]: _type = -1 65 | 66 | zhongshu_df.loc[high2_point[0], 67 | ['start_point', 68 | 'end_point', 69 | 'high_point', 70 | 'last_high_point', 71 | 'low_point', 72 | 'last_low_point', 73 | 'top_point', 74 | 'bottom_point', 75 | 'type', 76 | 'status' 77 | ] 78 | ] = [high2_point,low1_point,highest_point,highest_point,lowest_point,lowest_point,top_point,bottom_point,_type,0] 79 | if self.is_debug: 80 | logger.info(zhongshu_df) 81 | return 1 82 | 83 | 84 | 85 | if zhongshu_df.shape[0] > 0 and zhongshu_df.iloc[-1]['status']==0: 86 | zhongshu_df.iloc[-1]['end_point']=confirmed_xianduan_df.iloc[-1]['end_point'] 87 | lowest_point = self.__find_lowest_point(zhongshu_df.iloc[-1]['start_point'][0],zhongshu_df.iloc[-1]['end_point'][0],stock_df) 88 | if lowest_point[1] <= zhongshu_df.iloc[-1]['low_point'][1]: 89 | zhongshu_df.iloc[-1]['low_point']=lowest_point 90 | return 3 91 | 92 | else: 93 | if zhongshu_df.shape[0] > 0 and zhongshu_df.iloc[-1]['status']==0 and confirmed_xianduan_df.iloc[-1]['end_point'][1]low2_point[1]) else low2_point 115 | top_point = high1_point if(high1_point[1]=1: 133 | if bottom_point[1] > zhongshu_df.iloc[-1]['top_point'][1]: _type = 1 134 | if top_point[1] < zhongshu_df.iloc[-1]['bottom_point'][1]: _type = -1 135 | 136 | zhongshu_df.loc[low2_point[0], 137 | ['start_point', 138 | 'end_point', 139 | 'high_point', 140 | 'last_high_point', 141 | 'low_point', 142 | 'last_low_point', 143 | 'top_point', 144 | 'bottom_point', 145 | 'type', 146 | 'status' 147 | ] 148 | ]=[low2_point,high1_point,highest_point,highest_point,lowest_point,lowest_point,top_point,bottom_point,_type,0] 149 | if self.is_debug: 150 | logger.info(zhongshu_df) 151 | return 1 152 | 153 | 154 | 155 | if zhongshu_df.shape[0] > 0 and zhongshu_df.iloc[-1]['status']==0: 156 | zhongshu_df.iloc[-1]['end_point']=confirmed_xianduan_df.iloc[-1]['end_point'] 157 | highest_point = self.__find_highest_point(zhongshu_df.iloc[-1]['start_point'][0],zhongshu_df.iloc[-1]['end_point'][0],stock_df) 158 | if highest_point[1] >= zhongshu_df.iloc[-1]['high_point'][1]: 159 | zhongshu_df.iloc[-1]['high_point']=highest_point 160 | return 3 161 | 162 | 163 | def __find_highest_point(self, index1,index2,stock_df): 164 | tmp_df=stock_df.loc[index1:index2,'high'].infer_objects() 165 | max_price = tmp_df.max() 166 | max_time = tmp_df.idxmax() 167 | return [max_time,max_price] 168 | 169 | 170 | def __find_lowest_point(self, index1,index2,stock_df): 171 | tmp_df=stock_df.loc[index1:index2,'low'].infer_objects() 172 | min_price = tmp_df.min() 173 | min_time = tmp_df.idxmin() 174 | return [min_time,min_price] -------------------------------------------------------------------------------- /simple/czsc/CzscModelEngineZhongshuXd.py: -------------------------------------------------------------------------------- 1 | # -*- encoding: utf-8 -*- 2 | 3 | class CzscModelEngineZhongshuXd: 4 | def __init__(self,stock_code,frequency,is_debug=False): 5 | self.is_debug=is_debug 6 | pass 7 | def execute(self, hebing_df, fenxing_df, fenbi_df, 
xianduan_df,zhongshu_df): 8 | pass -------------------------------------------------------------------------------- /simple/czsc/CzscRealtimeModelEngineFenbi.py: -------------------------------------------------------------------------------- 1 | # -*- encoding: utf-8 -*- 2 | 3 | import pandas as pd 4 | import numpy as np 5 | from simple.czsc.Utils import find_highest_point, find_lowest_point 6 | 7 | from simple.logger.logger import LoggerFactory 8 | 9 | logger = LoggerFactory.getLogger(__name__) 10 | 11 | 12 | class CzscRealtimeModelEngineFenbi: 13 | def __init__(self, stock_code, frequency, is_debug=False): 14 | self.is_debug = is_debug 15 | 16 | def execute(self, stock_df, k_df, fenbi_df): 17 | realtime_fenbi_df = pd.DataFrame(columns=['confirm_time', 'price', 'type', 'status', 'price2']) 18 | 19 | if fenbi_df.shape[0] < 2: 20 | return realtime_fenbi_df 21 | 22 | forward_price = fenbi_df.iloc[-1]['price'] 23 | forward_type = fenbi_df.iloc[-1]['type'] 24 | forward_index = fenbi_df.index[-1] 25 | reverse_price = fenbi_df.iloc[-2]['price'] 26 | reverse_type = fenbi_df.iloc[-2]['type'] 27 | reverse_index = fenbi_df.index[-2] 28 | 29 | b_f_a_k_h_p = find_highest_point(forward_index, k_df.index[-1], stock_df) 30 | 31 | b_f_a_k_l_p = find_lowest_point(forward_index, k_df.index[-1], stock_df) 32 | 33 | if 1 == fenbi_df.iloc[-1]['type']: 34 | 35 | if b_f_a_k_h_p[1] < reverse_price and b_f_a_k_l_p[1] == forward_price and b_f_a_k_l_p[0] == forward_index: 36 | 37 | realtime_fenbi_df.loc[reverse_index, ['confirm_time', 'price', 'type', 'status', 'price2']] = [ 38 | k_df.index[-1], 39 | reverse_price, 40 | reverse_type, 41 | -1, 42 | np.nan, 43 | ] 44 | realtime_fenbi_df.loc[forward_index, ['confirm_time', 'price', 'type', 'status', 'price2']] = [ 45 | k_df.index[-1], 46 | forward_price, 47 | forward_type, 48 | -1, 49 | np.nan, 50 | ] 51 | return realtime_fenbi_df 52 | 53 | if b_f_a_k_h_p[1] < reverse_price and b_f_a_k_l_p[1] <= forward_price and b_f_a_k_l_p[0] != forward_index: 54 | 55 | realtime_fenbi_df.loc[reverse_index, ['confirm_time', 'price', 'type', 'status', 'price2']] = [ 56 | k_df.index[-1], 57 | reverse_price, 58 | reverse_type, 59 | -1, 60 | np.nan, 61 | ] 62 | realtime_fenbi_df.loc[b_f_a_k_l_p[0], ['confirm_time', 'price', 'type', 'status', 'price2']] = [k_df.index[-1], b_f_a_k_l_p[1], 1, -1, np.nan] 63 | return realtime_fenbi_df 64 | 65 | if b_f_a_k_h_p[1] >= reverse_price and b_f_a_k_l_p[1] == forward_price and b_f_a_k_l_p[0] == forward_index: 66 | 67 | if b_f_a_k_h_p[0] == b_f_a_k_l_p[0]: 68 | realtime_fenbi_df.loc[reverse_index, ['confirm_time', 'price', 'type', 'status', 'price2']] = [ 69 | k_df.index[-1], 70 | reverse_price, 71 | reverse_type, 72 | -1, 73 | np.nan, 74 | ] 75 | realtime_fenbi_df.loc[b_f_a_k_l_p[0], ['confirm_time', 'price', 'type', 'status', 'price2']] = [ 76 | k_df.index[-1], 77 | b_f_a_k_l_p[1], 78 | 1, 79 | -1, 80 | b_f_a_k_h_p[1], 81 | ] 82 | return realtime_fenbi_df 83 | 84 | realtime_fenbi_df.loc[reverse_index, ['confirm_time', 'price', 'type', 'status', 'price2']] = [ 85 | k_df.index[-1], 86 | reverse_price, 87 | reverse_type, 88 | -1, 89 | np.nan, 90 | ] 91 | realtime_fenbi_df.loc[forward_index, ['confirm_time', 'price', 'type', 'status', 'price2']] = [ 92 | k_df.index[-1], 93 | forward_price, 94 | forward_type, 95 | -1, 96 | np.nan, 97 | ] 98 | realtime_fenbi_df.loc[b_f_a_k_h_p[0], ['confirm_time', 'price', 'type', 'status', 'price2']] = [k_df.index[-1], b_f_a_k_h_p[1], -1, -1, np.nan] 99 | return realtime_fenbi_df 100 | 101 | if b_f_a_k_h_p[1] >= 
reverse_price and b_f_a_k_l_p[1] <= forward_price and b_f_a_k_l_p[0] != forward_index: 102 | 103 | if b_f_a_k_l_p[0] < b_f_a_k_h_p[0]: 104 | realtime_fenbi_df.loc[reverse_index, ['confirm_time', 'price', 'type', 'status', 'price2']] = [ 105 | k_df.index[-1], 106 | reverse_price, 107 | reverse_type, 108 | -1, 109 | np.nan, 110 | ] 111 | realtime_fenbi_df.loc[b_f_a_k_l_p[0], ['confirm_time', 'price', 'type', 'status', 'price2']] = [ 112 | k_df.index[-1], 113 | b_f_a_k_l_p[1], 114 | 1, 115 | -1, 116 | np.nan, 117 | ] 118 | realtime_fenbi_df.loc[b_f_a_k_h_p[0], ['confirm_time', 'price', 'type', 'status', 'price2']] = [ 119 | k_df.index[-1], 120 | b_f_a_k_h_p[1], 121 | -1, 122 | -1, 123 | np.nan, 124 | ] 125 | return realtime_fenbi_df 126 | 127 | if b_f_a_k_l_p[0] > b_f_a_k_h_p[0]: 128 | realtime_fenbi_df.loc[reverse_index, ['confirm_time', 'price', 'type', 'status', 'price2']] = [ 129 | k_df.index[-1], 130 | reverse_price, 131 | reverse_type, 132 | -1, 133 | np.nan, 134 | ] 135 | realtime_fenbi_df.loc[forward_index, ['confirm_time', 'price', 'type', 'status', 'price2']] = [ 136 | k_df.index[-1], 137 | forward_price, 138 | forward_type, 139 | -1, 140 | np.nan, 141 | ] 142 | realtime_fenbi_df.loc[b_f_a_k_h_p[0], ['confirm_time', 'price', 'type', 'status', 'price2']] = [ 143 | k_df.index[-1], 144 | b_f_a_k_h_p[1], 145 | -1, 146 | -1, 147 | np.nan, 148 | ] 149 | realtime_fenbi_df.loc[b_f_a_k_l_p[0], ['confirm_time', 'price', 'type', 'status', 'price2']] = [ 150 | k_df.index[-1], 151 | b_f_a_k_l_p[1], 152 | 1, 153 | -1, 154 | np.nan, 155 | ] 156 | 157 | if forward_index == b_f_a_k_h_p[0]: 158 | realtime_fenbi_df.drop(axis=0, index=reverse_index, inplace=True) 159 | return realtime_fenbi_df 160 | 161 | if b_f_a_k_l_p[0] == b_f_a_k_h_p[0]: 162 | realtime_fenbi_df.loc[reverse_index, ['confirm_time', 'price', 'type', 'status', 'price2']] = [ 163 | k_df.index[-1], 164 | reverse_price, 165 | reverse_type, 166 | -1, 167 | np.nan, 168 | ] 169 | realtime_fenbi_df.loc[b_f_a_k_l_p[0], ['confirm_time', 'price', 'type', 'status', 'price2']] = [ 170 | k_df.index[-1], 171 | b_f_a_k_l_p[1], 172 | 1, 173 | -1, 174 | b_f_a_k_h_p[1], 175 | ] 176 | return realtime_fenbi_df 177 | 178 | raise Exception('Mars Area') 179 | else: 180 | if b_f_a_k_l_p[1] > reverse_price and b_f_a_k_h_p[1] == forward_price and b_f_a_k_h_p[0] == forward_index: 181 | 182 | realtime_fenbi_df.loc[reverse_index, ['confirm_time', 'price', 'type', 'status', 'price2']] = [ 183 | k_df.index[-1], 184 | reverse_price, 185 | reverse_type, 186 | -1, 187 | np.nan, 188 | ] 189 | realtime_fenbi_df.loc[forward_index, ['confirm_time', 'price', 'type', 'status', 'price2']] = [ 190 | k_df.index[-1], 191 | forward_price, 192 | forward_type, 193 | -1, 194 | np.nan, 195 | ] 196 | return realtime_fenbi_df 197 | 198 | if b_f_a_k_l_p[1] > reverse_price and b_f_a_k_h_p[1] >= forward_price and b_f_a_k_h_p[0] != forward_index: 199 | 200 | realtime_fenbi_df.loc[reverse_index, ['confirm_time', 'price', 'type', 'status', 'price2']] = [ 201 | k_df.index[-1], 202 | reverse_price, 203 | reverse_type, 204 | -1, 205 | np.nan, 206 | ] 207 | realtime_fenbi_df.loc[b_f_a_k_h_p[0], ['confirm_time', 'price', 'type', 'status', 'price2']] = [k_df.index[-1], b_f_a_k_h_p[1], -1, -1, np.nan] 208 | return realtime_fenbi_df 209 | 210 | if b_f_a_k_l_p[1] <= reverse_price and b_f_a_k_h_p[1] == forward_price and b_f_a_k_h_p[0] == forward_index: 211 | 212 | if b_f_a_k_h_p[0] == b_f_a_k_l_p[0]: 213 | realtime_fenbi_df.loc[reverse_index, ['confirm_time', 'price', 'type', 'status', 'price2']] = [ 
214 | k_df.index[-1], 215 | reverse_price, 216 | reverse_type, 217 | -1, 218 | np.nan, 219 | ] 220 | realtime_fenbi_df.loc[b_f_a_k_h_p[0], ['confirm_time', 'price', 'type', 'status', 'price2']] = [ 221 | k_df.index[-1], 222 | b_f_a_k_h_p[1], 223 | -1, 224 | -1, 225 | b_f_a_k_l_p[1], 226 | ] 227 | return realtime_fenbi_df 228 | 229 | realtime_fenbi_df.loc[reverse_index, ['confirm_time', 'price', 'type', 'status', 'price2']] = [ 230 | k_df.index[-1], 231 | reverse_price, 232 | reverse_type, 233 | -1, 234 | np.nan, 235 | ] 236 | realtime_fenbi_df.loc[forward_index, ['confirm_time', 'price', 'type', 'status', 'price2']] = [ 237 | k_df.index[-1], 238 | forward_price, 239 | forward_type, 240 | -1, 241 | np.nan, 242 | ] 243 | realtime_fenbi_df.loc[b_f_a_k_l_p[0], ['confirm_time', 'price', 'type', 'status', 'price2']] = [k_df.index[-1], b_f_a_k_l_p[1], 1, -1, np.nan] 244 | return realtime_fenbi_df 245 | 246 | 247 | if b_f_a_k_l_p[1] <= reverse_price and b_f_a_k_h_p[1] >= forward_price and b_f_a_k_h_p[0] != forward_index: 248 | 249 | if b_f_a_k_h_p[0] < b_f_a_k_l_p[0]: 250 | realtime_fenbi_df.loc[reverse_index, ['confirm_time', 'price', 'type', 'status', 'price2']] = [ 251 | k_df.index[-1], 252 | reverse_price, 253 | reverse_type, 254 | -1, 255 | np.nan, 256 | ] 257 | realtime_fenbi_df.loc[b_f_a_k_h_p[0], ['confirm_time', 'price', 'type', 'status', 'price2']] = [ 258 | k_df.index[-1], 259 | b_f_a_k_h_p[1], 260 | -1, 261 | -1, 262 | np.nan, 263 | ] 264 | realtime_fenbi_df.loc[b_f_a_k_l_p[0], ['confirm_time', 'price', 'type', 'status', 'price2']] = [ 265 | k_df.index[-1], 266 | b_f_a_k_l_p[1], 267 | 1, 268 | -1, 269 | np.nan, 270 | ] 271 | return realtime_fenbi_df 272 | 273 | if b_f_a_k_h_p[0] > b_f_a_k_l_p[0]: 274 | realtime_fenbi_df.loc[reverse_index, ['confirm_time', 'price', 'type', 'status', 'price2']] = [ 275 | k_df.index[-1], 276 | reverse_price, 277 | reverse_type, 278 | -1, 279 | np.nan, 280 | ] 281 | realtime_fenbi_df.loc[forward_index, ['confirm_time', 'price', 'type', 'status', 'price2']] = [ 282 | k_df.index[-1], 283 | forward_price, 284 | forward_type, 285 | -1, 286 | np.nan, 287 | ] 288 | realtime_fenbi_df.loc[b_f_a_k_l_p[0], ['confirm_time', 'price', 'type', 'status', 'price2']] = [ 289 | k_df.index[-1], 290 | b_f_a_k_l_p[1], 291 | 1, 292 | -1, 293 | np.nan, 294 | ] 295 | realtime_fenbi_df.loc[b_f_a_k_h_p[0], ['confirm_time', 'price', 'type', 'status', 'price2']] = [ 296 | k_df.index[-1], 297 | b_f_a_k_h_p[1], 298 | -1, 299 | -1, 300 | np.nan, 301 | ] 302 | 303 | if forward_index == b_f_a_k_l_p[0]: 304 | realtime_fenbi_df.drop(axis=0, index=reverse_index, inplace=True) 305 | return realtime_fenbi_df 306 | 307 | if b_f_a_k_l_p[0] == b_f_a_k_h_p[0]: 308 | realtime_fenbi_df.loc[reverse_index, ['confirm_time', 'price', 'type', 'status', 'price2']] = [ 309 | k_df.index[-1], 310 | reverse_price, 311 | reverse_type, 312 | -1, 313 | np.nan, 314 | ] 315 | realtime_fenbi_df.loc[b_f_a_k_h_p[0], ['confirm_time', 'price', 'type', 'status', 'price2']] = [ 316 | k_df.index[-1], 317 | b_f_a_k_h_p[1], 318 | -1, 319 | -1, 320 | b_f_a_k_l_p[1], 321 | ] 322 | return realtime_fenbi_df 323 | 324 | raise Exception('Mars Area') 325 | -------------------------------------------------------------------------------- /simple/czsc/CzscRealtimeModelEngineXianduan.py: -------------------------------------------------------------------------------- 1 | # -*- encoding: utf-8 -*- 2 | 3 | from simple.logger.logger import LoggerFactory 4 | logger = LoggerFactory.getLogger(__name__) 5 | class 
CzscRealtimeModelEngineXianduan: 6 | def __init__(self, stock_code, frequency, is_debug=False): 7 | self.is_debug = is_debug 8 | 9 | def execute(self, xianduan_engine, realtime_fenbi_df, realtime_xianduan_df): 10 | confirmed_xianduan_size = realtime_xianduan_df.shape[0] - 1 11 | 12 | cal_count = realtime_fenbi_df.shape[0]-3 13 | for i in range(cal_count): 14 | end_index = i+4 15 | current_realtime_fenbi_df = realtime_fenbi_df.iloc[0:end_index, :] 16 | xianduan_engine.execute(current_realtime_fenbi_df, realtime_xianduan_df,True) 17 | 18 | all_xianduan_size = realtime_xianduan_df.shape[0] 19 | realtime_xianduan_size = all_xianduan_size - confirmed_xianduan_size 20 | 21 | for i in range(realtime_xianduan_size): 22 | idx = -(i + 1) 23 | realtime_xianduan_df.iloc[idx]['status'] = -1 24 | -------------------------------------------------------------------------------- /simple/czsc/CzscRealtimeModelEngineZhongshu.py: -------------------------------------------------------------------------------- 1 | # -*- encoding: utf-8 -*- 2 | 3 | class CzscRealtimeModelEngineZhongshu: 4 | 5 | 6 | def __init__(self, stock_code, frequency, is_debug=False): 7 | self.is_debug = is_debug 8 | 9 | 10 | def execute(self, zhongshu_engine,stock_df,realtime_xianduan_df,realtime_zhongshu_df): 11 | 12 | to_execute_base_xianduan_df = realtime_xianduan_df 13 | 14 | realtime_count = to_execute_base_xianduan_df[to_execute_base_xianduan_df['status']==-1].shape[0] 15 | 16 | for i in range(realtime_count): 17 | idx = i - realtime_count + 1 18 | if idx == 0: 19 | to_execute_xianduan_df=to_execute_base_xianduan_df 20 | else: 21 | to_execute_xianduan_df=to_execute_base_xianduan_df.iloc[:idx,:] 22 | zhongshu_engine.execute(stock_df, to_execute_xianduan_df,realtime_zhongshu_df,True) 23 | -------------------------------------------------------------------------------- /simple/czsc/Utils.py: -------------------------------------------------------------------------------- 1 | # -*- encoding: utf-8 -*- 2 | 3 | 4 | def find_highest_point(index1, index2, k_df, reverse=True): 5 | tmp_df = k_df.loc[index1:index2, 'high'].infer_objects() 6 | if reverse: 7 | tmp_df.sort_index(inplace=True, ascending=False) 8 | max_price = tmp_df.max() 9 | max_time = tmp_df.idxmax() 10 | return [max_time, max_price] 11 | 12 | 13 | def find_lowest_point(index1, index2, k_df, reverse=True): 14 | tmp_df = k_df.loc[index1:index2, 'low'].infer_objects() 15 | if reverse: 16 | tmp_df.sort_index(inplace=True, ascending=False) 17 | min_price = tmp_df.min() 18 | min_time = tmp_df.idxmin() 19 | return [min_time, min_price] 20 | 21 | -------------------------------------------------------------------------------- /simple/czsc/__init__.py: -------------------------------------------------------------------------------- 1 | from simple.czsc import * 2 | __all__ = ["CzscModelEngine","Utils"] -------------------------------------------------------------------------------- /simple/draw/HtmlAutoFit.py: -------------------------------------------------------------------------------- 1 | # -*- encoding: utf-8 -*- 2 | 3 | import os 4 | 5 | 6 | def fit(src_html_path, create_new_file=True): 7 | ''' 8 | html文件自适应窗口大小 9 | ''' 10 | target_path = src_html_path 11 | if create_new_file: 12 | target_path = '%s.replaced.html' % target_path 13 | 14 | file_src = open(src_html_path, "r") 15 | src_lines = file_src.readlines() 16 | chart_id = '' 17 | sec1_idx = -1 18 | sec2_idx = -1 19 | for idx, line in enumerate(src_lines): 20 | if chart_id == '' and line.find("class=\"chart-container\"") != 
-1: 21 | # print(line) 22 | first = line.find("\"") 23 | second = line.find("\"", first + 1) 24 | chart_id = line[first + 1 : second] 25 | # print(chart_id) 26 | continue 27 | if sec1_idx == -1 and line.find("echarts.init") != -1: 28 | # print(line) 29 | sec1_idx = idx 30 | continue 31 | if sec2_idx == -1 and line.find("setOption") != -1: 32 | # print(line) 33 | sec2_idx = idx 34 | break 35 | file_src.close() 36 | 37 | if sec1_idx == -1 or sec2_idx == -1: 38 | raise Exception('解析html错误') 39 | 40 | src_lines.insert(sec1_idx, sec1(chart_id=chart_id)) 41 | src_lines.insert(sec2_idx + 2, sec2(chart_id=chart_id)) 42 | 43 | file_target = open(target_path, "w") 44 | file_target.writelines(src_lines) 45 | file_target.close() 46 | 47 | 48 | def sec1(chart_id): 49 | sec = ( 50 | "/*sec1 start*/\n var worldMapContainer = document.getElementById('%s');\nworldMapContainer.style.width = window.innerWidth+'px';\nworldMapContainer.style.height = (window.innerHeight - 120)+'px';\n/*sec1 end*/\n" 51 | % (chart_id) 52 | ) 53 | return sec 54 | 55 | 56 | def sec2(chart_id): 57 | sec = "/*sec2 start*/\n window.onresize = function(){chart_%s.resize();}\n/*sec2 end*/\n" % chart_id 58 | return sec 59 | -------------------------------------------------------------------------------- /simple/draw/KdrawGrid.py: -------------------------------------------------------------------------------- 1 | # -*- encoding: utf-8 -*- 2 | 3 | from pyecharts.charts import Bar 4 | from pyecharts.charts import Kline 5 | from pyecharts.charts import Line 6 | from pyecharts.charts import Grid 7 | from pyecharts.charts import Page 8 | from pyecharts.charts import Scatter 9 | from pyecharts.commons.utils import JsCode 10 | from pyecharts import options as opts 11 | 12 | import talib 13 | import numpy as np 14 | from pyecharts.render import make_snapshot 15 | from snapshot_selenium import snapshot 16 | from selenium.webdriver.chrome.options import Options 17 | 18 | from pyecharts.globals import CurrentConfig, NotebookType 19 | 20 | # 使用jupyter-lab时需要 21 | # CurrentConfig.NOTEBOOK_TYPE = NotebookType.JUPYTER_LAB 22 | # 可自定义pyecharts资源引用 https://pyecharts.org/#/zh-cn/assets_host 23 | CurrentConfig.ONLINE_HOST = "https://assets.pyecharts.org/assets/" 24 | ''' 25 | 创建Grid 26 | 27 | --- 28 | 29 | 开平点位标记 30 | 31 | points_slices=[ 32 | [id,name,time,price,type], 33 | [id,name,time,price,type] 34 | ] 35 | 36 | type: 37 | bl -> 开多 38 | bs -> 开空 39 | sl -> 平多 40 | ss -> 平空 41 | 42 | 43 | 44 | --- 45 | 46 | long_short_range 多空区间,以markarea方式绘制 47 | { 48 | long:[ 49 | [startindex,endindex], 50 | [startindex,endindex], 51 | ... 52 | ], 53 | short:[ 54 | [startindex,endindex], 55 | [startindex,endindex], 56 | ... 
57 | ] 58 | } 59 | 60 | 61 | ''' 62 | 63 | 64 | class KdrawGrid: 65 | def __init__( 66 | self, 67 | title, 68 | data_df, 69 | fb_df, 70 | xd_df, 71 | zs_df, 72 | macd_or_natr='macd', 73 | grid_width=None, 74 | grid_height=None, 75 | datazoom_start='70', 76 | points=[], 77 | start_idx=None, 78 | long_short_range={}, 79 | xd_reverse_points=[], 80 | xd_forward_points=[], 81 | precision=2, 82 | macd_normalized_df=None, 83 | target_point=[], 84 | ma_df=None, 85 | nav_df=None, 86 | second_type_bs_point=[], 87 | deviation_range={}, 88 | rizhou_df=None, 89 | rizhou_signal_df=None, 90 | minor_draw=['macd'], 91 | ): 92 | # 精度处理 93 | data_df[['open', 'close', 'high', 'low']] = data_df[['open', 'close', 'high', 'low']].astype(float) 94 | data_df[['open', 'close', 'high', 'low']] = data_df[['open', 'close', 'high', 'low']].apply(lambda x: round(x, 2)) 95 | fb_df[['price']] = fb_df[['price']].astype(float) 96 | fb_df[['price']] = fb_df[['price']].apply(lambda x: round(x, 2)) 97 | 98 | if 'macd' in data_df.columns.values.tolist(): 99 | data_df[['macd', 'dif', 'dea']] = data_df[['macd', 'dif', 'dea']].astype(float) 100 | data_df[['macd', 'dif', 'dea']] = data_df[['macd', 'dif', 'dea']].apply(lambda x: round(x, 2)) 101 | 102 | if macd_normalized_df is not None: 103 | macd_normalized_df[['area']] = macd_normalized_df[['area']].astype(float) 104 | macd_normalized_df[['area']] = macd_normalized_df[['area']].apply(lambda x: round(x, 2)) 105 | 106 | # 画图开始时间 107 | if not start_idx is None: 108 | data_df_slices = data_df[data_df.index >= start_idx] 109 | rizhou_df_slices = None if rizhou_df is None else rizhou_df[rizhou_df.index >= start_idx] 110 | rizhou_signal_df_slices = None if rizhou_signal_df is None else rizhou_signal_df[rizhou_df.index >= start_idx] 111 | fb_df_slices = fb_df[fb_df.index >= start_idx] 112 | xd_df_slices = xd_df[xd_df.index >= start_idx] 113 | zs_df_slices = zs_df[zs_df.index >= start_idx] 114 | points_slices = [] 115 | if points: 116 | for p in points: 117 | if p[2] >= start_idx: 118 | points_slices.append(p) 119 | target_point_slices = [] 120 | if target_point: 121 | for p in target_point: 122 | if p[0] >= start_idx: 123 | target_point_slices.append(p) 124 | rizhou_signal_slices = None if rizhou_signal_df_slices is None else rizhou_signal_df_slices['signal_point'].values 125 | ma_df_slices = None if ma_df is None else ma_df[ma_df.index >= start_idx] 126 | nav_df_slices = None if nav_df is None else nav_df[nav_df.index >= start_idx] 127 | 128 | second_type_bs_point_slices = [] 129 | if second_type_bs_point: 130 | for p in second_type_bs_point: 131 | if p[2] >= start_idx: 132 | second_type_bs_point_slices.append(p) 133 | else: 134 | data_df_slices = data_df 135 | rizhou_df_slices = rizhou_df 136 | fb_df_slices = fb_df 137 | xd_df_slices = xd_df 138 | zs_df_slices = zs_df 139 | points_slices = points 140 | target_point_slices = target_point 141 | rizhou_signal_slices = None if rizhou_signal_df is None else rizhou_signal_df['signal_point'].values 142 | ma_df_slices = ma_df 143 | nav_df_slices = nav_df 144 | second_type_bs_point_slices = second_type_bs_point 145 | self.precision = precision 146 | self.title = title 147 | self.macd_or_natr = macd_or_natr 148 | self.grid_width = grid_width 149 | self.grid_height = grid_height 150 | self.datazoom_start = datazoom_start 151 | self.long_short_range = long_short_range 152 | self.deviation_range = deviation_range 153 | self.macd_normalized_df = macd_normalized_df 154 | # K线 155 | self.kline = None 156 | # 分笔 157 | self.fb_line = None 158 | # 实时分笔 
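# (realtime here means the still-forming, unconfirmed stroke: it is drawn
#  separately from the confirmed fb_line above and may change with each new bar)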
159 | self.realtime_fb_line = None 160 | # 线段 161 | self.xd_line = None 162 | # 实时线段 163 | self.realtime_xd_line = None 164 | # macd line,即dif、dea 165 | self.macd_line = None 166 | self.macd_bar = None 167 | # natr 168 | self.natr_line = None 169 | # 日周 170 | self.dailyk_line = None 171 | self.weekk_line = None 172 | # 买卖点标记 173 | self.scatter_point_bl = None 174 | self.scatter_point_bs = None 175 | self.scatter_point_sl = None 176 | self.scatter_point_ss = None 177 | # scatter_point 178 | self.scatter_point = None 179 | # 反转点连线 180 | self.xd_reverse_line = None 181 | # 正向点连线 182 | self.xd_forward_line = None 183 | # macd标准模型 184 | self.macd_normalized_scatter_red = None 185 | self.macd_normalized_scatter_green = None 186 | # 目标点 187 | self.scatter_target_point = None 188 | # 日周信号 189 | self.scatter_rizhou_signal_long = None 190 | self.scatter_rizhou_signal_short = None 191 | # MA 192 | self.ma_line_5 = None 193 | self.ma_line_10 = None 194 | # 净值 195 | self.nav_line = None 196 | 197 | # 模拟买卖点 198 | self.scatter_second_type_bs_point_bl = None 199 | self.scatter_second_type_bs_point_bs = None 200 | 201 | # 幅图 202 | self.minor_draw = minor_draw 203 | 204 | # 获取K线数据 205 | def get_K_data(df): 206 | kdata = df.loc[:, ['open', 'close', 'low', 'high']].to_dict('split')['data'] 207 | xaxis = list(df.index.tolist()) 208 | return xaxis, kdata 209 | 210 | # 获取分笔数据 用于line展示 211 | def get_fb_data(df): 212 | xddata = [] 213 | xaxis = [] 214 | if not df.empty and df.shape[0] > 0: 215 | complete_df = df[df['status'] == 1] 216 | xaxis = list(complete_df.index.tolist()) 217 | xddata = list(complete_df.loc[:, 'price']) 218 | return xaxis, xddata 219 | 220 | # 获取实时分笔 用于line展示 221 | def get_realtime_fb_data(df): 222 | xddata = [] 223 | xaxis = [] 224 | if not df.empty and df.shape[0] > 0: 225 | complete_df = df[df['status'] == 1] 226 | if complete_df.shape[0] > 0: 227 | # 先放入已完成分笔的最后一个 228 | xaxis.append(complete_df.index[-1]) 229 | xddata.append(complete_df.iloc[-1]['price']) 230 | # 再追加所有实时分笔 231 | realtime_df = df[df['status'] == -1] 232 | xaxis.extend(list(realtime_df.index.tolist())) 233 | xddata.extend(list(realtime_df.loc[:, 'price'])) 234 | return xaxis, xddata 235 | 236 | # 获取实时分笔极端行情price2 237 | def get_realtime_extreme_point(df): 238 | data = [] 239 | for idx, row in df.iterrows(): 240 | if not np.isnan(row['price2']): # 如果price2不是nan,表示极端行情出现 241 | data.append({'name': '极端行情', 'coord': [idx, row['price2']]}) 242 | return data 243 | 244 | # 获取已完成的线段数据 用于line展示 245 | def get_xd_data(df): 246 | xddata = [] 247 | xaxis = [] 248 | if not df.empty and df.shape[0] > 0: 249 | complete_df = df[df['status'] == 1] 250 | if complete_df.shape[0] > 0: 251 | xaxis = list(complete_df.index.tolist()) 252 | xddata = [i[1] for i in list(complete_df.loc[:, 'start_point'])] 253 | # 追加最后一个结束点 254 | xaxis.append(complete_df.iloc[-1]['end_point'][0]) 255 | xddata.append(complete_df.iloc[-1]['end_point'][1]) 256 | # 精度处理 257 | xddata = [round(x, self.precision) for x in xddata] 258 | return xaxis, xddata 259 | 260 | # 获取实时线段 用于line展示 261 | def get_realtime_xd_data(df): 262 | xddata = [] 263 | xaxis = [] 264 | if not df.empty and df.shape[0] > 0: 265 | realtime_df = df[df['status'] == -1] 266 | if realtime_df.shape[0] > 0: 267 | xaxis = list(realtime_df.index.tolist()) 268 | xddata = [i[1] for i in list(realtime_df.loc[:, 'start_point'])] 269 | # 追加最后一个结束点 270 | xaxis.append(realtime_df.iloc[-1]['end_point'][0]) 271 | xddata.append(realtime_df.iloc[-1]['end_point'][1]) 272 | # 精度处理 273 | xddata = [round(x, self.precision) 
for x in xddata] 274 | return xaxis, xddata 275 | 276 | # 获取中枢数据 277 | def get_zs(zs_df_slices): 278 | data = [] 279 | for idx, row in zs_df_slices.iterrows(): 280 | data.append( 281 | [ 282 | { 283 | 'name': '', 284 | 'coord': [row['start_point'][0], round(row['top_point'][1], self.precision)], 285 | 'itemStyle': {'color': 'rgba(196,204,211,0.5)'}, 286 | }, # 起始点时间、上沿价格 287 | {'coord': [row['end_point'][0], round(row['bottom_point'][1], self.precision)]}, # 结束点时间、下沿价格 288 | ] 289 | ) 290 | return data 291 | 292 | # 获取中枢高低点数据 293 | def get_zs_high_low(zs_df_slices): 294 | data = [] 295 | for idx, row in zs_df_slices.iterrows(): 296 | data.append({'name': '', 'coord': [row['high_point'][0], round(row['high_point'][1], self.precision)]}) # 最高点时间、最高点价格 297 | data.append({'name': '', 'coord': [row['low_point'][0], round(row['low_point'][1], self.precision)]}) # 最低点时间、最低点价格 298 | return data 299 | 300 | # 获取MACD数据 301 | def get_macd_data(df): 302 | dif = [] 303 | dea = [] 304 | macd = [] 305 | if 'dif' in df.columns.values: 306 | dif = [l for l in df.dif.iteritems()] 307 | dea = [l for l in df.dea.iteritems()] 308 | macd = [l for l in df.macd.iteritems()] 309 | return dif, dea, macd 310 | 311 | # 获取atr数据 312 | def get_natr_data(df): 313 | if 'natr' in df.columns.values: 314 | return [l for l in df.natr.iteritems()] 315 | return [] 316 | 317 | # 获取日周数据dailyk 318 | def get_rizhou_data_dailyk(df): 319 | if df is None or 'dailyk' not in df.columns.values: 320 | return [] 321 | return [l for l in df.dailyk.iteritems()] 322 | 323 | # 获取日周数据weekk 324 | def get_rizhou_data_weekk(df): 325 | if df is None or 'weekk' not in df.columns.values: 326 | return [] 327 | return [l for l in df.weekk.iteritems()] 328 | 329 | # macd柱样式即数值设置 330 | def get_macd_yaxis_data(macd): 331 | data = [] 332 | for t, v in macd: 333 | if v >= 0: # 红柱 334 | data.append(opts.BarItem(name='', value=v, itemstyle_opts=opts.ItemStyleOpts(color="#ef232a"),)) 335 | else: # 绿柱 336 | data.append(opts.BarItem(name='', value=v, itemstyle_opts=opts.ItemStyleOpts(color="#14b143"),)) 337 | return data 338 | 339 | def get_scatter_points_data(points_slices, _type): 340 | if not points_slices: 341 | return [], [] 342 | pts = [] 343 | for p in points_slices: 344 | if p[4] == _type: 345 | pts.append(p) 346 | if not pts: 347 | return [], [] 348 | xdata = [time[2] for time in pts] 349 | ydata = [[round(p[3], self.precision), p[1]] for p in pts] 350 | return xdata, ydata 351 | 352 | def get_long_short_markarea_data(long_short_range): 353 | ''' 354 | 多空数据 355 | ''' 356 | data = [] 357 | if 'long' in long_short_range: 358 | for l in long_short_range['long']: 359 | # 如果时间超过k线最新时间,重新赋值,以避免出现循环画图的情况 360 | ls_start_idx = l[0] 361 | ls_end_idx = min(l[1], data_df_slices.index[-1]) 362 | data.append([{'name': '', 'itemStyle': {'color': 'rgba(255, 71, 0, 0.1)'}, 'xAxis': ls_start_idx}, {'xAxis': ls_end_idx}]) # 多 363 | if 'short' in long_short_range: 364 | for l in long_short_range['short']: 365 | # 如果时间超过k线最新时间,重新赋值,以避免出现循环画图的情况 366 | ls_start_idx = l[0] 367 | ls_end_idx = min(l[1], data_df_slices.index[-1]) 368 | data.append([{'name': '', 'itemStyle': {'color': 'rgba(0, 255, 140, 0.1)'}, 'xAxis': ls_start_idx}, {'xAxis': ls_end_idx}]) # 空 369 | return data 370 | 371 | def get_xd_reverse_data(xd_reverse_points): 372 | if not xd_reverse_points: 373 | return [], [] 374 | return [p[0] for p in xd_reverse_points], [round(p[1], self.precision) for p in xd_reverse_points] 375 | 376 | def get_macd_extremum_point(macd_normalized_df, _type): 377 | ''' 378 | macd标准模型 
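        macd_normalized_df 的每一行对应一段连续的红柱(type=1)或绿柱(type=-1)区间:
        area 为该区间 macd 柱的累计值,extremum_point 为 [时间, 极值柱高度]。
        此函数按 _type 取出各区间的极值点,返回散点的 x 轴(时间)与 y 轴([极值, area]),
        在幅图中以 area 作为标签标注,便于比较各区间面积的大小。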
379 | ''' 380 | xdata = [] 381 | ydata = [] 382 | if macd_normalized_df is None or macd_normalized_df.shape[0] < 1: 383 | return xdata, ydata 384 | if 1 == _type: 385 | tmp_macd_normalized_df = macd_normalized_df[macd_normalized_df['type'] == 1] 386 | else: 387 | tmp_macd_normalized_df = macd_normalized_df[macd_normalized_df['type'] == -1] 388 | for idx, row in tmp_macd_normalized_df.iterrows(): 389 | xdata.append(row['extremum_point'][0]) 390 | ydata.append([round(row['extremum_point'][1], self.precision), row['area']]) 391 | return xdata, ydata 392 | 393 | def get_scatter_target_point_data(target_point_slices): 394 | ''' 395 | 目标点 396 | ''' 397 | if not target_point_slices: 398 | return [], [] 399 | xdata = [p[0] for p in target_point_slices] 400 | ydata = [round(p[1], self.precision) for p in target_point_slices] 401 | return xdata, ydata 402 | 403 | def get_scatter_rizhou_signal_data(rizhou_signal_slices, _type): 404 | ''' 405 | 日周信号 406 | ''' 407 | if rizhou_signal_slices is None or len(rizhou_signal_slices) <= 0: 408 | return [], [] 409 | pts = [] 410 | for p in rizhou_signal_slices: 411 | if p[2] == _type: 412 | pts.append(p) 413 | if len(pts) <= 0: 414 | return [], [] 415 | xdata = [p[0] for p in pts] 416 | ydata = [round(p[1], self.precision) for p in pts] 417 | return xdata, ydata 418 | 419 | # 获取ma数据 420 | def get_ma_data(ma_series): 421 | return list(ma_series.index), list(ma_series) 422 | 423 | def get_deviation_markarea_data(deviation_range): 424 | ''' 425 | 背驰时间窗口 426 | ''' 427 | data = [] 428 | if '1' in deviation_range: 429 | for l in deviation_range['1']: 430 | # 如果时间超过k线最新时间,重新赋值,以避免出现循环画图的情况 431 | ls_start_idx = l[0] 432 | ls_end_idx = min(l[1], data_df_slices.index[-1]) 433 | # data.append([{'name': '', 'itemStyle': {'color': 'rgba(139,0,139, 0.0)'}, 'xAxis': ls_start_idx}, {'xAxis': ls_end_idx}]) # 背离 434 | if '2' in deviation_range: 435 | for l in deviation_range['2']: 436 | # 如果时间超过k线最新时间,重新赋值,以避免出现循环画图的情况 437 | ls_start_idx = l[0] 438 | ls_end_idx = min(l[1], data_df_slices.index[-1]) 439 | data.append([{'name': '', 'itemStyle': {'color': 'rgba(255,255,0, 0.12)'}, 'xAxis': ls_start_idx}, {'xAxis': ls_end_idx}]) # 背驰 440 | if '3' in deviation_range: 441 | for l in deviation_range['3']: 442 | # 如果时间超过k线最新时间,重新赋值,以避免出现循环画图的情况 443 | ls_start_idx = l[0] 444 | ls_end_idx = min(l[1], data_df_slices.index[-1]) 445 | data.append([{'name': '', 'itemStyle': {'color': 'rgba(128,0,128, 0.12)'}, 'xAxis': ls_start_idx}, {'xAxis': ls_end_idx}]) # 背离 + 背驰 446 | if '-1' in deviation_range: 447 | for l in deviation_range['-1']: 448 | # 如果时间超过k线最新时间,重新赋值,以避免出现循环画图的情况 449 | ls_start_idx = l[0] 450 | ls_end_idx = min(l[1], data_df_slices.index[-1]) 451 | # data.append([{'name': '', 'itemStyle': {'color': 'rgba(139,0,139, 0.0)'}, 'xAxis': ls_start_idx}, {'xAxis': ls_end_idx}]) # 背离 452 | if '-2' in deviation_range: 453 | for l in deviation_range['-2']: 454 | # 如果时间超过k线最新时间,重新赋值,以避免出现循环画图的情况 455 | ls_start_idx = l[0] 456 | ls_end_idx = min(l[1], data_df_slices.index[-1]) 457 | data.append([{'name': '', 'itemStyle': {'color': 'rgba(0,255,0, 0.12)'}, 'xAxis': ls_start_idx}, {'xAxis': ls_end_idx}]) # 背驰 458 | if '-3' in deviation_range: 459 | for l in deviation_range['-3']: 460 | # 如果时间超过k线最新时间,重新赋值,以避免出现循环画图的情况 461 | ls_start_idx = l[0] 462 | ls_end_idx = min(l[1], data_df_slices.index[-1]) 463 | data.append([{'name': '', 'itemStyle': {'color': 'rgba(30,144,255, 0.12)'}, 'xAxis': ls_start_idx}, {'xAxis': ls_end_idx}]) # 背离 + 背驰 464 | return data 465 | 466 | ## 主图 k线 467 | kx, ky = 
get_K_data(data_df_slices) 468 | self.kline = ( 469 | Kline() 470 | .add_xaxis(xaxis_data=kx) # 时间轴 471 | .add_yaxis( 472 | series_name="K线", 473 | yaxis_index=0, 474 | y_axis=ky, # y轴数据 475 | itemstyle_opts=opts.ItemStyleOpts(color="#ec0000", color0="#00da3c", border_color="#ec0000", border_color0="#00da3c",), 476 | ) 477 | # 背驰区域 478 | .set_series_opts(markarea_opts=opts.MarkAreaOpts(data=get_deviation_markarea_data(self.deviation_range),)) 479 | .set_global_opts( 480 | title_opts=opts.TitleOpts(title=self.title,), # 标题 481 | xaxis_opts=opts.AxisOpts( 482 | type_="category", axistick_opts=opts.AxisTickOpts(is_show=False), axislabel_opts=opts.LabelOpts(is_show=False), # 分组 # 不显示横坐标点 # 不显示横轴坐标 483 | ), 484 | yaxis_opts=opts.AxisOpts(is_scale=True, splitarea_opts=opts.SplitAreaOpts(is_show=False, areastyle_opts=opts.AreaStyleOpts(opacity=1)),), 485 | datazoom_opts=[ # 缩放组件 486 | opts.DataZoomOpts(is_show=False, type_="inside", xaxis_index=[0, 1], range_start=self.datazoom_start, range_end=100,), 487 | opts.DataZoomOpts(is_show=True, xaxis_index=[0, 1], type_="slider", pos_top="bottom", range_start=self.datazoom_start, range_end=100,), 488 | ], 489 | tooltip_opts=opts.TooltipOpts( # 提示框 490 | trigger="axis", 491 | axis_pointer_type="cross", 492 | background_color="rgba(245, 245, 245, 0.8)", 493 | border_width=1, 494 | border_color="#ccc", 495 | textstyle_opts=opts.TextStyleOpts(color="#000"), 496 | ), 497 | # brush_opts=opts.BrushOpts(x_axis_index="all", brush_link="all", out_of_brush={"colorAlpha": 0.1}, brush_type="lineX",), # 刷选 498 | axispointer_opts=opts.AxisPointerOpts(is_show=True, link=[{"xAxisIndex": "all"}],), # 光标 499 | # toolbox_opts=opts.ToolboxOpts(pos_left='75%', feature=opts.ToolBoxFeatureOpts(data_zoom={"show": False,})), 500 | ) 501 | ) 502 | 503 | ## 主图 分笔 504 | fbx, fby = get_fb_data(fb_df_slices) 505 | self.fb_line = ( 506 | Line() 507 | .add_xaxis(xaxis_data=fbx) 508 | .add_yaxis(series_name="分笔", linestyle_opts=opts.LineStyleOpts(color='#d48265',), y_axis=fby) 509 | .set_global_opts(xaxis_opts=opts.AxisOpts(type_="category")) 510 | ) 511 | 512 | rfbx, rfby = get_realtime_fb_data(fb_df_slices) 513 | self.realtime_fb_line = ( 514 | Line() 515 | .add_xaxis(xaxis_data=rfbx) 516 | .add_yaxis(series_name="实时分笔", linestyle_opts=opts.LineStyleOpts(color='#d48265', type_='dashed',), y_axis=rfby) 517 | .set_series_opts(markpoint_opts=opts.MarkPointOpts(data=get_realtime_extreme_point(fb_df_slices), symbol='pin', symbol_size='30'),) # 极端行情price2 518 | .set_global_opts(xaxis_opts=opts.AxisOpts(type_="category")) 519 | ) 520 | 521 | ## 主图 线段 522 | xdx, xdy = get_xd_data(xd_df_slices) 523 | ls_mk_data = get_long_short_markarea_data(self.long_short_range) 524 | zs_data = get_zs(zs_df_slices) 525 | zs_data.extend(ls_mk_data) # 中枢 + 多空区域 526 | self.xd_line = ( 527 | Line() 528 | .add_xaxis(xaxis_data=xdx) 529 | .add_yaxis(series_name="线段", linestyle_opts=opts.LineStyleOpts(color='#2f4554',), y_axis=xdy,) 530 | .set_series_opts( 531 | markarea_opts=opts.MarkAreaOpts(data=zs_data,), # 中枢 + 多空区域 532 | markpoint_opts=opts.MarkPointOpts(data=get_zs_high_low(zs_df_slices), symbol='arrow', symbol_size='10'), # 中枢最高点最低点标记 533 | itemstyle_opts=opts.ItemStyleOpts(color='#2f4554',), 534 | ) 535 | .set_global_opts(xaxis_opts=opts.AxisOpts(type_="category")) 536 | ) 537 | 538 | ## 主图 实时线段 539 | rxdx, rxdy = get_realtime_xd_data(xd_df_slices) 540 | self.realtime_xd_line = ( 541 | Line() 542 | .add_xaxis(xaxis_data=rxdx) 543 | .add_yaxis(series_name="实时线段", 
linestyle_opts=opts.LineStyleOpts(color='#2f4554', type_='dashed',), y_axis=rxdy,) 544 | .set_global_opts(xaxis_opts=opts.AxisOpts(type_="category")) 545 | ) 546 | 547 | ## 幅图1 macd数据 548 | dif, dea, macd = get_macd_data(data_df_slices) 549 | self.macd_bar = ( 550 | Bar() 551 | .add_xaxis(xaxis_data=[t for t, v in macd]) 552 | .add_yaxis(series_name='macd', yaxis_data=get_macd_yaxis_data(macd)) 553 | .set_global_opts( 554 | xaxis_opts=opts.AxisOpts(type_="category", axislabel_opts=opts.LabelOpts(is_show=False),), # 不限显示刻度标签(时间) 555 | legend_opts=opts.LegendOpts(is_show=False), # 不显示图例 556 | ) 557 | .set_series_opts(label_opts=opts.LabelOpts(is_show=False)) # 不显示标记 558 | ) 559 | # # 幅图1 dif dea 560 | self.macd_line = ( 561 | Line() 562 | .add_xaxis(xaxis_data=[t for t, v in dea]) 563 | .add_yaxis( 564 | series_name="dea", 565 | y_axis=[v for t, v in dea], 566 | is_smooth=True, 567 | label_opts=opts.LabelOpts(is_show=False), 568 | linestyle_opts=opts.LineStyleOpts(color="#0484E8"), 569 | is_symbol_show=False, 570 | ) 571 | .add_yaxis( 572 | series_name="dif", 573 | y_axis=[v for t, v in dif], 574 | is_smooth=True, 575 | label_opts=opts.LabelOpts(is_show=False), 576 | linestyle_opts=opts.LineStyleOpts(color="#E7DA05"), 577 | is_symbol_show=False, 578 | ) 579 | .set_global_opts( 580 | xaxis_opts=opts.AxisOpts(type_="category", axislabel_opts=opts.LabelOpts(is_show=False),), # 不限显示刻度标签(时间) 581 | legend_opts=opts.LegendOpts(is_show=False), # 不显示图例 582 | ) 583 | ) 584 | 585 | ## 幅图2 natr 586 | natr = get_natr_data(data_df_slices) 587 | self.natr_line = ( 588 | Line() 589 | .add_xaxis(xaxis_data=[t for t, v in natr]) 590 | .add_yaxis( 591 | series_name="natr", 592 | y_axis=[v for t, v in natr], 593 | is_smooth=True, 594 | label_opts=opts.LabelOpts(is_show=False), 595 | linestyle_opts=opts.LineStyleOpts(color="#0484E8"), 596 | is_symbol_show=False, 597 | ) 598 | .set_global_opts(xaxis_opts=opts.AxisOpts(type_="category",), legend_opts=opts.LegendOpts(is_show=False),) # 不显示图例 599 | ) 600 | 601 | ## 幅图2 dailyk 602 | dailyk = get_rizhou_data_dailyk(rizhou_df_slices) 603 | self.dailyk_line = ( 604 | Line() 605 | .add_xaxis(xaxis_data=[t for t, v in dailyk]) 606 | .add_yaxis( 607 | series_name="dailyk", 608 | y_axis=[v for t, v in dailyk], 609 | is_smooth=True, 610 | label_opts=opts.LabelOpts(is_show=False), 611 | linestyle_opts=opts.LineStyleOpts(color="#ec0000"), 612 | is_symbol_show=False, 613 | ) 614 | .set_series_opts(markline_opts=opts.MarkLineOpts(is_silent=True, data=[{'yAxis': 0}, {'yAxis': 100}])) 615 | .set_global_opts( 616 | xaxis_opts=opts.AxisOpts(type_="category", axislabel_opts=opts.LabelOpts(is_show=False),), # 不限显示刻度标签(时间) 617 | legend_opts=opts.LegendOpts(is_show=False), # 不显示图例 618 | ) 619 | ) 620 | ## 幅图2 weekk 621 | weekk = get_rizhou_data_weekk(rizhou_df_slices) 622 | self.weekk_line = ( 623 | Line() 624 | .add_xaxis(xaxis_data=[t for t, v in weekk]) 625 | .add_yaxis( 626 | series_name="weekk", 627 | y_axis=[v for t, v in weekk], 628 | is_smooth=True, 629 | label_opts=opts.LabelOpts(is_show=False), 630 | linestyle_opts=opts.LineStyleOpts(color="#ffa500"), 631 | is_symbol_show=False, 632 | ) 633 | .set_global_opts( 634 | xaxis_opts=opts.AxisOpts(type_="category", axislabel_opts=opts.LabelOpts(is_show=False),), # 不限显示刻度标签(时间) 635 | legend_opts=opts.LegendOpts(is_show=False), # 不显示图例 636 | ) 637 | ) 638 | 639 | xdata, ydata = get_scatter_points_data(points_slices, 'bl') 640 | if xdata: 641 | self.scatter_point_bl = ( 642 | Scatter() 643 | .add_xaxis(xdata) 644 | .add_yaxis( 645 | 
series_name='开多', 646 | y_axis=ydata, 647 | label_opts=opts.LabelOpts( 648 | is_show=True, 649 | formatter=JsCode("function(params){return params.value[2] + ' : ' + params.value[1];}"), 650 | position='bottom', 651 | color='#9c009c', 652 | ), 653 | color='#9c009c', 654 | symbol='triangle', 655 | symbol_rotate=0, 656 | itemstyle_opts=opts.ItemStyleOpts(color='#9c009c'), 657 | ) 658 | ) 659 | 660 | xdata, ydata = get_scatter_points_data(points_slices, 'bs') 661 | if xdata: 662 | self.scatter_point_bs = ( 663 | Scatter() 664 | .add_xaxis(xdata) 665 | .add_yaxis( 666 | series_name='开空', 667 | y_axis=ydata, 668 | label_opts=opts.LabelOpts( 669 | is_show=True, formatter=JsCode("function(params){return params.value[2] + ' : ' + params.value[1];}"), position='top', color='#006600', 670 | ), 671 | color='#006600', 672 | symbol='arrow', 673 | symbol_rotate=180, 674 | itemstyle_opts=opts.ItemStyleOpts(color='#006600'), 675 | ) 676 | ) 677 | 678 | xdata, ydata = get_scatter_points_data(points_slices, 'sl') 679 | if xdata: 680 | self.scatter_point_sl = ( 681 | Scatter() 682 | .add_xaxis(xdata) 683 | .add_yaxis( 684 | series_name='平多', 685 | y_axis=ydata, 686 | label_opts=opts.LabelOpts( 687 | is_show=True, formatter=JsCode("function(params){return params.value[2] + ' : ' + params.value[1];}"), position='top', color='#9c009c', 688 | ), 689 | color='#9c009c', 690 | symbol='triangle', 691 | symbol_rotate=180, 692 | ) 693 | ) 694 | 695 | xdata, ydata = get_scatter_points_data(points_slices, 'ss') 696 | if xdata: 697 | self.scatter_point_ss = ( 698 | Scatter() 699 | .add_xaxis(xdata) 700 | .add_yaxis( 701 | series_name='平空', 702 | y_axis=ydata, 703 | label_opts=opts.LabelOpts( 704 | is_show=True, 705 | formatter=JsCode("function(params){return params.value[2] + ' : ' + params.value[1];}"), 706 | position='bottom', 707 | color='#006600', 708 | ), 709 | color='#006600', 710 | symbol='arrow', 711 | symbol_rotate=0, 712 | ) 713 | ) 714 | 715 | ## 反转点连线 716 | rxdx, rxdy = get_xd_reverse_data(xd_reverse_points) 717 | if rxdx: 718 | self.xd_reverse_line = ( 719 | Line() 720 | .add_xaxis(xaxis_data=rxdx) 721 | .add_yaxis( 722 | series_name="反转点", 723 | linestyle_opts=opts.LineStyleOpts( 724 | # color='#2f4554', 725 | type_='dashed', 726 | ), 727 | is_step='end', 728 | y_axis=rxdy, 729 | ) 730 | .set_global_opts(xaxis_opts=opts.AxisOpts(type_="category")) 731 | ) 732 | 733 | ## 正向点连线 734 | rxdx, rxdy = get_xd_reverse_data(xd_forward_points) 735 | if rxdx: 736 | self.xd_forward_line = ( 737 | Line() 738 | .add_xaxis(xaxis_data=rxdx) 739 | .add_yaxis( 740 | series_name="正向点", 741 | linestyle_opts=opts.LineStyleOpts( 742 | # color='#2f4554', 743 | type_='dashed', 744 | ), 745 | is_step='end', 746 | y_axis=rxdy, 747 | ) 748 | .set_global_opts(xaxis_opts=opts.AxisOpts(type_="category")) 749 | ) 750 | 751 | ## macd标准模型-红柱标记 752 | xdata, ydata = get_macd_extremum_point(self.macd_normalized_df, 1) 753 | self.macd_normalized_scatter_red = ( 754 | Scatter() 755 | .add_xaxis(xdata) 756 | .add_yaxis( 757 | series_name='macd标准模型', 758 | y_axis=ydata, 759 | label_opts=opts.LabelOpts(is_show=True, formatter=JsCode("function(params){return params.value[2];}"), position='top', color='#ef232a',), 760 | itemstyle_opts=opts.ItemStyleOpts(color='#ef232a'), 761 | symbol='circle', 762 | symbol_size=1, 763 | symbol_rotate=0, 764 | ) 765 | ) 766 | ## macd标准模型-绿柱标记 767 | xdata, ydata = get_macd_extremum_point(self.macd_normalized_df, -1) 768 | self.macd_normalized_scatter_green = ( 769 | Scatter() 770 | .add_xaxis(xdata) 771 | .add_yaxis( 
772 | series_name='macd标准模型', 773 | y_axis=ydata, 774 | label_opts=opts.LabelOpts(is_show=True, formatter=JsCode("function(params){return params.value[2];}"), position='bottom', color='#14b143',), 775 | itemstyle_opts=opts.ItemStyleOpts(color='#14b143'), 776 | symbol='circle', 777 | symbol_size=1, 778 | symbol_rotate=0, 779 | ) 780 | ) 781 | 782 | # 目标点 783 | xdata, ydata = get_scatter_target_point_data(target_point_slices) 784 | if xdata: 785 | self.scatter_target_point = ( 786 | Scatter() 787 | .add_xaxis(xdata) 788 | .add_yaxis( 789 | series_name='目标点', 790 | y_axis=ydata, 791 | label_opts=opts.LabelOpts(formatter=JsCode("function(params){return params.value[1];}"), position='top', color='#ffa500',), 792 | color='#ffa500', 793 | symbol='circle', 794 | symbol_rotate=0, 795 | symbol_size=5, 796 | ) 797 | ) 798 | 799 | xdata, ydata = get_scatter_rizhou_signal_data(rizhou_signal_slices, 1) 800 | if xdata: 801 | self.scatter_rizhou_signal_long = ( 802 | Scatter() 803 | .add_xaxis(xdata) 804 | .add_yaxis( 805 | series_name='日周多', 806 | y_axis=ydata, 807 | # label_opts=opts.LabelOpts(formatter=JsCode("function(params){return params.value[1];}"), position='top', color='#5c50e6',), 808 | color='#f200ff', 809 | symbol='diamond', 810 | symbol_rotate=0, 811 | symbol_size=15, 812 | ) 813 | ) 814 | xdata, ydata = get_scatter_rizhou_signal_data(rizhou_signal_slices, -1) 815 | if xdata: 816 | self.scatter_rizhou_signal_short = ( 817 | Scatter() 818 | .add_xaxis(xdata) 819 | .add_yaxis( 820 | series_name='日周空', 821 | y_axis=ydata, 822 | # label_opts=opts.LabelOpts(formatter=JsCode("function(params){return params.value[1];}"), position='top', color='#007aff',), 823 | color='#0000ff', 824 | symbol='diamond', 825 | symbol_rotate=0, 826 | symbol_size=15, 827 | ) 828 | ) 829 | 830 | ## ma 831 | if ma_df_slices is not None: 832 | ## ma5 833 | maxdx, maxdy = get_ma_data(ma_df_slices.ma5) 834 | self.ma_line_5 = ( 835 | Line() 836 | .add_xaxis(xaxis_data=maxdx) 837 | .add_yaxis( 838 | series_name="MA5", 839 | linestyle_opts=opts.LineStyleOpts( 840 | # color='#2f4554', 841 | type_='dashed', 842 | ), 843 | label_opts=opts.LabelOpts(is_show=False), 844 | is_symbol_show=False, 845 | y_axis=maxdy, 846 | ) 847 | .set_global_opts(xaxis_opts=opts.AxisOpts(type_="category")) 848 | ) 849 | 850 | ## ma10 851 | maxdx, maxdy = get_ma_data(ma_df_slices.ma10) 852 | self.ma_line_10 = ( 853 | Line() 854 | .add_xaxis(xaxis_data=maxdx) 855 | .add_yaxis( 856 | series_name="MA10", 857 | linestyle_opts=opts.LineStyleOpts( 858 | # color='#2f4554', 859 | type_='dashed', 860 | ), 861 | label_opts=opts.LabelOpts(is_show=False), 862 | is_symbol_show=False, 863 | y_axis=maxdy, 864 | ) 865 | .set_global_opts(xaxis_opts=opts.AxisOpts(type_="category")) 866 | ) 867 | 868 | ## 净值 869 | if nav_df_slices is not None: 870 | self.nav_line = ( 871 | Line() 872 | .add_xaxis(xaxis_data=list(nav_df_slices.index)) 873 | .add_yaxis( 874 | series_name="净值", 875 | xaxis_index=0, 876 | yaxis_index=3, 877 | linestyle_opts=opts.LineStyleOpts( 878 | # color='#2f4554', 879 | type_='dashed', 880 | ), 881 | label_opts=opts.LabelOpts(is_show=False), 882 | is_symbol_show=False, 883 | y_axis=list(nav_df_slices.nav), 884 | ) 885 | ) 886 | 887 | # 模拟买卖点-开多 888 | xdata, ydata = get_scatter_points_data(second_type_bs_point_slices, 'bl') 889 | if xdata: 890 | self.scatter_second_type_bs_point_bl = ( 891 | Scatter() 892 | .add_xaxis(xdata) 893 | .add_yaxis( 894 | series_name='模拟多', 895 | y_axis=ydata, 896 | # label_opts=opts.LabelOpts( 897 | # is_show=True, 
formatter=JsCode("function(params){return params.value[2] + ' : ' + params.value[1];}"), position='bottom', color='#9c009c', 898 | # ), 899 | color='#9c009c', 900 | symbol='circle', 901 | symbol_rotate=0, 902 | itemstyle_opts=opts.ItemStyleOpts(color='#9c009c'), 903 | ) 904 | ) 905 | # 模拟买卖点-开空 906 | xdata, ydata = get_scatter_points_data(second_type_bs_point_slices, 'bs') 907 | if xdata: 908 | self.scatter_second_type_bs_point_bs = ( 909 | Scatter() 910 | .add_xaxis(xdata) 911 | .add_yaxis( 912 | series_name='模拟空', 913 | y_axis=ydata, 914 | # label_opts=opts.LabelOpts( 915 | # is_show=True, formatter=JsCode("function(params){return params.value[2] + ' : ' + params.value[1];}"), position='top', color='#006600', 916 | # ), 917 | color='#006600', 918 | symbol='circle', 919 | itemstyle_opts=opts.ItemStyleOpts(color='#006600'), 920 | ) 921 | ) 922 | 923 | # 主图 924 | def __get_main_chart(self): 925 | overlap = self.kline.overlap(self.fb_line) 926 | overlap = overlap.overlap(self.realtime_fb_line) 927 | overlap = overlap.overlap(self.xd_line) 928 | overlap = overlap.overlap(self.realtime_xd_line) 929 | if self.scatter_point_bl is not None: 930 | overlap = overlap.overlap(self.scatter_point_bl) 931 | if self.scatter_point_bs is not None: 932 | overlap = overlap.overlap(self.scatter_point_bs) 933 | if self.scatter_point_sl is not None: 934 | overlap = overlap.overlap(self.scatter_point_sl) 935 | if self.scatter_point_ss is not None: 936 | overlap = overlap.overlap(self.scatter_point_ss) 937 | if self.xd_reverse_line is not None: 938 | overlap = overlap.overlap(self.xd_reverse_line) 939 | if self.xd_forward_line is not None: 940 | overlap = overlap.overlap(self.xd_forward_line) 941 | if self.scatter_target_point is not None: 942 | overlap = overlap.overlap(self.scatter_target_point) 943 | if self.scatter_rizhou_signal_long is not None: 944 | overlap = overlap.overlap(self.scatter_rizhou_signal_long) 945 | if self.scatter_rizhou_signal_short is not None: 946 | overlap = overlap.overlap(self.scatter_rizhou_signal_short) 947 | if self.ma_line_5 is not None: 948 | overlap = overlap.overlap(self.ma_line_5) 949 | if self.ma_line_10 is not None: 950 | overlap = overlap.overlap(self.ma_line_10) 951 | if self.scatter_second_type_bs_point_bl is not None: 952 | overlap = overlap.overlap(self.scatter_second_type_bs_point_bl) 953 | if self.scatter_second_type_bs_point_bs is not None: 954 | overlap = overlap.overlap(self.scatter_second_type_bs_point_bs) 955 | return overlap 956 | 957 | # 幅图1 958 | def __get_macd_chart(self): 959 | overlap1 = self.macd_line.overlap(self.macd_bar) 960 | overlap1 = overlap1.overlap(self.macd_normalized_scatter_red) 961 | overlap1 = overlap1.overlap(self.macd_normalized_scatter_green) 962 | return overlap1 963 | 964 | # 幅图2 965 | def __get_natr_chart(self): 966 | return self.natr_line 967 | 968 | # 幅图2 969 | def __get_rizhou_chart(self): 970 | overlap2 = self.dailyk_line.overlap(self.weekk_line) 971 | return overlap2 972 | 973 | # 暴露Grid 974 | def get_grid(self): 975 | if self.grid_width and self.grid_height: 976 | grid = Grid(init_opts=opts.InitOpts(width=self.grid_width, height=self.grid_height)) # 设置容器宽高 977 | else: 978 | grid = Grid() 979 | 980 | main_ov = self.__get_main_chart() 981 | macd_ov = self.__get_macd_chart() 982 | natr_ov = self.__get_natr_chart() 983 | rizhou_ov = self.__get_rizhou_chart() 984 | 985 | if self.minor_draw: 986 | if 'macd' in self.minor_draw and 'rizhou' in self.minor_draw: 987 | grid.add(main_ov, grid_opts=opts.GridOpts(pos_left="left", pos_top='10%', 
pos_right='50px', height="50%")) 988 | grid.add(macd_ov, grid_opts=opts.GridOpts(pos_left="left", pos_top='60%', pos_right='50px', height="15%")) 989 | grid.add(rizhou_ov, grid_opts=opts.GridOpts(pos_left="left", pos_top='75%', pos_right='50px', height="15%")) 990 | else: 991 | grid.add(main_ov, grid_opts=opts.GridOpts(pos_left="left", pos_top='10%', pos_right='50px', height="60%")) 992 | if 'macd' == self.macd_or_natr: 993 | grid.add(macd_ov, grid_opts=opts.GridOpts(pos_left="left", pos_top='70%', pos_right='50px', height="20%")) 994 | elif 'natr' == self.macd_or_natr: 995 | grid.add(natr_ov, grid_opts=opts.GridOpts(pos_left="left", pos_top='70%', pos_right='50px', height="20%")) 996 | elif 'rizhou' == self.macd_or_natr: 997 | grid.add(rizhou_ov, grid_opts=opts.GridOpts(pos_left="left", pos_top='70%', pos_right='50px', height="20%")) 998 | else: 999 | grid.add(main_ov, grid_opts=opts.GridOpts(pos_left="left", pos_top='10%', pos_right='50px', height="60%")) 1000 | if 'macd' == self.macd_or_natr: 1001 | grid.add(macd_ov, grid_opts=opts.GridOpts(pos_left="left", pos_top='70%', pos_right='50px', height="20%")) 1002 | elif 'natr' == self.macd_or_natr: 1003 | grid.add(natr_ov, grid_opts=opts.GridOpts(pos_left="left", pos_top='70%', pos_right='50px', height="20%")) 1004 | elif 'rizhou' == self.macd_or_natr: 1005 | grid.add(rizhou_ov, grid_opts=opts.GridOpts(pos_left="left", pos_top='70%', pos_right='50px', height="20%")) 1006 | 1007 | return grid 1008 | -------------------------------------------------------------------------------- /simple/draw/KdrawRealtimeMultiPeriod.py: -------------------------------------------------------------------------------- 1 | # -*- encoding: utf-8 -*- 2 | 3 | from pyecharts.charts import Bar 4 | from pyecharts.charts import Kline 5 | from pyecharts.charts import Line 6 | from pyecharts.charts import Grid 7 | from pyecharts.charts import Page 8 | from pyecharts import options as opts 9 | 10 | import talib 11 | import numpy as np 12 | import os 13 | 14 | from pyecharts.render import make_snapshot 15 | from snapshot_selenium import snapshot 16 | from selenium.webdriver.chrome.options import Options 17 | from selenium import webdriver 18 | 19 | from simple.draw.KdrawGrid import KdrawGrid 20 | from simple.draw.HtmlAutoFit import fit as html_auto_fit 21 | 22 | class KdrawRealtimeMultiPeriod: 23 | def __init__(self, base_name='', html_path='', pic_path='', extends_name='', grids=[],chromedriver_path=''): 24 | if len(grids) == 0: 25 | raise Exception('至少指定一个Grid') 26 | self.base_name = base_name 27 | self.html_path = html_path 28 | self.pic_path = pic_path 29 | self.extends_name = extends_name 30 | self.grids = grids 31 | self.chromedriver_path = chromedriver_path 32 | 33 | def gen_html(self): 34 | ''' 35 | 生成HTML 36 | ''' 37 | if not os.path.exists(self.html_path): 38 | os.makedirs(self.html_path) 39 | print('%s 文件夹不存在,创建完成', self.html_path) 40 | html = '%s/%s-%s-multi.html' % (self.html_path, self.base_name, self.extends_name) 41 | page = Page(layout=Page.SimplePageLayout) 42 | for grid in self.grids: 43 | page.add(grid) 44 | 45 | page.render(path=html) 46 | html_auto_fit(html, create_new_file=False) 47 | return html 48 | 49 | def gen_pic(self): 50 | ''' 51 | 生成图片 52 | ''' 53 | if not os.path.exists(self.pic_path): 54 | os.makedirs(self.pic_path) 55 | print('%s 文件夹不存在,创建完成', self.pic_path) 56 | pic = '%s/%s-%s-multi.png' % (self.pic_path, self.base_name, self.extends_name) 57 | page = Page(layout=Page.SimplePageLayout) 58 | for grid in self.grids: 59 | page.add(grid) 60 
| # 生成图片 61 | chrome_options = Options() 62 | chrome_options.binary_location(self.chromedriver_path) 63 | chrome_options.add_argument('--headless') 64 | # chrome_options.add_argument('--disable-gpu') 65 | make_snapshot(snapshot, page.render(), pic, is_remove_html=True) 66 | return pic 67 | -------------------------------------------------------------------------------- /simple/draw/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/simple-trade/simple-czsc/21abb63d690057a800920fc686e3fd21e9610017/simple/draw/__init__.py -------------------------------------------------------------------------------- /simple/factor/FactorAbstract.py: -------------------------------------------------------------------------------- 1 | # -*- encoding: utf-8 -*- 2 | 3 | from simple.czsc import CzscModelEngine 4 | 5 | class FactorAbstract: 6 | def __init__(self, czsc: CzscModelEngine): 7 | self.czsc = czsc 8 | 9 | def execute(self): 10 | pass 11 | 12 | def get_factor_name(self): 13 | return 'abstract' 14 | -------------------------------------------------------------------------------- /simple/factor/Macd.py: -------------------------------------------------------------------------------- 1 | # -*- encoding: utf-8 -*- 2 | 3 | import numpy as np 4 | import pandas as pd 5 | import talib as ta 6 | import datetime 7 | from simple.factor import factors 8 | from simple.czsc import CzscModelEngine 9 | from simple.factor.FactorAbstract import FactorAbstract 10 | from simple.logger.logger import LoggerFactory 11 | 12 | logger = LoggerFactory.getLogger(__name__) 13 | 14 | 15 | ''' 16 | 17 | ''' 18 | 19 | 20 | class Macd(FactorAbstract): 21 | def __init__(self, czsc: CzscModelEngine): 22 | super(Macd, self).__init__(czsc=czsc) 23 | self.macd_df = pd.DataFrame(columns=['macd', 'dif', 'dea']) 24 | self.last_index = datetime.datetime.now() 25 | 26 | self.normalized_df = pd.DataFrame(columns=['start_point', 'end_point', 'type', 'area', 'extremum_point', 'status']) 27 | self.realtime_normalized_df = None 28 | 29 | def get_factor_name(self): 30 | return factors['macd'] 31 | 32 | def execute(self): 33 | klines = self.czsc.get_klines() 34 | self.__cal_macd(klines) 35 | self.__cal_normalized_df(klines) 36 | return {'macd_df': self.macd_df, 'normalized_df': self.realtime_normalized_df} 37 | 38 | def __cal_macd(self, klines): 39 | if klines.shape[0] > 33: 40 | closes = klines.iloc[-34:].close.values.astype('float') 41 | else: 42 | closes = klines.close.values.astype('float') 43 | 44 | _dif, _dea, _macd = ta.MACDEXT(closes, fastperiod=12, fastmatype=1, slowperiod=26, slowmatype=1, signalperiod=9, signalmatype=1) 45 | if klines.index[-1] == self.last_index: 46 | self.macd_df.loc[self.macd_df.index[-1], 'macd'] = 2 * _macd[-1] 47 | self.macd_df.loc[self.macd_df.index[-1], 'dif'] = _dif[-1] 48 | self.macd_df.loc[self.macd_df.index[-1], 'dea'] = _dea[-1] 49 | else: 50 | self.macd_df.loc[klines.index[-1], ['macd', 'dif', 'dea']] = [2 * _macd[-1], _dif[-1], _dea[-1]] 51 | self.last_index = klines.index[-1] 52 | 53 | def __cal_normalized_df(self, klines): 54 | if self.macd_df.shape[0] < 2: 55 | return 56 | macd_2 = self.macd_df.iloc[-2]['macd'] 57 | if np.isnan(macd_2): 58 | return 59 | if self.normalized_df.shape[0] < 1: 60 | if macd_2 <= 0: 61 | self.__add_normalized(klines, -1, macd_2) 62 | else: 63 | self.__add_normalized(klines, 1, macd_2) 64 | else: 65 | if macd_2 <= 0 and self.normalized_df.iloc[-1]['type'] == 1: 66 | self.normalized_df.iloc[-1]['status'] = 
1 67 | self.__add_normalized(klines, -1, macd_2) 68 | elif macd_2 >= 0 and self.normalized_df.iloc[-1]['type'] == -1: 69 | self.normalized_df.iloc[-1]['status'] = 1 70 | self.__add_normalized(klines, 1, macd_2) 71 | else: 72 | self.__update_normalized(klines, macd_2) 73 | 74 | self.realtime_normalized_df = self.normalized_df.copy(deep=True) 75 | macd_1 = self.macd_df.iloc[-1]['macd'] 76 | if macd_1 <= 0 and self.realtime_normalized_df.iloc[-1]['type'] == 1: 77 | self.__add_normalized_realtime(klines, -1, macd_1) 78 | elif macd_1 >= 0 and self.realtime_normalized_df.iloc[-1]['type'] == -1: 79 | self.__add_normalized_realtime(klines, 1, macd_1) 80 | else: 81 | self.__update_normalized_realtime(klines, macd_1) 82 | 83 | def __add_normalized(self, klines, _type, macd_2): 84 | self.normalized_df.loc[klines.index[-2], ['start_point', 'end_point', 'type', 'area', 'extremum_point', 'status']] = [ 85 | [klines.index[-2], macd_2], 86 | [klines.index[-2], macd_2], 87 | _type, 88 | macd_2, 89 | [klines.index[-2], macd_2], 90 | 0, 91 | ] 92 | 93 | def __update_normalized(self, klines, macd_2): 94 | extremum_macd = self.normalized_df.iloc[-1]['extremum_point'][1] 95 | _type = self.normalized_df.iloc[-1]['type'] 96 | if (macd_2 <= extremum_macd and _type == -1) or (macd_2 >= extremum_macd and _type == 1): 97 | self.normalized_df.iloc[-1]['extremum_point'][0] = klines.index[-2] 98 | self.normalized_df.iloc[-1]['extremum_point'][1] = macd_2 99 | 100 | self.normalized_df.loc[self.normalized_df.index[-1], 'area'] = self.normalized_df.iloc[-1]['area'] + macd_2 101 | self.normalized_df.iloc[-1]['end_point'][0] = klines.index[-2] 102 | self.normalized_df.iloc[-1]['end_point'][1] = macd_2 103 | 104 | def __add_normalized_realtime(self, klines, _type, macd_1): 105 | realtime_index = klines.index[-1] 106 | self.realtime_normalized_df.loc[realtime_index, ['start_point', 'end_point', 'type', 'area', 'extremum_point', 'status']] = [ 107 | [realtime_index, macd_1], 108 | [realtime_index, macd_1], 109 | _type, 110 | macd_1, 111 | [realtime_index, macd_1], 112 | -1, 113 | ] 114 | 115 | def __update_normalized_realtime(self, klines, macd_1): 116 | extremum_macd = self.realtime_normalized_df.iloc[-1]['extremum_point'][1] 117 | _type = self.realtime_normalized_df.iloc[-1]['type'] 118 | if (macd_1 <= extremum_macd and _type == -1) or (macd_1 >= extremum_macd and _type == 1): 119 | self.realtime_normalized_df.iloc[-1]['extremum_point'][0] = klines.index[-1] 120 | self.realtime_normalized_df.iloc[-1]['extremum_point'][1] = macd_1 121 | self.realtime_normalized_df.loc[self.realtime_normalized_df.index[-1], 'area'] = self.realtime_normalized_df.iloc[-1]['area'] + macd_1 122 | self.realtime_normalized_df.iloc[-1]['end_point'][0] = klines.index[-1] 123 | self.realtime_normalized_df.iloc[-1]['end_point'][1] = macd_1 124 | -------------------------------------------------------------------------------- /simple/factor/XianduanSanmai.py: -------------------------------------------------------------------------------- 1 | # -*- encoding: utf-8 -*- 2 | 3 | from simple.factor import factors 4 | from simple.factor.FactorAbstract import FactorAbstract 5 | 6 | class XianduanSanmai(FactorAbstract): 7 | def get_factor_name(self): 8 | return factors['xianduansanmai'] 9 | 10 | ''' 11 | 返回: 12 | [ 13 | type, 14 | start_point, 15 | end_point 16 | ] 17 | ''' 18 | 19 | def execute(self): 20 | hebing_df, fenxing_df, fenbi_df, xianduan_df, zhongshu_df, realtime_fenbi_df, realtime_xianduan_df, realtime_zhongshu_df = self.czsc.get_containers() 21 | 22 
| if realtime_xianduan_df.shape[0] < 4: 23 | return [] 24 | 25 | if realtime_xianduan_df.iloc[-1]['type'] == -1: 26 | cond1 = realtime_xianduan_df.iloc[-4]['start_point'][1] < realtime_xianduan_df.iloc[-2]['start_point'][1] 27 | cond2 = realtime_xianduan_df.iloc[-1]['start_point'][1] > realtime_xianduan_df.iloc[-3]['start_point'][1] 28 | cond3 = realtime_xianduan_df.iloc[-1]['end_point'][1] > realtime_xianduan_df.iloc[-3]['start_point'][1] 29 | if cond1 and cond2 and cond3: 30 | return [1, realtime_xianduan_df.iloc[-1]['start_point'], realtime_xianduan_df.iloc[-1]['end_point']] 31 | else: 32 | return [] 33 | else: 34 | cond1 = realtime_xianduan_df.iloc[-4]['start_point'][1] > realtime_xianduan_df.iloc[-2]['start_point'][1] 35 | cond2 = realtime_xianduan_df.iloc[-1]['start_point'][1] < realtime_xianduan_df.iloc[-3]['start_point'][1] 36 | cond3 = realtime_xianduan_df.iloc[-1]['end_point'][1] < realtime_xianduan_df.iloc[-3]['start_point'][1] 37 | if cond1 and cond2 and cond3: 38 | return [-1, realtime_xianduan_df.iloc[-1]['start_point'], realtime_xianduan_df.iloc[-1]['end_point']] 39 | else: 40 | return [] 41 | -------------------------------------------------------------------------------- /simple/factor/__init__.py: -------------------------------------------------------------------------------- 1 | # 因子枚举 2 | factors = { 3 | 'xianduansanmai': 'xianduansanmai', 4 | 'macd': 'macd', 5 | } 6 | 7 | 8 | from simple.factor import FactorAbstract 9 | from simple.factor import Macd 10 | from simple.factor import XianduanSanmai 11 | 12 | -------------------------------------------------------------------------------- /simple/logger/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/simple-trade/simple-czsc/21abb63d690057a800920fc686e3fd21e9610017/simple/logger/__init__.py -------------------------------------------------------------------------------- /simple/logger/logger.py: -------------------------------------------------------------------------------- 1 | # -*- encoding: utf-8 -*- 2 | ''' 3 | @File : logger.py 4 | @Time : 2020/03/18 22:08:18 5 | ''' 6 | import logging 7 | import os 8 | 9 | class LoggerFactory: 10 | @staticmethod 11 | def getLogger(name, filepath='.', filename='simple.log'): 12 | 13 | name = name.split('.')[-1] # 截取文件夹名 14 | 15 | logger = logging.getLogger(name) 16 | logger.setLevel(logging.INFO) 17 | formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s") 18 | 19 | # if not os.path.exists(filepath): 20 | # os.makedirs(filepath) 21 | # print('%s 文件夹不存在,创建完成', filepath) 22 | # 创建 handler 输出到文件 23 | # handler = logging.FileHandler(filepath + '/' + filename, mode='w') 24 | # handler.setLevel(logging.INFO) 25 | # handler.setFormatter(formatter) 26 | # handler 输出到控制台 27 | ch = logging.StreamHandler() 28 | ch.setLevel(logging.INFO) 29 | ch.setFormatter(formatter) 30 | # add the handlers to the logger 31 | # logger.addHandler(handler) 32 | logger.addHandler(ch) 33 | return logger 34 | -------------------------------------------------------------------------------- /simple/pusher/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/simple-trade/simple-czsc/21abb63d690057a800920fc686e3fd21e9610017/simple/pusher/__init__.py -------------------------------------------------------------------------------- /simple/pusher/serverj.py: -------------------------------------------------------------------------------- 1 
| import requests 2 | import urllib.parse as p 3 | 4 | def wx_push(text='计算完成', desp='', url=''): # 将标题 text 与正文 desp 以 POST 方式推送到 url 指定的微信推送接口 5 | data = {'text': text, 'desp': desp} 6 | resp = requests.post(url=url, data=data) 7 | print('push resp: %s' % resp) 8 | 9 | -------------------------------------------------------------------------------- /simple/requirements.txt: -------------------------------------------------------------------------------- 1 | numpy==1.18.1 2 | pandas==0.24.2 3 | pyecharts==1.7.1 4 | requests==2.23.0 5 | selenium==3.141.0 6 | simple==0.1.1 7 | snapshot_selenium==0.0.2 8 | TA_Lib==0.4.17 9 | --------------------------------------------------------------------------------
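附:绘图模块的一个最小调用示意(仅为示例:K线与分笔/线段/中枢 DataFrame 实际应取自缠论引擎 CzscModelEngine 的计算结果,此处用几根示意K线和空表代替,变量名仅作示意):

import pandas as pd
from simple.draw.KdrawGrid import KdrawGrid
from simple.draw.KdrawRealtimeMultiPeriod import KdrawRealtimeMultiPeriod

# 示意K线数据(实际使用中应来自引擎计算结果;如需 MACD 幅图,还需包含 macd/dif/dea 列)
data_df = pd.DataFrame(
    {'open': [10.0, 10.2, 10.1], 'close': [10.2, 10.1, 10.4],
     'high': [10.3, 10.3, 10.5], 'low': [9.9, 10.0, 10.1]},
    index=['2022-01-04', '2022-01-05', '2022-01-06'])
fb_df = pd.DataFrame(columns=['price', 'status'])                        # 分笔
xd_df = pd.DataFrame(columns=['start_point', 'end_point', 'status'])     # 线段
zs_df = pd.DataFrame(columns=['start_point', 'end_point', 'top_point',
                              'bottom_point', 'high_point', 'low_point'])  # 中枢

grid = KdrawGrid(title='demo', data_df=data_df, fb_df=fb_df, xd_df=xd_df, zs_df=zs_df).get_grid()
html_file = KdrawRealtimeMultiPeriod(base_name='demo', html_path='./html',
                                     extends_name='5m', grids=[grid]).gen_html()

gen_pic() 另需本地 chromedriver/selenium 环境用于截图,示例从略。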