├── LICENSE ├── README.md ├── config.ini ├── plot.py ├── plot_ctl.py ├── read_data.py ├── utils.py ├── zen_plot2.py └── zp_logging.py /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 
34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # ZenPlot 2 | 缠论(缠中说禅)绘图代码,绘图实现ZenTheory工程分析出来的分型、笔、线段、走势中枢、走势类型,第1/2/3类买卖点 3 | 4 | 大家在使用过程中有问题请联系我的邮箱:liaoshuilv@163.com 5 | -------------------------------------------------------------------------------- /config.ini: -------------------------------------------------------------------------------- 1 | [conf] 2 | port=3308 3 | dbname=gp 4 | host=10.118.32.90 5 | username=root 6 | password=Cass2021? 7 | -------------------------------------------------------------------------------- /plot.py: -------------------------------------------------------------------------------- 1 | 2 | # -*- coding:utf-8 -*- 3 | #! 
python3

from pandas import DataFrame, Series
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import dates as mdates
from matplotlib import ticker as mticker
#from mpl_finance import candlestick_ohlc
from mplfinance.original_flavor import candlestick_ohlc
from zp_logging import movingaverage

import matplotlib.dates as mpl_dt
from matplotlib.dates import DateFormatter, WeekdayLocator, DayLocator, MONDAY,YEARLY
from matplotlib.dates import MonthLocator,MONTHLY
import datetime as dt
import pylab
import talib

# Sample data locations (Windows paths):
#E:\otherdata\stock_data\1min\utf8\SH000001.csv
# E:\otherdata\stock_data\day\SH999999.csv
# NOTE(review): non-raw string path; none of these escapes bite today, but a
# raw string r'E:\...' would be safer against future edits.
daylinefilespath = 'E:\otherdata\stock_data\day'
# NOTE(review): the original comment here said "Ping An Bank", but SH999999
# conventionally denotes the SSE Composite index -- verify which is intended.
stock_b_code = 'SH999999'
MA1 = 5   # short simple-moving-average window
MA2 = 10  # long simple-moving-average window
startdate = dt.date(2016, 6, 29)
enddate = dt.date(2017, 1, 30)
# mpl_dt.date2num()
# np.datetime64()

def readstkData(rootpath, stockcode):
    """Read one security's day-line CSV into a DataFrame.

    The file is expected at ``<rootpath>\\<stockcode>.csv``.  The 'Amount'
    column is dropped, rows are sorted by the raw integer index, and only
    rows with index < 100 are kept (presumably to cap the plot size --
    confirm before changing).  Raises a generic Exception on read failure.
    """
    returndata = pd.DataFrame()
    # for yearnum in range(0,int((eday - sday).days / 365.25)+1):
    #     theyear = sday + dt.timedelta(days = yearnum * 365)
    #     # build file name
    #     filename = rootpath + theyear.strftime('%Y') + '\\' + str(stockcode).zfill(6) + '.csv'
    #
    #     try:
    #         rawdata = pd.read_csv(filename, parse_dates = True, index_col = 0, encoding = 'gbk')
    #     except IOError:
    #         raise Exception('IoError when reading dayline data file: ' + filename)
    #
    #     returndata = pd.concat([rawdata, returndata])

    filename = rootpath + '\\' + stockcode + '.csv'

    try:
        # rawdata = pd.read_csv(filename, parse_dates = True, index_col = 0, encoding = 'utf8')
        rawdata = pd.read_csv(filename)
    except IOError:
        raise Exception('IoError when reading dayline data file: ' + filename)

    # Wash data
    returndata = pd.concat([rawdata, returndata])
    returndata = returndata.sort_index()
    # returndata.index.name = 'DateTime'
    returndata.drop('Amount', axis=1, inplace = True)
    # returndata.columns = ['Open', 'High', 'Close', 'Low', 'Volume']

    # Keep only the first 100 rows (by raw integer index).
    returndata = returndata[returndata.index < 100]

    return returndata


def main():
    """Plot candlesticks + MA lines + volume, with RSI above and MACD below."""
    days = readstkData(daylinefilespath, stock_b_code)

    # drop the date index from the dateframe & make a copy
    daysreshape = days.reset_index()
    # convert the datetime64 column in the dataframe to 'float days'
    # daysreshape['DateTime']=mdates.date2num(daysreshape['DateTime'].astype(dt.date))
    daysreshape['DateTime'] = mdates.date2num(pd.to_datetime(daysreshape['DateTime']))

    # clean day data for candle view
    daysreshape.drop('Volume', axis=1, inplace = True)
    daysreshape = daysreshape.reindex(columns=['DateTime','Open','High','Low','Close'])

    # movingaverage comes from zp_logging -- presumably a simple MA helper;
    # confirm its output length relative to the input (SP slicing below
    # assumes it shrinks by window-1).
    Av1 = list(movingaverage(daysreshape.Close.values, MA1))
    Av2 = list(movingaverage(daysreshape.Close.values, MA2))
    # SP = number of points that have a full MA2 window available.
    SP = len(daysreshape.DateTime.values[MA2-1:])
    fig = plt.figure(facecolor='#07000d', figsize=(15,10))

    # Main panel: candlesticks (red up / green down, A-share convention).
    ax1 = plt.subplot2grid((6,4), (1,0), rowspan=4, colspan=4, facecolor='#07000d')
    kline_data = daysreshape.values[-SP:]
    candlestick_ohlc(ax1, kline_data, width=.6, colorup='#ff1717', colordown='#53c156')

    Label1 = str(MA1)+' SMA'
    Label2 = str(MA2)+' SMA'
    #
    ax1.plot(daysreshape.DateTime.values[-SP:],Av1[-SP:],'#e1edf9',label=Label1, linewidth=1)
    ax1.plot(daysreshape.DateTime.values[-SP:],Av2[-SP:],'#4ee6fd',label=Label2, linewidth=1.5)
    ax1.grid(True, color='w', linestyle='--')
    ax1.xaxis.set_major_locator(mticker.MaxNLocator(10))
    ax1.xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d'))
    ax1.xaxis.label.set_color("w")
    ax1.yaxis.label.set_color("w")
    ax1.spines['bottom'].set_color("#5998ff")
    ax1.spines['top'].set_color("#5998ff")
    ax1.spines['left'].set_color("#5998ff")
    ax1.spines['right'].set_color("#5998ff")
    ax1.tick_params(axis='y', colors='w')
    plt.gca().yaxis.set_major_locator(mticker.MaxNLocator(prune='upper'))
    ax1.tick_params(axis='x', colors='w')

    plt.rcParams['font.sans-serif'] = ['SimHei'] # Chinese font so CJK labels render
    plt.rcParams['font.size'] = 9
    plt.rcParams['axes.unicode_minus'] = False
    plt.xlabel('日期')
    plt.ylabel('Stock Price and Volume')

    # Draw the volume overlay on a twin y-axis.
    volumeMin = 0
    ax1v = ax1.twinx()
    ax1v.fill_between(daysreshape.DateTime.values[-SP:],volumeMin, days.Volume.values[-SP:], facecolor='#00ffe8', alpha=.4)
    ax1v.axes.yaxis.set_ticklabels([])
    ax1v.grid(False)
    ###Edit this to 3, so it's a bit larger
    ax1v.set_ylim(0, 3*days.Volume.values.max())
    ax1v.spines['bottom'].set_color("#5998ff")
    ax1v.spines['top'].set_color("#5998ff")
    ax1v.spines['left'].set_color("#5998ff")
    ax1v.spines['right'].set_color("#5998ff")
    ax1v.tick_params(axis='x', colors='w')
    ax1v.tick_params(axis='y', colors='w')

    # Draw RSI (the MA legend for the main panel is configured first).
    maLeg = plt.legend(loc=9, ncol=2, prop={'size':7}, fancybox=True, borderaxespad=0.)
    maLeg.get_frame().set_alpha(0.4)
    textEd = pylab.gca().get_legend().get_texts()
    pylab.setp(textEd[0:5], color = 'w')

    ax0 = plt.subplot2grid((6,4), (0,0), sharex=ax1, rowspan=1, colspan=4, facecolor='#07000d')
    # rsi = rsiFunc(daysreshape.Close.values)
    rsi =talib.RSI(daysreshape.Close.values, timeperiod=6)
    rsiCol = '#c1f9f7'
    posCol = '#386d13'
    negCol = '#8f2020'

    # Shade overbought (>=70) and oversold (<=30) regions.
    ax0.plot(daysreshape.DateTime.values[-SP:], rsi[-SP:], rsiCol, linewidth=1.5)
    ax0.axhline(70, color=negCol)
    ax0.axhline(30, color=posCol)
    ax0.fill_between(daysreshape.DateTime.values[-SP:], rsi[-SP:], 70, where=(rsi[-SP:]>=70), facecolor=negCol, edgecolor=negCol, alpha=0.5)
    ax0.fill_between(daysreshape.DateTime.values[-SP:], rsi[-SP:], 30, where=(rsi[-SP:]<=30), facecolor=posCol, edgecolor=posCol, alpha=0.5)
    ax0.set_yticks([30,70])
    ax0.yaxis.label.set_color("w")
    ax0.spines['bottom'].set_color("#5998ff")
    ax0.spines['top'].set_color("#5998ff")
    ax0.spines['left'].set_color("#5998ff")
    ax0.spines['right'].set_color("#5998ff")
    ax0.tick_params(axis='y', colors='w')
    ax0.tick_params(axis='x', colors='w')
    plt.ylabel('RSI')

    # Draw MACD
    ax2 = plt.subplot2grid((6,4), (5,0), sharex=ax1, rowspan=1, colspan=4, facecolor='#07000d')
    fillcolor = '#00ffe8'
    # NOTE(review): nslow/nfast/nema are unused -- talib.MACD below runs with
    # its defaults (12/26/9).
    nslow = 26
    nfast = 12
    nema = 9
    # emaslow, emafast, macd = computeMACD(daysreshape.Close.values)
    # NOTE(review): talib.MACD returns (macd, signal, hist); the names on the
    # left look mislabeled -- the variable 'macd' below actually receives the
    # histogram, and ema9 is then an EMA of that histogram. Verify intent.
    emafast, emaslow, macd = talib.MACD(daysreshape.Close.values)
    # ema9 = ExpMovingAverage(macd, nema)
    ema9 = talib.EMA(macd,timeperiod=9)
    ax2.plot(daysreshape.DateTime.values[-SP:], macd[-SP:], color='#4ee6fd', lw=2)
    ax2.plot(daysreshape.DateTime.values[-SP:], ema9[-SP:], color='#e1edf9', lw=1)
    ax2.fill_between(daysreshape.DateTime.values[-SP:], macd[-SP:]-ema9[-SP:], 0, alpha=0.5, facecolor=fillcolor, edgecolor=fillcolor)
    plt.gca().yaxis.set_major_locator(mticker.MaxNLocator(prune='upper'))
    ax2.spines['bottom'].set_color("#5998ff")
    ax2.spines['top'].set_color("#5998ff")
    ax2.spines['left'].set_color("#5998ff")
    ax2.spines['right'].set_color("#5998ff")
    ax2.tick_params(axis='x', colors='w')
    ax2.tick_params(axis='y', colors='w')
    plt.ylabel('MACD', color='w')
    ax2.yaxis.set_major_locator(mticker.MaxNLocator(nbins=5, prune='upper'))
    for label in ax2.xaxis.get_ticklabels():
        label.set_rotation(45)


    plt.show()
    i=1  # NOTE(review): leftover debug statement, has no effect
    # https://blog.csdn.net/weixin_34498545/article/details/112631706
    # ax.set_xticklabels(['A','B','C','D','E','F','G'])
    # ax.set_yticklabels(['鉴','图','化','视','可','注','关'],family = 'SimHei',fontsize = 14)


if __name__ == "__main__":
    main()


--------------------------------------------------------------------------------
/plot_ctl.py:
--------------------------------------------------------------------------------

# -*- coding:utf-8 -*-
#!
python3

import datetime as dt
import pylab
import talib
import pytz
import tzlocal
import time
import sys
import os
from zp_logging import g_logger
from read_data import *
from utils import *

# Driver script: for each (security, level) pair, ask the DB for the total
# K-line count and then repeatedly shell out to zen_plot2.py, plotting
# successive windows until the whole range is covered.
if __name__ == "__main__":

    # levels = ['1min', '3min', '5min', '15min', '30min']
    # levels = ['5min', '15min', '30min']
    levels = ['1min']
    # stocks = ['000001.XSHG','000002.XSHG','000003.XSHG','000004.XSHG','000005.XSHG','000006.XSHG','000007.XSHG','000008.XSHG','000009.XSHG','000010.XSHG']
    stocks = ['RB8888.XSGE']

    zen_ms_data = ZenMsData('futures', 'config.ini')
    zen_ms_data.LoadAllSecurities()

    bStart = False
    for stock_code in stocks:
        # if stock_code=="SH600438":
        #     bStart = True
        # # bStart = True
        #
        # if bStart==False:
        #     continue
        for level in levels:
            g_logger.info("stock_code=%s, level=%s start", stock_code, level)
            # 1577808000 == 2020-01-01 00:00:00 UTC+8 -- presumably the data
            # start timestamp; confirm against the DB contents.
            all_count = zen_ms_data.GetSecuritiesKlineCount(stock_code, level, 1577808000)
            g_logger.info("all_count=%d", all_count)
            offset = 0
            plot_count = 10
            # NOTE(review): the bound uses ONE_PLOT_KLINE_NUM (from read_data)
            # while offset advances by plot_count -- so each step presumably
            # covers plot_count charts of ONE_PLOT_KLINE_NUM K-lines; verify.
            # The command is built only from the constants above, so os.system
            # is not fed untrusted input here.
            while(all_count>offset*ONE_PLOT_KLINE_NUM):
                str_cmd = 'python zen_plot2.py ' + stock_code + ' ' + level + ' ' + str(offset) + ' ' + str(plot_count)
                g_logger.info(str_cmd)
                os.system(str_cmd)
                offset += plot_count
                time.sleep(3)
            # break


--------------------------------------------------------------------------------
/read_data.py:
--------------------------------------------------------------------------------

# -*- coding:utf-8 -*-
#!
python3

from pandas import DataFrame, Series
import pandas as pd
import numpy as np

import time
import sys
import os
import mysql.connector
import pymysql
import sqlalchemy
from sqlalchemy import create_engine

import configparser

from zp_logging import g_logger
from utils import *

# Maximum number of K-lines drawn per chart
ONE_PLOT_KLINE_NUM=2000

#print("%s:remove watermark end!" % (time.ctime()))
def readKlineData(filepath):
    """Read a tab-separated K-line file into a DataFrame.

    Lines that do not start with '20' (i.e. not a 20xx date) are skipped.
    Adds two columns: ModifyDateTime (integer yyyymmddHHMMSS shifted back
    one minute via utils.IntTimeToTs/TsToIntTime -- conversion semantics
    live in utils.py, not visible here) and OriginalIndex (row number).
    """
    g_logger.info('readKlineData filepath=%s', filepath)
    file_skiprows = []
    # NOTE(review): file handle is never closed; a 'with open(...)' would be
    # safer. np.str below was removed in NumPy >= 1.24 -- verify runtime pins.
    file = open(filepath, encoding = 'utf8')
    idx = -1
    while True:
        idx += 1
        line = file.readline()
        if not line:
            break
        if line.startswith( '20' )==False:
            file_skiprows.append(idx)

    returndata = pd.DataFrame()
    try:
        # rawdata = pd.read_csv(filename, parse_dates = True, index_col = 0, encoding = 'utf8')
        rawdata = pd.read_table(filepath, sep='\t', index_col = False, skiprows=file_skiprows,
                                names=['Date','Time','Open','High','Low','Close', 'Volume'],
                                dtype={'Time': np.str})
    except IOError:
        raise Exception('IoError when reading dayline data file:' + filepath)

    # Wash data
    returndata = pd.concat([rawdata, returndata])
    kline_num = len(returndata.index)

    returndata['ModifyDateTime'] = pd.Series(range(kline_num),index = range(kline_num))
    for i in range(kline_num):
        sdate = returndata.loc[i,'Date']
        stime = returndata.loc[i,'Time']
        # "2020/01/02" or "2020-01-02" + "HHMM" + "00" -> 20200102HHMM00
        sdatetime = sdate.replace("/", "", -1).replace("-", "", -1) + stime + "00"
        idatetime = int(sdatetime)
        its = IntTimeToTs(idatetime)
        its -= 60  # shift back one minute -- presumably bar-open vs bar-close labeling; confirm
        idatetime = TsToIntTime(its)
        # sdatetime = str(idatetime)
        # sdatetime = sdatetime[:-2]
        returndata.loc[i, 'ModifyDateTime'] = idatetime
        returndata.loc[i, 'OriginalIndex'] = i

    returndata = returndata.reindex(columns=['Date','Time','Open','High','Low','Close', 'Volume', 'ModifyDateTime', 'OriginalIndex'])
    # returndata = returndata.iloc[0:5000, :]

    return returndata

def readStrokeData(filepath, start_time, end_time):
    """Read stroke ("bi") analysis results from a tab-separated file.

    start_time/end_time are currently unused -- the time-window filter below
    is commented out but the parameters were kept for callers.
    """
    g_logger.info('readStrokeData filepath=%s, start_time=%d, end_time=%d', filepath, start_time, end_time)

    returndata = pd.DataFrame()
    try:
        # rawdata = pd.read_csv(filename, parse_dates = True, index_col = 0, encoding = 'utf8')
        rawdata = pd.read_table(filepath, sep='\t', index_col = False,
                                names=['SeqNum','StartKlineIndex','EndKlineIndex','StartTime','EndTime','Volume','Direction','KlineCount', 'High', 'Low', 'Amount'],
                                dtype={'SeqNum': np.int, 'StartKlineIndex': np.int, 'EndKlineIndex': np.int})
    except IOError:
        raise Exception('IoError when reading stroke data file:' + filepath)

    # Wash data
    returndata = pd.concat([rawdata, returndata])
    # returndata = returndata[returndata['StartTime']<=end_time]
    # returndata = returndata[returndata['EndTime']>=start_time]
    # returndata.reset_index(drop=True, inplace=True)
    returndata = returndata.reindex(columns=['SeqNum','StartKlineIndex','EndKlineIndex','StartTime','EndTime','Volume','Direction','KlineCount', 'High', 'Low', 'Amount'])

    return returndata

def readLineSegmentData(filepath):
    """Read line-segment ("duan") analysis results from a tab-separated file."""
    g_logger.info('readLineSegmentData filepath=%s', filepath)

    returndata = pd.DataFrame()
    try:
        rawdata = pd.read_table(filepath, sep='\t', index_col = False,
                                names=['SeqNum','StartTime','EndTime','StartStrokeIndex','EndStrokeIndex','Volume','Direction','KlineCount', 'High', 'Low', 'Amount'],
                                dtype={'SeqNum': np.int, 'StartStrokeIndex': np.int, 'EndStrokeIndex': np.int})
    except IOError:
        raise Exception('IoError when reading linesegment data file:' + filepath)

    # Wash data
    returndata = pd.concat([rawdata, returndata])
    returndata = 
returndata.reindex(columns=['SeqNum','StartStrokeIndex','EndStrokeIndex','StartTime','EndTime','Volume','Direction','KlineCount', 'High', 'Low', 'Amount'])
    return returndata


def readTrendCentralData(filepath):
    """Read trend-central ("zhongshu") analysis results from a tab-separated file."""
    g_logger.info('readTrendCentralData filepath=%s', filepath)

    returndata = pd.DataFrame()
    try:
        rawdata = pd.read_table(filepath, sep='\t', index_col = False,
                                names=['SeqNum','StartStrokeIndex','EndStrokeIndex','TrendType','High','Low','Highest', 'Lowest'],
                                dtype={'SeqNum': np.int, 'StartStrokeIndex': np.int, 'EndStrokeIndex': np.int})
    except IOError:
        raise Exception('IoError when reading TrendCentral data file:' + filepath)

    # Wash data
    returndata = pd.concat([rawdata, returndata])
    returndata = returndata.reindex(columns=['SeqNum','StartStrokeIndex','EndStrokeIndex','TrendType','High','Low','Highest', 'Lowest'])
    return returndata

def readOrderData(filepath):
    """Read order/trade records from a tab-separated file.

    Only Direction/OpenKlineIdx/CoverKlineIdx survive the final reindex; the
    other columns are parsed but dropped.
    """
    g_logger.info('readOrderData filepath=%s', filepath)

    returndata = pd.DataFrame()
    try:
        rawdata = pd.read_table(filepath, sep='\t', index_col = False,
                                names=['OrderId','OpenTransTime','OpenTransPrice','CoverTransTime','CoverTransPrice',
                                       'StopLossPrice','Profit','Direction','OpenKlineIdx', 'CoverKlineIdx','IsOpen','IsCover'],
                                dtype={'Direction': np.int, 'OpenKlineIdx': np.int, 'CoverKlineIdx': np.int})
    except IOError:
        # NOTE(review): message says "TrendCentral" -- looks copy-pasted from
        # readTrendCentralData; left unchanged here (behavioral string).
        raise Exception('IoError when reading TrendCentral data file:' + filepath)

    # Wash data
    returndata = pd.concat([rawdata, returndata])
    returndata = returndata.reindex(columns=['Direction','OpenKlineIdx','CoverKlineIdx'])
    return returndata

# Class that reads Zen analysis data from MySQL.
class ZenMsData:
    # NOTE(review): these are class-level attributes, shared by all instances;
    # gp_securities/gp_trade_days are mutable and would be shared across
    # instances -- confirm single-instance usage is intended.
    mydb = None            # mysql.connector connection
    connect_info = None    # SQLAlchemy connection URL
    engine = None          # SQLAlchemy engine (used by pd.read_sql)
    securities_type=''     # 'stock' | 'index' | 'futures'
    gp_securities = dict() # code -> {id, code, start_date, end_date}
    gp_trade_days = list() # sorted trading days
    def __init__(self, securities_type, path):
        """Open the DB connection using the [conf] section of the .ini at *path*."""
        self.securities_type = securities_type
        if path=='':
            g_logger.info("ZenMsData init, path is null!")

        try:
            g_logger.info('cfg_path=%s', path)
            cf = configparser.ConfigParser()
            cf.read(path)
            host = cf.get("conf", "host")
            username = cf.get("conf", "username")
            password = cf.get("conf", "password")
            dbname = cf.get("conf", "dbname")
            port = cf.get("conf", "port")
            self.mydb = mysql.connector.connect(
                host=host,
                port=port,
                user=username,
                passwd=password,
                database=dbname
            )
            self.connect_info = 'mysql+pymysql://{}:{}@{}:{}/{}?charset=utf8'.format(username, password, host, port, dbname) #1
            self.engine = create_engine(self.connect_info)
        except Exception as e:
            g_logger.warning(str(e))
            g_logger.exception(e)

    def __del__(self):
        # NOTE(review): if __init__ failed before connecting, mydb is None and
        # this raises during GC -- flagged, not changed here.
        self.mydb.close()

    # Load all security codes.
    def LoadAllSecurities(self):
        """Populate gp_securities from <db>.securities for this securities_type."""
        g_logger.info('LoadAllSecurities begin! securities_type=%s', self.securities_type)
        db_name = ''
        if self.securities_type=='stock':
            db_name = 'gp'
        elif self.securities_type=='index':
            db_name='idx'
        elif self.securities_type=='futures':
            db_name='futures'

        mycursor = self.mydb.cursor()
        try:
            sec_sql = "SELECT id, code, start_date, end_date FROM " + db_name + ".securities"
            mycursor.execute(sec_sql)
            # Fetch all rows.
            results = mycursor.fetchall()
            for row in results:
                oneSecurities = dict()
                oneSecurities['id'] = int(row[0])
                oneSecurities['code'] = row[1]
                oneSecurities['start_date'] = row[2]
                oneSecurities['end_date'] = row[3]
                self.gp_securities[row[1]] = oneSecurities
                # g_logger.debug("code=%s, securities=%s", row[1], str(oneSecurities))
            g_logger.debug("securities length=%d", len(self.gp_securities))
        except Exception as e:
            g_logger.warning(str(e))
            g_logger.exception(e)

#加载所有的交易日 211 | def LoadTradeDays(self): 212 | g_logger.info('LoadTradeDays begin!') 213 | mycursor = self.mydb.cursor() 214 | try: 215 | sec_sql = "SELECT day FROM gp.gp_trade_days ORDER BY day ASC" 216 | mycursor.execute(sec_sql) 217 | # 获取所有记录列表 218 | results = mycursor.fetchall() 219 | for row in results: 220 | self.gp_trade_days.append(row[0]) 221 | 222 | g_logger.debug("gp_trade_days length=%d", len(self.gp_trade_days)) 223 | except Exception as e: 224 | g_logger.warning(str(e)) 225 | g_logger.exception(e) 226 | 227 | #加载所有的gp的kline数据 228 | def LoadSecuritiesKlineData(self, code, period, start_ts): 229 | g_logger.info('LoadSecuritiesKlineData begin! code=%s, period=%s, start_ts=%d', code, period, start_ts) 230 | db_name = '' 231 | if self.securities_type=='stock': 232 | db_name = 'gp' 233 | elif self.securities_type=='index': 234 | db_name='idx' 235 | elif self.securities_type=='futures': 236 | db_name='futures' 237 | 238 | #先找出gp_id 239 | if code not in self.gp_securities: 240 | g_logger.warning("code:%s not in gp_securities", code) 241 | return None 242 | 243 | gp_id = self.gp_securities[code]["id"] 244 | 245 | codes = code.split(".") 246 | if len(codes) != 2: 247 | g_logger.warning("error code:%s", code) 248 | return None 249 | 250 | try: 251 | g_logger.debug("pd.read_sql begin") 252 | table_name = period + "_prices_" + codes[0][-2:] 253 | if self.securities_type=='futures': 254 | table_name = period + "_prices_" + codes[0][0:2] 255 | table_name = table_name.lower() 256 | sec_sql = "SELECT CAST(FROM_UNIXTIME(ts, '%%Y%%m%%d%%H%%i%%s') AS UNSIGNED) AS ModifyDateTime, open, high, low, close, volume, money, factor, IFNULL(divergence, 0) as Divergence FROM " + db_name + "." 
+ table_name \ 257 | + " WHERE gp_id='" + str(gp_id) + "' AND ts>=" + str(start_ts) + " ORDER BY ts ASC" 258 | df = pd.read_sql(sql=sec_sql, con=self.engine) 259 | df['OriginalIndex'] = df.index 260 | df.rename(columns={'open':'Open', 'high':'High', 'low':'Low', 'close':'Close', 'volume':'Volume'}, inplace=True) 261 | g_logger.debug("pd.read_sql end") 262 | 263 | df = df.reindex(columns=['Open','High','Low','Close', 'Volume', 'Divergence', 'ModifyDateTime', 'OriginalIndex']) 264 | g_logger.debug("kline length=%d", len(df)) 265 | return df 266 | except Exception as e: 267 | g_logger.warning(str(e)) 268 | g_logger.exception(e) 269 | return None 270 | 271 | #获取gp的kline count 272 | def GetSecuritiesKlineCount(self, code, period, start_ts): 273 | g_logger.info('GetSecuritiesKlineCount begin! code=%s, period=%s, start_ts=%d', code, period, start_ts) 274 | db_name = '' 275 | if self.securities_type=='stock': 276 | db_name = 'gp' 277 | elif self.securities_type=='index': 278 | db_name='idx' 279 | elif self.securities_type=='futures': 280 | db_name='futures' 281 | 282 | #先找出gp_id 283 | if code not in self.gp_securities: 284 | g_logger.warning("code:%s not in gp_securities", code) 285 | return -1 286 | 287 | gp_id = self.gp_securities[code]["id"] 288 | 289 | codes = code.split(".") 290 | if len(codes) != 2: 291 | g_logger.warning("error code:%s", code) 292 | return -1 293 | 294 | mycursor = self.mydb.cursor() 295 | try: 296 | table_name = period + "_prices_" + codes[0][-2:] 297 | if self.securities_type=='futures': 298 | table_name = period + "_prices_" + codes[0][0:2] 299 | table_name = table_name.lower() 300 | sec_sql = "SELECT count(1) FROM " + db_name + "." 
+ table_name + " WHERE gp_id='" + str(gp_id) + "' AND ts>=" + str(start_ts) 301 | mycursor.execute(sec_sql) 302 | # 获取所有记录列表 303 | results = mycursor.fetchall() 304 | count =0 305 | for row in results: 306 | count = int(row[0]) 307 | g_logger.debug("GetSecuritiesKlineCount code=%s, count=%d", code, count) 308 | return count 309 | except Exception as e: 310 | g_logger.warning(str(e)) 311 | g_logger.exception(e) 312 | return -1 313 | 314 | 315 | if __name__ == "__main__": 316 | 317 | # dataDir = r"E:\otherdata\stock_data\1min\\" 318 | # 319 | # stock_code = sys.argv[1] 320 | # g_logger.info('stock_code:%s', stock_code) 321 | # 322 | # #读取数据 323 | # file_path = dataDir + "utf8\\" + stock_code[:2] + "#" + stock_code[2:] + ".txt" 324 | # returndata = readKlineData(file_path) 325 | # left_data_len = len(returndata.index) 326 | # g_logger.debug("left_data_len=%d", left_data_len) 327 | # 328 | # file_path = dataDir + "stroke\\" + stock_code + ".txt" 329 | # strokes = readStrokeData(file_path) 330 | # strokes_len = len(strokes.index) 331 | # g_logger.debug("strokes_len=%d", strokes_len) 332 | # 333 | # file_path = dataDir + "line_segment\\" + stock_code + ".txt" 334 | # lines = readLineSegmentData(file_path) 335 | # lines_len = len(lines.index) 336 | # g_logger.debug("lines_len=%d", lines_len) 337 | # 338 | # file_path = dataDir + "trend_central\\" + stock_code + ".txt" 339 | # trend_centrals = readTrendCentralData(file_path) 340 | # trend_centrals_len = len(trend_centrals.index) 341 | # g_logger.debug("trend_centrals_len=%d", trend_centrals_len) 342 | 343 | zen_ms_data = ZenMsData('index', 'config.ini') 344 | zen_ms_data.LoadAllSecurities() 345 | zen_ms_data.LoadSecuritiesKlineData('000001.XSHG') 346 | 347 | 348 | -------------------------------------------------------------------------------- /utils.py: -------------------------------------------------------------------------------- 1 | 2 | # -*- coding:utf-8 -*- 3 | #! 
import time
import sys
import os


def IntTimeToTs(starttime):
    """Convert a local-time integer of the form YYYYMMDDhhmmss to a Unix timestamp."""
    remainder, tm_sec = divmod(starttime, 100)
    remainder, tm_min = divmod(remainder, 100)
    remainder, tm_hour = divmod(remainder, 100)
    remainder, tm_mday = divmod(remainder, 100)
    tm_year, tm_mon = divmod(remainder, 100)
    # weekday/yearday slots are ignored by mktime; the DST flag is forced to 0.
    struct = (tm_year, tm_mon, tm_mday, tm_hour, tm_min, tm_sec, 0, 0, 0)
    return int(time.mktime(struct))


def TsToIntTime(ts):
    """Inverse of IntTimeToTs: Unix timestamp -> YYYYMMDDhhmmss integer.

    Returns -1 for a negative timestamp.
    """
    if ts < 0:
        return -1

    tm = time.localtime(ts)
    # Pack year..second into one integer, two decimal digits per field.
    packed = 0
    for field in (tm.tm_year, tm.tm_mon, tm.tm_mday, tm.tm_hour, tm.tm_min, tm.tm_sec):
        packed = packed * 100 + field
    return packed


def FloatCmp(number1, number2):
    """Three-way float comparison with a 1e-6 tolerance.

    Returns 1 when number1 > number2, -1 when number1 < number2, and 0 when
    they differ by no more than the tolerance.
    """
    delta = number1 - number2
    if delta > 0.000001:
        return 1
    if delta < -0.000001:
        return -1
    return 0


if __name__ == "__main__":
    print(FloatCmp(0.001, 0.001))


# -*- coding:utf-8 -*-
#! python3
python3 4 | 5 | from pandas import DataFrame, Series 6 | import pandas as pd 7 | import numpy as np 8 | import matplotlib.pyplot as plt 9 | import matplotlib.patches as mpathes 10 | from matplotlib import dates as mdates 11 | from matplotlib import ticker as mticker 12 | import matplotlib.gridspec as gridspec #分割子图 13 | #from mpl_finance import candlestick_ohlc 14 | from mplfinance.original_flavor import candlestick_ohlc 15 | import mplfinance as mpf 16 | 17 | import talib 18 | import time 19 | import sys 20 | import os 21 | import mysql.connector 22 | 23 | from zp_logging import g_logger 24 | from read_data import * 25 | from utils import * 26 | 27 | 28 | def plot_stock(stock_code, level, idx, offset, df_stockload, strokes, first_kline_stroke_val, lines, first_kline_line_val, trend_centrals): 29 | g_logger.debug("start stock_code:%s, level:%s, idx:%d, offset:%d", stock_code, level, idx, offset) 30 | np.seterr(divide='ignore', invalid='ignore') # 忽略warning 31 | plt.rcParams['font.sans-serif']=['SimHei'] #用来正常显示中文标签 32 | plt.rcParams['axes.unicode_minus']=False #用来正常显示负号 33 | 34 | # file_path = r"E:\otherdata\stock_data\1min\stroke\SH" + stock_code + ".txt" 35 | # strokes = readStrokeData(file_path) 36 | 37 | kline_num = len(df_stockload.index) 38 | count = kline_num/300 39 | 40 | #创建fig对象 41 | fig = plt.figure(figsize=(20*count, 10.8), dpi=100, facecolor="white") 42 | 43 | #设置四个绘图区域 包括 K线(均线),成交量,MACD 44 | gs = gridspec.GridSpec(3, 1, left=0.01, bottom=0.1, right=0.99, top=0.96, wspace=None, hspace=0, height_ratios=[3.5,1,1]) 45 | graph_KAV = fig.add_subplot(gs[0,:]) 46 | graph_VOL = fig.add_subplot(gs[1,:]) 47 | graph_MACD = fig.add_subplot(gs[2,:]) 48 | 49 | # 添加网格 50 | graph_KAV.grid(linestyle='--') 51 | graph_KAV.legend(loc='best') 52 | graph_KAV.set_title(stock_code) 53 | graph_KAV.set_ylabel(u"价格") 54 | graph_KAV.set_xlim(0, len(df_stockload.index)) # 设置一下x轴的范围 55 | 56 | #绘制K线图 57 | g_logger.debug('draw klines') 58 | klines = df_stockload.copy(deep=True) 59 | 
klines['DateTime'] = pd.Series(range(kline_num),index = range(kline_num)) 60 | if 'Date' in klines.columns: 61 | klines.drop('Date', axis=1, inplace = True) 62 | if 'Time' in klines.columns: 63 | klines.drop('Time', axis=1, inplace = True) 64 | # columns=['Date','Time','Open','High','Low','Close', 'Volume', 'ModifyDateTime'] 65 | klines = klines.reindex(columns=['DateTime','Open','High','Low','Close','Volume','ModifyDateTime']) 66 | candlestick_ohlc(graph_KAV, klines.values, width=0.5, colorup='r', colordown='g') # 绘制K线走势 67 | 68 | # 绘制笔 69 | g_logger.debug('draw strokes') 70 | stroke_len = len(strokes.index) 71 | stroke_idx_arr = [] 72 | stroke_val_arr = [] 73 | this_stroke_idx = 0 74 | this_stroke_direction = 0 75 | last_j = 0 76 | for i in range(kline_num): 77 | kline_time = df_stockload.loc[i,'ModifyDateTime'] 78 | kline_ori_idx = int(df_stockload.loc[i,'OriginalIndex']) 79 | # g_logger.debug('stroke kline i:%d' , i) 80 | 81 | for j in range(this_stroke_idx, stroke_len): 82 | # g_logger.debug('stroke j:%d' , j) 83 | 84 | high = strokes.loc[j,'High'] 85 | low = strokes.loc[j,'Low'] 86 | direction = strokes.loc[j,'Direction'] 87 | start_time = strokes.loc[j,'StartTime'] 88 | end_time = strokes.loc[j,'EndTime'] 89 | start_kline_idx = strokes.loc[j,'StartKlineIndex'] 90 | end_kline_idx = strokes.loc[j,'EndKlineIndex'] 91 | 92 | if i==0 and first_kline_stroke_val>0.001: 93 | stroke_idx_arr.append(i) 94 | stroke_val_arr.append(first_kline_stroke_val) 95 | break 96 | elif i==(kline_num-1): 97 | this_stroke_start_kline_idx = strokes.loc[this_stroke_idx, 'StartKlineIndex'] 98 | this_stroke_end_kline_idx = strokes.loc[this_stroke_idx, 'EndKlineIndex'] 99 | if kline_time>=start_time and kline_timekline_time: 128 | this_stroke_idx = j-1 129 | if this_stroke_idx<0: 130 | this_stroke_idx = 0 131 | break 132 | 133 | graph_KAV.plot(stroke_idx_arr, stroke_val_arr, 'black', label='stroke', lw=0.6) 134 | 135 | # 绘制线段 136 | g_logger.debug('draw lines') 137 | line_len = 
len(lines.index) 138 | line_idx_arr = [] 139 | line_val_arr = [] 140 | this_line_idx = 0 141 | this_line_direction = 0 142 | for i in range(kline_num): 143 | kline_time = df_stockload.loc[i,'ModifyDateTime'] 144 | kline_ori_idx = int(df_stockload.loc[i,'OriginalIndex']) 145 | 146 | for j in range(this_line_idx, line_len): 147 | high = lines.loc[j,'High'] 148 | low = lines.loc[j,'Low'] 149 | direction = lines.loc[j,'Direction'] 150 | start_time = lines.loc[j,'StartTime'] 151 | end_time = lines.loc[j,'EndTime'] 152 | start_stroke_idx = lines.loc[j,'StartStrokeIndex'] 153 | end_stroke_idx = lines.loc[j,'EndStrokeIndex'] 154 | start_kline_idx = strokes.loc[start_stroke_idx,'StartKlineIndex'] 155 | end_kline_idx = strokes.loc[end_stroke_idx,'EndKlineIndex'] 156 | 157 | if i==0 and first_kline_line_val>0.001: 158 | line_idx_arr.append(i) 159 | line_val_arr.append(first_kline_line_val) 160 | break 161 | elif i==(kline_num-1): 162 | start_stroke_idx = lines.loc[this_line_idx,'StartStrokeIndex'] 163 | end_stroke_idx = lines.loc[this_line_idx,'EndStrokeIndex'] 164 | this_line_start_kline_idx = strokes.loc[start_stroke_idx, 'StartKlineIndex'] 165 | this_line_end_kline_idx = strokes.loc[end_stroke_idx, 'EndKlineIndex'] 166 | if kline_time>=start_time and kline_timekline_time: 197 | this_line_idx = j-1 198 | if this_line_idx<0: 199 | this_line_idx = 0 200 | break 201 | 202 | graph_KAV.plot(line_idx_arr, line_val_arr, 'blue', label='line', lw=1) 203 | 204 | 205 | #绘制走势中枢 206 | g_logger.debug('draw trend centrals') 207 | trend_central_len = len(trend_centrals.index) 208 | this_trend_central_idx = 0 209 | x = 0 210 | y = 0 211 | width = 0 212 | height = 0 213 | haveRect = False 214 | edge_color = 'black' 215 | for i in range(kline_num): 216 | kline_ori_idx = int(df_stockload.loc[i,'OriginalIndex']) 217 | 218 | for j in range(this_trend_central_idx, trend_central_len): 219 | trend_type = trend_centrals.loc[j,'TrendType'] 220 | high = trend_centrals.loc[j,'High'] 221 | low = 
trend_centrals.loc[j,'Low'] 222 | # start_line_idx = trend_centrals.loc[j,'StartLineIndex'] 223 | # end_line_idx = trend_centrals.loc[j,'EndLineIndex'] 224 | # start_stroke_idx = lines.loc[start_line_idx,'StartStrokeIndex'] 225 | # end_stroke_idx = lines.loc[end_line_idx,'EndStrokeIndex'] 226 | start_stroke_idx = trend_centrals.loc[j, 'StartStrokeIndex'] 227 | end_stroke_idx = trend_centrals.loc[j, 'EndStrokeIndex'] 228 | start_kline_idx = strokes.loc[start_stroke_idx,'StartKlineIndex'] 229 | end_kline_idx = strokes.loc[end_stroke_idx,'EndKlineIndex'] 230 | 231 | if i==0: 232 | if kline_ori_idx>=end_kline_idx: 233 | continue 234 | elif kline_ori_idx>=start_kline_idx and kline_ori_idx=360: 269 | graph_KAV.text(text_x-width/2, text_y, text_str, ha='center', family='fantasy', fontsize=14, style='normal', color=edge_color) 270 | graph_KAV.text(text_x+width/2, text_y, text_str, ha='center', family='fantasy', fontsize=14, style='normal', color=edge_color) 271 | 272 | rect = mpathes.Rectangle(xy , width, height, color=None, edgecolor=edge_color, fill=False, label='trend_central', lw=3) 273 | graph_KAV.add_patch(rect) 274 | this_trend_central_idx = j+1 275 | haveRect = False 276 | break 277 | 278 | # 标注顶底背驰点 279 | for i in range(kline_num): 280 | divergence = int(df_stockload.loc[i, 'Divergence']) 281 | high = df_stockload.loc[i, 'High'] 282 | low = df_stockload.loc[i, 'Low'] 283 | 284 | if divergence==1: 285 | text_x = i 286 | text_y = high*1.01 287 | graph_KAV.text(text_x, text_y, 'TDiv', ha='center', family='fantasy', fontsize=14, style='normal', color='Crimson') 288 | elif divergence==2: 289 | text_x = i 290 | text_y = low*0.99 291 | graph_KAV.text(text_x, text_y, 'BDiv', ha='center', family='fantasy', fontsize=14, style='normal', color='LimeGreen') 292 | 293 | # 标注开平仓点 0:无操作 1:open-buy 2: open-sell 3:cover-buy 4: cover-sell 294 | for i in range(kline_num): 295 | trade_type = int(df_stockload.loc[i, 'TradeType']) 296 | high = df_stockload.loc[i, 'High'] 297 | low = 
df_stockload.loc[i, 'Low'] 298 | 299 | if trade_type==1: 300 | text_x = i 301 | text_y = low*0.99 302 | graph_KAV.text(text_x, text_y, 'O-B', ha='center', family='fantasy', fontsize=14, style='normal', color='Crimson') 303 | elif trade_type==2: 304 | text_x = i 305 | text_y = high*1.01 306 | graph_KAV.text(text_x, text_y, 'O-S', ha='center', family='fantasy', fontsize=14, style='normal', color='LimeGreen') 307 | elif trade_type==3: 308 | text_x = i 309 | text_y = low*0.99 310 | graph_KAV.text(text_x, text_y, 'C-B', ha='center', family='fantasy', fontsize=14, style='normal', color='Crimson') 311 | elif trade_type==4: 312 | text_x = i 313 | text_y = high*1.01 314 | graph_KAV.text(text_x, text_y, 'C-S', ha='center', family='fantasy', fontsize=14, style='normal', color='LimeGreen') 315 | 316 | #绘制移动平均线图 317 | # print('draw mv') 318 | # df_stockload['Ma5'] = df_stockload.Close.rolling(window=5).mean()#pd.rolling_mean(df_stockload.close,window=20) 319 | # df_stockload['Ma10'] = df_stockload.Close.rolling(window=10).mean()#pd.rolling_mean(df_stockload.close,window=30) 320 | # df_stockload['Ma20'] = df_stockload.Close.rolling(window=20).mean()#pd.rolling_mean(df_stockload.close,window=60) 321 | # df_stockload['Ma30'] = df_stockload.Close.rolling(window=30).mean()#pd.rolling_mean(df_stockload.close,window=60) 322 | # df_stockload['Ma60'] = df_stockload.Close.rolling(window=60).mean()#pd.rolling_mean(df_stockload.close,window=60) 323 | # 324 | # graph_KAV.plot(np.arange(0, len(df_stockload.index)), df_stockload['Ma5'],'black', label='M5',lw=1.0) 325 | # graph_KAV.plot(np.arange(0, len(df_stockload.index)), df_stockload['Ma10'],'green',label='M10', lw=1.0) 326 | # graph_KAV.plot(np.arange(0, len(df_stockload.index)), df_stockload['Ma20'],'blue',label='M20', lw=1.0) 327 | # graph_KAV.plot(np.arange(0, len(df_stockload.index)), df_stockload['Ma30'],'pink', label='M30',lw=1.0) 328 | # graph_KAV.plot(np.arange(0, len(df_stockload.index)), 
df_stockload['Ma60'],'yellow',label='M60', lw=1.0) 329 | 330 | 331 | #绘制成交量图 332 | g_logger.debug('draw vol') 333 | graph_VOL.bar(np.arange(0, len(df_stockload.index)), df_stockload.Volume,color=['g' if df_stockload.Open[x] > df_stockload.Close[x] else 'r' for x in range(0,len(df_stockload.index))]) 334 | graph_VOL.set_ylabel(u"成交量") 335 | graph_VOL.set_xlim(0,len(df_stockload.index)) #设置一下x轴的范围 336 | graph_VOL.set_xticks(range(0,len(df_stockload.index),15))#X轴刻度设定 每15天标一个日期 337 | 338 | #绘制MACD 339 | g_logger.debug('draw macd') 340 | macd_dif, macd_dea, macd_bar = talib.MACD(df_stockload['Close'].values, fastperiod=12, slowperiod=26, signalperiod=9) 341 | graph_MACD.plot(np.arange(0, len(df_stockload.index)), macd_dif, 'red', label='macd dif') # dif 342 | graph_MACD.plot(np.arange(0, len(df_stockload.index)), macd_dea, 'blue', label='macd dea') # dea 343 | 344 | bar_red = np.where(macd_bar > 0, 2 * macd_bar, 0)# 绘制BAR>0 柱状图 345 | bar_green = np.where(macd_bar < 0, 2 * macd_bar, 0)# 绘制BAR<0 柱状图 346 | graph_MACD.bar(np.arange(0, len(df_stockload.index)), bar_red, facecolor='red') 347 | graph_MACD.bar(np.arange(0, len(df_stockload.index)), bar_green, facecolor='green') 348 | 349 | graph_MACD.legend(loc='best',shadow=True, fontsize ='10') 350 | graph_MACD.set_ylabel(u"MACD") 351 | graph_MACD.set_xlabel("日期") 352 | graph_MACD.set_xlim(0,len(df_stockload.index)) #设置一下x轴的范围 353 | graph_MACD.set_xticks(range(0,len(df_stockload.index), 15))#X轴刻度设定 每15天标一个日期 354 | 355 | #绘制x轴标签 356 | #先生成DateTime数据 357 | date_times = [] 358 | col_len = len(df_stockload.columns) 359 | df_stockload.insert(col_len, 'DateTime', '' ) 360 | for index in df_stockload.index.values: 361 | # sdate = df_stockload.Date.values[index] 362 | # stime = df_stockload.Time.values[index] 363 | # sdate = sdate.replace("2020", "") 364 | # sdate = sdate.replace("2021", "") 365 | # sdate = sdate.replace("/", "") 366 | # date_time = sdate+stime 367 | date_time = str(df_stockload.ModifyDateTime.values[index]) 368 | # 
date_time = date_time.replace("2020", "") 369 | # date_time = date_time.replace("2021", "") 370 | date_time = date_time[:-2] 371 | # date_times.append(date_time) 372 | df_stockload.loc[index, 'DateTime'] = date_time 373 | # df_stockload['DateTime'] = pd.Series(date_times) 374 | macd_xticklabels = [df_stockload.DateTime.values[index] for index in graph_MACD.get_xticks()] 375 | # graph_MACD.set_xticklabels(pd.to_datetime(macd_xticklabels).strftime('%Y-%m-%d')) # 标签设置为日期 376 | graph_MACD.set_xticklabels(macd_xticklabels) # 标签设置为日期 377 | 378 | # X-轴每个ticker标签都向右倾斜45度 379 | for label in graph_KAV.xaxis.get_ticklabels(): 380 | label.set_visible(False) 381 | 382 | for label in graph_VOL.xaxis.get_ticklabels(): 383 | label.set_visible(False) 384 | 385 | for label in graph_MACD.xaxis.get_ticklabels(): 386 | label.set_rotation(45) 387 | label.set_fontsize(10) # 设置标签字体 388 | 389 | # plt.show() 390 | dir_path = "E:\\othercode\\quant\\plot\\" + level + "\\" 391 | if os.path.exists(dir_path)==False: 392 | os.makedirs( dir_path ) 393 | 394 | str_start_time = str(df_stockload.loc[0, 'ModifyDateTime']) 395 | str_end_time = str(df_stockload.loc[kline_num-1, 'ModifyDateTime']) 396 | plot_name = dir_path + stock_code + "_" + str(idx+offset+1) + "_" + str_start_time[2:8] + "_" + str_end_time[2:8] + ".png" 397 | g_logger.debug("savefig plot_name:%s", plot_name) 398 | plt.savefig(plot_name) 399 | g_logger.debug("end stock_code:" + stock_code) 400 | i=1 401 | # 直线方程的公式有以下几种: 402 | # 两点式:(x-x1)/(x2-x1)=(y-y1)/(y2-y1) 403 | 404 | def plot_highlevel_trend_centrals(stock_code, idx, offset, df_stockload, strokes, lines, trend_centrals): 405 | g_logger.debug("plot_highlevel_trend_centrals start stock_code:%s, idx:%d, offset:%d", stock_code, idx, offset) 406 | np.seterr(divide='ignore', invalid='ignore') # 忽略warning 407 | plt.rcParams['font.sans-serif']=['SimHei'] #用来正常显示中文标签 408 | plt.rcParams['axes.unicode_minus']=False #用来正常显示负号 409 | 410 | kline_num = len(df_stockload.index) 411 | count = 
kline_num/300 412 | 413 | #创建fig对象 414 | fig = plt.figure(figsize=(20*count, 10.8), dpi=100, facecolor="white") 415 | 416 | #设置四个绘图区域 包括 K线(均线),成交量,MACD 417 | gs = gridspec.GridSpec(3, 1, left=0.01, bottom=0.1, right=0.99, top=0.96, wspace=None, hspace=0, height_ratios=[3.5,1,1]) 418 | graph_KAV = fig.add_subplot(gs[0,:]) 419 | graph_VOL = fig.add_subplot(gs[1,:]) 420 | graph_MACD = fig.add_subplot(gs[2,:]) 421 | 422 | # 添加网格 423 | graph_KAV.grid(linestyle='--') 424 | graph_KAV.legend(loc='best') 425 | graph_KAV.set_title(stock_code) 426 | graph_KAV.set_ylabel(u"价格") 427 | graph_KAV.set_xlim(0, len(df_stockload.index)) # 设置一下x轴的范围 428 | 429 | #绘制K线图 430 | g_logger.debug('draw klines') 431 | klines = df_stockload.copy(deep=True) 432 | klines['DateTime'] = pd.Series(range(kline_num),index = range(kline_num)) 433 | if 'Date' in klines.columns: 434 | klines.drop('Date', axis=1, inplace = True) 435 | if 'Time' in klines.columns: 436 | klines.drop('Time', axis=1, inplace = True) 437 | # columns=['Date','Time','Open','High','Low','Close', 'Volume', 'ModifyDateTime'] 438 | klines = klines.reindex(columns=['DateTime','Open','High','Low','Close','Volume','ModifyDateTime']) 439 | candlestick_ohlc(graph_KAV, klines.values, width=0.5, colorup='r', colordown='g') # 绘制K线走势 440 | 441 | #绘制走势中枢 442 | g_logger.debug('draw trend centrals') 443 | trend_central_len = len(trend_centrals.index) 444 | this_trend_central_idx = 0 445 | x = 0 446 | y = 0 447 | width = 0 448 | height = 0 449 | haveRect = False 450 | edge_color = 'black' 451 | for i in range(kline_num): 452 | kline_ori_idx = int(df_stockload.loc[i,'OriginalIndex']) 453 | 454 | for j in range(this_trend_central_idx, trend_central_len): 455 | level = trend_centrals.loc[j, 'Level'] 456 | #只画5分钟级别以上的中枢 457 | if level<=1: 458 | continue 459 | 460 | trend_type = trend_centrals.loc[j,'TrendType'] 461 | high = trend_centrals.loc[j,'High'] 462 | low = trend_centrals.loc[j,'Low'] 463 | start_line_idx = 
trend_centrals.loc[j,'StartLineIndex'] + 1 464 | end_line_idx = trend_centrals.loc[j,'EndLineIndex'] -1 465 | start_stroke_idx = lines.loc[start_line_idx,'StartStrokeIndex'] 466 | end_stroke_idx = lines.loc[end_line_idx,'EndStrokeIndex'] 467 | start_kline_idx = strokes.loc[start_stroke_idx,'StartKlineIndex'] 468 | end_kline_idx = strokes.loc[end_stroke_idx,'EndKlineIndex'] 469 | 470 | if i==0: 471 | if kline_ori_idx>=end_kline_idx: 472 | continue 473 | elif kline_ori_idx>=start_kline_idx and kline_ori_idx=360: 508 | graph_KAV.text(text_x-width/2, text_y, text_str, ha='center', family='fantasy', fontsize=14, style='normal', color=edge_color) 509 | graph_KAV.text(text_x+width/2, text_y, text_str, ha='center', family='fantasy', fontsize=14, style='normal', color=edge_color) 510 | 511 | rect = mpathes.Rectangle(xy , width, height, color=None, edgecolor=edge_color, fill=False, label='trend_central', lw=3) 512 | graph_KAV.add_patch(rect) 513 | this_trend_central_idx = j+1 514 | haveRect = False 515 | break 516 | 517 | #绘制成交量图 518 | g_logger.debug('draw vol') 519 | graph_VOL.bar(np.arange(0, len(df_stockload.index)), df_stockload.Volume,color=['g' if df_stockload.Open[x] > df_stockload.Close[x] else 'r' for x in range(0,len(df_stockload.index))]) 520 | graph_VOL.set_ylabel(u"成交量") 521 | graph_VOL.set_xlim(0,len(df_stockload.index)) #设置一下x轴的范围 522 | graph_VOL.set_xticks(range(0,len(df_stockload.index),15))#X轴刻度设定 每15天标一个日期 523 | 524 | #绘制MACD 525 | g_logger.debug('draw macd') 526 | macd_dif, macd_dea, macd_bar = talib.MACD(df_stockload['Close'].values, fastperiod=12, slowperiod=26, signalperiod=9) 527 | graph_MACD.plot(np.arange(0, len(df_stockload.index)), macd_dif, 'red', label='macd dif') # dif 528 | graph_MACD.plot(np.arange(0, len(df_stockload.index)), macd_dea, 'blue', label='macd dea') # dea 529 | 530 | bar_red = np.where(macd_bar > 0, 2 * macd_bar, 0)# 绘制BAR>0 柱状图 531 | bar_green = np.where(macd_bar < 0, 2 * macd_bar, 0)# 绘制BAR<0 柱状图 532 | 
graph_MACD.bar(np.arange(0, len(df_stockload.index)), bar_red, facecolor='red') 533 | graph_MACD.bar(np.arange(0, len(df_stockload.index)), bar_green, facecolor='green') 534 | 535 | graph_MACD.legend(loc='best',shadow=True, fontsize ='10') 536 | graph_MACD.set_ylabel(u"MACD") 537 | graph_MACD.set_xlabel("日期") 538 | graph_MACD.set_xlim(0,len(df_stockload.index)) #设置一下x轴的范围 539 | graph_MACD.set_xticks(range(0,len(df_stockload.index), 15))#X轴刻度设定 每15天标一个日期 540 | 541 | #绘制x轴标签 542 | #先生成DateTime数据 543 | date_times = [] 544 | col_len = len(df_stockload.columns) 545 | df_stockload.insert(col_len, 'DateTime', '' ) 546 | for index in df_stockload.index.values: 547 | date_time = str(df_stockload.ModifyDateTime.values[index]) 548 | date_time = date_time[:-2] 549 | df_stockload.loc[index, 'DateTime'] = date_time 550 | # df_stockload['DateTime'] = pd.Series(date_times) 551 | macd_xticklabels = [df_stockload.DateTime.values[index] for index in graph_MACD.get_xticks()] 552 | graph_MACD.set_xticklabels(macd_xticklabels) # 标签设置为日期 553 | 554 | # X-轴每个ticker标签都向右倾斜45度 555 | for label in graph_KAV.xaxis.get_ticklabels(): 556 | label.set_visible(False) 557 | 558 | for label in graph_VOL.xaxis.get_ticklabels(): 559 | label.set_visible(False) 560 | 561 | for label in graph_MACD.xaxis.get_ticklabels(): 562 | label.set_rotation(45) 563 | label.set_fontsize(10) # 设置标签字体 564 | 565 | # plt.show() 566 | str_start_time = str(df_stockload.loc[0, 'ModifyDateTime']) 567 | str_end_time = str(df_stockload.loc[kline_num-1, 'ModifyDateTime']) 568 | plot_name = "E:\\othercode\\quant\\plot_5min\\" + stock_code + "_" + str(idx+offset+1) + "_" + str_start_time[2:8] + "_" + str_end_time[2:8] + ".png" 569 | g_logger.debug("savefig plot_name:%s", plot_name) 570 | plt.savefig(plot_name) 571 | g_logger.debug("end stock_code:" + stock_code) 572 | i=1 573 | 574 | 575 | if __name__ == "__main__": 576 | 577 | dataDir = r"E:\otherdata\stock_data\\" 578 | orderDir = r"E:\otherdata\stock_data\order\\" 579 | # 
dataDir = "/data/stock_data/" 580 | # orderDir = "/data/stock_data/order/" 581 | # dataDir5Min = r"E:\otherdata\stock_data\5min\\" 582 | 583 | stock_code = sys.argv[1] # '000001.XSHG' 584 | level = sys.argv[2] # '5min' 585 | offset = int(sys.argv[3]) # 0 586 | count = int(sys.argv[4]) # 10 587 | g_logger.info('stock_code:%s, level:%s, offset:%d, count:%d', stock_code, level, offset, count) 588 | dataDir += level 589 | dataDir += "/" 590 | 591 | #读取数据 592 | zen_ms_data = ZenMsData('futures', 'config.ini') 593 | zen_ms_data.LoadAllSecurities() 594 | klines = zen_ms_data.LoadSecuritiesKlineData(stock_code, level, 1577808000) 595 | if klines is None: 596 | g_logger.warning("read code:%s, level:%s, klines is None!", stock_code, level) 597 | sys.exit(-1) 598 | 599 | left_klines_len = len(klines.index) 600 | g_logger.debug("left_klines_len=%d", left_klines_len) 601 | 602 | start_time = klines.loc[0, 'ModifyDateTime'] 603 | end_time = klines.loc[left_klines_len-1, 'ModifyDateTime'] 604 | 605 | file_path = dataDir + "stroke/" + stock_code + ".txt" 606 | strokes = readStrokeData(file_path, start_time, end_time) 607 | 608 | file_path = dataDir + "line_segment/" + stock_code + ".txt" 609 | lines = readLineSegmentData(file_path) 610 | 611 | file_path = dataDir + "trend_central/" + stock_code + ".txt" 612 | trend_centrals = readTrendCentralData(file_path) 613 | 614 | file_path = orderDir + stock_code + ".txt" 615 | orders = readOrderData(file_path) 616 | 617 | # stroke_len = len(strokes.index) 618 | # for i in range(stroke_len): 619 | # stroke_end_kline_idx = strokes.loc[i,'EndKlineIndex'] 620 | # if i==24245 or stroke_end_kline_idx==0: 621 | # print(i) 622 | 623 | #标注o-c位置 624 | kline_num = len(klines.index) 625 | order_num = len(orders.index) 626 | this_kline_idx = 0 627 | klines['TradeType'] = pd.Series(np.zeros((kline_num,), dtype=int),index = range(kline_num)) 628 | for i in range(order_num): 629 | open_kline_idx = orders.loc[i,'OpenKlineIdx'] 630 | cover_kline_idx = 
orders.loc[i,'CoverKlineIdx'] 631 | direction = orders.loc[i,'Direction'] 632 | 633 | # g_logger.debug('stroke kline i:%d' , i) 634 | 635 | for j in range(this_kline_idx, kline_num): 636 | if j%10000==0: 637 | g_logger.debug('kline j:%d' , j) 638 | kline_ori_idx = int(klines.loc[j,'OriginalIndex']) 639 | if kline_ori_idx!=open_kline_idx and kline_ori_idx!=cover_kline_idx: 640 | continue # 0:无操作 1:open-buy 2: open-sell 3:cover-buy 4: cover-sell 641 | elif kline_ori_idx==open_kline_idx: 642 | if direction==1: 643 | klines.loc[j,'TradeType'] = 1 644 | elif direction==2: 645 | klines.loc[j,'TradeType'] = 2 646 | else: 647 | g_logger.debug('order error direction:%d' , direction) 648 | klines.loc[j,'TradeType'] = 0 649 | elif kline_ori_idx==cover_kline_idx: 650 | if direction==1: 651 | klines.loc[j,'TradeType'] = 4 652 | elif direction==2: 653 | klines.loc[j,'TradeType'] = 3 654 | else: 655 | g_logger.debug('order error direction:%d' , direction) 656 | this_kline_idx = j+1 657 | break 658 | 659 | start_time = TsToIntTime(0) 660 | idx=0 661 | while(idx=start_time) and idx>0: 664 | klines = klines.iloc[idx:, :] 665 | break 666 | elif (modify_time>=start_time) and idx==0: 667 | break 668 | idx+=1 669 | g_logger.debug('start idx:%d' , idx) 670 | 671 | #level级别画图 672 | last_kline_idx = 0 673 | idx = 0 674 | while(idx=ONE_PLOT_KLINE_NUM: 676 | df_stockload = klines.iloc[(idx+offset)*ONE_PLOT_KLINE_NUM:(idx+offset+1)*ONE_PLOT_KLINE_NUM, :] 677 | else: 678 | df_stockload = klines.iloc[(idx+offset)*ONE_PLOT_KLINE_NUM:(idx+offset)*ONE_PLOT_KLINE_NUM+left_klines_len, :] 679 | 680 | df_stockload.reset_index(drop=True, inplace=True) 681 | 682 | # 第一根K线的时间 683 | first_kline_time = df_stockload.loc[0, 'ModifyDateTime'] 684 | first_kline_idx = int(df_stockload.loc[0, 'OriginalIndex']) 685 | first_kline_stroke_val = 0.0 686 | first_kline_line_val = 0.0 687 | stroke_start_kline_idx = 0 688 | stroke_end_kline_idx = 0 689 | high = 0.0 690 | low = 0.0 691 | direction = 0 692 | 693 | # 
查找第一个kline对应的笔值 694 | stroke_len = len(strokes.index) 695 | for i in range(stroke_len): 696 | high = strokes.loc[i,'High'] 697 | low = strokes.loc[i,'Low'] 698 | direction = strokes.loc[i,'Direction'] 699 | stroke_start_kline_idx = strokes.loc[i,'StartKlineIndex'] 700 | stroke_end_kline_idx = strokes.loc[i,'EndKlineIndex'] 701 | kline_count = strokes.loc[i,'KlineCount'] 702 | 703 | if stroke_start_kline_idx!=0 and stroke_end_kline_idx!=0 and first_kline_idx>=stroke_start_kline_idx and first_kline_idxfirst_kline_idx: 712 | break 713 | 714 | g_logger.debug("first_kline_stroke_val=%.2f", first_kline_stroke_val) 715 | 716 | # 查找第一个kline对应的线段值 717 | line_len = len(lines.index) 718 | for i in range(line_len): 719 | high = lines.loc[i,'High'] 720 | low = lines.loc[i,'Low'] 721 | direction = lines.loc[i,'Direction'] 722 | line_start_stroke_idx = lines.loc[i,'StartStrokeIndex'] 723 | line_end_stroke_idx = lines.loc[i,'EndStrokeIndex'] 724 | line_start_kline_idx = strokes.loc[line_start_stroke_idx,'StartKlineIndex'] 725 | line_end_kline_idx = strokes.loc[line_end_stroke_idx,'EndKlineIndex'] 726 | kline_count = lines.loc[i,'KlineCount'] 727 | 728 | if line_start_kline_idx!=0 and line_end_kline_idx!=0 and first_kline_idx>=line_start_kline_idx and first_kline_idx