├── .gitignore
├── README.md
├── easyhistory
│   ├── __init__.py
│   ├── api.py
│   ├── day.py
│   ├── helpers.py
│   ├── history.py
│   └── store.py
├── requirements.txt
├── setup.py
├── test.py
└── test_history.py
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 | 
6 | # C extensions
7 | *.so
8 | 
9 | # Distribution / packaging
10 | .Python
11 | env/
12 | build/
13 | develop-eggs/
14 | dist/
15 | downloads/
16 | eggs/
17 | .eggs/
18 | lib/
19 | lib64/
20 | parts/
21 | sdist/
22 | var/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 | 
27 | # PyInstaller
28 | # Usually these files are written by a python script from a template
29 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
30 | *.manifest
31 | *.spec
32 | 
33 | # Installer logs
34 | pip-log.txt
35 | pip-delete-this-directory.txt
36 | 
37 | # Unit test / coverage reports
38 | htmlcov/
39 | .tox/
40 | .coverage
41 | .coverage.*
42 | .cache
43 | nosetests.xml
44 | coverage.xml
45 | *,cover
46 | .hypothesis/
47 | 
48 | # Translations
49 | *.mo
50 | *.pot
51 | 
52 | # Django stuff:
53 | *.log
54 | 
55 | # Sphinx documentation
56 | docs/_build/
57 | 
58 | # PyBuilder
59 | target/
60 | 
61 | #Ipython Notebook
62 | .ipynb_checkpoints
63 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # easyhistory
2 | A tool for fetching and maintaining historical stock data.
3 | 
4 | ### Import
5 | 
6 | ```python
7 | import easyhistory
8 | ```
9 | 
10 | ### Read history data saved by rqalpha
11 | 
12 | Read Ping An Bank (000001):
13 | 
14 | ```python
15 | easyhistory.history('000001')
16 | 
17 | returns a pandas DataFrame:
18 |                   open   close    high     low    volume  total_turnover \
19 | datetime
20 | 20050104000000  1.6668  1.6491  1.6668  1.6338   6961738        11465602
21 | 20050105000000  1.6491  1.6338  1.6567  1.6061  12739274        20718558
22 | 20050106000000  1.6440  1.6491  1.6668  1.6314  10542101        17333840
23 | 20050107000000  1.6643  1.6466  1.6693  1.6338   7457207        12302853
24 | 20050110000000  1.6466  1.6668  1.6668  1.6112  10406261        17111498
25 | 
26 |                 limit_up  limit_down
27 | datetime
28 | 20050104000000    1.8337      1.4999
29 | 20050105000000    1.8135      1.4846
30 | 20050106000000    1.7983      1.4695
31 | 20050107000000    1.8135      1.4846
32 | 20050110000000    1.8110      1.4822
33 | ```
34 | 
35 | Read the Shanghai Composite Index:
36 | 
37 | ```python
38 | easyhistory.history('000001', market='sh')
39 | ```
40 | 
41 | ### Initialize daily history data
42 | 
43 | ```python
44 | easyhistory.init('D', export='csv', path='history')
45 | ```
46 | 
47 | Note 1: the raw downloaded data is stored under `path/day/raw_data`, the price-adjusted data under `path/day/data`.
48 | 
49 | Note 2: downloading the full history for all stocks takes a long time; it is recommended to download it directly from [Baidu Pan](http://pan.baidu.com/s/1o7rwH0e) (data up to 20160318).
50 | 
51 | ### Update
52 | 
53 | ```python
54 | easyhistory.update('D', export='csv', path='history')
55 | ```
56 | 
57 | ### Indicator system
58 | 
59 | Still in testing. Indicator calculation uses `talib` and `pandas`; `talib` can be called directly to compute more than a hundred indicators, including `MACD`, `EMA`, `MA`, etc.
60 | 
61 | * talib installation: https://github.com/mrjbq7/ta-lib
62 | * pandas: pip install pandas
63 | 
64 | #### Usage
65 | 
66 | ```python
67 | his = easyhistory.History(dtype='D', path='history')
68 | 
69 | # MA calculation, calls the corresponding talib function directly
70 | res = his['000001'].MA(5)
71 | 
72 | 
73 | # the result is returned in pandas DataFrame format
74 | 
75 |              open   high  close    low     volume      amount  factor     MA5
76 | date
77 | 2016-03-10  10.24  10.35  10.15  10.13  506112.94  5193459.68  93.659  10.268
78 | 2016-03-11  10.10  10.22  10.16  10.04  409716.87  4160186.89  93.659  10.220
79 | 
80 | ```
81 | 
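82 | Other `talib` indicators can be called in the same way: the attribute name is forwarded to the matching `talib` function, which is applied to the stock's close prices, and the result is cached. A minimal sketch (the stock code and the parameter values below are only examples):
83 | 
84 | ```python
85 | # MACD returns three arrays: the MACD line, the signal line and the histogram
86 | macd, signal, hist = his['000001'].MACD(fastperiod=12, slowperiod=26, signalperiod=9)
87 | 
88 | # 14-period RSI, computed on the same close prices
89 | rsi = his['000001'].RSI(14)
90 | ```
91 | 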
92 | Note: [available talib indicators and their parameters](https://github.com/mrjbq7/ta-lib) and the [pandas documentation](https://github.com/pydata/pandas)
93 | 
94 | 
95 | ### Q&A
96 | 
97 | Q: Installing `talib` fails with `vcvarsall.bat` not found?
98 | A: Download the `wheels` build from `http://www.lfd.uci.edu/~gohlke/pythonlibs` and install it with `pip install xxx.whl`
--------------------------------------------------------------------------------
/easyhistory/__init__.py:
--------------------------------------------------------------------------------
1 | # coding:utf-8
2 | from .api import *
3 | 
4 | __version__ = '0.0.1'
5 | 
--------------------------------------------------------------------------------
/easyhistory/api.py:
--------------------------------------------------------------------------------
1 | # coding:utf-8
2 | from rqalpha.data.base_data_source import BaseDataSource
3 | import pandas as pd
4 | import easyutils
5 | import datetime
6 | import os
7 | 
8 | from .day import Day
9 | 
10 | 
11 | def init(dtype='D', export='csv', path='history'):
12 |     return Day(path=path, export=export).init()
13 | 
14 | 
15 | def update_single_code(dtype='D', stock_code=None, path='history', export='csv'):
16 |     if stock_code is None:
17 |         raise Exception('stock code is None')
18 |     return Day(path=path, export=export).update_single_code(stock_code)
19 | 
20 | 
21 | def update(dtype='D', export='csv', path='history'):
22 |     return Day(path=path, export=export).update()
23 | 
24 | 
25 | def history(stock_code, market=None, bundle_path='~/.rqalpha/bundle'):
26 |     d = BaseDataSource(os.path.expanduser(bundle_path))
27 | 
28 |     instruments = d._instruments.get_all_instruments()
29 | 
30 |     stock_map = {i.order_book_id: i for i in instruments}
31 |     if not market:
32 |         market = easyutils.get_stock_type(stock_code)
33 |     if market == 'sh':
34 |         stock_code += '.XSHG'
35 |     else:
36 |         stock_code += '.XSHE'
37 |     raw = d._all_day_bars_of(stock_map[stock_code])
38 |     df = pd.DataFrame.from_dict(raw)
39 |     df.set_index('datetime', inplace=True)
40 |     return df
41 | 
42 | #
43 | 
--------------------------------------------------------------------------------
/easyhistory/day.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | import math
3 | import re
4 | import time
5 | from datetime import datetime
6 | from datetime import timedelta
7 | from multiprocessing.pool import ThreadPool
8 | 
9 | import requests
10 | from pyquery import PyQuery
11 | 
12 | from . import helpers
13 | from . import store
14 | 
15 | 
16 | class Day:
17 |     SINA_API = 'http://vip.stock.finance.sina.com.cn/corp/go.php/vMS_FuQuanMarketHistory/stockid/{stock_code}.phtml'
18 |     SINA_API_HOSTNAME = 'vip.stock.finance.sina.com.cn'
19 |     STOCK_CODE_API = 'http://218.244.146.57/static/all.csv'
20 | 
21 |     def __init__(self, path='history', export='csv'):
22 |         self.store = store.use(export=export, path=path, dtype='D')
23 | 
24 |     def init(self):
25 |         stock_codes = self.store.init_stock_codes
26 |         pool = ThreadPool(10)
27 |         pool.map(self.init_stock_history, stock_codes)
28 | 
29 |     def update(self):
30 |         """ Update history data that has already been downloaded """
31 |         stock_codes = self.store.update_stock_codes
32 |         pool = ThreadPool(2)
33 |         pool.map(self.update_single_code, stock_codes)
34 | 
35 |     def update_single_code(self, stock_code):
36 |         """ Update the stored daily history for the given stock
37 |         :param stock_code: stock code
38 |         :return:
39 |         """
40 |         latest_date = self.store.get_his_stock_date(stock_code)
41 |         updated_data = self.get_update_day_history(stock_code, latest_date)
42 | 
43 |         if len(updated_data) == 0 or len(updated_data[0]) == 0:
44 |             return
45 | 
46 |         self.store.write(stock_code, updated_data)
47 | 
48 |     def get_update_day_history(self, stock_code, latest_date):
49 |         data_year = latest_date.year
50 |         data_quarter = helpers.get_quarter(latest_date.month)
51 |         now_year = datetime.now().year
52 |         # use the next day as the update start date, to avoid re-downloading the previous quarter right at a quarter boundary
53 |         tomorrow = datetime.now() + timedelta(days=1)
54 |         now_quarter = helpers.get_quarter(tomorrow.month)
55 | 
56 |         updated_data = list()
57 |         for year in range(data_year, now_year + 1):
58 |             for quarter in range(1, 5):
59 |                 if year == data_year:
60 |                     if quarter < data_quarter:
61 |                         continue
62 |                 if year == now_year:
63 |                     if quarter > now_quarter:
64 |                         continue
65 |                 # if year == now_year:
66 |                 #     if quarter > now_quarter:
67 |                 #         continue
68 |                 # elif year == data_year:
69 |                 #     if quarter < data_quarter:
70 |                 #         continue
71 |                 updated_data += self.get_quarter_history(stock_code, year, quarter)
72 |         updated_data.sort(key=lambda day: day[0])
73 |         return updated_data
74 | 
75 |     def init_stock_history(self, stock_code):
76 |         all_history = self.get_all_history(stock_code)
77 |         if len(all_history) <= 0:
78 |             return
79 |         self.store.write(stock_code, all_history)
80 | 
81 |     def get_all_history(self, stock_code):
82 |         years = self.get_stock_time(stock_code)
83 |         all_history = []
84 |         for year in years:
85 |             year_history = self.get_year_history(stock_code, year)
86 |             all_history += year_history
87 |         all_history.sort(key=lambda day: day[0])
88 |         return all_history
89 | 
90 |     def get_year_history(self, stock_code, year):
91 |         year_history = []
92 |         now_year = datetime.now().year
93 |         now_month = datetime.now().month
94 |         end_quarter = 5 if str(year) != str(now_year) else math.ceil(now_month / 3) + 1
95 |         for quarter in range(1, end_quarter):
96 |             quarter_data = self.get_quarter_history(stock_code, year, quarter)
97 |             if quarter_data is None:
98 |                 continue
99 |             year_history += quarter_data
100 |         return year_history
101 | 
102 |     def get_stock_time(self, stock_code):
103 |         # fetch the years for which data is available
104 |         url = self.SINA_API.format(stock_code=stock_code)
105 |         try:
106 |             dom = PyQuery(url)
107 |         except requests.ConnectionError:
108 |             return []
109 |         year_options = dom('select[name=year] option')
110 |         years = [o.text for o in year_options][::-1]
111 |         return years
112 | 
113 |     def get_quarter_history(self, stock_code, year, quarter):
114 |         year = int(year)
115 |         if year < 1990:
116 |             return list()
117 |         params = dict(
118 |             year=year,
119 |             jidu=quarter
120 |         )
121 |         headers = {
122 |             'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko'
123 |         }
124 |         print('request {},{},{}'.format(stock_code, year, quarter))
125 |         url = self.SINA_API.format(stock_code=stock_code)
126 |         rep = None
127 |         loop_nums = 10
128 |         for i in range(loop_nums):
129 |             try:
130 |                 rep = requests.get(url, params, timeout=3, headers=headers)
131 |                 break
132 |             except requests.ConnectionError:
133 |                 time.sleep(60)
134 |             except Exception as e:
135 |                 with open('error.log', 'a+') as f:
136 |                     f.write(str(e))
137 | 
138 |         print('end request {}, {}, {}'.format(stock_code, year, quarter))
139 |         if rep is None:
140 |             with open('error.txt', 'a+') as f:
141 |                 f.write('{},{},{}'.format(stock_code, year, quarter))
142 |             return list()
143 |         res = self.handle_quarter_history(rep.text)
144 |         return res
145 | 
146 |     def handle_quarter_history(self, rep_html):
147 |         dom = PyQuery(rep_html)
148 |         raw_trows = dom('#FundHoldSharesTable tr')
149 |         empty_history_nodes = 2
150 |         if len(raw_trows) <= empty_history_nodes:
151 |             return list()
152 | 
153 |         unused_head_index_end = 2
154 |         trows = raw_trows[unused_head_index_end:]
155 | 
156 |         res = list()
157 |         for row_td_list in trows:
158 |             td_list = row_td_list.getchildren()
159 |             day_history = []
160 |             for i, td in enumerate(td_list):
161 |                 td_content = td.text_content()
162 |                 date_index = 0
163 |                 if i == date_index:
164 |                     td_content = re.sub(r'\r|\n|\t', '', td_content)
165 |                 day_history.append(td_content)
166 |             self.convert_stock_data_type(day_history)
167 |             res.append(day_history)
168 |         return res
169 | 
170 |     def convert_stock_data_type(self, day_data):
171 |         """Convert each field of a day's data, except the date, to the proper float / int type
172 |         :param day_data: ['2016-02-19', '945.019', '949.701', '940.336', '935.653', '31889824.000', '320939648.000', '93.659']
173 |         :return: ['2016-02-19', 945.019, 949.701, 940.336, 935.653, 31889824.000, 320939648.000, 93.659]
174 |         """
175 |         date_index = 0
176 |         for i, val in enumerate(day_data):
177 |             if i == date_index:
178 |                 continue
179 |             day_data[i] = float(val)
180 | 
--------------------------------------------------------------------------------
/easyhistory/helpers.py:
--------------------------------------------------------------------------------
1 | # coding:utf-8
2 | import math
3 | 
4 | 
5 | def get_quarter(month):
6 |     return math.ceil(int(month) / 3)
7 | 
--------------------------------------------------------------------------------
/easyhistory/history.py:
--------------------------------------------------------------------------------
1 | # coding:utf-8
2 | import os
3 | 
4 | import pandas as pd
5 | import talib
6 | 
7 | 
8 | class Indicator(object):
9 |     def __init__(self, stock_code, history):
10 |         self.stock_code = stock_code
11 |         self.history = history
12 |         self.hisarg = {}
13 | 
14 |     def load_csv_files(self, path):
15 |         file_list = [f for f in os.listdir(path) if f.endswith('.csv')]
16 |         for stock_csv in file_list:
17 |             csv_ext_index_start = -4
18 |             stock_code = stock_csv[:csv_ext_index_start]
19 |             self.market[stock_code] = pd.read_csv(stock_csv, index_col='date')
20 | 
21 |     def __getattr__(self, item):
22 |         def talib_func(*args, **kwargs):
23 |             str_args = ''.join(map(str, args))
24 |             index = item + str_args
25 |             if index in self.hisarg and self.hisarg[index] is not None:
26 |                 return self.hisarg[index]
27 |             func = getattr(talib, item)
28 |             res_arr = func(self.history['close'].values, *args, **kwargs)
29 |             self.hisarg[index] = res_arr
30 |             return self.hisarg[index]
31 | 
32 |         return talib_func
33 | 
34 | 
35 | class History(object):
36 |     def 
__init__(self, dtype='D', path='history', stock=None): 37 | self.market = dict() 38 | data_path = os.path.join(path, 'day', 'data') 39 | self.load_csv_files(data_path, stock) 40 | 41 | def load_csv_files(self, path, stock=None): 42 | if stock and os.path.exists( os.path.join(path, stock+'.csv') ): 43 | stock_csv = stock+'.csv' 44 | stock_code = stock 45 | csv_path = os.path.join(path, stock_csv) 46 | self.market[stock_code] = Indicator(stock_code, pd.read_csv(csv_path, index_col='date')) 47 | return 48 | 49 | file_list = [f for f in os.listdir(path) if f.endswith('.csv')] 50 | for stock_csv in file_list: 51 | csv_ext_index_start = -4 52 | stock_code = stock_csv[:csv_ext_index_start] 53 | 54 | csv_path = os.path.join(path, stock_csv) 55 | self.market[stock_code] = Indicator(stock_code, pd.read_csv(csv_path, index_col='date')) 56 | 57 | def __getitem__(self, item): 58 | return self.market[item] 59 | -------------------------------------------------------------------------------- /easyhistory/store.py: -------------------------------------------------------------------------------- 1 | # coding: utf8 2 | import json 3 | import os 4 | from datetime import datetime 5 | 6 | import easyutils 7 | import pandas as pd 8 | 9 | 10 | def use(export='csv', **kwargs): 11 | if export.lower() in ['csv']: 12 | return CSVStore(**kwargs) 13 | 14 | 15 | class Store: 16 | def load(self, stock_data): 17 | pass 18 | 19 | def write(self, stock_code, data): 20 | pass 21 | 22 | 23 | class CSVStore(Store): 24 | def __init__(self, path, dtype): 25 | if dtype.lower() in ['d']: 26 | self.path = os.path.join(path, 'day') 27 | self.result_path = os.path.join(self.path, 'data') 28 | self.raw_path = os.path.join(self.path, 'raw_data') 29 | 30 | def write(self, stock_code, updated_data): 31 | if not os.path.exists(self.result_path): 32 | os.makedirs(self.result_path) 33 | if not os.path.exists(self.raw_path): 34 | os.makedirs(self.raw_path) 35 | 36 | csv_file_path = os.path.join(self.raw_path, '{}.csv'.format(stock_code)) 37 | if os.path.exists(csv_file_path): 38 | try: 39 | his = pd.read_csv(csv_file_path) 40 | except ValueError: 41 | return 42 | 43 | updated_data_start_date = updated_data[0][0] 44 | old_his = his[his.date < updated_data_start_date] 45 | updated_his = pd.DataFrame(updated_data, columns=his.columns) 46 | his = old_his.append(updated_his) 47 | else: 48 | his = pd.DataFrame(updated_data, 49 | columns=['date', 'open', 'high', 'close', 'low', 'volume', 'amount', 'factor']) 50 | his.to_csv(csv_file_path, index=False) 51 | date = his.iloc[-1].date 52 | self.write_summary(stock_code, date) 53 | self.write_factor_his(stock_code, his) 54 | 55 | def get_his_stock_date(self, stock_code): 56 | summary_path = os.path.join(self.raw_path, '{}_summary.json'.format(stock_code)) 57 | with open(summary_path) as f: 58 | summary = json.load(f) 59 | latest_date = datetime.strptime(summary['date'], '%Y-%m-%d') 60 | return latest_date 61 | 62 | def write_summary(self, stock_code, date): 63 | file_path = os.path.join(self.raw_path, '{}_summary.json'.format(stock_code)) 64 | with open(file_path, 'w') as f: 65 | latest_day = datetime.strptime(date, '%Y-%m-%d') 66 | summary = dict( 67 | year=latest_day.year, 68 | month=latest_day.month, 69 | day=latest_day.day, 70 | date=date 71 | ) 72 | json.dump(summary, f) 73 | 74 | def write_factor_his(self, stock_code, his): 75 | result_file_path = os.path.join(self.result_path, '{}.csv'.format(stock_code)) 76 | factor_cols = his.columns.difference(['date']) 77 | his[factor_cols] = 
his[factor_cols] / his.factor.max()
78 |         his.to_csv(result_file_path, index=False)
79 | 
80 |     @property
81 |     def init_stock_codes(self):
82 |         stock_codes = easyutils.stock.get_all_stock_codes()
83 |         exists_codes = set()
84 |         if os.path.exists(self.raw_path):
85 |             code_slice = slice(-4)
86 |             exists_codes = {code[code_slice] for code in os.listdir(self.raw_path) if code.endswith('.csv')}
87 |         return set(stock_codes).difference(exists_codes)
88 | 
89 |     @property
90 |     def update_stock_codes(self):
91 |         code_slice = slice(6)
92 |         return [f[code_slice] for f in os.listdir(self.raw_path) if f.endswith('.json')]
93 | 
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | requests
2 | pandas
3 | easyutils
4 | 
5 | 
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | # coding:utf8
2 | from setuptools import setup
3 | 
4 | import easyhistory
5 | 
6 | long_desc = """
7 | easyhistory
8 | ===============
9 | 
10 | * easy to use rqalpha history data
11 | 
12 | Installation
13 | --------------
14 | 
15 |     pip install easyhistory
16 | 
17 | Upgrade
18 | ---------------
19 | 
20 |     pip install easyhistory --upgrade
21 | """
22 | 
23 | setup(
24 |     name='easyhistory',
25 |     version=easyhistory.__version__,
26 |     description='A utility for rqalpha history',
27 |     long_description=long_desc,
28 |     author='shidenggui',
29 |     author_email='longlyshidenggui@gmail.com',
30 |     license='BSD',
31 |     url='https://github.com/shidenggui/easyhistory',
32 |     keywords='China stock trade',
33 |     install_requires=[
34 |         'rqalpha',
35 |         'requests',
36 |         'six',
37 |         'easyutils',
38 |     ],
39 |     classifiers=['Development Status :: 4 - Beta',
40 |                  'Programming Language :: Python :: 2.6',
41 |                  'Programming Language :: Python :: 2.7',
42 |                  'Programming Language :: Python :: 3.2',
43 |                  'Programming Language :: Python :: 3.3',
44 |                  'Programming Language :: Python :: 3.4',
45 |                  'Programming Language :: Python :: 3.5',
46 |                  'License :: OSI Approved :: BSD License'],
47 |     packages=['easyhistory'],
48 |     package_data={},
49 | )
50 | 
--------------------------------------------------------------------------------
/test.py:
--------------------------------------------------------------------------------
1 | import easyhistory
2 | 
3 | easyhistory.update()
4 | 
5 | 
--------------------------------------------------------------------------------
/test_history.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from datetime import datetime
3 | 
4 | import easyhistory
5 | 
6 | 
7 | class TestHistory(unittest.TestCase):
8 |     def test_get_history(self):
9 |         test_date = '000001'
10 |         normal_data = [str(y) for y in range(1991, datetime.now().year + 1)]
11 |         res = easyhistory.Day().get_stock_time(test_date)
12 | 
13 |         self.assertListEqual(res, normal_data)
14 | 
15 |     def test_get_quarter_history(self):
16 |         test_data = ['000001', 2016, 1]
17 |         normal_data = [['2016-03-31', 1003.087, 1006.833, 996.53, 996.53, 41838792.0, 447266272.0, 93.659],
18 |                        ['2016-03-30', 981.545, 1002.15, 1002.15, 980.608, 53970000.0, 572627392.0, 93.659],
19 |                        ['2016-03-29', 984.354, 985.291, 976.862, 972.179, 31831788.0, 332422400.0, 93.659],
20 |                        ['2016-03-28', 994.657, 997.466, 981.544, 978.735, 35862100.0, 378973312.0, 93.659],
21 |                        ['2016-03-25', 984.354, 992.783, 991.847, 983.417, 23707048.0, 
250338496.0, 93.659], 22 | ['2016-03-24', 993.72, 995.593, 985.291, 983.417, 37240624.0, 393411552.0, 93.659], 23 | ['2016-03-23', 1003.085, 1008.705, 1002.149, 993.72, 43027816.0, 458963264.0, 93.659], 24 | ['2016-03-22', 1008.705, 1024.627, 1004.022, 1001.213, 62548248.0, 675406592.0, 93.659], 25 | ['2016-03-21', 988.1, 1017.134, 1011.515, 988.1, 92043280.0, 987764480.0, 93.659], 26 | ['2016-03-18', 975.924, 989.973, 987.163, 974.051, 79721584.0, 836565568.0, 93.659], 27 | ['2016-03-17', 970.305, 981.544, 975.925, 964.685, 61099640.0, 635181312.0, 93.659], 28 | ['2016-03-16', 961.876, 978.735, 969.369, 960.003, 66488620.0, 690087744.0, 93.659], 29 | ['2016-03-15', 962.813, 970.305, 966.559, 951.573, 41792036.0, 428786688.0, 93.659], 30 | ['2016-03-14', 956.256, 979.671, 960.939, 956.256, 65515824.0, 679161280.0, 93.659], 31 | ['2016-03-11', 945.953, 957.192, 951.573, 940.334, 38373672.0, 389638944.0, 93.659], 32 | ['2016-03-10', 959.066, 969.368, 950.637, 948.763, 47402032.0, 486414240.0, 93.659], 33 | ['2016-03-09', 949.7, 957.193, 952.51, 940.334, 32590064.0, 330124896.0, 93.659], 34 | ['2016-03-08', 970.305, 970.305, 962.812, 930.968, 64315648.0, 650471616.0, 93.659], 35 | ['2016-03-07', 969.369, 982.481, 968.432, 964.686, 60635296.0, 630155584.0, 93.659], 36 | ['2016-03-04', 945.017, 983.417, 974.051, 943.144, 138124912.0, 1429273088.0, 93.659], 37 | ['2016-03-03', 945.018, 953.447, 946.891, 940.335, 55308940.0, 559045184.0, 93.659], 38 | ['2016-03-02', 913.174, 948.764, 945.954, 910.364, 67661376.0, 673626240.0, 93.659], 39 | ['2016-03-01', 900.998, 915.047, 908.491, 897.252, 37791080.0, 365149024.0, 93.659], 40 | ['2016-02-29', 916.92, 918.794, 895.379, 882.267, 56689640.0, 542184000.0, 93.659], 41 | ['2016-02-26', 915.047, 920.666, 916.92, 904.744, 39215440.0, 382634656.0, 93.659], 42 | ['2016-02-25', 947.827, 948.764, 905.681, 899.124, 62207284.0, 615004736.0, 93.659], 43 | ['2016-02-24', 942.208, 950.637, 950.637, 937.525, 30010360.0, 302498016.0, 93.659], 44 | ['2016-02-23', 963.75, 963.75, 947.828, 941.272, 42587436.0, 432315296.0, 93.659], 45 | ['2016-02-22', 948.764, 965.623, 963.75, 942.208, 61773944.0, 630251520.0, 93.659], 46 | ['2016-02-19', 945.019, 949.701, 940.336, 935.653, 31889824.0, 320939648.0, 93.659], 47 | ['2016-02-18', 953.448, 957.194, 945.018, 945.018, 40617824.0, 412337568.0, 93.659], 48 | ['2016-02-17', 939.398, 957.194, 950.637, 935.652, 58516704.0, 590538944.0, 93.659], 49 | ['2016-02-16', 921.603, 939.398, 937.525, 920.667, 42838640.0, 427507776.0, 93.659], 50 | ['2016-02-15', 904.744, 922.54, 916.92, 903.808, 27849946.0, 271173376.0, 93.659], 51 | ['2016-02-05', 932.842, 933.779, 929.096, 928.159, 27089334.0, 269184384.0, 93.659], 52 | ['2016-02-04', 926.286, 936.589, 931.906, 925.35, 37309948.0, 370586176.0, 93.659], 53 | ['2016-02-03', 922.54, 926.286, 922.54, 915.047, 27457216.0, 269997824.0, 93.659], 54 | ['2016-02-02', 917.857, 939.398, 931.906, 915.984, 36910416.0, 367360512.0, 93.659], 55 | ['2016-02-01', 934.716, 937.525, 917.857, 912.237, 41773216.0, 412635648.0, 93.659], 56 | ['2016-01-29', 912.237, 944.081, 936.589, 907.554, 54443576.0, 540544448.0, 93.659], 57 | ['2016-01-28', 919.73, 926.286, 907.554, 903.808, 30254078.0, 296055328.0, 93.659], 58 | ['2016-01-27', 930.033, 934.716, 925.35, 899.125, 56903704.0, 558510656.0, 93.659], 59 | ['2016-01-26', 966.56, 966.56, 924.413, 923.477, 64790112.0, 653561600.0, 93.659], 60 | ['2016-01-25', 974.052, 977.799, 971.243, 967.496, 37643172.0, 390734880.0, 93.659], 61 | ['2016-01-22', 974.053, 
978.736, 974.053, 957.194, 46675216.0, 482984448.0, 93.659], 62 | ['2016-01-21', 981.545, 1006.833, 966.56, 966.56, 60614512.0, 638127872.0, 93.659], 63 | ['2016-01-20', 1002.151, 1011.516, 987.165, 977.799, 60375248.0, 640968960.0, 93.659], 64 | ['2016-01-19', 978.736, 1009.643, 1003.087, 974.989, 50110908.0, 532074688.0, 93.659], 65 | ['2016-01-18', 968.434, 989.039, 974.99, 964.687, 42104088.0, 439917824.0, 93.659], 66 | ['2016-01-15', 998.404, 1011.517, 979.673, 975.926, 44820216.0, 474908128.0, 93.659], 67 | ['2016-01-14', 991.849, 1011.517, 1008.707, 981.546, 66631456.0, 708534976.0, 93.659], 68 | ['2016-01-13', 1019.947, 1024.63, 1003.088, 1002.152, 39170948.0, 424371712.0, 93.659], 69 | ['2016-01-12', 1014.327, 1021.82, 1012.454, 996.532, 56164232.0, 605970816.0, 93.659], 70 | ['2016-01-11', 1030.249, 1037.742, 1007.771, 1000.278, 73201400.0, 800683648.0, 93.659], 71 | ['2016-01-08', 1049.918, 1057.41, 1041.488, 1020.883, 74752760.0, 831334528.0, 93.659], 72 | ['2016-01-07', 1068.65, 1068.65, 1024.63, 1021.82, 17476110.0, 194869488.0, 93.659], 73 | ['2016-01-06', 1069.587, 1082.699, 1079.889, 1066.777, 51570644.0, 591698496.0, 93.659], 74 | ['2016-01-05', 1055.537, 1083.635, 1067.713, 1044.298, 66326996.0, 755531328.0, 93.659], 75 | ['2016-01-04', 1123.909, 1126.718, 1061.157, 1051.791, 56349788.0, 660376128.0, 93.659]] 76 | res = easyhistory.Day().get_quarter_history(*test_data) 77 | self.assertListEqual(res, normal_data) 78 | 79 | def test_day_data_type_convert(self): 80 | test_data = ['2016-02-19', '945.019', '949.701', '940.336', '935.653', '31889824.000', '320939648.000', 81 | '93.659'] 82 | normal_data = ['2016-02-19', 945.019, 949.701, 940.336, 935.653, 31889824.000, 320939648.000, 93.659] 83 | easyhistory.Day().convert_stock_data_type(test_data) 84 | self.assertListEqual(test_data, normal_data) 85 | 86 | 87 | if __name__ == '__main__': 88 | unittest.main() 89 | --------------------------------------------------------------------------------